diff --git a/.claude/rules/architecture.md b/.claude/rules/architecture.md index 535e08d79..96eb3a43c 100644 --- a/.claude/rules/architecture.md +++ b/.claude/rules/architecture.md @@ -1,11 +1,12 @@ # Architecture +When adding, moving, or deleting source files, update this doc to match. + ``` codeflash/ ├── main.py # CLI entry point ├── cli_cmds/ # Command handling, console output (Rich) ├── discovery/ # Find optimizable functions -├── context/ # Extract code dependencies and imports ├── optimization/ # Generate optimized code via AI │ ├── optimizer.py # Main optimization orchestration │ └── function_optimizer.py # Per-function optimization logic @@ -15,7 +16,21 @@ codeflash/ ├── api/ # AI service communication ├── code_utils/ # Code parsing, git utilities ├── models/ # Pydantic models and types -├── languages/ # Multi-language support (Python, JavaScript/TypeScript) +├── languages/ # Multi-language support (Python, JavaScript/TypeScript, Java planned) +│ ├── base.py # LanguageSupport protocol and shared data types +│ ├── registry.py # Language registration and lookup by extension/enum +│ ├── current.py # Current language singleton (set_current_language / current_language_support) +│ ├── code_replacer.py # Language-agnostic code replacement +│ ├── python/ +│ │ ├── support.py # PythonSupport (LanguageSupport implementation) +│ │ ├── function_optimizer.py # PythonFunctionOptimizer subclass +│ │ ├── optimizer.py # Python module preparation & AST resolution +│ │ └── normalizer.py # Python code normalization for deduplication +│ └── javascript/ +│ ├── support.py # JavaScriptSupport (LanguageSupport implementation) +│ ├── function_optimizer.py # JavaScriptFunctionOptimizer subclass +│ ├── optimizer.py # JS project root finding & module preparation +│ └── normalizer.py # JS/TS code normalization for deduplication ├── setup/ # Config schema, auto-detection, first-run experience ├── picklepatch/ # Serialization/deserialization utilities ├── tracing/ # Function call tracing 
@@ -33,10 +48,36 @@ codeflash/ |------|------------| | CLI arguments & commands | `cli_cmds/cli.py` | | Optimization orchestration | `optimization/optimizer.py` → `run()` | -| Per-function optimization | `optimization/function_optimizer.py` | +| Per-function optimization | `optimization/function_optimizer.py` (base), `languages/python/function_optimizer.py`, `languages/javascript/function_optimizer.py` | | Function discovery | `discovery/functions_to_optimize.py` | -| Context extraction | `context/code_context_extractor.py` | -| Test execution | `verification/test_runner.py`, `verification/pytest_plugin.py` | +| Context extraction | `languages//context/code_context_extractor.py` | +| Test execution | `languages//support.py` (`run_behavioral_tests`, etc.), `verification/pytest_plugin.py` | | Performance ranking | `benchmarking/function_ranker.py` | | Domain types | `models/models.py`, `models/function_types.py` | | Result handling | `either.py` (`Result`, `Success`, `Failure`, `is_successful`) | + +## LanguageSupport Protocol Methods + +Core protocol in `languages/base.py`. Each language (`PythonSupport`, `JavaScriptSupport`) implements these. 
+ +| Category | Method/Property | Purpose | +|----------|----------------|---------| +| Identity | `language`, `file_extensions`, `default_file_extension` | Language identification | +| Identity | `comment_prefix`, `dir_excludes` | Language conventions | +| AI service | `default_language_version` | Language version for API payloads (`None` for Python, `"ES2022"` for JS) | +| AI service | `valid_test_frameworks` | Allowed test frameworks for validation | +| Discovery | `discover_functions`, `discover_tests` | Find optimizable functions and their tests | +| Discovery | `adjust_test_config_for_discovery` | Pre-discovery config adjustment (no-op default) | +| Context | `extract_code_context`, `find_helper_functions`, `find_references` | Code dependency extraction | +| Transform | `replace_function`, `format_code`, `normalize_code` | Code modification | +| Validation | `validate_syntax` | Syntax checking | +| Test execution | `run_behavioral_tests`, `run_benchmarking_tests`, `run_line_profile_tests` | Test runners | +| Test results | `test_result_serialization_format` | `"pickle"` (Python) or `"json"` (JS) | +| Test results | `load_coverage` | Load coverage from language-specific format | +| Test results | `compare_test_results` | Equivalence checking between original and candidate | +| Test gen | `postprocess_generated_tests` | Post-process `GeneratedTestsList` objects | +| Test gen | `process_generated_test_strings` | Instrument/transform raw generated test strings | +| Module | `detect_module_system` | Detect project module system (`None` for Python, `"esm"`/`"commonjs"` for JS) | +| Module | `prepare_module` | Parse/validate module before optimization | +| Setup | `setup_test_config` | One-time project setup after language detection | +| Optimizer | `function_optimizer_class` | Return `FunctionOptimizer` subclass for this language | diff --git a/.claude/rules/code-style.md b/.claude/rules/code-style.md index bcb8fd30b..6a2daef87 100644 --- 
a/.claude/rules/code-style.md +++ b/.claude/rules/code-style.md @@ -7,4 +7,5 @@ - **Comments**: Minimal - only explain "why", not "what" - **Docstrings**: Do not add unless explicitly requested - **Naming**: NEVER use leading underscores (`_function_name`) - Python has no true private functions, use public names -- **Paths**: Always use absolute paths, handle encoding explicitly (UTF-8) +- **Paths**: Always use absolute paths +- **Encoding**: Always pass `encoding="utf-8"` to `open()`, `read_text()`, `write_text()`, etc. in new or changed code — Windows defaults to `cp1252` which breaks on non-ASCII content. Don't flag pre-existing code that lacks it unless you're already modifying that line. diff --git a/.claude/rules/language-patterns.md b/.claude/rules/language-patterns.md index 8616eb478..34d61e605 100644 --- a/.claude/rules/language-patterns.md +++ b/.claude/rules/language-patterns.md @@ -9,4 +9,5 @@ paths: - Use `get_language_support(identifier)` from `languages/registry.py` to get a `LanguageSupport` instance — never import language classes directly - New language support classes must use the `@register_language` decorator to register with the extension and language registries - `languages/__init__.py` uses `__getattr__` for lazy imports to avoid circular dependencies — follow this pattern when adding new exports -- `is_javascript()` returns `True` for both JavaScript and TypeScript +- Prefer `LanguageSupport` protocol dispatch over `is_python()`/`is_javascript()` guards — remaining guards are being migrated to protocol methods +- `is_javascript()` returns `True` for both JavaScript and TypeScript (still used in ~15 call sites pending migration) diff --git a/.claude/rules/optimization-patterns.md b/.claude/rules/optimization-patterns.md index f677d48de..7a1e90dea 100644 --- a/.claude/rules/optimization-patterns.md +++ b/.claude/rules/optimization-patterns.md @@ -3,7 +3,7 @@ paths: - "codeflash/optimization/**/*.py" - "codeflash/verification/**/*.py" - 
"codeflash/benchmarking/**/*.py" - - "codeflash/context/**/*.py" + - "codeflash/languages/*/context/**/*.py" --- # Optimization Pipeline Patterns diff --git a/.github/workflows/claude.yml b/.github/workflows/claude.yml index 6b17da886..a8c84e8bc 100644 --- a/.github/workflows/claude.yml +++ b/.github/workflows/claude.yml @@ -56,130 +56,116 @@ jobs: use_sticky_comment: true allowed_bots: "claude[bot],codeflash-ai[bot]" prompt: | - REPO: ${{ github.repository }} - PR NUMBER: ${{ github.event.pull_request.number }} - EVENT: ${{ github.event.action }} + + repo: ${{ github.repository }} + pr_number: ${{ github.event.pull_request.number }} + event: ${{ github.event.action }} + is_re_review: ${{ github.event.action == 'synchronize' }} + - ## STEP 1: Run prek and mypy checks, fix issues + + Execute these steps in order. If a step has no work, state that and continue to the next step. + Post all review findings in a single summary comment only — never as inline PR review comments. + - First, run these checks on files changed in this PR: - 1. `uv run prek run --from-ref origin/main` - linting/formatting issues - 2. `uv run mypy ` - type checking issues + + Run checks on files changed in this PR and auto-fix what you can. - If there are prek issues: - - For SAFE auto-fixable issues (formatting, import sorting, trailing whitespace, etc.), run `uv run prek run --from-ref origin/main` again to auto-fix them - - For issues that prek cannot auto-fix, do NOT attempt to fix them manually — report them as remaining issues in your summary + 1. Run `uv run prek run --from-ref origin/main` to check linting/formatting. + If there are auto-fixable issues, run it again to fix them. + Report any issues prek cannot auto-fix in your summary. - If there are mypy issues: - - Fix type annotation issues (missing return types, Optional/None unions, import errors for type hints, incorrect types) - - Do NOT add `type: ignore` comments - always fix the root cause + 2. 
Run `uv run mypy ` to check types. + Fix type annotation issues (missing return types, Optional unions, import errors). + Always fix the root cause instead of adding `type: ignore` comments. + Leave alone: type errors requiring logic changes, complex generics, anything changing runtime behavior. - After fixing issues: - - Stage the fixed files with `git add` - - Commit with message "style: auto-fix linting issues" or "fix: resolve mypy type errors" as appropriate - - Push the changes with `git push` + 3. After fixes: stage with `git add`, commit ("style: auto-fix linting issues" or "fix: resolve mypy type errors"), push. - IMPORTANT - Verification after fixing: - - After committing fixes, run `uv run prek run --from-ref origin/main` ONE MORE TIME to verify all issues are resolved - - If errors remain, either fix them or report them honestly as unfixed in your summary - - NEVER claim issues are fixed without verifying. If you cannot fix an issue, say so + 4. Verify by running `uv run prek run --from-ref origin/main` one more time. Report honestly if issues remain. + - Do NOT attempt to fix: - - Type errors that require logic changes or refactoring - - Complex generic type issues - - Anything that could change runtime behavior + + Before reviewing, resolve any stale review threads from previous runs. - ## STEP 2: Review the PR + 1. Fetch unresolved threads you created: + `gh api graphql -f query='{ repository(owner: "${{ github.repository_owner }}", name: "${{ github.event.repository.name }}") { pullRequest(number: ${{ github.event.pull_request.number }}) { reviewThreads(first: 100) { nodes { id isResolved path comments(first: 1) { nodes { body author { login } } } } } } } }' --jq '.data.repository.pullRequest.reviewThreads.nodes[] | select(.isResolved == false) | select(.comments.nodes[0].author.login == "claude") | {id: .id, path: .path, body: .comments.nodes[0].body}'` - ${{ github.event.action == 'synchronize' && 'This is a RE-REVIEW after new commits. 
First, get the list of changed files in this latest push using `gh pr diff`. Review ONLY the changed files. Check ALL existing review comments and resolve ones that are now fixed.' || 'This is the INITIAL REVIEW.' }} + 2. For each unresolved thread: + a. Read the file at that path to check if the issue still exists + b. If fixed → resolve it: `gh api graphql -f query='mutation { resolveReviewThread(input: {threadId: ""}) { thread { isResolved } } }'` + c. If still present → leave it - Review this PR focusing ONLY on: - 1. Critical bugs or logic errors + Read the actual code before deciding. If there are no unresolved threads, skip to the next step. + + + + Review the diff (`gh pr diff ${{ github.event.pull_request.number }}`) for: + 1. Bugs that will crash at runtime 2. Security vulnerabilities 3. Breaking API changes - 4. Test failures (methods with typos that wont run) - - IMPORTANT: - - First check existing review comments using `gh api repos/${{ github.repository }}/pulls/${{ github.event.pull_request.number }}/comments`. For each existing comment, check if the issue still exists in the current code. - - If an issue is fixed, use `gh api --method PATCH repos/${{ github.repository }}/pulls/comments/COMMENT_ID -f body="✅ Fixed in latest commit"` to resolve it. - - Only create NEW inline comments for HIGH-PRIORITY issues found in changed files. - - Limit to 5-7 NEW comments maximum per review. - - Use CLAUDE.md for project-specific guidance. - - Use `mcp__github_inline_comment__create_inline_comment` sparingly for critical code issues only. - ## STEP 3: Coverage analysis + Ignore style issues, type hints, and log message wording. + Record findings for the summary comment. Refer to CLAUDE.md for project conventions. + + Analyze test coverage for changed files: - 1. Get the list of Python files changed in this PR (excluding tests): - `git diff --name-only origin/main...HEAD -- '*.py' | grep -v test` - - 2. 
Run tests with coverage on the PR branch: - `uv run coverage run -m pytest tests/ -q --tb=no` - `uv run coverage json -o coverage-pr.json` - - 3. Get coverage for changed files only: - `uv run coverage report --include=""` - - 4. Compare with main branch coverage: - - Checkout main: `git checkout origin/main` - - Run coverage: `uv run coverage run -m pytest tests/ -q --tb=no && uv run coverage json -o coverage-main.json` - - Checkout back: `git checkout -` + 1. Get changed Python files (excluding tests): `git diff --name-only origin/main...HEAD -- '*.py' | grep -v test` + 2. Run coverage on PR branch: `uv run coverage run -m pytest tests/ -q --tb=no` then `uv run coverage json -o coverage-pr.json` + 3. Get per-file coverage: `uv run coverage report --include=""` + 4. Compare with main: checkout main, run coverage, checkout back + 5. Flag: new files below 75%, decreased coverage, untested changed lines + - 5. Analyze the diff to identify: - - NEW FILES: Files that don't exist on main (require good test coverage) - - MODIFIED FILES: Files with changes (changes must be covered by tests) + + Post exactly one summary comment containing all results from previous steps. - 6. Report in PR comment with a markdown table: - - Coverage % for each changed file (PR vs main) - - Overall coverage change - - For NEW files: Flag if coverage is below 75% - - For MODIFIED files: Flag if the changed lines are not covered by tests - - Flag if overall coverage decreased - - Coverage requirements: - - New implementations/files: Must have ≥75% test coverage - - Modified code: Changed lines should be exercised by existing or new tests - - No coverage regressions: Overall coverage should not decrease - - ## STEP 4: Post ONE consolidated summary comment - - CRITICAL: You must post exactly ONE summary comment containing ALL results (pre-commit, review, coverage). - DO NOT post multiple separate comments. 
Use this format: + To ensure one comment: find an existing claude[bot] comment and update it, or create one if none exists. + Delete any duplicate claude[bot] comments. ``` - ## PR Review Summary + gh api repos/${{ github.repository }}/issues/${{ github.event.pull_request.number }}/comments --jq '.[] | select(.user.login == "claude[bot]") | .id' | head -1 + ``` + Format: + ## PR Review Summary ### Prek Checks - [status and any fixes made] - ### Code Review - [critical issues found, if any] - ### Test Coverage - [coverage table and analysis] - --- *Last updated: * - ``` - - To ensure only ONE comment exists: - 1. Find existing claude[bot] comment: `gh api repos/${{ github.repository }}/issues/${{ github.event.pull_request.number }}/comments --jq '.[] | select(.user.login == "claude[bot]") | .id' | head -1` - 2. If found, UPDATE it: `gh api --method PATCH repos/${{ github.repository }}/issues/comments/ -f body=""` - 3. If not found, CREATE: `gh pr comment ${{ github.event.pull_request.number }} --body ""` - 4. Delete any OTHER claude[bot] comments to clean up duplicates: `gh api repos/${{ github.repository }}/issues/${{ github.event.pull_request.number }}/comments --jq '.[] | select(.user.login == "claude[bot]") | .id' | tail -n +2 | xargs -I {} gh api --method DELETE repos/${{ github.repository }}/issues/comments/{}` - - ## STEP 5: Merge pending codeflash optimization PRs - - Check for open optimization PRs from codeflash and merge if CI passes: - - 1. List open PRs from codeflash bot: - `gh pr list --author "codeflash-ai[bot]" --state open --json number,title,headRefName` - - 2. 
For each optimization PR: - - Check if CI is passing: `gh pr checks ` - - If all checks pass, merge it: `gh pr merge --squash --delete-branch` - claude_args: '--model us.anthropic.claude-opus-4-6-v1 --allowedTools "mcp__github_inline_comment__create_inline_comment,Bash(gh pr comment:*),Bash(gh pr diff:*),Bash(gh pr view:*),Bash(gh pr list:*),Bash(gh pr checks:*),Bash(gh pr merge:*),Bash(gh issue view:*),Bash(gh issue list:*),Bash(gh api:*),Bash(uv run prek *),Bash(uv run mypy *),Bash(uv run coverage *),Bash(uv run pytest *),Bash(git status*),Bash(git add *),Bash(git commit *),Bash(git push*),Bash(git diff *),Bash(git checkout *),Read,Glob,Grep,Edit"' + + + + Run /simplify to review recently changed code for reuse, quality, and efficiency opportunities. + If improvements are found, commit with "refactor: simplify " and push. + Only make behavior-preserving changes. + + + + Check for open PRs from codeflash-ai[bot]: + `gh pr list --author "codeflash-ai[bot]" --state open --json number,title,headRefName,createdAt,mergeable` + + For each PR: + - If CI passes and the PR is mergeable → merge with `--squash --delete-branch` + - Close the PR as stale if ANY of these apply: + - Older than 7 days + - Has merge conflicts (mergeable state is "CONFLICTING") + - CI is failing + - The optimized function no longer exists in the target file (check the diff) + Close with: `gh pr close --comment "Closing stale optimization PR." 
--delete-branch` + + + + Before finishing, confirm: + - All steps were attempted (even if some had no work) + - Stale review threads were checked and resolved where appropriate + - All findings are in a single summary comment (no inline review comments were created) + - If fixes were made, they were verified with prek + + claude_args: '--model us.anthropic.claude-opus-4-6-v1 --allowedTools "Bash(gh pr comment:*),Bash(gh pr diff:*),Bash(gh pr view:*),Bash(gh pr list:*),Bash(gh pr checks:*),Bash(gh pr merge:*),Bash(gh issue view:*),Bash(gh issue list:*),Bash(gh api:*),Bash(uv run prek *),Bash(uv run mypy *),Bash(uv run coverage *),Bash(uv run pytest *),Bash(git status*),Bash(git add *),Bash(git commit *),Bash(git push*),Bash(git diff *),Bash(git checkout *),Read,Glob,Grep,Edit,Skill"' additional_permissions: | actions: read diff --git a/.github/workflows/duplicate-code-detector.yml b/.github/workflows/duplicate-code-detector.yml index 83896d1ea..0d12e8055 100644 --- a/.github/workflows/duplicate-code-detector.yml +++ b/.github/workflows/duplicate-code-detector.yml @@ -21,96 +21,99 @@ jobs: fetch-depth: 0 ref: ${{ github.event.pull_request.head.ref || github.ref }} - - name: Start Serena MCP server - run: | - docker pull ghcr.io/github/serena-mcp-server:latest - docker run -d --name serena \ - --network host \ - -v "${{ github.workspace }}:${{ github.workspace }}:rw" \ - ghcr.io/github/serena-mcp-server:latest \ - serena start-mcp-server --context codex --project "${{ github.workspace }}" - - mkdir -p /tmp/mcp-config - cat > /tmp/mcp-config/mcp-servers.json << 'EOF' - { - "mcpServers": { - "serena": { - "command": "docker", - "args": ["exec", "-i", "serena", "serena", "start-mcp-server", "--context", "codex", "--project", "${{ github.workspace }}"] - } - } - } - EOF - - name: Configure AWS Credentials uses: aws-actions/configure-aws-credentials@v4 with: role-to-assume: ${{ secrets.AWS_ROLE_TO_ASSUME }} aws-region: ${{ secrets.AWS_REGION }} + - name: Get changed source 
files + id: changed-files + run: | + FILES=$(git diff --name-only origin/main...HEAD -- '*.py' '*.js' '*.ts' '*.java' \ + | grep -v -E '(test_|_test\.(py|js|ts)|\.test\.(js|ts)|\.spec\.(js|ts)|conftest\.py|/tests/|/test/|/__tests__/)' \ + | grep -v -E '^(\.github/|code_to_optimize/|\.tessl/|node_modules/)' \ + || true) + if [ -z "$FILES" ]; then + echo "files=" >> "$GITHUB_OUTPUT" + echo "No changed source files to analyze." + else + echo "files<> "$GITHUB_OUTPUT" + echo "$FILES" >> "$GITHUB_OUTPUT" + echo "EOF" >> "$GITHUB_OUTPUT" + echo "Changed files:" + echo "$FILES" + fi + - name: Run Claude Code + if: steps.changed-files.outputs.files != '' uses: anthropics/claude-code-action@v1 with: use_bedrock: "true" use_sticky_comment: true allowed_bots: "claude[bot],codeflash-ai[bot]" - claude_args: '--mcp-config /tmp/mcp-config/mcp-servers.json --allowedTools "Read,Glob,Grep,Bash(git diff:*),Bash(git log:*),Bash(git show:*),Bash(wc *),Bash(find *),mcp__serena__*"' + claude_args: '--allowedTools "Read,Glob,Grep,Bash(git diff:*),Bash(git log:*),Bash(git show:*),Bash(wc *),Bash(gh pr comment:*)"' prompt: | - You are a duplicate code detector with access to Serena semantic code analysis. + REPO: ${{ github.repository }} + PR NUMBER: ${{ github.event.pull_request.number }} + + You are a duplicate code detector for a multi-language codebase (Python, JavaScript, TypeScript, Java). Check whether this PR introduces code that duplicates logic already present elsewhere in the repository — including across languages. Focus on finding true duplicates, not just similar-looking code. - ## Setup + ## Changed files - First activate the project in Serena: - - Use `mcp__serena__activate_project` with the workspace path `${{ github.workspace }}` + ``` + ${{ steps.changed-files.outputs.files }} + ``` ## Steps - 1. Get the list of changed .py files (excluding tests): - `git diff --name-only origin/main...HEAD -- '*.py' | grep -v -E '(test_|_test\.py|/tests/|/test/)'` - - 2. 
Use Serena's semantic analysis on changed files: - - `mcp__serena__get_symbols_overview` to understand file structure - - `mcp__serena__find_symbol` to search for similarly named symbols across the codebase - - `mcp__serena__find_referencing_symbols` to understand usage patterns - - `mcp__serena__search_for_pattern` to find similar code patterns - - 3. For each changed file, look for: - - **Exact Duplication**: Identical code blocks (>10 lines) in multiple locations - - **Structural Duplication**: Same logic with minor variations (different variable names) - - **Functional Duplication**: Different implementations of the same functionality - - **Copy-Paste Programming**: Similar blocks that could be extracted into shared utilities - - 4. Cross-reference against the rest of the codebase using Serena: - - Search for similar function signatures and logic patterns - - Check if new code duplicates existing utilities or helpers - - Look for repeated patterns across modules - - ## What to Report - - - Identical or nearly identical functions in different files - - Repeated code blocks that could be extracted to utilities - - Similar classes or modules with overlapping functionality - - Copy-pasted code with minor modifications - - Duplicated business logic across components - - ## What to Skip - - - Standard boilerplate (imports, __init__, etc.) - - Test setup/teardown code - - Configuration with similar structure - - Language-specific patterns (constructors, getters/setters) - - Small snippets (<5 lines) unless highly repetitive - - Workflow files under .github/ - - ## Output - - Post a single PR comment with your findings. For each pattern found: - - Severity (High/Medium/Low) - - File locations with line numbers - - Code samples showing the duplication - - Concrete refactoring suggestion - - If no significant duplication is found, say so briefly. Do not create issues — just comment on the PR. 
- - name: Stop Serena - if: always() - run: docker stop serena && docker rm serena || true + 1. **Read changed files.** For each file above, read it and identify functions or methods that were added or substantially modified (longer than 5 lines). + + 2. **Search for duplicates.** For each function, use Grep to search the codebase for: + - The same function name defined elsewhere (`def function_name` for Python, `function function_name` / `const function_name` / `module.exports` for the JS files under `packages/`) + - 2-3 distinctive operations from the body (specific API calls, algorithm patterns, string literals, exception types) — this catches duplicates that have different names but implement the same logic + + 3. **Cross-module check.** This codebase has parallel Python modules under `languages/python/`, `languages/javascript/`, and `languages/java/` that handle the same concerns (parsing, code replacement, test running, etc.) for different target languages. It also has a JS runtime under `packages/codeflash/runtime/` and a Java runtime under `codeflash-java-runtime/`. When a changed file is under one of these areas, also search the others for equivalent logic. For example: + - `languages/javascript/code_replacer.py` and `languages/python/static_analysis/code_replacer.py` both handle code replacement — shared logic should be extracted + - Shared concepts (AST traversal, scope analysis, import resolution, test running) are prime candidates for duplication across these modules + + 4. **Compare candidates.** When a Grep hit looks promising (not just a shared import or call site), read the full function and compare semantics. Flag it only if it matches one of these patterns: + - **Same function in two modules** — a function with the same or very similar body exists in another module. One should import from the other instead (within the same language). + - **Shared logic across sibling files** — the same helper logic repeated in files within the same package. 
Should be extracted to a common module. + - **Repeated pattern across classes** — multiple classes implement the same logic inline (e.g., identical traversal, identical validation). Should be a mixin or shared helper. + - **Cross-module reimplementation** — the same algorithm or utility implemented in both `languages/python/` and `languages/javascript/` (both are Python) or between Python orchestration code and JS runtime code in `packages/`. Note: some duplication is unavoidable (each target language needs its own parser, for example). Only flag cases where the logic is genuinely shared or where one module could import from the other. + + 5. **Report findings.** Post a single PR comment. Report at most 5 findings. + + **If duplicates found**, for each one: + - **Confidence**: HIGH (identical or near-identical logic) / MEDIUM (same intent, minor differences worth reviewing) + - **Locations**: `file_path:line_number` for both the new and existing code + - **What's duplicated**: One sentence describing the shared logic + - **Suggestion**: How to consolidate — import from canonical location, extract to shared module, create a mixin. For cross-module duplicates (between language directories or Python↔JS runtime), just flag it for a tech lead to review rather than prescribing a specific fix. + + **If no duplicates found**, post a comment that just says "No duplicates detected." so the sticky comment gets updated. + + ## Examples (illustrative — these are past cases, some already resolved) + + **IS a duplicate (HIGH):** A 12-line `is_build_output_dir()` function was defined identically in two modules (`setup/detector.py` and `code_utils/config_js.py`). Fix: delete one, import from the other. + + **IS a duplicate (MEDIUM):** `is_assignment_used()` was implemented separately in two context files with the same logic. Fix: move to a shared module, import from both call sites. 
+ + **IS a duplicate (MEDIUM, cross-module):** `normalize_path()` implemented in both `languages/python/support.py` and `languages/javascript/support.py` with identical logic. Flagging for tech lead review — should likely be extracted to `languages/base.py` or a shared utility. + + **NOT a duplicate:** Two classes each define a `visit()` method that traverses an AST, but they handle different node types and produce different outputs. This is intentional polymorphism. + + **NOT a duplicate (cross-module):** `languages/python/static_analysis/code_extractor.py` and `languages/javascript/parse.py` both extract functions from source code, but they use fundamentally different parsing strategies (Python AST vs tree-sitter). The logic is necessarily different. + + ## DO NOT report + + - Standard boilerplate (`__init__`, `__repr__`, `__str__`, `__eq__`, simple property accessors, constructors) + - Functions under 5 lines + - Config/setup code that naturally has similar structure + - Intentional polymorphism (same method name, genuinely different behavior) + - Test files, conftest files, spec files + - Import statements and logging setup + - Files under `.github/`, `code_to_optimize/`, `.tessl/` + - Code across language modules that must differ due to target-language semantics (parsers, AST node types, runtime-specific APIs) + + Do NOT create issues or edit any files. Only post a PR comment. diff --git a/CLAUDE.md b/CLAUDE.md index c4628e91a..041dd7c74 100644 --- a/CLAUDE.md +++ b/CLAUDE.md @@ -2,7 +2,7 @@ ## Project Overview -CodeFlash is an AI-powered Python code optimizer that automatically improves code performance while maintaining correctness. It uses LLMs to generate optimization candidates, verifies correctness through test execution, and benchmarks performance improvements. +CodeFlash is an AI-powered code optimizer that automatically improves performance while maintaining correctness. It supports Python, JavaScript, and TypeScript, with more languages planned. 
It uses LLMs to generate optimization candidates, verifies correctness through test execution, and benchmarks performance improvements. ## Optimization Pipeline @@ -12,7 +12,7 @@ Discovery → Ranking → Context Extraction → Test Gen + Optimization → Bas 1. **Discovery** (`discovery/`): Find optimizable functions across the codebase 2. **Ranking** (`benchmarking/function_ranker.py`): Rank functions by addressable time using trace data -3. **Context** (`context/`): Extract code dependencies (read-writable code + read-only imports) +3. **Context** (`languages//context/`): Extract code dependencies (read-writable code + read-only imports) 4. **Optimization** (`optimization/`, `api/`): Generate candidates via AI service, run in parallel with test generation 5. **Verification** (`verification/`): Run candidates against tests, compare outputs via custom pytest plugin 6. **Benchmarking** (`benchmarking/`): Measure performance, select best candidate by speedup diff --git a/codeflash/api/aiservice.py b/codeflash/api/aiservice.py index cc59aadfb..ace67680b 100644 --- a/codeflash/api/aiservice.py +++ b/codeflash/api/aiservice.py @@ -14,7 +14,8 @@ from codeflash.code_utils.env_utils import get_codeflash_api_key from codeflash.code_utils.git_utils import get_last_commit_author_if_pr_exists, get_repo_owner_and_name from codeflash.code_utils.time_utils import humanize_runtime -from codeflash.languages import is_java, is_javascript, is_python +from codeflash.languages import Language, current_language +from codeflash.languages.current import current_language_support from codeflash.models.ExperimentMetadata import ExperimentMetadata from codeflash.models.models import ( AIServiceRefinerRequest, @@ -51,6 +52,18 @@ def get_next_sequence(self) -> int: """Get the next LLM call sequence number.""" return next(self.llm_call_counter) + @staticmethod + def add_language_metadata( + payload: dict[str, Any], language_version: str | None = None, module_system: str | None = None + ) -> None: + 
"""Add language version and module system metadata to an API payload.""" + payload["python_version"] = platform.python_version() + default_lang_version = current_language_support().default_language_version + if default_lang_version is not None: + payload["language_version"] = language_version or default_lang_version + if module_system: + payload["module_system"] = module_system + def get_aiservice_base_url(self) -> str: if os.environ.get("CODEFLASH_AIS_SERVER", default="prod").lower() == "local": logger.info("Using local AI Service at http://localhost:8000") @@ -177,18 +190,7 @@ def optimize_code( "is_numerical_code": is_numerical_code, } - # Add language-specific version fields - # Always include python_version for backward compatibility with older backend - payload["python_version"] = platform.python_version() - if is_python(): - pass # python_version already set - elif is_java(): - payload["language_version"] = language_version or "17" # Default Java version - else: - payload["language_version"] = language_version or "ES2022" - # Add module system for JavaScript/TypeScript (esm or commonjs) - if module_system: - payload["module_system"] = module_system + self.add_language_metadata(payload, language_version, module_system) # DEBUG: Print payload language field logger.debug( @@ -434,14 +436,7 @@ def optimize_code_refinement(self, request: list[AIServiceRefinerRequest]) -> li "language": opt.language, } - # Add language version - always include python_version for backward compatibility - item["python_version"] = platform.python_version() - if is_python(): - pass # python_version already set - elif opt.language_version: - item["language_version"] = opt.language_version - else: - item["language_version"] = "ES2022" # Default for JS/TS + self.add_language_metadata(item, opt.language_version) # Add multi-file context if provided if opt.additional_context_files: @@ -754,21 +749,11 @@ def generate_regression_tests( """ # Validate test framework based on language - 
python_frameworks = ["pytest", "unittest"] - javascript_frameworks = ["jest", "mocha", "vitest"] - java_frameworks = ["junit5", "junit4", "testng"] - if is_python(): - assert test_framework in python_frameworks, ( - f"Invalid test framework for Python, got {test_framework} but expected one of {python_frameworks}" - ) - elif is_javascript(): - assert test_framework in javascript_frameworks, ( - f"Invalid test framework for JavaScript, got {test_framework} but expected one of {javascript_frameworks}" - ) - elif is_java(): - assert test_framework in java_frameworks, ( - f"Invalid test framework for Java, got {test_framework} but expected one of {java_frameworks}" - ) + lang_support = current_language_support() + valid_frameworks = lang_support.valid_test_frameworks + assert test_framework in valid_frameworks, ( + f"Invalid test framework for {current_language()}, got {test_framework} but expected one of {list(valid_frameworks)}" + ) payload: dict[str, Any] = { "source_code_being_tested": source_code_being_tested, @@ -787,18 +772,7 @@ def generate_regression_tests( "is_numerical_code": is_numerical_code, } - # Add language-specific version fields - # Always include python_version for backward compatibility with older backend - payload["python_version"] = platform.python_version() - if is_python(): - pass # python_version already set - elif is_java(): - payload["language_version"] = language_version or "17" # Default Java version - else: - payload["language_version"] = language_version or "ES2022" - # Add module system for JavaScript/TypeScript (esm or commonjs) - if module_system: - payload["module_system"] = module_system + self.add_language_metadata(payload, language_version, module_system) # DEBUG: Print payload language field logger.debug(f"Sending testgen request with language='{payload['language']}', framework='{test_framework}'") @@ -884,7 +858,7 @@ def get_optimization_review( "codeflash_version": codeflash_version, "calling_fn_details": calling_fn_details, 
"language": language, - "python_version": platform.python_version() if is_python() else None, + "python_version": platform.python_version() if current_language() == Language.PYTHON else None, "call_sequence": self.get_next_sequence(), } console.rule() diff --git a/codeflash/cli_cmds/cli.py b/codeflash/cli_cmds/cli.py index 630311347..8cd205dab 100644 --- a/codeflash/cli_cmds/cli.py +++ b/codeflash/cli_cmds/cli.py @@ -130,9 +130,18 @@ def parse_args() -> Namespace: "--reset-config", action="store_true", help="Remove codeflash configuration from project config file." ) parser.add_argument("-y", "--yes", action="store_true", help="Skip confirmation prompts (useful for CI/scripts).") + parser.add_argument( + "--subagent", + action="store_true", + help="Subagent mode: skip all interactive prompts with sensible defaults. Designed for AI agent integrations.", + ) args, unknown_args = parser.parse_known_args() sys.argv[:] = [sys.argv[0], *unknown_args] + if args.subagent: + args.yes = True + args.no_pr = True + args.worktree = True return process_and_validate_cmd_args(args) @@ -237,7 +246,18 @@ def process_pyproject_config(args: Namespace) -> Namespace: set_current_test_framework(pyproject_config["test_framework"]) if args.tests_root is None: - if is_js_ts_project: + if is_java_project: + # Try standard Maven/Gradle test directories + for test_dir in ["src/test/java", "test", "tests"]: + test_path = Path(args.module_root).parent / test_dir if "/" in test_dir else Path(test_dir) + if not test_path.is_absolute(): + test_path = Path.cwd() / test_path + if test_path.is_dir(): + args.tests_root = str(test_path) + break + if args.tests_root is None: + args.tests_root = str(Path.cwd() / "src" / "test" / "java") + elif is_js_ts_project: # Try common JS test directories at project root first for test_dir in ["test", "tests", "__tests__"]: if Path(test_dir).is_dir(): @@ -256,17 +276,6 @@ def process_pyproject_config(args: Namespace) -> Namespace: # In such cases, the user should 
explicitly configure testsRoot in package.json if args.tests_root is None: args.tests_root = args.module_root - elif is_java_project: - # Try standard Maven/Gradle test directories - for test_dir in ["src/test/java", "test", "tests"]: - test_path = Path(args.module_root).parent / test_dir if "/" in test_dir else Path(test_dir) - if not test_path.is_absolute(): - test_path = Path.cwd() / test_path - if test_path.is_dir(): - args.tests_root = str(test_path) - break - if args.tests_root is None: - args.tests_root = str(Path.cwd() / "src" / "test" / "java") else: raise AssertionError("--tests-root must be specified") assert Path(args.tests_root).is_dir(), f"--tests-root {args.tests_root} must be a valid directory" @@ -327,7 +336,6 @@ def project_root_from_module_root(module_root: Path, pyproject_file_path: Path) return current.resolve() if (current / "build.gradle").exists() or (current / "build.gradle.kts").exists(): return current.resolve() - # Check for config file (pyproject.toml for Python, codeflash.toml for other languages) if (current / "codeflash.toml").exists(): return current.resolve() current = current.parent @@ -378,32 +386,52 @@ def _handle_show_config() -> None: from codeflash.setup.detector import detect_project, has_existing_config project_root = Path.cwd() - detected = detect_project(project_root) + config_exists, _ = has_existing_config(project_root) - # Check if config exists or is auto-detected - config_exists, config_file = has_existing_config(project_root) - status = "Saved config" if config_exists else "Auto-detected (not saved)" + if config_exists: + from codeflash.code_utils.config_parser import parse_config_file - console.print() - console.print(f"[bold]Codeflash Configuration[/bold] ({status})") - if config_exists and config_file: - console.print(f"[dim]Config file: {project_root / config_file}[/dim]") - console.print() + config, config_file_path = parse_config_file() + status = "Saved config" - table = Table(show_header=True, 
header_style="bold cyan") - table.add_column("Setting", style="dim") - table.add_column("Value") - - table.add_row("Language", detected.language) - table.add_row("Project root", str(detected.project_root)) - table.add_row("Module root", str(detected.module_root)) - table.add_row("Tests root", str(detected.tests_root) if detected.tests_root else "(not detected)") - table.add_row("Test runner", detected.test_runner or "(not detected)") - table.add_row("Formatter", ", ".join(detected.formatter_cmds) if detected.formatter_cmds else "(not detected)") - table.add_row( - "Ignore paths", ", ".join(str(p) for p in detected.ignore_paths) if detected.ignore_paths else "(none)" - ) - table.add_row("Confidence", f"{detected.confidence:.0%}") + console.print() + console.print(f"[bold]Codeflash Configuration[/bold] ({status})") + console.print(f"[dim]Config file: {config_file_path}[/dim]") + console.print() + + table = Table(show_header=True, header_style="bold cyan") + table.add_column("Setting", style="dim") + table.add_column("Value") + + table.add_row("Project root", str(project_root)) + table.add_row("Module root", config.get("module_root", "(not set)")) + table.add_row("Tests root", config.get("tests_root", "(not set)")) + table.add_row("Test runner", config.get("test_framework", config.get("pytest_cmd", "(not set)"))) + table.add_row("Formatter", ", ".join(config["formatter_cmds"]) if config.get("formatter_cmds") else "(not set)") + ignore_paths = config.get("ignore_paths", []) + table.add_row("Ignore paths", ", ".join(str(p) for p in ignore_paths) if ignore_paths else "(none)") + else: + detected = detect_project(project_root) + status = "Auto-detected (not saved)" + + console.print() + console.print(f"[bold]Codeflash Configuration[/bold] ({status})") + console.print() + + table = Table(show_header=True, header_style="bold cyan") + table.add_column("Setting", style="dim") + table.add_column("Value") + + table.add_row("Language", detected.language) + table.add_row("Project 
root", str(detected.project_root)) + table.add_row("Module root", str(detected.module_root)) + table.add_row("Tests root", str(detected.tests_root) if detected.tests_root else "(not detected)") + table.add_row("Test runner", detected.test_runner or "(not detected)") + table.add_row("Formatter", ", ".join(detected.formatter_cmds) if detected.formatter_cmds else "(not detected)") + table.add_row( + "Ignore paths", ", ".join(str(p) for p in detected.ignore_paths) if detected.ignore_paths else "(none)" + ) + table.add_row("Confidence", f"{detected.confidence:.0%}") console.print(table) console.print() @@ -436,7 +464,7 @@ def _handle_reset_config(confirm: bool = True) -> None: console.print("[bold]This will remove Codeflash configuration from your project.[/bold]") console.print() - config_file = {"python": "pyproject.toml", "java": "codeflash.toml"}.get(detected.language, "package.json") + config_file = "pyproject.toml" if detected.language == "python" else "package.json" console.print(f" Config file: {project_root / config_file}") console.print() diff --git a/codeflash/cli_cmds/console.py b/codeflash/cli_cmds/console.py index 5ff215057..8b64cee18 100644 --- a/codeflash/cli_cmds/console.py +++ b/codeflash/cli_cmds/console.py @@ -22,7 +22,7 @@ from codeflash.cli_cmds.console_constants import SPINNER_TYPES from codeflash.cli_cmds.logging_config import BARE_LOGGING_FORMAT -from codeflash.lsp.helpers import is_LSP_enabled +from codeflash.lsp.helpers import is_LSP_enabled, is_subagent_mode from codeflash.lsp.lsp_logger import enhanced_log from codeflash.lsp.lsp_message import LspCodeMessage, LspTextMessage @@ -35,42 +35,69 @@ from codeflash.discovery.functions_to_optimize import FunctionToOptimize from codeflash.languages.base import DependencyResolver, IndexResult from codeflash.lsp.lsp_message import LspMessage + from codeflash.models.models import TestResults DEBUG_MODE = logging.getLogger().getEffectiveLevel() == logging.DEBUG console = 
Console(highlighter=NullHighlighter()) -if is_LSP_enabled(): +if is_LSP_enabled() or is_subagent_mode(): console.quiet = True -logging.basicConfig( - level=logging.INFO, - handlers=[ - RichHandler( - rich_tracebacks=True, - markup=False, - highlighter=NullHighlighter(), - console=console, - show_path=False, - show_time=False, - ) - ], - format=BARE_LOGGING_FORMAT, -) +if is_subagent_mode(): + import re + import sys + + _lsp_prefix_re = re.compile(r"^(?:!?lsp,?|h[2-4]|loading)\|") + _subagent_drop_patterns = ( + "Test log -", + "Test failed to load", + "Examining file ", + "Generated ", + "Add custom marker", + "Disabling all autouse", + "Reverting code and helpers", + ) + + class _AgentLogFilter(logging.Filter): + def filter(self, record: logging.LogRecord) -> bool: + record.msg = _lsp_prefix_re.sub("", str(record.msg)) + msg = record.getMessage() + return not any(msg.startswith(p) for p in _subagent_drop_patterns) + + _agent_handler = logging.StreamHandler(sys.stderr) + _agent_handler.addFilter(_AgentLogFilter()) + logging.basicConfig(level=logging.INFO, handlers=[_agent_handler], format="%(levelname)s: %(message)s") +else: + logging.basicConfig( + level=logging.INFO, + handlers=[ + RichHandler( + rich_tracebacks=True, + markup=False, + highlighter=NullHighlighter(), + console=console, + show_path=False, + show_time=False, + ) + ], + format=BARE_LOGGING_FORMAT, + ) logger = logging.getLogger("rich") logging.getLogger("parso").setLevel(logging.WARNING) # override the logger to reformat the messages for the lsp -for level in ("info", "debug", "warning", "error"): - real_fn = getattr(logger, level) - setattr( - logger, - level, - lambda msg, *args, _real_fn=real_fn, _level=level, **kwargs: enhanced_log( - msg, _real_fn, _level, *args, **kwargs - ), - ) +if not is_subagent_mode(): + for level in ("info", "debug", "warning", "error"): + real_fn = getattr(logger, level) + setattr( + logger, + level, + lambda msg, *args, _real_fn=real_fn, _level=level, **kwargs: 
enhanced_log( + msg, _real_fn, _level, *args, **kwargs + ), + ) class DummyTask: @@ -97,6 +124,8 @@ def paneled_text( text: str, panel_args: dict[str, str | bool] | None = None, text_args: dict[str, str] | None = None ) -> None: """Print text in a panel.""" + if is_subagent_mode(): + return from rich.panel import Panel from rich.text import Text @@ -125,6 +154,8 @@ def code_print( language: Programming language for syntax highlighting ('python', 'javascript', 'typescript') """ + if is_subagent_mode(): + return if is_LSP_enabled(): lsp_log( LspCodeMessage(code=code_str, file_name=file_name, function_name=function_name, message_id=lsp_message_id) @@ -162,6 +193,10 @@ def progress_bar( """ global _progress_bar_active + if is_subagent_mode(): + yield DummyTask().id + return + if is_LSP_enabled(): lsp_log(LspTextMessage(text=message, takes_time=True)) yield @@ -193,6 +228,10 @@ def progress_bar( @contextmanager def test_files_progress_bar(total: int, description: str) -> Generator[tuple[Progress, TaskID], None, None]: """Progress bar for test files.""" + if is_subagent_mode(): + yield DummyProgress(), DummyTask().id + return + if is_LSP_enabled(): lsp_log(LspTextMessage(text=description, takes_time=True)) dummy_progress = DummyProgress() @@ -226,6 +265,10 @@ def call_graph_live_display( from rich.text import Text from rich.tree import Tree + if is_subagent_mode(): + yield lambda _: None + return + if is_LSP_enabled(): lsp_log(LspTextMessage(text="Building call graph", takes_time=True)) yield lambda _: None @@ -333,6 +376,9 @@ def call_graph_summary(call_graph: DependencyResolver, file_to_funcs: dict[Path, if not total_functions: return + if is_subagent_mode(): + return + # Build the mapping expected by the dependency resolver file_items = file_to_funcs.items() mapping = {file_path: {func.qualified_name for func in funcs} for file_path, funcs in file_items} @@ -359,3 +405,92 @@ def call_graph_summary(call_graph: DependencyResolver, file_to_funcs: dict[Path, return 
console.print(Panel(summary, title="Call Graph Summary", border_style="cyan")) + + +def subagent_log_optimization_result( + function_name: str, + file_path: Path, + perf_improvement_line: str, + original_runtime_ns: int, + best_runtime_ns: int, + raw_explanation: str, + original_code: dict[Path, str], + new_code: dict[Path, str], + review: str, + test_results: TestResults, +) -> None: + import sys + from xml.sax.saxutils import escape + + from codeflash.code_utils.code_utils import unified_diff_strings + from codeflash.code_utils.time_utils import humanize_runtime + from codeflash.models.test_type import TestType + + diff_parts = [] + for path in original_code: + old = original_code.get(path, "") + new = new_code.get(path, "") + if old != new: + diff = unified_diff_strings(old, new, fromfile=str(path), tofile=str(path)) + if diff: + diff_parts.append(diff) + + diff_str = "\n".join(diff_parts) + + original_runtime = humanize_runtime(original_runtime_ns) + optimized_runtime = humanize_runtime(best_runtime_ns) + + report = test_results.get_test_pass_fail_report_by_type() + verification_rows = [] + for test_type in TestType: + if test_type is TestType.INIT_STATE_TEST: + continue + name = test_type.to_name() + if not name: + continue + passed = report[test_type]["passed"] + failed = report[test_type]["failed"] + if passed == 0 and failed == 0: + status = "None Found" + elif failed > 0: + status = f"{failed} Failed, {passed} Passed" + else: + status = f"{passed} Passed" + verification_rows.append(f' ') + + xml = [ + "", + f" {escape(function_name)}", + f" {escape(str(file_path))}", + f" {escape(perf_improvement_line)}", + f" {escape(original_runtime)}", + f" {escape(optimized_runtime)}", + ] + if review: + xml.append(f" {escape(review)}") + xml.append(f" {escape(raw_explanation.strip())}") + xml.append(" ") + xml.extend(verification_rows) + xml.append(" ") + if diff_str: + xml.append(f" {escape(diff_str)}") + for path in new_code: + if new_code[path] != 
original_code.get(path, ""): + xml.append(f' {escape(new_code[path])}') + xml.append(" ") + xml.append(" First, present a short summary: function name, performance improvement, and explanation.") + xml.append(" If you are running inside an IDE (VS Code, Cursor, Windsurf, etc.):") + xml.append(" For each optimized-code element, use the Write tool to write the optimized code to the file.") + xml.append(" This will surface the diff in the IDE's native diff view for the user to accept or reject.") + xml.append(" If you are running in a terminal without an IDE connection:") + xml.append(" Present this optimization to the user using AskUserQuestion.") + xml.append(" Show the function name, performance improvement, and explanation as the question text.") + xml.append(" Provide two options: 'Apply' (write the optimized code to the file) and 'Reject' (do nothing).") + xml.append( + " Put the full diff in the 'Apply' option's markdown preview so the user can review the exact changes." + ) + xml.append(" If the user chooses 'Apply', write the content from optimized-code to the corresponding file.") + xml.append(" ") + xml.append("") + + sys.stdout.write("\n".join(xml) + "\n") diff --git a/codeflash/cli_cmds/init_javascript.py b/codeflash/cli_cmds/init_javascript.py index c608d1705..5e9f11a47 100644 --- a/codeflash/cli_cmds/init_javascript.py +++ b/codeflash/cli_cmds/init_javascript.py @@ -35,7 +35,6 @@ class ProjectLanguage(Enum): PYTHON = auto() JAVASCRIPT = auto() TYPESCRIPT = auto() - JAVA = auto() class JsPackageManager(Enum): @@ -91,13 +90,6 @@ def detect_project_language(project_root: Path | None = None) -> ProjectLanguage has_setup_py = (root / "setup.py").exists() has_package_json = (root / "package.json").exists() has_tsconfig = (root / "tsconfig.json").exists() - has_pom_xml = (root / "pom.xml").exists() - has_build_gradle = (root / "build.gradle").exists() or (root / "build.gradle.kts").exists() - has_java_src = (root / "src" / "main" / "java").is_dir() - - # Java 
project (Maven or Gradle) - if has_pom_xml or has_build_gradle or has_java_src: - return ProjectLanguage.JAVA # TypeScript project (tsconfig.json is definitive) if has_tsconfig: @@ -216,7 +208,7 @@ def get_package_install_command(project_root: Path, package: str, dev: bool = Tr return cmd -def init_js_project(language: ProjectLanguage) -> None: +def init_js_project(language: ProjectLanguage, *, skip_confirm: bool = False, skip_api_key: bool = False) -> None: """Initialize Codeflash for a JavaScript/TypeScript project.""" from codeflash.cli_cmds.cmd_init import install_github_actions, install_github_app, prompt_api_key @@ -234,15 +226,15 @@ def init_js_project(language: ProjectLanguage) -> None: console.print(lang_panel) console.print() - did_add_new_key = prompt_api_key() + did_add_new_key = False if skip_api_key else prompt_api_key() - should_modify, _config = should_modify_package_json_config() + should_modify, _config = should_modify_package_json_config(skip_confirm=skip_confirm) # Default git remote git_remote = "origin" if should_modify: - setup_info = collect_js_setup_info(language) + setup_info = collect_js_setup_info(language, skip_confirm=skip_confirm) git_remote = setup_info.git_remote or "origin" configured = configure_package_json(setup_info) if not configured: @@ -287,7 +279,7 @@ def init_js_project(language: ProjectLanguage) -> None: sys.exit(0) -def should_modify_package_json_config() -> tuple[bool, dict[str, Any] | None]: +def should_modify_package_json_config(*, skip_confirm: bool = False) -> tuple[bool, dict[str, Any] | None]: """Check if package.json has valid codeflash config for JS/TS projects.""" package_json_path = Path("package.json") @@ -313,6 +305,10 @@ def should_modify_package_json_config() -> tuple[bool, dict[str, Any] | None]: if tests_root and not Path(tests_root).is_dir(): return True, None + # In skip_confirm mode, don't reconfigure a valid config + if skip_confirm: + return False, config + # Config is valid - ask if user wants to 
reconfigure return Confirm.ask( "✅ A valid Codeflash config already exists in package.json. Do you want to re-configure it?", @@ -323,13 +319,12 @@ def should_modify_package_json_config() -> tuple[bool, dict[str, Any] | None]: return True, None -def collect_js_setup_info(language: ProjectLanguage) -> JSSetupInfo: +def collect_js_setup_info(language: ProjectLanguage, *, skip_confirm: bool = False) -> JSSetupInfo: """Collect setup information for JavaScript/TypeScript projects. Uses auto-detection for most settings and only asks for overrides if needed. + When skip_confirm is True, uses all auto-detected defaults without prompting. """ - from rich.prompt import Confirm - from codeflash.cli_cmds.cmd_init import ask_for_telemetry, get_valid_subdirs from codeflash.code_utils.config_js import ( detect_formatter, @@ -355,6 +350,20 @@ def collect_js_setup_info(language: ProjectLanguage) -> JSSetupInfo: detected_test_runner = detect_test_runner(curdir, package_data) detected_formatter = detect_formatter(curdir, package_data) + # In skip_confirm mode, use all auto-detected defaults + if skip_confirm: + git_remote = "origin" + try: + repo = Repo(Path.cwd(), search_parent_directories=True) + git_remotes = get_git_remotes(repo) + if git_remotes: + git_remote = git_remotes[0] + except InvalidGitRepositoryError: + pass + return JSSetupInfo(git_remote=git_remote) + + from rich.prompt import Confirm + # Build detection summary formatter_display = detected_formatter[0] if detected_formatter else "none detected" detection_table = Table(show_header=False, box=None, padding=(0, 2)) diff --git a/codeflash/cli_cmds/logging_config.py b/codeflash/cli_cmds/logging_config.py index dbb3663bd..296a0b0fa 100644 --- a/codeflash/cli_cmds/logging_config.py +++ b/codeflash/cli_cmds/logging_config.py @@ -5,8 +5,18 @@ def set_level(level: int, *, echo_setting: bool = True) -> None: import logging + import sys import time + from codeflash.lsp.helpers import is_subagent_mode + + if is_subagent_mode(): 
+ logging.basicConfig( + level=level, handlers=[logging.StreamHandler(sys.stderr)], format="%(levelname)s: %(message)s", force=True + ) + logging.getLogger().setLevel(level) + return + from rich.highlighter import NullHighlighter from rich.logging import RichHandler diff --git a/codeflash/code_utils/checkpoint.py b/codeflash/code_utils/checkpoint.py index 1160bf2e0..367e150b7 100644 --- a/codeflash/code_utils/checkpoint.py +++ b/codeflash/code_utils/checkpoint.py @@ -141,12 +141,18 @@ def get_all_historical_functions(module_root: Path, checkpoint_dir: Path) -> dic def ask_should_use_checkpoint_get_functions(args: argparse.Namespace) -> Optional[dict[str, dict[str, str]]]: previous_checkpoint_functions = None + if getattr(args, "subagent", False): + console.rule() + return None if args.all and codeflash_temp_dir.is_dir(): previous_checkpoint_functions = get_all_historical_functions(args.module_root, codeflash_temp_dir) - if previous_checkpoint_functions and Confirm.ask( - "Previous Checkpoint detected from an incomplete optimization run, shall I continue the optimization from that point?", - default=True, - console=console, + if previous_checkpoint_functions and ( + getattr(args, "yes", False) + or Confirm.ask( + "Previous Checkpoint detected from an incomplete optimization run, shall I continue the optimization from that point?", + default=True, + console=console, + ) ): console.rule() else: diff --git a/codeflash/code_utils/code_utils.py b/codeflash/code_utils/code_utils.py index 7a9afc96f..45a64f0fc 100644 --- a/codeflash/code_utils/code_utils.py +++ b/codeflash/code_utils/code_utils.py @@ -408,9 +408,10 @@ def get_all_function_names(code: str) -> tuple[bool, list[str]]: def get_run_tmp_file(file_path: Path | str) -> Path: if isinstance(file_path, str): file_path = Path(file_path) - if not hasattr(get_run_tmp_file, "tmpdir"): + if not hasattr(get_run_tmp_file, "tmpdir_path"): get_run_tmp_file.tmpdir = TemporaryDirectory(prefix="codeflash_") - return 
Path(get_run_tmp_file.tmpdir.name) / file_path + get_run_tmp_file.tmpdir_path = Path(get_run_tmp_file.tmpdir.name) + return get_run_tmp_file.tmpdir_path / file_path def path_belongs_to_site_packages(file_path: Path) -> bool: diff --git a/codeflash/code_utils/config_consts.py b/codeflash/code_utils/config_consts.py index 73af5607e..5f231e15b 100644 --- a/codeflash/code_utils/config_consts.py +++ b/codeflash/code_utils/config_consts.py @@ -6,8 +6,10 @@ MAX_TEST_RUN_ITERATIONS = 5 OPTIMIZATION_CONTEXT_TOKEN_LIMIT = 64000 TESTGEN_CONTEXT_TOKEN_LIMIT = 64000 -INDIVIDUAL_TESTCASE_TIMEOUT = 15 # For Python pytest -JAVA_TESTCASE_TIMEOUT = 120 # Java Maven tests need more time due to startup overhead +READ_WRITABLE_LIMIT_ERROR = "Read-writable code has exceeded token limit, cannot proceed" +TESTGEN_LIMIT_ERROR = "Testgen code context has exceeded token limit, cannot proceed" +INDIVIDUAL_TESTCASE_TIMEOUT = 15 +JAVA_TESTCASE_TIMEOUT = 120 MAX_FUNCTION_TEST_SECONDS = 60 MIN_IMPROVEMENT_THRESHOLD = 0.05 MIN_THROUGHPUT_IMPROVEMENT_THRESHOLD = 0.10 # 10% minimum improvement for async throughput diff --git a/codeflash/code_utils/deduplicate_code.py b/codeflash/code_utils/deduplicate_code.py index a69c52ef3..c91d8f067 100644 --- a/codeflash/code_utils/deduplicate_code.py +++ b/codeflash/code_utils/deduplicate_code.py @@ -10,7 +10,7 @@ import re from codeflash.code_utils.normalizers import get_normalizer -from codeflash.languages import current_language, is_python +from codeflash.languages import current_language def normalize_code( @@ -36,20 +36,20 @@ def normalize_code( try: normalizer = get_normalizer(language) - # Python has additional options - if is_python(): - if return_ast_dump: - return normalizer.normalize_for_hash(code) + if return_ast_dump: + return normalizer.normalize_for_hash(code) + # Only Python normalizer accepts remove_docstrings; pass it via **kwargs + # so non-Python normalizers (which don't accept it) still work + try: return normalizer.normalize(code, 
remove_docstrings=remove_docstrings) - - # For other languages, use standard normalization - return normalizer.normalize(code) + except TypeError: + return normalizer.normalize(code) except ValueError: # Unknown language - fall back to basic normalization return _basic_normalize(code) except Exception: # Parsing error - try other languages or fall back - if is_python(): + if language == "python": # Try JavaScript as fallback try: js_normalizer = get_normalizer("javascript") diff --git a/codeflash/code_utils/instrument_existing_tests.py b/codeflash/code_utils/instrument_existing_tests.py index b2d9e4143..786ec8400 100644 --- a/codeflash/code_utils/instrument_existing_tests.py +++ b/codeflash/code_utils/instrument_existing_tests.py @@ -11,7 +11,6 @@ from codeflash.code_utils.code_utils import get_run_tmp_file, module_name_from_file_path from codeflash.code_utils.formatter import sort_imports from codeflash.discovery.functions_to_optimize import FunctionToOptimize -from codeflash.languages import is_java, is_javascript from codeflash.models.models import FunctionParent, TestingMode, VerificationType if TYPE_CHECKING: @@ -711,24 +710,6 @@ def inject_profiling_into_existing_test( mode: TestingMode = TestingMode.BEHAVIOR, ) -> tuple[bool, str | None]: tests_project_root = tests_project_root.resolve() - # Route to language-specific implementations - if is_javascript(): - from codeflash.languages.javascript.instrument import inject_profiling_into_existing_js_test - - return inject_profiling_into_existing_js_test( - test_string=test_string, - call_positions=call_positions, - function_to_optimize=function_to_optimize, - tests_project_root=tests_project_root, - mode=mode.value, - test_path=test_path, - ) - - if is_java(): - from codeflash.languages.java.instrumentation import instrument_existing_test - - return instrument_existing_test(test_path, call_positions, function_to_optimize, tests_project_root, mode.value) - if function_to_optimize.is_async: return 
inject_async_profiling_into_existing_test( test_string=test_string, diff --git a/codeflash/code_utils/shell_utils.py b/codeflash/code_utils/shell_utils.py index 2052f3e96..1569b51a1 100644 --- a/codeflash/code_utils/shell_utils.py +++ b/codeflash/code_utils/shell_utils.py @@ -259,6 +259,10 @@ def get_cross_platform_subprocess_run_args( capture_output: bool = True, ) -> dict[str, str]: run_args = {"cwd": cwd, "env": env, "text": text, "timeout": timeout, "check": check} + # When text=True, use errors='replace' to handle non-UTF-8 bytes gracefully + # instead of raising UnicodeDecodeError + if text: + run_args["errors"] = "replace" if sys.platform == "win32": creationflags = subprocess.CREATE_NEW_PROCESS_GROUP run_args["creationflags"] = creationflags diff --git a/codeflash/discovery/discover_unit_tests.py b/codeflash/discovery/discover_unit_tests.py index 3ca9ff1ff..fa1ebb16e 100644 --- a/codeflash/discovery/discover_unit_tests.py +++ b/codeflash/discovery/discover_unit_tests.py @@ -641,20 +641,14 @@ def discover_unit_tests( discover_only_these_tests: list[Path] | None = None, file_to_funcs_to_optimize: dict[Path, list[FunctionToOptimize]] | None = None, ) -> tuple[dict[str, set[FunctionCalledInTest]], int, int]: - from codeflash.languages import is_java, is_javascript, is_python + from codeflash.languages.current import current_language_support # Detect language from functions being optimized language = _detect_language_from_functions(file_to_funcs_to_optimize) # Route to language-specific test discovery for non-Python languages - if not is_python(): - # For JavaScript/TypeScript and Java, tests_project_rootdir should be tests_root itself - # The Jest helper will be configured to NOT include "tests." 
prefix to match - # For Java, this ensures test file resolution works correctly in parse_test_xml - if is_javascript(): - cfg.tests_project_rootdir = cfg.tests_root - if is_java(): - cfg.tests_project_rootdir = cfg.tests_root + if current_language_support().test_result_serialization_format != "pickle": + current_language_support().adjust_test_config_for_discovery(cfg) return discover_tests_for_language(cfg, language, file_to_funcs_to_optimize) # Existing Python logic @@ -731,6 +725,10 @@ def discover_tests_pytest( logger.debug(f"Pytest collection exit code: {exitcode}") if pytest_rootdir is not None: cfg.tests_project_rootdir = Path(pytest_rootdir) + if discover_only_these_tests: + resolved_discover_only = {p.resolve() for p in discover_only_these_tests} + else: + resolved_discover_only = None file_to_test_map: dict[Path, list[FunctionCalledInTest]] = defaultdict(list) for test in tests: if "__replay_test" in test["test_file"]: @@ -740,13 +738,14 @@ def discover_tests_pytest( else: test_type = TestType.EXISTING_UNIT_TEST + test_file_path = Path(test["test_file"]).resolve() test_obj = TestsInFile( - test_file=Path(test["test_file"]), + test_file=test_file_path, test_class=test["test_class"], test_function=test["test_function"], test_type=test_type, ) - if discover_only_these_tests and test_obj.test_file not in discover_only_these_tests: + if resolved_discover_only and test_obj.test_file not in resolved_discover_only: continue file_to_test_map[test_obj.test_file].append(test_obj) # Within these test files, find the project functions they are referring to and return their names/locations diff --git a/codeflash/discovery/functions_to_optimize.py b/codeflash/discovery/functions_to_optimize.py index 56504875a..0ad6117ab 100644 --- a/codeflash/discovery/functions_to_optimize.py +++ b/codeflash/discovery/functions_to_optimize.py @@ -1,22 +1,22 @@ from __future__ import annotations import ast +import contextlib import os import random import warnings -from _ast import 
AsyncFunctionDef, ClassDef, FunctionDef from collections import defaultdict from functools import cache from pathlib import Path from typing import TYPE_CHECKING, Any, Optional import git -import libcst as cst from pydantic.dataclasses import dataclass +from rich.text import Text from rich.tree import Tree from codeflash.api.cfapi import get_blocklisted_functions, is_function_being_optimized_again -from codeflash.cli_cmds.console import DEBUG_MODE, console, logger +from codeflash.cli_cmds.console import console, logger from codeflash.code_utils.code_utils import ( exit_with_message, is_class_defined_in_file, @@ -38,18 +38,8 @@ if TYPE_CHECKING: from argparse import Namespace - from libcst import CSTNode - from libcst.metadata import CodeRange - from codeflash.models.models import CodeOptimizationContext from codeflash.verification.verification_utils import TestConfig -import contextlib - -from rich.text import Text - -_property_id = "property" - -_ast_name = ast.Name @dataclass(frozen=True) @@ -61,85 +51,6 @@ class FunctionProperties: staticmethod_class_name: Optional[str] -class ReturnStatementVisitor(cst.CSTVisitor): - def __init__(self) -> None: - super().__init__() - self.has_return_statement: bool = False - - def visit_Return(self, node: cst.Return) -> None: - self.has_return_statement = True - - -class FunctionVisitor(cst.CSTVisitor): - METADATA_DEPENDENCIES = (cst.metadata.PositionProvider, cst.metadata.ParentNodeProvider) - - def __init__(self, file_path: str) -> None: - super().__init__() - self.file_path: str = file_path - self.functions: list[FunctionToOptimize] = [] - - @staticmethod - def is_pytest_fixture(node: cst.FunctionDef) -> bool: - for decorator in node.decorators: - dec = decorator.decorator - if isinstance(dec, cst.Call): - dec = dec.func - if isinstance(dec, cst.Attribute) and dec.attr.value == "fixture": - if isinstance(dec.value, cst.Name) and dec.value.value == "pytest": - return True - if isinstance(dec, cst.Name) and dec.value == 
"fixture": - return True - return False - - def visit_FunctionDef(self, node: cst.FunctionDef) -> None: - return_visitor: ReturnStatementVisitor = ReturnStatementVisitor() - node.visit(return_visitor) - if return_visitor.has_return_statement and not self.is_pytest_fixture(node): - pos: CodeRange = self.get_metadata(cst.metadata.PositionProvider, node) - parents: CSTNode | None = self.get_metadata(cst.metadata.ParentNodeProvider, node) - ast_parents: list[FunctionParent] = [] - while parents is not None: - if isinstance(parents, (cst.FunctionDef, cst.ClassDef)): - ast_parents.append(FunctionParent(parents.name.value, parents.__class__.__name__)) - parents = self.get_metadata(cst.metadata.ParentNodeProvider, parents, default=None) - self.functions.append( - FunctionToOptimize( - function_name=node.name.value, - file_path=self.file_path, - parents=list(reversed(ast_parents)), - starting_line=pos.start.line, - ending_line=pos.end.line, - is_async=bool(node.asynchronous), - ) - ) - - -def find_functions_with_return_statement(ast_module: ast.Module, file_path: Path) -> list[FunctionToOptimize]: - results: list[FunctionToOptimize] = [] - # (node, parent_path) — iterative DFS avoids RecursionError on deeply nested ASTs - stack: list[tuple[ast.AST, list[FunctionParent]]] = [(ast_module, [])] - while stack: - node, ast_path = stack.pop() - if isinstance(node, (FunctionDef, AsyncFunctionDef)): - if function_has_return_statement(node) and not function_is_a_property(node): - results.append( - FunctionToOptimize( - function_name=node.name, - file_path=file_path, - parents=ast_path[:], - is_async=isinstance(node, AsyncFunctionDef), - ) - ) - # Don't recurse into function bodies (matches original visitor behaviour) - continue - child_path = ( - [*ast_path, FunctionParent(node.name, node.__class__.__name__)] if isinstance(node, ClassDef) else ast_path - ) - for child in reversed(list(ast.iter_child_nodes(node))): - stack.append((child, child_path)) - return results - - # 
============================================================================= # Multi-language support helpers # ============================================================================= @@ -250,21 +161,26 @@ def _is_js_ts_function_exported(file_path: Path, function_name: str) -> tuple[bo return True, None -def _find_all_functions_in_python_file(file_path: Path) -> dict[Path, list[FunctionToOptimize]]: - """Find all optimizable functions in a Python file using AST parsing. +def _is_js_ts_function_exists_but_not_exported(file_path: Path, function_name: str) -> bool: + """Check if a JS/TS function exists in the file but is not exported. - This is the original Python implementation preserved for backward compatibility. + Returns True only if the function name is found as a defined function + but is_exported is False. """ - functions: dict[Path, list[FunctionToOptimize]] = {} - with file_path.open(encoding="utf8") as f: - try: - ast_module = ast.parse(f.read()) - except Exception as e: - if DEBUG_MODE: - logger.exception(e) - return functions - functions[file_path] = find_functions_with_return_statement(ast_module, file_path) - return functions + from codeflash.languages.javascript.treesitter import get_analyzer_for_file + + try: + source = file_path.read_text(encoding="utf-8") + analyzer = get_analyzer_for_file(file_path) + all_funcs = analyzer.find_functions( + source, include_methods=True, include_arrow_functions=True, require_name=True + ) + for func in all_funcs: + if func.name == function_name: + return not func.is_exported + except Exception as e: + logger.debug(f"Failed to check function existence for {function_name}: {e}") + return False def _find_all_functions_via_language_support(file_path: Path) -> dict[Path, list[FunctionToOptimize]]: @@ -280,7 +196,6 @@ def _find_all_functions_via_language_support(file_path: Path) -> dict[Path, list try: lang_support = get_language_support(file_path) criteria = FunctionFilterCriteria(require_return=True) - # 
discover_functions already returns FunctionToOptimize objects functions[file_path] = lang_support.discover_functions(file_path, criteria) except Exception as e: logger.debug(f"Failed to discover functions in {file_path}: {e}") @@ -302,7 +217,7 @@ def get_functions_to_optimize( assert sum([bool(optimize_all), bool(replay_test), bool(file)]) <= 1, ( "Only one of optimize_all, replay_test, or file should be provided" ) - functions: dict[str, list[FunctionToOptimize]] + functions: dict[Path, list[FunctionToOptimize]] trace_file_path: Path | None = None is_lsp = is_LSP_enabled() with warnings.catch_warnings(): @@ -319,7 +234,7 @@ def get_functions_to_optimize( logger.info("!lsp|Finding all functions in the file '%s'…", file) console.rule() file = Path(file) if isinstance(file, str) else file - functions: dict[Path, list[FunctionToOptimize]] = find_all_functions_in_file(file) + functions = find_all_functions_in_file(file) if only_get_this_function is not None: split_function = only_get_this_function.split(".") if len(split_function) > 2: @@ -342,6 +257,18 @@ def get_functions_to_optimize( if found_function is None: if is_lsp: return functions, 0, None + + # For JS/TS: check if the function exists but is not exported + if is_language_supported(file): + lang_support = get_language_support(file) + if lang_support.language in (Language.JAVASCRIPT, Language.TYPESCRIPT): + if _is_js_ts_function_exists_but_not_exported(file, only_function_name): + exit_with_message( + f"Function '{only_function_name}' exists in {file} but is not exported.\n" + f"In JavaScript/TypeScript, only exported functions can be optimized.\n" + f"Add: export {{ {only_function_name} }}" + ) + found = closest_matching_file_function_name(only_get_this_function, functions) if found is not None: file, found_function = found @@ -354,6 +281,7 @@ def get_functions_to_optimize( f"Function {only_get_this_function} not found in file {file}\nor the function does not have a 'return' statement or is a property" ) + 
assert found_function is not None # For JavaScript/TypeScript, verify that the function (or its parent class) is exported # Non-exported functions cannot be imported by tests if found_function.language in ("javascript", "typescript"): @@ -397,7 +325,7 @@ def get_functions_to_optimize( return filtered_modified_functions, functions_count, trace_file_path -def get_functions_within_git_diff(uncommitted_changes: bool) -> dict[str, list[FunctionToOptimize]]: +def get_functions_within_git_diff(uncommitted_changes: bool) -> dict[Path, list[FunctionToOptimize]]: modified_lines: dict[str, list[int]] = get_git_diff(uncommitted_changes=uncommitted_changes) return get_functions_within_lines(modified_lines) @@ -438,7 +366,7 @@ def closest_matching_file_function_name( closest_match = function closest_file = file_path - if closest_match is not None: + if closest_match is not None and closest_file is not None: return closest_file, closest_match return None @@ -472,39 +400,31 @@ def levenshtein_distance(s1: str, s2: str) -> int: return previous[len1] -def get_functions_inside_a_commit(commit_hash: str) -> dict[str, list[FunctionToOptimize]]: +def get_functions_inside_a_commit(commit_hash: str) -> dict[Path, list[FunctionToOptimize]]: modified_lines: dict[str, list[int]] = get_git_diff(only_this_commit=commit_hash) return get_functions_within_lines(modified_lines) -def get_functions_within_lines(modified_lines: dict[str, list[int]]) -> dict[str, list[FunctionToOptimize]]: - functions: dict[str, list[FunctionToOptimize]] = {} +def get_functions_within_lines(modified_lines: dict[str, list[int]]) -> dict[Path, list[FunctionToOptimize]]: + functions: dict[Path, list[FunctionToOptimize]] = {} for path_str, lines_in_file in modified_lines.items(): path = Path(path_str) if not path.exists(): continue - with path.open(encoding="utf8") as f: - file_content = f.read() - try: - wrapper = cst.metadata.MetadataWrapper(cst.parse_module(file_content)) - except Exception as e: - logger.exception(e) 
- continue - function_lines = FunctionVisitor(file_path=str(path)) - wrapper.visit(function_lines) - functions[str(path)] = [ - function_to_optimize - for function_to_optimize in function_lines.functions - if (start_line := function_to_optimize.starting_line) is not None - and (end_line := function_to_optimize.ending_line) is not None - and any(start_line <= line <= end_line for line in lines_in_file) - ] + all_functions = find_all_functions_in_file(path) + functions[path] = [ + func + for func in all_functions.get(path, []) + if func.starting_line is not None + and func.ending_line is not None + and any(func.starting_line <= line <= func.ending_line for line in lines_in_file) + ] return functions def get_all_files_and_functions( module_root_path: Path, ignore_paths: list[Path], language: Language | None = None -) -> dict[str, list[FunctionToOptimize]]: +) -> dict[Path, list[FunctionToOptimize]]: """Get all optimizable functions from files in the module root. Args: @@ -516,9 +436,8 @@ def get_all_files_and_functions( Dictionary mapping file paths to lists of FunctionToOptimize. """ - functions: dict[str, list[FunctionToOptimize]] = {} + functions: dict[Path, list[FunctionToOptimize]] = {} for file_path in get_files_for_language(module_root_path, ignore_paths, language): - # Find all the functions in the file functions.update(find_all_functions_in_file(file_path).items()) # Randomize the order of the files to optimize to avoid optimizing the same file in the same order every time. # Helpful if an optimize-all run is stuck and we restart it. @@ -528,35 +447,20 @@ def get_all_files_and_functions( def find_all_functions_in_file(file_path: Path) -> dict[Path, list[FunctionToOptimize]]: - """Find all optimizable functions in a file, routing to the appropriate language handler. 
- - This function checks if the file extension is supported and routes to either - the Python-specific implementation (for backward compatibility) or the - language support abstraction for other languages. - - Args: - file_path: Path to the source file. - - Returns: - Dictionary mapping file path to list of FunctionToOptimize. - - """ - # Check if the file extension is supported + """Find all optimizable functions in a file using the language support abstraction.""" if not is_language_supported(file_path): return {} - try: + from codeflash.languages.base import FunctionFilterCriteria + lang_support = get_language_support(file_path) - except Exception: + criteria = FunctionFilterCriteria(require_return=True) + source = file_path.read_text(encoding="utf-8") + return {file_path: lang_support.discover_functions(source, file_path, criteria)} + except Exception as e: + logger.debug(f"Failed to discover functions in {file_path}: {e}") return {} - # Route to Python-specific implementation for backward compatibility - if lang_support.language == Language.PYTHON: - return _find_all_functions_in_python_file(file_path) - - # Use language support abstraction for other languages - return _find_all_functions_via_language_support(file_path) - def get_all_replay_test_functions( replay_test: list[Path], test_cfg: TestConfig, project_root_path: Path @@ -833,7 +737,7 @@ def filter_functions( disable_logs: bool = False, ) -> tuple[dict[Path, list[FunctionToOptimize]], int]: resolved_project_root = project_root.resolve() - filtered_modified_functions: dict[str, list[FunctionToOptimize]] = {} + filtered_modified_functions: dict[Path, list[FunctionToOptimize]] = {} blocklist_funcs = get_blocklisted_functions() logger.debug(f"Blocklisted functions: {blocklist_funcs}") # Remove any function that we don't want to optimize @@ -940,7 +844,7 @@ def is_test_file(file_path_normalized: str) -> bool: functions_tmp.append(function) _functions = functions_tmp - filtered_modified_functions[file_path] 
= _functions + filtered_modified_functions[file_path_path] = _functions functions_count += len(_functions) if not disable_logs: @@ -961,7 +865,7 @@ def is_test_file(file_path_normalized: str) -> bool: if len(tree.children) > 0: console.print(tree) console.rule() - return {Path(k): v for k, v in filtered_modified_functions.items() if v}, functions_count + return {k: v for k, v in filtered_modified_functions.items() if v}, functions_count def filter_files_optimized(file_path: Path, tests_root: Path, ignore_paths: list[Path], module_root: Path) -> bool: @@ -984,31 +888,3 @@ def filter_files_optimized(file_path: Path, tests_root: Path, ignore_paths: list file_path in submodule_paths or any(file_path.is_relative_to(submodule_path) for submodule_path in submodule_paths) ) - - -def function_has_return_statement(function_node: FunctionDef | AsyncFunctionDef) -> bool: - # Custom DFS, return True as soon as a Return node is found - stack: list[ast.AST] = list(function_node.body) - while stack: - node = stack.pop() - if isinstance(node, ast.Return): - return True - # Only push child nodes that are statements; Return nodes are statements, - # so this preserves correctness while avoiding unnecessary traversal into expr/Name/etc. - for field in getattr(node, "_fields", ()): - child = getattr(node, field, None) - if isinstance(child, list): - for item in child: - if isinstance(item, ast.stmt): - stack.append(item) - elif isinstance(child, ast.stmt): - stack.append(child) - return False - - -def function_is_a_property(function_node: FunctionDef | AsyncFunctionDef) -> bool: - for node in function_node.decorator_list: # noqa: SIM110 - # Use isinstance rather than type(...) is ... 
for better performance with single inheritance trees like ast - if isinstance(node, _ast_name) and node.id == _property_id: - return True - return False diff --git a/codeflash/languages/__init__.py b/codeflash/languages/__init__.py index e63f19a5a..b0daea0fb 100644 --- a/codeflash/languages/__init__.py +++ b/codeflash/languages/__init__.py @@ -11,7 +11,7 @@ lang = get_language_support(Path("example.py")) # Discover functions - functions = lang.discover_functions(file_path) + functions = lang.discover_functions(source, file_path) # Replace a function new_source = lang.replace_function(file_path, function, new_code) @@ -38,6 +38,9 @@ reset_current_language, set_current_language, ) + +# Language support modules are imported lazily to avoid circular imports +# They get registered when first accessed via get_language_support() from codeflash.languages.registry import ( detect_project_language, get_language_support, @@ -72,20 +75,19 @@ def __getattr__(name: str): from codeflash.languages.javascript.support import TypeScriptSupport return TypeScriptSupport - if name == "JavaSupport": - from codeflash.languages.java.support import JavaSupport - - return JavaSupport if name == "PythonSupport": from codeflash.languages.python.support import PythonSupport return PythonSupport + if name == "JavaSupport": + from codeflash.languages.java.support import JavaSupport + + return JavaSupport msg = f"module {__name__!r} has no attribute {name!r}" raise AttributeError(msg) __all__ = [ - # Base types "CodeContext", "DependencyResolver", "FunctionInfo", @@ -96,7 +98,6 @@ def __getattr__(name: str): "ParentInfo", "TestInfo", "TestResult", - # Current language singleton "current_language", "current_language_support", "current_test_framework", diff --git a/codeflash/languages/base.py b/codeflash/languages/base.py index 60aa064b2..be569e5ec 100644 --- a/codeflash/languages/base.py +++ b/codeflash/languages/base.py @@ -13,11 +13,13 @@ from typing import TYPE_CHECKING, Any, Protocol, 
runtime_checkable if TYPE_CHECKING: + import ast from collections.abc import Callable, Iterable, Sequence from pathlib import Path from codeflash.discovery.functions_to_optimize import FunctionToOptimize - from codeflash.models.models import FunctionSource, GeneratedTestsList, InvocationId + from codeflash.models.models import FunctionSource, GeneratedTestsList, InvocationId, ValidCode + from codeflash.verification.verification_utils import TestConfig from codeflash.languages.language_enum import Language from codeflash.models.function_types import FunctionParent @@ -93,9 +95,9 @@ class CodeContext: target_file: Path helper_functions: list[HelperFunction] = field(default_factory=list) read_only_context: str = "" + imported_type_skeletons: str = "" imports: list[str] = field(default_factory=list) language: Language = Language.PYTHON - imported_type_skeletons: str = "" @dataclass @@ -169,6 +171,7 @@ class FunctionFilterCriteria: include_patterns: list[str] = field(default_factory=list) exclude_patterns: list[str] = field(default_factory=list) require_return: bool = True + require_export: bool = True include_async: bool = True include_methods: bool = True min_lines: int | None = None @@ -271,7 +274,7 @@ class PythonSupport(LanguageSupport): def language(self) -> Language: return Language.PYTHON - def discover_functions(self, file_path: Path, ...) -> list[FunctionInfo]: + def discover_functions(self, source: str, file_path: Path, ...) -> list[FunctionInfo]: # Python-specific implementation using LibCST ... @@ -322,15 +325,55 @@ def dir_excludes(self) -> frozenset[str]: """ ... + @property + def default_language_version(self) -> str | None: + """Default language version string sent to AI service. + + Returns None for languages where the runtime version is auto-detected (e.g. Python). + Returns a version string (e.g. "ES2022") for languages that need an explicit default. 
+ """ + return None + + @property + def valid_test_frameworks(self) -> tuple[str, ...]: + """Valid test frameworks for this language.""" + ... + + @property + def test_result_serialization_format(self) -> str: + """How test return values are serialized: "pickle" or "json".""" + return "pickle" + + def parse_test_xml( + self, test_xml_file_path: Path, test_files: Any, test_config: Any, run_result: Any = None + ) -> Any: + """Parse JUnit XML test results with language-specific timing markers.""" + ... + + def load_coverage( + self, + coverage_database_file: Path, + function_name: str, + code_context: Any, + source_file: Path, + coverage_config_file: Path | None = None, + ) -> Any: + """Load coverage data from language-specific format. + + Returns a CoverageData instance. + """ + ... + # === Discovery === def discover_functions( - self, file_path: Path, filter_criteria: FunctionFilterCriteria | None = None + self, source: str, file_path: Path, filter_criteria: FunctionFilterCriteria | None = None ) -> list[FunctionToOptimize]: - """Find all optimizable functions in a file. + """Find all optimizable functions in source code. Args: - file_path: Path to the source file to analyze. + source: Source code to analyze. + file_path: Path to the source file (used for context and language detection). filter_criteria: Optional criteria to filter functions. Returns: @@ -657,6 +700,45 @@ def compare_test_results( """ ... + @property + def function_optimizer_class(self) -> type: + """Return the FunctionOptimizer subclass for this language.""" + from codeflash.optimization.function_optimizer import FunctionOptimizer + + return FunctionOptimizer + + def prepare_module( + self, module_code: str, module_path: Path, project_root: Path + ) -> tuple[dict[Path, ValidCode], ast.Module | None] | None: + """Parse/validate a module before optimization.""" + ... + + def setup_test_config(self, test_cfg: TestConfig, file_path: Path) -> None: + """One-time project setup after language detection. 
Default: no-op.""" + + def adjust_test_config_for_discovery(self, test_cfg: TestConfig) -> None: + """Adjust test config before test discovery. Default: no-op.""" + + def detect_module_system(self, project_root: Path, source_file: Path) -> str | None: + """Detect the module system used by the project. Default: None (not applicable).""" + return None + + def process_generated_test_strings( + self, + generated_test_source: str, + instrumented_behavior_test_source: str, + instrumented_perf_test_source: str, + function_to_optimize: FunctionToOptimize, + test_path: Path, + test_cfg: Any, + project_module_system: str | None, + ) -> tuple[str, str, str]: + """Process raw generated test strings (instrumentation, placeholder replacement, etc.). + + Returns (generated_test_source, instrumented_behavior_source, instrumented_perf_source). + """ + ... + # === Configuration === def get_test_file_suffix(self) -> str: @@ -668,6 +750,58 @@ def get_test_file_suffix(self) -> str: """ ... + def get_test_dir_for_source(self, test_dir: Path, source_file: Path | None) -> Path | None: + """Find the appropriate test directory for a source file. + + For monorepos (JS), this finds the package's test directory from the source file path. + Default implementation returns None (no special directory resolution needed). + + Args: + test_dir: The root tests directory. + source_file: Path to the source file being tested. + + Returns: + The test directory path, or None if no special handling is needed. + + """ + return None + + def resolve_test_file_from_class_path(self, test_class_path: str, base_dir: Path) -> Path | None: + """Resolve a test file path from a class path string. + + Languages with non-Python module systems (e.g., Java package names like + "com.example.TestClass") override this to provide custom resolution. + Default: returns None (fall through to shared Python/file-path logic). + + Args: + test_class_path: The class path string from JUnit XML (e.g., "com.example.TestClass"). 
+ base_dir: The base directory for tests. + + Returns: + Path to the test file if found, None to fall through to default logic. + + """ + return None + + def resolve_test_module_path_for_pr( + self, test_module_path: str, tests_project_rootdir: Path, non_generated_tests: set[Path] + ) -> Path | None: + """Resolve test module path to an absolute file path for PR creation. + + Languages with non-Python module naming (e.g., Java class names) + override this. Default: returns None (fall through to shared logic). + + Args: + test_module_path: The test module path string. + tests_project_rootdir: The tests project root directory. + non_generated_tests: Set of known non-generated test file paths. + + Returns: + Resolved absolute path, or None to fall through to default logic. + + """ + return None + def find_test_root(self, project_root: Path) -> Path | None: """Find the test root directory for a project. @@ -730,11 +864,11 @@ def instrument_existing_test( Args: test_string: String containing the test file contents. - test_path: Path to the test file. call_positions: List of code positions where the function is called. function_to_optimize: The function being optimized. tests_project_root: Root directory of tests. mode: Testing mode - "behavior" or "performance". + test_path: Path to the test file. Returns: Tuple of (success, instrumented_code). @@ -754,6 +888,20 @@ def parse_line_profile_results(self, line_profiler_output_file: Path) -> dict: # === Test Execution === + def generate_concolic_tests( + self, + test_cfg: TestConfig, + project_root: Path, + function_to_optimize: FunctionToOptimize, + function_to_optimize_ast: Any, + ) -> tuple[dict, str]: + """Generate concolic tests for a function. + + Default implementation returns empty results. Override for languages + that support concolic testing (e.g. Python via CrossHair). + """ + return {}, "" + def run_behavioral_tests( self, test_paths: Any, @@ -812,6 +960,31 @@ def run_benchmarking_tests( """ ... 
+ def run_line_profile_tests( + self, + test_paths: Any, + test_env: dict[str, str], + cwd: Path, + timeout: int | None = None, + project_root: Path | None = None, + line_profile_output_file: Path | None = None, + ) -> tuple[Path, Any]: + """Run tests for line profiling. + + Args: + test_paths: TestFiles object containing test file information. + test_env: Environment variables for the test run. + cwd: Working directory for running tests. + timeout: Optional timeout in seconds. + project_root: Project root directory. + line_profile_output_file: Path where line profile results will be written. + + Returns: + Tuple of (result_file_path, subprocess_result). + + """ + ... + def convert_parents_to_tuple(parents: list | tuple) -> tuple[FunctionParent, ...]: """Convert a list of parent objects to a tuple of FunctionParent. diff --git a/codeflash/languages/code_replacer.py b/codeflash/languages/code_replacer.py new file mode 100644 index 000000000..5cd5fa476 --- /dev/null +++ b/codeflash/languages/code_replacer.py @@ -0,0 +1,175 @@ +"""Language-agnostic code replacement utilities. + +Used by non-Python language optimizers to replace function definitions +via the LanguageSupport protocol. 
+""" + +from __future__ import annotations + +from typing import TYPE_CHECKING + +from codeflash.cli_cmds.console import logger +from codeflash.languages.base import FunctionFilterCriteria, Language + +if TYPE_CHECKING: + from pathlib import Path + + from codeflash.discovery.functions_to_optimize import FunctionToOptimize + from codeflash.languages.base import LanguageSupport + from codeflash.models.models import CodeStringsMarkdown + +# Permissive criteria for discovering functions in code snippets (no export/return filtering) +_SOURCE_CRITERIA = FunctionFilterCriteria(require_return=False, require_export=False) + + +def get_optimized_code_for_module(relative_path: Path, optimized_code: CodeStringsMarkdown) -> str: + from codeflash.languages.current import is_python + + file_to_code_context = optimized_code.file_to_path() + relative_path_str = str(relative_path) + module_optimized_code = file_to_code_context.get(relative_path_str) + if module_optimized_code is None: + # Fallback: if there's only one code block with None file path, + # use it regardless of the expected path (the AI server doesn't always include file paths) + if "None" in file_to_code_context and len(file_to_code_context) == 1: + module_optimized_code = file_to_code_context["None"] + logger.debug(f"Using code block with None file_path for {relative_path}") + else: + # Fallback: try to match by just the filename (for Java/JS where the AI + # might return just the class name like "Algorithms.java" instead of + # the full path like "src/main/java/com/example/Algorithms.java") + target_filename = relative_path.name + for file_path_str, code in file_to_code_context.items(): + if file_path_str: + if file_path_str.endswith(target_filename) and ( + len(file_path_str) == len(target_filename) + or file_path_str[-len(target_filename) - 1] in ("/", "\\") + ): + module_optimized_code = code + logger.debug(f"Matched {file_path_str} to {relative_path} by filename") + break + + if module_optimized_code is None: + # 
Also try matching if there's only one code file, but ONLY for non-Python + # languages where path matching is less strict. + if len(file_to_code_context) == 1 and not is_python(): + only_key = next(iter(file_to_code_context.keys())) + module_optimized_code = file_to_code_context[only_key] + logger.debug(f"Using only code block {only_key} for {relative_path}") + else: + if logger.isEnabledFor(logger.level): + logger.warning( + f"Optimized code not found for {relative_path} In the context\n-------\n{optimized_code}\n-------\n" + "re-check your 'markdown code structure'" + f"existing files are {file_to_code_context.keys()}" + ) + module_optimized_code = "" + return module_optimized_code + + +def replace_function_definitions_for_language( + function_names: list[str], + optimized_code: CodeStringsMarkdown, + module_abspath: Path, + project_root_path: Path, + lang_support: LanguageSupport, + function_to_optimize: FunctionToOptimize | None = None, +) -> bool: + """Replace function definitions using the LanguageSupport protocol. + + Works for any language that implements LanguageSupport.replace_function + and LanguageSupport.discover_functions. + """ + original_source_code: str = module_abspath.read_text(encoding="utf8") + code_to_apply = get_optimized_code_for_module(module_abspath.relative_to(project_root_path), optimized_code) + + if not code_to_apply.strip(): + return False + + original_source_code = lang_support.add_global_declarations( + optimized_code=code_to_apply, original_source=original_source_code, module_abspath=module_abspath + ) + + language = lang_support.language + + if ( + function_to_optimize + and function_to_optimize.starting_line + and function_to_optimize.ending_line + and function_to_optimize.file_path == module_abspath + ): + # For Java, we need to pass the full optimized code so replace_function can + # extract and add any new class members (static fields, helper methods). + # For other languages, we extract just the target function. 
+ if language == Language.JAVA: + new_code = lang_support.replace_function(original_source_code, function_to_optimize, code_to_apply) + else: + optimized_func = _extract_function_from_code( + lang_support, code_to_apply, function_to_optimize.function_name, module_abspath + ) + if optimized_func: + new_code = lang_support.replace_function(original_source_code, function_to_optimize, optimized_func) + else: + new_code = lang_support.replace_function(original_source_code, function_to_optimize, code_to_apply) + else: + new_code = original_source_code + modified = False + + functions_to_replace = list(function_names) + + for func_name in functions_to_replace: + current_functions = lang_support.discover_functions(new_code, module_abspath, _SOURCE_CRITERIA) + + func = None + for f in current_functions: + if func_name in (f.qualified_name, f.function_name): + func = f + break + + if func is None: + continue + + # For Java, pass the full optimized code to handle class member insertion. + # For other languages, extract just the target function. + if language == Language.JAVA: + new_code = lang_support.replace_function(new_code, func, code_to_apply) + modified = True + else: + optimized_func = _extract_function_from_code( + lang_support, code_to_apply, func.function_name, module_abspath + ) + if optimized_func: + new_code = lang_support.replace_function(new_code, func, optimized_func) + modified = True + + if not modified: + logger.warning(f"Could not find function {function_names} in {module_abspath}") + return False + + if original_source_code.strip() == new_code.strip(): + return False + + module_abspath.write_text(new_code, encoding="utf8") + return True + + +def _extract_function_from_code( + lang_support: LanguageSupport, source_code: str, function_name: str, file_path: Path +) -> str | None: + """Extract a specific function's source code from a code string. + + Includes JSDoc/docstring comments if present. 
+ """ + try: + functions = lang_support.discover_functions(source_code, file_path, _SOURCE_CRITERIA) + for func in functions: + if func.function_name == function_name: + lines = source_code.splitlines(keepends=True) + effective_start = func.doc_start_line or func.starting_line + if effective_start and func.ending_line and effective_start <= len(lines): + func_lines = lines[effective_start - 1 : func.ending_line] + return "".join(func_lines) + except Exception as e: + logger.debug(f"Error extracting function {function_name}: {e}") + + return None diff --git a/codeflash/languages/java/function_optimizer.py b/codeflash/languages/java/function_optimizer.py new file mode 100644 index 000000000..21b498695 --- /dev/null +++ b/codeflash/languages/java/function_optimizer.py @@ -0,0 +1,399 @@ +from __future__ import annotations + +import hashlib +import re +from collections import defaultdict +from pathlib import Path +from typing import TYPE_CHECKING, Any + +from codeflash.cli_cmds.console import logger +from codeflash.code_utils.code_utils import encoded_tokens_len, get_run_tmp_file +from codeflash.code_utils.config_consts import ( + OPTIMIZATION_CONTEXT_TOKEN_LIMIT, + READ_WRITABLE_LIMIT_ERROR, + TESTGEN_CONTEXT_TOKEN_LIMIT, + TESTGEN_LIMIT_ERROR, + TOTAL_LOOPING_TIME_EFFECTIVE, +) +from codeflash.either import Failure, Success +from codeflash.models.models import ( + CodeOptimizationContext, + CodeString, + CodeStringsMarkdown, + FunctionSource, + TestingMode, + TestResults, +) +from codeflash.optimization.function_optimizer import FunctionOptimizer +from codeflash.verification.equivalence import compare_test_results + +if TYPE_CHECKING: + from codeflash.either import Result + from codeflash.languages.base import CodeContext, HelperFunction + from codeflash.models.models import CoverageData, GeneratedTestsList, OriginalCodeBaseline, TestDiff + + +class JavaFunctionOptimizer(FunctionOptimizer): + def get_code_optimization_context(self) -> Result[CodeOptimizationContext, 
str]: + from codeflash.languages import get_language_support + from codeflash.languages.base import Language + + language = Language(self.function_to_optimize.language) + lang_support = get_language_support(language) + + try: + code_context = lang_support.extract_code_context( + self.function_to_optimize, self.project_root, self.project_root + ) + return Success( + self._build_optimization_context( + code_context, + self.function_to_optimize.file_path, + self.function_to_optimize.language, + self.project_root, + ) + ) + except ValueError as e: + return Failure(str(e)) + + @staticmethod + def _build_optimization_context( + code_context: CodeContext, + file_path: Path, + language: str, + project_root: Path, + optim_token_limit: int = OPTIMIZATION_CONTEXT_TOKEN_LIMIT, + testgen_token_limit: int = TESTGEN_CONTEXT_TOKEN_LIMIT, + ) -> CodeOptimizationContext: + imports_code = "\n".join(code_context.imports) if code_context.imports else "" + + try: + target_relative_path = file_path.resolve().relative_to(project_root.resolve()) + except ValueError: + target_relative_path = file_path + + helpers_by_file: dict[Path, list[HelperFunction]] = defaultdict(list) + helper_function_sources = [] + + for helper in code_context.helper_functions: + helpers_by_file[helper.file_path].append(helper) + helper_function_sources.append( + FunctionSource( + file_path=helper.file_path, + qualified_name=helper.qualified_name, + fully_qualified_name=helper.qualified_name, + only_function_name=helper.name, + source_code=helper.source_code, + ) + ) + + target_file_code = code_context.target_code + same_file_helpers = helpers_by_file.get(file_path, []) + if same_file_helpers: + helper_code = "\n\n".join(h.source_code for h in same_file_helpers) + target_file_code = target_file_code + "\n\n" + helper_code + + if imports_code: + target_file_code = imports_code + "\n\n" + target_file_code + + read_writable_code_strings = [ + CodeString(code=target_file_code, file_path=target_relative_path, 
language=language) + ] + + for helper_file_path, file_helpers in helpers_by_file.items(): + if helper_file_path == file_path: + continue + try: + helper_relative_path = helper_file_path.resolve().relative_to(project_root.resolve()) + except ValueError: + helper_relative_path = helper_file_path + combined_helper_code = "\n\n".join(h.source_code for h in file_helpers) + read_writable_code_strings.append( + CodeString(code=combined_helper_code, file_path=helper_relative_path, language=language) + ) + + read_writable_code = CodeStringsMarkdown(code_strings=read_writable_code_strings, language=language) + + testgen_code_strings = read_writable_code_strings.copy() + if code_context.imported_type_skeletons: + testgen_code_strings.append( + CodeString(code=code_context.imported_type_skeletons, file_path=None, language=language) + ) + testgen_context = CodeStringsMarkdown(code_strings=testgen_code_strings, language=language) + + read_writable_tokens = encoded_tokens_len(read_writable_code.markdown) + if read_writable_tokens > optim_token_limit: + raise ValueError(READ_WRITABLE_LIMIT_ERROR) + + testgen_tokens = encoded_tokens_len(testgen_context.markdown) + if testgen_tokens > testgen_token_limit: + raise ValueError(TESTGEN_LIMIT_ERROR) + + code_hash = hashlib.sha256(read_writable_code.flat.encode("utf-8")).hexdigest() + + return CodeOptimizationContext( + testgen_context=testgen_context, + read_writable_code=read_writable_code, + read_only_context_code=code_context.read_only_context, + hashing_code_context=read_writable_code.flat, + hashing_code_context_hash=code_hash, + helper_functions=helper_function_sources, + testgen_helper_fqns=[fs.fully_qualified_name for fs in helper_function_sources], + preexisting_objects=set(), + ) + + def _get_java_sources_root(self) -> Path: + """Get the Java sources root directory for test files. + + For Java projects, tests_root might include the package path + (e.g., test/src/com/aerospike/test). 
We need to find the base directory + that should contain the package directories, not the tests_root itself. + + This method looks for standard Java package prefixes (com, org, net, io, edu, gov) + in the tests_root path and returns everything before that prefix. + + Returns: + Path to the Java sources root directory. + + """ + tests_root = self.test_cfg.tests_root + parts = tests_root.parts + + if tests_root.name == "src": + return tests_root + + if len(parts) >= 3 and parts[-3:] == ("src", "test", "java"): + return tests_root + + src_subdir = tests_root / "src" + if src_subdir.exists() and src_subdir.is_dir(): + return src_subdir + + maven_test_dir = tests_root / "src" / "test" / "java" + if maven_test_dir.exists() and maven_test_dir.is_dir(): + return maven_test_dir + + standard_package_prefixes = ("com", "org", "net", "io", "edu", "gov") + for i, part in enumerate(parts): + if part in standard_package_prefixes: + if i > 0: + return Path(*parts[:i]) + + for i, part in enumerate(parts): + if part == "java" and i > 0: + return Path(*parts[: i + 1]) + + return tests_root + + def _fix_java_test_paths( + self, behavior_source: str, perf_source: str, used_paths: set[Path] + ) -> tuple[Path, Path, str, str]: + """Fix Java test file paths to match package structure. + + Java requires test files to be in directories matching their package. + This method extracts the package and class from the generated tests + and returns correct paths. If the path would conflict with an already + used path, it renames the class by adding an index suffix. + + Args: + behavior_source: Source code of the behavior test. + perf_source: Source code of the performance test. + used_paths: Set of already used behavior file paths. + + Returns: + Tuple of (behavior_path, perf_path, modified_behavior_source, modified_perf_source) + with correct package structure and unique class names. 
+ + """ + package_match = re.search(r"^\s*package\s+([\w.]+)\s*;", behavior_source, re.MULTILINE) + package_name = package_match.group(1) if package_match else "" + + # JPMS: If a test module-info.java exists, remap the package to the + # test module namespace to avoid split-package errors. + test_dir = self._get_java_sources_root() + test_module_info = test_dir / "module-info.java" + if package_name and test_module_info.exists(): + mi_content = test_module_info.read_text(encoding="utf-8") + mi_match = re.search(r"module\s+([\w.]+)", mi_content) + if mi_match: + test_module_name = mi_match.group(1) + main_dir = test_dir.parent.parent / "main" / "java" + main_module_info = main_dir / "module-info.java" + if main_module_info.exists(): + main_content = main_module_info.read_text(encoding="utf-8") + main_match = re.search(r"module\s+([\w.]+)", main_content) + if main_match: + main_module_name = main_match.group(1) + if package_name.startswith(main_module_name): + suffix = package_name[len(main_module_name) :] + new_package = test_module_name + suffix + old_decl = f"package {package_name};" + new_decl = f"package {new_package};" + behavior_source = behavior_source.replace(old_decl, new_decl, 1) + perf_source = perf_source.replace(old_decl, new_decl, 1) + package_name = new_package + logger.debug(f"[JPMS] Remapped package: {old_decl} -> {new_decl}") + + class_match = re.search(r"^(?:public\s+)?class\s+(\w+)", behavior_source, re.MULTILINE) + behavior_class = class_match.group(1) if class_match else "GeneratedTest" + + perf_class_match = re.search(r"^(?:public\s+)?class\s+(\w+)", perf_source, re.MULTILINE) + perf_class = perf_class_match.group(1) if perf_class_match else "GeneratedPerfTest" + + test_dir = self._get_java_sources_root() + + if package_name: + package_path = package_name.replace(".", "/") + behavior_path = test_dir / package_path / f"{behavior_class}.java" + perf_path = test_dir / package_path / f"{perf_class}.java" + else: + package_path = "" + 
behavior_path = test_dir / f"{behavior_class}.java" + perf_path = test_dir / f"{perf_class}.java" + + modified_behavior_source = behavior_source + modified_perf_source = perf_source + if behavior_path in used_paths: + index = 2 + while True: + new_behavior_class = f"{behavior_class}_{index}" + new_perf_class = f"{perf_class}_{index}" + if package_path: + new_behavior_path = test_dir / package_path / f"{new_behavior_class}.java" + new_perf_path = test_dir / package_path / f"{new_perf_class}.java" + else: + new_behavior_path = test_dir / f"{new_behavior_class}.java" + new_perf_path = test_dir / f"{new_perf_class}.java" + if new_behavior_path not in used_paths: + behavior_path = new_behavior_path + perf_path = new_perf_path + modified_behavior_source = re.sub( + rf"^((?:public\s+)?class\s+){re.escape(behavior_class)}(\b)", + rf"\g<1>{new_behavior_class}\g<2>", + behavior_source, + count=1, + flags=re.MULTILINE, + ) + modified_perf_source = re.sub( + rf"^((?:public\s+)?class\s+){re.escape(perf_class)}(\b)", + rf"\g<1>{new_perf_class}\g<2>", + perf_source, + count=1, + flags=re.MULTILINE, + ) + logger.debug(f"[JAVA] Renamed duplicate test class from {behavior_class} to {new_behavior_class}") + break + index += 1 + + behavior_path.parent.mkdir(parents=True, exist_ok=True) + perf_path.parent.mkdir(parents=True, exist_ok=True) + + logger.debug(f"[JAVA] Fixed paths: behavior={behavior_path}, perf={perf_path}") + return behavior_path, perf_path, modified_behavior_source, modified_perf_source + + def fixup_generated_tests(self, generated_tests: GeneratedTestsList) -> GeneratedTestsList: + from codeflash.models.models import GeneratedTests, GeneratedTestsList + + used_paths: set[Path] = set() + fixed_tests: list[GeneratedTests] = [] + for test in generated_tests.generated_tests: + behavior_path, perf_path, behavior_source, perf_source = self._fix_java_test_paths( + test.instrumented_behavior_test_source, test.instrumented_perf_test_source, used_paths + ) + 
used_paths.add(behavior_path) + fixed_tests.append( + GeneratedTests( + generated_original_test_source=test.generated_original_test_source, + instrumented_behavior_test_source=behavior_source, + instrumented_perf_test_source=perf_source, + behavior_file_path=behavior_path, + perf_file_path=perf_path, + ) + ) + return GeneratedTestsList(generated_tests=fixed_tests) + + def compare_candidate_results( + self, + baseline_results: OriginalCodeBaseline, + candidate_behavior_results: TestResults, + optimization_candidate_index: int, + ) -> tuple[bool, list[TestDiff]]: + original_sqlite = get_run_tmp_file(Path("test_return_values_0.sqlite")) + candidate_sqlite = get_run_tmp_file(Path(f"test_return_values_{optimization_candidate_index}.sqlite")) + + if original_sqlite.exists() and candidate_sqlite.exists(): + match, diffs = self.language_support.compare_test_results( + original_sqlite, candidate_sqlite, project_root=self.project_root + ) + candidate_sqlite.unlink(missing_ok=True) + else: + match, diffs = compare_test_results(baseline_results.behavior_test_results, candidate_behavior_results) + return match, diffs + + def should_skip_sqlite_cleanup(self, testing_type: TestingMode, optimization_iteration: int) -> bool: + return testing_type == TestingMode.BEHAVIOR or optimization_iteration == 0 + + def parse_line_profile_test_results( + self, line_profiler_output_file: Path | None + ) -> tuple[TestResults | dict[str, Any], CoverageData | None]: + if line_profiler_output_file is None or not line_profiler_output_file.exists(): + return TestResults(test_results=[]), None + if hasattr(self.language_support, "parse_line_profile_results"): + return self.language_support.parse_line_profile_results(line_profiler_output_file), None + return TestResults(test_results=[]), None + + def line_profiler_step( + self, code_context: CodeOptimizationContext, original_helper_code: dict[Path, str], candidate_index: int + ) -> dict[str, Any]: + if not hasattr(self.language_support, 
"instrument_source_for_line_profiler"): + logger.warning(f"Language support for {self.language_support.language} doesn't support line profiling") + return {"timings": {}, "unit": 0, "str_out": ""} + + original_source = self.function_to_optimize.file_path.read_text(encoding="utf-8") + try: + line_profiler_output_path = get_run_tmp_file(Path("line_profiler_output.json")) + + success = self.language_support.instrument_source_for_line_profiler( + func_info=self.function_to_optimize, line_profiler_output_file=line_profiler_output_path + ) + if not success: + return {"timings": {}, "unit": 0, "str_out": ""} + + test_env = self.get_test_env( + codeflash_loop_index=0, codeflash_test_iteration=candidate_index, codeflash_tracer_disable=1 + ) + + _test_results, _ = self.run_and_parse_tests( + testing_type=TestingMode.LINE_PROFILE, + test_env=test_env, + test_files=self.test_files, + optimization_iteration=0, + testing_time=TOTAL_LOOPING_TIME_EFFECTIVE, + enable_coverage=False, + code_context=code_context, + line_profiler_output_file=line_profiler_output_path, + ) + + return self.language_support.parse_line_profile_results(line_profiler_output_path) + except Exception as e: + logger.warning(f"Failed to run line profiling: {e}") + return {"timings": {}, "unit": 0, "str_out": ""} + finally: + self.function_to_optimize.file_path.write_text(original_source, encoding="utf-8") + + def replace_function_and_helpers_with_optimized_code( + self, + code_context: CodeOptimizationContext, + optimized_code: CodeStringsMarkdown, + original_helper_code: dict[Path, str], + ) -> bool: + did_update = False + for module_abspath, qualified_names in self.group_functions_by_file(code_context).items(): + did_update |= self.language_support.replace_function_definitions( + function_names=list(qualified_names), + optimized_code=optimized_code, + module_abspath=module_abspath, + project_root_path=self.project_root, + function_to_optimize=self.function_to_optimize, + ) + return did_update diff --git 
a/codeflash/languages/java/parse.py b/codeflash/languages/java/parse.py new file mode 100644 index 000000000..84cee9e25 --- /dev/null +++ b/codeflash/languages/java/parse.py @@ -0,0 +1,236 @@ +"""Java-specific JUnit XML parsing with 5-field compact timing markers. + +Java uses compact 5-field markers: + Start: !$######module:class.test:func:loop_index:iteration_id######$! + End: !######module:class.test:func:loop_index:iteration_id:runtime######! + +Maven/Surefire may not capture per-test stdout in JUnit XML system-out, +so we also support fallback to subprocess stdout. +""" + +from __future__ import annotations + +import re +from typing import TYPE_CHECKING + +from junitparser.xunit2 import JUnitXml + +from codeflash.cli_cmds.console import console, logger +from codeflash.code_utils.code_utils import module_name_from_file_path +from codeflash.models.models import FunctionTestInvocation, InvocationId, TestResults + +if TYPE_CHECKING: + import subprocess + from pathlib import Path + + from codeflash.models.models import TestFiles + from codeflash.verification.verification_utils import TestConfig + +start_pattern = re.compile(r"!\$######([^:]*):([^:]*):([^:]*):([^:]*):([^:]+)######\$!") +end_pattern = re.compile(r"!######([^:]*):([^:]*):([^:]*):([^:]*):([^:]+):([^:]+)######!") + + +def _parse_func(file_path: Path): + from lxml.etree import XMLParser, parse + + xml_parser = XMLParser(huge_tree=True) + return parse(file_path, xml_parser) + + +def parse_java_test_xml( + test_xml_file_path: Path, + test_files: TestFiles, + test_config: TestConfig, + run_result: subprocess.CompletedProcess | None = None, +) -> TestResults: + from codeflash.verification.parse_test_output import resolve_test_file_from_class_path + + test_results = TestResults() + if not test_xml_file_path.exists(): + logger.warning(f"No test results for {test_xml_file_path} found.") + console.rule() + return test_results + try: + xml = JUnitXml.fromfile(str(test_xml_file_path), parse_func=_parse_func) + 
except Exception as e: + logger.warning(f"Failed to parse {test_xml_file_path} as JUnitXml. Exception: {e}") + return test_results + base_dir = test_config.tests_project_rootdir + + # Pre-parse fallback stdout once (not per testcase) to avoid O(n^2) complexity + # Maven/Surefire doesn't always capture per-test stdout in JUnit XML system-out + java_fallback_stdout = None + java_fallback_begin_matches = None + java_fallback_end_matches = None + if run_result is not None: + try: + fallback_stdout = run_result.stdout if isinstance(run_result.stdout, str) else run_result.stdout.decode() + _begin = list(start_pattern.finditer(fallback_stdout)) + if _begin: + java_fallback_stdout = fallback_stdout + java_fallback_begin_matches = _begin + java_fallback_end_matches = {} + for _m in end_pattern.finditer(fallback_stdout): + java_fallback_end_matches[_m.groups()[:5]] = _m + except Exception: + pass + + for suite in xml: + for testcase in suite: + class_name = testcase.classname + test_file_name = suite._elem.attrib.get("file") # noqa: SLF001 + + test_class_path = testcase.classname + try: + if testcase.name is None: + logger.debug( + f"testcase.name is None for testcase {testcase!r} in file {test_xml_file_path}, skipping" + ) + continue + test_function = testcase.name.split("[", 1)[0] if "[" in testcase.name else testcase.name + except (AttributeError, TypeError) as e: + msg = ( + f"Accessing testcase.name in parse_test_xml for testcase {testcase!r} in file" + f" {test_xml_file_path} has exception: {e}" + ) + logger.exception(msg) + continue + if test_file_name is None: + if test_class_path: + test_file_path = resolve_test_file_from_class_path(test_class_path, base_dir) + if test_file_path is None: + logger.warning(f"Could not find the test for file name - {test_class_path} ") + continue + else: + from codeflash.code_utils.code_utils import file_path_from_module_name + + test_file_path = file_path_from_module_name(test_function, base_dir) + else: + test_file_path = base_dir / 
test_file_name + assert test_file_path, f"Test file path not found for {test_file_name}" + + if not test_file_path.exists(): + logger.warning(f"Could not find the test for file name - {test_file_path} ") + continue + test_type = test_files.get_test_type_by_instrumented_file_path(test_file_path) + if test_type is None: + test_type = test_files.get_test_type_by_original_file_path(test_file_path) + if test_type is None: + registered_paths = [str(tf.instrumented_behavior_file_path) for tf in test_files.test_files] + logger.warning( + f"Test type not found for '{test_file_path}'. " + f"Registered test files: {registered_paths}. Skipping test case." + ) + continue + test_module_path = module_name_from_file_path(test_file_path, test_config.tests_project_rootdir) + result = testcase.is_passed + test_class = None + if class_name is not None and class_name.startswith(test_module_path): + test_class = class_name[len(test_module_path) + 1 :] + + loop_index = int(testcase.name.split("[ ")[-1][:-2]) if testcase.name and "[" in testcase.name else 1 + + timed_out = False + if len(testcase.result) > 1: + logger.debug(f"!!!!!Multiple results for {testcase.name or ''} in {test_xml_file_path}!!!") + if len(testcase.result) == 1: + message = testcase.result[0].message.lower() + if "failed: timeout >" in message or "timed out" in message: + timed_out = True + + sys_stdout = testcase.system_out or "" + + begin_matches = list(start_pattern.finditer(sys_stdout)) + end_matches: dict[tuple, re.Match] = {} + for match in end_pattern.finditer(sys_stdout): + end_matches[match.groups()[:5]] = match + + # Fallback to subprocess stdout when JUnit XML system-out has no markers + if not begin_matches and java_fallback_begin_matches is not None: + assert java_fallback_stdout is not None + assert java_fallback_end_matches is not None + sys_stdout = java_fallback_stdout + begin_matches = java_fallback_begin_matches + end_matches = java_fallback_end_matches + + if not begin_matches: + test_results.add( 
+ FunctionTestInvocation( + loop_index=loop_index, + id=InvocationId( + test_module_path=test_module_path, + test_class_name=test_class, + test_function_name=test_function, + function_getting_tested="", + iteration_id="", + ), + file_name=test_file_path, + runtime=None, + test_framework=test_config.test_framework, + did_pass=result, + test_type=test_type, + return_value=None, + timed_out=timed_out, + stdout="", + ) + ) + else: + for match_index, match in enumerate(begin_matches): + groups = match.groups() + runtime = None + + end_key = groups[:5] + end_match = end_matches.get(end_key) + iteration_id = groups[4] + loop_idx = int(groups[3]) + test_module = groups[0] + class_test_field = groups[1] + if "." in class_test_field: + test_class_str, test_func = class_test_field.rsplit(".", 1) + else: + test_class_str = class_test_field + test_func = test_function + func_getting_tested = groups[2] + + if end_match: + stdout = sys_stdout[match.end() : end_match.start()] + runtime = int(end_match.groups()[5]) + elif match_index == len(begin_matches) - 1: + stdout = sys_stdout[match.end() :] + else: + stdout = sys_stdout[match.end() : begin_matches[match_index + 1].start()] + + test_results.add( + FunctionTestInvocation( + loop_index=loop_idx, + id=InvocationId( + test_module_path=test_module, + test_class_name=test_class_str, + test_function_name=test_func, + function_getting_tested=func_getting_tested, + iteration_id=iteration_id, + ), + file_name=test_file_path, + runtime=runtime, + test_framework=test_config.test_framework, + did_pass=result, + test_type=test_type, + return_value=None, + timed_out=timed_out, + stdout=stdout, + ) + ) + + if not test_results: + logger.info( + f"Tests '{[test_file.original_file_path for test_file in test_files.test_files]}' failed to run, skipping" + ) + if run_result is not None: + stdout, stderr = "", "" + try: + stdout = run_result.stdout.decode() + stderr = run_result.stderr.decode() + except AttributeError: + stdout = run_result.stderr + 
logger.debug(f"Test log - STDOUT : {stdout} \n STDERR : {stderr}") + return test_results diff --git a/codeflash/languages/java/replacement.py b/codeflash/languages/java/replacement.py index e6628286e..77ed3400e 100644 --- a/codeflash/languages/java/replacement.py +++ b/codeflash/languages/java/replacement.py @@ -85,8 +85,6 @@ def _parse_optimization_source(new_source: str, target_method_name: str, analyze end = target_method.end_line target_method_source = "".join(lines[start:end]) else: - # Target method not found in the generated class — the LLM generated - # a different method. Signal invalid candidate with empty source. logger.warning( "Generated class does not contain target method '%s'. Skipping candidate.", target_method_name ) @@ -308,8 +306,6 @@ def replace_function( # Parse the optimization to extract components parsed = _parse_optimization_source(new_source, func_name, analyzer) - # If the parsed optimization has no valid target source (e.g., the LLM generated - # a method with a different name), skip this candidate entirely. if not parsed.target_method_source.strip(): logger.warning("No valid replacement found for method '%s'. 
Returning original source.", func_name) return source diff --git a/codeflash/languages/java/support.py b/codeflash/languages/java/support.py index 3186f2165..a17b83e22 100644 --- a/codeflash/languages/java/support.py +++ b/codeflash/languages/java/support.py @@ -15,7 +15,7 @@ from codeflash.languages.java.concurrency_analyzer import analyze_function_concurrency from codeflash.languages.java.config import detect_java_project from codeflash.languages.java.context import extract_code_context, find_helper_functions -from codeflash.languages.java.discovery import discover_functions, discover_functions_from_source +from codeflash.languages.java.discovery import discover_functions_from_source from codeflash.languages.java.formatter import format_java_code, normalize_java_code from codeflash.languages.java.instrumentation import ( instrument_existing_test, @@ -93,22 +93,89 @@ def default_file_extension(self) -> str: def dir_excludes(self) -> frozenset[str]: return frozenset({"target", "build", ".gradle", ".mvn", ".idea"}) + @property + def default_language_version(self) -> str | None: + return "17" + + @property + def valid_test_frameworks(self) -> tuple[str, ...]: + return ("junit5", "junit4", "testng") + + @property + def test_result_serialization_format(self) -> str: + return "json" + + def parse_test_xml( + self, test_xml_file_path: Path, test_files: Any, test_config: Any, run_result: Any = None + ) -> Any: + from codeflash.languages.java.parse import parse_java_test_xml + + return parse_java_test_xml(test_xml_file_path, test_files, test_config, run_result) + def postprocess_generated_tests( self, generated_tests: GeneratedTestsList, test_framework: str, project_root: Path, source_file_path: Path ) -> GeneratedTestsList: _ = test_framework, project_root, source_file_path return generated_tests + def process_generated_test_strings( + self, + generated_test_source: str, + instrumented_behavior_test_source: str, + instrumented_perf_test_source: str, + 
function_to_optimize: Any, + test_path: Path, + test_cfg: Any, + project_module_system: str | None, + ) -> tuple[str, str, str]: + from codeflash.languages.java.instrumentation import instrument_generated_java_test + + func_name = function_to_optimize.function_name + qualified_name = function_to_optimize.qualified_name + + instrumented_behavior_test_source = instrument_generated_java_test( + test_code=generated_test_source, + function_name=func_name, + qualified_name=qualified_name, + mode="behavior", + function_to_optimize=function_to_optimize, + ) + + instrumented_perf_test_source = instrument_generated_java_test( + test_code=generated_test_source, + function_name=func_name, + qualified_name=qualified_name, + mode="performance", + function_to_optimize=function_to_optimize, + ) + + logger.debug("Instrumented Java tests locally for %s", func_name) + return generated_test_source, instrumented_behavior_test_source, instrumented_perf_test_source + def add_global_declarations(self, optimized_code: str, original_source: str, module_abspath: Path) -> str: return original_source + def prepare_module(self, module_code: str, module_path: Path, project_root: Path) -> tuple[dict[Path, Any], None]: + from codeflash.models.models import ValidCode + + validated_original_code: dict[Path, ValidCode] = { + module_path: ValidCode(source_code=module_code, normalized_code=module_code) + } + return validated_original_code, None + + @property + def function_optimizer_class(self) -> type: + from codeflash.languages.java.function_optimizer import JavaFunctionOptimizer + + return JavaFunctionOptimizer + # === Discovery === def discover_functions( - self, file_path: Path, filter_criteria: FunctionFilterCriteria | None = None + self, source: str, file_path: Path, filter_criteria: FunctionFilterCriteria | None = None ) -> list[FunctionToOptimize]: - """Find all optimizable functions in a Java file.""" - return discover_functions(file_path, filter_criteria, self._analyzer) + """Find all 
optimizable functions in Java source code.""" + return discover_functions_from_source(source, file_path, filter_criteria, self._analyzer) def discover_functions_from_source( self, source: str, file_path: Path | None = None, filter_criteria: FunctionFilterCriteria | None = None @@ -151,6 +218,26 @@ def replace_function(self, source: str, function: FunctionToOptimize, new_source """Replace a function in source code with new implementation.""" return replace_function(source, function, new_source, self._analyzer) + def replace_function_definitions( + self, + function_names: list[str], + optimized_code: Any, + module_abspath: Path, + project_root_path: Path, + function_to_optimize: FunctionToOptimize | None = None, + ) -> bool: + """Replace function definitions in a Java source file with optimized code.""" + from codeflash.languages.code_replacer import replace_function_definitions_for_language + + return replace_function_definitions_for_language( + function_names=function_names, + optimized_code=optimized_code, + module_abspath=module_abspath, + project_root_path=project_root_path, + lang_support=self, + function_to_optimize=function_to_optimize, + ) + def format_code(self, source: str, file_path: Path | None = None) -> str: """Format Java code.""" project_root = file_path.parent if file_path else None @@ -279,14 +366,61 @@ def compare_test_results( # === Configuration === + def adjust_test_config_for_discovery(self, test_cfg: Any) -> None: + """Adjust test config before test discovery for Java. + + Ensures test file resolution works correctly in parse_test_xml. 
+ """ + test_cfg.tests_project_rootdir = test_cfg.tests_root + def get_test_file_suffix(self) -> str: """Get the test file suffix for Java.""" return "Test.java" + def resolve_test_file_from_class_path(self, test_class_path: str, base_dir: Path) -> Path | None: + """Resolve Java class path (e.g., "com.example.TestClass") to a test file.""" + file_ext = self.default_file_extension + relative_path = test_class_path.replace(".", "/") + file_ext + + # 1. Directly under base_dir + potential_path = base_dir / relative_path + if potential_path.exists(): + return potential_path + + # 2. Under src/test/java relative to project root (Maven structure) + project_root = base_dir.parent if base_dir.name == "java" else base_dir + while project_root.name not in ("", "/") and not (project_root / "pom.xml").exists(): + project_root = project_root.parent + if (project_root / "pom.xml").exists(): + potential_path = project_root / "src" / "test" / "java" / relative_path + if potential_path.exists(): + return potential_path + + # 3. 
Search by filename in base_dir tree + file_name = test_class_path.rsplit(".", maxsplit=1)[-1] + file_ext + for java_file in base_dir.rglob(file_name): + return java_file + + return None + + def resolve_test_module_path_for_pr( + self, test_module_path: str, tests_project_rootdir: Path, non_generated_tests: set[Path] + ) -> Path | None: + """Resolve Java test module path (class name) to absolute file path for PR.""" + lang_ext = self.default_file_extension + abs_path = (tests_project_rootdir / f"{test_module_path}{lang_ext}").resolve() + for candidate in non_generated_tests: + if candidate.stem == test_module_path: + return candidate + return abs_path + def get_comment_prefix(self) -> str: """Get the comment prefix for Java.""" return "//" + def get_test_dir_for_source(self, test_dir: Path, source_file: Path | None) -> Path | None: + return None + def find_test_root(self, project_root: Path) -> Path | None: """Find the test root directory for a Java project.""" return find_test_root(project_root) diff --git a/codeflash/languages/java/test_runner.py b/codeflash/languages/java/test_runner.py index 3b837b640..c5cf14850 100644 --- a/codeflash/languages/java/test_runner.py +++ b/codeflash/languages/java/test_runner.py @@ -996,7 +996,8 @@ def _run_benchmarking_tests_maven( run_env["CODEFLASH_LOOP_INDEX"] = str(loop_idx) run_env["CODEFLASH_MODE"] = "performance" run_env["CODEFLASH_TEST_ITERATION"] = "0" - run_env["CODEFLASH_INNER_ITERATIONS"] = str(inner_iterations) + if "CODEFLASH_INNER_ITERATIONS" not in run_env: + run_env["CODEFLASH_INNER_ITERATIONS"] = str(inner_iterations) result = _run_maven_tests( maven_root, test_paths, run_env, timeout=per_loop_timeout, mode="performance", test_module=test_module @@ -1187,7 +1188,8 @@ def run_benchmarking_tests( run_env["CODEFLASH_LOOP_INDEX"] = str(loop_idx) run_env["CODEFLASH_MODE"] = "performance" run_env["CODEFLASH_TEST_ITERATION"] = "0" - run_env["CODEFLASH_INNER_ITERATIONS"] = str(inner_iterations) + if 
"CODEFLASH_INNER_ITERATIONS" not in run_env: + run_env["CODEFLASH_INNER_ITERATIONS"] = str(inner_iterations) # Run tests directly with XML report generation loop_start = time.time() diff --git a/codeflash/languages/javascript/edit_tests.py b/codeflash/languages/javascript/edit_tests.py index 00ba04f9c..0bb5a7e10 100644 --- a/codeflash/languages/javascript/edit_tests.py +++ b/codeflash/languages/javascript/edit_tests.py @@ -11,27 +11,8 @@ from pathlib import Path from codeflash.cli_cmds.console import logger -from codeflash.code_utils.time_utils import format_perf, format_time +from codeflash.code_utils.time_utils import format_runtime_comment from codeflash.models.models import GeneratedTests, GeneratedTestsList -from codeflash.result.critic import performance_gain - - -def format_runtime_comment(original_time: int, optimized_time: int) -> str: - """Format a runtime comparison comment for JavaScript. - - Args: - original_time: Original runtime in nanoseconds. - optimized_time: Optimized runtime in nanoseconds. - - Returns: - Formatted comment string with // prefix. 
- - """ - perf_gain = format_perf( - abs(performance_gain(original_runtime_ns=original_time, optimized_runtime_ns=optimized_time) * 100) - ) - status = "slower" if optimized_time > original_time else "faster" - return f"// {format_time(original_time)} -> {format_time(optimized_time)} ({perf_gain}% {status})" def add_runtime_comments(source: str, original_runtimes: dict[str, int], optimized_runtimes: dict[str, int]) -> str: @@ -120,7 +101,7 @@ def find_matching_test(test_description: str) -> str | None: # Only add comment if line has a function call and doesn't already have a comment if func_call_pattern.search(line) and "//" not in line and "expect(" in line: orig_time, opt_time = timing_by_full_name[current_matched_full_name] - comment = format_runtime_comment(orig_time, opt_time) + comment = format_runtime_comment(orig_time, opt_time, comment_prefix="//") logger.debug(f"[js-annotations] Adding comment to test '{current_test_name}': {comment}") # Add comment at end of line line = f"{line.rstrip()} {comment}" @@ -230,6 +211,8 @@ def inject_test_globals(generated_tests: GeneratedTestsList, test_framework: str # Use vitest imports for vitest projects, jest imports for jest projects if test_framework == "vitest": global_import = "import { vi, describe, it, expect, beforeEach, afterEach, beforeAll, test } from 'vitest'\n" + elif test_framework == "mocha": + global_import = "import assert from 'node:assert/strict';\n" else: # Default to jest imports for jest and other frameworks global_import = ( diff --git a/codeflash/languages/javascript/function_optimizer.py b/codeflash/languages/javascript/function_optimizer.py new file mode 100644 index 000000000..fce273dd5 --- /dev/null +++ b/codeflash/languages/javascript/function_optimizer.py @@ -0,0 +1,226 @@ +from __future__ import annotations + +import hashlib +from collections import defaultdict +from pathlib import Path +from typing import TYPE_CHECKING, Any + +from codeflash.cli_cmds.console import logger +from 
codeflash.code_utils.code_utils import encoded_tokens_len, get_run_tmp_file +from codeflash.code_utils.config_consts import ( + OPTIMIZATION_CONTEXT_TOKEN_LIMIT, + READ_WRITABLE_LIMIT_ERROR, + TESTGEN_CONTEXT_TOKEN_LIMIT, + TESTGEN_LIMIT_ERROR, + TOTAL_LOOPING_TIME_EFFECTIVE, +) +from codeflash.either import Failure, Success +from codeflash.models.models import ( + CodeOptimizationContext, + CodeString, + CodeStringsMarkdown, + FunctionSource, + TestingMode, + TestResults, +) +from codeflash.optimization.function_optimizer import FunctionOptimizer +from codeflash.verification.equivalence import compare_test_results + +if TYPE_CHECKING: + from codeflash.either import Result + from codeflash.languages.base import CodeContext, HelperFunction + from codeflash.models.models import CoverageData, OriginalCodeBaseline, TestDiff + + +class JavaScriptFunctionOptimizer(FunctionOptimizer): + def get_code_optimization_context(self) -> Result[CodeOptimizationContext, str]: + from codeflash.languages import get_language_support + from codeflash.languages.base import Language + + language = Language(self.function_to_optimize.language) + lang_support = get_language_support(language) + + try: + code_context = lang_support.extract_code_context( + self.function_to_optimize, self.project_root, self.project_root + ) + return Success( + self._build_optimization_context( + code_context, + self.function_to_optimize.file_path, + self.function_to_optimize.language, + self.project_root, + ) + ) + except ValueError as e: + return Failure(str(e)) + + @staticmethod + def _build_optimization_context( + code_context: CodeContext, + file_path: Path, + language: str, + project_root: Path, + optim_token_limit: int = OPTIMIZATION_CONTEXT_TOKEN_LIMIT, + testgen_token_limit: int = TESTGEN_CONTEXT_TOKEN_LIMIT, + ) -> CodeOptimizationContext: + imports_code = "\n".join(code_context.imports) if code_context.imports else "" + + try: + target_relative_path = 
file_path.resolve().relative_to(project_root.resolve()) + except ValueError: + target_relative_path = file_path + + helpers_by_file: dict[Path, list[HelperFunction]] = defaultdict(list) + helper_function_sources = [] + + for helper in code_context.helper_functions: + helpers_by_file[helper.file_path].append(helper) + helper_function_sources.append( + FunctionSource( + file_path=helper.file_path, + qualified_name=helper.qualified_name, + fully_qualified_name=helper.qualified_name, + only_function_name=helper.name, + source_code=helper.source_code, + ) + ) + + target_file_code = code_context.target_code + same_file_helpers = helpers_by_file.get(file_path, []) + if same_file_helpers: + helper_code = "\n\n".join(h.source_code for h in same_file_helpers) + target_file_code = target_file_code + "\n\n" + helper_code + + if imports_code: + target_file_code = imports_code + "\n\n" + target_file_code + + read_writable_code_strings = [ + CodeString(code=target_file_code, file_path=target_relative_path, language=language) + ] + + for helper_file_path, file_helpers in helpers_by_file.items(): + if helper_file_path == file_path: + continue + try: + helper_relative_path = helper_file_path.resolve().relative_to(project_root.resolve()) + except ValueError: + helper_relative_path = helper_file_path + combined_helper_code = "\n\n".join(h.source_code for h in file_helpers) + read_writable_code_strings.append( + CodeString(code=combined_helper_code, file_path=helper_relative_path, language=language) + ) + + read_writable_code = CodeStringsMarkdown(code_strings=read_writable_code_strings, language=language) + testgen_context = CodeStringsMarkdown(code_strings=read_writable_code_strings.copy(), language=language) + + read_writable_tokens = encoded_tokens_len(read_writable_code.markdown) + if read_writable_tokens > optim_token_limit: + raise ValueError(READ_WRITABLE_LIMIT_ERROR) + + testgen_tokens = encoded_tokens_len(testgen_context.markdown) + if testgen_tokens > testgen_token_limit: + 
raise ValueError(TESTGEN_LIMIT_ERROR) + + code_hash = hashlib.sha256(read_writable_code.flat.encode("utf-8")).hexdigest() + + return CodeOptimizationContext( + testgen_context=testgen_context, + read_writable_code=read_writable_code, + read_only_context_code=code_context.read_only_context, + hashing_code_context=read_writable_code.flat, + hashing_code_context_hash=code_hash, + helper_functions=helper_function_sources, + testgen_helper_fqns=[fs.fully_qualified_name for fs in helper_function_sources], + preexisting_objects=set(), + ) + + def compare_candidate_results( + self, + baseline_results: OriginalCodeBaseline, + candidate_behavior_results: TestResults, + optimization_candidate_index: int, + ) -> tuple[bool, list[TestDiff]]: + original_sqlite = get_run_tmp_file(Path("test_return_values_0.sqlite")) + candidate_sqlite = get_run_tmp_file(Path(f"test_return_values_{optimization_candidate_index}.sqlite")) + + if original_sqlite.exists() and candidate_sqlite.exists(): + js_root = self.test_cfg.js_project_root or self.project_root + match, diffs = self.language_support.compare_test_results( + original_sqlite, candidate_sqlite, project_root=js_root + ) + candidate_sqlite.unlink(missing_ok=True) + else: + match, diffs = compare_test_results(baseline_results.behavior_test_results, candidate_behavior_results) + return match, diffs + + def should_skip_sqlite_cleanup(self, testing_type: TestingMode, optimization_iteration: int) -> bool: + return testing_type == TestingMode.BEHAVIOR or optimization_iteration == 0 + + def parse_line_profile_test_results( + self, line_profiler_output_file: Path | None + ) -> tuple[TestResults | dict[str, Any], CoverageData | None]: + if line_profiler_output_file is None or not line_profiler_output_file.exists(): + return TestResults(test_results=[]), None + if hasattr(self.language_support, "parse_line_profile_results"): + return self.language_support.parse_line_profile_results(line_profiler_output_file), None + return 
TestResults(test_results=[]), None + + def line_profiler_step( + self, code_context: CodeOptimizationContext, original_helper_code: dict[Path, str], candidate_index: int + ) -> dict[str, Any]: + if not hasattr(self.language_support, "instrument_source_for_line_profiler"): + logger.warning(f"Language support for {self.language_support.language} doesn't support line profiling") + return {"timings": {}, "unit": 0, "str_out": ""} + + original_source = self.function_to_optimize.file_path.read_text(encoding="utf-8") + try: + line_profiler_output_path = get_run_tmp_file(Path("line_profiler_output.json")) + + success = self.language_support.instrument_source_for_line_profiler( + func_info=self.function_to_optimize, line_profiler_output_file=line_profiler_output_path + ) + if not success: + return {"timings": {}, "unit": 0, "str_out": ""} + + test_env = self.get_test_env( + codeflash_loop_index=0, codeflash_test_iteration=candidate_index, codeflash_tracer_disable=1 + ) + + _test_results, _ = self.run_and_parse_tests( + testing_type=TestingMode.LINE_PROFILE, + test_env=test_env, + test_files=self.test_files, + optimization_iteration=0, + testing_time=TOTAL_LOOPING_TIME_EFFECTIVE, + enable_coverage=False, + code_context=code_context, + line_profiler_output_file=line_profiler_output_path, + ) + + return self.language_support.parse_line_profile_results(line_profiler_output_path) + except Exception as e: + logger.warning(f"Failed to run line profiling: {e}") + return {"timings": {}, "unit": 0, "str_out": ""} + finally: + self.function_to_optimize.file_path.write_text(original_source, encoding="utf-8") + + def replace_function_and_helpers_with_optimized_code( + self, + code_context: CodeOptimizationContext, + optimized_code: CodeStringsMarkdown, + original_helper_code: dict[Path, str], + ) -> bool: + from codeflash.languages.code_replacer import replace_function_definitions_for_language + + did_update = False + for module_abspath, qualified_names in 
self.group_functions_by_file(code_context).items(): + did_update |= replace_function_definitions_for_language( + function_names=list(qualified_names), + optimized_code=optimized_code, + module_abspath=module_abspath, + project_root_path=self.project_root, + lang_support=self.language_support, + function_to_optimize=self.function_to_optimize, + ) + return did_update diff --git a/codeflash/languages/javascript/import_resolver.py b/codeflash/languages/javascript/import_resolver.py index b5ec67115..34dd1990f 100644 --- a/codeflash/languages/javascript/import_resolver.py +++ b/codeflash/languages/javascript/import_resolver.py @@ -499,6 +499,18 @@ def _extract_helper_from_file( # Split source into lines for JSDoc extraction lines = source.splitlines(keepends=True) + def helper_from_func(func): + effective_start = func.doc_start_line or func.start_line + helper_source = "".join(lines[effective_start - 1 : func.end_line]) + return HelperFunction( + name=func.name, + qualified_name=func.name, + file_path=file_path, + source_code=helper_source, + start_line=effective_start, + end_line=func.end_line, + ) + # Handle "default" export - look for default exported function if function_name == "default": # Find the default export @@ -506,38 +518,14 @@ def _extract_helper_from_file( # For now, return first function if looking for default # TODO: Implement proper default export detection for func in functions: - # Extract source including JSDoc if present - effective_start = func.doc_start_line or func.start_line - helper_lines = lines[effective_start - 1 : func.end_line] - helper_source = "".join(helper_lines) - - return HelperFunction( - name=func.name, - qualified_name=func.name, - file_path=file_path, - source_code=helper_source, - start_line=effective_start, - end_line=func.end_line, - ) + return helper_from_func(func) return None # Find the function by name functions = file_analyzer.find_functions(source, include_methods=True) for func in functions: if func.name == 
function_name: - # Extract source including JSDoc if present - effective_start = func.doc_start_line or func.start_line - helper_lines = lines[effective_start - 1 : func.end_line] - helper_source = "".join(helper_lines) - - return HelperFunction( - name=func.name, - qualified_name=func.name, - file_path=file_path, - source_code=helper_source, - start_line=effective_start, - end_line=func.end_line, - ) + return helper_from_func(func) logger.debug("Function %s not found in %s", function_name, file_path) return None diff --git a/codeflash/languages/javascript/mocha_runner.py b/codeflash/languages/javascript/mocha_runner.py new file mode 100644 index 000000000..5c288d67b --- /dev/null +++ b/codeflash/languages/javascript/mocha_runner.py @@ -0,0 +1,597 @@ +"""Mocha test runner for JavaScript/TypeScript. + +This module provides functions for running Mocha tests for behavioral +verification and performance benchmarking. Uses Mocha's built-in JSON reporter +and converts the output to JUnit XML in Python, avoiding extra npm dependencies. +""" + +from __future__ import annotations + +import json +import subprocess +import time +from pathlib import Path +from typing import TYPE_CHECKING +from xml.etree.ElementTree import Element, SubElement, tostring + +from codeflash.cli_cmds.console import logger +from codeflash.cli_cmds.init_javascript import get_package_install_command +from codeflash.code_utils.code_utils import get_run_tmp_file +from codeflash.code_utils.shell_utils import get_cross_platform_subprocess_run_args + +if TYPE_CHECKING: + from codeflash.models.models import TestFiles + + +def _find_mocha_project_root(file_path: Path) -> Path | None: + """Find the Mocha project root by looking for .mocharc.* or package.json. + + Traverses up from the given file path to find the directory containing + a Mocha config file. Falls back to package.json if no Mocha config is found. + + Args: + file_path: A file path within the Mocha project. 
+ + Returns: + The project root directory, or None if not found. + + """ + current = file_path.parent if file_path.is_file() else file_path + package_json_dir = None + + mocha_config_names = ( + ".mocharc.yml", + ".mocharc.yaml", + ".mocharc.json", + ".mocharc.js", + ".mocharc.cjs", + ".mocharc.mjs", + ) + + while current != current.parent: + if any((current / cfg).exists() for cfg in mocha_config_names): + return current + if package_json_dir is None and (current / "package.json").exists(): + package_json_dir = current + current = current.parent + + return package_json_dir + + +def _ensure_runtime_files(project_root: Path) -> None: + """Ensure JavaScript runtime package is installed in the project. + + Installs codeflash package if not already present. + The package provides all runtime files needed for test instrumentation. + + Args: + project_root: The project root directory. + + """ + node_modules_pkg = project_root / "node_modules" / "codeflash" + if node_modules_pkg.exists(): + logger.debug("codeflash already installed") + return + + install_cmd = get_package_install_command(project_root, "codeflash", dev=True) + try: + result = subprocess.run(install_cmd, check=False, cwd=project_root, capture_output=True, text=True, timeout=120) + if result.returncode == 0: + logger.debug(f"Installed codeflash using {install_cmd[0]}") + return + logger.warning(f"Failed to install codeflash: {result.stderr}") + except Exception as e: + logger.warning(f"Error installing codeflash: {e}") + + logger.error(f"Could not install codeflash. Please install it manually: {' '.join(install_cmd)}") + + +def mocha_json_to_junit_xml(json_str: str, output_file: Path) -> None: + """Convert Mocha's JSON reporter output to JUnit XML. + + Mocha JSON format: + { stats: {...}, tests: [...], failures: [...], passes: [...], pending: [...] } + + Each test object has: fullTitle, title, duration, err, ... + + Args: + json_str: JSON string from Mocha's --reporter json output. 
+ output_file: Path to write the JUnit XML file. + + """ + try: + data = json.loads(json_str) + except json.JSONDecodeError: + logger.warning("Failed to parse Mocha JSON output") + # Write a minimal empty JUnit XML so downstream parsing doesn't break + output_file.write_text('\n\n') + return + + tests = data.get("tests", []) + stats = data.get("stats", {}) + + testsuites = Element("testsuites") + testsuites.set("tests", str(stats.get("tests", len(tests)))) + testsuites.set("failures", str(stats.get("failures", 0))) + testsuites.set("time", str((stats.get("duration", 0) or 0) / 1000.0)) + + # Group tests by suite (parent describe block) + suites: dict[str, list[dict]] = {} + for test in tests: + full_title = test.get("fullTitle", "") + title = test.get("title", "") + # Suite name = fullTitle minus the test's own title + suite_name = full_title[: -len(title)].strip() if title and full_title.endswith(title) else "root" + suite_name = suite_name or "root" + suites.setdefault(suite_name, []).append(test) + + for suite_name, suite_tests in suites.items(): + testsuite = SubElement(testsuites, "testsuite") + testsuite.set("name", suite_name) + testsuite.set("tests", str(len(suite_tests))) + + suite_failures = 0 + suite_time = 0.0 + + for test in suite_tests: + testcase = SubElement(testsuite, "testcase") + testcase.set("classname", suite_name) + testcase.set("name", test.get("title", "unknown")) + duration_ms = test.get("duration", 0) or 0 + duration_s = duration_ms / 1000.0 + testcase.set("time", str(duration_s)) + suite_time += duration_s + + err = test.get("err", {}) + if err and err.get("message"): + suite_failures += 1 + failure = SubElement(testcase, "failure") + failure.set("message", err.get("message", "")) + failure.text = err.get("stack", err.get("message", "")) + + if test.get("pending"): + skipped = SubElement(testcase, "skipped") + skipped.set("message", "pending") + + testsuite.set("failures", str(suite_failures)) + testsuite.set("time", str(suite_time)) + + 
xml_bytes = tostring(testsuites, encoding="unicode") + output_file.write_text(f'\n{xml_bytes}\n') + + +def _extract_mocha_json(stdout: str) -> str | None: + """Extract Mocha JSON output from stdout that may contain mixed content. + + Mocha's JSON reporter writes the JSON blob to stdout, but other output + (console.log from tests, codeflash markers) may be interleaved. + We look for the JSON object by finding the outermost { ... } that + contains the expected "stats" key. + + Args: + stdout: Full stdout from the Mocha subprocess. + + Returns: + The extracted JSON string, or None if not found. + + """ + # Try the whole stdout first + stripped = stdout.strip() + if stripped.startswith("{") and '"stats"' in stripped: + try: + json.loads(stripped) + return stripped + except json.JSONDecodeError: + pass + + # Find the outermost JSON object containing "stats" + depth = 0 + start = None + for i, ch in enumerate(stdout): + if ch == "{": + if depth == 0: + start = i + depth += 1 + elif ch == "}": + depth -= 1 + if depth == 0 and start is not None: + candidate = stdout[start : i + 1] + if '"stats"' in candidate: + try: + json.loads(candidate) + return candidate + except json.JSONDecodeError: + pass + start = None + + return None + + +def _build_mocha_behavioral_command( + test_files: list[Path], timeout: int | None = None, project_root: Path | None = None +) -> list[str]: + """Build Mocha command for behavioral tests. + + Args: + test_files: List of test files to run. + timeout: Optional timeout in seconds (converted to ms for Mocha). + project_root: Project root directory. + + Returns: + Command list for subprocess execution. 
+ + """ + cmd = ["npx", "mocha", "--reporter", "json", "--jobs", "1", "--exit"] + + if timeout: + cmd.extend(["--timeout", str(timeout * 1000)]) + else: + cmd.extend(["--timeout", "60000"]) + + cmd.extend(str(f.resolve()) for f in test_files) + + return cmd + + +def _build_mocha_benchmarking_command( + test_files: list[Path], timeout: int | None = None, project_root: Path | None = None +) -> list[str]: + """Build Mocha command for benchmarking tests. + + Args: + test_files: List of test files to run. + timeout: Optional timeout in seconds (converted to ms for Mocha). + project_root: Project root directory. + + Returns: + Command list for subprocess execution. + + """ + cmd = ["npx", "mocha", "--reporter", "json", "--jobs", "1", "--exit"] + + if timeout: + cmd.extend(["--timeout", str(timeout * 1000)]) + else: + cmd.extend(["--timeout", "120000"]) + + cmd.extend(str(f.resolve()) for f in test_files) + + return cmd + + +def _build_mocha_line_profile_command( + test_files: list[Path], timeout: int | None = None, project_root: Path | None = None +) -> list[str]: + """Build Mocha command for line profiling tests. + + Args: + test_files: List of test files to run. + timeout: Optional timeout in seconds (converted to ms for Mocha). + project_root: Project root directory. + + Returns: + Command list for subprocess execution. + + """ + cmd = ["npx", "mocha", "--reporter", "json", "--jobs", "1", "--exit"] + + if timeout: + cmd.extend(["--timeout", str(timeout * 1000)]) + else: + cmd.extend(["--timeout", "60000"]) + + cmd.extend(str(f.resolve()) for f in test_files) + + return cmd + + +def _run_mocha_and_convert( + mocha_cmd: list[str], + mocha_env: dict[str, str], + effective_cwd: Path, + result_file_path: Path, + subprocess_timeout: int, + label: str, +) -> subprocess.CompletedProcess: + """Run Mocha subprocess, extract JSON output, and convert to JUnit XML. + + Args: + mocha_cmd: Mocha command list. + mocha_env: Environment variables. + effective_cwd: Working directory. 
+ result_file_path: Path to write JUnit XML. + subprocess_timeout: Timeout in seconds. + label: Label for log messages (e.g. "behavioral", "benchmarking"). + + Returns: + CompletedProcess with combined stdout/stderr. + + """ + try: + run_args = get_cross_platform_subprocess_run_args( + cwd=effective_cwd, env=mocha_env, timeout=subprocess_timeout, check=False, text=True, capture_output=True + ) + result = subprocess.run(mocha_cmd, **run_args) # noqa: PLW1510 + + # Combine stderr into stdout + stdout = result.stdout or "" + if result.stderr: + stdout = stdout + "\n" + result.stderr if stdout else result.stderr + + result = subprocess.CompletedProcess(args=result.args, returncode=result.returncode, stdout=stdout, stderr="") + + logger.debug(f"Mocha {label} result: returncode={result.returncode}") + if result.returncode != 0: + logger.warning( + f"Mocha {label} failed with returncode={result.returncode}.\n" + f"Command: {' '.join(mocha_cmd)}\n" + f"Stdout: {stdout[:2000] if stdout else '(empty)'}" + ) + + except subprocess.TimeoutExpired: + logger.warning(f"Mocha {label} tests timed out after {subprocess_timeout}s") + result = subprocess.CompletedProcess( + args=mocha_cmd, returncode=-1, stdout="", stderr=f"{label} tests timed out" + ) + except FileNotFoundError: + logger.error("Mocha not found. Make sure Mocha is installed (npm install mocha)") + result = subprocess.CompletedProcess( + args=mocha_cmd, returncode=-1, stdout="", stderr="Mocha not found. 
Run: npm install mocha" + ) + + # Extract Mocha JSON from stdout and convert to JUnit XML + if result.stdout: + mocha_json = _extract_mocha_json(result.stdout) + if mocha_json: + mocha_json_to_junit_xml(mocha_json, result_file_path) + logger.debug(f"Converted Mocha JSON to JUnit XML: {result_file_path}") + else: + logger.warning(f"Could not extract Mocha JSON from stdout (len={len(result.stdout)})") + result_file_path.write_text('\n\n') + else: + result_file_path.write_text('\n\n') + + return result + + +def run_mocha_behavioral_tests( + test_paths: TestFiles, + test_env: dict[str, str], + cwd: Path, + *, + timeout: int | None = None, + project_root: Path | None = None, + enable_coverage: bool = False, + candidate_index: int = 0, +) -> tuple[Path, subprocess.CompletedProcess, Path | None, Path | None]: + """Run Mocha tests and return results in a format compatible with pytest output. + + Args: + test_paths: TestFiles object containing test file information. + test_env: Environment variables for the test run. + cwd: Working directory for running tests. + timeout: Optional timeout in seconds. + project_root: Mocha project root (directory containing .mocharc.* or package.json). + enable_coverage: Whether to collect coverage information (not yet supported for Mocha). + candidate_index: Index of the candidate being tested. + + Returns: + Tuple of (result_file_path, subprocess_result, coverage_json_path, None). 
+ + """ + result_file_path = get_run_tmp_file(Path("mocha_results.xml")) + + test_files = [Path(file.instrumented_behavior_file_path) for file in test_paths.test_files] + + if project_root is None and test_files: + project_root = _find_mocha_project_root(test_files[0]) + + effective_cwd = project_root if project_root else cwd + logger.debug(f"Mocha working directory: {effective_cwd}") + + _ensure_runtime_files(effective_cwd) + + mocha_cmd = _build_mocha_behavioral_command(test_files=test_files, timeout=timeout, project_root=effective_cwd) + + mocha_env = test_env.copy() + codeflash_sqlite_file = get_run_tmp_file(Path(f"test_return_values_{candidate_index}.sqlite")) + mocha_env["CODEFLASH_OUTPUT_FILE"] = str(codeflash_sqlite_file) + mocha_env["CODEFLASH_TEST_ITERATION"] = str(candidate_index) + mocha_env["CODEFLASH_LOOP_INDEX"] = "1" + mocha_env["CODEFLASH_MODE"] = "behavior" + mocha_env["CODEFLASH_RANDOM_SEED"] = "42" + + logger.debug(f"Running Mocha behavioral tests: {' '.join(mocha_cmd)}") + + subprocess_timeout = max(120, (timeout or 60) * 10) + + start_time_ns = time.perf_counter_ns() + try: + result = _run_mocha_and_convert( + mocha_cmd=mocha_cmd, + mocha_env=mocha_env, + effective_cwd=effective_cwd, + result_file_path=result_file_path, + subprocess_timeout=subprocess_timeout, + label="behavioral", + ) + finally: + wall_clock_ns = time.perf_counter_ns() - start_time_ns + logger.debug(f"Mocha behavioral tests completed in {wall_clock_ns / 1e9:.2f}s") + + if result_file_path.exists(): + file_size = result_file_path.stat().st_size + logger.debug(f"Mocha JUnit XML created: {result_file_path} ({file_size} bytes)") + else: + logger.warning(f"Mocha JUnit XML not created at {result_file_path}") + + return result_file_path, result, None, None + + +def run_mocha_benchmarking_tests( + test_paths: TestFiles, + test_env: dict[str, str], + cwd: Path, + *, + timeout: int | None = None, + project_root: Path | None = None, + min_loops: int = 5, + max_loops: int = 100, + 
target_duration_ms: int = 10_000, + stability_check: bool = True, +) -> tuple[Path, subprocess.CompletedProcess]: + """Run Mocha benchmarking tests with internal looping via capturePerf. + + Args: + test_paths: TestFiles object containing test file information. + test_env: Environment variables for the test run. + cwd: Working directory for running tests. + timeout: Optional timeout in seconds for the entire benchmark run. + project_root: Mocha project root. + min_loops: Minimum number of loop iterations. + max_loops: Maximum number of loop iterations. + target_duration_ms: Target total duration in milliseconds for all loops. + stability_check: Whether to enable stability-based early stopping. + + Returns: + Tuple of (result_file_path, subprocess_result with stdout from all iterations). + + """ + result_file_path = get_run_tmp_file(Path("mocha_perf_results.xml")) + + test_files = [Path(file.benchmarking_file_path) for file in test_paths.test_files if file.benchmarking_file_path] + + logger.debug( + f"Mocha benchmark test file selection: {len(test_files)}/{len(test_paths.test_files)} have benchmarking_file_path" + ) + if not test_files: + logger.warning("No perf test files found! 
Cannot run benchmarking tests.") + + if project_root is None and test_files: + project_root = _find_mocha_project_root(test_files[0]) + + effective_cwd = project_root if project_root else cwd + logger.debug(f"Mocha benchmarking working directory: {effective_cwd}") + + _ensure_runtime_files(effective_cwd) + + mocha_cmd = _build_mocha_benchmarking_command(test_files=test_files, timeout=timeout, project_root=effective_cwd) + + mocha_env = test_env.copy() + codeflash_sqlite_file = get_run_tmp_file(Path("test_return_values_0.sqlite")) + mocha_env["CODEFLASH_OUTPUT_FILE"] = str(codeflash_sqlite_file) + mocha_env["CODEFLASH_TEST_ITERATION"] = "0" + mocha_env["CODEFLASH_MODE"] = "performance" + mocha_env["CODEFLASH_RANDOM_SEED"] = "42" + + mocha_env["CODEFLASH_PERF_LOOP_COUNT"] = str(max_loops) + mocha_env["CODEFLASH_PERF_MIN_LOOPS"] = str(min_loops) + mocha_env["CODEFLASH_PERF_TARGET_DURATION_MS"] = str(target_duration_ms) + mocha_env["CODEFLASH_PERF_STABILITY_CHECK"] = "true" if stability_check else "false" + mocha_env["CODEFLASH_LOOP_INDEX"] = "1" + + if test_files: + test_module_path = str( + test_files[0].relative_to(effective_cwd) + if test_files[0].is_relative_to(effective_cwd) + else test_files[0].name + ) + mocha_env["CODEFLASH_TEST_MODULE"] = test_module_path + + total_timeout = max(120, (target_duration_ms // 1000) + 60, timeout or 120) + + logger.debug(f"Running Mocha benchmarking tests: {' '.join(mocha_cmd)}") + logger.debug( + f"Config: min_loops={min_loops}, max_loops={max_loops}, " + f"target_duration={target_duration_ms}ms, stability_check={stability_check}" + ) + + total_start_time = time.time() + try: + result = _run_mocha_and_convert( + mocha_cmd=mocha_cmd, + mocha_env=mocha_env, + effective_cwd=effective_cwd, + result_file_path=result_file_path, + subprocess_timeout=total_timeout, + label="benchmarking", + ) + finally: + wall_clock_seconds = time.time() - total_start_time + logger.debug(f"Mocha benchmarking completed in {wall_clock_seconds:.2f}s, 
returncode={result.returncode}") + + return result_file_path, result + + +def run_mocha_line_profile_tests( + test_paths: TestFiles, + test_env: dict[str, str], + cwd: Path, + *, + timeout: int | None = None, + project_root: Path | None = None, + line_profile_output_file: Path | None = None, +) -> tuple[Path, subprocess.CompletedProcess]: + """Run Mocha tests for line profiling. + + Args: + test_paths: TestFiles object containing test file information. + test_env: Environment variables for the test run. + cwd: Working directory for running tests. + timeout: Optional timeout in seconds for the subprocess. + project_root: Mocha project root. + line_profile_output_file: Path where line profile results will be written. + + Returns: + Tuple of (result_file_path, subprocess_result). + + """ + result_file_path = get_run_tmp_file(Path("mocha_line_profile_results.xml")) + + test_files = [] + for file in test_paths.test_files: + if file.instrumented_behavior_file_path: + test_files.append(Path(file.instrumented_behavior_file_path)) + elif file.benchmarking_file_path: + test_files.append(Path(file.benchmarking_file_path)) + + if project_root is None and test_files: + project_root = _find_mocha_project_root(test_files[0]) + + effective_cwd = project_root if project_root else cwd + logger.debug(f"Mocha line profiling working directory: {effective_cwd}") + + _ensure_runtime_files(effective_cwd) + + mocha_cmd = _build_mocha_line_profile_command(test_files=test_files, timeout=timeout, project_root=effective_cwd) + + mocha_env = test_env.copy() + codeflash_sqlite_file = get_run_tmp_file(Path("test_return_values_line_profile.sqlite")) + mocha_env["CODEFLASH_OUTPUT_FILE"] = str(codeflash_sqlite_file) + mocha_env["CODEFLASH_TEST_ITERATION"] = "0" + mocha_env["CODEFLASH_LOOP_INDEX"] = "1" + mocha_env["CODEFLASH_MODE"] = "line_profile" + mocha_env["CODEFLASH_RANDOM_SEED"] = "42" + + if line_profile_output_file: + mocha_env["CODEFLASH_LINE_PROFILE_OUTPUT"] = str(line_profile_output_file) 
+ + subprocess_timeout = max(120, (timeout or 60) * 10) + + logger.debug(f"Running Mocha line profile tests: {' '.join(mocha_cmd)}") + + start_time_ns = time.perf_counter_ns() + try: + result = _run_mocha_and_convert( + mocha_cmd=mocha_cmd, + mocha_env=mocha_env, + effective_cwd=effective_cwd, + result_file_path=result_file_path, + subprocess_timeout=subprocess_timeout, + label="line_profile", + ) + finally: + wall_clock_ns = time.perf_counter_ns() - start_time_ns + logger.debug(f"Mocha line profile tests completed in {wall_clock_ns / 1e9:.2f}s") + + return result_file_path, result diff --git a/codeflash/languages/javascript/normalizer.py b/codeflash/languages/javascript/normalizer.py new file mode 100644 index 000000000..39ae952cb --- /dev/null +++ b/codeflash/languages/javascript/normalizer.py @@ -0,0 +1,257 @@ +"""JavaScript/TypeScript code normalizer using tree-sitter. + +Not currently wired into JavaScriptSupport.normalize_code — kept as a +ready-to-use upgrade path when AST-based JS deduplication is needed. + +The old CodeNormalizer ABC (deleted from base.py) is preserved below for reference. +""" + +from __future__ import annotations + +import re +from abc import ABC, abstractmethod +from typing import TYPE_CHECKING + +if TYPE_CHECKING: + from tree_sitter import Node + + +# --------------------------------------------------------------------------- +# Reference: the old CodeNormalizer ABC that was deleted from base.py. +# Kept here so the interface contract is visible if we re-introduce a +# normalizer hierarchy later. +# --------------------------------------------------------------------------- +class CodeNormalizer(ABC): + @property + @abstractmethod + def language(self) -> str: ... + + @abstractmethod + def normalize(self, code: str) -> str: ... + + @abstractmethod + def normalize_for_hash(self, code: str) -> str: ... 
+ + def are_duplicates(self, code1: str, code2: str) -> bool: + try: + return self.normalize_for_hash(code1) == self.normalize_for_hash(code2) + except Exception: + return False + + def get_fingerprint(self, code: str) -> str: + import hashlib + + return hashlib.sha256(self.normalize_for_hash(code).encode()).hexdigest() + + +# --------------------------------------------------------------------------- + + +class JavaScriptVariableNormalizer: + """Normalizes JavaScript/TypeScript code for duplicate detection using tree-sitter. + + Normalizes local variable names while preserving function names, class names, + parameters, and imported names. + """ + + def __init__(self) -> None: + self.var_counter = 0 + self.var_mapping: dict[str, str] = {} + self.preserved_names: set[str] = set() + # Common JavaScript builtins + self.builtins = { + "console", + "window", + "document", + "Math", + "JSON", + "Object", + "Array", + "String", + "Number", + "Boolean", + "Date", + "RegExp", + "Error", + "Promise", + "Map", + "Set", + "WeakMap", + "WeakSet", + "Symbol", + "Proxy", + "Reflect", + "undefined", + "null", + "NaN", + "Infinity", + "globalThis", + "parseInt", + "parseFloat", + "isNaN", + "isFinite", + "eval", + "setTimeout", + "setInterval", + "clearTimeout", + "clearInterval", + "fetch", + "require", + "module", + "exports", + "process", + "__dirname", + "__filename", + "Buffer", + } + + def get_normalized_name(self, name: str) -> str: + """Get or create normalized name for a variable.""" + if name in self.builtins or name in self.preserved_names: + return name + if name not in self.var_mapping: + self.var_mapping[name] = f"var_{self.var_counter}" + self.var_counter += 1 + return self.var_mapping[name] + + def collect_preserved_names(self, node: Node, source_code: bytes) -> None: + """Collect names that should be preserved (function names, class names, imports, params).""" + # Function declarations and expressions - preserve the function name + if node.type in 
("function_declaration", "function_expression", "method_definition", "arrow_function"): + name_node = node.child_by_field_name("name") + if name_node: + self.preserved_names.add(source_code[name_node.start_byte : name_node.end_byte].decode("utf-8")) + # Preserve parameters + params_node = node.child_by_field_name("parameters") or node.child_by_field_name("parameter") + if params_node: + self._collect_parameter_names(params_node, source_code) + + # Class declarations + elif node.type == "class_declaration": + name_node = node.child_by_field_name("name") + if name_node: + self.preserved_names.add(source_code[name_node.start_byte : name_node.end_byte].decode("utf-8")) + + # Import declarations + elif node.type in ("import_statement", "import_declaration"): + for child in node.children: + if child.type == "import_clause": + self._collect_import_names(child, source_code) + elif child.type == "identifier": + self.preserved_names.add(source_code[child.start_byte : child.end_byte].decode("utf-8")) + + # Recurse + for child in node.children: + self.collect_preserved_names(child, source_code) + + def _collect_parameter_names(self, node: Node, source_code: bytes) -> None: + """Collect parameter names from a parameters node.""" + for child in node.children: + if child.type == "identifier": + self.preserved_names.add(source_code[child.start_byte : child.end_byte].decode("utf-8")) + elif child.type in ("required_parameter", "optional_parameter", "rest_parameter"): + pattern_node = child.child_by_field_name("pattern") + if pattern_node and pattern_node.type == "identifier": + self.preserved_names.add( + source_code[pattern_node.start_byte : pattern_node.end_byte].decode("utf-8") + ) + # Recurse for nested patterns + self._collect_parameter_names(child, source_code) + + def _collect_import_names(self, node: Node, source_code: bytes) -> None: + """Collect imported names from import clause.""" + for child in node.children: + if child.type == "identifier": + 
self.preserved_names.add(source_code[child.start_byte : child.end_byte].decode("utf-8")) + elif child.type == "import_specifier": + # Get the local name (alias or original) + alias_node = child.child_by_field_name("alias") + name_node = child.child_by_field_name("name") + if alias_node: + self.preserved_names.add(source_code[alias_node.start_byte : alias_node.end_byte].decode("utf-8")) + elif name_node: + self.preserved_names.add(source_code[name_node.start_byte : name_node.end_byte].decode("utf-8")) + self._collect_import_names(child, source_code) + + def normalize_tree(self, node: Node, source_code: bytes) -> str: + """Normalize the AST tree to a string representation for comparison.""" + parts: list[str] = [] + self._normalize_node(node, source_code, parts) + return " ".join(parts) + + def _normalize_node(self, node: Node, source_code: bytes, parts: list[str]) -> None: + """Recursively normalize a node.""" + # Skip comments + if node.type in ("comment", "line_comment", "block_comment"): + return + + # Handle identifiers - normalize variable names + if node.type == "identifier": + name = source_code[node.start_byte : node.end_byte].decode("utf-8") + normalized = self.get_normalized_name(name) + parts.append(normalized) + return + + # Handle type identifiers (TypeScript) - preserve as-is + if node.type == "type_identifier": + parts.append(source_code[node.start_byte : node.end_byte].decode("utf-8")) + return + + # Handle string literals - normalize to placeholder + if node.type in ("string", "template_string", "string_fragment"): + parts.append('"STR"') + return + + # Handle number literals - normalize to placeholder + if node.type == "number": + parts.append("NUM") + return + + # For leaf nodes, output the node type + if len(node.children) == 0: + text = source_code[node.start_byte : node.end_byte].decode("utf-8") + parts.append(text) + return + + # Output node type for structure + parts.append(f"({node.type}") + + # Recurse into children + for child in 
node.children: + self._normalize_node(child, source_code, parts) + + parts.append(")") + + +def _basic_normalize_js(code: str) -> str: + """Basic normalization: remove comments and normalize whitespace.""" + code = re.sub(r"//.*$", "", code, flags=re.MULTILINE) + code = re.sub(r"/\*.*?\*/", "", code, flags=re.DOTALL) + return " ".join(code.split()) + + +def normalize_js_code(code: str, typescript: bool = False) -> str: + """Normalize JavaScript/TypeScript code to a canonical form for comparison. + + Uses tree-sitter to parse and normalize variable names. Falls back to + basic comment/whitespace stripping if tree-sitter is unavailable or parsing fails. + + Not currently wired into JavaScriptSupport.normalize_code — kept as a + ready-to-use upgrade path when AST-based JS deduplication is needed. + """ + try: + from codeflash.languages.javascript.treesitter import TreeSitterAnalyzer, TreeSitterLanguage + + lang = TreeSitterLanguage.TYPESCRIPT if typescript else TreeSitterLanguage.JAVASCRIPT + analyzer = TreeSitterAnalyzer(lang) + tree = analyzer.parse(code) + + if tree.root_node.has_error: + return _basic_normalize_js(code) + + normalizer = JavaScriptVariableNormalizer() + source_bytes = code.encode("utf-8") + normalizer.collect_preserved_names(tree.root_node, source_bytes) + return normalizer.normalize_tree(tree.root_node, source_bytes) + except Exception: + return _basic_normalize_js(code) diff --git a/codeflash/languages/javascript/optimizer.py b/codeflash/languages/javascript/optimizer.py new file mode 100644 index 000000000..bc88786b1 --- /dev/null +++ b/codeflash/languages/javascript/optimizer.py @@ -0,0 +1,52 @@ +from __future__ import annotations + +from typing import TYPE_CHECKING + +from codeflash.cli_cmds.console import logger +from codeflash.models.models import ValidCode + +if TYPE_CHECKING: + from pathlib import Path + + from codeflash.verification.verification_utils import TestConfig + + +def prepare_javascript_module( + original_module_code: str, 
original_module_path: Path +) -> tuple[dict[Path, ValidCode], None]: + """Prepare a JavaScript/TypeScript module for optimization. + + Unlike Python, JS/TS doesn't need AST parsing or import analysis at this stage. + Returns a mapping of the file path to ValidCode with the source as-is. + """ + validated_original_code: dict[Path, ValidCode] = { + original_module_path: ValidCode(source_code=original_module_code, normalized_code=original_module_code) + } + return validated_original_code, None + + +def verify_js_requirements(test_cfg: TestConfig) -> None: + """Verify JavaScript/TypeScript requirements before optimization. + + Checks that Node.js, npm, and the test framework are available. + Logs warnings if requirements are not met but does not abort. + """ + from codeflash.languages import get_language_support + from codeflash.languages.base import Language + from codeflash.languages.test_framework import get_js_test_framework_or_default + + js_project_root = test_cfg.js_project_root + if not js_project_root: + return + + try: + js_support = get_language_support(Language.JAVASCRIPT) + test_framework = get_js_test_framework_or_default() + success, errors = js_support.verify_requirements(js_project_root, test_framework) + + if not success: + logger.warning("JavaScript requirements check found issues:") + for error in errors: + logger.warning(f" - {error}") + except Exception as e: + logger.debug(f"Failed to verify JS requirements: {e}") diff --git a/codeflash/languages/javascript/support.py b/codeflash/languages/javascript/support.py index 51526f94e..b18a2c7a5 100644 --- a/codeflash/languages/javascript/support.py +++ b/codeflash/languages/javascript/support.py @@ -23,7 +23,8 @@ from codeflash.languages.base import ReferenceInfo from codeflash.languages.javascript.treesitter import TypeDefinition - from codeflash.models.models import GeneratedTestsList, InvocationId + from codeflash.models.models import GeneratedTestsList, InvocationId, ValidCode + from 
codeflash.verification.verification_utils import TestConfig logger = logging.getLogger(__name__) @@ -50,8 +51,7 @@ def file_extensions(self) -> tuple[str, ...]: @property def default_file_extension(self) -> str: - """Default file extension for JavaScript.""" - return ".js" + return self.file_extensions[0] @property def test_framework(self) -> str: @@ -68,17 +68,62 @@ def comment_prefix(self) -> str: def dir_excludes(self) -> frozenset[str]: return frozenset({"node_modules", "dist", "build", ".next", ".nuxt", "coverage", ".cache", ".turbo", ".vercel"}) + @property + def default_language_version(self) -> str | None: + return "ES2022" + + @property + def valid_test_frameworks(self) -> tuple[str, ...]: + return ("jest", "mocha", "vitest") + + @property + def test_result_serialization_format(self) -> str: + return "json" + + def parse_test_xml( + self, test_xml_file_path: Path, test_files: Any, test_config: Any, run_result: Any = None + ) -> Any: + from codeflash.languages.javascript.parse import parse_jest_test_xml + from codeflash.verification.parse_test_output import parse_func, resolve_test_file_from_class_path + + return parse_jest_test_xml( + test_xml_file_path, + test_files, + test_config, + run_result, + parse_func=parse_func, + resolve_test_file_from_class_path=resolve_test_file_from_class_path, + ) + + def load_coverage( + self, + coverage_database_file: Path, + function_name: str, + code_context: Any, + source_file: Path, + coverage_config_file: Path | None = None, + ) -> Any: + from codeflash.verification.coverage_utils import JestCoverageUtils + + return JestCoverageUtils.load_from_jest_json( + coverage_json_path=coverage_database_file, + function_name=function_name, + code_context=code_context, + source_code_path=source_file, + ) + # === Discovery === def discover_functions( - self, file_path: Path, filter_criteria: FunctionFilterCriteria | None = None + self, source: str, file_path: Path, filter_criteria: FunctionFilterCriteria | None = None ) -> 
list[FunctionToOptimize]: - """Find all optimizable functions in a JavaScript file. + """Find all optimizable functions in JavaScript/TypeScript source code. - Uses tree-sitter to parse the file and find functions. + Uses tree-sitter to parse the source and find functions. Args: - file_path: Path to the JavaScript file to analyze. + source: Source code to analyze. + file_path: Path to the source file (used for language detection). filter_criteria: Optional criteria to filter functions. Returns: @@ -87,12 +132,6 @@ def discover_functions( """ criteria = filter_criteria or FunctionFilterCriteria() - try: - source = file_path.read_text(encoding="utf-8") - except Exception as e: - logger.warning("Failed to read %s: %s", file_path, e) - return [] - try: analyzer = get_analyzer_for_file(file_path) tree_functions = analyzer.find_functions( @@ -111,7 +150,7 @@ def discover_functions( # Skip non-exported functions (can't be imported in tests) # Exception: nested functions and methods are allowed if their parent is exported - if not func.is_exported and not func.parent_function: + if criteria.require_export and not func.is_exported and not func.parent_function: logger.debug(f"Skipping non-exported function: {func.name}") # noqa: G004 continue @@ -144,61 +183,6 @@ def discover_functions( logger.warning("Failed to parse %s: %s", file_path, e) return [] - def discover_functions_from_source(self, source: str, file_path: Path | None = None) -> list[FunctionToOptimize]: - """Find all functions in source code string. - - Uses tree-sitter to parse the source and find functions. - - Args: - source: The source code to analyze. - file_path: Optional file path for context (used for language detection). - - Returns: - List of FunctionToOptimize objects for discovered functions. 
- - """ - try: - # Use JavaScript analyzer by default, or detect from file path - if file_path: - analyzer = get_analyzer_for_file(file_path) - else: - analyzer = TreeSitterAnalyzer(TreeSitterLanguage.JAVASCRIPT) - - tree_functions = analyzer.find_functions( - source, include_methods=True, include_arrow_functions=True, require_name=True - ) - - functions: list[FunctionToOptimize] = [] - for func in tree_functions: - # Build parents list - parents: list[FunctionParent] = [] - if func.class_name: - parents.append(FunctionParent(name=func.class_name, type="ClassDef")) - if func.parent_function: - parents.append(FunctionParent(name=func.parent_function, type="FunctionDef")) - - functions.append( - FunctionToOptimize( - function_name=func.name, - file_path=file_path or Path("unknown"), - parents=parents, - starting_line=func.start_line, - ending_line=func.end_line, - starting_col=func.start_col, - ending_col=func.end_col, - is_async=func.is_async, - is_method=func.is_method, - language=str(self.language), - doc_start_line=func.doc_start_line, - ) - ) - - return functions - - except Exception as e: - logger.warning("Failed to parse source: %s", e) - return [] - def _get_test_patterns(self) -> list[str]: """Get test file patterns for this language. @@ -1508,7 +1492,7 @@ def _replace_function_text_based( return "".join(result_lines) def format_code(self, source: str, file_path: Path | None = None) -> str: - """Format JavaScript code using prettier (if available). + """Format JavaScript/TypeScript code using prettier (if available). Args: source: Source code to format. 
@@ -1519,9 +1503,10 @@ def format_code(self, source: str, file_path: Path | None = None) -> str: """ try: - # Try to use prettier via npx + stdin_filepath = str(file_path.name) if file_path else f"file{self.default_file_extension}" + result = subprocess.run( - ["npx", "prettier", "--stdin-filepath", "file.js"], + ["npx", "prettier", "--stdin-filepath", stdin_filepath], check=False, input=source, capture_output=True, @@ -1702,22 +1687,15 @@ def instrument_for_benchmarking(self, test_source: str, target_function: Functio # === Validation === - def validate_syntax(self, source: str) -> bool: - """Check if JavaScript source code is syntactically valid. - - Uses tree-sitter to parse and check for errors. - - Args: - source: Source code to validate. - - Returns: - True if valid, False otherwise. + @property + def treesitter_language(self) -> TreeSitterLanguage: + return TreeSitterLanguage.JAVASCRIPT - """ + def validate_syntax(self, source: str) -> bool: + """Check if source code is syntactically valid using tree-sitter.""" try: - analyzer = TreeSitterAnalyzer(TreeSitterLanguage.JAVASCRIPT) + analyzer = TreeSitterAnalyzer(self.treesitter_language) tree = analyzer.parse(source) - # Check if tree has errors return not tree.root_node.has_error except Exception: return False @@ -1744,6 +1722,11 @@ def normalize_code(self, source: str) -> str: normalized_lines.append(stripped) return "\n".join(normalized_lines) + def generate_concolic_tests( + self, test_cfg: Any, project_root: Any, function_to_optimize: Any, function_to_optimize_ast: Any + ) -> tuple[dict, str]: + return {}, "" + # === Test Editing === def add_runtime_comments( @@ -1909,6 +1892,92 @@ def compare_test_results( return compare_test_results(original_results_path, candidate_results_path, project_root=project_root) + @property + def function_optimizer_class(self) -> type: + from codeflash.languages.javascript.function_optimizer import JavaScriptFunctionOptimizer + + return JavaScriptFunctionOptimizer + + def 
prepare_module( + self, module_code: str, module_path: Path, project_root: Path + ) -> tuple[dict[Path, ValidCode], None]: + from codeflash.languages.javascript.optimizer import prepare_javascript_module + + return prepare_javascript_module(module_code, module_path) + + def setup_test_config(self, test_cfg: TestConfig, file_path: Path) -> None: + from codeflash.languages.javascript.optimizer import verify_js_requirements + from codeflash.languages.javascript.test_runner import find_node_project_root + + test_cfg.js_project_root = find_node_project_root(file_path) + verify_js_requirements(test_cfg) + + def adjust_test_config_for_discovery(self, test_cfg: TestConfig) -> None: + test_cfg.tests_project_rootdir = test_cfg.tests_root + + def detect_module_system(self, project_root: Path, source_file: Path) -> str | None: + from codeflash.languages.javascript.module_system import detect_module_system + + return detect_module_system(project_root, source_file) + + def process_generated_test_strings( + self, + generated_test_source: str, + instrumented_behavior_test_source: str, + instrumented_perf_test_source: str, + function_to_optimize: Any, + test_path: Path, + test_cfg: Any, + project_module_system: str | None, + ) -> tuple[str, str, str]: + from codeflash.languages.javascript.instrument import ( + TestingMode, + fix_imports_inside_test_blocks, + fix_jest_mock_paths, + instrument_generated_js_test, + validate_and_fix_import_style, + ) + from codeflash.languages.javascript.module_system import ( + ensure_module_system_compatibility, + ensure_vitest_imports, + ) + + source_file = Path(function_to_optimize.file_path) + + # Fix import statements that appear inside test blocks (invalid JS syntax) + generated_test_source = fix_imports_inside_test_blocks(generated_test_source) + + # Fix relative paths in jest.mock() calls + generated_test_source = fix_jest_mock_paths( + generated_test_source, test_path, source_file, test_cfg.tests_project_rootdir + ) + + # Validate and fix 
import styles (default vs named exports) + generated_test_source = validate_and_fix_import_style( + generated_test_source, source_file, function_to_optimize.function_name + ) + + # Convert module system if needed (e.g., CommonJS -> ESM for ESM projects) + generated_test_source = ensure_module_system_compatibility( + generated_test_source, project_module_system, test_cfg.tests_project_rootdir + ) + + # Ensure vitest imports are present when using vitest framework + generated_test_source = ensure_vitest_imports(generated_test_source, test_cfg.test_framework) + + # Instrument for behavior verification (writes to SQLite) + instrumented_behavior_test_source = instrument_generated_js_test( + test_code=generated_test_source, function_to_optimize=function_to_optimize, mode=TestingMode.BEHAVIOR + ) + + # Instrument for performance measurement (prints to stdout) + instrumented_perf_test_source = instrument_generated_js_test( + test_code=generated_test_source, function_to_optimize=function_to_optimize, mode=TestingMode.PERFORMANCE + ) + + logger.debug("Instrumented JS/TS tests locally for %s", function_to_optimize.function_name) + return generated_test_source, instrumented_behavior_test_source, instrumented_perf_test_source + # === Configuration === def get_test_file_suffix(self) -> str: @@ -1920,6 +1989,73 @@ def get_test_file_suffix(self) -> str: """ return ".test.js" + def get_test_dir_for_source(self, test_dir: Path, source_file: Path | None) -> Path | None: + """Find the appropriate test directory for a JavaScript/TypeScript package. + + For monorepos, this finds the package's test directory from the source file path. + For example: packages/workflow/src/utils.ts -> packages/workflow/test/codeflash-generated/ + + Args: + test_dir: The root tests directory (may be monorepo packages root). + source_file: Path to the source file being tested. + + Returns: + The test directory path, or None if not found. 
+ + """ + if source_file is None: + # No source path provided, check if test_dir itself has a test subdirectory + for test_subdir_name in ["test", "tests", "__tests__", "src/__tests__"]: + test_subdir = test_dir / test_subdir_name + if test_subdir.is_dir(): + codeflash_test_dir = test_subdir / "codeflash-generated" + codeflash_test_dir.mkdir(parents=True, exist_ok=True) + return codeflash_test_dir + return None + + try: + # Resolve paths for reliable comparison + tests_root = test_dir.resolve() + source_path = Path(source_file).resolve() + + # Walk up from the source file to find a directory with package.json or test/ folder + package_dir = None + + for parent in source_path.parents: + # Stop if we've gone above or reached the tests_root level + # For monorepos, tests_root might be /packages/ and we want to search within packages + if parent in (tests_root, tests_root.parent): + break + + # Check if this looks like a package root + has_package_json = (parent / "package.json").exists() + has_test_dir = any((parent / d).is_dir() for d in ["test", "tests", "__tests__"]) + + if has_package_json or has_test_dir: + package_dir = parent + break + + if package_dir: + # Find the test directory in this package + for test_subdir_name in ["test", "tests", "__tests__", "src/__tests__"]: + test_subdir = package_dir / test_subdir_name + if test_subdir.is_dir(): + codeflash_test_dir = test_subdir / "codeflash-generated" + codeflash_test_dir.mkdir(parents=True, exist_ok=True) + return codeflash_test_dir + + return None + except Exception: + return None + + def resolve_test_file_from_class_path(self, test_class_path: str, base_dir: Path) -> Path | None: + return None + + def resolve_test_module_path_for_pr( + self, test_module_path: str, tests_project_rootdir: Path, non_generated_tests: set[Path] + ) -> Path | None: + return None + def find_test_root(self, project_root: Path) -> Path | None: """Find the test root directory for a JavaScript project. 
@@ -2133,11 +2269,12 @@ def instrument_existing_test( for behavioral verification and performance benchmarking. Args: - test_path: Path to the test file. + test_string: The test source code string. call_positions: List of code positions where the function is called. function_to_optimize: The function being optimized. tests_project_root: Root directory of tests. mode: Testing mode - "behavior" or "performance". + test_path: Path to the test file. Returns: Tuple of (success, instrumented_code). @@ -2264,6 +2401,23 @@ def run_behavioral_tests( candidate_index=candidate_index, ) + if framework == "mocha": + from codeflash.languages.javascript.mocha_runner import run_mocha_behavioral_tests + + return run_mocha_behavioral_tests( + test_paths=test_paths, + test_env=test_env, + cwd=cwd, + timeout=timeout, + project_root=project_root, + enable_coverage=enable_coverage, + candidate_index=candidate_index, + ) + + if framework not in ("jest", "vitest", "mocha"): + msg = f"Test framework '{framework}' is not yet supported. Supported frameworks: jest, vitest, mocha." + raise NotImplementedError(msg) + from codeflash.languages.javascript.test_runner import run_jest_behavioral_tests return run_jest_behavioral_tests( @@ -2332,6 +2486,25 @@ def run_benchmarking_tests( target_duration_ms=int(target_duration_seconds * 1000), ) + if framework == "mocha": + from codeflash.languages.javascript.mocha_runner import run_mocha_benchmarking_tests + + logger.debug("Dispatching to run_mocha_benchmarking_tests") + return run_mocha_benchmarking_tests( + test_paths=test_paths, + test_env=test_env, + cwd=cwd, + timeout=timeout, + project_root=project_root, + min_loops=min_loops, + max_loops=effective_max_loops, + target_duration_ms=int(target_duration_seconds * 1000), + ) + + if framework not in ("jest", "vitest", "mocha"): + msg = f"Test framework '{framework}' is not yet supported. Supported frameworks: jest, vitest, mocha." 
+ raise NotImplementedError(msg) + from codeflash.languages.javascript.test_runner import run_jest_benchmarking_tests return run_jest_benchmarking_tests( @@ -2386,6 +2559,22 @@ def run_line_profile_tests( line_profile_output_file=line_profile_output_file, ) + if framework == "mocha": + from codeflash.languages.javascript.mocha_runner import run_mocha_line_profile_tests + + return run_mocha_line_profile_tests( + test_paths=test_paths, + test_env=test_env, + cwd=cwd, + timeout=timeout, + project_root=project_root, + line_profile_output_file=line_profile_output_file, + ) + + if framework not in ("jest", "vitest", "mocha"): + msg = f"Test framework '{framework}' is not yet supported. Supported frameworks: jest, vitest, mocha." + raise NotImplementedError(msg) + from codeflash.languages.javascript.test_runner import run_jest_line_profile_tests return run_jest_line_profile_tests( @@ -2438,62 +2627,9 @@ def _get_test_patterns(self) -> list[str]: ] def get_test_file_suffix(self) -> str: - """Get the test file suffix for TypeScript. - - Returns: - Jest test file suffix for TypeScript. - - """ + """Get the test file suffix for TypeScript.""" return ".test.ts" - def validate_syntax(self, source: str) -> bool: - """Check if TypeScript source code is syntactically valid. - - Uses tree-sitter TypeScript parser to parse and check for errors. - - Args: - source: Source code to validate. - - Returns: - True if valid, False otherwise. - - """ - try: - analyzer = TreeSitterAnalyzer(TreeSitterLanguage.TYPESCRIPT) - tree = analyzer.parse(source) - return not tree.root_node.has_error - except Exception: - return False - - def format_code(self, source: str, file_path: Path | None = None) -> str: - """Format TypeScript code using prettier (if available). - - Args: - source: Source code to format. - file_path: Optional file path for context. - - Returns: - Formatted source code. 
- - """ - try: - # Determine file extension for prettier - stdin_filepath = str(file_path.name) if file_path else "file.ts" - - # Try to use prettier via npx - result = subprocess.run( - ["npx", "prettier", "--stdin-filepath", stdin_filepath], - check=False, - input=source, - capture_output=True, - text=True, - timeout=30, - ) - if result.returncode == 0: - return result.stdout - except (subprocess.TimeoutExpired, FileNotFoundError): - pass - except Exception as e: - logger.debug("Prettier formatting failed: %s", e) - - return source + @property + def treesitter_language(self) -> TreeSitterLanguage: + return TreeSitterLanguage.TYPESCRIPT diff --git a/codeflash/languages/javascript/test_runner.py b/codeflash/languages/javascript/test_runner.py index 3a193602b..f60232ab6 100644 --- a/codeflash/languages/javascript/test_runner.py +++ b/codeflash/languages/javascript/test_runner.py @@ -42,6 +42,12 @@ def clear_created_config_files() -> None: _created_config_files.clear() +# The bundled JUnit reporter path, resolved as "codeflash/jest-reporter" +# This is shipped inside the codeflash npm runtime package, so it's always +# available when the codeflash runtime is installed (which is already required). +CODEFLASH_JEST_REPORTER = "codeflash/jest-reporter" + + def _detect_bundler_module_resolution(project_root: Path) -> bool: """Detect if the project uses moduleResolution: 'bundler' in tsconfig. @@ -363,7 +369,7 @@ def _get_jest_config_for_project(project_root: Path) -> Path | None: return original_jest_config -def _find_node_project_root(file_path: Path) -> Path | None: +def find_node_project_root(file_path: Path) -> Path | None: """Find the Node.js project root by looking for package.json. 
Traverses up from the given file path to find the nearest directory @@ -680,7 +686,7 @@ def run_jest_behavioral_tests( # Use provided project_root, or detect it as fallback if project_root is None and test_files: first_test_file = Path(test_files[0]) - project_root = _find_node_project_root(first_test_file) + project_root = find_node_project_root(first_test_file) # Use the project root, or fall back to provided cwd effective_cwd = project_root if project_root else cwd @@ -698,7 +704,7 @@ def run_jest_behavioral_tests( "npx", "jest", "--reporters=default", - "--reporters=jest-junit", + f"--reporters={CODEFLASH_JEST_REPORTER}", "--runInBand", # Run tests serially for consistent timing "--forceExit", ] @@ -732,7 +738,7 @@ def run_jest_behavioral_tests( jest_env["JEST_JUNIT_OUTPUT_FILE"] = str(result_file_path) jest_env["JEST_JUNIT_OUTPUT_DIR"] = str(result_file_path.parent) jest_env["JEST_JUNIT_OUTPUT_NAME"] = result_file_path.name - # Configure jest-junit to use filepath-based classnames for proper parsing + # Configure codeflash jest-reporter to use filepath-based classnames for proper parsing jest_env["JEST_JUNIT_CLASSNAME"] = "{filepath}" jest_env["JEST_JUNIT_SUITE_NAME"] = "{filepath}" jest_env["JEST_JUNIT_ADD_FILE_ATTRIBUTE"] = "true" @@ -797,7 +803,7 @@ def run_jest_behavioral_tests( except FileNotFoundError: logger.error("Jest not found. Make sure Jest is installed (npm install jest)") result = subprocess.CompletedProcess( - args=jest_cmd, returncode=-1, stdout="", stderr="Jest not found. Run: npm install jest jest-junit" + args=jest_cmd, returncode=-1, stdout="", stderr="Jest not found. 
Run: npm install jest" ) finally: wall_clock_ns = time.perf_counter_ns() - start_time_ns @@ -930,7 +936,7 @@ def run_jest_benchmarking_tests( # Use provided project_root, or detect it as fallback if project_root is None and test_files: first_test_file = Path(test_files[0]) - project_root = _find_node_project_root(first_test_file) + project_root = find_node_project_root(first_test_file) effective_cwd = project_root if project_root else cwd @@ -947,7 +953,7 @@ def run_jest_benchmarking_tests( "npx", "jest", "--reporters=default", - "--reporters=jest-junit", + f"--reporters={CODEFLASH_JEST_REPORTER}", "--runInBand", # Ensure serial execution "--forceExit", "--runner=codeflash/loop-runner", # Use custom loop runner for in-process looping @@ -1100,7 +1106,7 @@ def run_jest_line_profile_tests( # Use provided project_root, or detect it as fallback if project_root is None and test_files: first_test_file = Path(test_files[0]) - project_root = _find_node_project_root(first_test_file) + project_root = find_node_project_root(first_test_file) effective_cwd = project_root if project_root else cwd logger.debug(f"Jest line profiling working directory: {effective_cwd}") @@ -1113,7 +1119,7 @@ def run_jest_line_profile_tests( "npx", "jest", "--reporters=default", - "--reporters=jest-junit", + f"--reporters={CODEFLASH_JEST_REPORTER}", "--runInBand", # Run tests serially for consistent line profiling "--forceExit", ] diff --git a/codeflash/languages/javascript/treesitter.py b/codeflash/languages/javascript/treesitter.py index c00cb228e..f3ba0453f 100644 --- a/codeflash/languages/javascript/treesitter.py +++ b/codeflash/languages/javascript/treesitter.py @@ -208,6 +208,22 @@ def find_functions( current_function=None, ) + # Post-process: upgrade is_exported for functions referenced in named export clauses + # e.g., const joinBy = () => {}; export { joinBy }; + exports = self.find_exports(source) + exported_names: set[str] = set() + for export in exports: + for name, _ in 
export.exported_names: + exported_names.add(name) + if export.default_export: + exported_names.add(export.default_export) + if export.wrapped_default_args: + exported_names.update(export.wrapped_default_args) + + for func in functions: + if not func.is_exported and func.name in exported_names: + func.is_exported = True + return functions def _walk_tree_for_functions( @@ -505,7 +521,11 @@ def _check_commonjs_assignment_exports(self, node: Node, name: str, source_bytes # Check module.exports = name (single export) if left_text == "module.exports" and right_node.type == "identifier": - if self.get_node_text(right_node, source_bytes) == name: + exported_var = self.get_node_text(right_node, source_bytes) + if exported_var == name: + return True + # module.exports = varName → check if name is a property of varName's object + if self._is_name_property_of_variable(node, exported_var, name, source_bytes): return True # Check module.exports.name = ... or exports.name = ... @@ -514,6 +534,85 @@ def _check_commonjs_assignment_exports(self, node: Node, name: str, source_bytes return False + def _resolve_variable_object_properties( + self, node: Node, var_name: str, source_bytes: bytes + ) -> list[tuple[str, str | None]]: + """Resolve a variable name to its object literal and return property names. + + For `const utils = { match() {}, foo: bar }`, returns [("match", None), ("foo", None)]. 
+ """ + root = node + while root.parent: + root = root.parent + + properties: list[tuple[str, str | None]] = [] + for child in root.children: + if child.type in ("lexical_declaration", "variable_declaration"): + for decl in child.children: + if decl.type == "variable_declarator": + name_node = decl.child_by_field_name("name") + value_node = decl.child_by_field_name("value") + if ( + name_node + and self.get_node_text(name_node, source_bytes) == var_name + and value_node + and value_node.type == "object" + ): + for obj_child in value_node.children: + if obj_child.type == "method_definition": + method_name_node = obj_child.child_by_field_name("name") + if method_name_node: + properties.append((self.get_node_text(method_name_node, source_bytes), None)) + elif obj_child.type == "shorthand_property_identifier": + properties.append((self.get_node_text(obj_child, source_bytes), None)) + elif obj_child.type == "pair": + key_node = obj_child.child_by_field_name("key") + if key_node: + properties.append((self.get_node_text(key_node, source_bytes), None)) + return properties + + def _is_name_property_of_variable(self, node: Node, var_name: str, prop_name: str, source_bytes: bytes) -> bool: + """Check if prop_name is a property/method of the object assigned to var_name. + + Resolves patterns like: + const utils = { match() {}, foo: bar }; + module.exports = utils; + → checks if prop_name is a key of the object literal assigned to var_name. + """ + root = node + while root.parent: + root = root.parent + + for child in root.children: + # Look for: const/let/var varName = { ... 
} + if child.type in ("lexical_declaration", "variable_declaration"): + for decl in child.children: + if decl.type == "variable_declarator": + name_node = decl.child_by_field_name("name") + value_node = decl.child_by_field_name("value") + if ( + name_node + and self.get_node_text(name_node, source_bytes) == var_name + and value_node + and value_node.type == "object" + ): + for obj_child in value_node.children: + if obj_child.type == "method_definition": + method_name_node = obj_child.child_by_field_name("name") + if ( + method_name_node + and self.get_node_text(method_name_node, source_bytes) == prop_name + ): + return True + elif obj_child.type == "shorthand_property_identifier": + if self.get_node_text(obj_child, source_bytes) == prop_name: + return True + elif obj_child.type == "pair": + key_node = obj_child.child_by_field_name("key") + if key_node and self.get_node_text(key_node, source_bytes) == prop_name: + return True + return False + def _find_preceding_jsdoc(self, node: Node, source_bytes: bytes) -> int | None: """Find JSDoc comment immediately preceding a function node. 
@@ -1005,8 +1104,12 @@ def _extract_commonjs_export(self, node: Node, source_bytes: bytes) -> ExportInf name_node = right_node.child_by_field_name("name") default_export = self.get_node_text(name_node, source_bytes) if name_node else "default" elif right_node.type == "identifier": - # module.exports = someFunction - default_export = self.get_node_text(right_node, source_bytes) + # module.exports = someFunction or module.exports = someObject + var_name = self.get_node_text(right_node, source_bytes) + default_export = var_name + # Resolve variable to object literal and add its properties as exports + obj_props = self._resolve_variable_object_properties(node, var_name, source_bytes) + exported_names.extend(obj_props) elif right_node.type == "object": # module.exports = { foo, bar, baz: qux } for child in right_node.children: diff --git a/codeflash/languages/python/context/code_context_extractor.py b/codeflash/languages/python/context/code_context_extractor.py index 13a1f1884..33222862c 100644 --- a/codeflash/languages/python/context/code_context_extractor.py +++ b/codeflash/languages/python/context/code_context_extractor.py @@ -2,7 +2,8 @@ import ast import hashlib -from collections import defaultdict +import os +from collections import defaultdict, deque from itertools import chain from pathlib import Path from typing import TYPE_CHECKING @@ -11,11 +12,13 @@ from codeflash.cli_cmds.console import logger from codeflash.code_utils.code_utils import encoded_tokens_len, get_qualified_name, path_belongs_to_site_packages -from codeflash.code_utils.config_consts import OPTIMIZATION_CONTEXT_TOKEN_LIMIT, TESTGEN_CONTEXT_TOKEN_LIMIT +from codeflash.code_utils.config_consts import ( + OPTIMIZATION_CONTEXT_TOKEN_LIMIT, + READ_WRITABLE_LIMIT_ERROR, + TESTGEN_CONTEXT_TOKEN_LIMIT, + TESTGEN_LIMIT_ERROR, +) from codeflash.discovery.functions_to_optimize import FunctionToOptimize # noqa: TC001 - -# Language support imports for multi-language code context extraction -from 
codeflash.languages import Language, is_python from codeflash.languages.python.context.unused_definition_remover import ( collect_top_level_defs_with_usages, get_section_names, @@ -39,20 +42,9 @@ if TYPE_CHECKING: from jedi.api.classes import Name - from codeflash.languages.base import DependencyResolver, HelperFunction + from codeflash.languages.base import DependencyResolver from codeflash.languages.python.context.unused_definition_remover import UsageInfo -# Error message constants -READ_WRITABLE_LIMIT_ERROR = "Read-writable code has exceeded token limit, cannot proceed" -TESTGEN_LIMIT_ERROR = "Testgen code context has exceeded token limit, cannot proceed" - - -def safe_relative_to(path: Path, root: Path) -> Path: - try: - return path.resolve().relative_to(root.resolve()) - except ValueError: - return path - def build_testgen_context( helpers_of_fto_dict: dict[Path, set[FunctionSource]], @@ -61,6 +53,7 @@ def build_testgen_context( *, remove_docstrings: bool = False, include_enrichment: bool = True, + function_to_optimize: FunctionToOptimize | None = None, ) -> CodeStringsMarkdown: testgen_context = extract_code_markdown_context_from_files( helpers_of_fto_dict, @@ -75,6 +68,17 @@ def build_testgen_context( if enrichment.code_strings: testgen_context = CodeStringsMarkdown(code_strings=testgen_context.code_strings + enrichment.code_strings) + if function_to_optimize is not None: + result = _parse_and_collect_imports(testgen_context) + existing_classes = collect_existing_class_names(result[0]) if result else set() + constructor_stubs = extract_parameter_type_constructors( + function_to_optimize, project_root_path, existing_classes + ) + if constructor_stubs.code_strings: + testgen_context = CodeStringsMarkdown( + code_strings=testgen_context.code_strings + constructor_stubs.code_strings + ) + return testgen_context @@ -85,12 +89,6 @@ def get_code_optimization_context( testgen_token_limit: int = TESTGEN_CONTEXT_TOKEN_LIMIT, call_graph: DependencyResolver | None = 
None, ) -> CodeOptimizationContext: - # Route to language-specific implementation for non-Python languages - if not is_python(): - return get_code_optimization_context_for_language( - function_to_optimize, project_root_path, optim_token_limit, testgen_token_limit - ) - # Get FunctionSource representation of helpers of FTO fto_input = {function_to_optimize.file_path: {function_to_optimize.qualified_name}} if call_graph is not None: @@ -167,12 +165,18 @@ def get_code_optimization_context( read_only_context_code = "" # Progressive fallback for testgen context token limits - testgen_context = build_testgen_context(helpers_of_fto_dict, helpers_of_helpers_dict, project_root_path) + testgen_context = build_testgen_context( + helpers_of_fto_dict, helpers_of_helpers_dict, project_root_path, function_to_optimize=function_to_optimize + ) if encoded_tokens_len(testgen_context.markdown) > testgen_token_limit: logger.debug("Testgen context exceeded token limit, removing docstrings") testgen_context = build_testgen_context( - helpers_of_fto_dict, helpers_of_helpers_dict, project_root_path, remove_docstrings=True + helpers_of_fto_dict, + helpers_of_helpers_dict, + project_root_path, + remove_docstrings=True, + function_to_optimize=function_to_optimize, ) if encoded_tokens_len(testgen_context.markdown) > testgen_token_limit: @@ -204,139 +208,6 @@ def get_code_optimization_context( ) -def get_code_optimization_context_for_language( - function_to_optimize: FunctionToOptimize, - project_root_path: Path, - optim_token_limit: int = OPTIMIZATION_CONTEXT_TOKEN_LIMIT, - testgen_token_limit: int = TESTGEN_CONTEXT_TOKEN_LIMIT, -) -> CodeOptimizationContext: - """Extract code optimization context for non-Python languages. - - Uses the language support abstraction to extract code context and converts - it to the CodeOptimizationContext format expected by the pipeline. 
- - This function supports multi-file context extraction, grouping helpers by file - and creating proper CodeStringsMarkdown with file paths for multi-file replacement. - - Args: - function_to_optimize: The function to extract context for. - project_root_path: Root of the project. - optim_token_limit: Token limit for optimization context. - testgen_token_limit: Token limit for testgen context. - - Returns: - CodeOptimizationContext with target code and dependencies. - - """ - from codeflash.languages import get_language_support - - # Get language support for this function - language = Language(function_to_optimize.language) - lang_support = get_language_support(language) - - # Extract code context using language support - code_context = lang_support.extract_code_context(function_to_optimize, project_root_path, project_root_path) - - # Build imports string if available - imports_code = "\n".join(code_context.imports) if code_context.imports else "" - - # Get relative path for target file - target_relative_path = safe_relative_to(function_to_optimize.file_path, project_root_path) - - # Group helpers by file path - helpers_by_file: dict[Path, list[HelperFunction]] = defaultdict(list) - helper_function_sources = [] - - for helper in code_context.helper_functions: - helpers_by_file[helper.file_path].append(helper) - - # Convert to FunctionSource for pipeline compatibility - helper_function_sources.append( - FunctionSource( - file_path=helper.file_path, - qualified_name=helper.qualified_name, - fully_qualified_name=helper.qualified_name, - only_function_name=helper.name, - source_code=helper.source_code, - ) - ) - - # Build read-writable code (target file + same-file helpers + global variables) - read_writable_code_strings = [] - - # Combine target code with same-file helpers - target_file_code = code_context.target_code - same_file_helpers = helpers_by_file.get(function_to_optimize.file_path, []) - if same_file_helpers: - helper_code = "\n\n".join(h.source_code for h in 
same_file_helpers) - target_file_code = target_file_code + "\n\n" + helper_code - - # Note: code_context.read_only_context contains type definitions and global variables - # These should be passed as read-only context to the AI, not prepended to the target code - # If prepended to target code, the AI treats them as code to optimize and includes them in output - - # Add imports to target file code - if imports_code: - target_file_code = imports_code + "\n\n" + target_file_code - - read_writable_code_strings.append( - CodeString(code=target_file_code, file_path=target_relative_path, language=function_to_optimize.language) - ) - - # Add helper files (cross-file helpers) - for file_path, file_helpers in helpers_by_file.items(): - if file_path == function_to_optimize.file_path: - continue # Already included in target file - - helper_relative_path = safe_relative_to(file_path, project_root_path) - - # Combine all helpers from this file - combined_helper_code = "\n\n".join(h.source_code for h in file_helpers) - - read_writable_code_strings.append( - CodeString( - code=combined_helper_code, file_path=helper_relative_path, language=function_to_optimize.language - ) - ) - - read_writable_code = CodeStringsMarkdown( - code_strings=read_writable_code_strings, language=function_to_optimize.language - ) - - # Build testgen context (same as read_writable for non-Python, plus imported type skeletons) - testgen_code_strings = read_writable_code_strings.copy() - if code_context.imported_type_skeletons: - testgen_code_strings.append( - CodeString( - code=code_context.imported_type_skeletons, file_path=None, language=function_to_optimize.language - ) - ) - testgen_context = CodeStringsMarkdown(code_strings=testgen_code_strings, language=function_to_optimize.language) - - # Check token limits - read_writable_tokens = encoded_tokens_len(read_writable_code.markdown) - if read_writable_tokens > optim_token_limit: - raise ValueError(READ_WRITABLE_LIMIT_ERROR) - - testgen_tokens = 
encoded_tokens_len(testgen_context.markdown) - if testgen_tokens > testgen_token_limit: - raise ValueError(TESTGEN_LIMIT_ERROR) - - # Generate code hash from all read-writable code - code_hash = hashlib.sha256(read_writable_code.flat.encode("utf-8")).hexdigest() - - return CodeOptimizationContext( - testgen_context=testgen_context, - read_writable_code=read_writable_code, - read_only_context_code=code_context.read_only_context, - hashing_code_context=read_writable_code.flat, - hashing_code_context_hash=code_hash, - helper_functions=helper_function_sources, - testgen_helper_fqns=[fs.fully_qualified_name for fs in helper_function_sources], - preexisting_objects=set(), - ) - - def process_file_context( file_path: Path, primary_qualified_names: set[str], @@ -355,7 +226,7 @@ def process_file_context( try: all_names = primary_qualified_names | secondary_qualified_names code_without_unused_defs = remove_unused_definitions_by_function_names(original_code, all_names) - code_context = parse_code_and_prune_cst( + pruned_module = parse_code_and_prune_cst( code_without_unused_defs, code_context_type, primary_qualified_names, @@ -366,17 +237,23 @@ def process_file_context( logger.debug(f"Error while getting read-only code: {e}") return None - if code_context.strip(): - if code_context_type != CodeContextType.HASHING: + if pruned_module.code.strip(): + if code_context_type == CodeContextType.HASHING: + code_context = ast.unparse(ast.parse(pruned_module.code)) + else: code_context = add_needed_imports_from_module( src_module_code=original_code, - dst_module_code=code_context, + dst_module_code=pruned_module, src_path=file_path, dst_path=file_path, project_root=project_root_path, helper_functions=helper_functions, ) - return CodeString(code=code_context, file_path=safe_relative_to(file_path, project_root_path)) + try: + relative_path = file_path.resolve().relative_to(project_root_path.resolve()) + except ValueError: + relative_path = file_path + return CodeString(code=code_context, 
file_path=relative_path) return None @@ -521,13 +398,17 @@ def get_function_sources_from_jedi( definition = definitions[0] definition_path = definition.module_path if definition_path is not None: - rel = safe_relative_to(definition_path, project_root_path) - if not rel.is_absolute(): + try: + rel = definition_path.resolve().relative_to(project_root_path.resolve()) definition_path = project_root_path / rel + except ValueError: + pass # The definition is part of this project and not defined within the original function is_valid_definition = ( - is_project_path(definition_path, project_root_path) + definition_path is not None + and not path_belongs_to_site_packages(definition_path) + and str(definition_path).startswith(str(project_root_path) + os.sep) and definition.full_name and not belongs_to_function_qualified(definition, qualified_function_name) and definition.full_name.startswith(definition.module_name) @@ -635,6 +516,340 @@ def collect_existing_class_names(tree: ast.Module) -> set[str]: return class_names +BUILTIN_AND_TYPING_NAMES = frozenset( + { + "int", + "str", + "float", + "bool", + "bytes", + "bytearray", + "complex", + "list", + "dict", + "set", + "frozenset", + "tuple", + "type", + "object", + "None", + "NoneType", + "Ellipsis", + "NotImplemented", + "memoryview", + "range", + "slice", + "property", + "classmethod", + "staticmethod", + "super", + "Optional", + "Union", + "Any", + "List", + "Dict", + "Set", + "FrozenSet", + "Tuple", + "Type", + "Callable", + "Iterator", + "Generator", + "Coroutine", + "AsyncGenerator", + "AsyncIterator", + "Iterable", + "AsyncIterable", + "Sequence", + "MutableSequence", + "Mapping", + "MutableMapping", + "Collection", + "Awaitable", + "Literal", + "Final", + "ClassVar", + "TypeVar", + "TypeAlias", + "ParamSpec", + "Concatenate", + "Annotated", + "TypeGuard", + "Self", + "Unpack", + "TypeVarTuple", + "Never", + "NoReturn", + "SupportsInt", + "SupportsFloat", + "SupportsComplex", + "SupportsBytes", + "SupportsAbs", + 
"SupportsRound", + "IO", + "TextIO", + "BinaryIO", + "Pattern", + "Match", + } +) + + +def collect_type_names_from_annotation(node: ast.expr | None) -> set[str]: + if node is None: + return set() + if isinstance(node, ast.Name): + return {node.id} + if isinstance(node, ast.Subscript): + names = collect_type_names_from_annotation(node.value) + names |= collect_type_names_from_annotation(node.slice) + return names + if isinstance(node, ast.BinOp) and isinstance(node.op, ast.BitOr): + return collect_type_names_from_annotation(node.left) | collect_type_names_from_annotation(node.right) + if isinstance(node, ast.Tuple): + names = set[str]() + for elt in node.elts: + names |= collect_type_names_from_annotation(elt) + return names + return set() + + +def extract_init_stub_from_class(class_name: str, module_source: str, module_tree: ast.Module) -> str | None: + class_node = None + # Use a deque-based BFS to find the first matching ClassDef (preserves ast.walk order) + q: deque[ast.AST] = deque([module_tree]) + while q: + candidate = q.popleft() + if isinstance(candidate, ast.ClassDef) and candidate.name == class_name: + class_node = candidate + break + q.extend(ast.iter_child_nodes(candidate)) + + if class_node is None: + return None + + lines = module_source.splitlines() + relevant_nodes: list[ast.FunctionDef | ast.AsyncFunctionDef] = [] + for item in class_node.body: + if isinstance(item, (ast.FunctionDef, ast.AsyncFunctionDef)): + is_relevant = False + if item.name in ("__init__", "__post_init__"): + is_relevant = True + else: + # Check decorators explicitly to avoid generator overhead + for d in item.decorator_list: + if (isinstance(d, ast.Name) and d.id == "property") or ( + isinstance(d, ast.Attribute) and d.attr == "property" + ): + is_relevant = True + break + if is_relevant: + relevant_nodes.append(item) + + if not relevant_nodes: + return None + + snippets: list[str] = [] + for fn_node in relevant_nodes: + start = fn_node.lineno + if fn_node.decorator_list: + # 
Compute minimum decorator lineno with an explicit loop (avoids generator/min overhead) + m = start + for d in fn_node.decorator_list: + m = min(m, d.lineno) + start = m + snippets.append("\n".join(lines[start - 1 : fn_node.end_lineno])) + + return f"class {class_name}:\n" + "\n".join(snippets) + + +def extract_parameter_type_constructors( + function_to_optimize: FunctionToOptimize, project_root_path: Path, existing_class_names: set[str] +) -> CodeStringsMarkdown: + import jedi + + try: + source = function_to_optimize.file_path.read_text(encoding="utf-8") + tree = ast.parse(source) + except Exception: + return CodeStringsMarkdown(code_strings=[]) + + func_node = None + for node in ast.walk(tree): + if ( + isinstance(node, (ast.FunctionDef, ast.AsyncFunctionDef)) + and node.name == function_to_optimize.function_name + ): + if function_to_optimize.starting_line is not None and node.lineno != function_to_optimize.starting_line: + continue + func_node = node + break + if func_node is None: + return CodeStringsMarkdown(code_strings=[]) + + type_names: set[str] = set() + for arg in func_node.args.args + func_node.args.posonlyargs + func_node.args.kwonlyargs: + type_names |= collect_type_names_from_annotation(arg.annotation) + if func_node.args.vararg: + type_names |= collect_type_names_from_annotation(func_node.args.vararg.annotation) + if func_node.args.kwarg: + type_names |= collect_type_names_from_annotation(func_node.args.kwarg.annotation) + + # Scan function body for isinstance(x, SomeType) and type(x) is/== SomeType patterns + for body_node in ast.walk(func_node): + if ( + isinstance(body_node, ast.Call) + and isinstance(body_node.func, ast.Name) + and body_node.func.id == "isinstance" + ): + if len(body_node.args) >= 2: + second_arg = body_node.args[1] + if isinstance(second_arg, ast.Name): + type_names.add(second_arg.id) + elif isinstance(second_arg, ast.Tuple): + for elt in second_arg.elts: + if isinstance(elt, ast.Name): + type_names.add(elt.id) + elif 
isinstance(body_node, ast.Compare): + # type(x) is/== SomeType + if ( + isinstance(body_node.left, ast.Call) + and isinstance(body_node.left.func, ast.Name) + and body_node.left.func.id == "type" + ): + for comparator in body_node.comparators: + if isinstance(comparator, ast.Name): + type_names.add(comparator.id) + + # Collect base class names from enclosing class (if this is a method) + if function_to_optimize.class_name is not None: + for top_node in ast.walk(tree): + if isinstance(top_node, ast.ClassDef) and top_node.name == function_to_optimize.class_name: + for base in top_node.bases: + if isinstance(base, ast.Name): + type_names.add(base.id) + break + + type_names -= BUILTIN_AND_TYPING_NAMES + type_names -= existing_class_names + if not type_names: + return CodeStringsMarkdown(code_strings=[]) + + import_map: dict[str, str] = {} + for node in ast.walk(tree): + if isinstance(node, ast.ImportFrom) and node.module: + for alias in node.names: + name = alias.asname if alias.asname else alias.name + import_map[name] = node.module + + code_strings: list[CodeString] = [] + module_cache: dict[Path, tuple[str, ast.Module]] = {} + + for type_name in sorted(type_names): + module_name = import_map.get(type_name) + if not module_name: + continue + try: + script_code = f"from {module_name} import {type_name}" + script = jedi.Script(script_code, project=jedi.Project(path=project_root_path)) + definitions = script.goto(1, len(f"from {module_name} import ") + len(type_name), follow_imports=True) + if not definitions: + continue + + module_path = definitions[0].module_path + if not module_path: + continue + + if module_path in module_cache: + mod_source, mod_tree = module_cache[module_path] + else: + mod_source = module_path.read_text(encoding="utf-8") + mod_tree = ast.parse(mod_source) + module_cache[module_path] = (mod_source, mod_tree) + + stub = extract_init_stub_from_class(type_name, mod_source, mod_tree) + if stub: + code_strings.append(CodeString(code=stub, 
file_path=module_path)) + except Exception: + logger.debug(f"Error extracting constructor stub for {type_name} from {module_name}") + continue + + # Transitive extraction (one level): for each extracted stub, find __init__ param types and extract their stubs + # Build an extended import map that includes imports from source modules of already-extracted stubs + transitive_import_map = dict(import_map) + for _, cached_tree in module_cache.values(): + for cache_node in ast.walk(cached_tree): + if isinstance(cache_node, ast.ImportFrom) and cache_node.module: + for alias in cache_node.names: + name = alias.asname if alias.asname else alias.name + if name not in transitive_import_map: + transitive_import_map[name] = cache_node.module + + emitted_names = type_names | existing_class_names | BUILTIN_AND_TYPING_NAMES + transitive_type_names: set[str] = set() + for cs in code_strings: + try: + stub_tree = ast.parse(cs.code) + except SyntaxError: + continue + for stub_node in ast.walk(stub_tree): + if isinstance(stub_node, (ast.FunctionDef, ast.AsyncFunctionDef)) and stub_node.name in ( + "__init__", + "__post_init__", + ): + for arg in stub_node.args.args + stub_node.args.posonlyargs + stub_node.args.kwonlyargs: + transitive_type_names |= collect_type_names_from_annotation(arg.annotation) + transitive_type_names -= emitted_names + for type_name in sorted(transitive_type_names): + module_name = transitive_import_map.get(type_name) + if not module_name: + continue + try: + script_code = f"from {module_name} import {type_name}" + script = jedi.Script(script_code, project=jedi.Project(path=project_root_path)) + definitions = script.goto(1, len(f"from {module_name} import ") + len(type_name), follow_imports=True) + if not definitions: + continue + module_path = definitions[0].module_path + if not module_path: + continue + if module_path in module_cache: + mod_source, mod_tree = module_cache[module_path] + else: + mod_source = module_path.read_text(encoding="utf-8") + mod_tree = 
ast.parse(mod_source) + module_cache[module_path] = (mod_source, mod_tree) + stub = extract_init_stub_from_class(type_name, mod_source, mod_tree) + if stub: + code_strings.append(CodeString(code=stub, file_path=module_path)) + except Exception: + logger.debug(f"Error extracting transitive constructor stub for {type_name} from {module_name}") + continue + + return CodeStringsMarkdown(code_strings=code_strings) + + +def resolve_instance_class_name(name: str, module_tree: ast.Module) -> str | None: + for node in module_tree.body: + if isinstance(node, ast.Assign): + for target in node.targets: + if isinstance(target, ast.Name) and target.id == name: + value = node.value + if isinstance(value, ast.Call): + func = value.func + if isinstance(func, ast.Name): + return func.id + if isinstance(func, ast.Attribute) and isinstance(func.value, ast.Name): + return func.value.id + elif isinstance(node, ast.AnnAssign) and isinstance(node.target, ast.Name) and node.target.id == name: + ann = node.annotation + if isinstance(ann, ast.Name): + return ann.id + if isinstance(ann, ast.Subscript) and isinstance(ann.value, ast.Name): + return ann.value.id + return None + + def enrich_testgen_context(code_context: CodeStringsMarkdown, project_root_path: Path) -> CodeStringsMarkdown: import jedi @@ -648,28 +863,6 @@ def enrich_testgen_context(code_context: CodeStringsMarkdown, project_root_path: existing_classes = collect_existing_class_names(tree) - # Collect base class names from ClassDef nodes (single walk) - base_class_names: set[str] = set() - for node in ast.walk(tree): - if isinstance(node, ast.ClassDef): - for base in node.bases: - if isinstance(base, ast.Name): - base_class_names.add(base.id) - elif isinstance(base, ast.Attribute) and isinstance(base.value, ast.Name): - base_class_names.add(base.attr) - - # Classify external imports using importlib-based check - is_project_cache: dict[str, bool] = {} - external_base_classes: set[tuple[str, str]] = set() - external_direct_imports: 
set[tuple[str, str]] = set() - - for name, module_name in imported_names.items(): - if not _is_project_module_cached(module_name, project_root_path, is_project_cache): - if name in base_class_names: - external_base_classes.add((name, module_name)) - if name not in existing_classes: - external_direct_imports.add((name, module_name)) - code_strings: list[CodeString] = [] emitted_class_names: set[str] = set() @@ -723,15 +916,14 @@ def extract_class_and_bases( start_line = min(d.lineno for d in class_node.decorator_list) class_source = "\n".join(lines[start_line - 1 : class_node.end_lineno]) - class_imports = extract_imports_for_class(module_tree, class_node, module_source) - full_source = class_imports + "\n\n" + class_source if class_imports else class_source + full_source = class_source code_strings.append(CodeString(code=full_source, file_path=module_path)) extracted_classes.add((module_path, class_name)) emitted_class_names.add(class_name) for name, module_name in imported_names.items(): - if name in existing_classes: + if name in existing_classes or module_name == "__future__": continue try: test_code = f"import {module_name}" @@ -745,7 +937,11 @@ def extract_class_and_bases( if not module_path: continue - if not is_project_path(module_path, project_root_path): + resolved_module = module_path.resolve() + module_str = str(resolved_module) + is_project = module_str.startswith(str(project_root_path.resolve()) + os.sep) + is_third_party = "site-packages" in module_str + if not is_project and not is_third_party: continue mod_result = get_module_source_and_tree(module_path) @@ -753,49 +949,28 @@ def extract_class_and_bases( continue module_source, module_tree = mod_result - extract_class_and_bases(name, module_path, module_source, module_tree) + if is_project: + extract_class_and_bases(name, module_path, module_source, module_tree) + if (module_path, name) not in extracted_classes: + resolved_class = resolve_instance_class_name(name, module_tree) + if resolved_class 
and resolved_class not in existing_classes: + extract_class_and_bases(resolved_class, module_path, module_source, module_tree) + elif is_third_party: + target_name = name + if not any(isinstance(n, ast.ClassDef) and n.name == name for n in ast.walk(module_tree)): + resolved_class = resolve_instance_class_name(name, module_tree) + if resolved_class: + target_name = resolved_class + if target_name not in emitted_class_names: + stub = extract_init_stub_from_class(target_name, module_source, module_tree) + if stub: + code_strings.append(CodeString(code=stub, file_path=module_path)) + emitted_class_names.add(target_name) except Exception: logger.debug(f"Error extracting class definition for {name} from {module_name}") continue - # --- Step 2: External base class __init__ stubs --- - if external_base_classes: - for cls, name in resolve_classes_from_modules(external_base_classes): - if name in emitted_class_names: - continue - stub = extract_init_stub(cls, name, require_site_packages=False) - if stub is not None: - code_strings.append(stub) - emitted_class_names.add(name) - - # --- Step 3: External direct import __init__ stubs with BFS --- - if external_direct_imports: - processed_classes: set[type] = set() - worklist: list[tuple[type, str, int]] = [ - (cls, name, 0) for cls, name in resolve_classes_from_modules(external_direct_imports) - ] - - while worklist: - cls, class_name, depth = worklist.pop(0) - - if cls in processed_classes: - continue - processed_classes.add(cls) - - stub = extract_init_stub(cls, class_name) - if stub is None: - continue - - if class_name not in emitted_class_names: - code_strings.append(stub) - emitted_class_names.add(class_name) - - if depth < MAX_TRANSITIVE_DEPTH: - for dep_cls in resolve_transitive_type_deps(cls): - if dep_cls not in processed_classes: - worklist.append((dep_cls, dep_cls.__name__, depth + 1)) - return CodeStringsMarkdown(code_strings=code_strings) @@ -1063,8 +1238,8 @@ def parse_code_and_prune_cst( target_functions: 
set[str], helpers_of_helper_functions: set[str] = set(), # noqa: B006 remove_docstrings: bool = False, -) -> str: - """Create a read-only version of the code by parsing and filtering the code to keep only class contextual information, and other module scoped variables.""" +) -> cst.Module: + """Parse and filter the code CST, returning the pruned Module.""" module = cst.parse_module(code) defs_with_usages = collect_top_level_defs_with_usages(module, target_functions | helpers_of_helper_functions) @@ -1100,23 +1275,8 @@ def parse_code_and_prune_cst( if not found_target: raise ValueError("No target functions found in the provided code") if filtered_node and isinstance(filtered_node, cst.Module): - code = str(filtered_node.code) - if code_context_type == CodeContextType.HASHING: - code = ast.unparse(ast.parse(code)) # Makes it standard - return code - return "" - - -def _qualified_name(prefix: str, name: str) -> str: - return f"{prefix}.{name}" if prefix else name - - -def _validate_classdef(node: cst.ClassDef, prefix: str) -> tuple[str, cst.IndentedBlock] | None: - if prefix: - return None - if not isinstance(node.body, cst.IndentedBlock): - raise ValueError("ClassDef body is not an IndentedBlock") # noqa: TRY004 - return _qualified_name(prefix, node.name.value), node.body + return filtered_node + raise ValueError("Pruning produced no module") def prune_cst( @@ -1158,7 +1318,7 @@ def prune_cst( return None, False if isinstance(node, cst.FunctionDef): - qualified_name = _qualified_name(prefix, node.name.value) + qualified_name = f"{prefix}.{node.name.value}" if prefix else node.name.value # Check if it's a helper function (higher priority than target) if helpers and qualified_name in helpers: @@ -1183,7 +1343,12 @@ def prune_cst( return node, False # Handle dunder methods for READ_ONLY/TESTGEN modes - if include_dunder_methods and is_dunder_method(node.name.value): + if ( + include_dunder_methods + and len(node.name.value) > 4 + and node.name.value.startswith("__") + 
and node.name.value.endswith("__") + ): if not include_init_dunder and node.name.value == "__init__": return None, False if remove_docstrings and isinstance(node.body, cst.IndentedBlock): @@ -1193,17 +1358,18 @@ def prune_cst( return None, False if isinstance(node, cst.ClassDef): - result = _validate_classdef(node, prefix) - if result is None: + if prefix: return None, False - class_prefix, _ = result + if not isinstance(node.body, cst.IndentedBlock): + raise ValueError("ClassDef body is not an IndentedBlock") # noqa: TRY004 + class_prefix = node.name.value class_name = node.name.value # Handle dependency classes for READ_WRITABLE mode if defs_with_usages: # Check if this class contains any target functions has_target_functions = any( - isinstance(stmt, cst.FunctionDef) and _qualified_name(class_prefix, stmt.name.value) in target_functions + isinstance(stmt, cst.FunctionDef) and f"{class_prefix}.{stmt.name.value}" in target_functions for stmt in node.body.body ) diff --git a/codeflash/languages/python/context/unused_definition_remover.py b/codeflash/languages/python/context/unused_definition_remover.py index 3101c97ee..c02d52a5d 100644 --- a/codeflash/languages/python/context/unused_definition_remover.py +++ b/codeflash/languages/python/context/unused_definition_remover.py @@ -10,7 +10,8 @@ import libcst as cst from codeflash.cli_cmds.console import logger -from codeflash.languages import is_python +from codeflash.languages import current_language +from codeflash.languages.base import Language from codeflash.languages.python.static_analysis.code_replacer import replace_function_definitions_in_module from codeflash.models.models import CodeString, CodeStringsMarkdown @@ -747,7 +748,7 @@ def detect_unused_helper_functions( """ # Skip this analysis for non-Python languages since we use Python's ast module - if not is_python(): + if current_language() != Language.PYTHON: return [] if isinstance(optimized_code, CodeStringsMarkdown) and len(optimized_code.code_strings) > 
"""Python implementation of the per-function optimizer.

Subclasses the language-agnostic FunctionOptimizer and plugs in Python-specific
behavior: context extraction, AST resolution, test instrumentation, line
profiling, and optimized-code replacement.
"""

from __future__ import annotations

import ast
from pathlib import Path
from typing import TYPE_CHECKING

from codeflash.cli_cmds.console import console, logger
from codeflash.code_utils.config_consts import TOTAL_LOOPING_TIME_EFFECTIVE
from codeflash.either import Failure, Success
from codeflash.languages.python.context.unused_definition_remover import (
    detect_unused_helper_functions,
    revert_unused_helper_functions,
)
from codeflash.languages.python.optimizer import resolve_python_function_ast
from codeflash.languages.python.static_analysis.code_extractor import get_opt_review_metrics, is_numerical_code
from codeflash.languages.python.static_analysis.code_replacer import (
    add_custom_marker_to_all_tests,
    modify_autouse_fixture,
)
from codeflash.languages.python.static_analysis.line_profile_utils import add_decorator_imports, contains_jit_decorator
from codeflash.models.models import TestingMode, TestResults
from codeflash.optimization.function_optimizer import FunctionOptimizer
from codeflash.verification.parse_test_output import calculate_function_throughput_from_test_results

if TYPE_CHECKING:
    from typing import Any

    from codeflash.either import Result
    from codeflash.languages.base import Language
    from codeflash.models.function_types import FunctionParent
    from codeflash.models.models import (
        CodeOptimizationContext,
        CodeStringsMarkdown,
        ConcurrencyMetrics,
        CoverageData,
        OriginalCodeBaseline,
        TestDiff,
    )


class PythonFunctionOptimizer(FunctionOptimizer):
    """Per-function optimization logic specialized for Python source code."""

    def get_code_optimization_context(self) -> Result[CodeOptimizationContext, str]:
        """Extract the optimization context for the target function.

        Delegates to the Python code-context extractor; a ValueError from extraction
        is converted into a Failure so callers get a Result instead of an exception.
        """
        from codeflash.languages.python.context import code_context_extractor

        try:
            return Success(
                code_context_extractor.get_code_optimization_context(
                    self.function_to_optimize, self.project_root, call_graph=self.call_graph
                )
            )
        except ValueError as e:
            return Failure(str(e))

    def _resolve_function_ast(
        self, source_code: str, function_name: str, parents: list[FunctionParent]
    ) -> ast.FunctionDef | ast.AsyncFunctionDef | None:
        """Parse `source_code` and locate the AST node for `function_name` under `parents`."""
        original_module_ast = ast.parse(source_code)
        return resolve_python_function_ast(function_name, parents, original_module_ast)

    def requires_function_ast(self) -> bool:
        """Python optimization needs the resolved function AST node."""
        return True

    def analyze_code_characteristics(self, code_context: CodeOptimizationContext) -> None:
        """Record whether the read-writable code looks numerical (used downstream)."""
        self.is_numerical_code = is_numerical_code(code_string=code_context.read_writable_code.flat)

    def get_optimization_review_metrics(
        self,
        source_code: str,
        file_path: Path,
        qualified_name: str,
        project_root: Path,
        tests_root: Path,
        language: Language,
    ) -> str:
        """Compute review metrics for the optimization via the Python code extractor."""
        return get_opt_review_metrics(source_code, file_path, qualified_name, project_root, tests_root, language)

    def instrument_test_fixtures(self, test_paths: list[Path]) -> dict[Path, list[str]] | None:
        """Disable autouse fixtures and mark generated tests.

        Returns the original conftest contents so the caller can restore them later.
        """
        logger.info("Disabling all autouse fixtures associated with the generated test files")
        original_conftest_content = modify_autouse_fixture(test_paths)
        logger.info("Add custom marker to generated test files")
        add_custom_marker_to_all_tests(test_paths)
        return original_conftest_content

    def instrument_capture(self, file_path_to_helper_classes: dict[Path, set[str]]) -> None:
        """Instrument the target function and helper classes with codeflash capture hooks."""
        from codeflash.verification.instrument_codeflash_capture import instrument_codeflash_capture

        instrument_codeflash_capture(self.function_to_optimize, file_path_to_helper_classes, self.test_cfg.tests_root)

    def should_check_coverage(self) -> bool:
        """Coverage checking is enabled for Python."""
        return True

    def collect_async_metrics(
        self,
        benchmarking_results: TestResults,
        code_context: CodeOptimizationContext,
        helper_code: dict[Path, str],
        test_env: dict[str, str],
    ) -> tuple[int | None, ConcurrencyMetrics | None]:
        """Collect throughput and concurrency metrics for async functions.

        Returns (None, None) for non-async functions; otherwise computes throughput
        from the benchmarking results and runs a concurrency benchmark.
        """
        if not self.function_to_optimize.is_async:
            return None, None

        async_throughput = calculate_function_throughput_from_test_results(
            benchmarking_results, self.function_to_optimize.function_name
        )
        logger.debug(f"Async function throughput: {async_throughput} calls/second")

        concurrency_metrics = self.run_concurrency_benchmark(
            code_context=code_context, original_helper_code=helper_code, test_env=test_env
        )
        if concurrency_metrics:
            logger.debug(
                f"Concurrency metrics: ratio={concurrency_metrics.concurrency_ratio:.2f}, "
                f"seq={concurrency_metrics.sequential_time_ns}ns, conc={concurrency_metrics.concurrent_time_ns}ns"
            )
        return async_throughput, concurrency_metrics

    def instrument_async_for_mode(self, mode: TestingMode) -> None:
        """Add the async instrumentation decorator to the target function for `mode`."""
        from codeflash.code_utils.instrument_existing_tests import add_async_decorator_to_function

        add_async_decorator_to_function(
            self.function_to_optimize.file_path, self.function_to_optimize, mode, project_root=self.project_root
        )

    def should_skip_sqlite_cleanup(self, testing_type: TestingMode, optimization_iteration: int) -> bool:
        """SQLite cleanup is never skipped for Python runs."""
        return False

    def parse_line_profile_test_results(
        self, line_profiler_output_file: Path | None
    ) -> tuple[TestResults | dict, CoverageData | None]:
        """Parse line-profiler output; an empty result dict is returned when there is no file."""
        if line_profiler_output_file is None:
            return {"timings": {}, "unit": 0, "str_out": ""}, None
        return self.language_support.parse_line_profile_results(line_profiler_output_file), None

    def compare_candidate_results(
        self,
        baseline_results: OriginalCodeBaseline,
        candidate_behavior_results: TestResults,
        optimization_candidate_index: int,
    ) -> tuple[bool, list[TestDiff]]:
        """Compare a candidate's behavior test results against the baseline.

        Note: optimization_candidate_index is part of the shared interface but unused here.
        """
        from codeflash.verification.equivalence import compare_test_results

        return compare_test_results(baseline_results.behavior_test_results, candidate_behavior_results)

    def replace_function_and_helpers_with_optimized_code(
        self,
        code_context: CodeOptimizationContext,
        optimized_code: CodeStringsMarkdown,
        original_helper_code: dict[Path, str],
    ) -> bool:
        """Write the optimized function and helper definitions into their modules.

        Helper functions the optimized code no longer uses are reverted to their
        original contents. Returns True if any module was updated.
        """
        from codeflash.languages.python.static_analysis.code_replacer import replace_function_definitions_in_module

        did_update = False
        for module_abspath, qualified_names in self.group_functions_by_file(code_context).items():
            did_update |= replace_function_definitions_in_module(
                function_names=list(qualified_names),
                optimized_code=optimized_code,
                module_abspath=module_abspath,
                preexisting_objects=code_context.preexisting_objects,
                project_root_path=self.project_root,
            )

        unused_helpers = detect_unused_helper_functions(self.function_to_optimize, code_context, optimized_code)
        if unused_helpers:
            revert_unused_helper_functions(self.project_root, unused_helpers, original_helper_code)
        return did_update

    def line_profiler_step(
        self, code_context: CodeOptimizationContext, original_helper_code: dict[Path, str], candidate_index: int
    ) -> dict[str, Any]:
        """Run the line profiler on the target function and return its results.

        Skips profiling entirely when the target or helper code carries a JIT
        decorator (profiling JIT-compiled code is not meaningful). The original
        source is always restored via the finally block, even on failure.
        """
        candidate_fto_code = Path(self.function_to_optimize.file_path).read_text("utf-8")
        if contains_jit_decorator(candidate_fto_code):
            logger.info(
                f"Skipping line profiler for {self.function_to_optimize.function_name} - code contains JIT decorator"
            )
            return {"timings": {}, "unit": 0, "str_out": ""}

        for module_abspath in original_helper_code:
            candidate_helper_code = Path(module_abspath).read_text("utf-8")
            if contains_jit_decorator(candidate_helper_code):
                logger.info(
                    f"Skipping line profiler for {self.function_to_optimize.function_name} - helper code contains JIT decorator"
                )
                return {"timings": {}, "unit": 0, "str_out": ""}

        try:
            console.rule()

            test_env = self.get_test_env(
                codeflash_loop_index=0, codeflash_test_iteration=candidate_index, codeflash_tracer_disable=1
            )
            line_profiler_output_file = add_decorator_imports(self.function_to_optimize, code_context)
            line_profile_results, _ = self.run_and_parse_tests(
                testing_type=TestingMode.LINE_PROFILE,
                test_env=test_env,
                test_files=self.test_files,
                optimization_iteration=0,
                testing_time=TOTAL_LOOPING_TIME_EFFECTIVE,
                enable_coverage=False,
                code_context=code_context,
                line_profiler_output_file=line_profiler_output_file,
            )
        finally:
            # Restore the original (un-instrumented) source regardless of outcome.
            self.write_code_and_helpers(
                self.function_to_optimize_source_code, original_helper_code, self.function_to_optimize.file_path
            )
        if isinstance(line_profile_results, TestResults) and not line_profile_results.test_results:
            logger.warning(
                f"Timeout occurred while running line profiler for original function {self.function_to_optimize.function_name}"
            )
            return {"timings": {}, "unit": 0, "str_out": ""}
        if line_profile_results["str_out"] == "":
            logger.warning(
                f"Couldn't run line profiler for original function {self.function_to_optimize.function_name}"
            )
        return line_profile_results
class VariableNormalizer(ast.NodeTransformer):
    """Normalizes only local variable names in AST to canonical forms like var_0, var_1, etc.

    Preserves function names, class names, parameters, built-ins, and imported names.
    """

    def __init__(self) -> None:
        # Use the real ``builtins`` module. ``__builtins__`` is the module object only
        # in ``__main__``; in an imported module it is a plain dict, so
        # ``dir(__builtins__)`` would list dict methods instead of builtin names such
        # as ``len`` or ``list``, breaking the preservation check below.
        import builtins

        self.var_counter = 0
        self.var_mapping: dict[str, str] = {}
        self.scope_stack: list[dict] = []
        self.builtins = set(dir(builtins))
        self.imports: set[str] = set()
        self.global_vars: set[str] = set()
        self.nonlocal_vars: set[str] = set()
        self.parameters: set[str] = set()

    def enter_scope(self) -> None:
        """Enter a new scope (function/class), saving the current mapping state."""
        self.scope_stack.append(
            {"var_mapping": dict(self.var_mapping), "var_counter": self.var_counter, "parameters": set(self.parameters)}
        )

    def exit_scope(self) -> None:
        """Exit current scope and restore parent scope."""
        if self.scope_stack:
            scope = self.scope_stack.pop()
            self.var_mapping = scope["var_mapping"]
            self.var_counter = scope["var_counter"]
            self.parameters = scope["parameters"]

    def get_normalized_name(self, name: str) -> str:
        """Get or create normalized name for a variable; preserved names pass through."""
        if (
            name in self.builtins
            or name in self.imports
            or name in self.global_vars
            or name in self.nonlocal_vars
            or name in self.parameters
        ):
            return name

        if name not in self.var_mapping:
            self.var_mapping[name] = f"var_{self.var_counter}"
            self.var_counter += 1
        return self.var_mapping[name]

    def visit_Import(self, node: ast.Import) -> ast.Import:
        """Track imported names (the bound name: alias if given, else top package)."""
        for alias in node.names:
            name = alias.asname if alias.asname else alias.name
            self.imports.add(name.split(".")[0])
        return node

    def visit_ImportFrom(self, node: ast.ImportFrom) -> ast.ImportFrom:
        """Track imported names from modules."""
        for alias in node.names:
            name = alias.asname if alias.asname else alias.name
            self.imports.add(name)
        return node

    def visit_Global(self, node: ast.Global) -> ast.Global:
        """Track global variable declarations so they are never renamed."""
        self.global_vars.update(node.names)
        return node

    def visit_Nonlocal(self, node: ast.Nonlocal) -> ast.Nonlocal:
        """Track nonlocal variable declarations so they are never renamed."""
        self.nonlocal_vars.update(node.names)
        return node

    def visit_FunctionDef(self, node: ast.FunctionDef) -> ast.FunctionDef:
        """Process function but keep function name and parameters unchanged."""
        self.enter_scope()

        args = node.args
        # Include positional-only parameters too, so e.g. ``def f(x, /)`` keeps x.
        for arg in args.posonlyargs + args.args + args.kwonlyargs:
            self.parameters.add(arg.arg)
        if args.vararg:
            self.parameters.add(args.vararg.arg)
        if args.kwarg:
            self.parameters.add(args.kwarg.arg)

        node = self.generic_visit(node)
        self.exit_scope()
        return node

    def visit_AsyncFunctionDef(self, node: ast.AsyncFunctionDef) -> ast.AsyncFunctionDef:
        """Handle async functions same as regular functions."""
        return self.visit_FunctionDef(node)  # type: ignore[return-value]

    def visit_ClassDef(self, node: ast.ClassDef) -> ast.ClassDef:
        """Process class but keep class name unchanged."""
        self.enter_scope()
        node = self.generic_visit(node)
        self.exit_scope()
        return node

    def visit_Name(self, node: ast.Name) -> ast.Name:
        """Normalize variable names in Name nodes (Store/Del create, Load reuses)."""
        if isinstance(node.ctx, (ast.Store, ast.Del)):
            if (
                node.id not in self.builtins
                and node.id not in self.imports
                and node.id not in self.parameters
                and node.id not in self.global_vars
                and node.id not in self.nonlocal_vars
            ):
                node.id = self.get_normalized_name(node.id)
        elif isinstance(node.ctx, ast.Load) and node.id in self.var_mapping:
            node.id = self.var_mapping[node.id]
        return node

    def visit_ExceptHandler(self, node: ast.ExceptHandler) -> ast.ExceptHandler:
        """Normalize exception variable names."""
        if node.name:
            node.name = self.get_normalized_name(node.name)
        return self.generic_visit(node)

    def _visit_comprehension_expr(self, node: ast.expr) -> ast.expr:
        """Visit a comprehension expression with generators BEFORE elt/key/value.

        ast field order puts ``elt`` before ``generators``, so a plain generic_visit
        would rename the comprehension target without renaming its uses in ``elt``
        (e.g. ``[x for x in xs]`` -> ``[x for var_0 in xs]``). Visiting generators
        first keeps target and uses consistent. The mapping/counter are restored
        afterwards so comprehension-local names do not leak into the outer scope.
        """
        saved_mapping = dict(self.var_mapping)
        saved_counter = self.var_counter

        node.generators = [self.visit(gen) for gen in node.generators]
        if isinstance(node, ast.DictComp):
            node.key = self.visit(node.key)
            node.value = self.visit(node.value)
        else:
            node.elt = self.visit(node.elt)

        self.var_mapping = saved_mapping
        self.var_counter = saved_counter
        return node

    visit_ListComp = _visit_comprehension_expr
    visit_SetComp = _visit_comprehension_expr
    visit_GeneratorExp = _visit_comprehension_expr
    visit_DictComp = _visit_comprehension_expr


def _remove_docstrings_from_ast(node: ast.AST) -> None:
    """Remove docstrings from all modules, classes, and (async) functions in the tree.

    Uses ast.walk so definitions nested inside if/try/with blocks are covered too.
    A body that consisted solely of a docstring is replaced with ``pass`` so the
    tree still unparses to valid Python.
    """
    doc_owners = (ast.FunctionDef, ast.AsyncFunctionDef, ast.ClassDef, ast.Module)
    for current_node in ast.walk(node):
        if isinstance(current_node, doc_owners):
            body = current_node.body
            if (
                body
                and isinstance(body[0], ast.Expr)
                and isinstance(body[0].value, ast.Constant)
                and isinstance(body[0].value.value, str)
            ):
                current_node.body = body[1:] or [ast.Pass()]


def normalize_python_code(code: str, remove_docstrings: bool = True) -> str:
    """Normalize Python code to a canonical form for comparison.

    Replaces local variable names with canonical forms (var_0, var_1, etc.)
    while preserving function names, class names, parameters, and imports.
    Raises SyntaxError if ``code`` does not parse.
    """
    tree = ast.parse(code)

    if remove_docstrings:
        _remove_docstrings_from_ast(tree)

    normalizer = VariableNormalizer()
    normalized_tree = normalizer.visit(tree)
    ast.fix_missing_locations(normalized_tree)

    return ast.unparse(normalized_tree)
def prepare_python_module(
    original_module_code: str, original_module_path: Path, project_root: Path
) -> tuple[dict[Path, ValidCode], ast.Module] | None:
    """Parse a Python module, normalize its code, and validate imported callee modules.

    Returns a mapping of file paths to ValidCode (for the module and its imported
    callees) plus the parsed AST, or None on syntax error.
    """
    from codeflash.languages.python.static_analysis.code_replacer import normalize_code, normalize_node
    from codeflash.languages.python.static_analysis.static_analysis import analyze_imported_modules

    # Parse the entry module first; a syntax error here aborts the whole preparation.
    try:
        module_ast = ast.parse(original_module_code)
    except SyntaxError as e:
        logger.warning(f"Syntax error parsing code in {original_module_path}: {e}")
        logger.info("Skipping optimization due to file error.")
        return None

    validated: dict[Path, ValidCode] = {
        original_module_path: ValidCode(
            source_code=original_module_code,
            normalized_code=ast.unparse(normalize_node(module_ast)),
        )
    }

    # Validate each imported callee module the same way; any syntax error aborts.
    for analysis in analyze_imported_modules(original_module_code, original_module_path, project_root):
        callee_code = analysis.file_path.read_text(encoding="utf8")
        try:
            normalized_callee = normalize_code(callee_code)
        except SyntaxError as e:
            logger.warning(f"Syntax error parsing code in callee module {analysis.file_path}: {e}")
            logger.info("Skipping optimization due to helper file error.")
            return None
        validated[analysis.file_path] = ValidCode(source_code=callee_code, normalized_code=normalized_callee)

    return validated, module_ast


def resolve_python_function_ast(
    function_name: str, parents: list[FunctionParent], module_ast: ast.Module
) -> ast.FunctionDef | ast.AsyncFunctionDef | None:
    """Look up a function/method AST node in a parsed Python module."""
    # Imported lazily to avoid a static-analysis import cycle at module load time.
    from codeflash.languages.python.static_analysis.static_analysis import get_first_top_level_function_or_method_ast

    return get_first_top_level_function_or_method_ast(function_name, parents, module_ast)
000000000..a2417894b --- /dev/null +++ b/codeflash/languages/python/parse_xml.py @@ -0,0 +1,238 @@ +r"""Python-specific JUnit XML parsing with 6-field timing markers. + +Python uses extended 6-field markers: + Start: !$######module:class_prefix.test_func:func_tested:loop_index:iteration_id######$!\n + End: !######module:class_prefix.test_func:func_tested:loop_index:iteration_id:runtime######! +""" + +from __future__ import annotations + +import os +import re +from typing import TYPE_CHECKING + +from junitparser.xunit2 import JUnitXml + +from codeflash.cli_cmds.console import console, logger +from codeflash.code_utils.code_utils import file_path_from_module_name, module_name_from_file_path +from codeflash.models.models import FunctionTestInvocation, InvocationId, TestResults + +if TYPE_CHECKING: + import subprocess + from pathlib import Path + + from codeflash.models.models import TestFiles + from codeflash.verification.verification_utils import TestConfig + +matches_re_start = re.compile( + r"!\$######([^:]*)" # group 1: module path + r":((?:[^:.]*\.)*)" # group 2: class prefix with trailing dot, or empty + r"([^.:]*)" # group 3: test function name + r":([^:]*)" # group 4: function being tested + r":([^:]*)" # group 5: loop index + r":([^#]*)" # group 6: iteration id + r"######\$!\n" +) +matches_re_end = re.compile( + r"!######([^:]*)" # group 1: module path + r":((?:[^:.]*\.)*)" # group 2: class prefix with trailing dot, or empty + r"([^.:]*)" # group 3: test function name + r":([^:]*)" # group 4: function being tested + r":([^:]*)" # group 5: loop index + r":([^#]*)" # group 6: iteration_id or iteration_id:runtime + r"######!" 
+) + + +def _parse_func(file_path: Path): + from lxml.etree import XMLParser, parse + + xml_parser = XMLParser(huge_tree=True) + return parse(file_path, xml_parser) + + +def parse_python_test_xml( + test_xml_file_path: Path, + test_files: TestFiles, + test_config: TestConfig, + run_result: subprocess.CompletedProcess | None = None, +) -> TestResults: + from codeflash.verification.parse_test_output import resolve_test_file_from_class_path + + test_results = TestResults() + if not test_xml_file_path.exists(): + logger.warning(f"No test results for {test_xml_file_path} found.") + console.rule() + return test_results + try: + xml = JUnitXml.fromfile(str(test_xml_file_path), parse_func=_parse_func) + except Exception as e: + logger.warning(f"Failed to parse {test_xml_file_path} as JUnitXml. Exception: {e}") + return test_results + base_dir = test_config.tests_project_rootdir + + for suite in xml: + for testcase in suite: + class_name = testcase.classname + test_file_name = suite._elem.attrib.get("file") # noqa: SLF001 + if ( + test_file_name == f"unittest{os.sep}loader.py" + and class_name == "unittest.loader._FailedTest" + and suite.errors == 1 + and suite.tests == 1 + ): + logger.info("Test failed to load, skipping it.") + if run_result is not None: + if isinstance(run_result.stdout, str) and isinstance(run_result.stderr, str): + logger.info(f"Test log - STDOUT : {run_result.stdout} \n STDERR : {run_result.stderr}") + else: + logger.info( + f"Test log - STDOUT : {run_result.stdout.decode()} \n STDERR : {run_result.stderr.decode()}" + ) + return test_results + + test_class_path = testcase.classname + if test_class_path and test_class_path.split(".")[0] in ("pytest", "_pytest"): + logger.debug(f"Skipping pytest-internal test entry: {test_class_path}") + continue + try: + if testcase.name is None: + logger.debug( + f"testcase.name is None for testcase {testcase!r} in file {test_xml_file_path}, skipping" + ) + continue + test_function = testcase.name.split("[", 1)[0] if 
"[" in testcase.name else testcase.name + except (AttributeError, TypeError) as e: + msg = ( + f"Accessing testcase.name in parse_test_xml for testcase {testcase!r} in file" + f" {test_xml_file_path} has exception: {e}" + ) + logger.exception(msg) + continue + if test_file_name is None: + if test_class_path: + test_file_path = resolve_test_file_from_class_path(test_class_path, base_dir) + if test_file_path is None: + logger.warning(f"Could not find the test for file name - {test_class_path} ") + continue + else: + test_file_path = file_path_from_module_name(test_function, base_dir) + else: + test_file_path = base_dir / test_file_name + assert test_file_path, f"Test file path not found for {test_file_name}" + + if not test_file_path.exists(): + logger.warning(f"Could not find the test for file name - {test_file_path} ") + continue + test_type = test_files.get_test_type_by_instrumented_file_path(test_file_path) + if test_type is None: + test_type = test_files.get_test_type_by_original_file_path(test_file_path) + if test_type is None: + registered_paths = [str(tf.instrumented_behavior_file_path) for tf in test_files.test_files] + logger.warning( + f"Test type not found for '{test_file_path}'. " + f"Registered test files: {registered_paths}. Skipping test case." 
+ ) + continue + test_module_path = module_name_from_file_path(test_file_path, test_config.tests_project_rootdir) + result = testcase.is_passed + test_class = None + if class_name is not None and class_name.startswith(test_module_path): + test_class = class_name[len(test_module_path) + 1 :] + + loop_index = int(testcase.name.split("[ ")[-1][:-2]) if testcase.name and "[" in testcase.name else 1 + + timed_out = False + if len(testcase.result) > 1: + logger.debug(f"!!!!!Multiple results for {testcase.name or ''} in {test_xml_file_path}!!!") + if len(testcase.result) == 1: + message = testcase.result[0].message.lower() + if "failed: timeout >" in message or "timed out" in message: + timed_out = True + + sys_stdout = testcase.system_out or "" + + begin_matches = list(matches_re_start.finditer(sys_stdout)) + end_matches: dict[tuple, re.Match] = {} + for match in matches_re_end.finditer(sys_stdout): + groups = match.groups() + if len(groups[5].split(":")) > 1: + iteration_id = groups[5].split(":")[0] + groups = (*groups[:5], iteration_id) + end_matches[groups] = match + + if not begin_matches: + test_results.add( + FunctionTestInvocation( + loop_index=loop_index, + id=InvocationId( + test_module_path=test_module_path, + test_class_name=test_class, + test_function_name=test_function, + function_getting_tested="", + iteration_id="", + ), + file_name=test_file_path, + runtime=None, + test_framework=test_config.test_framework, + did_pass=result, + test_type=test_type, + return_value=None, + timed_out=timed_out, + stdout="", + ) + ) + else: + for match_index, match in enumerate(begin_matches): + groups = match.groups() + runtime = None + + end_match = end_matches.get(groups) + iteration_id = groups[5] + if end_match: + stdout = sys_stdout[match.end() : end_match.start()] + split_val = end_match.groups()[5].split(":") + if len(split_val) > 1: + iteration_id = split_val[0] + runtime = int(split_val[1]) + else: + iteration_id, runtime = split_val[0], None + elif match_index == 
len(begin_matches) - 1: + stdout = sys_stdout[match.end() :] + else: + stdout = sys_stdout[match.end() : begin_matches[match_index + 1].start()] + + test_results.add( + FunctionTestInvocation( + loop_index=int(groups[4]), + id=InvocationId( + test_module_path=groups[0], + test_class_name=None if groups[1] == "" else groups[1][:-1], + test_function_name=groups[2], + function_getting_tested=groups[3], + iteration_id=iteration_id, + ), + file_name=test_file_path, + runtime=runtime, + test_framework=test_config.test_framework, + did_pass=result, + test_type=test_type, + return_value=None, + timed_out=timed_out, + stdout=stdout, + ) + ) + + if not test_results: + logger.info( + f"Tests '{[test_file.original_file_path for test_file in test_files.test_files]}' failed to run, skipping" + ) + if run_result is not None: + stdout, stderr = "", "" + try: + stdout = run_result.stdout.decode() + stderr = run_result.stderr.decode() + except AttributeError: + stdout = run_result.stderr + logger.debug(f"Test log - STDOUT : {stdout} \n STDERR : {stderr}") + return test_results diff --git a/codeflash/languages/python/static_analysis/code_extractor.py b/codeflash/languages/python/static_analysis/code_extractor.py index 704f9e3db..49b9e2b1b 100644 --- a/codeflash/languages/python/static_analysis/code_extractor.py +++ b/codeflash/languages/python/static_analysis/code_extractor.py @@ -684,7 +684,7 @@ def resolve_star_import(module_name: str, project_root: Path) -> set[str]: def add_needed_imports_from_module( src_module_code: str, - dst_module_code: str, + dst_module_code: str | cst.Module, src_path: Path, dst_path: Path, project_root: Path, @@ -696,6 +696,8 @@ def add_needed_imports_from_module( if not helper_functions_fqn: helper_functions_fqn = {f.fully_qualified_name for f in (helper_functions or [])} + dst_code_fallback = dst_module_code if isinstance(dst_module_code, str) else dst_module_code.code + src_module_and_package: ModuleNameAndPackage = 
calculate_module_and_package(project_root, src_path) dst_module_and_package: ModuleNameAndPackage = calculate_module_and_package(project_root, dst_path) @@ -715,15 +717,19 @@ def add_needed_imports_from_module( cst.parse_module(src_module_code).visit(gatherer) except Exception as e: logger.error(f"Error parsing source module code: {e}") - return dst_module_code + return dst_code_fallback dotted_import_collector = DottedImportCollector() - try: - parsed_dst_module = cst.parse_module(dst_module_code) + if isinstance(dst_module_code, cst.Module): + parsed_dst_module = dst_module_code parsed_dst_module.visit(dotted_import_collector) - except cst.ParserSyntaxError as e: - logger.exception(f"Syntax error in destination module code: {e}") - return dst_module_code # Return the original code if there's a syntax error + else: + try: + parsed_dst_module = cst.parse_module(dst_module_code) + parsed_dst_module.visit(dotted_import_collector) + except cst.ParserSyntaxError as e: + logger.exception(f"Syntax error in destination module code: {e}") + return dst_code_fallback try: for mod in gatherer.module_imports: @@ -768,7 +774,7 @@ def add_needed_imports_from_module( RemoveImportsVisitor.remove_unused_import(dst_context, mod, obj) except Exception as e: logger.exception(f"Error adding imports to destination module code: {e}") - return dst_module_code + return dst_code_fallback for mod, asname in gatherer.module_aliases.items(): if not asname: @@ -796,7 +802,7 @@ def add_needed_imports_from_module( return transformed_module.code.lstrip("\n") except Exception as e: logger.exception(f"Error adding imports to destination module code: {e}") - return dst_module_code + return dst_code_fallback def get_code(functions_to_optimize: list[FunctionToOptimize]) -> tuple[str | None, set[tuple[str, str]]]: diff --git a/codeflash/languages/python/static_analysis/code_replacer.py b/codeflash/languages/python/static_analysis/code_replacer.py index fb71fe0c7..6d9a3b3f1 100644 --- 
a/codeflash/languages/python/static_analysis/code_replacer.py +++ b/codeflash/languages/python/static_analysis/code_replacer.py @@ -4,8 +4,7 @@ from collections import defaultdict from functools import lru_cache from itertools import chain -from pathlib import Path -from typing import TYPE_CHECKING, Optional, TypeVar +from typing import TYPE_CHECKING, TypeVar import libcst as cst from libcst.metadata import PositionProvider @@ -13,7 +12,7 @@ from codeflash.cli_cmds.console import logger from codeflash.code_utils.config_parser import find_conftest_files from codeflash.code_utils.formatter import sort_imports -from codeflash.languages import is_python +from codeflash.languages.code_replacer import get_optimized_code_for_module from codeflash.languages.python.static_analysis.code_extractor import ( add_global_assignments, add_needed_imports_from_module, @@ -25,9 +24,7 @@ if TYPE_CHECKING: from pathlib import Path - from codeflash.discovery.functions_to_optimize import FunctionToOptimize - from codeflash.languages.base import LanguageSupport - from codeflash.models.models import CodeOptimizationContext, CodeStringsMarkdown, OptimizedCandidate, ValidCode + from codeflash.models.models import CodeStringsMarkdown ASTNodeT = TypeVar("ASTNodeT", bound=ast.AST) @@ -240,149 +237,6 @@ def add_custom_marker_to_all_tests(test_paths: list[Path]) -> None: test_path.write_text(modified_module.code, encoding="utf-8") -class OptimFunctionCollector(cst.CSTVisitor): - METADATA_DEPENDENCIES = (cst.metadata.ParentNodeProvider,) - - def __init__( - self, - preexisting_objects: set[tuple[str, tuple[FunctionParent, ...]]] | None = None, - function_names: set[tuple[str | None, str]] | None = None, - ) -> None: - super().__init__() - self.preexisting_objects = preexisting_objects if preexisting_objects is not None else set() - - self.function_names = function_names # set of (class_name, function_name) - self.modified_functions: dict[ - tuple[str | None, str], cst.FunctionDef - ] = {} # keys 
are (class_name, function_name) - self.new_functions: list[cst.FunctionDef] = [] - self.new_class_functions: dict[str, list[cst.FunctionDef]] = defaultdict(list) - self.new_classes: list[cst.ClassDef] = [] - self.current_class = None - self.modified_init_functions: dict[str, cst.FunctionDef] = {} - - def visit_FunctionDef(self, node: cst.FunctionDef) -> bool: - if (self.current_class, node.name.value) in self.function_names: - self.modified_functions[(self.current_class, node.name.value)] = node - elif self.current_class and node.name.value == "__init__": - self.modified_init_functions[self.current_class] = node - elif ( - self.preexisting_objects - and (node.name.value, ()) not in self.preexisting_objects - and self.current_class is None - ): - self.new_functions.append(node) - return False - - def visit_ClassDef(self, node: cst.ClassDef) -> bool: - if self.current_class: - return False # If already in a class, do not recurse deeper - self.current_class = node.name.value - - parents = (FunctionParent(name=node.name.value, type="ClassDef"),) - - if (node.name.value, ()) not in self.preexisting_objects: - self.new_classes.append(node) - - for child_node in node.body.body: - if ( - self.preexisting_objects - and isinstance(child_node, cst.FunctionDef) - and (child_node.name.value, parents) not in self.preexisting_objects - ): - self.new_class_functions[node.name.value].append(child_node) - - return True - - def leave_ClassDef(self, node: cst.ClassDef) -> None: - if self.current_class: - self.current_class = None - - -class OptimFunctionReplacer(cst.CSTTransformer): - def __init__( - self, - modified_functions: Optional[dict[tuple[str | None, str], cst.FunctionDef]] = None, - new_classes: Optional[list[cst.ClassDef]] = None, - new_functions: Optional[list[cst.FunctionDef]] = None, - new_class_functions: Optional[dict[str, list[cst.FunctionDef]]] = None, - modified_init_functions: Optional[dict[str, cst.FunctionDef]] = None, - ) -> None: - super().__init__() - 
self.modified_functions = modified_functions if modified_functions is not None else {} - self.new_functions = new_functions if new_functions is not None else [] - self.new_classes = new_classes if new_classes is not None else [] - self.new_class_functions = new_class_functions if new_class_functions is not None else defaultdict(list) - self.modified_init_functions: dict[str, cst.FunctionDef] = ( - modified_init_functions if modified_init_functions is not None else {} - ) - self.current_class = None - - def visit_FunctionDef(self, node: cst.FunctionDef) -> bool: - return False - - def leave_FunctionDef(self, original_node: cst.FunctionDef, updated_node: cst.FunctionDef) -> cst.FunctionDef: - if (self.current_class, original_node.name.value) in self.modified_functions: - node = self.modified_functions[(self.current_class, original_node.name.value)] - return updated_node.with_changes(body=node.body, decorators=node.decorators) - if original_node.name.value == "__init__" and self.current_class in self.modified_init_functions: - return self.modified_init_functions[self.current_class] - - return updated_node - - def visit_ClassDef(self, node: cst.ClassDef) -> bool: - if self.current_class: - return False # If already in a class, do not recurse deeper - self.current_class = node.name.value - return True - - def leave_ClassDef(self, original_node: cst.ClassDef, updated_node: cst.ClassDef) -> cst.ClassDef: - if self.current_class and self.current_class == original_node.name.value: - self.current_class = None - if original_node.name.value in self.new_class_functions: - return updated_node.with_changes( - body=updated_node.body.with_changes( - body=(list(updated_node.body.body) + list(self.new_class_functions[original_node.name.value])) - ) - ) - return updated_node - - def leave_Module(self, original_node: cst.Module, updated_node: cst.Module) -> cst.Module: - node = updated_node - max_function_index = None - max_class_index = None - for index, _node in enumerate(node.body): 
- if isinstance(_node, cst.FunctionDef): - max_function_index = index - if isinstance(_node, cst.ClassDef): - max_class_index = index - - if self.new_classes: - existing_class_names = {_node.name.value for _node in node.body if isinstance(_node, cst.ClassDef)} - - unique_classes = [ - new_class for new_class in self.new_classes if new_class.name.value not in existing_class_names - ] - if unique_classes: - new_classes_insertion_idx = max_class_index or find_insertion_index_after_imports(node) - new_body = list( - chain(node.body[:new_classes_insertion_idx], unique_classes, node.body[new_classes_insertion_idx:]) - ) - node = node.with_changes(body=new_body) - - if max_function_index is not None: - node = node.with_changes( - body=(*node.body[: max_function_index + 1], *self.new_functions, *node.body[max_function_index + 1 :]) - ) - elif max_class_index is not None: - node = node.with_changes( - body=(*node.body[: max_class_index + 1], *self.new_functions, *node.body[max_class_index + 1 :]) - ) - else: - node = node.with_changes(body=(*self.new_functions, *node.body)) - return node - - def replace_functions_in_file( source_code: str, original_function_names: list[str], @@ -535,14 +389,7 @@ def replace_function_definitions_in_module( preexisting_objects: set[tuple[str, tuple[FunctionParent, ...]]], project_root_path: Path, should_add_global_assignments: bool = True, - function_to_optimize: Optional[FunctionToOptimize] = None, ) -> bool: - # Route to language-specific implementation for non-Python languages - if not is_python(): - return replace_function_definitions_for_language( - function_names, optimized_code, module_abspath, project_root_path, function_to_optimize - ) - source_code: str = module_abspath.read_text(encoding="utf8") code_to_apply = get_optimized_code_for_module(module_abspath.relative_to(project_root_path), optimized_code) @@ -564,311 +411,5 @@ def replace_function_definitions_in_module( return True -def replace_function_definitions_for_language( - 
function_names: list[str], - optimized_code: CodeStringsMarkdown, - module_abspath: Path, - project_root_path: Path, - function_to_optimize: Optional[FunctionToOptimize] = None, -) -> bool: - """Replace function definitions for non-Python languages. - - Uses the language support abstraction to perform code replacement. - - Args: - function_names: List of qualified function names to replace. - optimized_code: The optimized code to apply. - module_abspath: Path to the module file. - project_root_path: Root of the project. - function_to_optimize: The function being optimized (needed for line info). - - Returns: - True if the code was modified, False if no changes. - - """ - from codeflash.languages import get_language_support - from codeflash.languages.base import Language - - original_source_code: str = module_abspath.read_text(encoding="utf8") - code_to_apply = get_optimized_code_for_module(module_abspath.relative_to(project_root_path), optimized_code) - - if not code_to_apply.strip(): - return False - - # Get language support - language = Language(optimized_code.language) - lang_support = get_language_support(language) - - # Add any new global declarations from the optimized code to the original source - original_source_code = lang_support.add_global_declarations( - optimized_code=code_to_apply, original_source=original_source_code, module_abspath=module_abspath - ) - - # If we have function_to_optimize with line info and this is the main file, use it for precise replacement - if ( - function_to_optimize - and function_to_optimize.starting_line - and function_to_optimize.ending_line - and function_to_optimize.file_path == module_abspath - ): - # For Java, we need to pass the full optimized code so replace_function can - # extract and add any new class members (static fields, helper methods). - # For other languages, we extract just the target function. 
- if language == Language.JAVA: - new_code = lang_support.replace_function(original_source_code, function_to_optimize, code_to_apply) - else: - # Extract just the target function from the optimized code - optimized_func = _extract_function_from_code( - lang_support, code_to_apply, function_to_optimize.function_name, module_abspath - ) - if optimized_func: - new_code = lang_support.replace_function(original_source_code, function_to_optimize, optimized_func) - else: - # Fallback: use the entire optimized code (for simple single-function files) - new_code = lang_support.replace_function(original_source_code, function_to_optimize, code_to_apply) - else: - # For helper files or when we don't have precise line info: - # Find each function by name in both original and optimized code - # Then replace with the corresponding optimized version - new_code = original_source_code - modified = False - - # Get the list of function names to replace - functions_to_replace = list(function_names) - - for func_name in functions_to_replace: - # Re-discover functions from current code state to get correct line numbers - current_functions = lang_support.discover_functions_from_source(new_code, module_abspath) - - # Find the function in current code - func = None - for f in current_functions: - if func_name in (f.qualified_name, f.function_name): - func = f - break - - if func is None: - continue - - # For Java, pass the full optimized code to handle class member insertion. - # For other languages, extract just the target function. 
- if language == Language.JAVA: - new_code = lang_support.replace_function(new_code, func, code_to_apply) - modified = True - else: - # Extract just this function from the optimized code - optimized_func = _extract_function_from_code( - lang_support, code_to_apply, func.function_name, module_abspath - ) - if optimized_func: - new_code = lang_support.replace_function(new_code, func, optimized_func) - modified = True - - if not modified: - logger.warning(f"Could not find function {function_names} in {module_abspath}") - return False - - # Check if there was actually a change - if original_source_code.strip() == new_code.strip(): - return False - - module_abspath.write_text(new_code, encoding="utf8") - return True - - -def _extract_function_from_code( - lang_support: LanguageSupport, source_code: str, function_name: str, file_path: Path | None = None -) -> str | None: - """Extract a specific function's source code from a code string. - - Includes JSDoc/docstring comments if present. - - Args: - lang_support: Language support instance. - source_code: The full source code containing the function. - function_name: Name of the function to extract. - file_path: Path to the file (used to determine correct analyzer for JS/TS). - - Returns: - The function's source code (including doc comments), or None if not found. 
- - """ - try: - # Use the language support to find functions in the source - # file_path is needed for JS/TS to determine correct analyzer (TypeScript vs JavaScript) - functions = lang_support.discover_functions_from_source(source_code, file_path) - for func in functions: - if func.function_name == function_name: - # Extract the function's source using line numbers - # Use doc_start_line if available to include JSDoc/docstring - lines = source_code.splitlines(keepends=True) - effective_start = func.doc_start_line or func.starting_line - if effective_start and func.ending_line and effective_start <= len(lines): - func_lines = lines[effective_start - 1 : func.ending_line] - return "".join(func_lines) - except Exception as e: - logger.debug(f"Error extracting function {function_name}: {e}") - - return None - - -def get_optimized_code_for_module(relative_path: Path, optimized_code: CodeStringsMarkdown) -> str: - file_to_code_context = optimized_code.file_to_path() - relative_path_str = str(relative_path) - module_optimized_code = file_to_code_context.get(relative_path_str) - if module_optimized_code is None: - # Fallback: if there's only one code block with None file path, - # use it regardless of the expected path (the AI server doesn't always include file paths) - if "None" in file_to_code_context and len(file_to_code_context) == 1: - module_optimized_code = file_to_code_context["None"] - logger.debug(f"Using code block with None file_path for {relative_path}") - else: - # Fallback: try to match by just the filename (for Java/JS where the AI - # might return just the class name like "Algorithms.java" instead of - # the full path like "src/main/java/com/example/Algorithms.java") - target_filename = relative_path.name - for file_path_str, code in file_to_code_context.items(): - if file_path_str: - # Extract filename without creating Path object repeatedly - if file_path_str.endswith(target_filename) and ( - len(file_path_str) == len(target_filename) - or 
file_path_str[-len(target_filename) - 1] in ("/", "\\") - ): - module_optimized_code = code - logger.debug(f"Matched {file_path_str} to {relative_path} by filename") - break - - if module_optimized_code is None: - # Also try matching if there's only one code file, but ONLY for non-Python - # languages where path matching is less strict. For Python, we require - # exact path matching to avoid applying code meant for one file to another. - # This prevents bugs like PR #1309 where a function was duplicated because - # optimized code for formatter.py was incorrectly applied to support.py. - if len(file_to_code_context) == 1 and not is_python(): - only_key = next(iter(file_to_code_context.keys())) - module_optimized_code = file_to_code_context[only_key] - logger.debug(f"Using only code block {only_key} for {relative_path}") - else: - # Delay expensive string formatting until actually logging - if logger.isEnabledFor(logger.level): - logger.warning( - f"Optimized code not found for {relative_path} In the context\n-------\n{optimized_code}\n-------\n" - "re-check your 'markdown code structure'" - f"existing files are {file_to_code_context.keys()}" - ) - module_optimized_code = "" - return module_optimized_code - - def is_zero_diff(original_code: str, new_code: str) -> bool: return normalize_code(original_code) == normalize_code(new_code) - - -def replace_optimized_code( - callee_module_paths: set[Path], - candidates: list[OptimizedCandidate], - code_context: CodeOptimizationContext, - function_to_optimize: FunctionToOptimize, - validated_original_code: dict[Path, ValidCode], - project_root: Path, -) -> tuple[set[Path], dict[str, dict[Path, str]]]: - initial_optimized_code = { - candidate.optimization_id: replace_functions_and_add_imports( - validated_original_code[function_to_optimize.file_path].source_code, - [function_to_optimize.qualified_name], - candidate.source_code, - function_to_optimize.file_path, - function_to_optimize.file_path, - 
code_context.preexisting_objects, - project_root, - ) - for candidate in candidates - } - callee_original_code = { - module_path: validated_original_code[module_path].source_code for module_path in callee_module_paths - } - intermediate_original_code: dict[str, dict[Path, str]] = { - candidate.optimization_id: ( - callee_original_code | {function_to_optimize.file_path: initial_optimized_code[candidate.optimization_id]} - ) - for candidate in candidates - } - module_paths = callee_module_paths | {function_to_optimize.file_path} - optimized_code = { - candidate.optimization_id: { - module_path: replace_functions_and_add_imports( - intermediate_original_code[candidate.optimization_id][module_path], - ( - [ - callee.qualified_name - for callee in code_context.helper_functions - if callee.file_path == module_path and callee.definition_type != "class" - ] - ), - candidate.source_code, - function_to_optimize.file_path, - module_path, - [], - project_root, - ) - for module_path in module_paths - } - for candidate in candidates - } - return module_paths, optimized_code - - -def is_optimized_module_code_zero_diff( - candidates: list[OptimizedCandidate], - validated_original_code: dict[Path, ValidCode], - optimized_code: dict[str, dict[Path, str]], - module_paths: set[Path], -) -> dict[str, dict[Path, bool]]: - return { - candidate.optimization_id: { - callee_module_path: normalize_code(optimized_code[candidate.optimization_id][callee_module_path]) - == validated_original_code[callee_module_path].normalized_code - for callee_module_path in module_paths - } - for candidate in candidates - } - - -def candidates_with_diffs( - candidates: list[OptimizedCandidate], - validated_original_code: ValidCode, - optimized_code: dict[str, dict[Path, str]], - module_paths: set[Path], -) -> list[OptimizedCandidate]: - return [ - candidate - for candidate in candidates - if not all( - is_optimized_module_code_zero_diff(candidates, validated_original_code, optimized_code, module_paths)[ - 
candidate.optimization_id - ].values() - ) - ] - - -def replace_optimized_code_in_worktrees( - optimized_code: dict[str, dict[Path, str]], - candidates: list[OptimizedCandidate], # Should be candidates_with_diffs - worktrees: list[Path], - git_root: Path, # Handle None case -) -> None: - for candidate, worktree in zip(candidates, worktrees[1:]): - for module_path in optimized_code[candidate.optimization_id]: - (worktree / module_path.relative_to(git_root)).write_text( - optimized_code[candidate.optimization_id][module_path], encoding="utf8" - ) # Check with is_optimized_module_code_zero_diff - - -def function_to_optimize_original_worktree_fqn( - function_to_optimize: FunctionToOptimize, worktrees: list[Path], git_root: Path -) -> str: - return ( - str(worktrees[0].name / function_to_optimize.file_path.relative_to(git_root).with_suffix("")).replace("/", ".") - + "." - + function_to_optimize.qualified_name - ) diff --git a/codeflash/languages/python/support.py b/codeflash/languages/python/support.py index b0e6926c1..223fdaf0a 100644 --- a/codeflash/languages/python/support.py +++ b/codeflash/languages/python/support.py @@ -6,6 +6,8 @@ from pathlib import Path from typing import TYPE_CHECKING, Any +import libcst as cst + from codeflash.discovery.functions_to_optimize import FunctionToOptimize from codeflash.languages.base import ( CodeContext, @@ -17,12 +19,18 @@ TestResult, ) from codeflash.languages.registry import register_language +from codeflash.models.function_types import FunctionParent if TYPE_CHECKING: + import ast from collections.abc import Sequence + from libcst import CSTNode + from libcst.metadata import CodeRange + from codeflash.languages.base import DependencyResolver - from codeflash.models.models import FunctionSource, GeneratedTestsList, InvocationId + from codeflash.models.models import FunctionSource, GeneratedTestsList, InvocationId, ValidCode + from codeflash.verification.verification_utils import TestConfig logger = logging.getLogger(__name__) 
@@ -41,6 +49,70 @@ def function_sources_to_helpers(sources: list[FunctionSource]) -> list[HelperFun ] +class ReturnStatementVisitor(cst.CSTVisitor): + def __init__(self) -> None: + super().__init__() + self.has_return_statement: bool = False + + def visit_Return(self, node: cst.Return) -> None: + self.has_return_statement = True + + +class FunctionVisitor(cst.CSTVisitor): + METADATA_DEPENDENCIES = (cst.metadata.PositionProvider, cst.metadata.ParentNodeProvider) + + def __init__(self, file_path: Path) -> None: + super().__init__() + self.file_path: Path = file_path + self.functions: list[FunctionToOptimize] = [] + + @staticmethod + def is_pytest_fixture(node: cst.FunctionDef) -> bool: + for decorator in node.decorators: + dec = decorator.decorator + if isinstance(dec, cst.Call): + dec = dec.func + if isinstance(dec, cst.Attribute) and dec.attr.value == "fixture": + if isinstance(dec.value, cst.Name) and dec.value.value == "pytest": + return True + if isinstance(dec, cst.Name) and dec.value == "fixture": + return True + return False + + @staticmethod + def is_property(node: cst.FunctionDef) -> bool: + for decorator in node.decorators: + dec = decorator.decorator + if isinstance(dec, cst.Name) and dec.value in ("property", "cached_property"): + return True + return False + + def visit_FunctionDef(self, node: cst.FunctionDef) -> None: + return_visitor: ReturnStatementVisitor = ReturnStatementVisitor() + node.visit(return_visitor) + if return_visitor.has_return_statement and not self.is_pytest_fixture(node) and not self.is_property(node): + pos: CodeRange = self.get_metadata(cst.metadata.PositionProvider, node) + parents: CSTNode | None = self.get_metadata(cst.metadata.ParentNodeProvider, node) + ast_parents: list[FunctionParent] = [] + while parents is not None: + if isinstance(parents, cst.FunctionDef): + # Skip nested functions — only discover top-level and class-level functions + return + if isinstance(parents, cst.ClassDef): + 
ast_parents.append(FunctionParent(parents.name.value, parents.__class__.__name__)) + parents = self.get_metadata(cst.metadata.ParentNodeProvider, parents, default=None) + self.functions.append( + FunctionToOptimize( + function_name=node.name.value, + file_path=self.file_path, + parents=list(reversed(ast_parents)), + starting_line=pos.start.line, + ending_line=pos.end.line, + is_async=bool(node.asynchronous), + ) + ) + + @register_language class PythonSupport: """Python language support implementation. @@ -107,79 +179,109 @@ def dir_excludes(self) -> frozenset[str]: } ) - # === Discovery === - - def discover_functions( - self, file_path: Path, filter_criteria: FunctionFilterCriteria | None = None - ) -> list[FunctionToOptimize]: - """Find all optimizable functions in a Python file. - - Uses libcst to parse the file and find functions with return statements. - - Args: - file_path: Path to the Python file to analyze. - filter_criteria: Optional criteria to filter functions. + @property + def default_language_version(self) -> str | None: + return None - Returns: - List of FunctionToOptimize objects for discovered functions. 
+ @property + def valid_test_frameworks(self) -> tuple[str, ...]: + return ("pytest", "unittest") - """ - import libcst as cst + @property + def test_result_serialization_format(self) -> str: + return "pickle" - from codeflash.discovery.functions_to_optimize import FunctionVisitor + def parse_test_xml( + self, test_xml_file_path: Path, test_files: Any, test_config: Any, run_result: Any = None + ) -> Any: + from codeflash.languages.python.parse_xml import parse_python_test_xml - criteria = filter_criteria or FunctionFilterCriteria() + return parse_python_test_xml(test_xml_file_path, test_files, test_config, run_result) - try: - # Read and parse the file using libcst with metadata - source = file_path.read_text(encoding="utf-8") - try: - tree = cst.parse_module(source) - except Exception: - return [] - - # Use the libcst-based FunctionVisitor for accurate line numbers - wrapper = cst.metadata.MetadataWrapper(tree) - function_visitor = FunctionVisitor(file_path=str(file_path)) - wrapper.visit(function_visitor) + def load_coverage( + self, + coverage_database_file: Path, + function_name: str, + code_context: Any, + source_file: Path, + coverage_config_file: Path | None = None, + ) -> Any: + from codeflash.verification.coverage_utils import CoverageUtils + + return CoverageUtils.load_from_sqlite_database( + database_path=coverage_database_file, + config_path=coverage_config_file, + source_code_path=source_file, + code_context=code_context, + function_name=function_name, + ) - functions: list[FunctionToOptimize] = [] - for func in function_visitor.functions: - if not isinstance(func, FunctionToOptimize): - continue + def process_generated_test_strings( + self, + generated_test_source: str, + instrumented_behavior_test_source: str, + instrumented_perf_test_source: str, + function_to_optimize: Any, + test_path: Path, + test_cfg: Any, + project_module_system: str | None, + ) -> tuple[str, str, str]: + from codeflash.code_utils.code_utils import get_run_tmp_file + + 
temp_run_dir = get_run_tmp_file(Path()).as_posix() + instrumented_behavior_test_source = instrumented_behavior_test_source.replace( + "{codeflash_run_tmp_dir_client_side}", temp_run_dir + ) + instrumented_perf_test_source = instrumented_perf_test_source.replace( + "{codeflash_run_tmp_dir_client_side}", temp_run_dir + ) + return generated_test_source, instrumented_behavior_test_source, instrumented_perf_test_source - # Apply filter criteria - if not criteria.include_async and func.is_async: - continue + def adjust_test_config_for_discovery(self, test_cfg: Any) -> None: + pass - if not criteria.include_methods and func.parents: - continue + def detect_module_system(self, project_root: Path, source_file: Path) -> str | None: + return None - # Check for return statement requirement (FunctionVisitor already filters this) - # but we double-check here for consistency - if criteria.require_return and func.starting_line is None: - continue + # === Discovery === - # Add is_method field based on parents - func_with_is_method = FunctionToOptimize( - function_name=func.function_name, - file_path=file_path, - parents=func.parents, - starting_line=func.starting_line, - ending_line=func.ending_line, - starting_col=func.starting_col, - ending_col=func.ending_col, - is_async=func.is_async, - is_method=len(func.parents) > 0 and any(p.type == "ClassDef" for p in func.parents), - language="python", - ) - functions.append(func_with_is_method) + def discover_functions( + self, source: str, file_path: Path, filter_criteria: FunctionFilterCriteria | None = None + ) -> list[FunctionToOptimize]: + criteria = filter_criteria or FunctionFilterCriteria() - return functions + tree = cst.parse_module(source) + + wrapper = cst.metadata.MetadataWrapper(tree) + function_visitor = FunctionVisitor(file_path=file_path) + wrapper.visit(function_visitor) + + functions: list[FunctionToOptimize] = [] + for func in function_visitor.functions: + if not criteria.include_async and func.is_async: + continue + + 
if not criteria.include_methods and func.parents: + continue + + if criteria.require_return and func.starting_line is None: + continue + + func_with_is_method = FunctionToOptimize( + function_name=func.function_name, + file_path=file_path, + parents=func.parents, + starting_line=func.starting_line, + ending_line=func.ending_line, + starting_col=func.starting_col, + ending_col=func.ending_col, + is_async=func.is_async, + is_method=len(func.parents) > 0 and any(p.type == "ClassDef" for p in func.parents), + language="python", + ) + functions.append(func_with_is_method) - except Exception as e: - logger.warning("Failed to discover functions in %s: %s", file_path, e) - return [] + return functions def discover_tests( self, test_root: Path, source_functions: Sequence[FunctionToOptimize] @@ -589,21 +691,10 @@ def validate_syntax(self, source: str) -> bool: return False def normalize_code(self, source: str) -> str: - """Normalize Python code for deduplication. - - Removes comments, normalizes whitespace, and replaces variable names. - - Args: - source: Source code to normalize. - - Returns: - Normalized source code. 
- - """ - from codeflash.code_utils.deduplicate_code import normalize_code + from codeflash.languages.python.normalizer import normalize_python_code try: - return normalize_code(source, remove_docstrings=True, language=Language.PYTHON) + return normalize_python_code(source, remove_docstrings=True) except Exception: return source @@ -741,6 +832,17 @@ def get_test_file_suffix(self) -> str: """ return ".py" + def get_test_dir_for_source(self, test_dir: Path, source_file: Path | None) -> Path | None: + return None + + def resolve_test_file_from_class_path(self, test_class_path: str, base_dir: Path) -> Path | None: + return None + + def resolve_test_module_path_for_pr( + self, test_module_path: str, tests_project_rootdir: Path, non_generated_tests: set[Path] + ) -> Path | None: + return None + def find_test_root(self, project_root: Path) -> Path | None: """Find the test root directory for a Python project. @@ -869,20 +971,351 @@ def instrument_source_for_line_profiler( return True def parse_line_profile_results(self, line_profiler_output_file: Path) -> dict: - """Parse line profiler output for Python. + import dill as pickle + + from codeflash.verification.parse_line_profile_test_output import show_text + + line_profiler_output_file = line_profiler_output_file.with_suffix(".lprof") + stats_dict: dict = {} + if not line_profiler_output_file.exists(): + return {"timings": {}, "unit": 0, "str_out": ""} + with line_profiler_output_file.open("rb") as f: + stats = pickle.load(f) + stats_dict["timings"] = stats.timings + stats_dict["unit"] = stats.unit + str_out = show_text(stats_dict) + stats_dict["str_out"] = str_out + return stats_dict - Args: - line_profiler_output_file: Path to profiler output file. + @property + def function_optimizer_class(self) -> type: + from codeflash.languages.python.function_optimizer import PythonFunctionOptimizer - Returns: - Dict with timing information. 
+ return PythonFunctionOptimizer - """ - # Python uses line_profiler which has its own output format - return {"timings": {}, "unit": 0, "str_out": ""} + def prepare_module( + self, module_code: str, module_path: Path, project_root: Path + ) -> tuple[dict[Path, ValidCode], ast.Module] | None: + from codeflash.languages.python.optimizer import prepare_python_module + + return prepare_python_module(module_code, module_path, project_root) + + pytest_cmd: str = "pytest" + + def setup_test_config(self, test_cfg: TestConfig, file_path: Path) -> None: + self.pytest_cmd = test_cfg.pytest_cmd or "pytest" + + def pytest_cmd_tokens(self, is_posix: bool) -> list[str]: + import shlex + + return shlex.split(self.pytest_cmd, posix=is_posix) + + def build_pytest_cmd(self, safe_sys_executable: str, is_posix: bool) -> list[str]: + return [safe_sys_executable, "-m", *self.pytest_cmd_tokens(is_posix)] # === Test Execution (Full Protocol) === - # Note: For Python, test execution is handled by the main test_runner.py - # which has special Python-specific logic. These methods are not called - # for Python as the test_runner checks is_python() and uses the existing path. - # They are defined here only for protocol compliance. 
+ + def run_behavioral_tests( + self, + test_paths: Any, + test_env: dict[str, str], + cwd: Path, + timeout: int | None = None, + project_root: Path | None = None, + enable_coverage: bool = False, + candidate_index: int = 0, + ) -> tuple[Path, Any, Path | None, Path | None]: + import contextlib + import shlex + import sys + + from codeflash.code_utils.code_utils import get_run_tmp_file + from codeflash.code_utils.compat import IS_POSIX, SAFE_SYS_EXECUTABLE + from codeflash.code_utils.config_consts import TOTAL_LOOPING_TIME_EFFECTIVE + from codeflash.languages.python.static_analysis.coverage_utils import prepare_coverage_files + from codeflash.models.models import TestType + from codeflash.verification.test_runner import execute_test_subprocess + + blocklisted_plugins = ["benchmark", "codspeed", "xdist", "sugar"] + + test_files: list[str] = [] + for file in test_paths.test_files: + if file.test_type == TestType.REPLAY_TEST: + if file.tests_in_file: + test_files.extend( + [ + str(file.instrumented_behavior_file_path) + "::" + test.test_function + for test in file.tests_in_file + ] + ) + else: + test_files.append(str(file.instrumented_behavior_file_path)) + + pytest_cmd_list = self.build_pytest_cmd(SAFE_SYS_EXECUTABLE, IS_POSIX) + test_files = list(set(test_files)) + + common_pytest_args = [ + "--capture=tee-sys", + "-q", + "--codeflash_loops_scope=session", + "--codeflash_min_loops=1", + "--codeflash_max_loops=1", + f"--codeflash_seconds={TOTAL_LOOPING_TIME_EFFECTIVE}", + ] + if timeout is not None: + common_pytest_args.append(f"--timeout={timeout}") + + result_file_path = get_run_tmp_file(Path("pytest_results.xml")) + result_args = [f"--junitxml={result_file_path.as_posix()}", "-o", "junit_logging=all"] + + pytest_test_env = test_env.copy() + pytest_test_env["PYTEST_PLUGINS"] = "codeflash.verification.pytest_plugin" + + coverage_database_file: Path | None = None + coverage_config_file: Path | None = None + + if enable_coverage: + coverage_database_file, 
coverage_config_file = prepare_coverage_files() + pytest_test_env["NUMBA_DISABLE_JIT"] = str(1) + pytest_test_env["TORCHDYNAMO_DISABLE"] = str(1) + pytest_test_env["PYTORCH_JIT"] = str(0) + pytest_test_env["TF_XLA_FLAGS"] = "--tf_xla_auto_jit=0" + pytest_test_env["TF_ENABLE_ONEDNN_OPTS"] = str(0) + pytest_test_env["JAX_DISABLE_JIT"] = str(0) + + is_windows = sys.platform == "win32" + if is_windows: + if coverage_database_file.exists(): + with contextlib.suppress(PermissionError, OSError): + coverage_database_file.unlink() + else: + cov_erase = execute_test_subprocess( + shlex.split(f"{SAFE_SYS_EXECUTABLE} -m coverage erase"), cwd=cwd, env=pytest_test_env, timeout=30 + ) + logger.debug(cov_erase) + coverage_cmd = [ + SAFE_SYS_EXECUTABLE, + "-m", + "coverage", + "run", + f"--rcfile={coverage_config_file.as_posix()}", + "-m", + ] + coverage_cmd.extend(self.pytest_cmd_tokens(IS_POSIX)) + + blocklist_args = [f"-p no:{plugin}" for plugin in blocklisted_plugins if plugin != "cov"] + results = execute_test_subprocess( + coverage_cmd + common_pytest_args + blocklist_args + result_args + test_files, + cwd=cwd, + env=pytest_test_env, + timeout=600, + ) + logger.debug("Result return code: %s, %s", results.returncode, results.stderr or "") + else: + blocklist_args = [f"-p no:{plugin}" for plugin in blocklisted_plugins] + + results = execute_test_subprocess( + pytest_cmd_list + common_pytest_args + blocklist_args + result_args + test_files, + cwd=cwd, + env=pytest_test_env, + timeout=600, + ) + logger.debug("Result return code: %s, %s", results.returncode, results.stderr or "") + + return result_file_path, results, coverage_database_file, coverage_config_file + + def run_benchmarking_tests( + self, + test_paths: Any, + test_env: dict[str, str], + cwd: Path, + timeout: int | None = None, + project_root: Path | None = None, + min_loops: int = 5, + max_loops: int = 100_000, + target_duration_seconds: float = 10.0, + ) -> tuple[Path, Any]: + + from codeflash.code_utils.code_utils 
import get_run_tmp_file + from codeflash.code_utils.compat import IS_POSIX, SAFE_SYS_EXECUTABLE + from codeflash.verification.test_runner import execute_test_subprocess + + blocklisted_plugins = ["codspeed", "cov", "benchmark", "profiling", "xdist", "sugar"] + + pytest_cmd_list = self.build_pytest_cmd(SAFE_SYS_EXECUTABLE, IS_POSIX) + test_files: list[str] = list({str(file.benchmarking_file_path) for file in test_paths.test_files}) + pytest_args = [ + "--capture=tee-sys", + "-q", + "--codeflash_loops_scope=session", + f"--codeflash_min_loops={min_loops}", + f"--codeflash_max_loops={max_loops}", + f"--codeflash_seconds={target_duration_seconds}", + "--codeflash_stability_check=true", + ] + if timeout is not None: + pytest_args.append(f"--timeout={timeout}") + + result_file_path = get_run_tmp_file(Path("pytest_results.xml")) + result_args = [f"--junitxml={result_file_path.as_posix()}", "-o", "junit_logging=all"] + pytest_test_env = test_env.copy() + pytest_test_env["PYTEST_PLUGINS"] = "codeflash.verification.pytest_plugin" + blocklist_args = [f"-p no:{plugin}" for plugin in blocklisted_plugins] + results = execute_test_subprocess( + pytest_cmd_list + pytest_args + blocklist_args + result_args + test_files, + cwd=cwd, + env=pytest_test_env, + timeout=600, + ) + return result_file_path, results + + def run_line_profile_tests( + self, + test_paths: Any, + test_env: dict[str, str], + cwd: Path, + timeout: int | None = None, + project_root: Path | None = None, + line_profile_output_file: Path | None = None, + ) -> tuple[Path, Any]: + + from codeflash.code_utils.code_utils import get_run_tmp_file + from codeflash.code_utils.compat import IS_POSIX, SAFE_SYS_EXECUTABLE + from codeflash.code_utils.config_consts import TOTAL_LOOPING_TIME_EFFECTIVE + from codeflash.verification.test_runner import execute_test_subprocess + + blocklisted_plugins = ["codspeed", "cov", "benchmark", "profiling", "xdist", "sugar"] + + pytest_cmd_list = self.build_pytest_cmd(SAFE_SYS_EXECUTABLE, 
IS_POSIX) + test_files: list[str] = list({str(file.benchmarking_file_path) for file in test_paths.test_files}) + pytest_args = [ + "--capture=tee-sys", + "-q", + "--codeflash_loops_scope=session", + "--codeflash_min_loops=1", + "--codeflash_max_loops=1", + f"--codeflash_seconds={TOTAL_LOOPING_TIME_EFFECTIVE}", + ] + if timeout is not None: + pytest_args.append(f"--timeout={timeout}") + result_file_path = get_run_tmp_file(Path("pytest_results.xml")) + result_args = [f"--junitxml={result_file_path.as_posix()}", "-o", "junit_logging=all"] + pytest_test_env = test_env.copy() + pytest_test_env["PYTEST_PLUGINS"] = "codeflash.verification.pytest_plugin" + blocklist_args = [f"-p no:{plugin}" for plugin in blocklisted_plugins] + pytest_test_env["LINE_PROFILE"] = "1" + results = execute_test_subprocess( + pytest_cmd_list + pytest_args + blocklist_args + result_args + test_files, + cwd=cwd, + env=pytest_test_env, + timeout=600, + ) + return result_file_path, results + + def generate_concolic_tests( + self, test_cfg: Any, project_root: Path, function_to_optimize: FunctionToOptimize, function_to_optimize_ast: Any + ) -> tuple[dict, str]: + import ast + import importlib.util + import subprocess + import tempfile + import time + + from codeflash.cli_cmds.console import console + from codeflash.code_utils.compat import SAFE_SYS_EXECUTABLE + from codeflash.code_utils.shell_utils import make_env_with_project_root + from codeflash.discovery.discover_unit_tests import discover_unit_tests + from codeflash.languages.python.static_analysis.concolic_utils import ( + clean_concolic_tests, + is_valid_concolic_test, + ) + from codeflash.languages.python.static_analysis.static_analysis import has_typed_parameters + from codeflash.lsp.helpers import is_LSP_enabled + from codeflash.telemetry.posthog_cf import ph + from codeflash.verification.verification_utils import TestConfig + + crosshair_available = importlib.util.find_spec("crosshair") is not None + + start_time = time.perf_counter() + 
function_to_concolic_tests: dict = {} + concolic_test_suite_code = "" + + if not crosshair_available: + logger.debug("Skipping concolic test generation (crosshair-tool is not installed)") + return function_to_concolic_tests, concolic_test_suite_code + + if is_LSP_enabled(): + logger.debug("Skipping concolic test generation in LSP mode") + return function_to_concolic_tests, concolic_test_suite_code + + if ( + test_cfg.concolic_test_root_dir + and isinstance(function_to_optimize_ast, ast.FunctionDef) + and has_typed_parameters(function_to_optimize_ast, function_to_optimize.parents) + ): + logger.info("Generating concolic opcode coverage tests for the original code…") + console.rule() + try: + env = make_env_with_project_root(project_root) + cover_result = subprocess.run( + [ + SAFE_SYS_EXECUTABLE, + "-m", + "crosshair", + "cover", + "--example_output_format=pytest", + "--per_condition_timeout=20", + ".".join( + [ + function_to_optimize.file_path.relative_to(project_root) + .with_suffix("") + .as_posix() + .replace("/", "."), + function_to_optimize.qualified_name, + ] + ), + ], + capture_output=True, + text=True, + cwd=project_root, + check=False, + timeout=600, + env=env, + ) + except subprocess.TimeoutExpired: + logger.debug("CrossHair Cover test generation timed out") + return function_to_concolic_tests, concolic_test_suite_code + + if cover_result.returncode == 0: + generated_concolic_test: str = cover_result.stdout + if not is_valid_concolic_test(generated_concolic_test, project_root=str(project_root)): + logger.debug("CrossHair generated invalid test, skipping") + console.rule() + return function_to_concolic_tests, concolic_test_suite_code + concolic_test_suite_code = clean_concolic_tests(generated_concolic_test) + concolic_test_suite_dir = Path(tempfile.mkdtemp(dir=test_cfg.concolic_test_root_dir)) + concolic_test_suite_path = concolic_test_suite_dir / "test_concolic_coverage.py" + concolic_test_suite_path.write_text(concolic_test_suite_code, encoding="utf8") + 
+ concolic_test_cfg = TestConfig( + tests_root=concolic_test_suite_dir, + tests_project_rootdir=test_cfg.concolic_test_root_dir, + project_root_path=project_root, + ) + function_to_concolic_tests, num_discovered_concolic_tests, _ = discover_unit_tests(concolic_test_cfg) + logger.info( + "Created %d concolic unit test case%s ", + num_discovered_concolic_tests, + "s" if num_discovered_concolic_tests != 1 else "", + ) + console.rule() + ph("cli-optimize-concolic-tests", {"num_tests": num_discovered_concolic_tests}) + + else: + logger.debug( + "Error running CrossHair Cover%s", ": " + cover_result.stderr if cover_result.stderr else "." + ) + console.rule() + end_time = time.perf_counter() + logger.debug("Generated concolic tests in %.2f seconds", end_time - start_time) + return function_to_concolic_tests, concolic_test_suite_code diff --git a/codeflash/languages/test_framework.py b/codeflash/languages/test_framework.py index ce1f3f5fe..7a5483f00 100644 --- a/codeflash/languages/test_framework.py +++ b/codeflash/languages/test_framework.py @@ -30,7 +30,7 @@ from typing import Literal -TestFramework = Literal["jest", "vitest", "mocha", "pytest", "unittest"] +TestFramework = Literal["jest", "vitest", "mocha", "pytest", "unittest", "junit5", "junit4", "testng"] # Module-level singleton for the current test framework _current_test_framework: TestFramework | None = None @@ -63,11 +63,11 @@ def set_current_test_framework(framework: TestFramework | str | None) -> None: if framework is not None: framework = framework.lower() - if framework not in ("jest", "vitest", "mocha", "pytest", "unittest"): + if framework not in ("jest", "vitest", "mocha", "pytest", "unittest", "junit5", "junit4", "testng"): # Default to jest for unknown JS frameworks, pytest for unknown Python - from codeflash.languages.current import is_javascript + from codeflash.languages.current import current_language_support - framework = "jest" if is_javascript() else "pytest" + framework = 
current_language_support().test_framework _current_test_framework = framework diff --git a/codeflash/lsp/beta.py b/codeflash/lsp/beta.py index 75f761f19..68d332b21 100644 --- a/codeflash/lsp/beta.py +++ b/codeflash/lsp/beta.py @@ -114,7 +114,7 @@ def get_functions_in_commit(params: OptimizableFunctionsInCommitParams) -> dict[ return {"functions": file_to_qualified_names, "status": "success"} -def _group_functions_by_file(functions: dict[str, list[FunctionToOptimize]]) -> dict[str, list[str]]: +def _group_functions_by_file(functions: dict[Path, list[FunctionToOptimize]]) -> dict[str, list[str]]: file_to_funcs_to_optimize, _ = filter_functions( modified_functions=functions, tests_root=server.optimizer.test_cfg.tests_root, @@ -463,14 +463,10 @@ def _initialize_current_function_optimizer() -> Union[dict[str, str], WrappedIni "message": "Failed to prepare module for optimization", } - validated_original_code, original_module_ast = module_prep_result + validated_original_code, _original_module_ast = module_prep_result function_optimizer = server.optimizer.create_function_optimizer( - fto, - function_to_optimize_source_code=validated_original_code[fto.file_path].source_code, - original_module_ast=original_module_ast, - original_module_path=fto.file_path, - function_to_tests={}, + fto, function_to_optimize_source_code=validated_original_code[fto.file_path].source_code, function_to_tests={} ) server.optimizer.current_function_optimizer = function_optimizer diff --git a/codeflash/lsp/helpers.py b/codeflash/lsp/helpers.py index b8840e046..14121ec68 100644 --- a/codeflash/lsp/helpers.py +++ b/codeflash/lsp/helpers.py @@ -18,6 +18,11 @@ def is_LSP_enabled() -> bool: return os.getenv("CODEFLASH_LSP", default="false").lower() == "true" +@lru_cache(maxsize=1) +def is_subagent_mode() -> bool: + return os.getenv("CODEFLASH_SUBAGENT_MODE", default="false").lower() == "true" + + def tree_to_markdown(tree: Tree, level: int = 0) -> str: """Convert a rich Tree into a Markdown bullet 
list.""" indent = " " * level diff --git a/codeflash/main.py b/codeflash/main.py index 690c1ae98..32ae9c66c 100644 --- a/codeflash/main.py +++ b/codeflash/main.py @@ -11,6 +11,12 @@ from pathlib import Path from typing import TYPE_CHECKING +if "--subagent" in sys.argv: + os.environ["CODEFLASH_SUBAGENT_MODE"] = "true" + import warnings + + warnings.filterwarnings("ignore") + from codeflash.cli_cmds.cli import parse_args, process_pyproject_config from codeflash.cli_cmds.cmd_init import CODEFLASH_LOGO, ask_run_end_to_end_test from codeflash.cli_cmds.console import paneled_text diff --git a/codeflash/models/models.py b/codeflash/models/models.py index 70267c067..cc957c1a3 100644 --- a/codeflash/models/models.py +++ b/codeflash/models/models.py @@ -245,8 +245,7 @@ def validate_code_syntax(self) -> CodeString: """Validate code syntax for the specified language.""" if self.language == "python": validate_python_code(self.code) - elif self.language in ("javascript", "typescript"): - # Validate JavaScript/TypeScript syntax using language support + else: from codeflash.languages.registry import get_language_support lang_support = get_language_support(self.language) diff --git a/codeflash/optimization/function_optimizer.py b/codeflash/optimization/function_optimizer.py index 795476837..f1068f27e 100644 --- a/codeflash/optimization/function_optimizer.py +++ b/codeflash/optimization/function_optimizer.py @@ -1,8 +1,8 @@ from __future__ import annotations -import ast import concurrent.futures import dataclasses +import logging import os import queue import random @@ -23,7 +23,14 @@ from codeflash.api.aiservice import AiServiceClient, AIServiceRefinerRequest, LocalAiServiceClient from codeflash.api.cfapi import add_code_context_hash, create_staging, get_cfapi_base_urls, mark_optimization_success from codeflash.benchmarking.utils import process_benchmark_data -from codeflash.cli_cmds.console import DEBUG_MODE, code_print, console, logger, lsp_log, progress_bar +from 
codeflash.cli_cmds.console import ( + code_print, + console, + logger, + lsp_log, + progress_bar, + subagent_log_optimization_result, +) from codeflash.code_utils import env_utils from codeflash.code_utils.code_utils import ( choose_weights, @@ -51,33 +58,17 @@ EffortLevel, get_effort_value, ) -from codeflash.code_utils.deduplicate_code import normalize_code from codeflash.code_utils.env_utils import get_pr_number from codeflash.code_utils.formatter import format_code, format_generated_code, sort_imports from codeflash.code_utils.git_utils import git_root_dir -from codeflash.code_utils.instrument_existing_tests import inject_profiling_into_existing_test from codeflash.code_utils.shell_utils import make_env_with_project_root from codeflash.code_utils.time_utils import humanize_runtime from codeflash.discovery.functions_to_optimize import was_function_previously_optimized from codeflash.either import Failure, Success, is_successful -from codeflash.languages import is_java, is_python from codeflash.languages.base import Language from codeflash.languages.current import current_language_support from codeflash.languages.javascript.test_runner import clear_created_config_files, get_created_config_files -from codeflash.languages.python.context import code_context_extractor -from codeflash.languages.python.context.unused_definition_remover import ( - detect_unused_helper_functions, - revert_unused_helper_functions, -) -from codeflash.languages.python.static_analysis.code_extractor import get_opt_review_metrics, is_numerical_code -from codeflash.languages.python.static_analysis.code_replacer import ( - add_custom_marker_to_all_tests, - modify_autouse_fixture, - replace_function_definitions_in_module, -) -from codeflash.languages.python.static_analysis.line_profile_utils import add_decorator_imports, contains_jit_decorator -from codeflash.languages.python.static_analysis.static_analysis import get_first_top_level_function_or_method_ast -from codeflash.lsp.helpers import 
is_LSP_enabled, report_to_markdown_table, tree_to_markdown +from codeflash.lsp.helpers import is_LSP_enabled, is_subagent_mode, report_to_markdown_table, tree_to_markdown from codeflash.lsp.lsp_message import LspCodeMessage, LspMarkdownMessage, LSPMessageId from codeflash.models.ExperimentMetadata import ExperimentMetadata from codeflash.models.models import ( @@ -86,7 +77,6 @@ AIServiceCodeRepairRequest, BestOptimization, CandidateEvaluationContext, - CodeOptimizationContext, GeneratedTests, GeneratedTestsList, OptimizationReviewResult, @@ -113,27 +103,23 @@ ) from codeflash.result.explanation import Explanation from codeflash.telemetry.posthog_cf import ph -from codeflash.verification.concolic_testing import generate_concolic_tests from codeflash.verification.equivalence import compare_test_results -from codeflash.verification.instrument_codeflash_capture import instrument_codeflash_capture -from codeflash.verification.parse_line_profile_test_output import parse_line_profile_results -from codeflash.verification.parse_test_output import ( - calculate_function_throughput_from_test_results, - parse_concurrency_metrics, - parse_test_results, -) -from codeflash.verification.test_runner import run_behavioral_tests, run_benchmarking_tests, run_line_profile_tests +from codeflash.verification.parse_test_output import parse_concurrency_metrics, parse_test_results from codeflash.verification.verification_utils import get_test_file_path from codeflash.verification.verifier import generate_tests if TYPE_CHECKING: + import ast from argparse import Namespace + from typing import Any from codeflash.discovery.functions_to_optimize import FunctionToOptimize from codeflash.either import Result from codeflash.languages.base import DependencyResolver + from codeflash.models.function_types import FunctionParent from codeflash.models.models import ( BenchmarkKey, + CodeOptimizationContext, CodeStringsMarkdown, ConcurrencyMetrics, CoverageData, @@ -144,70 +130,9 @@ from 
codeflash.verification.verification_utils import TestConfig -def log_code_after_replacement(file_path: Path, candidate_index: int) -> None: - """Log the full file content after code replacement in verbose mode.""" - if not DEBUG_MODE: - return - - try: - code = file_path.read_text(encoding="utf-8") - lang_map = {".java": "java", ".py": "python", ".js": "javascript", ".ts": "typescript"} - language = lang_map.get(file_path.suffix.lower(), "text") - - console.print( - Panel( - Syntax(code, language, line_numbers=True, theme="monokai", word_wrap=True), - title=f"[bold blue]Code After Replacement (Candidate {candidate_index})[/] [dim]({file_path.name})[/]", - border_style="blue", - ) - ) - except Exception as e: - logger.debug(f"Failed to log code after replacement: {e}") - - -def log_instrumented_test(test_source: str, test_name: str, test_type: str, language: str) -> None: - """Log instrumented test code in verbose mode.""" - if not DEBUG_MODE: - return - - display_source = test_source - if len(test_source) > 15000: - display_source = test_source[:15000] + "\n\n... [truncated] ..." 
- - console.print( - Panel( - Syntax(display_source, language, line_numbers=True, theme="monokai", word_wrap=True), - title=f"[bold magenta]Instrumented Test: {test_name}[/] [dim]({test_type})[/]", - border_style="magenta", - ) - ) - - -def log_test_run_output(stdout: str, stderr: str, test_type: str, returncode: int = 0) -> None: - """Log test run stdout/stderr in verbose mode.""" - if not DEBUG_MODE: - return - - max_len = 10000 - - if stdout and stdout.strip(): - display_stdout = stdout[:max_len] + ("...[truncated]" if len(stdout) > max_len else "") - console.print( - Panel( - display_stdout, - title=f"[bold green]{test_type} - stdout[/] [dim](exit: {returncode})[/]", - border_style="green" if returncode == 0 else "red", - ) - ) - - if stderr and stderr.strip(): - display_stderr = stderr[:max_len] + ("...[truncated]" if len(stderr) > max_len else "") - console.print(Panel(display_stderr, title=f"[bold yellow]{test_type} - stderr[/]", border_style="yellow")) - - def log_optimization_context(function_name: str, code_context: CodeOptimizationContext) -> None: """Log optimization context details when in verbose mode using Rich formatting.""" - if not DEBUG_MODE: + if logger.getEffectiveLevel() > logging.DEBUG: return console.rule() @@ -512,14 +437,9 @@ def __init__( ) self.language_support = current_language_support() if not function_to_optimize_ast: - # Skip Python AST parsing for non-Python languages - if not is_python(): - self.function_to_optimize_ast = None - else: - original_module_ast = ast.parse(function_to_optimize_source_code) - self.function_to_optimize_ast = get_first_top_level_function_or_method_ast( - function_to_optimize.function_name, function_to_optimize.parents, original_module_ast - ) + self.function_to_optimize_ast = self._resolve_function_ast( + self.function_to_optimize_source_code, function_to_optimize.function_name, function_to_optimize.parents + ) else: self.function_to_optimize_ast = function_to_optimize_ast self.function_to_tests = 
function_to_tests if function_to_tests else {} @@ -556,6 +476,74 @@ def __init__( self.is_numerical_code: bool | None = None self.code_already_exists: bool = False + # --- Hooks for language-specific subclasses --- + + def _resolve_function_ast( + self, source_code: str, function_name: str, parents: list[FunctionParent] + ) -> ast.FunctionDef | ast.AsyncFunctionDef | None: + return None + + def requires_function_ast(self) -> bool: + return False + + def analyze_code_characteristics(self, code_context: CodeOptimizationContext) -> None: + pass + + def get_optimization_review_metrics( + self, + source_code: str, + file_path: Path, + qualified_name: str, + project_root: Path, + tests_root: Path, + language: Language, + ) -> str: + return "" + + def instrument_test_fixtures(self, test_paths: list[Path]) -> dict[Path, list[str]] | None: + return None + + def instrument_async_for_mode(self, mode: TestingMode) -> None: + pass + + def instrument_capture(self, file_path_to_helper_classes: dict[Path, set[str]]) -> None: + pass + + def should_check_coverage(self) -> bool: + return False + + def collect_async_metrics( + self, + benchmarking_results: TestResults, + code_context: CodeOptimizationContext, + helper_code: dict[Path, str], + test_env: dict[str, str], + ) -> tuple[int | None, ConcurrencyMetrics | None]: + return None, None + + def compare_candidate_results( + self, + baseline_results: OriginalCodeBaseline, + candidate_behavior_results: TestResults, + optimization_candidate_index: int, + ) -> tuple[bool, list[TestDiff]]: + return compare_test_results( + baseline_results.behavior_test_results, candidate_behavior_results, pass_fail_only=True + ) + + def should_skip_sqlite_cleanup(self, testing_type: TestingMode, optimization_iteration: int) -> bool: + return False + + def parse_line_profile_test_results( + self, line_profiler_output_file: Path | None + ) -> tuple[TestResults | dict, CoverageData | None]: + return TestResults(test_results=[]), None + + def 
fixup_generated_tests(self, generated_tests: GeneratedTestsList) -> GeneratedTestsList: + return generated_tests + + # --- End hooks --- + def can_be_optimized(self) -> Result[tuple[bool, CodeOptimizationContext, dict[Path, str]], str]: should_run_experiment = self.experiment_id is not None logger.info(f"!lsp|Function Trace ID: {self.function_trace_id}") @@ -656,50 +644,26 @@ def generate_and_instrument_tests( source_file_path=self.function_to_optimize.file_path, ) + generated_tests = self.fixup_generated_tests(generated_tests) + logger.debug(f"[PIPELINE] Processing {count_tests} generated tests") - used_behavior_paths: set[Path] = set() for i, generated_test in enumerate(generated_tests.generated_tests): - behavior_path = generated_test.behavior_file_path - perf_path = generated_test.perf_file_path - - # For Java, fix paths to match package structure - if is_java(): - behavior_path, perf_path, modified_behavior_source, modified_perf_source = self._fix_java_test_paths( - generated_test.instrumented_behavior_test_source, - generated_test.instrumented_perf_test_source, - used_behavior_paths, - ) - generated_test.behavior_file_path = behavior_path - generated_test.perf_file_path = perf_path - generated_test.instrumented_behavior_test_source = modified_behavior_source - generated_test.instrumented_perf_test_source = modified_perf_source - used_behavior_paths.add(behavior_path) - - logger.debug(f"[PIPELINE] Test {i + 1}: behavior_path={behavior_path}, perf_path={perf_path}") + logger.debug( + f"[PIPELINE] Test {i + 1}: behavior_path={generated_test.behavior_file_path}, perf_path={generated_test.perf_file_path}" + ) - with behavior_path.open("w", encoding="utf8") as f: + with generated_test.behavior_file_path.open("w", encoding="utf8") as f: f.write(generated_test.instrumented_behavior_test_source) - logger.debug(f"[PIPELINE] Wrote behavioral test to {behavior_path}") + logger.debug(f"[PIPELINE] Wrote behavioral test to {generated_test.behavior_file_path}") - # Verbose: 
Log instrumented behavior test - log_instrumented_test( - generated_test.instrumented_behavior_test_source, - behavior_path.name, - "Behavioral Test", - language=self.function_to_optimize.language, - ) + # Save perf test source for debugging + debug_file_path = get_run_tmp_file(Path("perf_test_debug.test.ts")) + with debug_file_path.open("w", encoding="utf-8") as debug_f: + debug_f.write(generated_test.instrumented_perf_test_source) - with perf_path.open("w", encoding="utf8") as f: + with generated_test.perf_file_path.open("w", encoding="utf8") as f: f.write(generated_test.instrumented_perf_test_source) - logger.debug(f"[PIPELINE] Wrote perf test to {perf_path}") - - # Verbose: Log instrumented performance test - log_instrumented_test( - generated_test.instrumented_perf_test_source, - perf_path.name, - "Performance Test", - language=self.function_to_optimize.language, - ) + logger.debug(f"[PIPELINE] Wrote perf test to {generated_test.perf_file_path}") # File paths are expected to be absolute - resolved at their source (CLI, TestConfig, etc.) 
test_file_obj = TestFile( @@ -714,11 +678,6 @@ def generate_and_instrument_tests( logger.debug( f"[PIPELINE] Added test file to collection: behavior={test_file_obj.instrumented_behavior_file_path}, perf={test_file_obj.benchmarking_file_path}" ) - logger.debug( - f"[REGISTER] TestFile added: behavior={test_file_obj.instrumented_behavior_file_path}, " - f"exists={test_file_obj.instrumented_behavior_file_path.exists()}, " - f"original={test_file_obj.original_file_path}, test_type={test_file_obj.test_type}" - ) logger.info(f"Generated test {i + 1}/{count_tests}:") # Use correct extension based on language @@ -740,10 +699,7 @@ def generate_and_instrument_tests( original_conftest_content = None if self.args.override_fixtures: - logger.info("Disabling all autouse fixtures associated with the generated test files") - original_conftest_content = modify_autouse_fixture(generated_test_paths + generated_perf_test_paths) - logger.info("Add custom marker to generated test files") - add_custom_marker_to_all_tests(generated_test_paths + generated_perf_test_paths) + original_conftest_content = self.instrument_test_fixtures(generated_test_paths + generated_perf_test_paths) return Success( ( @@ -757,213 +713,13 @@ def generate_and_instrument_tests( ) ) - def _get_java_sources_root(self) -> Path: - """Get the Java sources root directory for test files. - - For Java projects, tests_root might include the package path - (e.g., test/src/com/aerospike/test). We need to find the base directory - that should contain the package directories, not the tests_root itself. - - This method looks for standard Java package prefixes (com, org, net, io, edu, gov) - in the tests_root path and returns everything before that prefix. - - Returns: - Path to the Java sources root directory. 
- - """ - tests_root = self.test_cfg.tests_root - parts = tests_root.parts - - # Check if tests_root already ends with "src" (already a Java sources root) - if tests_root.name == "src": - logger.debug(f"[JAVA] tests_root already ends with 'src': {tests_root}") - logger.debug(f"[JAVA-ROOT] Returning Java sources root: {tests_root}, tests_root was: {tests_root}") - return tests_root - - # Check if tests_root already ends with src/test/java (Maven-standard) - if len(parts) >= 3 and parts[-3:] == ("src", "test", "java"): - logger.debug(f"[JAVA] tests_root already is Maven-standard test directory: {tests_root}") - logger.debug(f"[JAVA-ROOT] Returning Java sources root: {tests_root}, tests_root was: {tests_root}") - return tests_root - - # Check for simple "src" subdirectory (handles test/src, test-module/src, etc.) - src_subdir = tests_root / "src" - if src_subdir.exists() and src_subdir.is_dir(): - logger.debug(f"[JAVA] Found 'src' subdirectory: {src_subdir}") - logger.debug(f"[JAVA-ROOT] Returning Java sources root: {src_subdir}, tests_root was: {tests_root}") - return src_subdir - - # Check for Maven-standard src/test/java structure as subdirectory - maven_test_dir = tests_root / "src" / "test" / "java" - if maven_test_dir.exists() and maven_test_dir.is_dir(): - logger.debug(f"[JAVA] Found Maven-standard test directory as subdirectory: {maven_test_dir}") - logger.debug(f"[JAVA-ROOT] Returning Java sources root: {maven_test_dir}, tests_root was: {tests_root}") - return maven_test_dir - - # Look for standard Java package prefixes that indicate the start of package structure - standard_package_prefixes = ("com", "org", "net", "io", "edu", "gov") - - for i, part in enumerate(parts): - if part in standard_package_prefixes: - # Found start of package path, return everything before it - if i > 0: - java_sources_root = Path(*parts[:i]) - logger.debug( - f"[JAVA] Detected Java sources root: {java_sources_root} (from tests_root: {tests_root})" - ) - logger.debug( - 
f"[JAVA-ROOT] Returning Java sources root: {java_sources_root}, tests_root was: {tests_root}" - ) - return java_sources_root - - # If no standard package prefix found, check if there's a 'java' directory - # (standard Maven structure: src/test/java) - for i, part in enumerate(parts): - if part == "java" and i > 0: - # Return up to and including 'java' - java_sources_root = Path(*parts[: i + 1]) - logger.debug(f"[JAVA] Detected Maven-style Java sources root: {java_sources_root}") - logger.debug( - f"[JAVA-ROOT] Returning Java sources root: {java_sources_root}, tests_root was: {tests_root}" - ) - return java_sources_root - - # Default: return tests_root as-is (original behavior) - logger.debug(f"[JAVA] Using tests_root as Java sources root: {tests_root}") - logger.debug(f"[JAVA-ROOT] Returning Java sources root: {tests_root}, tests_root was: {tests_root}") - return tests_root - - def _fix_java_test_paths( - self, behavior_source: str, perf_source: str, used_paths: set[Path] - ) -> tuple[Path, Path, str, str]: - """Fix Java test file paths to match package structure. - - Java requires test files to be in directories matching their package. - This method extracts the package and class from the generated tests - and returns correct paths. If the path would conflict with an already - used path, it renames the class by adding an index suffix. - - Args: - behavior_source: Source code of the behavior test. - perf_source: Source code of the performance test. - used_paths: Set of already used behavior file paths. - - Returns: - Tuple of (behavior_path, perf_path, modified_behavior_source, modified_perf_source) - with correct package structure and unique class names. 
- - """ - import re - - # Extract package from behavior source - package_match = re.search(r"^\s*package\s+([\w.]+)\s*;", behavior_source, re.MULTILINE) - package_name = package_match.group(1) if package_match else "" - - # JPMS: If a test module-info.java exists, remap the package to the - # test module namespace to avoid split-package errors. - # E.g., io.questdb.cairo -> io.questdb.test.cairo - test_dir = self._get_java_sources_root() - test_module_info = test_dir / "module-info.java" - if package_name and test_module_info.exists(): - mi_content = test_module_info.read_text() - mi_match = re.search(r"module\s+([\w.]+)", mi_content) - if mi_match: - test_module_name = mi_match.group(1) - main_dir = test_dir.parent.parent / "main" / "java" - main_module_info = main_dir / "module-info.java" - if main_module_info.exists(): - main_content = main_module_info.read_text() - main_match = re.search(r"module\s+([\w.]+)", main_content) - if main_match: - main_module_name = main_match.group(1) - if package_name.startswith(main_module_name): - suffix = package_name[len(main_module_name) :] - new_package = test_module_name + suffix - old_decl = f"package {package_name};" - new_decl = f"package {new_package};" - behavior_source = behavior_source.replace(old_decl, new_decl, 1) - perf_source = perf_source.replace(old_decl, new_decl, 1) - package_name = new_package - logger.debug(f"[JPMS] Remapped package: {old_decl} -> {new_decl}") - - # Extract class name from behavior source - # Use more specific pattern to avoid matching words like "command" or text in comments - class_match = re.search(r"^(?:public\s+)?class\s+(\w+)", behavior_source, re.MULTILINE) - behavior_class = class_match.group(1) if class_match else "GeneratedTest" - - # Extract class name from perf source - perf_class_match = re.search(r"^(?:public\s+)?class\s+(\w+)", perf_source, re.MULTILINE) - perf_class = perf_class_match.group(1) if perf_class_match else "GeneratedPerfTest" - - # Build paths with package 
structure - # Use the Java sources root, not tests_root, to avoid path duplication - # when tests_root already includes the package path - test_dir = self._get_java_sources_root() - - if package_name: - package_path = package_name.replace(".", "/") - behavior_path = test_dir / package_path / f"{behavior_class}.java" - perf_path = test_dir / package_path / f"{perf_class}.java" - else: - package_path = "" - behavior_path = test_dir / f"{behavior_class}.java" - perf_path = test_dir / f"{perf_class}.java" - - # If path already used, rename class by adding index suffix - modified_behavior_source = behavior_source - modified_perf_source = perf_source - if behavior_path in used_paths: - # Find a unique index - index = 2 - while True: - new_behavior_class = f"{behavior_class}_{index}" - new_perf_class = f"{perf_class}_{index}" - if package_path: - new_behavior_path = test_dir / package_path / f"{new_behavior_class}.java" - new_perf_path = test_dir / package_path / f"{new_perf_class}.java" - else: - new_behavior_path = test_dir / f"{new_behavior_class}.java" - new_perf_path = test_dir / f"{new_perf_class}.java" - if new_behavior_path not in used_paths: - behavior_path = new_behavior_path - perf_path = new_perf_path - # Rename class in source code - replace the class declaration - modified_behavior_source = re.sub( - rf"^((?:public\s+)?class\s+){re.escape(behavior_class)}(\b)", - rf"\g<1>{new_behavior_class}\g<2>", - behavior_source, - count=1, - flags=re.MULTILINE, - ) - modified_perf_source = re.sub( - rf"^((?:public\s+)?class\s+){re.escape(perf_class)}(\b)", - rf"\g<1>{new_perf_class}\g<2>", - perf_source, - count=1, - flags=re.MULTILINE, - ) - logger.debug(f"[JAVA] Renamed duplicate test class from {behavior_class} to {new_behavior_class}") - break - index += 1 - - # Create directories if needed - behavior_path.parent.mkdir(parents=True, exist_ok=True) - perf_path.parent.mkdir(parents=True, exist_ok=True) - - logger.debug(f"[JAVA] Fixed paths: behavior={behavior_path}, 
perf={perf_path}") - logger.debug( - f"[WRITE-PATH] Writing test to behavior_path={behavior_path}, perf_path={perf_path}, " - f"package={package_name}, behavior_class={behavior_class}, perf_class={perf_class}" - ) - return behavior_path, perf_path, modified_behavior_source, modified_perf_source - # note: this isn't called by the lsp, only called by cli def optimize_function(self) -> Result[BestOptimization, str]: initialization_result = self.can_be_optimized() if not is_successful(initialization_result): return Failure(initialization_result.failure()) should_run_experiment, code_context, original_helper_code = initialization_result.unwrap() - self.is_numerical_code = is_numerical_code(code_string=code_context.read_writable_code.flat) + self.analyze_code_characteristics(code_context) code_print( code_context.read_writable_code.flat, file_name=self.function_to_optimize.file_path, @@ -1213,7 +969,9 @@ def select_best_optimization( runtimes_list = [] for valid_opt in eval_ctx.valid_optimizations: - valid_opt_normalized_code = normalize_code(valid_opt.candidate.source_code.flat.strip()) + valid_opt_normalized_code = self.language_support.normalize_code( + valid_opt.candidate.source_code.flat.strip() + ) new_candidate_with_shorter_code = OptimizedCandidate( source_code=eval_ctx.ast_code_to_id[valid_opt_normalized_code]["shorter_source_code"], optimization_id=valid_opt.candidate.optimization_id, @@ -1307,6 +1065,7 @@ def process_single_candidate( eval_ctx: CandidateEvaluationContext, exp_type: str, function_references: str, + normalized_original: str, ) -> BestOptimization | None: """Process a single optimization candidate. 
@@ -1317,8 +1076,24 @@ def process_single_candidate( get_run_tmp_file(Path(f"test_return_values_{candidate_index}.bin")).unlink(missing_ok=True) get_run_tmp_file(Path(f"test_return_values_{candidate_index}.sqlite")).unlink(missing_ok=True) - logger.info(f"h3|Optimization candidate {candidate_index}/{total_candidates}:") candidate = candidate_node.candidate + + normalized_code = self.language_support.normalize_code(candidate.source_code.flat.strip()) + + if normalized_code == normalized_original: + logger.info(f"h3|Candidate {candidate_index}/{total_candidates}: Identical to original code, skipping.") + console.rule() + return None + + if normalized_code in eval_ctx.ast_code_to_id: + logger.info( + f"h3|Candidate {candidate_index}/{total_candidates}: Duplicate of a previous candidate, skipping." + ) + eval_ctx.handle_duplicate_candidate(candidate, normalized_code, code_context) + console.rule() + return None + + logger.info(f"h3|Optimization candidate {candidate_index}/{total_candidates}:") # Use correct extension based on language ext = self.language_support.file_extensions[0] code_print( @@ -1339,9 +1114,6 @@ def process_single_candidate( logger.info("No functions were replaced in the optimized code. 
Skipping optimization candidate.") console.rule() return None - - # Verbose: Log code after replacement - log_code_after_replacement(self.function_to_optimize.file_path, candidate_index) except (ValueError, SyntaxError, cst.ParserSyntaxError, AttributeError) as e: logger.error(e) self.write_code_and_helpers( @@ -1349,13 +1121,6 @@ def process_single_candidate( ) return None - # Check for duplicate candidates - normalized_code = normalize_code(candidate.source_code.flat.strip()) - if normalized_code in eval_ctx.ast_code_to_id: - logger.info("Current candidate has been encountered before in testing, Skipping optimization candidate.") - eval_ctx.handle_duplicate_candidate(candidate, normalized_code, code_context) - return None - eval_ctx.register_new_candidate(normalized_code, candidate, code_context) # Run the optimized candidate @@ -1525,6 +1290,7 @@ def determine_best_candidate( self.future_adaptive_optimizations, ) candidate_index = 0 + normalized_original = self.language_support.normalize_code(code_context.read_writable_code.flat.strip()) # Process candidates using queue-based approach while not processor.is_done(): @@ -1546,6 +1312,7 @@ def determine_best_candidate( eval_ctx=eval_ctx, exp_type=exp_type, function_references=function_references, + normalized_original=normalized_original, ) except KeyboardInterrupt as e: logger.exception(f"Optimization interrupted: {e}") @@ -1643,6 +1410,8 @@ def repair_optimization( def log_successful_optimization( self, explanation: Explanation, generated_tests: GeneratedTestsList, exp_type: str ) -> None: + if is_subagent_mode(): + return if is_LSP_enabled(): md_lines = [ "### ⚡️ Optimization Summary", @@ -1766,60 +1535,24 @@ def reformat_code_and_helpers( return new_code, new_helper_code + def group_functions_by_file(self, code_context: CodeOptimizationContext) -> dict[Path, set[str]]: + functions_by_file: dict[Path, set[str]] = defaultdict(set) + 
functions_by_file[self.function_to_optimize.file_path].add(self.function_to_optimize.qualified_name) + for helper in code_context.helper_functions: + if helper.definition_type != "class": + functions_by_file[helper.file_path].add(helper.qualified_name) + return functions_by_file + def replace_function_and_helpers_with_optimized_code( self, code_context: CodeOptimizationContext, optimized_code: CodeStringsMarkdown, original_helper_code: dict[Path, str], ) -> bool: - did_update = False - read_writable_functions_by_file_path = defaultdict(set) - read_writable_functions_by_file_path[self.function_to_optimize.file_path].add( - self.function_to_optimize.qualified_name - ) - for helper_function in code_context.helper_functions: - # Skip class definitions (definition_type may be None for non-Python languages) - if helper_function.definition_type != "class": - read_writable_functions_by_file_path[helper_function.file_path].add(helper_function.qualified_name) - for module_abspath, qualified_names in read_writable_functions_by_file_path.items(): - # Pass function_to_optimize for the main file to enable precise overload matching - func_to_opt = self.function_to_optimize if module_abspath == self.function_to_optimize.file_path else None - did_update |= replace_function_definitions_in_module( - function_names=list(qualified_names), - optimized_code=optimized_code, - module_abspath=module_abspath, - preexisting_objects=code_context.preexisting_objects, - project_root_path=self.project_root, - function_to_optimize=func_to_opt, - ) - unused_helpers = detect_unused_helper_functions(self.function_to_optimize, code_context, optimized_code) - - # Revert unused helper functions to their original definitions - if unused_helpers: - revert_unused_helper_functions(self.project_root, unused_helpers, original_helper_code) - - return did_update + raise NotImplementedError def get_code_optimization_context(self) -> Result[CodeOptimizationContext, str]: - try: - new_code_ctx = 
code_context_extractor.get_code_optimization_context( - self.function_to_optimize, self.project_root, call_graph=self.call_graph - ) - except ValueError as e: - return Failure(str(e)) - - return Success( - CodeOptimizationContext( - testgen_context=new_code_ctx.testgen_context, - read_writable_code=new_code_ctx.read_writable_code, - read_only_context_code=new_code_ctx.read_only_context_code, - hashing_code_context=new_code_ctx.hashing_code_context, - hashing_code_context_hash=new_code_ctx.hashing_code_context_hash, - helper_functions=new_code_ctx.helper_functions, - testgen_helper_fqns=new_code_ctx.testgen_helper_fqns, - preexisting_objects=new_code_ctx.preexisting_objects, - ) - ) + raise NotImplementedError @staticmethod def cleanup_leftover_test_return_values() -> None: @@ -1836,224 +1569,117 @@ def instrument_existing_tests(self, function_to_all_tests: dict[str, set[Functio func_qualname = self.function_to_optimize.qualified_name_with_modules_from_root(self.project_root) if func_qualname not in function_to_all_tests: logger.info(f"Did not find any pre-existing tests for '{func_qualname}', will only use generated tests.") - # Handle non-Python existing test instrumentation - elif not is_python(): - test_file_invocation_positions = defaultdict(list) - for tests_in_file in function_to_all_tests.get(func_qualname): - test_file_invocation_positions[ - (tests_in_file.tests_in_file.test_file, tests_in_file.tests_in_file.test_type) - ].append(tests_in_file) - - for (test_file, test_type), tests_in_file_list in test_file_invocation_positions.items(): - path_obj_test_file = Path(test_file) - if test_type == TestType.EXISTING_UNIT_TEST: - existing_test_files_count += 1 - elif test_type == TestType.REPLAY_TEST: - replay_test_files_count += 1 - elif test_type == TestType.CONCOLIC_COVERAGE_TEST: - concolic_coverage_test_files_count += 1 - else: - msg = f"Unexpected test type: {test_type}" - raise ValueError(msg) - - # Use language-specific instrumentation - # Read the test 
file first - with path_obj_test_file.open("r", encoding="utf8") as f: - original_test_source = f.read() - - success, injected_behavior_test = self.language_support.instrument_existing_test( - test_string=original_test_source, - call_positions=[test.position for test in tests_in_file_list], - function_to_optimize=self.function_to_optimize, - tests_project_root=self.test_cfg.tests_project_rootdir, - mode="behavior", - test_path=path_obj_test_file, - ) - if not success: - logger.debug(f"Failed to instrument test file {test_file} for behavior testing") - continue + return unique_instrumented_test_files + + test_file_invocation_positions = defaultdict(list) + for tests_in_file in function_to_all_tests.get(func_qualname): + test_file_invocation_positions[ + (tests_in_file.tests_in_file.test_file, tests_in_file.tests_in_file.test_type) + ].append(tests_in_file) + + for test_file, test_type in test_file_invocation_positions: + path_obj_test_file = Path(test_file) + if test_type == TestType.EXISTING_UNIT_TEST: + existing_test_files_count += 1 + elif test_type == TestType.REPLAY_TEST: + replay_test_files_count += 1 + elif test_type == TestType.CONCOLIC_COVERAGE_TEST: + concolic_coverage_test_files_count += 1 + else: + msg = f"Unexpected test type: {test_type}" + raise ValueError(msg) - success, injected_perf_test = self.language_support.instrument_existing_test( - test_string=original_test_source, - call_positions=[test.position for test in tests_in_file_list], - function_to_optimize=self.function_to_optimize, - tests_project_root=self.test_cfg.tests_project_rootdir, - mode="performance", - test_path=path_obj_test_file, - ) - if not success: - logger.debug(f"Failed to instrument test file {test_file} for performance testing") - continue + if existing_test_files_count > 0 or replay_test_files_count > 0 or concolic_coverage_test_files_count > 0: + logger.info( + f"Discovered {existing_test_files_count} existing unit test file" + f"{'s' if existing_test_files_count != 1 else 
''}, {replay_test_files_count} replay test file" + f"{'s' if replay_test_files_count != 1 else ''}, and " + f"{concolic_coverage_test_files_count} concolic coverage test file" + f"{'s' if concolic_coverage_test_files_count != 1 else ''} for {func_qualname}" + ) + console.rule() - # Generate instrumented test file paths - # For JS/TS, preserve .test.ts or .spec.ts suffix for Jest pattern matching - def get_instrumented_path(original_path: str, suffix: str) -> Path: - """Generate instrumented test file path preserving .test/.spec pattern.""" - path_obj = Path(original_path) - stem = path_obj.stem # e.g., "fibonacci.test" - ext = path_obj.suffix # e.g., ".ts" - - # Check for .test or .spec in stem (JS/TS pattern) - if ".test" in stem: - # fibonacci.test -> fibonacci__suffix.test - base, _ = stem.rsplit(".test", 1) - new_stem = f"{base}{suffix}.test" - elif ".spec" in stem: - base, _ = stem.rsplit(".spec", 1) - new_stem = f"{base}{suffix}.spec" - else: - # Default Python-style: add suffix before extension - new_stem = f"{stem}{suffix}" - - return path_obj.parent / f"{new_stem}{ext}" - - # Use distinct suffixes for existing tests to avoid collisions - # with generated test paths (which use __perfinstrumented / __perfonlyinstrumented) - new_behavioral_test_path = get_instrumented_path(test_file, "__existing_perfinstrumented") - new_perf_test_path = get_instrumented_path(test_file, "__existing_perfonlyinstrumented") - - # For Java, the class name inside the file must match the file name. - # instrument_existing_test() renames to __perfinstrumented, but we use - # __existing_perfinstrumented for file paths, so fix the class name. 
- if is_java(): - if injected_behavior_test is not None: - injected_behavior_test = injected_behavior_test.replace( - "__perfinstrumented", "__existing_perfinstrumented" - ) - if injected_perf_test is not None: - injected_perf_test = injected_perf_test.replace( - "__perfonlyinstrumented", "__existing_perfonlyinstrumented" - ) + for (test_file, test_type), tests_in_file_list in test_file_invocation_positions.items(): + path_obj_test_file = Path(test_file) + test_string = path_obj_test_file.read_text(encoding="utf-8") + # Use language-specific instrumentation + success, injected_behavior_test = self.language_support.instrument_existing_test( + test_string=test_string, + call_positions=[test.position for test in tests_in_file_list], + function_to_optimize=self.function_to_optimize, + tests_project_root=self.test_cfg.tests_project_rootdir, + mode="behavior", + test_path=path_obj_test_file, + ) + if not success: + logger.debug(f"Failed to instrument test file {test_file} for behavior testing") + continue - if injected_behavior_test is not None: - with new_behavioral_test_path.open("w", encoding="utf8") as _f: - _f.write(injected_behavior_test) - logger.debug(f"[PIPELINE] Wrote instrumented behavior test to {new_behavioral_test_path}") - - # Verbose: Log instrumented existing behavior test - log_instrumented_test( - injected_behavior_test, - new_behavioral_test_path.name, - "Existing Behavioral Test", - language=self.function_to_optimize.language, - ) + success, injected_perf_test = self.language_support.instrument_existing_test( + test_string=test_string, + call_positions=[test.position for test in tests_in_file_list], + function_to_optimize=self.function_to_optimize, + tests_project_root=self.test_cfg.tests_project_rootdir, + mode="performance", + test_path=path_obj_test_file, + ) + if not success: + logger.debug(f"Failed to instrument test file {test_file} for performance testing") + continue + + # For JS/TS, preserve .test.ts or .spec.ts suffix for Jest pattern 
matching + def get_instrumented_path(original_path: str, suffix: str) -> Path: + path_obj = Path(original_path) + stem = path_obj.stem + ext = path_obj.suffix + + if ".test" in stem: + base, _ = stem.rsplit(".test", 1) + new_stem = f"{base}{suffix}.test" + elif ".spec" in stem: + base, _ = stem.rsplit(".spec", 1) + new_stem = f"{base}{suffix}.spec" else: - msg = "injected_behavior_test is None" - raise ValueError(msg) - - if injected_perf_test is not None: - with new_perf_test_path.open("w", encoding="utf8") as _f: - _f.write(injected_perf_test) - logger.debug(f"[PIPELINE] Wrote instrumented perf test to {new_perf_test_path}") - - # Verbose: Log instrumented existing performance test - log_instrumented_test( - injected_perf_test, - new_perf_test_path.name, - "Existing Performance Test", - language=self.function_to_optimize.language, - ) + new_stem = f"{stem}{suffix}" - unique_instrumented_test_files.add(new_behavioral_test_path) - unique_instrumented_test_files.add(new_perf_test_path) - - if not self.test_files.get_by_original_file_path(path_obj_test_file): - self.test_files.add( - TestFile( - instrumented_behavior_file_path=new_behavioral_test_path, - benchmarking_file_path=new_perf_test_path, - original_source=None, - original_file_path=Path(test_file), - test_type=test_type, - tests_in_file=[t.tests_in_file for t in tests_in_file_list], - ) - ) + return path_obj.parent / f"{new_stem}{ext}" - if existing_test_files_count > 0 or replay_test_files_count > 0 or concolic_coverage_test_files_count > 0: - logger.info( - f"Instrumented {existing_test_files_count} existing unit test file" - f"{'s' if existing_test_files_count != 1 else ''}, {replay_test_files_count} replay test file" - f"{'s' if replay_test_files_count != 1 else ''}, and " - f"{concolic_coverage_test_files_count} concolic coverage test file" - f"{'s' if concolic_coverage_test_files_count != 1 else ''} for {func_qualname}" - ) - console.rule() - else: - test_file_invocation_positions = defaultdict(list) - 
for tests_in_file in function_to_all_tests.get(func_qualname): - test_file_invocation_positions[ - (tests_in_file.tests_in_file.test_file, tests_in_file.tests_in_file.test_type) - ].append(tests_in_file) - for (test_file, test_type), tests_in_file_list in test_file_invocation_positions.items(): - path_obj_test_file = Path(test_file) - if test_type == TestType.EXISTING_UNIT_TEST: - existing_test_files_count += 1 - elif test_type == TestType.REPLAY_TEST: - replay_test_files_count += 1 - elif test_type == TestType.CONCOLIC_COVERAGE_TEST: - concolic_coverage_test_files_count += 1 - else: - msg = f"Unexpected test type: {test_type}" - raise ValueError(msg) - test_string = path_obj_test_file.read_text(encoding="utf-8") - success, injected_behavior_test = inject_profiling_into_existing_test( - test_string=test_string, - mode=TestingMode.BEHAVIOR, - test_path=path_obj_test_file, - call_positions=[test.position for test in tests_in_file_list], - function_to_optimize=self.function_to_optimize, - tests_project_root=self.test_cfg.tests_project_rootdir, - ) - if not success: - continue - success, injected_perf_test = inject_profiling_into_existing_test( - test_string=test_string, - mode=TestingMode.PERFORMANCE, - test_path=path_obj_test_file, - call_positions=[test.position for test in tests_in_file_list], - function_to_optimize=self.function_to_optimize, - tests_project_root=self.test_cfg.tests_project_rootdir, - ) - if not success: - continue - # TODO: this naming logic should be moved to a function and made more standard - new_behavioral_test_path = Path( - f"{os.path.splitext(test_file)[0]}__perfinstrumented{os.path.splitext(test_file)[1]}" # noqa: PTH122 - ) - new_perf_test_path = Path( - f"{os.path.splitext(test_file)[0]}__perfonlyinstrumented{os.path.splitext(test_file)[1]}" # noqa: PTH122 - ) - if injected_behavior_test is not None: - with new_behavioral_test_path.open("w", encoding="utf8") as _f: - _f.write(injected_behavior_test) - else: - msg = 
"injected_behavior_test is None" - raise ValueError(msg) - if injected_perf_test is not None: - with new_perf_test_path.open("w", encoding="utf8") as _f: - _f.write(injected_perf_test) - - unique_instrumented_test_files.add(new_behavioral_test_path) - unique_instrumented_test_files.add(new_perf_test_path) - - if not self.test_files.get_by_original_file_path(path_obj_test_file): - self.test_files.add( - TestFile( - instrumented_behavior_file_path=new_behavioral_test_path, - benchmarking_file_path=new_perf_test_path, - original_source=None, - original_file_path=Path(test_file), - test_type=test_type, - tests_in_file=[t.tests_in_file for t in tests_in_file_list], - ) + new_behavioral_test_path = get_instrumented_path(test_file, "__perfinstrumented") + new_perf_test_path = get_instrumented_path(test_file, "__perfonlyinstrumented") + + if injected_behavior_test is not None: + with new_behavioral_test_path.open("w", encoding="utf8") as _f: + _f.write(injected_behavior_test) + logger.debug(f"[PIPELINE] Wrote instrumented behavior test to {new_behavioral_test_path}") + else: + msg = "injected_behavior_test is None" + raise ValueError(msg) + + if injected_perf_test is not None: + with new_perf_test_path.open("w", encoding="utf8") as _f: + _f.write(injected_perf_test) + logger.debug(f"[PIPELINE] Wrote instrumented perf test to {new_perf_test_path}") + + unique_instrumented_test_files.add(new_behavioral_test_path) + unique_instrumented_test_files.add(new_perf_test_path) + + if not self.test_files.get_by_original_file_path(path_obj_test_file): + self.test_files.add( + TestFile( + instrumented_behavior_file_path=new_behavioral_test_path, + benchmarking_file_path=new_perf_test_path, + original_source=None, + original_file_path=Path(test_file), + test_type=test_type, + tests_in_file=[t.tests_in_file for t in tests_in_file_list], ) + ) + instrumented_count = len(unique_instrumented_test_files) // 2 # each test produces behavior + perf files + if instrumented_count > 0: 
logger.info( - f"Discovered {existing_test_files_count} existing unit test file" - f"{'s' if existing_test_files_count != 1 else ''}, {replay_test_files_count} replay test file" - f"{'s' if replay_test_files_count != 1 else ''}, and " - f"{concolic_coverage_test_files_count} concolic coverage test file" - f"{'s' if concolic_coverage_test_files_count != 1 else ''} for {func_qualname}" + f"Instrumented {instrumented_count} existing unit test file" + f"{'s' if instrumented_count != 1 else ''} for {func_qualname}" ) console.rule() return unique_instrumented_test_files @@ -2076,14 +1702,24 @@ def generate_tests( self.executor, testgen_context.markdown, helper_fqns, generated_test_paths, generated_perf_test_paths ) - future_concolic_tests = self.executor.submit( - generate_concolic_tests, self.test_cfg, self.args, self.function_to_optimize, self.function_to_optimize_ast - ) + if is_subagent_mode(): + future_concolic_tests = None + else: + future_concolic_tests = self.executor.submit( + self.language_support.generate_concolic_tests, + self.test_cfg, + self.args.project_root, + self.function_to_optimize, + self.function_to_optimize_ast, + ) if not self.args.no_gen_tests: # Wait for test futures to complete - concurrent.futures.wait([*future_tests, future_concolic_tests]) - else: + futures_to_wait = [*future_tests] + if future_concolic_tests is not None: + futures_to_wait.append(future_concolic_tests) + concurrent.futures.wait(futures_to_wait) + elif future_concolic_tests is not None: concurrent.futures.wait([future_concolic_tests]) # Process test generation results tests: list[GeneratedTests] = [] @@ -2112,7 +1748,10 @@ def generate_tests( logger.warning(f"Failed to generate and instrument tests for {self.function_to_optimize.function_name}") return Failure(f"/!\\ NO TESTS GENERATED for {self.function_to_optimize.function_name}") - function_to_concolic_tests, concolic_test_str = future_concolic_tests.result() + if future_concolic_tests is not None: + 
function_to_concolic_tests, concolic_test_str = future_concolic_tests.result() + else: + function_to_concolic_tests, concolic_test_str = {}, None count_tests = len(tests) if concolic_test_str: count_tests += 1 @@ -2145,7 +1784,7 @@ def generate_optimizations( ) future_references = self.executor.submit( - get_opt_review_metrics, + self.get_optimization_review_metrics, self.function_to_optimize_source_code, self.function_to_optimize.file_path, self.function_to_optimize.qualified_name, @@ -2236,8 +1875,7 @@ def setup_and_establish_baseline( original_code_baseline, test_functions_to_remove = baseline_result.unwrap() # Check test quantity for all languages quantity_ok = quantity_of_tests_critic(original_code_baseline) - # TODO: {Self} Only check coverage for Python - coverage infrastructure not yet reliable for JS/TS - coverage_ok = coverage_critic(original_code_baseline.coverage_results) if is_python() else True + coverage_ok = coverage_critic(original_code_baseline.coverage_results) if self.should_check_coverage() else True if isinstance(original_code_baseline, OriginalCodeBaseline) and (not coverage_ok or not quantity_ok): if self.args.override_fixtures: restore_conftest(original_conftest_content) @@ -2443,7 +2081,6 @@ def process_review( if ( self.function_to_optimize.is_async - and is_python() and original_code_baseline.async_throughput is not None and best_optimization.async_throughput is not None ): @@ -2535,7 +2172,20 @@ def process_review( self.optimization_review = opt_review_result.review # Display the reviewer result to the user - if opt_review_result.review: + if is_subagent_mode(): + subagent_log_optimization_result( + function_name=new_explanation.function_name, + file_path=new_explanation.file_path, + perf_improvement_line=new_explanation.perf_improvement_line, + original_runtime_ns=new_explanation.original_runtime_ns, + best_runtime_ns=new_explanation.best_runtime_ns, + raw_explanation=new_explanation.raw_explanation_message, + 
original_code=original_code_combined, + new_code=new_code_combined, + review=opt_review_result.review, + test_results=new_explanation.winning_behavior_test_results, + ) + elif opt_review_result.review: review_display = { "high": ("[bold green]High[/bold green]", "green", "Recommended to merge"), "medium": ("[bold yellow]Medium[/bold yellow]", "yellow", "Review recommended before merging"), @@ -2634,25 +2284,13 @@ def establish_original_code_baseline( test_env = self.get_test_env(codeflash_loop_index=0, codeflash_test_iteration=0, codeflash_tracer_disable=1) - if self.function_to_optimize.is_async and is_python(): - from codeflash.code_utils.instrument_existing_tests import add_async_decorator_to_function - - success = add_async_decorator_to_function( - self.function_to_optimize.file_path, - self.function_to_optimize, - TestingMode.BEHAVIOR, - project_root=self.project_root, - ) + if self.function_to_optimize.is_async: + self.instrument_async_for_mode(TestingMode.BEHAVIOR) # Instrument codeflash capture with progress_bar("Running tests to establish original code behavior..."): try: - # Only instrument Python code here - non-Python languages use their own runtime helpers - # which are already included in the generated/instrumented tests - if is_python(): - instrument_codeflash_capture( - self.function_to_optimize, file_path_to_helper_classes, self.test_cfg.tests_root - ) + self.instrument_capture(file_path_to_helper_classes) total_looping_time = TOTAL_LOOPING_TIME_EFFECTIVE logger.debug(f"[PIPELINE] Establishing baseline with {len(self.test_files)} test files") @@ -2681,7 +2319,7 @@ def establish_original_code_baseline( console.rule() return Failure("Failed to establish a baseline for the original code - bevhavioral tests failed.") # Skip coverage check for non-Python languages (coverage not yet supported) - if is_python() and not coverage_critic(coverage_results): + if self.should_check_coverage() and not coverage_critic(coverage_results): did_pass_all_tests = 
all(result.did_pass for result in behavioral_results) if not did_pass_all_tests: return Failure("Tests failed to pass for the original code.") @@ -2702,15 +2340,8 @@ def establish_original_code_baseline( for idx, tf in enumerate(self.test_files.test_files): logger.debug(f"[BENCHMARK-FILES] Test file {idx}: perf_file={tf.benchmarking_file_path}") - if self.function_to_optimize.is_async and is_python(): - from codeflash.code_utils.instrument_existing_tests import add_async_decorator_to_function - - add_async_decorator_to_function( - self.function_to_optimize.file_path, - self.function_to_optimize, - TestingMode.PERFORMANCE, - project_root=self.project_root, - ) + if self.function_to_optimize.is_async: + self.instrument_async_for_mode(TestingMode.PERFORMANCE) try: benchmarking_results, _ = self.run_and_parse_tests( @@ -2755,10 +2386,6 @@ def establish_original_code_baseline( return Failure("Failed to establish a baseline for the original code.") loop_count = benchmarking_results.effective_loop_count() - # For Java performance mode, loop indices are 0-indexed (loopId = outerLoop * maxInner + inner) - # so we need to add 1 to get the correct count for display - if is_java(): - loop_count += 1 logger.info( f"h3|⌚ Original code summed runtime measured over '{loop_count}' loop{'s' if loop_count > 1 else ''}: " f"'{humanize_runtime(total_timing)}' per full loop" @@ -2766,22 +2393,9 @@ def establish_original_code_baseline( console.rule() logger.debug(f"Total original code runtime (ns): {total_timing}") - async_throughput = None - concurrency_metrics = None - if self.function_to_optimize.is_async and is_python(): - async_throughput = calculate_function_throughput_from_test_results( - benchmarking_results, self.function_to_optimize.function_name - ) - logger.debug(f"Original async function throughput: {async_throughput} calls/second") - - concurrency_metrics = self.run_concurrency_benchmark( - code_context=code_context, original_helper_code=original_helper_code, 
test_env=test_env - ) - if concurrency_metrics: - logger.debug( - f"Original concurrency metrics: ratio={concurrency_metrics.concurrency_ratio:.2f}, " - f"seq={concurrency_metrics.sequential_time_ns}ns, conc={concurrency_metrics.concurrent_time_ns}ns" - ) + async_throughput, concurrency_metrics = self.collect_async_metrics( + benchmarking_results, code_context, original_helper_code, test_env + ) if self.args.benchmark: replay_benchmarking_test_results = benchmarking_results.group_by_benchmarks( @@ -2883,22 +2497,11 @@ def run_optimized_candidate( candidate_helper_code = {} for module_abspath in original_helper_code: candidate_helper_code[module_abspath] = Path(module_abspath).read_text("utf-8") - if self.function_to_optimize.is_async and is_python(): - from codeflash.code_utils.instrument_existing_tests import add_async_decorator_to_function - - add_async_decorator_to_function( - self.function_to_optimize.file_path, - self.function_to_optimize, - TestingMode.BEHAVIOR, - project_root=self.project_root, - ) + if self.function_to_optimize.is_async: + self.instrument_async_for_mode(TestingMode.BEHAVIOR) try: - # Only instrument Python code here - non-Python languages use their own runtime helpers - if is_python(): - instrument_codeflash_capture( - self.function_to_optimize, file_path_to_helper_classes, self.test_cfg.tests_root - ) + self.instrument_capture(file_path_to_helper_classes) total_looping_time = TOTAL_LOOPING_TIME_EFFECTIVE candidate_behavior_results, _ = self.run_and_parse_tests( @@ -2909,13 +2512,10 @@ def run_optimized_candidate( testing_time=total_looping_time, enable_coverage=False, ) - # Remove instrumentation finally: - # Only restore code for Python - non-Python tests are self-contained - if is_python(): - self.write_code_and_helpers( - candidate_fto_code, candidate_helper_code, self.function_to_optimize.file_path - ) + self.write_code_and_helpers( + candidate_fto_code, candidate_helper_code, self.function_to_optimize.file_path + ) console.print( 
TestResults.report_to_tree( candidate_behavior_results.get_test_pass_fail_report_by_type(), @@ -2924,49 +2524,9 @@ def run_optimized_candidate( ) console.rule() - if not is_python(): - # Check if candidate had any passing behavioral tests before attempting SQLite comparison. - # Python compares in-memory TestResults (no file dependency), but Java/JS require - # SQLite files that only exist when test instrumentation hooks fire successfully. - candidate_report = candidate_behavior_results.get_test_pass_fail_report_by_type() - total_passed = sum(r.get("passed", 0) for r in candidate_report.values()) - if total_passed == 0: - logger.warning( - "No behavioral tests passed for optimization candidate %d. Skipping correctness verification.", - optimization_candidate_index, - ) - return self.get_results_not_matched_error() - - # Use language-appropriate comparison - if not is_python(): - # Non-Python: Compare using language support with SQLite results if available - original_sqlite = get_run_tmp_file(Path("test_return_values_0.sqlite")) - candidate_sqlite = get_run_tmp_file(Path(f"test_return_values_{optimization_candidate_index}.sqlite")) - - if original_sqlite.exists() and candidate_sqlite.exists(): - # Full comparison using captured return values via language support - # Use js_project_root where node_modules is located - js_root = self.test_cfg.js_project_root or self.args.project_root - match, diffs = self.language_support.compare_test_results( - original_sqlite, candidate_sqlite, project_root=js_root - ) - # Cleanup SQLite files after comparison - candidate_sqlite.unlink(missing_ok=True) - else: - # CORRECTNESS REQUIREMENT: SQLite files must exist for proper behavioral verification - # TODO: Fix instrumentation to ensure SQLite files are always generated: - # 1. Java: Verify JavaTestInstrumentation captures all return values - # 2. JavaScript: Verify JS instrumentation runs before optimization - # 3. 
Other languages: Implement proper test result capture - logger.error( - "Cannot verify correctness: SQLite test result files not found. " - f"Expected: {original_sqlite} and {candidate_sqlite}. " - "Test instrumentation must capture return values to ensure optimization correctness." - ) - return self.get_results_not_matched_error() - else: - # Python: Compare using Python comparator - match, diffs = compare_test_results(baseline_results.behavior_test_results, candidate_behavior_results) + match, diffs = self.compare_candidate_results( + baseline_results, candidate_behavior_results, optimization_candidate_index + ) if match: logger.info("h3|Test results matched ✅") @@ -2980,16 +2540,8 @@ def run_optimized_candidate( logger.info(f"loading|Running performance tests for candidate {optimization_candidate_index}...") console.rule() - # For async functions, instrument at definition site for performance benchmarking - if self.function_to_optimize.is_async and is_python(): - from codeflash.code_utils.instrument_existing_tests import add_async_decorator_to_function - - add_async_decorator_to_function( - self.function_to_optimize.file_path, - self.function_to_optimize, - TestingMode.PERFORMANCE, - project_root=self.project_root, - ) + if self.function_to_optimize.is_async: + self.instrument_async_for_mode(TestingMode.PERFORMANCE) try: candidate_benchmarking_results, _ = self.run_and_parse_tests( @@ -3001,8 +2553,7 @@ def run_optimized_candidate( enable_coverage=False, ) finally: - # Restore original source if we instrumented it - if self.function_to_optimize.is_async and is_python(): + if self.function_to_optimize.is_async: self.write_code_and_helpers( candidate_fto_code, candidate_helper_code, self.function_to_optimize.file_path ) @@ -3010,10 +2561,6 @@ def run_optimized_candidate( # across all test cases. This is more accurate for JavaScript tests where # capturePerf does internal looping with potentially different iteration counts per test. 
loop_count = candidate_benchmarking_results.effective_loop_count() - # For Java performance mode, loop indices are 0-indexed (loopId = outerLoop * maxInner + inner) - # so we need to add 1 to get the correct count for display - if is_java(): - loop_count += 1 if (total_candidate_timing := candidate_benchmarking_results.total_passed_runtime()) == 0: logger.warning("The overall test runtime of the optimized function is 0, couldn't run tests.") @@ -3021,23 +2568,9 @@ def run_optimized_candidate( logger.debug(f"Total optimized code {optimization_candidate_index} runtime (ns): {total_candidate_timing}") - candidate_async_throughput = None - candidate_concurrency_metrics = None - if self.function_to_optimize.is_async and is_python(): - candidate_async_throughput = calculate_function_throughput_from_test_results( - candidate_benchmarking_results, self.function_to_optimize.function_name - ) - logger.debug(f"Candidate async function throughput: {candidate_async_throughput} calls/second") - - # Run concurrency benchmark for candidate - candidate_concurrency_metrics = self.run_concurrency_benchmark( - code_context=code_context, original_helper_code=candidate_helper_code, test_env=test_env - ) - if candidate_concurrency_metrics: - logger.debug( - f"Candidate concurrency metrics: ratio={candidate_concurrency_metrics.concurrency_ratio:.2f}, " - f"seq={candidate_concurrency_metrics.sequential_time_ns}ns, conc={candidate_concurrency_metrics.concurrent_time_ns}ns" - ) + candidate_async_throughput, candidate_concurrency_metrics = self.collect_async_metrics( + candidate_benchmarking_results, code_context, candidate_helper_code, test_env + ) if self.args.benchmark: candidate_replay_benchmarking_results = candidate_benchmarking_results.group_by_benchmarks( @@ -3072,9 +2605,8 @@ def run_and_parse_tests( testing_time: float = TOTAL_LOOPING_TIME_EFFECTIVE, *, enable_coverage: bool = False, - min_outer_loops: int = 5, - max_outer_loops: int = 250, - inner_iterations: int | None = None, + 
pytest_min_loops: int = 5, + pytest_max_loops: int = 250, code_context: CodeOptimizationContext | None = None, line_profiler_output_file: Path | None = None, ) -> tuple[TestResults | dict, CoverageData | None]: @@ -3082,50 +2614,40 @@ def run_and_parse_tests( coverage_config_file = None try: if testing_type == TestingMode.BEHAVIOR: - result_file_path, run_result, coverage_database_file, coverage_config_file = run_behavioral_tests( - test_files, - test_framework=self.test_cfg.test_framework, - cwd=self.project_root, - test_env=test_env, - pytest_timeout=INDIVIDUAL_TESTCASE_TIMEOUT, - enable_coverage=enable_coverage, - js_project_root=self.test_cfg.js_project_root, - candidate_index=optimization_iteration, + result_file_path, run_result, coverage_database_file, coverage_config_file = ( + self.language_support.run_behavioral_tests( + test_paths=test_files, + test_env=test_env, + cwd=self.project_root, + timeout=INDIVIDUAL_TESTCASE_TIMEOUT, + project_root=self.test_cfg.js_project_root, + enable_coverage=enable_coverage, + candidate_index=optimization_iteration, + ) ) elif testing_type == TestingMode.LINE_PROFILE: - result_file_path, run_result = run_line_profile_tests( - test_files, - cwd=self.project_root, + result_file_path, run_result = self.language_support.run_line_profile_tests( + test_paths=test_files, test_env=test_env, - pytest_cmd=self.test_cfg.pytest_cmd, - pytest_timeout=INDIVIDUAL_TESTCASE_TIMEOUT, - pytest_target_runtime_seconds=testing_time, - test_framework=self.test_cfg.test_framework, - js_project_root=self.test_cfg.js_project_root, - line_profiler_output_file=line_profiler_output_file, + cwd=self.project_root, + timeout=INDIVIDUAL_TESTCASE_TIMEOUT, + project_root=self.test_cfg.js_project_root, + line_profile_output_file=line_profiler_output_file, ) elif testing_type == TestingMode.PERFORMANCE: - result_file_path, run_result = run_benchmarking_tests( - test_files, - cwd=self.project_root, + result_file_path, run_result = 
self.language_support.run_benchmarking_tests( + test_paths=test_files, test_env=test_env, - pytest_cmd=self.test_cfg.pytest_cmd, + cwd=self.project_root, timeout=INDIVIDUAL_TESTCASE_TIMEOUT, - target_runtime_seconds=testing_time, - min_outer_loops=min_outer_loops, - max_outer_loops=max_outer_loops, - inner_iterations=inner_iterations, - test_framework=self.test_cfg.test_framework, - js_project_root=self.test_cfg.js_project_root, + project_root=self.test_cfg.js_project_root, + min_loops=pytest_min_loops, + max_loops=pytest_max_loops, + target_duration_seconds=testing_time, ) else: msg = f"Unexpected testing type: {testing_type}" raise ValueError(msg) - - # Verbose: Log test run output - log_test_run_output( - run_result.stdout, run_result.stderr, f"Test Run ({testing_type.name})", run_result.returncode - ) except subprocess.TimeoutExpired: logger.exception( f"Error running tests in {', '.join(str(f) for f in test_files.test_files)}.\nTimeout Error" @@ -3153,9 +2675,7 @@ def run_and_parse_tests( console.print(panel) if testing_type in {TestingMode.BEHAVIOR, TestingMode.PERFORMANCE}: - # For non-Python behavior tests, skip SQLite cleanup - files needed for language-native comparison - non_python_original_code = not is_python() and optimization_iteration == 0 - skip_cleanup = (not is_python() and testing_type == TestingMode.BEHAVIOR) or non_python_original_code + skip_cleanup = self.should_skip_sqlite_cleanup(testing_type, optimization_iteration) results, coverage_results = parse_test_results( test_xml_path=result_file_path, @@ -3169,18 +2689,11 @@ def run_and_parse_tests( coverage_database_file=coverage_database_file, coverage_config_file=coverage_config_file, skip_sqlite_cleanup=skip_cleanup, - testing_type=testing_type, ) if testing_type == TestingMode.PERFORMANCE: results.perf_stdout = run_result.stdout return results, coverage_results - # For LINE_PROFILE mode, Python uses .lprof files while JavaScript uses JSON - # Return TestResults for JavaScript so 
_line_profiler_step_javascript can parse the JSON - if testing_type == TestingMode.LINE_PROFILE: - results, coverage_results = parse_line_profile_results(line_profiler_output_file=line_profiler_output_file) - return results, coverage_results - logger.error(f"Unexpected testing type: {testing_type}") - return TestResults(), None + return self.parse_line_profile_test_results(line_profiler_output_file) def submit_test_generation_tasks( self, @@ -3237,102 +2750,9 @@ def get_test_env( def line_profiler_step( self, code_context: CodeOptimizationContext, original_helper_code: dict[Path, str], candidate_index: int - ) -> dict: - # Dispatch to language-specific implementation - if is_python(): - return self._line_profiler_step_python(code_context, original_helper_code, candidate_index) - - if self.language_support is not None and hasattr(self.language_support, "instrument_source_for_line_profiler"): - try: - line_profiler_output_path = get_run_tmp_file(Path("line_profiler_output.json")) - success = self.language_support.instrument_source_for_line_profiler( - func_info=self.function_to_optimize, line_profiler_output_file=line_profiler_output_path - ) - if not success: - return {"timings": {}, "unit": 0, "str_out": ""} - - test_env = self.get_test_env( - codeflash_loop_index=0, codeflash_test_iteration=candidate_index, codeflash_tracer_disable=1 - ) - - _test_results, _ = self.run_and_parse_tests( - testing_type=TestingMode.LINE_PROFILE, - test_env=test_env, - test_files=self.test_files, - optimization_iteration=0, - testing_time=TOTAL_LOOPING_TIME_EFFECTIVE, - enable_coverage=False, - code_context=code_context, - line_profiler_output_file=line_profiler_output_path, - ) - - if not hasattr(self.language_support, "parse_line_profile_results"): - raise ValueError("Language support does not implement parse_line_profile_results") # noqa: TRY301 - - return self.language_support.parse_line_profile_results(line_profiler_output_path) - except Exception as e: - logger.warning(f"Failed 
to run line profiling: {e}") - return {"timings": {}, "unit": 0, "str_out": ""} - - logger.warning(f"Language support for {self.language_support.language} doesn't support line profiling") + ) -> dict[str, Any]: return {"timings": {}, "unit": 0, "str_out": ""} - def _line_profiler_step_python( - self, code_context: CodeOptimizationContext, original_helper_code: dict[Path, str], candidate_index: int - ) -> dict: - """Python-specific line profiler using decorator imports.""" - # Check if candidate code contains JIT decorators - line profiler doesn't work with JIT compiled code - candidate_fto_code = Path(self.function_to_optimize.file_path).read_text("utf-8") - if contains_jit_decorator(candidate_fto_code): - logger.info( - f"Skipping line profiler for {self.function_to_optimize.function_name} - code contains JIT decorator" - ) - return {"timings": {}, "unit": 0, "str_out": ""} - - # Check helper code for JIT decorators - for module_abspath in original_helper_code: - candidate_helper_code = Path(module_abspath).read_text("utf-8") - if contains_jit_decorator(candidate_helper_code): - logger.info( - f"Skipping line profiler for {self.function_to_optimize.function_name} - helper code contains JIT decorator" - ) - return {"timings": {}, "unit": 0, "str_out": ""} - - try: - console.rule() - - test_env = self.get_test_env( - codeflash_loop_index=0, codeflash_test_iteration=candidate_index, codeflash_tracer_disable=1 - ) - line_profiler_output_file = add_decorator_imports(self.function_to_optimize, code_context) - line_profile_results, _ = self.run_and_parse_tests( - testing_type=TestingMode.LINE_PROFILE, - test_env=test_env, - test_files=self.test_files, - optimization_iteration=0, - testing_time=TOTAL_LOOPING_TIME_EFFECTIVE, - enable_coverage=False, - code_context=code_context, - line_profiler_output_file=line_profiler_output_file, - ) - finally: - # Remove codeflash capture - self.write_code_and_helpers( - self.function_to_optimize_source_code, original_helper_code, 
self.function_to_optimize.file_path - ) - # this will happen when a timeoutexpired exception happens - if isinstance(line_profile_results, TestResults) and not line_profile_results.test_results: - logger.warning( - f"Timeout occurred while running line profiler for original function {self.function_to_optimize.function_name}" - ) - # set default value for line profiler results - return {"timings": {}, "unit": 0, "str_out": ""} - if line_profile_results["str_out"] == "": - logger.warning( - f"Couldn't run line profiler for original function {self.function_to_optimize.function_name}" - ) - return line_profile_results - def run_concurrency_benchmark( self, code_context: CodeOptimizationContext, original_helper_code: dict[Path, str], test_env: dict[str, str] ) -> ConcurrencyMetrics | None: @@ -3372,8 +2792,8 @@ def run_concurrency_benchmark( testing_time=5.0, # Short benchmark time enable_coverage=False, code_context=code_context, - min_outer_loops=1, - max_outer_loops=3, + pytest_min_loops=1, + pytest_max_loops=3, ) except Exception as e: logger.debug(f"Concurrency benchmark failed: {e}") diff --git a/codeflash/optimization/optimizer.py b/codeflash/optimization/optimizer.py index ed99e8083..208322d32 100644 --- a/codeflash/optimization/optimizer.py +++ b/codeflash/optimization/optimizer.py @@ -1,6 +1,5 @@ from __future__ import annotations -import ast import copy import os import tempfile @@ -30,20 +29,20 @@ ) from codeflash.code_utils.time_utils import humanize_runtime from codeflash.either import is_successful -from codeflash.languages import current_language_support, is_java, is_javascript, set_current_language -from codeflash.models.models import ValidCode +from codeflash.languages import current_language_support, set_current_language +from codeflash.lsp.helpers import is_subagent_mode from codeflash.telemetry.posthog_cf import ph -from codeflash.verification.parse_test_output import clear_test_file_path_cache from codeflash.verification.verification_utils import 
TestConfig if TYPE_CHECKING: + import ast from argparse import Namespace from codeflash.benchmarking.function_ranker import FunctionRanker from codeflash.code_utils.checkpoint import CodeflashRunCheckpoint from codeflash.discovery.functions_to_optimize import FunctionToOptimize from codeflash.languages.base import DependencyResolver - from codeflash.models.models import BenchmarkKey, FunctionCalledInTest + from codeflash.models.models import BenchmarkKey, FunctionCalledInTest, ValidCode from codeflash.optimization.function_optimizer import FunctionOptimizer @@ -72,59 +71,6 @@ def __init__(self, args: Namespace) -> None: self.original_args_and_test_cfg: tuple[Namespace, TestConfig] | None = None self.patch_files: list[Path] = [] - @staticmethod - def _find_js_project_root(file_path: Path) -> Path | None: - """Find the JavaScript/TypeScript project root by looking for package.json. - - Traverses up from the given file path to find the nearest directory - containing package.json or jest.config.js. - - Args: - file_path: A file path within the JavaScript project. - - Returns: - The project root directory, or None if not found. - - """ - current = file_path.parent if file_path.is_file() else file_path - while current != current.parent: # Stop at filesystem root - if ( - (current / "package.json").exists() - or (current / "jest.config.js").exists() - or (current / "jest.config.ts").exists() - or (current / "tsconfig.json").exists() - ): - return current - current = current.parent - return None - - def _verify_js_requirements(self) -> None: - """Verify JavaScript/TypeScript requirements before optimization. - - Checks that Node.js, npm, and the test framework are available. - Logs warnings if requirements are not met but does not abort. 
- - """ - from codeflash.languages import get_language_support - from codeflash.languages.base import Language - from codeflash.languages.test_framework import get_js_test_framework_or_default - - js_project_root = self.test_cfg.js_project_root - if not js_project_root: - return - - try: - js_support = get_language_support(Language.JAVASCRIPT) - test_framework = get_js_test_framework_or_default() - success, errors = js_support.verify_requirements(js_project_root, test_framework) - - if not success: - logger.warning("JavaScript requirements check found issues:") - for error in errors: - logger.warning(f" - {error}") - except Exception as e: - logger.debug(f"Failed to verify JS requirements: {e}") - def run_benchmarks( self, file_to_funcs_to_optimize: dict[Path, list[FunctionToOptimize]], num_optimizable_functions: int ) -> tuple[dict[str, dict[BenchmarkKey, float]], dict[BenchmarkKey, float]]: @@ -247,26 +193,8 @@ def create_function_optimizer( function_to_optimize_source_code: str | None = "", function_benchmark_timings: dict[str, dict[BenchmarkKey, float]] | None = None, total_benchmark_timings: dict[BenchmarkKey, float] | None = None, - original_module_ast: ast.Module | None = None, - original_module_path: Path | None = None, call_graph: DependencyResolver | None = None, ) -> FunctionOptimizer | None: - from codeflash.languages.python.static_analysis.static_analysis import ( - get_first_top_level_function_or_method_ast, - ) - from codeflash.optimization.function_optimizer import FunctionOptimizer - - if function_to_optimize_ast is None and original_module_ast is not None: - function_to_optimize_ast = get_first_top_level_function_or_method_ast( - function_to_optimize.function_name, function_to_optimize.parents, original_module_ast - ) - if function_to_optimize_ast is None: - logger.info( - f"Function {function_to_optimize.qualified_name} not found in {original_module_path}.\n" - f"Skipping optimization." 
- ) - return None - qualified_name_w_module = function_to_optimize.qualified_name_with_modules_from_root(self.args.project_root) function_specific_timings = None @@ -279,7 +207,11 @@ def create_function_optimizer( ): function_specific_timings = function_benchmark_timings[qualified_name_w_module] - return FunctionOptimizer( + cls = current_language_support().function_optimizer_class + + # TODO: _resolve_function_ast re-parses source via ast.parse() per function, even when the caller already + # has a parsed module AST. Consider passing the pre-parsed AST through to avoid redundant parsing. + function_optimizer = cls( function_to_optimize=function_to_optimize, test_cfg=self.test_cfg, function_to_optimize_source_code=function_to_optimize_source_code, @@ -292,62 +224,26 @@ def create_function_optimizer( replay_tests_dir=self.replay_tests_dir, call_graph=call_graph, ) + if function_optimizer.function_to_optimize_ast is None and function_optimizer.requires_function_ast(): + logger.info( + f"Function {function_to_optimize.qualified_name} not found in " + f"{function_to_optimize.file_path}.\nSkipping optimization." 
+ ) + return None + return function_optimizer def prepare_module_for_optimization( self, original_module_path: Path ) -> tuple[dict[Path, ValidCode], ast.Module | None] | None: - from codeflash.languages.python.static_analysis.code_replacer import normalize_code, normalize_node - from codeflash.languages.python.static_analysis.static_analysis import analyze_imported_modules - logger.info(f"loading|Examining file {original_module_path!s}") console.rule() original_module_code: str = original_module_path.read_text(encoding="utf8") - # For JavaScript/TypeScript/Java, skip Python-specific AST parsing - if is_javascript() or is_java(): - validated_original_code: dict[Path, ValidCode] = { - original_module_path: ValidCode(source_code=original_module_code, normalized_code=original_module_code) - } - return validated_original_code, None - - # Python-specific parsing - try: - original_module_ast = ast.parse(original_module_code) - except SyntaxError as e: - logger.warning(f"Syntax error parsing code in {original_module_path}: {e}") - logger.info("Skipping optimization due to file error.") - return None - normalized_original_module_code = ast.unparse(normalize_node(original_module_ast)) - validated_original_code = { - original_module_path: ValidCode( - source_code=original_module_code, normalized_code=normalized_original_module_code - ) - } - - imported_module_analyses = analyze_imported_modules( + return current_language_support().prepare_module( original_module_code, original_module_path, self.args.project_root ) - has_syntax_error = False - for analysis in imported_module_analyses: - callee_original_code = analysis.file_path.read_text(encoding="utf8") - try: - normalized_callee_original_code = normalize_code(callee_original_code) - except SyntaxError as e: - logger.warning(f"Syntax error parsing code in callee module {analysis.file_path}: {e}") - logger.info("Skipping optimization due to helper file error.") - has_syntax_error = True - break - 
validated_original_code[analysis.file_path] = ValidCode( - source_code=callee_original_code, normalized_code=normalized_callee_original_code - ) - - if has_syntax_error: - return None - - return validated_original_code, original_module_ast - def discover_tests( self, file_to_funcs_to_optimize: dict[Path, list[FunctionToOptimize]] ) -> tuple[dict[str, set[FunctionCalledInTest]], int]: @@ -556,11 +452,7 @@ def run(self) -> None: if funcs and funcs[0].language: set_current_language(funcs[0].language) self.test_cfg.set_language(funcs[0].language) - # For JavaScript, also set js_project_root for test execution - if is_javascript(): - self.test_cfg.js_project_root = self._find_js_project_root(file_path) - # Verify JS requirements before proceeding - self._verify_js_requirements() + current_language_support().setup_test_config(self.test_cfg, file_path) break if self.args.all: @@ -604,7 +496,7 @@ def run(self) -> None: return function_to_tests, _ = self.discover_tests(file_to_funcs_to_optimize) - if self.args.all: + if self.args.all and not self.args.subagent: self.functions_checkpoint = CodeflashRunCheckpoint(self.args.module_root) # GLOBAL RANKING: Rank all functions together before optimizing @@ -624,7 +516,7 @@ def run(self) -> None: continue prepared_modules[original_module_path] = module_prep_result - validated_original_code, original_module_ast = prepared_modules[original_module_path] + validated_original_code, _original_module_ast = prepared_modules[original_module_path] function_iterator_count = i + 1 logger.info( @@ -632,15 +524,6 @@ def run(self) -> None: f"{function_to_optimize.qualified_name} (in {original_module_path.name})" ) console.rule() - - # Safety-net cleanup: remove any leftover instrumented test files from previous iterations. - # This prevents a broken test file from one function from cascading compilation failures - # to all subsequent functions (e.g., when Maven compiles all test files together). 
- leftover_files = Optimizer.find_leftover_instrumented_test_files(self.test_cfg.tests_root) - if leftover_files: - logger.debug(f"Cleaning up {len(leftover_files)} leftover instrumented test file(s)") - cleanup_paths(leftover_files) - function_optimizer = None try: function_optimizer = self.create_function_optimizer( @@ -649,8 +532,6 @@ def run(self) -> None: function_to_optimize_source_code=validated_original_code[original_module_path].source_code, function_benchmark_timings=function_benchmark_timings, total_benchmark_timings=total_benchmark_timings, - original_module_ast=original_module_ast, - original_module_path=original_module_path, call_graph=resolver, ) if function_optimizer is None: @@ -667,7 +548,7 @@ def run(self) -> None: if is_successful(best_optimization): optimizations_found += 1 # create a diff patch for successful optimization - if self.current_worktree: + if self.current_worktree and not is_subagent_mode(): best_opt = best_optimization.unwrap() read_writable_code = best_opt.code_context.read_writable_code relative_file_paths = [ @@ -690,7 +571,6 @@ def run(self) -> None: if function_optimizer is not None: function_optimizer.executor.shutdown(wait=True) function_optimizer.cleanup_generated_files() - clear_test_file_path_cache() ph("cli-optimize-run-finished", {"optimizations_found": optimizations_found}) if len(self.patch_files) > 0: @@ -701,7 +581,12 @@ def run(self) -> None: self.functions_checkpoint.cleanup() if hasattr(self.args, "command") and self.args.command == "optimize": self.cleanup_replay_tests() - if optimizations_found == 0: + if is_subagent_mode(): + if optimizations_found == 0: + import sys + + sys.stdout.write("No optimizations found.\n") + elif optimizations_found == 0: logger.info("❌ No optimizations found.") elif self.args.all: logger.info("✨ All functions have been optimized! 
✨") @@ -735,12 +620,6 @@ def find_leftover_instrumented_test_files(test_root: Path) -> list[Path]: - '*__perfinstrumented.spec.{js,ts,jsx,tsx}' - '*__perfonlyinstrumented.spec.{js,ts,jsx,tsx}' - Java patterns: - - '*Test__perfinstrumented.java' - - '*Test__perfonlyinstrumented.java' - - '*Test__perfinstrumented_{n}.java' (with optional numeric suffix) - - '*Test__perfonlyinstrumented_{n}.java' (with optional numeric suffix) - Returns a list of matching file paths. """ import re @@ -751,8 +630,8 @@ def find_leftover_instrumented_test_files(test_root: Path) -> list[Path]: r"test.*__perf_test_\d?\.py|test_.*__unit_test_\d?\.py|test_.*__perfinstrumented\.py|test_.*__perfonlyinstrumented\.py|" # JavaScript/TypeScript patterns (new naming with .test/.spec preserved) r".*__perfinstrumented\.(?:test|spec)\.(?:js|ts|jsx|tsx)|.*__perfonlyinstrumented\.(?:test|spec)\.(?:js|ts|jsx|tsx)|" - # Java patterns (with optional numeric suffix _2, _3, etc., and existing_ prefix variant) - r".*Test__(?:existing_)?perfinstrumented(?:_\d+)?\.java|.*Test__(?:existing_)?perfonlyinstrumented(?:_\d+)?\.java" + # Java patterns + r".*__perfinstrumented(?:_\d+)?\.java|.*__perfonlyinstrumented(?:_\d+)?\.java" r")$" ) @@ -775,6 +654,8 @@ def cleanup_temporary_paths(self) -> None: if hasattr(get_run_tmp_file, "tmpdir"): get_run_tmp_file.tmpdir.cleanup() del get_run_tmp_file.tmpdir + if hasattr(get_run_tmp_file, "tmpdir_path"): + del get_run_tmp_file.tmpdir_path # Always clean up concolic test directory cleanup_paths([self.test_cfg.concolic_test_root_dir]) diff --git a/codeflash/result/create_pr.py b/codeflash/result/create_pr.py index 01bf5e57e..9325110fa 100644 --- a/codeflash/result/create_pr.py +++ b/codeflash/result/create_pr.py @@ -14,7 +14,7 @@ from codeflash.code_utils.tabulate import tabulate from codeflash.code_utils.time_utils import format_perf, format_time from codeflash.github.PrComment import FileDiffContent, PrComment -from codeflash.languages import is_java +from codeflash.languages 
import current_language_support from codeflash.languages.python.static_analysis.code_replacer import is_zero_diff from codeflash.result.critic import performance_gain @@ -139,17 +139,19 @@ def existing_tests_source_for( logger.debug(f"[PR-DEBUG] Mapped {instrumented_abs_path.name} -> {abs_path.name}") else: logger.debug(f"[PR-DEBUG] No mapping found for {instrumented_abs_path.name}") - elif is_java(): - # Java: test_module_path is the class name (e.g., "BubbleSortTest") - # Search non_generated_tests for a matching .java file - abs_path = (test_cfg.tests_project_rootdir / f"{test_module_path}.java").resolve() - for candidate in non_generated_tests: - if candidate.stem == test_module_path: - abs_path = candidate - break else: - # Python: convert module name to path - abs_path = Path(test_module_path.replace(".", os.sep)).with_suffix(".py").resolve() + lang = current_language_support() + # Let language-specific resolution handle non-Python module paths + lang_result = lang.resolve_test_module_path_for_pr( + test_module_path, test_cfg.tests_project_rootdir, non_generated_tests + ) + if lang_result is not None: + abs_path = lang_result + else: + # Default (Python): convert module name to path + abs_path = ( + Path(test_module_path.replace(".", os.sep)).with_suffix(lang.default_file_extension).resolve() + ) if abs_path not in non_generated_tests: skipped_count += 1 if skipped_count <= 5: diff --git a/codeflash/tracing/tracing_new_process.py b/codeflash/tracing/tracing_new_process.py index e2e54a708..9edfc5c38 100644 --- a/codeflash/tracing/tracing_new_process.py +++ b/codeflash/tracing/tracing_new_process.py @@ -125,6 +125,7 @@ def __init__( self.max_function_count = max_function_count self.config = config self.project_root = project_root + self.project_root_str = str(project_root) + os.sep if project_root else "" console.rule(f"Project Root: {self.project_root}", style="bold blue") self.ignored_functions = {"", "", "", "", "", ""} @@ -327,19 +328,20 @@ def 
tracer_logic(self, frame: FrameType, event: str) -> None: if code.co_name in self.ignored_functions: return - # Now resolve file path only if we need it + # Resolve file path and check validity (cached) co_filename = code.co_filename if co_filename in self.path_cache: - file_name = self.path_cache[co_filename] + file_name, is_valid = self.path_cache[co_filename] + if not is_valid: + return else: - file_name = Path(co_filename).resolve() - self.path_cache[co_filename] = file_name - # TODO : It currently doesn't log the last return call from the first function - - if not file_name.is_relative_to(self.project_root): - return - if not file_name.exists(): - return + resolved = os.path.realpath(co_filename) + # startswith is cheaper than Path.is_relative_to, os.path.exists avoids Path construction + is_valid = resolved.startswith(self.project_root_str) and os.path.exists(resolved) # noqa: PTH110 + self.path_cache[co_filename] = (resolved, is_valid) + if not is_valid: + return + file_name = resolved if self.functions and code.co_name not in self.functions: return class_name = None @@ -376,10 +378,11 @@ def tracer_logic(self, frame: FrameType, event: str) -> None: if function_qualified_name in self.ignored_qualified_functions: return if function_qualified_name not in self.function_count: - # seeing this function for the first time + # seeing this function for the first time — Path construction only happens here self.function_count[function_qualified_name] = 1 + file_path = Path(file_name) file_valid = filter_files_optimized( - file_path=file_name, + file_path=file_path, tests_root=Path(self.config["tests_root"]), ignore_paths=[Path(p) for p in self.config["ignore_paths"]], module_root=Path(self.config["module_root"]), @@ -391,8 +394,8 @@ def tracer_logic(self, frame: FrameType, event: str) -> None: self.function_modules.append( FunctionModules( function_name=code.co_name, - file_name=file_name, - module_name=module_name_from_file_path(file_name, 
project_root_path=self.project_root), + file_name=file_path, + module_name=module_name_from_file_path(file_path, project_root_path=self.project_root), class_name=class_name, line_no=code.co_firstlineno, ) @@ -432,16 +435,7 @@ def tracer_logic(self, frame: FrameType, event: str) -> None: cur.execute( "INSERT INTO function_calls VALUES(?, ?, ?, ?, ?, ?, ?, ?)", - ( - event, - code.co_name, - class_name, - str(file_name), - frame.f_lineno, - frame.f_back.__hash__(), - t_ns, - local_vars, - ), + (event, code.co_name, class_name, file_name, frame.f_lineno, frame.f_back.__hash__(), t_ns, local_vars), ) self.trace_count += 1 self.next_insert -= 1 diff --git a/codeflash/verification/comparator.py b/codeflash/verification/comparator.py index 6429b5520..41ed1a413 100644 --- a/codeflash/verification/comparator.py +++ b/codeflash/verification/comparator.py @@ -3,9 +3,11 @@ import datetime import decimal import enum +import itertools import math import re import types +import warnings import weakref from collections import ChainMap, OrderedDict, deque from importlib.util import find_spec @@ -28,6 +30,33 @@ HAS_NUMBA = find_spec("numba") is not None HAS_PYARROW = find_spec("pyarrow") is not None +if HAS_NUMPY: + import numpy as np +if HAS_SCIPY: + import scipy # type: ignore # noqa: PGH003 +if HAS_JAX: + import jax # type: ignore # noqa: PGH003 + import jax.numpy as jnp # type: ignore # noqa: PGH003 +if HAS_XARRAY: + import xarray # type: ignore # noqa: PGH003 +if HAS_TENSORFLOW: + import tensorflow as tf # type: ignore # noqa: PGH003 +if HAS_SQLALCHEMY: + import sqlalchemy # type: ignore # noqa: PGH003 +if HAS_PYARROW: + import pyarrow as pa # type: ignore # noqa: PGH003 +if HAS_PANDAS: + import pandas # noqa: ICN001 +if HAS_TORCH: + import torch # type: ignore # noqa: PGH003 +if HAS_NUMBA: + import numba # type: ignore # noqa: PGH003 + from numba.core.dispatcher import Dispatcher # type: ignore # noqa: PGH003 + from numba.typed import Dict as NumbaDict # type: ignore # noqa: 
PGH003 + from numba.typed import List as NumbaList # type: ignore # noqa: PGH003 +if HAS_PYRSISTENT: + import pyrsistent # type: ignore # noqa: PGH003 + # Pattern to match pytest temp directories: /tmp/pytest-of-/pytest-/ # These paths vary between test runs but are logically equivalent PYTEST_TEMP_PATH_PATTERN = re.compile(r"/tmp/pytest-of-[^/]+/pytest-\d+/") # noqa: S108 @@ -36,6 +65,31 @@ # Created by tempfile.mkdtemp() or tempfile.TemporaryDirectory() PYTHON_TEMPFILE_PATTERN = re.compile(r"/tmp/tmp[a-zA-Z0-9_]+/") # noqa: S108 +_DICT_KEYS_TYPE = type({}.keys()) +_DICT_VALUES_TYPE = type({}.values()) +_DICT_ITEMS_TYPE = type({}.items()) + +_EQUALITY_TYPES = ( + int, + bool, + complex, + type(None), + type(Ellipsis), + decimal.Decimal, + set, + bytes, + bytearray, + memoryview, + frozenset, + enum.Enum, + type, + range, + slice, + OrderedDict, + types.GenericAlias, + *((_union_type,) if (_union_type := getattr(types, "UnionType", None)) else ()), +) + def _normalize_temp_path(path: str) -> str: """Normalize temporary file paths by replacing session-specific components. 
@@ -145,28 +199,7 @@ def comparator(orig: Any, new: Any, superset_obj: bool = False) -> bool: return _normalize_temp_path(orig) == _normalize_temp_path(new) return False - if isinstance( - orig, - ( - int, - bool, - complex, - type(None), - type(Ellipsis), - decimal.Decimal, - set, - bytes, - bytearray, - memoryview, - frozenset, - enum.Enum, - type, - range, - slice, - OrderedDict, - types.GenericAlias, - ), - ): + if isinstance(orig, _EQUALITY_TYPES): return orig == new if isinstance(orig, float): if math.isnan(orig) and math.isnan(new): @@ -185,9 +218,6 @@ def comparator(orig: Any, new: Any, superset_obj: bool = False) -> bool: return comparator(orig_referent, new_referent, superset_obj) if HAS_JAX: - import jax # type: ignore # noqa: PGH003 - import jax.numpy as jnp # type: ignore # noqa: PGH003 - # Handle JAX arrays first to avoid boolean context errors in other conditions if isinstance(orig, jax.Array): if orig.dtype != new.dtype: @@ -198,15 +228,11 @@ def comparator(orig: Any, new: Any, superset_obj: bool = False) -> bool: # Handle xarray objects before numpy to avoid boolean context errors if HAS_XARRAY: - import xarray # type: ignore # noqa: PGH003 - if isinstance(orig, (xarray.Dataset, xarray.DataArray)): return orig.identical(new) # Handle TensorFlow objects early to avoid boolean context errors if HAS_TENSORFLOW: - import tensorflow as tf # type: ignore # noqa: PGH003 - if isinstance(orig, tf.Tensor): if orig.dtype != new.dtype: return False @@ -232,7 +258,9 @@ def comparator(orig: Any, new: Any, superset_obj: bool = False) -> bool: if not comparator(orig.dense_shape.numpy(), new.dense_shape.numpy(), superset_obj): return False return comparator(orig.indices.numpy(), new.indices.numpy(), superset_obj) and comparator( - orig.values.numpy(), new.values.numpy(), superset_obj + orig.values.numpy(), # noqa: PD011 + new.values.numpy(), # noqa: PD011 + superset_obj, ) if isinstance(orig, tf.RaggedTensor): @@ -243,8 +271,6 @@ def comparator(orig: Any, new: Any, 
superset_obj: bool = False) -> bool: return comparator(orig.to_list(), new.to_list(), superset_obj) if HAS_SQLALCHEMY: - import sqlalchemy # type: ignore # noqa: PGH003 - try: insp = sqlalchemy.inspection.inspect(orig) insp = sqlalchemy.inspection.inspect(new) @@ -260,8 +286,6 @@ def comparator(orig: Any, new: Any, superset_obj: bool = False) -> bool: except sqlalchemy.exc.NoInspectionAvailable: pass - if HAS_SCIPY: - import scipy # type: ignore # noqa: PGH003 # scipy condition because dok_matrix type is also a instance of dict, but dict comparison doesn't work for it if isinstance(orig, dict) and not (HAS_SCIPY and isinstance(orig, scipy.sparse.spmatrix)): if superset_obj: @@ -280,21 +304,14 @@ def comparator(orig: Any, new: Any, superset_obj: bool = False) -> bool: return comparator(dict(orig), dict(new), superset_obj) # Handle dict view types (dict_keys, dict_values, dict_items) - # Use type name checking since these are not directly importable types - type_name = type(orig).__name__ - if type_name == "dict_keys": - # dict_keys can be compared as sets (order doesn't matter) + if isinstance(orig, _DICT_KEYS_TYPE): return comparator(set(orig), set(new)) - if type_name == "dict_values": - # dict_values need element-wise comparison (order matters) + if isinstance(orig, _DICT_VALUES_TYPE): return comparator(list(orig), list(new)) - if type_name == "dict_items": - # Convert to dict for order-insensitive comparison (handles unhashable values) + if isinstance(orig, _DICT_ITEMS_TYPE): return comparator(dict(orig), dict(new), superset_obj) if HAS_NUMPY: - import numpy as np - if isinstance(orig, (np.datetime64, np.timedelta64)): # Handle NaT (Not a Time) - numpy's equivalent of NaN for datetime if np.isnat(orig) and np.isnat(new): @@ -356,8 +373,6 @@ def comparator(orig: Any, new: Any, superset_obj: bool = False) -> bool: return (orig != new).nnz == 0 if HAS_PYARROW: - import pyarrow as pa # type: ignore # noqa: PGH003 - if isinstance(orig, pa.Table): if orig.schema != 
new.schema: return False @@ -400,8 +415,6 @@ def comparator(orig: Any, new: Any, superset_obj: bool = False) -> bool: return bool(orig.equals(new)) if HAS_PANDAS: - import pandas # noqa: ICN001 - if isinstance( orig, (pandas.DataFrame, pandas.Series, pandas.Index, pandas.Categorical, pandas.arrays.SparseArray) ): @@ -432,8 +445,6 @@ def comparator(orig: Any, new: Any, superset_obj: bool = False) -> bool: pass if HAS_TORCH: - import torch # type: ignore # noqa: PGH003 - if isinstance(orig, torch.Tensor): if orig.dtype != new.dtype: return False @@ -452,11 +463,6 @@ def comparator(orig: Any, new: Any, superset_obj: bool = False) -> bool: return orig == new if HAS_NUMBA: - import numba - from numba.core.dispatcher import Dispatcher - from numba.typed import Dict as NumbaDict - from numba.typed import List as NumbaList - # Handle numba typed List if isinstance(orig, NumbaList): if len(orig) != len(new): @@ -488,8 +494,6 @@ def comparator(orig: Any, new: Any, superset_obj: bool = False) -> bool: return orig.py_func is new.py_func if HAS_PYRSISTENT: - import pyrsistent # type: ignore # noqa: PGH003 - if isinstance( orig, ( @@ -526,6 +530,55 @@ def comparator(orig: Any, new: Any, superset_obj: bool = False) -> bool: ) return comparator(orig_dict, new_dict, superset_obj) + # Handle itertools infinite iterators + if isinstance(orig, itertools.count): + # repr reliably reflects internal state, e.g. "count(5)" or "count(5, 2)" + return repr(orig) == repr(new) + + if isinstance(orig, itertools.repeat): + # repr reliably reflects internal state, e.g. "repeat(5)" or "repeat(5, 3)" + return repr(orig) == repr(new) + + if isinstance(orig, itertools.cycle): + # cycle has no useful repr and no public attributes; use __reduce__ to extract state. + # __reduce__ returns (cls, (remaining_iter,), (saved_items, first_pass_done)). 
+ # NOTE: consuming the remaining_iter is destructive to the cycle object, but this is + # acceptable since the comparator is the final consumer of captured return values. + # NOTE: __reduce__ on itertools.cycle was removed in Python 3.14. + try: + with warnings.catch_warnings(): + warnings.simplefilter("ignore", DeprecationWarning) + orig_reduce = orig.__reduce__() + new_reduce = new.__reduce__() + orig_remaining = list(orig_reduce[1][0]) + new_remaining = list(new_reduce[1][0]) + orig_saved, orig_started = orig_reduce[2] + new_saved, new_started = new_reduce[2] + if orig_started != new_started: + return False + return comparator(orig_remaining, new_remaining, superset_obj) and comparator( + orig_saved, new_saved, superset_obj + ) + except TypeError: + # Python 3.14+: __reduce__ removed. Fall back to consuming elements from both + # cycles and comparing. Since the comparator is the final consumer, this is safe. + sample_size = 200 + orig_sample = [next(orig) for _ in range(sample_size)] + new_sample = [next(new) for _ in range(sample_size)] + return comparator(orig_sample, new_sample, superset_obj) + + # Handle remaining itertools types (chain, islice, starmap, product, permutations, etc.) + # by materializing into lists. count/repeat/cycle are already handled above. + # NOTE: materializing is destructive (consumes the iterator) and will hang on infinite input, + # but the three infinite itertools types are already handled above. 
+ if type(orig).__module__ == "itertools": + if isinstance(orig, itertools.groupby): + # groupby yields (key, group_iterator) — materialize groups too + orig_groups = [(k, list(g)) for k, g in orig] + new_groups = [(k, list(g)) for k, g in new] + return comparator(orig_groups, new_groups, superset_obj) + return comparator(list(orig), list(new), superset_obj) + # re.Pattern can be made better by DFA Minimization and then comparing if isinstance( orig, (datetime.datetime, datetime.date, datetime.timedelta, datetime.time, datetime.timezone, re.Pattern) @@ -535,7 +588,7 @@ def comparator(orig: Any, new: Any, superset_obj: bool = False) -> bool: # If the object passed has a user defined __eq__ method, use that # This could fail if the user defined __eq__ is defined with C-extensions try: - if hasattr(orig, "__eq__") and str(type(orig.__eq__)) == "": + if hasattr(orig, "__eq__") and isinstance(orig.__eq__, types.MethodType): return orig == new except Exception: pass @@ -562,6 +615,18 @@ def comparator(orig: Any, new: Any, superset_obj: bool = False) -> bool: new_keys = {k: v for k, v in new.__dict__.items() if k != "parent"} return comparator(orig_keys, new_keys, superset_obj) + # For objects with __slots__ but no __dict__, compare slot attributes + if hasattr(type(orig), "__slots__"): + all_slots = set() + for cls in type(orig).__mro__: + if hasattr(cls, "__slots__"): + all_slots.update(cls.__slots__) + orig_vals = {s: getattr(orig, s, None) for s in all_slots} + new_vals = {s: getattr(new, s, None) for s in all_slots} + if superset_obj: + return all(k in new_vals and comparator(v, new_vals[k], superset_obj) for k, v in orig_vals.items()) + return comparator(orig_vals, new_vals, superset_obj) + if type(orig) in {types.BuiltinFunctionType, types.BuiltinMethodType}: return new == orig if str(type(orig)) == "": diff --git a/codeflash/verification/concolic_testing.py b/codeflash/verification/concolic_testing.py deleted file mode 100644 index 7214a123b..000000000 --- 
a/codeflash/verification/concolic_testing.py +++ /dev/null @@ -1,136 +0,0 @@ -from __future__ import annotations - -import ast -import importlib.util -import subprocess -import tempfile -import time -from pathlib import Path -from typing import TYPE_CHECKING - -from codeflash.cli_cmds.console import console, logger -from codeflash.code_utils.compat import SAFE_SYS_EXECUTABLE -from codeflash.code_utils.shell_utils import make_env_with_project_root -from codeflash.discovery.discover_unit_tests import discover_unit_tests -from codeflash.languages import is_python -from codeflash.languages.python.static_analysis.concolic_utils import clean_concolic_tests, is_valid_concolic_test -from codeflash.languages.python.static_analysis.static_analysis import has_typed_parameters -from codeflash.lsp.helpers import is_LSP_enabled -from codeflash.telemetry.posthog_cf import ph -from codeflash.verification.verification_utils import TestConfig - -CROSSHAIR_AVAILABLE = importlib.util.find_spec("crosshair") is not None - -if TYPE_CHECKING: - from argparse import Namespace - - from codeflash.discovery.functions_to_optimize import FunctionToOptimize - from codeflash.models.models import FunctionCalledInTest - - -def generate_concolic_tests( - test_cfg: TestConfig, args: Namespace, function_to_optimize: FunctionToOptimize, function_to_optimize_ast: ast.AST -) -> tuple[dict[str, set[FunctionCalledInTest]], str]: - """Generate concolic tests using CrossHair (Python only). - - CrossHair is a Python-specific symbolic execution tool. For non-Python languages - (JavaScript, TypeScript, etc.), this function returns early with empty results. 
- - Args: - test_cfg: Test configuration - args: Command line arguments - function_to_optimize: The function being optimized - function_to_optimize_ast: AST of the function (Python ast.FunctionDef) - - Returns: - Tuple of (function_to_tests mapping, concolic test suite code) - - """ - start_time = time.perf_counter() - function_to_concolic_tests = {} - concolic_test_suite_code = "" - - # CrossHair is Python-only - skip for other languages - if not is_python(): - logger.debug("Skipping concolic test generation for non-Python languages (CrossHair is Python-only)") - return function_to_concolic_tests, concolic_test_suite_code - - if not CROSSHAIR_AVAILABLE: - logger.debug("Skipping concolic test generation (crosshair-tool is not installed)") - return function_to_concolic_tests, concolic_test_suite_code - - if is_LSP_enabled(): - logger.debug("Skipping concolic test generation in LSP mode") - return function_to_concolic_tests, concolic_test_suite_code - - if ( - test_cfg.concolic_test_root_dir - and isinstance(function_to_optimize_ast, ast.FunctionDef) - and has_typed_parameters(function_to_optimize_ast, function_to_optimize.parents) - ): - logger.info("Generating concolic opcode coverage tests for the original code…") - console.rule() - try: - env = make_env_with_project_root(args.project_root) - cover_result = subprocess.run( - [ - SAFE_SYS_EXECUTABLE, - "-m", - "crosshair", - "cover", - "--example_output_format=pytest", - "--per_condition_timeout=20", - ".".join( - [ - function_to_optimize.file_path.relative_to(args.project_root) - .with_suffix("") - .as_posix() - .replace("/", "."), - function_to_optimize.qualified_name, - ] - ), - ], - capture_output=True, - text=True, - cwd=args.project_root, - check=False, - # Timeout for CrossHair concolic test generation (seconds). - # Override via CODEFLASH_CONCOLIC_TIMEOUT env var, - # falling back to CODEFLASH_TEST_TIMEOUT, then default 600s. 
- timeout=600, - env=env, - ) - except subprocess.TimeoutExpired: - logger.debug("CrossHair Cover test generation timed out") - return function_to_concolic_tests, concolic_test_suite_code - - if cover_result.returncode == 0: - generated_concolic_test: str = cover_result.stdout - if not is_valid_concolic_test(generated_concolic_test, project_root=str(args.project_root)): - logger.debug("CrossHair generated invalid test, skipping") - console.rule() - return function_to_concolic_tests, concolic_test_suite_code - concolic_test_suite_code: str = clean_concolic_tests(generated_concolic_test) - concolic_test_suite_dir = Path(tempfile.mkdtemp(dir=test_cfg.concolic_test_root_dir)) - concolic_test_suite_path = concolic_test_suite_dir / "test_concolic_coverage.py" - concolic_test_suite_path.write_text(concolic_test_suite_code, encoding="utf8") - - concolic_test_cfg = TestConfig( - tests_root=concolic_test_suite_dir, - tests_project_rootdir=test_cfg.concolic_test_root_dir, - project_root_path=args.project_root, - ) - function_to_concolic_tests, num_discovered_concolic_tests, _ = discover_unit_tests(concolic_test_cfg) - logger.info( - f"Created {num_discovered_concolic_tests} " - f"concolic unit test case{'s' if num_discovered_concolic_tests != 1 else ''} " - ) - console.rule() - ph("cli-optimize-concolic-tests", {"num_tests": num_discovered_concolic_tests}) - - else: - logger.debug(f"Error running CrossHair Cover {': ' + cover_result.stderr if cover_result.stderr else '.'}") - console.rule() - end_time = time.perf_counter() - logger.debug(f"Generated concolic tests in {end_time - start_time:.2f} seconds") - return function_to_concolic_tests, concolic_test_suite_code diff --git a/codeflash/verification/parse_line_profile_test_output.py b/codeflash/verification/parse_line_profile_test_output.py index ebe8f7296..85e6a7460 100644 --- a/codeflash/verification/parse_line_profile_test_output.py +++ b/codeflash/verification/parse_line_profile_test_output.py @@ -3,16 +3,10 @@ from 
__future__ import annotations import inspect -import json import linecache import os -from pathlib import Path -from typing import Optional - -import dill as pickle from codeflash.code_utils.tabulate import tabulate -from codeflash.languages import is_python def show_func( @@ -118,53 +112,3 @@ def show_text_non_python(stats: dict, line_contents: dict[tuple[str, int], str]) ) out_table += "\n" return out_table - - -def parse_line_profile_results(line_profiler_output_file: Optional[Path]) -> dict: - if is_python(): - line_profiler_output_file = line_profiler_output_file.with_suffix(".lprof") - stats_dict = {} - if not line_profiler_output_file.exists(): - return {"timings": {}, "unit": 0, "str_out": ""}, None - with line_profiler_output_file.open("rb") as f: - stats = pickle.load(f) - stats_dict["timings"] = stats.timings - stats_dict["unit"] = stats.unit - str_out = show_text(stats_dict) - stats_dict["str_out"] = str_out - return stats_dict, None - - stats_dict = {} - if line_profiler_output_file is None or not line_profiler_output_file.exists(): - return {"timings": {}, "unit": 0, "str_out": ""}, None - - with line_profiler_output_file.open("r", encoding="utf-8") as f: - raw_data = json.load(f) - - # Convert Java/JS JSON output into Python line_profiler-compatible shape. 
- # timings: {(filename, start_lineno, func_name): [(lineno, hits, time_raw), ...]} - grouped_timings: dict[tuple[str, int, str], list[tuple[int, int, int]]] = {} - lines_by_file: dict[str, list[tuple[int, int, int]]] = {} - line_contents: dict[tuple[str, int], str] = {} - for key, stats in raw_data.items(): - file_path = stats.get("file") - line_num = stats.get("line") - if file_path is None or line_num is None: - file_path, line_str = key.rsplit(":", 1) - line_num = int(line_str) - line_num = int(line_num) - - lines_by_file.setdefault(file_path, []).append((line_num, int(stats.get("hits", 0)), int(stats.get("time", 0)))) - line_contents[(file_path, line_num)] = stats.get("content", "") - - for file_path, line_stats in lines_by_file.items(): - sorted_line_stats = sorted(line_stats, key=lambda t: t[0]) - if not sorted_line_stats: - continue - start_lineno = sorted_line_stats[0][0] - grouped_timings[(file_path, start_lineno, Path(file_path).name)] = sorted_line_stats - - stats_dict["timings"] = grouped_timings - stats_dict["unit"] = 1e-9 - stats_dict["str_out"] = show_text_non_python(stats_dict, line_contents) - return stats_dict, None diff --git a/codeflash/verification/parse_test_output.py b/codeflash/verification/parse_test_output.py index d44a347fc..f1c1a9957 100644 --- a/codeflash/verification/parse_test_output.py +++ b/codeflash/verification/parse_test_output.py @@ -9,7 +9,6 @@ from typing import TYPE_CHECKING import dill as pickle -from junitparser.xunit2 import JUnitXml from lxml.etree import XMLParser, parse from codeflash.cli_cmds.console import DEBUG_MODE, console, logger @@ -17,23 +16,17 @@ file_name_from_test_module_name, file_path_from_module_name, get_run_tmp_file, - module_name_from_file_path, ) from codeflash.discovery.discover_unit_tests import discover_parameters_unittest -from codeflash.languages import is_java, is_javascript, is_python - -# Import Jest-specific parsing from the JavaScript language module -from 
codeflash.languages.javascript.parse import parse_jest_test_xml as _parse_jest_test_xml +from codeflash.languages.current import current_language_support from codeflash.models.models import ( ConcurrencyMetrics, FunctionTestInvocation, InvocationId, - TestingMode, TestResults, TestType, VerificationType, ) -from codeflash.verification.coverage_utils import CoverageUtils, JacocoCoverageUtils, JestCoverageUtils if TYPE_CHECKING: import subprocess @@ -143,16 +136,8 @@ def parse_concurrency_metrics(test_results: TestResults, function_name: str) -> ) -# Cache for resolved test file paths to avoid repeated rglob calls -_test_file_path_cache: dict[tuple[str, Path], Path | None] = {} - - -def clear_test_file_path_cache() -> None: - _test_file_path_cache.clear() - - def resolve_test_file_from_class_path(test_class_path: str, base_dir: Path) -> Path | None: - """Resolve test file path from pytest's test class path or Java class path. + """Resolve test file path from pytest's test class path. This function handles various cases where pytest's classname in JUnit XML includes parent directories that may already be part of base_dir. 
@@ -160,7 +145,6 @@ def resolve_test_file_from_class_path(test_class_path: str, base_dir: Path) -> P Args: test_class_path: The full class path from pytest (e.g., "project.tests.test_file.TestClass") or a file path from Jest (e.g., "tests/test_file.test.js") - or a Java class path (e.g., "com.example.AlgorithmsTest") base_dir: The base directory for tests (tests project root) Returns: @@ -172,61 +156,12 @@ def resolve_test_file_from_class_path(test_class_path: str, base_dir: Path) -> P >>> # Should find: /path/to/tests/unittest/test_file.py """ - # Check cache first - cache_key = (test_class_path, base_dir) - if cache_key in _test_file_path_cache: - cached_result = _test_file_path_cache[cache_key] - logger.debug(f"[RESOLVE] Cache hit for {test_class_path}: {cached_result}") - return cached_result - - # Handle Java class paths (convert dots to path and add .java extension) - # Java class paths look like "com.example.TestClass" and should map to - # src/test/java/com/example/TestClass.java - if is_java(): - logger.debug(f"[RESOLVE] Input: test_class_path={test_class_path}, base_dir={base_dir}") - # Convert dots to path separators - relative_path = test_class_path.replace(".", "/") + ".java" - - # Try various locations - # 1. Directly under base_dir - potential_path = base_dir / relative_path - logger.debug(f"[RESOLVE] Attempt 1: checking {potential_path}") - if potential_path.exists(): - logger.debug(f"[RESOLVE] Attempt 1 SUCCESS: found {potential_path}") - _test_file_path_cache[cache_key] = potential_path - return potential_path - - # 2. 
Under src/test/java relative to project root - project_root = base_dir.parent if base_dir.name == "java" else base_dir - while project_root.name not in ("", "/") and not (project_root / "pom.xml").exists(): - project_root = project_root.parent - if (project_root / "pom.xml").exists(): - potential_path = project_root / "src" / "test" / "java" / relative_path - logger.debug(f"[RESOLVE] Attempt 2: checking {potential_path} (project_root={project_root})") - if potential_path.exists(): - logger.debug(f"[RESOLVE] Attempt 2 SUCCESS: found {potential_path}") - _test_file_path_cache[cache_key] = potential_path - return potential_path - - # 3. Search for the file in base_dir and its subdirectories - file_name = test_class_path.rsplit(".", maxsplit=1)[-1] + ".java" - logger.debug(f"[RESOLVE] Attempt 3: rglob for {file_name} in {base_dir}") - for java_file in base_dir.rglob(file_name): - logger.debug(f"[RESOLVE] Attempt 3 SUCCESS: rglob found {java_file}") - _test_file_path_cache[cache_key] = java_file - return java_file - - logger.warning(f"[RESOLVE] FAILED to resolve {test_class_path} in base_dir {base_dir}") - _test_file_path_cache[cache_key] = None # Cache negative results too - return None - # Handle file paths (contain slashes and extensions like .js/.ts) if "/" in test_class_path or "\\" in test_class_path: # This is a file path, not a Python module path # Try the path as-is if it's absolute potential_path = Path(test_class_path) if potential_path.is_absolute() and potential_path.exists(): - _test_file_path_cache[cache_key] = potential_path return potential_path # Try to resolve relative to base_dir's parent (project root) @@ -236,7 +171,6 @@ def resolve_test_file_from_class_path(test_class_path: str, base_dir: Path) -> P try: potential_path = potential_path.resolve() if potential_path.exists(): - _test_file_path_cache[cache_key] = potential_path return potential_path except (OSError, RuntimeError): pass @@ -246,14 +180,17 @@ def 
resolve_test_file_from_class_path(test_class_path: str, base_dir: Path) -> P try: potential_path = potential_path.resolve() if potential_path.exists(): - _test_file_path_cache[cache_key] = potential_path return potential_path except (OSError, RuntimeError): pass - _test_file_path_cache[cache_key] = None # Cache negative results return None + # Let language-specific resolution handle non-Python class paths (e.g., Java package names) + lang_result = current_language_support().resolve_test_file_from_class_path(test_class_path, base_dir) + if lang_result is not None: + return lang_result + # First try the full path (Python module path) test_file_path = file_name_from_test_module_name(test_class_path, base_dir) @@ -282,8 +219,6 @@ def resolve_test_file_from_class_path(test_class_path: str, base_dir: Path) -> P if test_file_path: break - # Cache the result (could be None) - _test_file_path_cache[cache_key] = test_file_path return test_file_path @@ -502,9 +437,10 @@ def parse_sqlite_test_results(sqlite_file_path: Path, test_files: TestFiles, tes finally: db.close() - # Check if this is a JavaScript or Java test (use JSON) or Python test (use pickle) - is_jest = is_javascript() - is_java_test = is_java() + # Check serialization format: JavaScript uses JSON, Python uses pickle + from codeflash.languages.current import current_language_support + + is_json_format = current_language_support().test_result_serialization_format == "json" for val in data: try: @@ -517,7 +453,7 @@ def parse_sqlite_test_results(sqlite_file_path: Path, test_files: TestFiles, tes # - A module-style path: "tests.fibonacci.test.ts" (dots as separators) # - A file path: "tests/fibonacci.test.ts" (slashes as separators) # For Python, it's a module path (e.g., "tests.test_foo") that needs conversion - if is_jest: + if is_json_format: # Jest test file extensions (including .test.ts, .spec.ts patterns) jest_test_extensions = ( ".test.ts", @@ -562,36 +498,6 @@ def parse_sqlite_test_results(sqlite_file_path: 
Path, test_files: TestFiles, tes else: # Already a file path test_file_path = test_config.tests_project_rootdir / test_module_path - elif is_java_test: - # Java: test_module_path is the class name (e.g., "CounterTest") - # We need to find the test file by searching for it in the test files - test_file_path = None - for test_file in test_files.test_files: - # Check instrumented behavior file path - if test_file.instrumented_behavior_file_path: - # Java class name is stored without package prefix in SQLite - # Check if the file name matches the module path - file_stem = test_file.instrumented_behavior_file_path.stem - # The instrumented file has __perfinstrumented suffix - original_class = file_stem.replace("__perfinstrumented", "").replace( - "__perfonlyinstrumented", "" - ) - if test_module_path in (original_class, file_stem): - test_file_path = test_file.instrumented_behavior_file_path - break - # Check original file path - if test_file.original_file_path: - if test_file.original_file_path.stem == test_module_path: - test_file_path = test_file.original_file_path - break - if test_file_path is None: - # Fallback: try to find by searching in tests_project_rootdir - java_files = list(test_config.tests_project_rootdir.rglob(f"*{test_module_path}*.java")) - if java_files: - test_file_path = java_files[0] - else: - logger.debug(f"Could not find Java test file for module path: {test_module_path}") - test_file_path = test_config.tests_project_rootdir / f"{test_module_path}.java" else: # Python: convert module path to file path test_file_path = file_path_from_module_name(test_module_path, test_config.tests_project_rootdir) @@ -605,27 +511,31 @@ def parse_sqlite_test_results(sqlite_file_path: Path, test_files: TestFiles, tes else: # Try original_file_path first (for existing tests that were instrumented) test_type = test_files.get_test_type_by_original_file_path(test_file_path) + logger.debug(f"[PARSE-DEBUG] test_module={test_module_path}, test_file_path={test_file_path}") 
+ logger.debug(f"[PARSE-DEBUG] by_original_file_path: {test_type}") # If not found, try instrumented_behavior_file_path (for generated tests) if test_type is None: test_type = test_files.get_test_type_by_instrumented_file_path(test_file_path) - # Default to GENERATED_REGRESSION for Jest/Java tests when test type can't be determined - if test_type is None and (is_jest or is_java_test): + logger.debug(f"[PARSE-DEBUG] by_instrumented_file_path: {test_type}") + # Default to GENERATED_REGRESSION for Jest tests when test type can't be determined + if test_type is None and is_json_format: test_type = TestType.GENERATED_REGRESSION + logger.debug("[PARSE-DEBUG] defaulting to GENERATED_REGRESSION (Jest)") elif test_type is None: # Skip results where test type cannot be determined logger.debug(f"Skipping result for {test_function_name}: could not determine test type") continue + logger.debug(f"[PARSE-DEBUG] FINAL test_type={test_type}") # Deserialize return value - # For Jest/Java: Store as serialized JSON - comparison happens via language-specific comparator + # For Jest: Skip deserialization - comparison happens via language-specific comparator # For Python: Use pickle to deserialize ret_val = None if loop_index == 1 and val[7]: try: - if is_jest or is_java_test: - # Jest/Java comparison happens via language-specific comparator + if is_json_format: + # Jest comparison happens via Node.js script (language_support.compare_test_results) # Store a marker indicating data exists but is not deserialized in Python - # For Java, val[7] is a JSON string from Gson serialization ret_val = ("__serialized__", val[7]) else: # Python uses pickle serialization @@ -668,309 +578,7 @@ def parse_test_xml( test_config: TestConfig, run_result: subprocess.CompletedProcess | None = None, ) -> TestResults: - # Route to Jest-specific parser for JavaScript/TypeScript tests - if is_javascript(): - return _parse_jest_test_xml( - test_xml_file_path, - test_files, - test_config, - run_result, - 
parse_func=parse_func, - resolve_test_file_from_class_path=resolve_test_file_from_class_path, - ) - - test_results = TestResults() - # Parse unittest output - if not test_xml_file_path.exists(): - logger.warning(f"No test results for {test_xml_file_path} found.") - console.rule() - return test_results - try: - xml = JUnitXml.fromfile(str(test_xml_file_path), parse_func=parse_func) - except Exception as e: - logger.warning(f"Failed to parse {test_xml_file_path} as JUnitXml. Exception: {e}") - return test_results - # Always use tests_project_rootdir since pytest is now the test runner for all frameworks - base_dir = test_config.tests_project_rootdir - - # For Java: pre-parse fallback stdout once (not per testcase) to avoid O(n²) complexity - java_fallback_stdout = None - java_fallback_begin_matches = None - java_fallback_end_matches = None - if is_java() and run_result is not None: - try: - fallback_stdout = run_result.stdout if isinstance(run_result.stdout, str) else run_result.stdout.decode() - begin_matches = list(start_pattern.finditer(fallback_stdout)) - if begin_matches: - java_fallback_stdout = fallback_stdout - java_fallback_begin_matches = begin_matches - java_fallback_end_matches = {} - for match in end_pattern.finditer(fallback_stdout): - groups = match.groups() - java_fallback_end_matches[groups[:5]] = match - logger.debug(f"Java: Found {len(begin_matches)} timing markers in subprocess stdout (fallback)") - except (AttributeError, UnicodeDecodeError): - pass - - for suite in xml: - for testcase in suite: - class_name = testcase.classname - test_file_name = suite._elem.attrib.get("file") # noqa: SLF001 - if ( - test_file_name == f"unittest{os.sep}loader.py" - and class_name == "unittest.loader._FailedTest" - and suite.errors == 1 - and suite.tests == 1 - ): - # This means that the test failed to load, so we don't want to crash on it - logger.info("Test failed to load, skipping it.") - if run_result is not None: - if isinstance(run_result.stdout, str) and 
isinstance(run_result.stderr, str): - logger.info(f"Test log - STDOUT : {run_result.stdout} \n STDERR : {run_result.stderr}") - else: - logger.info( - f"Test log - STDOUT : {run_result.stdout.decode()} \n STDERR : {run_result.stderr.decode()}" - ) - return test_results - - test_class_path = testcase.classname - logger.debug(f"[PARSE-XML] Processing testcase: classname={test_class_path}, name={testcase.name}") - try: - if testcase.name is None: - logger.debug( - f"testcase.name is None for testcase {testcase!r} in file {test_xml_file_path}, skipping" - ) - continue - test_function = testcase.name.split("[", 1)[0] if "[" in testcase.name else testcase.name - except (AttributeError, TypeError) as e: - msg = ( - f"Accessing testcase.name in parse_test_xml for testcase {testcase!r} in file" - f" {test_xml_file_path} has exception: {e}" - ) - logger.exception(msg) - continue - if test_file_name is None: - if test_class_path: - # TODO : This might not be true if the test is organized under a class - test_file_path = resolve_test_file_from_class_path(test_class_path, base_dir) - - if test_file_path is None: - logger.warning(f"Could not find the test for file name - {test_class_path} ") - continue - else: - test_file_path = file_path_from_module_name(test_function, base_dir) - else: - test_file_path = base_dir / test_file_name - assert test_file_path, f"Test file path not found for {test_file_name}" - - if not test_file_path.exists(): - logger.warning(f"Could not find the test for file name - {test_file_path} ") - continue - # Try to match by instrumented file path first (for generated/instrumented tests) - test_type = test_files.get_test_type_by_instrumented_file_path(test_file_path) - if test_type is None: - # Fallback: try to match by original file path (for existing unit tests that were instrumented) - # JUnit XML may reference the original class name, resolving to the original file path - test_type = test_files.get_test_type_by_original_file_path(test_file_path) - if 
test_type is None: - # Log registered paths for debugging - registered_paths = [str(tf.instrumented_behavior_file_path) for tf in test_files.test_files] - logger.warning( - f"Test type not found for '{test_file_path}'. " - f"Registered test files: {registered_paths}. Skipping test case." - ) - continue - test_module_path = module_name_from_file_path(test_file_path, test_config.tests_project_rootdir) - result = testcase.is_passed # TODO: See for the cases of ERROR and SKIPPED - test_class = None - if class_name is not None and class_name.startswith(test_module_path): - test_class = class_name[len(test_module_path) + 1 :] # +1 for the dot, gets Unittest class name - - loop_index = int(testcase.name.split("[ ")[-1][:-2]) if testcase.name and "[" in testcase.name else 1 - - timed_out = False - if len(testcase.result) > 1: - logger.debug(f"!!!!!Multiple results for {testcase.name or ''} in {test_xml_file_path}!!!") - if len(testcase.result) == 1: - message = testcase.result[0].message - if message is not None: - message = message.lower() - if "failed: timeout >" in message or "timed out" in message: - timed_out = True - - sys_stdout = testcase.system_out or "" - - # Use different patterns for Java (5-field start, 6-field end) vs Python (6-field both) - # Java format: !$######module:class.test:func:loop:iter######$! (start) - # !######module:class.test:func:loop:iter:duration######! 
(end) - if is_java(): - begin_matches = list(start_pattern.finditer(sys_stdout)) - end_matches = {} - for match in end_pattern.finditer(sys_stdout): - groups = match.groups() - # Key is first 5 groups (module, class.test, func, loop, iter) - end_matches[groups[:5]] = match - - # For Java: fallback to pre-parsed subprocess stdout when XML system-out has no timing markers - # This happens when using JUnit Console Launcher directly (bypassing Maven) - if not begin_matches and java_fallback_begin_matches is not None: - sys_stdout = java_fallback_stdout - begin_matches = java_fallback_begin_matches - end_matches = java_fallback_end_matches - else: - begin_matches = list(matches_re_start.finditer(sys_stdout)) - end_matches = {} - for match in matches_re_end.finditer(sys_stdout): - groups = match.groups() - if len(groups[5].split(":")) > 1: - iteration_id = groups[5].split(":")[0] - groups = (*groups[:5], iteration_id) - end_matches[groups] = match - - # TODO: I am not sure if this is the correct approach. see if this was needed for test - # pass/fail status extraction in python. otherwise not needed. 
- if not begin_matches: - # For Java tests, use the JUnit XML time attribute for runtime - runtime_from_xml = None - # if is_java(): - # try: - # # JUnit XML time is in seconds, convert to nanoseconds - # # Use a minimum of 1000ns (1 microsecond) for any successful test - # # to avoid 0 runtime being treated as "no runtime" - # test_time = float(testcase.time) if hasattr(testcase, "time") and testcase.time else 0.0 - # runtime_from_xml = max(int(test_time * 1_000_000_000), 1000) - # except (ValueError, TypeError): - # # If we can't get time from XML, use 1 microsecond as minimum - # runtime_from_xml = 1000 - - test_results.add( - FunctionTestInvocation( - loop_index=loop_index, - id=InvocationId( - test_module_path=test_module_path, - test_class_name=test_class, - test_function_name=test_function, - function_getting_tested="", # TODO: Fix this - iteration_id="", - ), - file_name=test_file_path, - runtime=runtime_from_xml, - test_framework=test_config.test_framework, - did_pass=result, - test_type=test_type, - return_value=None, - timed_out=timed_out, - stdout="", - ) - ) - - else: - for match_index, match in enumerate(begin_matches): - groups = match.groups() - - # Java and Python have different marker formats: - # Java: 5 groups - (module, class.test, func, loop_index, iteration_id) - # Python: 6 groups - (module, class.test, _, func, loop_index, iteration_id) - if is_java(): - # Java format: !$######module:class.test:func:loop:iter######$! - end_key = groups[:5] # Use all 5 groups as key - end_match = end_matches.get(end_key) - iteration_id = groups[4] # iter is at index 4 - loop_idx = int(groups[3]) # loop is at index 3 - test_module = groups[0] # module - # groups[1] is "class.testMethod" — extract class and test name - class_test_field = groups[1] - if "." 
in class_test_field: - test_class_str, test_func = class_test_field.rsplit(".", 1) - else: - test_class_str = class_test_field - test_func = test_function # Fallback to testcase name from XML - func_getting_tested = groups[2] # func being tested - runtime = None - - if end_match: - stdout = sys_stdout[match.end() : end_match.start()] - runtime = int(end_match.groups()[5]) # duration is at index 5 - elif match_index == len(begin_matches) - 1: - stdout = sys_stdout[match.end() :] - else: - stdout = sys_stdout[match.end() : begin_matches[match_index + 1].start()] - - test_results.add( - FunctionTestInvocation( - loop_index=loop_idx, - id=InvocationId( - test_module_path=test_module, - test_class_name=test_class_str if test_class_str else None, - test_function_name=test_func, - function_getting_tested=func_getting_tested, - iteration_id=iteration_id, - ), - file_name=test_file_path, - runtime=runtime, - test_framework=test_config.test_framework, - did_pass=result, - test_type=test_type, - return_value=None, - timed_out=timed_out, - stdout=stdout, - ) - ) - else: - # Python format: 6 groups - end_match = end_matches.get(groups) - iteration_id, runtime = groups[5], None - if end_match: - stdout = sys_stdout[match.end() : end_match.start()] - split_val = end_match.groups()[5].split(":") - if len(split_val) > 1: - iteration_id = split_val[0] - runtime = int(split_val[1]) - else: - iteration_id, runtime = split_val[0], None - elif match_index == len(begin_matches) - 1: - stdout = sys_stdout[match.end() :] - else: - stdout = sys_stdout[match.end() : begin_matches[match_index + 1].start()] - - test_results.add( - FunctionTestInvocation( - loop_index=int(groups[4]), - id=InvocationId( - test_module_path=groups[0], - test_class_name=None if groups[1] == "" else groups[1][:-1], - test_function_name=groups[2], - function_getting_tested=groups[3], - iteration_id=iteration_id, - ), - file_name=test_file_path, - runtime=runtime, - test_framework=test_config.test_framework, - 
did_pass=result, - test_type=test_type, - return_value=None, - timed_out=timed_out, - stdout=stdout, - ) - ) - - if not test_results: - # Show actual test file paths being used (behavior or original), not just original_file_path - # For AI-generated tests, original_file_path is None, so show instrumented_behavior_file_path instead - test_paths_display = [ - str(test_file.instrumented_behavior_file_path or test_file.original_file_path) - for test_file in test_files.test_files - ] - logger.info(f"Tests {test_paths_display} failed to run, skipping") - if run_result is not None: - stdout, stderr = "", "" - try: - stdout = run_result.stdout.decode() - stderr = run_result.stderr.decode() - except AttributeError: - stdout = run_result.stderr - logger.debug(f"Test log - STDOUT : {stdout} \n STDERR : {stderr}") - return test_results + return current_language_support().parse_test_xml(test_xml_file_path, test_files, test_config, run_result) def merge_test_results( @@ -1182,7 +790,6 @@ def parse_test_results( code_context: CodeOptimizationContext | None = None, run_result: subprocess.CompletedProcess | None = None, skip_sqlite_cleanup: bool = False, - testing_type: TestingMode = TestingMode.BEHAVIOR, ) -> tuple[TestResults, CoverageData | None]: test_results_xml = parse_test_xml( test_xml_path, test_files=test_files, test_config=test_config, run_result=run_result @@ -1195,7 +802,7 @@ def parse_test_results( try: sql_results_file = get_run_tmp_file(Path(f"test_return_values_{optimization_iteration}.sqlite")) - if sql_results_file.exists() and testing_type != TestingMode.PERFORMANCE: + if sql_results_file.exists(): test_results_data = parse_sqlite_test_results( sqlite_file_path=sql_results_file, test_files=test_files, test_config=test_config ) @@ -1206,7 +813,9 @@ def parse_test_results( # Also try to read legacy binary format for Python tests # Binary file may contain additional results (e.g., from codeflash_wrap) even if SQLite has data # from @codeflash_capture. 
We need to merge both sources. - if is_python(): + from codeflash.languages.current import current_language_support as _cls + + if _cls().test_result_serialization_format == "pickle": try: bin_results_file = get_run_tmp_file(Path(f"test_return_values_{optimization_iteration}.bin")) if bin_results_file.exists(): @@ -1230,7 +839,6 @@ def parse_test_results( get_run_tmp_file(Path("vitest_results.xml")).unlink(missing_ok=True) get_run_tmp_file(Path("vitest_perf_results.xml")).unlink(missing_ok=True) get_run_tmp_file(Path("vitest_line_profile_results.xml")).unlink(missing_ok=True) - test_xml_path.unlink(missing_ok=True) # For Jest tests, SQLite cleanup is deferred until after comparison # (comparison happens via language_support.compare_test_results) @@ -1239,56 +847,25 @@ def parse_test_results( results = merge_test_results(test_results_xml, test_results_data, test_config.test_framework) - # Bug #10 Fix: For Java performance tests, preserve subprocess stdout containing timing markers - # This is needed for calculate_function_throughput_from_test_results to work correctly - if is_java() and testing_type == TestingMode.PERFORMANCE and run_result is not None: - try: - # Extract stdout from subprocess result containing timing markers - if isinstance(run_result.stdout, bytes): - results.perf_stdout = run_result.stdout.decode("utf-8", errors="replace") - elif isinstance(run_result.stdout, str): - results.perf_stdout = run_result.stdout - logger.debug( - f"Bug #10 Fix: Set perf_stdout for Java performance tests ({len(results.perf_stdout or '')} chars)" - ) - except Exception as e: - logger.debug(f"Bug #10 Fix: Failed to set perf_stdout: {e}") - all_args = False coverage = None if coverage_database_file and source_file and code_context and function_name: all_args = True - if is_javascript(): - # Jest uses coverage-final.json (coverage_database_file points to this) - coverage = JestCoverageUtils.load_from_jest_json( - coverage_json_path=coverage_database_file, - 
function_name=function_name, - code_context=code_context, - source_code_path=source_file, - ) - elif is_java(): - # Java uses JaCoCo XML report (coverage_database_file points to jacoco.xml) - coverage = JacocoCoverageUtils.load_from_jacoco_xml( - jacoco_xml_path=coverage_database_file, - function_name=function_name, - code_context=code_context, - source_code_path=source_file, - ) - else: - # Python uses coverage.py SQLite database - coverage = CoverageUtils.load_from_sqlite_database( - database_path=coverage_database_file, - config_path=coverage_config_file, - source_code_path=source_file, - code_context=code_context, - function_name=function_name, - ) - coverage.log_coverage() - try: - failures = parse_test_failures_from_stdout(run_result.stdout) - results.test_failures = failures - except Exception as e: - logger.exception(e) + coverage = _cls().load_coverage( + coverage_database_file=coverage_database_file, + function_name=function_name, + code_context=code_context, + source_file=source_file, + coverage_config_file=coverage_config_file, + ) + if coverage: + coverage.log_coverage() + if run_result: + try: + failures = parse_test_failures_from_stdout(run_result.stdout) + results.test_failures = failures + except Exception as e: + logger.exception(e) # Cleanup Jest coverage directory after coverage is parsed import shutil diff --git a/codeflash/verification/pytest_plugin.py b/codeflash/verification/pytest_plugin.py index 0b7144356..5e77cf19e 100644 --- a/codeflash/verification/pytest_plugin.py +++ b/codeflash/verification/pytest_plugin.py @@ -11,6 +11,7 @@ import sys import time as _time_module import warnings +from importlib.util import find_spec from pathlib import Path from typing import TYPE_CHECKING, Callable, Optional from unittest import TestCase @@ -30,6 +31,12 @@ from _pytest.main import Session from _pytest.python import Metafunc +_HAS_NUMPY = find_spec("numpy") is not None + +_PROTECTED_MODULES = frozenset( + {"gc", "inspect", "os", "sys", "time", 
"functools", "pathlib", "typing", "dill", "pytest", "importlib"} +) + SECONDS_IN_HOUR: float = 3600 SECONDS_IN_MINUTE: float = 60 SHORTEST_AMOUNT_OF_TIME: float = 0 @@ -172,14 +179,12 @@ def mock_random() -> float: builtins._mock_datetime_utcnow = mock_datetime_utcnow # noqa: SLF001 # Patch numpy.random if available - try: + if _HAS_NUMPY: import numpy as np # Use modern numpy random generator approach np.random.default_rng(42) np.random.seed(42) # Keep legacy seed for compatibility # noqa: NPY002 - except ImportError: - pass # Patch os.urandom if needed try: @@ -301,6 +306,7 @@ def get_runtime_from_stdout(stdout: str) -> Optional[int]: _NODEID_BRACKET_PATTERN = re.compile(r"\s*\[\s*\d+\s*\]\s*$") +_NODEID_LOOP_PATTERN = re.compile(r"\[ \d+ \]") def should_stop( @@ -351,6 +357,7 @@ def __init__(self, config: Config) -> None: self.enable_stability_check: bool = ( str(getattr(config.option, "codeflash_stability_check", "false")).lower() == "true" ) + self._module_clearables: dict[str, list[Callable]] = {} @pytest.hookimpl def pytest_runtest_logreport(self, report: pytest.TestReport) -> None: @@ -401,7 +408,7 @@ def pytest_runtestloop(self, session: Session) -> bool: if self.enable_stability_check: elapsed_ns += _ORIGINAL_PERF_COUNTER_NS() - loop_start - best_runtime_until_now = sum([min(data) for data in self.runtime_data_by_test_case.values()]) + best_runtime_until_now = sum(min(data) for data in self.runtime_data_by_test_case.values()) if best_runtime_until_now > 0: runtimes.append(best_runtime_until_now) @@ -422,57 +429,55 @@ def pytest_runtestloop(self, session: Session) -> bool: return True def _clear_lru_caches(self, item: pytest.Item) -> None: - processed_functions: set[Callable] = set() - protected_modules = { - "gc", - "inspect", - "os", - "sys", - "time", - "functools", - "pathlib", - "typing", - "dill", - "pytest", - "importlib", - } - - def _clear_cache_for_object(obj: obj) -> None: - if obj in processed_functions: - return - processed_functions.add(obj) 
+ func = item.function # type: ignore[attr-defined] + + # Always clear the test function itself + if hasattr(func, "cache_clear") and callable(func.cache_clear): + with contextlib.suppress(Exception): + func.cache_clear() + + module_name = getattr(func, "__module__", None) + if not module_name: + return + + try: + clearables = self._module_clearables.get(module_name) + if clearables is None: + clearables = self._scan_module_clearables(module_name) + self._module_clearables[module_name] = clearables + + for obj in clearables: + with contextlib.suppress(Exception): + obj.cache_clear() + except Exception: + pass + + def _scan_module_clearables(self, module_name: str) -> list[Callable]: + module = sys.modules.get(module_name) + if not module: + return [] + + clearables: list[Callable] = [] + for _, obj in inspect.getmembers(module): + if not callable(obj): + continue if hasattr(obj, "__wrapped__"): - module_name = obj.__wrapped__.__module__ + top_module = obj.__wrapped__.__module__ else: try: obj_module = inspect.getmodule(obj) - module_name = obj_module.__name__.split(".")[0] if obj_module is not None else None + top_module = obj_module.__name__.split(".")[0] if obj_module is not None else None except Exception: - module_name = None + top_module = None - if module_name in protected_modules: - return + if top_module in _PROTECTED_MODULES: + continue if hasattr(obj, "cache_clear") and callable(obj.cache_clear): - with contextlib.suppress(Exception): - obj.cache_clear() + clearables.append(obj) - _clear_cache_for_object(item.function) # type: ignore[attr-defined] - - try: - if hasattr(item.function, "__module__"): # type: ignore[attr-defined] - module_name = item.function.__module__ # type: ignore[attr-defined] - try: - module = sys.modules.get(module_name) - if module: - for _, obj in inspect.getmembers(module): - if callable(obj): - _clear_cache_for_object(obj) - except Exception: - pass - except Exception: - pass + return clearables def _set_nodeid(self, nodeid: str, 
count: int) -> str: """Set loop count when using duration. @@ -481,10 +486,10 @@ def _set_nodeid(self, nodeid: str, count: int) -> str: :param count: Current loop count. :return: Formatted string for test name. """ - pattern = r"\[ \d+ \]" run_str = f"[ {count} ]" os.environ["CODEFLASH_LOOP_INDEX"] = str(count) - return re.sub(pattern, run_str, nodeid) if re.search(pattern, nodeid) else nodeid + run_str + result, n = _NODEID_LOOP_PATTERN.subn(run_str, nodeid) + return result if n else nodeid + run_str def _get_delay_time(self, session: Session) -> float: """Extract delay time from session. diff --git a/codeflash/verification/test_runner.py b/codeflash/verification/test_runner.py index e797dc6e1..550f6bb05 100644 --- a/codeflash/verification/test_runner.py +++ b/codeflash/verification/test_runner.py @@ -1,29 +1,17 @@ from __future__ import annotations -import contextlib import re -import shlex import shutil import subprocess -import sys -from pathlib import Path from typing import TYPE_CHECKING -from codeflash.cli_cmds.console import logger -from codeflash.code_utils.code_utils import custom_addopts, get_run_tmp_file -from codeflash.code_utils.compat import IS_POSIX, SAFE_SYS_EXECUTABLE -from codeflash.code_utils.config_consts import TOTAL_LOOPING_TIME_EFFECTIVE -from codeflash.code_utils.shell_utils import get_cross_platform_subprocess_run_args -from codeflash.languages import is_python -from codeflash.languages.python.static_analysis.coverage_utils import prepare_coverage_files -from codeflash.languages.registry import get_language_support, get_language_support_by_framework -from codeflash.models.models import TestFiles, TestType - if TYPE_CHECKING: - from codeflash.models.models import TestFiles + from pathlib import Path -BEHAVIORAL_BLOCKLISTED_PLUGINS = ["benchmark", "codspeed", "xdist", "sugar"] -BENCHMARKING_BLOCKLISTED_PLUGINS = ["codspeed", "cov", "benchmark", "profiling", "xdist", "sugar"] +from codeflash.cli_cmds.console import logger +from 
codeflash.code_utils.code_utils import custom_addopts +from codeflash.code_utils.shell_utils import get_cross_platform_subprocess_run_args +from codeflash.languages.registry import get_language_support # Pattern to extract timing from stdout markers: !######...:######! # Jest markers have multiple colons: !######module:test:func:loop:id:duration######! @@ -112,312 +100,3 @@ def execute_test_subprocess( cwd=cwd, env=env, timeout=timeout, check=False, text=True, capture_output=True ) return subprocess.run(cmd_list, **run_args) # noqa: PLW1510 - - -def run_behavioral_tests( - test_paths: TestFiles, - test_framework: str, - test_env: dict[str, str], - cwd: Path, - *, - pytest_timeout: int | None = None, - pytest_cmd: str = "pytest", - pytest_target_runtime_seconds: float = TOTAL_LOOPING_TIME_EFFECTIVE, - enable_coverage: bool = False, - js_project_root: Path | None = None, - candidate_index: int = 0, -) -> tuple[Path, subprocess.CompletedProcess, Path | None, Path | None]: - """Run behavioral tests with optional coverage.""" - # Check if there's a language support for this test framework that implements run_behavioral_tests - language_support = get_language_support_by_framework(test_framework) - if language_support is not None and hasattr(language_support, "run_behavioral_tests"): - # Java tests need longer timeout due to Maven startup overhead - # Use Java-specific timeout if no explicit timeout provided - from codeflash.code_utils.config_consts import JAVA_TESTCASE_TIMEOUT - - effective_timeout = pytest_timeout - if test_framework in ("junit4", "junit5", "testng") and pytest_timeout is not None: - # For Java, use a minimum timeout to account for Maven overhead - effective_timeout = max(pytest_timeout, JAVA_TESTCASE_TIMEOUT) - if effective_timeout != pytest_timeout: - logger.debug( - f"Increased Java test timeout from {pytest_timeout}s to {effective_timeout}s " - "to account for Maven startup overhead" - ) - - return language_support.run_behavioral_tests( - 
test_paths=test_paths, - test_env=test_env, - cwd=cwd, - timeout=effective_timeout, - project_root=js_project_root, - enable_coverage=enable_coverage, - candidate_index=candidate_index, - ) - if is_python(): - test_files: list[str] = [] - for file in test_paths.test_files: - if file.test_type == TestType.REPLAY_TEST: - # Replay tests need specific test targeting because one file contains tests for multiple functions - if file.tests_in_file: - test_files.extend( - [ - str(file.instrumented_behavior_file_path) + "::" + test.test_function - for test in file.tests_in_file - ] - ) - else: - test_files.append(str(file.instrumented_behavior_file_path)) - - pytest_cmd_list = ( - shlex.split(f"{SAFE_SYS_EXECUTABLE} -m pytest", posix=IS_POSIX) - if pytest_cmd == "pytest" - else [SAFE_SYS_EXECUTABLE, "-m", *shlex.split(pytest_cmd, posix=IS_POSIX)] - ) - test_files = list(set(test_files)) # remove multiple calls in the same test function - - common_pytest_args = [ - "--capture=tee-sys", - "-q", - "--codeflash_loops_scope=session", - "--codeflash_min_loops=1", - "--codeflash_max_loops=1", - f"--codeflash_seconds={pytest_target_runtime_seconds}", - ] - if pytest_timeout is not None: - common_pytest_args.append(f"--timeout={pytest_timeout}") - - result_file_path = get_run_tmp_file(Path("pytest_results.xml")) - result_args = [f"--junitxml={result_file_path.as_posix()}", "-o", "junit_logging=all"] - - pytest_test_env = test_env.copy() - pytest_test_env["PYTEST_PLUGINS"] = "codeflash.verification.pytest_plugin" - - if enable_coverage: - coverage_database_file, coverage_config_file = prepare_coverage_files() - # disable jit for coverage - pytest_test_env["NUMBA_DISABLE_JIT"] = str(1) - pytest_test_env["TORCHDYNAMO_DISABLE"] = str(1) - pytest_test_env["PYTORCH_JIT"] = str(0) - pytest_test_env["TF_XLA_FLAGS"] = "--tf_xla_auto_jit=0" - pytest_test_env["TF_ENABLE_ONEDNN_OPTS"] = str(0) - pytest_test_env["JAX_DISABLE_JIT"] = str(0) - - is_windows = sys.platform == "win32" - if is_windows: 
- # On Windows, delete coverage database file directly instead of using 'coverage erase', to avoid locking issues - if coverage_database_file.exists(): - with contextlib.suppress(PermissionError, OSError): - coverage_database_file.unlink() - else: - cov_erase = execute_test_subprocess( - shlex.split(f"{SAFE_SYS_EXECUTABLE} -m coverage erase"), cwd=cwd, env=pytest_test_env, timeout=30 - ) # this cleanup is necessary to avoid coverage data from previous runs, if there are any, then the current run will be appended to the previous data, which skews the results - logger.debug(cov_erase) - coverage_cmd = [ - SAFE_SYS_EXECUTABLE, - "-m", - "coverage", - "run", - f"--rcfile={coverage_config_file.as_posix()}", - "-m", - ] - - if pytest_cmd == "pytest": - coverage_cmd.extend(["pytest"]) - else: - coverage_cmd.extend(shlex.split(pytest_cmd, posix=IS_POSIX)[1:]) - - blocklist_args = [f"-p no:{plugin}" for plugin in BEHAVIORAL_BLOCKLISTED_PLUGINS if plugin != "cov"] - results = execute_test_subprocess( - coverage_cmd + common_pytest_args + blocklist_args + result_args + test_files, - cwd=cwd, - env=pytest_test_env, - # Timeout for test subprocess execution (seconds). - # Override via CODEFLASH_TEST_TIMEOUT env var. Default: 600s. - timeout=600, - ) - logger.debug( - f"Result return code: {results.returncode}, " - f"{'Result stderr:' + str(results.stderr) if results.stderr else ''}" - ) - else: - blocklist_args = [f"-p no:{plugin}" for plugin in BEHAVIORAL_BLOCKLISTED_PLUGINS] - - results = execute_test_subprocess( - pytest_cmd_list + common_pytest_args + blocklist_args + result_args + test_files, - cwd=cwd, - env=pytest_test_env, - # Timeout for test subprocess execution (seconds). - # Override via CODEFLASH_TEST_TIMEOUT env var. Default: 600s. 
- timeout=600, - ) - logger.debug( - f"""Result return code: {results.returncode}, {"Result stderr:" + str(results.stderr) if results.stderr else ""}""" - ) - else: - msg = f"Unsupported test framework: {test_framework}" - raise ValueError(msg) - - return ( - result_file_path, - results, - coverage_database_file if enable_coverage else None, - coverage_config_file if enable_coverage else None, - ) - - -def run_line_profile_tests( - test_paths: TestFiles, - pytest_cmd: str, - test_env: dict[str, str], - cwd: Path, - test_framework: str, - *, - pytest_target_runtime_seconds: float = TOTAL_LOOPING_TIME_EFFECTIVE, - pytest_timeout: int | None = None, - pytest_min_loops: int = 5, - pytest_max_loops: int = 100_000, - js_project_root: Path | None = None, - line_profiler_output_file: Path | None = None, -) -> tuple[Path, subprocess.CompletedProcess]: - # Check if there's a language support for this test framework that implements run_line_profile_tests - language_support = get_language_support_by_framework(test_framework) - if language_support is not None and hasattr(language_support, "run_line_profile_tests"): - from codeflash.code_utils.config_consts import JAVA_TESTCASE_TIMEOUT - - effective_timeout = pytest_timeout - if test_framework in ("junit4", "junit5", "testng") and pytest_timeout is not None: - # For Java, use a minimum timeout to account for Maven overhead - effective_timeout = max(pytest_timeout, JAVA_TESTCASE_TIMEOUT) - return language_support.run_line_profile_tests( - test_paths=test_paths, - test_env=test_env, - cwd=cwd, - timeout=effective_timeout, - project_root=js_project_root, - line_profile_output_file=line_profiler_output_file, - ) - if is_python(): # pytest runs both pytest and unittest tests - pytest_cmd_list = ( - shlex.split(f"{SAFE_SYS_EXECUTABLE} -m pytest", posix=IS_POSIX) - if pytest_cmd == "pytest" - else shlex.split(pytest_cmd) - ) - # Always use file path - pytest discovers all tests including parametrized ones - test_files: list[str] = 
list( - {str(file.benchmarking_file_path) for file in test_paths.test_files} - ) # remove multiple calls in the same test function - pytest_args = [ - "--capture=tee-sys", - "-q", - "--codeflash_loops_scope=session", - "--codeflash_min_loops=1", - "--codeflash_max_loops=1", - f"--codeflash_seconds={pytest_target_runtime_seconds}", - ] - if pytest_timeout is not None: - pytest_args.append(f"--timeout={pytest_timeout}") - result_file_path = get_run_tmp_file(Path("pytest_results.xml")) - result_args = [f"--junitxml={result_file_path.as_posix()}", "-o", "junit_logging=all"] - pytest_test_env = test_env.copy() - pytest_test_env["PYTEST_PLUGINS"] = "codeflash.verification.pytest_plugin" - blocklist_args = [f"-p no:{plugin}" for plugin in BENCHMARKING_BLOCKLISTED_PLUGINS] - pytest_test_env["LINE_PROFILE"] = "1" - results = execute_test_subprocess( - pytest_cmd_list + pytest_args + blocklist_args + result_args + test_files, - cwd=cwd, - env=pytest_test_env, - # Timeout for line-profiling subprocess execution (seconds). - # Override via CODEFLASH_TEST_TIMEOUT env var. Default: 600s. 
- timeout=600, - ) - else: - msg = f"Unsupported test framework: {test_framework}" - raise ValueError(msg) - return result_file_path, results - - -def run_benchmarking_tests( - test_paths: TestFiles, - pytest_cmd: str, - test_env: dict[str, str], - cwd: Path, - test_framework: str, - *, - target_runtime_seconds: float = TOTAL_LOOPING_TIME_EFFECTIVE, - timeout: int | None = None, - min_outer_loops: int = 5, - max_outer_loops: int = 100_000, - inner_iterations: int | None = None, - js_project_root: Path | None = None, -) -> tuple[Path, subprocess.CompletedProcess]: - logger.debug(f"run_benchmarking_tests called: framework={test_framework}, num_files={len(test_paths.test_files)}") - # Check if there's a language support for this test framework that implements run_benchmarking_tests - language_support = get_language_support_by_framework(test_framework) - if language_support is not None and hasattr(language_support, "run_benchmarking_tests"): - # Java tests need longer timeout due to Maven startup overhead - # Use Java-specific timeout if no explicit timeout provided - from codeflash.code_utils.config_consts import JAVA_TESTCASE_TIMEOUT - - effective_timeout = timeout - if test_framework in ("junit4", "junit5", "testng") and timeout is not None: - # For Java, use a minimum timeout to account for Maven overhead - effective_timeout = max(timeout, JAVA_TESTCASE_TIMEOUT) - if effective_timeout != timeout: - logger.debug( - f"Increased Java test timeout from {timeout}s to {effective_timeout}s " - "to account for Maven startup overhead" - ) - - inner_iterations_kwargs = {"inner_iterations": inner_iterations} if inner_iterations is not None else {} - return language_support.run_benchmarking_tests( - test_paths=test_paths, - test_env=test_env, - cwd=cwd, - timeout=effective_timeout, - project_root=js_project_root, - min_loops=min_outer_loops, - max_loops=max_outer_loops, - target_duration_seconds=target_runtime_seconds, - **inner_iterations_kwargs, - ) - if is_python(): # 
pytest runs both pytest and unittest tests - pytest_cmd_list = ( - shlex.split(f"{SAFE_SYS_EXECUTABLE} -m pytest", posix=IS_POSIX) - if pytest_cmd == "pytest" - else shlex.split(pytest_cmd) - ) - # Always use file path - pytest discovers all tests including parametrized ones - test_files: list[str] = list( - {str(file.benchmarking_file_path) for file in test_paths.test_files} - ) # remove multiple calls in the same test function - pytest_args = [ - "--capture=tee-sys", - "-q", - "--codeflash_loops_scope=session", - f"--codeflash_min_loops={min_outer_loops}", - f"--codeflash_max_loops={max_outer_loops}", - f"--codeflash_seconds={target_runtime_seconds}", - "--codeflash_stability_check=true", - ] - if timeout is not None: - pytest_args.append(f"--timeout={timeout}") - - result_file_path = get_run_tmp_file(Path("pytest_results.xml")) - result_args = [f"--junitxml={result_file_path.as_posix()}", "-o", "junit_logging=all"] - pytest_test_env = test_env.copy() - pytest_test_env["PYTEST_PLUGINS"] = "codeflash.verification.pytest_plugin" - blocklist_args = [f"-p no:{plugin}" for plugin in BENCHMARKING_BLOCKLISTED_PLUGINS] - results = execute_test_subprocess( - pytest_cmd_list + pytest_args + blocklist_args + result_args + test_files, - cwd=cwd, - env=pytest_test_env, - # Timeout for benchmarking subprocess execution (seconds). - # Override via CODEFLASH_TEST_TIMEOUT env var. Default: 600s. 
- timeout=600, - ) - else: - msg = f"Unsupported test framework: {test_framework}" - raise ValueError(msg) - return result_file_path, results diff --git a/codeflash/verification/verification_utils.py b/codeflash/verification/verification_utils.py index 0a613c1fe..fddf3a59f 100644 --- a/codeflash/verification/verification_utils.py +++ b/codeflash/verification/verification_utils.py @@ -6,7 +6,7 @@ from pydantic.dataclasses import dataclass -from codeflash.languages import current_language_support, is_java, is_javascript +from codeflash.languages import current_language_support def get_test_file_path( @@ -21,14 +21,10 @@ def get_test_file_path( assert test_type in {"unit", "inspired", "replay", "perf"} function_name_safe = function_name.replace(".", "_") # Use appropriate file extension based on language - if is_javascript(): - extension = current_language_support().get_test_file_suffix() - elif is_java(): - extension = ".java" - else: - extension = ".py" + lang_support = current_language_support() + extension = lang_support.get_test_file_suffix() - if is_java() and package_name: + if package_name: # For Java, create package directory structure # e.g., com.example -> com/example/ package_path = package_name.replace(".", "/") @@ -42,12 +38,11 @@ def get_test_file_path( # Create package directory if needed path.parent.mkdir(parents=True, exist_ok=True) else: - # For JavaScript/TypeScript, place generated tests in a subdirectory that matches - # Vitest/Jest include patterns (e.g., test/**/*.test.ts) - if is_javascript(): - package_test_dir = _find_js_package_test_dir(test_dir, source_file_path) - if package_test_dir: - test_dir = package_test_dir + # Let language support find the appropriate test subdirectory + # (e.g., for JS monorepos: packages/workflow/test/codeflash-generated/) + package_test_dir = lang_support.get_test_dir_for_source(test_dir, source_file_path) + if package_test_dir: + test_dir = package_test_dir path = test_dir / 
f"test_{function_name_safe}__{test_type}_test_{iteration}{extension}" @@ -58,66 +53,6 @@ def get_test_file_path( return path -def _find_js_package_test_dir(tests_root: Path, source_file_path: Path | None) -> Path | None: - """Find the appropriate test directory for a JavaScript/TypeScript package. - - For monorepos, this finds the package's test directory from the source file path. - For example: packages/workflow/src/utils.ts -> packages/workflow/test/codeflash-generated/ - - Args: - tests_root: The root tests directory (may be monorepo packages root). - source_file_path: Path to the source file being tested. - - Returns: - The test directory path, or None if not found. - - """ - if source_file_path is None: - # No source path provided, check if test_dir itself has a test subdirectory - for test_subdir_name in ["test", "tests", "__tests__", "src/__tests__"]: - test_subdir = tests_root / test_subdir_name - if test_subdir.is_dir(): - codeflash_test_dir = test_subdir / "codeflash-generated" - codeflash_test_dir.mkdir(parents=True, exist_ok=True) - return codeflash_test_dir - return None - - try: - # Resolve paths for reliable comparison - tests_root = tests_root.resolve() - source_path = Path(source_file_path).resolve() - - # Walk up from the source file to find a directory with package.json or test/ folder - package_dir = None - - for parent in source_path.parents: - # Stop if we've gone above or reached the tests_root level - # For monorepos, tests_root might be /packages/ and we want to search within packages - if parent in (tests_root, tests_root.parent): - break - - # Check if this looks like a package root - has_package_json = (parent / "package.json").exists() - has_test_dir = any((parent / d).is_dir() for d in ["test", "tests", "__tests__"]) - - if has_package_json or has_test_dir: - package_dir = parent - break - - if package_dir: - # Find the test directory in this package - for test_subdir_name in ["test", "tests", "__tests__", "src/__tests__"]: - 
test_subdir = package_dir / test_subdir_name - if test_subdir.is_dir(): - codeflash_test_dir = test_subdir / "codeflash-generated" - codeflash_test_dir.mkdir(parents=True, exist_ok=True) - return codeflash_test_dir - - return None - except Exception: - return None - - def delete_multiple_if_name_main(test_ast: ast.Module) -> ast.Module: if_indexes = [] for index, node in enumerate(test_ast.body): @@ -179,7 +114,6 @@ class TestConfig: use_cache: bool = True _language: Optional[str] = None # Language identifier for multi-language support js_project_root: Optional[Path] = None # JavaScript project root (directory containing package.json) - _test_framework: Optional[str] = None # Cached test framework detection result def __post_init__(self) -> None: self.tests_root = self.tests_root.resolve() @@ -188,57 +122,8 @@ def __post_init__(self) -> None: @property def test_framework(self) -> str: - """Returns the appropriate test framework based on language. - - For JavaScript/TypeScript: uses the configured framework (vitest, jest, or mocha). - For Python: uses pytest as default. - Result is cached after first detection to avoid repeated pom.xml parsing. - """ - if self._test_framework is not None: - return self._test_framework - if is_javascript(): - from codeflash.languages.test_framework import get_js_test_framework_or_default - - self._test_framework = get_js_test_framework_or_default() - elif is_java(): - self._test_framework = self._detect_java_test_framework() - else: - self._test_framework = "pytest" - return self._test_framework - - def _detect_java_test_framework(self) -> str: - """Detect the Java test framework from the project configuration. - - Returns 'junit4', 'junit5', or 'testng' based on project dependencies. - Checks both the project root and parent directories for multi-module projects. - Defaults to 'junit5' if detection fails. 
- """ - try: - from codeflash.languages.java.config import detect_java_project - - # First try the project root - config = detect_java_project(self.project_root_path) - if config and config.test_framework and (config.has_junit4 or config.has_junit5 or config.has_testng): - return config.test_framework - - # For multi-module projects, check parent directories - current = self.project_root_path.parent - while current != current.parent: - pom_path = current / "pom.xml" - if pom_path.exists(): - parent_config = detect_java_project(current) - if parent_config and ( - parent_config.has_junit4 or parent_config.has_junit5 or parent_config.has_testng - ): - return parent_config.test_framework - current = current.parent - - # Return whatever the initial detection found, or default - if config and config.test_framework: - return config.test_framework - except Exception: - pass - return "junit4" # Default fallback (JUnit 4 is more common in legacy projects) + """Returns the appropriate test framework based on language.""" + return current_language_support().test_framework def set_language(self, language: str) -> None: """Set the language for this test config. 
diff --git a/codeflash/verification/verifier.py b/codeflash/verification/verifier.py index b00700607..6fc16847e 100644 --- a/codeflash/verification/verifier.py +++ b/codeflash/verification/verifier.py @@ -6,8 +6,8 @@ from typing import TYPE_CHECKING from codeflash.cli_cmds.console import logger -from codeflash.code_utils.code_utils import get_run_tmp_file, module_name_from_file_path -from codeflash.languages import is_java, is_javascript +from codeflash.code_utils.code_utils import module_name_from_file_path +from codeflash.languages.current import current_language_support from codeflash.verification.verification_utils import ModifyInspiredTests, delete_multiple_if_name_main if TYPE_CHECKING: @@ -35,15 +35,13 @@ def generate_tests( start_time = time.perf_counter() test_module_path = Path(module_name_from_file_path(test_path, test_cfg.tests_project_rootdir)) - # Detect module system for JavaScript/TypeScript before calling aiservice - project_module_system = None - if is_javascript(): - from codeflash.languages.javascript.module_system import detect_module_system + # Detect module system via language support (non-None for JS/TS, None for Python) + lang_support = current_language_support() + source_file = Path(function_to_optimize.file_path) + project_module_system = lang_support.detect_module_system(test_cfg.tests_project_rootdir, source_file) - source_file = Path(function_to_optimize.file_path) - project_module_system = detect_module_system(test_cfg.tests_project_rootdir, source_file) - - # For JavaScript, calculate the correct import path from the actual test location + if project_module_system is not None: + # For JavaScript/TypeScript, calculate the correct import path from the actual test location # (test_path) to the source file, not from tests_root import os @@ -73,90 +71,18 @@ def generate_tests( ) if response and isinstance(response, tuple) and len(response) == 3: generated_test_source, instrumented_behavior_test_source, instrumented_perf_test_source = 
response - temp_run_dir = get_run_tmp_file(Path()).as_posix() - - # For JavaScript/TypeScript, instrumentation is done locally (aiservice returns uninstrumented code) - if is_javascript(): - from codeflash.languages.javascript.instrument import ( - TestingMode, - fix_imports_inside_test_blocks, - fix_jest_mock_paths, - instrument_generated_js_test, - validate_and_fix_import_style, - ) - from codeflash.languages.javascript.module_system import ( - ensure_module_system_compatibility, - ensure_vitest_imports, - ) - - source_file = Path(function_to_optimize.file_path) - - # Fix import statements that appear inside test blocks (invalid JS syntax) - generated_test_source = fix_imports_inside_test_blocks(generated_test_source) - - # Fix relative paths in jest.mock() calls - generated_test_source = fix_jest_mock_paths( - generated_test_source, test_path, source_file, test_cfg.tests_project_rootdir - ) - - # Validate and fix import styles (default vs named exports) - generated_test_source = validate_and_fix_import_style( - generated_test_source, source_file, function_to_optimize.function_name - ) - - # Convert module system if needed (e.g., CommonJS -> ESM for ESM projects) - # Skip conversion if ts-jest is installed (handles interop natively) - generated_test_source = ensure_module_system_compatibility( - generated_test_source, project_module_system, test_cfg.tests_project_rootdir - ) - - # Ensure vitest imports are present when using vitest framework - generated_test_source = ensure_vitest_imports(generated_test_source, test_cfg.test_framework) - # Instrument for behavior verification (writes to SQLite) - instrumented_behavior_test_source = instrument_generated_js_test( - test_code=generated_test_source, function_to_optimize=function_to_optimize, mode=TestingMode.BEHAVIOR - ) - - # Instrument for performance measurement (prints to stdout) - instrumented_perf_test_source = instrument_generated_js_test( - test_code=generated_test_source, 
function_to_optimize=function_to_optimize, mode=TestingMode.PERFORMANCE - ) - - logger.debug(f"Instrumented JS/TS tests locally for {function_to_optimize.function_name}") - elif is_java(): - from codeflash.languages.java.instrumentation import instrument_generated_java_test - - func_name = function_to_optimize.function_name - qualified_name = function_to_optimize.qualified_name - - # Instrument for behavior verification (renames class) - instrumented_behavior_test_source = instrument_generated_java_test( - test_code=generated_test_source, - function_name=func_name, - qualified_name=qualified_name, - mode="behavior", + generated_test_source, instrumented_behavior_test_source, instrumented_perf_test_source = ( + lang_support.process_generated_test_strings( + generated_test_source=generated_test_source, + instrumented_behavior_test_source=instrumented_behavior_test_source, + instrumented_perf_test_source=instrumented_perf_test_source, function_to_optimize=function_to_optimize, + test_path=test_path, + test_cfg=test_cfg, + project_module_system=project_module_system, ) - - # Instrument for performance measurement (adds timing markers) - instrumented_perf_test_source = instrument_generated_java_test( - test_code=generated_test_source, - function_name=func_name, - qualified_name=qualified_name, - mode="performance", - function_to_optimize=function_to_optimize, - ) - - logger.debug(f"Instrumented Java tests locally for {func_name}") - else: - # Python: instrumentation is done by aiservice, just replace temp dir placeholders - instrumented_behavior_test_source = instrumented_behavior_test_source.replace( - "{codeflash_run_tmp_dir_client_side}", temp_run_dir - ) - instrumented_perf_test_source = instrumented_perf_test_source.replace( - "{codeflash_run_tmp_dir_client_side}", temp_run_dir - ) + ) else: logger.warning(f"Failed to generate and instrument tests for {function_to_optimize.function_name}") return None diff --git a/docs/FRICTIONLESS_SETUP_PLAN.md 
b/docs/FRICTIONLESS_SETUP_PLAN.md new file mode 100644 index 000000000..8b1378917 --- /dev/null +++ b/docs/FRICTIONLESS_SETUP_PLAN.md @@ -0,0 +1 @@ + diff --git a/docs/JS_PROMPT_PARITY_RECOMMENDATIONS.md b/docs/JS_PROMPT_PARITY_RECOMMENDATIONS.md new file mode 100644 index 000000000..8b1378917 --- /dev/null +++ b/docs/JS_PROMPT_PARITY_RECOMMENDATIONS.md @@ -0,0 +1 @@ + diff --git a/docs/codeflash-concepts/how-codeflash-works.mdx b/docs/codeflash-concepts/how-codeflash-works.mdx index 4456cd3d0..b9ab9a060 100644 --- a/docs/codeflash-concepts/how-codeflash-works.mdx +++ b/docs/codeflash-concepts/how-codeflash-works.mdx @@ -3,25 +3,31 @@ title: "How Codeflash Works" description: "Understand Codeflash's generate-and-verify approach to code optimization and correctness verification" icon: "gear" sidebarTitle: "How It Works" -keywords: ["architecture", "verification", "correctness", "testing", "optimization", "LLM", "benchmarking"] +keywords: ["architecture", "verification", "correctness", "testing", "optimization", "LLM", "benchmarking", "javascript", "typescript", "python"] --- # How Codeflash Works Codeflash follows a "generate and verify" approach to optimize code. It uses LLMs to generate optimizations, then it rigorously verifies if those optimizations are indeed faster and if they have the same behavior. The basic unit of optimization is a function—Codeflash tries to speed up the function, and tries to ensure that it still behaves the same way. This way if you merge the optimized code, it simply runs faster without breaking any functionality. +Codeflash supports **Python**, **JavaScript**, and **TypeScript** projects. + ## Analysis of your code Codeflash scans your codebase to identify all available functions. It locates existing unit tests in your projects and maps which functions they test. When optimizing a function, Codeflash runs these discovered tests to verify nothing has broken. +For Python, code analysis uses `libcst` and `jedi`. 
For JavaScript/TypeScript, it uses `tree-sitter` for AST parsing. + #### What kind of functions can Codeflash optimize? Codeflash works best with self-contained functions that have minimal side effects (like communicating with external systems or sending network requests). Codeflash optimizes a group of functions - consisting of an entry point function and any other functions it directly calls. -Codeflash supports optimizing async functions. +Codeflash supports optimizing async functions in all supported languages. #### Test Discovery -Codeflash currently only runs tests that directly call the target function in their test body. To discover tests that indirectly call the function, you can use the Codeflash Tracer. The Tracer analyzes your test suite and identifies all tests that eventually call a function. +Codeflash discovers tests that directly call the target function in their test body. For Python, it finds pytest and unittest tests. For JavaScript/TypeScript, it finds Jest and Vitest test files. + +To discover tests that indirectly call the function, you can use the Codeflash Tracer. The Tracer analyzes your test suite and identifies all tests that eventually call a function. ## Optimization Generation @@ -48,12 +54,12 @@ We recommend manually reviewing the optimized code since there might be importan Codeflash generates two types of tests: -- LLM Generated tests - Codeflash uses LLMs to create several regression test cases that cover typical function usage, edge cases, and large-scale inputs to verify both correctness and performance. -- Concolic coverage tests - Codeflash uses state-of-the-art concolic testing with an SMT Solver (a theorem prover) to explore execution paths and generate function arguments. This aims to maximize code coverage for the function being optimized. Codeflash runs the resulting test file to verify correctness. Currently, this feature only supports pytest. 
+- **LLM Generated tests** - Codeflash uses LLMs to create several regression test cases that cover typical function usage, edge cases, and large-scale inputs to verify both correctness and performance. This works for Python, JavaScript, and TypeScript. +- **Concolic coverage tests** - Codeflash uses state-of-the-art concolic testing with an SMT Solver (a theorem prover) to explore execution paths and generate function arguments. This aims to maximize code coverage for the function being optimized. Currently, this feature only supports Python (pytest). ## Code Execution -Codeflash runs tests for the target function using either pytest or unittest frameworks. The tests execute on your machine, ensuring access to the Python environment and any other dependencies associated to let Codeflash run your code properly. Running on your machine also ensures accurate performance measurements since runtime varies by system. +Codeflash runs tests for the target function on your machine. For Python, it uses pytest or unittest. For JavaScript/TypeScript, it uses Jest or Vitest. Running on your machine ensures access to your environment and dependencies, and provides accurate performance measurements since runtime varies by system. #### Performance benchmarking diff --git a/docs/configuration.mdx b/docs/configuration.mdx index 3f388a531..29506b952 100644 --- a/docs/configuration.mdx +++ b/docs/configuration.mdx @@ -1,83 +1,26 @@ --- title: "Manual Configuration" -description: "Configure Codeflash for your project with pyproject.toml settings and advanced options" +description: "Configure Codeflash for your project" icon: "gear" sidebarTitle: "Manual Configuration" keywords: [ "configuration", - "pyproject.toml", "setup", "settings", - "pytest", - "formatter", ] --- # Manual Configuration Codeflash is installed and configured on a per-project basis. 
-`codeflash init` should guide you through the configuration process, but if you need to manually configure Codeflash or set advanced settings, you can do so by editing the `pyproject.toml` file in the root directory of your project. - -## Configuration Options - -Codeflash config looks like the following - -```toml -[tool.codeflash] -module-root = "my_module" -tests-root = "tests" -formatter-cmds = ["black $file"] -# optional configuration -benchmarks-root = "tests/benchmarks" # Required when running with --benchmark -ignore-paths = ["my_module/build/"] -pytest-cmd = "pytest" -disable-imports-sorting = false -disable-telemetry = false -git-remote = "origin" -override-fixtures = false -``` - -All file paths are relative to the directory of the `pyproject.toml` file. - -Required Options: - -- `module-root`: The Python module you want Codeflash to optimize going forward. Only code under this directory will be optimized. It should also have an `__init__.py` file to make the module importable. -- `tests-root`: The directory where your tests are located. Codeflash will use this directory to discover existing tests as well as generate new tests. - -Optional Configuration: - -- `benchmarks-root`: The directory where your benchmarks are located. Codeflash will use this directory to discover existing benchmarks. Note that this option is required when running with `--benchmark`. -- `ignore-paths`: A list of paths within the `module-root` to ignore when optimizing code. Codeflash will not optimize code in these paths. Useful for ignoring build directories or other generated code. You can also leave this empty if not needed. -- `pytest-cmd`: The command to run your tests. Defaults to `pytest`. You can specify extra commandline arguments here for pytest. -- `formatter-cmds`: The command line to run your code formatter or linter. Defaults to `["black $file"]`. In the command line `$file` refers to the current file being optimized. 
The assumption with using tools here is that they overwrite the same file and returns a zero exit code. You can also specify multiple tools here that run in a chain as a toml array. You can also disable code formatting by setting this to `["disabled"]`. - - `ruff` - A recommended way to run ruff linting and formatting is `["ruff check --exit-zero --fix $file", "ruff format $file"]`. To make `ruff check --fix` return a 0 exit code please add a `--exit-zero` argument. -- `disable-imports-sorting`: By default, codeflash uses isort to organize your imports before creating suggestions. You can disable this by setting this field to `true`. This could be useful if you don't sort your imports or while using linters like ruff that sort imports too. -- `disable-telemetry`: Disable telemetry data collection. Defaults to `false`. Set this to `true` to disable telemetry data collection. Codeflash collects anonymized telemetry data to understand how users are using Codeflash and to improve the product. Telemetry does not collect any code data. -- `git-remote`: The git remote to use for pull requests. Defaults to `"origin"`. -- `override-fixtures`: Override pytest fixtures during optimization. Defaults to `false`. 
- -## Example Configuration - -Here's an example project with the following structure: - -```text -acme-project/ -|- foo_module/ -| |- __init__.py -| |- foo.py -| |- main.py -|- tests/ -| |- __init__.py -| |- test_script.py -|- pyproject.toml -``` - -Here's a sample `pyproject.toml` file for the above project: - -```toml -[tool.codeflash] -module-root = "foo_module" -tests-root = "tests" -ignore-paths = [] -``` +`codeflash init` should guide you through the configuration process, but if you need to manually configure Codeflash or set advanced settings, follow the guide for your language: + + + + Configure via `pyproject.toml` + + + Configure via `package.json` + + \ No newline at end of file diff --git a/docs/configuration/javascript.mdx b/docs/configuration/javascript.mdx new file mode 100644 index 000000000..1195d692d --- /dev/null +++ b/docs/configuration/javascript.mdx @@ -0,0 +1,220 @@ +--- +title: "JavaScript / TypeScript Configuration" +description: "Configure Codeflash for JavaScript and TypeScript projects using package.json" +icon: "js" +sidebarTitle: "JavaScript / TypeScript" +keywords: + [ + "configuration", + "package.json", + "javascript", + "typescript", + "jest", + "vitest", + "prettier", + "eslint", + "monorepo", + ] +--- + +# JavaScript / TypeScript Configuration + +Codeflash stores its configuration in `package.json` under the `"codeflash"` key. + +## Full Reference + +```json +{ + "name": "my-project", + "codeflash": { + "moduleRoot": "src", + "testsRoot": "tests", + "testRunner": "jest", + "formatterCmds": ["prettier --write $file"], + "ignorePaths": ["src/generated/"], + "disableTelemetry": false, + "gitRemote": "origin" + } +} +``` + +All file paths are relative to the directory containing `package.json`. + + +Codeflash auto-detects most settings from your project structure. Running `codeflash init` will set up the correct config — manual configuration is usually not needed. 
+ + +## Auto-Detection + +When you run `codeflash init`, Codeflash inspects your project and auto-detects: + +| Setting | Detection logic | +|---------|----------------| +| `moduleRoot` | Looks for `src/`, `lib/`, or the main source directory | +| `testsRoot` | Looks for `tests/`, `test/`, `__tests__/`, or files matching `*.test.js` / `*.spec.js` | +| `testRunner` | Checks `devDependencies` for `jest` or `vitest` | +| `formatterCmds` | Checks for `prettier`, `eslint`, or `biome` in dependencies and config files | +| Module system | Reads `"type"` field in `package.json` (ESM vs CommonJS) | +| TypeScript | Detects `tsconfig.json` | + +You can always override any auto-detected value in the `"codeflash"` section. + +## Required Options + +- `moduleRoot`: The source directory to optimize. Only code under this directory will be optimized. +- `testsRoot`: The directory where your tests are located. Codeflash discovers existing tests and generates new ones here. + +## Optional Options + +- `testRunner`: Test framework to use. Auto-detected from your dependencies. Supported values: `"jest"`, `"vitest"`. +- `formatterCmds`: Formatter commands. `$file` refers to the file being optimized. Disable with `["disabled"]`. + - **Prettier**: `["prettier --write $file"]` + - **ESLint + Prettier**: `["eslint --fix $file", "prettier --write $file"]` + - **Biome**: `["biome check --write $file"]` +- `ignorePaths`: Paths within `moduleRoot` to skip during optimization. +- `disableTelemetry`: Disable anonymized telemetry. Defaults to `false`. +- `gitRemote`: Git remote for pull requests. Defaults to `"origin"`. + +## Module Systems + +Codeflash handles both ES Modules and CommonJS automatically. 
It detects the module system from your `package.json`: + +```json +{ + "type": "module" +} +``` + +- `"type": "module"` — Files are treated as ESM (`import`/`export`) +- `"type": "commonjs"` or omitted — Files are treated as CommonJS (`require`/`module.exports`) + +No additional configuration is needed. Codeflash respects `.mjs`/`.cjs` extensions as well. + +## TypeScript + +TypeScript projects work out of the box. Codeflash detects TypeScript from the presence of `tsconfig.json` and handles `.ts`/`.tsx` files automatically. + +No separate configuration is needed for TypeScript vs JavaScript. + +## Test Framework Support + +| Framework | Auto-detected from | Notes | +|-----------|-------------------|-------| +| **Jest** | `jest` in dependencies | Default for most projects | +| **Vitest** | `vitest` in dependencies | ESM-native support | + + +**Functions must be exported** to be optimizable. Codeflash uses tree-sitter AST analysis to discover functions and check export status. Supported export patterns: + +- `export function foo() {}` +- `export const foo = () => {}` +- `export default function foo() {}` +- `const foo = () => {}; export { foo };` +- `module.exports = { foo }` +- `const utils = { foo() {} }; module.exports = utils;` + + +## Monorepo Configuration + +For monorepo projects (Yarn workspaces, pnpm workspaces, Lerna, Nx, Turborepo), configure each package individually: + +```text +my-monorepo/ +|- packages/ +| |- core/ +| | |- src/ +| | |- tests/ +| | |- package.json <-- "codeflash" config here +| |- utils/ +| | |- src/ +| | |- __tests__/ +| | |- package.json <-- "codeflash" config here +|- package.json <-- workspace root (no codeflash config) +``` + +Run `codeflash init` from within each package: + +```bash +cd packages/core +npx codeflash init +``` + + +**Always run codeflash from the package directory**, not the monorepo root. Codeflash needs to find the `package.json` with the `"codeflash"` config in the current working directory. 
+ + +### Hoisted dependencies + +If your monorepo hoists `node_modules` to the root (Yarn Berry with `nodeLinker: node-modules`, pnpm with `shamefully-hoist`), Codeflash resolves modules using Node.js standard resolution. This works automatically. + +For **pnpm strict mode** (non-hoisted), ensure `codeflash` is a direct dependency of the package: + +```bash +pnpm add --filter @my-org/core --save-dev codeflash +``` + +## Example + +### Standard project + +```text +my-app/ +|- src/ +| |- utils.js +| |- index.js +|- tests/ +| |- utils.test.js +|- package.json +``` + +```json +{ + "name": "my-app", + "codeflash": { + "moduleRoot": "src", + "testsRoot": "tests" + } +} +``` + +### Project with co-located tests + +```text +my-app/ +|- src/ +| |- utils.js +| |- utils.test.js +| |- index.js +|- package.json +``` + +```json +{ + "name": "my-app", + "codeflash": { + "moduleRoot": "src", + "testsRoot": "src" + } +} +``` + +### CommonJS library with no separate test directory + +```text +my-lib/ +|- lib/ +| |- helpers.js +|- test/ +| |- helpers.spec.js +|- package.json +``` + +```json +{ + "name": "my-lib", + "codeflash": { + "moduleRoot": "lib", + "testsRoot": "test" + } +} +``` diff --git a/docs/configuration/python.mdx b/docs/configuration/python.mdx new file mode 100644 index 000000000..765a7ac82 --- /dev/null +++ b/docs/configuration/python.mdx @@ -0,0 +1,80 @@ +--- +title: "Python Configuration" +description: "Configure Codeflash for Python projects using pyproject.toml" +icon: "python" +sidebarTitle: "Python" +keywords: + [ + "configuration", + "pyproject.toml", + "python", + "pytest", + "formatter", + "ruff", + "black", + ] +--- + +# Python Configuration + +Codeflash stores its configuration in `pyproject.toml` under the `[tool.codeflash]` section. 
+ +## Full Reference + +```toml +[tool.codeflash] +# Required +module-root = "my_module" +tests-root = "tests" + +# Optional +formatter-cmds = ["black $file"] +benchmarks-root = "tests/benchmarks" +ignore-paths = ["my_module/build/"] +pytest-cmd = "pytest" +disable-imports-sorting = false +disable-telemetry = false +git-remote = "origin" +override-fixtures = false +``` + +All file paths are relative to the directory of the `pyproject.toml` file. + +## Required Options + +- `module-root`: The Python module to optimize. Only code under this directory will be optimized. It should have an `__init__.py` file to make the module importable. +- `tests-root`: The directory where your tests are located. Codeflash discovers existing tests and generates new ones here. + +## Optional Options + +- `benchmarks-root`: Directory for benchmarks. Required when running with `--benchmark`. +- `ignore-paths`: Paths within `module-root` to skip. Useful for build directories or generated code. +- `pytest-cmd`: Command to run your tests. Defaults to `pytest`. You can add extra arguments here. +- `formatter-cmds`: Formatter/linter commands. `$file` refers to the file being optimized. Disable with `["disabled"]`. + - **ruff** (recommended): `["ruff check --exit-zero --fix $file", "ruff format $file"]` + - **black**: `["black $file"]` +- `disable-imports-sorting`: Disable isort import sorting. Defaults to `false`. +- `disable-telemetry`: Disable anonymized telemetry. Defaults to `false`. +- `git-remote`: Git remote for pull requests. Defaults to `"origin"`. +- `override-fixtures`: Override pytest fixtures during optimization. Defaults to `false`. 
+ +## Example + +```text +acme-project/ +|- foo_module/ +| |- __init__.py +| |- foo.py +| |- main.py +|- tests/ +| |- __init__.py +| |- test_script.py +|- pyproject.toml +``` + +```toml +[tool.codeflash] +module-root = "foo_module" +tests-root = "tests" +ignore-paths = [] +``` \ No newline at end of file diff --git a/docs/getting-started/javascript-installation.mdx b/docs/getting-started/javascript-installation.mdx index abaa2d43d..a19d6cca6 100644 --- a/docs/getting-started/javascript-installation.mdx +++ b/docs/getting-started/javascript-installation.mdx @@ -1,38 +1,51 @@ --- -title: "JavaScript Installation" +title: "JavaScript / TypeScript Installation" description: "Install and configure Codeflash for your JavaScript/TypeScript project" icon: "node-js" +keywords: + [ + "installation", + "javascript", + "typescript", + "npm", + "yarn", + "pnpm", + "bun", + "jest", + "vitest", + "monorepo", + ] --- -Codeflash now supports JavaScript and TypeScript projects with optimized test data serialization using V8 native serialization. +Codeflash supports JavaScript and TypeScript projects. It uses V8 native serialization for test data capture and works with Jest and Vitest test frameworks. ### Prerequisites -Before installing Codeflash for JavaScript, ensure you have: +Before installing Codeflash, ensure you have: -1. **Node.js 16 or above** installed +1. **Node.js 18 or above** installed 2. **A JavaScript/TypeScript project** with a package manager (npm, yarn, pnpm, or bun) 3. **Project dependencies installed** Good to have (optional): -1. **Unit Tests** that Codeflash uses to ensure correctness of the optimizations +1. **Unit tests** (Jest or Vitest) — Codeflash uses them to verify correctness of optimizations -**Node.js Runtime Required** +**Node.js 18+ Required** -Codeflash JavaScript support uses V8 serialization API, which is available natively in Node.js. Make sure you're running on Node.js 16+ for optimal compatibility. +Codeflash requires Node.js 18 or above. 
Check your version: ```bash -node --version # Should show v16.0.0 or higher +node --version # Should show v18.0.0 or higher ``` - + -Install Codeflash globally or as a development dependency in your project: +Install Codeflash as a development dependency in your project: ```bash npm @@ -50,321 +63,285 @@ pnpm add --save-dev codeflash ```bash bun bun add --dev codeflash ``` - -```bash global -npm install -g codeflash -``` -**Development Dependency Recommended** - -Codeflash is intended for development and CI workflows. Installing as a dev dependency keeps your production bundle clean. - +**Dev dependency recommended** — Codeflash is for development and CI workflows. Installing as a dev dependency keeps your production bundle clean. - - -Navigate to your project's root directory (where your `package.json` file is) and run: + +**Codeflash also requires a Python installation** (3.9+) to run the CLI optimizer. Install the Python CLI globally: ```bash -codeflash init +pip install codeflash +# or +uv tool install codeflash ``` -When running `codeflash init`, you will see the following prompts: +The Python CLI orchestrates the optimization pipeline, while the npm package provides the JavaScript runtime (test runners, serialization, reporters). + + -```text -1. Enter your Codeflash API key (or login with Codeflash) -2. Which JavaScript/TypeScript module do you want me to optimize? (e.g. src/) -3. Where are your tests located? (e.g. tests/, __tests__/, *.test.js) -4. Which test framework do you use? (jest/vitest/mocha/ava/other) -5. Which code formatter do you use? (prettier/eslint/biome/disabled) -6. Which git remote should Codeflash use for Pull Requests? (if multiple remotes exist) -7. Help us improve Codeflash by sharing anonymous usage data? -8. Install the GitHub app -9. Install GitHub actions for Continuous optimization? -``` + +Codeflash uses cloud-hosted AI models. 
You need an API key: -After you have answered these questions, the Codeflash configuration will be saved in a `codeflash.config.js` file. +1. Visit the [Codeflash Web App](https://app.codeflash.ai/) +2. Sign up with your GitHub account (free tier available) +3. Navigate to the [API Key](https://app.codeflash.ai/app/apikeys) page to generate your key - -**Test Data Serialization Strategy** +Set it as an environment variable: -Codeflash uses **V8 serialization** for JavaScript test data capture. This provides: -- ⚡ **Best performance**: 2-3x faster than alternatives -- 🎯 **Perfect type preservation**: Maintains Date, Map, Set, TypedArrays, and more -- 📦 **Compact binary storage**: Smallest file sizes -- 🔄 **Framework agnostic**: Works with React, Vue, Angular, Svelte, and vanilla JS +```bash +export CODEFLASH_API_KEY="your-api-key-here" +``` - +Or add it to your shell profile (`~/.bashrc`, `~/.zshrc`) for persistence. - -Codeflash uses cloud-hosted AI models and integrations with GitHub. If you haven't created one already, you'll need to create an API key to authorize your access. + +Navigate to your project root (where `package.json` is) and run: -1. Visit the [Codeflash Web App](https://app.codeflash.ai/) -2. Sign up with your GitHub account (free) -3. 
Navigate to the [API Key](https://app.codeflash.ai/app/apikeys) page to generate your API key + +```bash npm / yarn / pnpm +npx codeflash init +``` + +```bash bun +bunx codeflash init +``` + +```bash Global install +codeflash init +``` + - -**Free Tier Available** +### What `codeflash init` does + +Codeflash **auto-detects** most settings from your project: + +| Setting | How it's detected | +|---------|------------------| +| **Module root** | Looks for `src/`, `lib/`, or the directory containing your source files | +| **Tests root** | Looks for `tests/`, `test/`, `__tests__/`, or files matching `*.test.js` / `*.spec.js` | +| **Test framework** | Checks `devDependencies` for `jest` or `vitest` | +| **Formatter** | Checks for `prettier`, `eslint`, or `biome` in dependencies and config files | +| **Module system** | Reads `"type"` field in `package.json` (ESM vs CommonJS) | +| **TypeScript** | Detects `tsconfig.json` presence | + +You'll be prompted to confirm or override the detected values. The configuration is saved in your `package.json` under the `"codeflash"` key: + +```json +{ + "name": "my-project", + "codeflash": { + "moduleRoot": "src", + "testsRoot": "tests" + } +} +``` -Codeflash offers a **free tier** with a limited number of optimizations. Perfect for trying it out on small projects! + +**No separate config file needed.** Codeflash stores all configuration inside your existing `package.json`, not in a separate config file. + - - + -Finally, if you have not done so already, Codeflash will ask you to install the GitHub App in your repository. -The Codeflash GitHub App allows the codeflash-ai bot to open PRs, review code, and provide optimization suggestions. +To receive optimization PRs automatically, install the Codeflash GitHub App: -Please [install the Codeflash GitHub -app](https://github.com/apps/codeflash-ai/installations/select_target) by choosing the repository you want to install -Codeflash on. 
+[Install Codeflash GitHub App](https://github.com/apps/codeflash-ai/installations/select_target) + +This enables the codeflash-ai bot to open PRs with optimization suggestions. If you skip this step, you can still optimize locally using `--no-pr`. -## Framework Support - -Codeflash JavaScript support works seamlessly with all major frameworks and testing libraries: - - - - - React - - Vue.js - - Angular - - Svelte - - Solid.js - - - - - Jest - - Vitest - - Mocha - - AVA - - Playwright - - Cypress - - - - - Express - - NestJS - - Fastify - - Koa - - Hono - - - - - Node.js ✅ (Recommended) - - Bun (Coming soon) - - Deno (Coming soon) - - - -## Understanding V8 Serialization - -Codeflash uses Node.js's native V8 serialization API to capture and compare test data. Here's what makes it powerful: - -### Type Preservation - -Unlike JSON serialization, V8 serialization preserves JavaScript-specific types: - -```javascript -// These types are preserved perfectly: -const testData = { - date: new Date(), // ✅ Date objects - map: new Map([['key', 'value']]), // ✅ Map instances - set: new Set([1, 2, 3]), // ✅ Set instances - buffer: Buffer.from('hello'), // ✅ Buffers - typed: new Uint8Array([1, 2, 3]), // ✅ TypedArrays - bigint: 9007199254740991n, // ✅ BigInt - regex: /pattern/gi, // ✅ RegExp - undef: undefined, // ✅ undefined (not null!) 
- circular: {} // ✅ Circular references -}; -testData.circular.self = testData.circular; -``` +## Monorepo Setup - -**Why Not JSON?** +For monorepos (Yarn workspaces, pnpm workspaces, Lerna, Nx, Turborepo), run `codeflash init` from within each package you want to optimize: -JSON serialization would cause bugs to slip through: -- `Date` becomes string → date arithmetic fails silently -- `Map` becomes `{}` → `.get()` calls return undefined -- `undefined` becomes `null` → type checks break -- TypedArrays become plain objects → binary operations fail +```bash +# Navigate to the specific package +cd packages/my-library -V8 serialization catches these issues during optimization verification. - +# Run init from the package directory +npx codeflash init +``` -## Try It Out! +Each package gets its own `"codeflash"` section in its `package.json`. The `moduleRoot` and `testsRoot` paths are relative to that package's `package.json`. - - -Once configured, you can start optimizing your JavaScript/TypeScript code immediately: +### Example: Yarn workspaces monorepo -```bash -# Optimize a specific function -codeflash --file path/to/your/file.js --function functionName +```text +my-monorepo/ +|- packages/ +| |- core/ +| | |- src/ +| | |- tests/ +| | |- package.json <-- codeflash config here +| |- utils/ +| | |- src/ +| | |- __tests__/ +| | |- package.json <-- codeflash config here +|- package.json <-- root workspace (no codeflash config needed) +``` -# Or optimize all functions in your codebase -codeflash --all +```json +// packages/core/package.json +{ + "name": "@my-org/core", + "codeflash": { + "moduleRoot": "src", + "testsRoot": "tests" + } +} ``` - + +**Run codeflash from the package directory**, not the monorepo root. Codeflash needs to find the `package.json` with the `"codeflash"` config in the current working directory. 
+ - -Codeflash fully supports TypeScript projects: + +**Hoisted dependencies work fine.** If your monorepo hoists `node_modules` to the root (common in Yarn Berry, pnpm with `shamefully-hoist`), Codeflash resolves modules using Node.js standard resolution and will find them correctly. + -```bash -# Optimize TypeScript files directly -codeflash --file src/utils.ts --function processData +## Test Framework Support -# Works with TSX for React components -codeflash --file src/components/DataTable.tsx --function DataTable -``` +| Framework | Status | Auto-detected from | +|-----------|--------|-------------------| +| **Jest** | Supported | `jest` in dependencies | +| **Vitest** | Supported | `vitest` in dependencies | +| **Mocha** | Coming soon | — | -Codeflash preserves TypeScript types during optimization. Your type annotations and interfaces remain intact. +**Functions must be exported** to be optimizable. Codeflash can only discover and optimize functions that are exported from their module (via `export`, `export default`, or `module.exports`). 
- - - +## Try It Out - -```javascript -// sum.test.js -test('adds 1 + 2 to equal 3', () => { - expect(sum(1, 2)).toBe(3); -}); +Once configured, optimize your code: -// Optimize the sum function -codeflash --file sum.js --function sum + +```bash Optimize a function +codeflash --file src/utils.js --function processData ``` - - - -```javascript -// calculator.test.js -import { describe, it, expect } from 'vitest'; -describe('calculator', () => { - it('should multiply correctly', () => { - expect(multiply(2, 3)).toBe(6); - }); -}); +```bash Optimize locally (no PR) +codeflash --file src/utils.ts --function processData --no-pr +``` -// Optimize the multiply function -codeflash --file calculator.js --function multiply +```bash Optimize entire codebase +codeflash --all ``` - - - +```bash Trace and optimize +codeflash optimize --jest +``` + ## Troubleshooting - - Make sure: - - ✅ All project dependencies are installed - - ✅ Your `node_modules` directory exists + + Codeflash only optimizes **exported** functions. Make sure your function is exported: - ```bash - # Reinstall dependencies - npm install - # or - yarn install + ```javascript + // ES Modules + export function processData(data) { ... } + // or + const processData = (data) => { ... }; + export { processData }; + + // CommonJS + function processData(data) { ... } + module.exports = { processData }; ``` + + If codeflash reports the function exists but is not exported, add an export statement. 
- - If you encounter serialization errors: + + Ensure the codeflash npm package is installed in your project: - **Functions and classes** cannot be serialized: - ```javascript - // ❌ Won't work - contains function - const data = { callback: () => {} }; - - // ✅ Works - pure data - const data = { value: 42, items: [1, 2, 3] }; + + ```bash npm + npm install --save-dev codeflash ``` - - **Symbols** are not serializable: - ```javascript - // ❌ Won't work - const data = { [Symbol('key')]: 'value' }; - - // ✅ Use string keys - const data = { key: 'value' }; + ```bash yarn + yarn add --dev codeflash + ``` + ```bash pnpm + pnpm add --save-dev codeflash ``` + + + For **monorepos**, make sure it's installed in the package you're optimizing, or at the workspace root if dependencies are hoisted. - - Not all functions can be optimized - some code is already optimal. This is expected. + + Codeflash auto-detects the test framework from your `devDependencies`. If detection fails: + + 1. Verify your test framework is in `devDependencies`: + ```bash + npm ls jest # or: npm ls vitest + ``` + 2. Or set it manually in `package.json`: + ```json + { + "codeflash": { + "testRunner": "jest" + } + } + ``` + - Use the `--verbose` flag for detailed output: - ```bash - codeflash optimize --verbose - ``` + + If Jest tests take too long, Codeflash has a default timeout. 
For large test suites: - This will show: - - 🔍 Which functions are being analyzed - - 🚫 Why certain functions were skipped - - ⚠️ Detailed error messages - - 📊 Performance analysis results + - Use `--file` and `--function` to target specific functions instead of `--all` + - Ensure your tests don't have expensive setup/teardown that runs for every test file + - Check if `jest.config.js` has a `setupFiles` that takes a long time - - Verify: - - 📁 Your test directory path is correct in `codeflash.config.js` - - 🔍 Tests are discoverable by your test framework - - 📝 Test files follow naming conventions (`*.test.js`, `*.spec.js`) + + Codeflash uses your project's TypeScript configuration. If you see TS errors: - ```bash - # Test if your test framework can discover tests - npm test -- --listTests # Jest - # or - npx vitest list # Vitest - ``` + 1. Verify `npx tsc --noEmit` passes on its own + 2. Check that `tsconfig.json` is in the project root or the module root + 3. For projects using `moduleResolution: "bundler"`, Codeflash creates a temporary tsconfig overlay — this is expected behavior - - -## Configuration -Your `codeflash.config.js` file controls how Codeflash analyzes your JavaScript project: + + Run codeflash from the correct package directory: -```javascript -module.exports = { - // Source code to optimize - module: 'src', + ```bash + cd packages/my-library + codeflash --file src/utils.ts --function myFunc + ``` - // Test location - tests: 'tests', + If your monorepo tool hoists dependencies, you may need to ensure the `codeflash` npm package is accessible from the package directory. For pnpm, add `.npmrc` with `shamefully-hoist=true` or use `pnpm add --filter my-library --save-dev codeflash`. + - // Test framework - testFramework: 'jest', + + Not all functions can be optimized — some code is already efficient. This is normal. 
- // Serialization strategy (automatically set to 'v8') - serialization: 'v8', + For better results: + - Target functions with loops, string manipulation, or data transformations + - Ensure the function has existing tests for correctness verification + - Use `codeflash optimize --jest` to trace real execution and capture realistic inputs + + - // Formatter - formatter: 'prettier', +## Configuration Reference - // Additional options - exclude: ['node_modules', 'dist', 'build'], - verbose: false -}; -``` +See [JavaScript / TypeScript Configuration](/configuration/javascript) for the full list of options. ### Next Steps -- Learn about [Codeflash Concepts](/codeflash-concepts/how-codeflash-works) -- Explore [Optimization workflows](/optimizing-with-codeflash/one-function) +- Learn [how Codeflash works](/codeflash-concepts/how-codeflash-works) +- [Optimize a single function](/optimizing-with-codeflash/one-function) - Set up [Pull Request Optimization](/optimizing-with-codeflash/codeflash-github-actions) -- Read [configuration options](/configuration) for advanced setups \ No newline at end of file +- Explore [Trace and Optimize](/optimizing-with-codeflash/trace-and-optimize) for workflow optimization diff --git a/docs/index.mdx b/docs/index.mdx index 24d0d1561..b94258ed3 100644 --- a/docs/index.mdx +++ b/docs/index.mdx @@ -1,27 +1,38 @@ --- -title: "Codeflash is an AI performance optimizer for Python code" +title: "Codeflash is an AI performance optimizer for your code" icon: "rocket" sidebarTitle: "Overview" -keywords: ["python", "performance", "optimization", "AI", "code analysis", "benchmarking"] +keywords: ["python", "javascript", "typescript", "performance", "optimization", "AI", "code analysis", "benchmarking"] --- -Codeflash speeds up any Python code by figuring out the best way to rewrite it while verifying that the behavior of the code is unchanged, and verifying real speed -gains through performance benchmarking. 
+Codeflash speeds up your code by figuring out the best way to rewrite it while verifying that the behavior is unchanged, and verifying real speed +gains through performance benchmarking. It supports **Python**, **JavaScript**, and **TypeScript**. The optimizations Codeflash finds are generally better algorithms, opportunities to remove wasteful compute, better logic, utilizing caching and utilization of more efficient library methods. Codeflash does not modify the system architecture of your code, but it tries to find the most efficient implementation of your current architecture. +### Get Started + + + + Install via pip, uv, or poetry + + + Install via npm, yarn, pnpm, or bun + + + ### How to use Codeflash - Target and optimize individual Python functions for maximum performance gains. + Target and optimize individual functions for maximum performance gains. ```bash - codeflash --file path.py --function my_function + codeflash --file path/to/file --function my_function ``` - + Automatically find optimizations for Pull Requests with GitHub Actions integration. ```bash codeflash init-actions @@ -29,7 +40,7 @@ does not modify the system architecture of your code, but it tries to find the m - End-to-end optimization of entire Python workflows with execution tracing. + End-to-end optimization of entire workflows with execution tracing. ```bash codeflash optimize myscript.py ``` @@ -42,7 +53,6 @@ does not modify the system architecture of your code, but it tries to find the m ``` - ### How does Codeflash verify correctness? 
diff --git a/docs/optimizing-with-codeflash/benchmarking.mdx b/docs/optimizing-with-codeflash/benchmarking.mdx index ade7eb023..f373cce81 100644 --- a/docs/optimizing-with-codeflash/benchmarking.mdx +++ b/docs/optimizing-with-codeflash/benchmarking.mdx @@ -1,6 +1,6 @@ --- title: "Optimize Performance Benchmarks with every Pull Request" -description: "Configure and use pytest-benchmark integration for performance-critical code optimization" +description: "Configure and use benchmark integration for performance-critical code optimization" icon: "chart-line" sidebarTitle: Setup Benchmarks to Optimize keywords: @@ -26,6 +26,10 @@ It will then try to optimize the new code for the benchmark and calculate the im ## Using Codeflash in Benchmark Mode + + Benchmark mode currently supports Python projects using pytest-benchmark. JavaScript/TypeScript benchmark support is coming soon. + + 1. **Create a benchmarks root:** Create a directory for benchmarks if it does not already exist. @@ -44,7 +48,7 @@ It will then try to optimize the new code for the benchmark and calculate the im 2. **Define your benchmarks:** - Currently, Codeflash only supports benchmarks written as pytest-benchmarks. Check out the [pytest-benchmark](https://pytest-benchmark.readthedocs.io/en/stable/index.html) documentation for more information on syntax. + Codeflash supports benchmarks written as pytest-benchmarks. Check out the [pytest-benchmark](https://pytest-benchmark.readthedocs.io/en/stable/index.html) documentation for more information on syntax. For example: @@ -58,7 +62,7 @@ It will then try to optimize the new code for the benchmark and calculate the im Note that these benchmarks should be defined in such a way that they don't take a long time to run. - The pytest-benchmark format is simply used as an interface. The plugin is actually not used - Codeflash will run these benchmarks with its own pytest plugin + The pytest-benchmark format is simply used as an interface. 
The plugin is actually not used - Codeflash will run these benchmarks with its own pytest plugin. 3. **Run and Test Codeflash:** @@ -74,7 +78,7 @@ It will then try to optimize the new code for the benchmark and calculate the im codeflash --file test_file.py --benchmark --benchmarks-root path/to/benchmarks ``` -4. **Run Codeflash :** +4. **Run Codeflash with GitHub Actions:** Benchmark mode is best used together with Codeflash as a GitHub Action. This way, Codeflash will trace through your benchmark and optimize the functions modified in your Pull Request to speed up the benchmark. diff --git a/docs/optimizing-with-codeflash/codeflash-all.mdx b/docs/optimizing-with-codeflash/codeflash-all.mdx index 92a232e67..7749817c7 100644 --- a/docs/optimizing-with-codeflash/codeflash-all.mdx +++ b/docs/optimizing-with-codeflash/codeflash-all.mdx @@ -3,13 +3,13 @@ title: "Optimize Your Entire Codebase" description: "Automatically optimize all codepaths in your project with Codeflash's comprehensive analysis" icon: "database" sidebarTitle: "Optimize Entire Codebase" -keywords: ["codebase optimization", "all functions", "batch optimization", "github app", "checkpoint", "recovery"] +keywords: ["codebase optimization", "all functions", "batch optimization", "github app", "checkpoint", "recovery", "javascript", "typescript", "python"] --- # Optimize your entire codebase Codeflash can optimize your entire codebase by analyzing all the functions in your project and generating optimized versions of them. -It iterates through all the functions in your codebase and optimizes them one by one. +It iterates through all the functions in your codebase and optimizes them one by one. This works for Python, JavaScript, and TypeScript projects. To optimize your entire codebase, run the following command in your project directory: @@ -30,15 +30,27 @@ codeflash --all path/to/dir ``` - If your project has a good number of unit tests, we can trace those to achieve higher quality results. 
- The following approach is recommended instead: + If your project has a good number of unit tests, tracing them achieves higher quality results. + + + ```bash codeflash optimize --trace-only -m pytest tests/ ; codeflash --all ``` - This will run your test suite, trace all the code covered by your tests, ensuring higher correctness guarantees - and better performance benchmarking, and help create optimizations for code where the LLMs struggle to generate and run tests. + + + ```bash + codeflash optimize --trace-only --jest ; codeflash --all + # or for Vitest projects + codeflash optimize --trace-only --vitest ; codeflash --all + ``` + + + + This runs your test suite, traces all the code covered by your tests, ensuring higher correctness guarantees + and better performance benchmarking, and helps create optimizations for code where the LLMs struggle to generate and run tests. - Even though `codeflash --all` discovers any existing unit tests. It currently can only discover any test that directly calls the + `codeflash --all` discovers any existing unit tests, but it currently can only discover tests that directly call the function under optimization. Tracing all the tests helps ensure correctness for code that may be indirectly called by your tests. 
diff --git a/docs/optimizing-with-codeflash/codeflash-github-actions.mdx b/docs/optimizing-with-codeflash/codeflash-github-actions.mdx index b8da4ebac..dc6418104 100644 --- a/docs/optimizing-with-codeflash/codeflash-github-actions.mdx +++ b/docs/optimizing-with-codeflash/codeflash-github-actions.mdx @@ -26,9 +26,9 @@ We highly recommend setting this up, since once you set it up all your new code ✅ A Codeflash API key from the [Codeflash Web App](https://app.codeflash.ai/) -✅ Completed [local installation](/getting-started/local-installation) with `codeflash init` +✅ Completed local installation with `codeflash init` ([Python](/getting-started/local-installation) or [JavaScript/TypeScript](/getting-started/javascript-installation)) -✅ A Python project with a configured `pyproject.toml` file +✅ A configured project (`pyproject.toml` for Python, `package.json` for JavaScript/TypeScript) ## Setup Options @@ -113,7 +113,7 @@ jobs: -Customize the dependency installation based on your Python package manager: +Customize the dependency installation based on your package manager: The workflow will need to be set up in such a way the Codeflash can create and run tests for functionality and speed, so the stock YAML may need to be altered to @@ -121,7 +121,7 @@ suit the specific codebase. Typically the setup steps for a unit test workflow c be copied. -```yaml Poetry +```yaml Poetry (Python) - name: Install Project Dependencies run: | python -m pip install --upgrade pip @@ -129,11 +129,11 @@ be copied. poetry install --with dev - name: Run Codeflash to optimize code run: | - poetry env use python + poetry env use python poetry run codeflash ``` -```yaml uv +```yaml uv (Python) - uses: astral-sh/setup-uv@v6 with: enable-cache: true @@ -142,7 +142,7 @@ be copied. run: uv run codeflash ``` -```yaml pip +```yaml pip (Python) - name: Install Project Dependencies run: | python -m pip install --upgrade pip @@ -151,7 +151,50 @@ be copied. 
- name: Run Codeflash to optimize code run: codeflash ``` + +```yaml npm (JavaScript/TypeScript) +- uses: actions/setup-node@v4 + with: + node-version: '18' +- name: Install Project Dependencies + run: npm ci +- name: Run Codeflash to optimize code + run: npx codeflash +``` + +```yaml yarn (JavaScript/TypeScript) +- uses: actions/setup-node@v4 + with: + node-version: '18' +- name: Install Project Dependencies + run: yarn install --immutable +- name: Run Codeflash to optimize code + run: yarn codeflash +``` + +```yaml pnpm (JavaScript/TypeScript) +- uses: pnpm/action-setup@v4 + with: + version: 9 +- uses: actions/setup-node@v4 + with: + node-version: '18' + cache: 'pnpm' +- name: Install Project Dependencies + run: pnpm install --frozen-lockfile +- name: Run Codeflash to optimize code + run: pnpm codeflash +``` + + +**Monorepo?** If your codeflash config is in a subdirectory, add `working-directory` to the steps: +```yaml +- name: Run Codeflash to optimize code + run: npx codeflash + working-directory: packages/my-library +``` + diff --git a/docs/optimizing-with-codeflash/one-function.mdx b/docs/optimizing-with-codeflash/one-function.mdx index 8f60db0a7..194531198 100644 --- a/docs/optimizing-with-codeflash/one-function.mdx +++ b/docs/optimizing-with-codeflash/one-function.mdx @@ -1,6 +1,6 @@ --- title: "Optimize a Single Function" -description: "Target and optimize individual Python functions for maximum performance gains" +description: "Target and optimize individual functions for maximum performance gains" icon: "bullseye" sidebarTitle: "Optimize Single Function" keywords: @@ -10,6 +10,9 @@ keywords: "class methods", "performance", "targeted optimization", + "javascript", + "typescript", + "python", ] --- @@ -24,23 +27,55 @@ your mileage may vary. 
## How to optimize a function -To optimize a function, you can run the following command in your project: +To optimize a function, run the following command in your project: + + ```bash codeflash --file path/to/your/file.py --function function_name ``` + + +```bash +codeflash --file path/to/your/file.js --function functionName +``` + + +```bash +codeflash --file path/to/your/file.ts --function functionName +``` + + If you have installed the GitHub App to your repository, the above command will open a pull request with the optimized function. -If you want to optimize a function locally, you can add a `--no-pr` argument as follows: +If you want to optimize a function locally, add a `--no-pr` argument: + + ```bash codeflash --file path/to/your/file.py --function function_name --no-pr ``` + + +```bash +codeflash --file path/to/your/file.ts --function functionName --no-pr +``` + + ### Optimizing class methods -To optimize a method `method_name` in a class `ClassName`, you can run the following command: +To optimize a method `methodName` in a class `ClassName`: + + ```bash codeflash --file path/to/your/file.py --function ClassName.method_name ``` + + +```bash +codeflash --file path/to/your/file.ts --function ClassName.methodName +``` + + diff --git a/docs/optimizing-with-codeflash/trace-and-optimize.mdx b/docs/optimizing-with-codeflash/trace-and-optimize.mdx index 3f4e23465..fb62ea1c1 100644 --- a/docs/optimizing-with-codeflash/trace-and-optimize.mdx +++ b/docs/optimizing-with-codeflash/trace-and-optimize.mdx @@ -1,6 +1,6 @@ --- title: "Trace & Optimize E2E Workflows" -description: "End-to-end optimization of entire Python workflows with execution tracing" +description: "End-to-end optimization of entire workflows with execution tracing" icon: "route" sidebarTitle: "Optimize E2E Workflows" keywords: @@ -11,28 +11,50 @@ keywords: "end-to-end", "script optimization", "context manager", + "javascript", + "typescript", + "jest", + "vitest", ] --- -Codeflash can optimize an 
entire Python script end-to-end by tracing the script's execution and generating Replay Tests. -Tracing follows the execution of a script, profiles it and captures inputs to all functions it called, allowing them to be replayed during optimization. -Codeflash uses these Replay Tests to optimize the most important functions called in the script, delivering the best performance for your workflow. +Codeflash can optimize an entire script or test suite end-to-end by tracing its execution and generating Replay Tests. +Tracing follows the execution of your code, profiles it and captures inputs to all functions it called, allowing them to be replayed during optimization. +Codeflash uses these Replay Tests to optimize the most important functions called in the workflow, delivering the best performance. ![Function Optimization](/images/priority-order.png) -To optimize a script, `python myscript.py`, simply replace `python` with `codeflash optimize` and run the following command: + + +To optimize a script, `python myscript.py`, simply replace `python` with `codeflash optimize`: ```bash codeflash optimize myscript.py ``` -You can also optimize code called by pytest tests that you could normally run like `python -m pytest tests/`, this provides for a good workload to optimize. Run this command: +You can also optimize code called by pytest tests: ```bash codeflash optimize -m pytest tests/ ``` + + +To trace and optimize your Jest or Vitest tests: -The powerful `codeflash optimize` command creates high-quality optimizations, making it ideal when you need to optimize a workflow or script. The initial tracing process can be slow, so try to limit your script's runtime to under 1 minute for best results. If your workflow is longer, consider tracing it into smaller sections by using the Codeflash tracer as a context manager (point 3 below). 
+```bash +# Jest +codeflash optimize --jest + +# Vitest +codeflash optimize --vitest + +# Or trace a specific script +codeflash optimize --language javascript script.js +``` + + + +The `codeflash optimize` command creates high-quality optimizations, making it ideal when you need to optimize a workflow or script. The initial tracing process can be slow, so try to limit your script's runtime to under 1 minute for best results. The generated replay tests and the trace file are for the immediate optimization use, don't add them to git. @@ -61,6 +83,9 @@ This way you can be _sure_ that the optimized function causes no changes of beha ## Using codeflash optimize + + + Codeflash script optimizer can be used in three ways: 1. **As an integrated command** @@ -100,10 +125,10 @@ Codeflash script optimizer can be used in three ways: - `--timeout`: The maximum time in seconds to trace the entire workflow. Default is indefinite. This is useful while tracing really long workflows. -3. **As a Context Manager -** +3. **As a Context Manager** - To trace only specific sections of your code, You can also use the Codeflash Tracer as a context manager. - You can wrap the code you want to trace in a `with` statement as follows - + To trace only specific sections of your code, you can use the Codeflash Tracer as a context manager. + You can wrap the code you want to trace in a `with` statement as follows: ```python from codeflash.tracer import Tracer @@ -128,3 +153,46 @@ Codeflash script optimizer can be used in three ways: - `output`: The file to save the trace to. Default is `codeflash.trace`. - `config_file_path`: The path to the `pyproject.toml` file which stores the Codeflash config. This is auto-discovered by default. You can also disable the tracer in the code by setting the `disable=True` option in the `Tracer` constructor. + + + + +The JavaScript tracer uses Babel instrumentation to capture function calls during your test suite execution. + +1. 
**Trace your test suite** + + ```bash + # Jest projects + codeflash optimize --jest + + # Vitest projects + codeflash optimize --vitest + + # Trace a specific script + codeflash optimize --language javascript src/main.js + ``` + +2. **Trace specific functions only** + + ```bash + codeflash optimize --jest --only-functions processData,transformInput + ``` + +3. **Trace and optimize as two separate steps** + + ```bash + # Step 1: Create trace file + codeflash optimize --trace-only --jest --output trace_file.sqlite + + # Step 2: Optimize with replay tests + codeflash --replay-test /path/to/test_replay_test_0.test.js + ``` + + More Options: + + - `--timeout`: Maximum tracing time in seconds. + - `--max-function-count`: Maximum traces per function (default: 256). + - `--only-functions`: Comma-separated list of function names to trace. + + + diff --git a/packages/codeflash/package-lock.json b/packages/codeflash/package-lock.json index aaffaef6a..ea4b361f6 100644 --- a/packages/codeflash/package-lock.json +++ b/packages/codeflash/package-lock.json @@ -1,12 +1,12 @@ { "name": "codeflash", - "version": "0.8.0", + "version": "0.9.0", "lockfileVersion": 3, "requires": true, "packages": { "": { "name": "codeflash", - "version": "0.8.0", + "version": "0.9.0", "hasInstallScript": true, "license": "MIT", "dependencies": { diff --git a/packages/codeflash/package.json b/packages/codeflash/package.json index dfd3abdf1..97c3cee33 100644 --- a/packages/codeflash/package.json +++ b/packages/codeflash/package.json @@ -1,6 +1,6 @@ { "name": "codeflash", - "version": "0.8.0", + "version": "0.10.0", "description": "Codeflash - AI-powered code optimization for JavaScript and TypeScript", "main": "runtime/index.js", "types": "runtime/index.d.ts", @@ -32,6 +32,10 @@ "./loop-runner": { "require": "./runtime/loop-runner.js", "import": "./runtime/loop-runner.js" + }, + "./jest-reporter": { + "require": "./runtime/jest-reporter.js", + "import": "./runtime/jest-reporter.js" } }, "scripts": { diff 
--git a/packages/codeflash/runtime/jest-reporter.js b/packages/codeflash/runtime/jest-reporter.js new file mode 100644 index 000000000..b4da97f05 --- /dev/null +++ b/packages/codeflash/runtime/jest-reporter.js @@ -0,0 +1,204 @@ +/** + * Codeflash JUnit XML Reporter for Jest. + * + * Minimal reporter that outputs JUnit XML in the format expected by + * codeflash's Python parser. Replaces the external jest-junit dependency. + * + * Configuration via environment variables (same as jest-junit): + * JEST_JUNIT_OUTPUT_FILE — absolute path for the XML file (required) + * JEST_JUNIT_CLASSNAME — template for classname ("{filepath}" supported) + * JEST_JUNIT_SUITE_NAME — template for suite name ("{filepath}" supported) + * JEST_JUNIT_ADD_FILE_ATTRIBUTE — "true" to add file= on + * JEST_JUNIT_INCLUDE_CONSOLE_OUTPUT — "true" to include console.log in + */ + +"use strict"; + +const fs = require("fs"); +const path = require("path"); + +function escapeXml(str) { + if (!str) return ""; + return str + .replace(/&/g, "&") + .replace(//g, ">") + .replace(/"/g, """) + .replace(/'/g, "'"); +} + +function escapeXmlContent(str) { + if (!str) return ""; + return str.replace(/&/g, "&").replace(//g, ">"); +} + +function formatTemplate(template, values) { + if (!template) return ""; + let result = template; + for (const [key, val] of Object.entries(values)) { + result = result.replace(new RegExp(`\\{${key}\\}`, "g"), val || ""); + } + return result; +} + +class CodeflashJestReporter { + constructor(globalConfig, _reporterOptions) { + this._globalConfig = globalConfig; + this._outputFile = process.env.JEST_JUNIT_OUTPUT_FILE || "jest-results.xml"; + this._classnameTemplate = process.env.JEST_JUNIT_CLASSNAME || "{classname}"; + this._suiteNameTemplate = process.env.JEST_JUNIT_SUITE_NAME || "{filepath}"; + this._addFileAttribute = + process.env.JEST_JUNIT_ADD_FILE_ATTRIBUTE === "true"; + this._includeConsoleOutput = + process.env.JEST_JUNIT_INCLUDE_CONSOLE_OUTPUT === "true"; + // Capture 
buffered console output per test file + this._consoleBuffers = new Map(); + } + + // Called by Jest when a test suite starts — we just note it + onTestStart(_test) {} + + // Called by Jest with console output for a test file + onTestFileResult(_test, testResult, _aggregatedResult) { + if ( + this._includeConsoleOutput && + testResult.console && + testResult.console.length > 0 + ) { + const messages = testResult.console + .map((entry) => { + const prefix = + entry.type === "error" + ? "console.error" + : entry.type === "warn" + ? "console.warn" + : "console.log"; + return `${prefix}\n ${entry.message}`; + }) + .join("\n\n"); + this._consoleBuffers.set(testResult.testFilePath, messages); + } + } + + onRunComplete(_testContexts, results) { + const suites = []; + let totalTests = 0; + let totalFailures = 0; + let totalErrors = 0; + let totalTime = 0; + + for (const suiteResult of results.testResults) { + const filePath = suiteResult.testFilePath || ""; + const relativePath = this._globalConfig.rootDir + ? 
path.relative(this._globalConfig.rootDir, filePath) + : filePath; + + const templateVars = { + filepath: filePath, + filename: path.basename(filePath), + classname: relativePath.replace(/\//g, ".").replace(/\.[^.]+$/, ""), + title: "", + displayName: suiteResult.displayName || "", + }; + + const suiteName = formatTemplate( + this._suiteNameTemplate, + templateVars + ); + + const testcases = []; + let suiteFailures = 0; + let suiteErrors = 0; + let suiteTime = 0; + + for (const testResult of suiteResult.testResults) { + const duration = (testResult.duration || 0) / 1000; // ms → seconds + suiteTime += duration; + + const tcTemplateVars = { + ...templateVars, + title: testResult.fullName || testResult.title || "", + }; + + const classname = formatTemplate( + this._classnameTemplate, + tcTemplateVars + ); + + let tcXml = ` 0 + ) { + suiteFailures++; + const failureText = testResult.failureMessages.join("\n"); + tcXml += `>\n \n `; + } else if (testResult.status === "pending") { + tcXml += `>\n \n `; + } else { + tcXml += "/>"; + } + + testcases.push(tcXml); + } + + totalTests += suiteResult.testResults.length; + totalFailures += suiteFailures; + totalErrors += suiteErrors; + totalTime += suiteTime; + + // Build suite XML + let suiteXml = ` \n`; + } + } + + suiteXml += " "; + suites.push(suiteXml); + } + + const xml = [ + '', + ``, + ...suites, + "", + ].join("\n"); + + // Ensure output directory exists + const outputDir = path.dirname(this._outputFile); + if (!fs.existsSync(outputDir)) { + fs.mkdirSync(outputDir, { recursive: true }); + } + + fs.writeFileSync(this._outputFile, xml, "utf8"); + } +} + +module.exports = CodeflashJestReporter; diff --git a/pyproject.toml b/pyproject.toml index 367a6353c..020b9d02e 100644 --- a/pyproject.toml +++ b/pyproject.toml @@ -1,364 +1,365 @@ -[project] -name = "codeflash" -dynamic = ["version"] -description = "Client for codeflash.ai - automatic code performance optimization, powered by AI" -authors = [{ name = "CodeFlash Inc.", 
email = "contact@codeflash.ai" }] -requires-python = ">=3.9" -readme = "README.md" -license-files = ["LICENSE"] -keywords = [ - "codeflash", - "performance", - "optimization", - "ai", - "code", - "machine learning", - "LLM", -] -dependencies = [ - "unidiff>=0.7.4", - "pytest>=7.0.0", - "gitpython>=3.1.31", - "libcst>=1.0.1", - "jedi>=0.19.1", - # Tree-sitter for multi-language support - "tree-sitter>=0.23.0", - "tree-sitter-javascript>=0.23.0", - "tree-sitter-typescript>=0.23.0", - "tree-sitter-java>=0.23.0", - "pytest-timeout>=2.1.0", - "tomlkit>=0.11.7", - "junitparser>=3.1.0", - "pydantic>=1.10.1", - "humanize>=4.0.0", - "posthog>=3.0.0", - "click>=8.1.0", - "inquirer>=3.0.0", - "sentry-sdk>=1.40.6,<3.0.0", - "parameterized>=0.9.0", - "isort>=5.11.0", - "dill>=0.3.8", - "rich>=13.8.1", - "lxml>=5.3.0", - "crosshair-tool>=0.0.78; python_version < '3.15'", - "coverage>=7.6.4", - "line_profiler>=4.2.0", - "platformdirs>=4.3.7", - "pygls>=2.0.0,<3.0.0", - "codeflash-benchmark", - "filelock>=3.20.3; python_version >= '3.10'", - "filelock<3.20.3; python_version < '3.10'", - "pytest-asyncio>=0.18.0", -] - -[project.urls] -Homepage = "https://codeflash.ai" - -[project.scripts] -codeflash = "codeflash.main:main" - -[project.optional-dependencies] - -[dependency-groups] -dev = [ - "ipython>=8.12.0", - "mypy>=1.13", - "ruff>=0.7.0", - "lxml-stubs>=0.5.1", - "pandas-stubs>=2.2.2.240807, <2.2.3.241009", - "types-Pygments>=2.18.0.20240506", - "types-colorama>=0.4.15.20240311", - "types-decorator>=5.1.8.20240310", - "types-jsonschema>=4.23.0.20240813", - "types-requests>=2.32.0.20241016", - "types-six>=1.16.21.20241009", - "types-cffi>=1.16.0.20240331", - "types-openpyxl>=3.1.5.20241020", - "types-regex>=2024.9.11.20240912", - "types-python-dateutil>=2.9.0.20241003", - "types-gevent>=24.11.0.20241230,<25", - "types-greenlet>=3.1.0.20241221,<4", - "types-pexpect>=4.9.0.20241208,<5", - "types-unidiff>=0.7.0.20240505,<0.8", - "prek>=0.2.25", - "ty>=0.0.14", - "uv>=0.9.29", -] 
-tests = [ - "black>=25.9.0", - "jax>=0.4.30", - "numpy>=2.0.2", - "pandas>=2.3.3", - "pyarrow>=15.0.0", - "pyrsistent>=0.20.0", - "scipy>=1.13.1", - "torch>=2.8.0", - "xarray>=2024.7.0", - "eval_type_backport", - "numba>=0.60.0", - "tensorflow>=2.20.0; python_version >= '3.10'", -] - -[tool.hatch.build.targets.sdist] -include = ["codeflash"] -exclude = [ - "docs/*", - "experiments/*", - "tests/*", - "*.pyc", - "__pycache__", - "*.pyo", - "*.pyd", - "*.so", - "*.dylib", - "*.dll", - "*.exe", - "*.log", - "*.tmp", - ".env", - ".env.*", - "**/.env", - "**/.env.*", - ".env.example", - "*.pem", - "*.key", - "secrets.*", - "config.yaml", - "config.json", - ".git", - ".gitignore", - ".gitattributes", - ".github", - "Dockerfile", - "docker-compose.yml", - "*.md", - "*.txt", - "*.csv", - "*.db", - "*.sqlite3", - "*.pdf", - "*.docx", - "*.xlsx", - "*.pptx", - "*.iml", - ".idea", - ".vscode", - ".DS_Store", - "Thumbs.db", - "venv", - "env", -] - -[tool.hatch.build.targets.wheel] -exclude = [ - "docs/*", - "experiments/*", - "tests/*", - "*.pyc", - "__pycache__", - "*.pyo", - "*.pyd", - "*.so", - "*.dylib", - "*.dll", - "*.exe", - "*.log", - "*.tmp", - ".env", - ".env.*", - "**/.env", - "**/.env.*", - ".env.example", - "*.pem", - "*.key", - "secrets.*", - "config.yaml", - "config.json", - ".git", - ".gitignore", - ".gitattributes", - ".github", - "Dockerfile", - "docker-compose.yml", - "*.md", - "*.txt", - "*.csv", - "*.db", - "*.sqlite3", - "*.pdf", - "*.docx", - "*.xlsx", - "*.pptx", - "*.iml", - ".idea", - ".vscode", - ".DS_Store", - "Thumbs.db", - "venv", - "env", -] - -[tool.mypy] -show_error_code_links = true -pretty = true -show_absolute_path = true -show_error_context = true -show_error_end = true -strict = true -warn_unreachable = true -install_types = true -plugins = ["pydantic.mypy"] - -exclude = ["tests/", "code_to_optimize/", "pie_test_set/", "experiments/"] - -[[tool.mypy.overrides]] -module = ["jedi", "jedi.api.classes", "inquirer", "inquirer.themes", "numba"] 
-ignore_missing_imports = true - -[tool.pydantic-mypy] -init_forbid_extra = true -init_typed = true -warn_required_dynamic_aliases = true - -[tool.ruff] -target-version = "py39" -line-length = 120 -fix = true -show-fixes = true -extend-exclude = ["code_to_optimize/", "pie_test_set/", "tests/", "experiments/"] - -[tool.ruff.lint] -select = ["ALL"] -ignore = [ - "N802", - "C901", - "D100", - "D101", - "D102", - "D103", - "D105", - "D107", - "D203", # incorrect-blank-line-before-class (incompatible with D211) - "D213", # multi-line-summary-second-line (incompatible with D212) - "S101", - "S603", - "S607", - "COM812", - "FIX002", - "PLR0912", - "PLR0913", - "PLR0915", - "TD002", - "TD003", - "TD004", - "PLR2004", - "UP007", # remove once we drop 3.9 support. - "E501", - "BLE001", - "ERA001", - "TRY003", - "EM101", - "T201", - "PGH004", - "S301", - "D104", - "PERF203", - "LOG015", - "PLC0415", - "UP045", - "TD007", - "D417", - "D401", - "S110", # try-except-pass - we do this a lot - "ARG002", # Unused method argument - # Added for multi-language branch - "FBT001", # Boolean positional argument - "FBT002", # Boolean default positional argument - "ANN401", # typing.Any disallowed - "ARG001", # Unused function argument (common in abstract/interface methods) - "TRY300", # Consider moving to else block - "FURB110", # if-exp-instead-of-or-operator - we prefer explicit if-else over "or" - "TRY401", # Redundant exception in logging.exception - "PLR0911", # Too many return statements - "PLW0603", # Global statement - "PLW2901", # Loop variable overwritten - "SIM102", # Nested if statements - "SIM103", # Return negated condition - "ANN001", # Missing type annotation - "PLC0206", # Dictionary items - "S314", # XML parsing (acceptable for dev tool) - "S608", # SQL injection (internal use only) - "S112", # try-except-continue - "PERF401", # List comprehension suggestion - "SIM108", # Ternary operator suggestion - "F841", # Unused variable (often intentional) - "ANN202", # Missing 
return type for private functions - "B009", # getattr-with-constant - needed to avoid mypy [misc] on dunder access -] - -[tool.ruff.lint.flake8-type-checking] -strict = true -runtime-evaluated-base-classes = ["pydantic.BaseModel"] -runtime-evaluated-decorators = ["pydantic.validate_call", "pydantic.dataclasses.dataclass"] - -[tool.ruff.lint.pep8-naming] -classmethod-decorators = [ - # Allow Pydantic's `@validator` decorator to trigger class method treatment. - "pydantic.validator", -] - -[tool.ruff.lint.isort] -split-on-trailing-comma = false - -[tool.ruff.format] -docstring-code-format = true -skip-magic-trailing-comma = true - -[tool.ty.src] -exclude = ["tests", "code_to_optimize", "pie_test_set", "experiments"] - -[tool.hatch.version] -source = "uv-dynamic-versioning" - -[tool.uv] -workspace = { members = ["codeflash-benchmark"] } - -[tool.uv.sources] -codeflash-benchmark = { workspace = true } - -[tool.uv-dynamic-versioning] -enable = true -style = "pep440" -vcs = "git" - -[tool.hatch.build.hooks.version] -path = "codeflash/version.py" -template = """# These version placeholders will be replaced by uv-dynamic-versioning during build. -__version__ = "{version}" -""" - - -#[tool.hatch.build.hooks.custom] -#path = "codeflash/update_license_version.py" - - -[tool.codeflash] -# All paths are relative to this pyproject.toml's directory. 
-module-root = "codeflash" -tests-root = "codeflash" -benchmarks-root = "tests/benchmarks" -ignore-paths = [] -formatter-cmds = ["disabled"] - -[tool.pytest.ini_options] -filterwarnings = [ - "ignore::pytest.PytestCollectionWarning", -] -markers = [ - "ci_skip: mark test to skip in CI environment", -] - - -[build-system] -requires = ["hatchling", "uv-dynamic-versioning"] -build-backend = "hatchling.build" +[project] +name = "codeflash" +dynamic = ["version"] +description = "Client for codeflash.ai - automatic code performance optimization, powered by AI" +authors = [{ name = "CodeFlash Inc.", email = "contact@codeflash.ai" }] +requires-python = ">=3.9" +readme = "README.md" +license-files = ["LICENSE"] +keywords = [ + "codeflash", + "performance", + "optimization", + "ai", + "code", + "machine learning", + "LLM", +] +dependencies = [ + "unidiff>=0.7.4", + "pytest>=7.0.0", + "gitpython>=3.1.31", + "libcst>=1.0.1", + "jedi>=0.19.1", + # Tree-sitter for multi-language support + "tree-sitter>=0.23.0", + "tree-sitter-javascript>=0.23.0", + "tree-sitter-typescript>=0.23.0", + "tree-sitter-java>=0.23.0", + "pytest-timeout>=2.1.0", + "tomlkit>=0.11.7", + "junitparser>=3.1.0", + "pydantic>=1.10.1", + "humanize>=4.0.0", + "posthog>=3.0.0", + "click>=8.1.0", + "inquirer>=3.0.0", + "sentry-sdk>=1.40.6,<3.0.0", + "parameterized>=0.9.0", + "isort>=5.11.0", + "dill>=0.3.8", + "rich>=13.8.1", + "lxml>=5.3.0", + "crosshair-tool>=0.0.78; python_version < '3.15'", + "coverage>=7.6.4", + "line_profiler>=4.2.0", + "platformdirs>=4.3.7", + "pygls>=2.0.0,<3.0.0", + "codeflash-benchmark", + "filelock>=3.20.3; python_version >= '3.10'", + "filelock<3.20.3; python_version < '3.10'", + "pytest-asyncio>=0.18.0", +] + +[project.urls] +Homepage = "https://codeflash.ai" + +[project.scripts] +codeflash = "codeflash.main:main" + +[project.optional-dependencies] + +[dependency-groups] +dev = [ + "ipython>=8.12.0", + "mypy>=1.13", + "ruff>=0.7.0", + "lxml-stubs>=0.5.1", + 
"pandas-stubs>=2.2.2.240807, <2.2.3.241009", + "types-Pygments>=2.18.0.20240506", + "types-colorama>=0.4.15.20240311", + "types-decorator>=5.1.8.20240310", + "types-jsonschema>=4.23.0.20240813", + "types-requests>=2.32.0.20241016", + "types-six>=1.16.21.20241009", + "types-cffi>=1.16.0.20240331", + "types-openpyxl>=3.1.5.20241020", + "types-regex>=2024.9.11.20240912", + "types-python-dateutil>=2.9.0.20241003", + "types-gevent>=24.11.0.20241230,<25", + "types-greenlet>=3.1.0.20241221,<4", + "types-pexpect>=4.9.0.20241208,<5", + "types-unidiff>=0.7.0.20240505,<0.8", + "prek>=0.2.25", + "ty>=0.0.14", + "uv>=0.9.29", +] +tests = [ + "black>=25.9.0", + "jax>=0.4.30", + "numpy>=2.0.2", + "pandas>=2.3.3", + "pyarrow>=15.0.0", + "pyrsistent>=0.20.0", + "scipy>=1.13.1", + "torch>=2.8.0", + "xarray>=2024.7.0", + "eval_type_backport", + "numba>=0.60.0", + "tensorflow>=2.20.0; python_version >= '3.10'", +] + +[tool.hatch.build.targets.sdist] +include = ["codeflash"] +exclude = [ + "docs/*", + "experiments/*", + "tests/*", + "*.pyc", + "__pycache__", + "*.pyo", + "*.pyd", + "*.so", + "*.dylib", + "*.dll", + "*.exe", + "*.log", + "*.tmp", + ".env", + ".env.*", + "**/.env", + "**/.env.*", + ".env.example", + "*.pem", + "*.key", + "secrets.*", + "config.yaml", + "config.json", + ".git", + ".gitignore", + ".gitattributes", + ".github", + "Dockerfile", + "docker-compose.yml", + "*.md", + "*.txt", + "*.csv", + "*.db", + "*.sqlite3", + "*.pdf", + "*.docx", + "*.xlsx", + "*.pptx", + "*.iml", + ".idea", + ".vscode", + ".DS_Store", + "Thumbs.db", + "venv", + "env", +] + +[tool.hatch.build.targets.wheel] +exclude = [ + "docs/*", + "experiments/*", + "tests/*", + "*.pyc", + "__pycache__", + "*.pyo", + "*.pyd", + "*.so", + "*.dylib", + "*.dll", + "*.exe", + "*.log", + "*.tmp", + ".env", + ".env.*", + "**/.env", + "**/.env.*", + ".env.example", + "*.pem", + "*.key", + "secrets.*", + "config.yaml", + "config.json", + ".git", + ".gitignore", + ".gitattributes", + ".github", + "Dockerfile", + 
"docker-compose.yml", + "*.md", + "*.txt", + "*.csv", + "*.db", + "*.sqlite3", + "*.pdf", + "*.docx", + "*.xlsx", + "*.pptx", + "*.iml", + ".idea", + ".vscode", + ".DS_Store", + "Thumbs.db", + "venv", + "env", +] + +[tool.mypy] +show_error_code_links = true +pretty = true +show_absolute_path = true +show_error_context = true +show_error_end = true +strict = true +warn_unreachable = true +install_types = true +plugins = ["pydantic.mypy"] + +exclude = ["tests/", "code_to_optimize/", "pie_test_set/", "experiments/"] + +[[tool.mypy.overrides]] +module = ["jedi", "jedi.api.classes", "inquirer", "inquirer.themes", "numba"] +ignore_missing_imports = true + +[tool.pydantic-mypy] +init_forbid_extra = true +init_typed = true +warn_required_dynamic_aliases = true + +[tool.ruff] +target-version = "py39" +line-length = 120 +fix = true +show-fixes = true +extend-exclude = ["code_to_optimize/", "pie_test_set/", "tests/", "experiments/"] + +[tool.ruff.lint] +select = ["ALL"] +ignore = [ + "N802", + "C901", + "D100", + "D101", + "D102", + "D103", + "D105", + "D107", + "D203", # incorrect-blank-line-before-class (incompatible with D211) + "D213", # multi-line-summary-second-line (incompatible with D212) + "S101", + "S603", + "S607", + "COM812", + "FIX002", + "PLR0912", + "PLR0913", + "PLR0915", + "TD002", + "TD003", + "TD004", + "PLR2004", + "UP007", # remove once we drop 3.9 support. 
+ "E501", + "BLE001", + "ERA001", + "TRY003", + "EM101", + "T201", + "PGH004", + "S301", + "D104", + "PERF203", + "LOG015", + "PLC0415", + "UP045", + "TD007", + "D417", + "D401", + "S110", # try-except-pass - we do this a lot + "ARG002", # Unused method argument + # Added for multi-language branch + "FBT001", # Boolean positional argument + "FBT002", # Boolean default positional argument + "ANN401", # typing.Any disallowed + "ARG001", # Unused function argument (common in abstract/interface methods) + "TRY300", # Consider moving to else block + "FURB110", # if-exp-instead-of-or-operator - we prefer explicit if-else over "or" + "TRY401", # Redundant exception in logging.exception + "PLR0911", # Too many return statements + "PLW0603", # Global statement + "PLW2901", # Loop variable overwritten + "SIM102", # Nested if statements + "SIM103", # Return negated condition + "ANN001", # Missing type annotation + "PLC0206", # Dictionary items + "S314", # XML parsing (acceptable for dev tool) + "S608", # SQL injection (internal use only) + "S112", # try-except-continue + "PERF401", # List comprehension suggestion + "SIM108", # Ternary operator suggestion + "F841", # Unused variable (often intentional) + "ANN202", # Missing return type for private functions + "B009", # getattr-with-constant - needed to avoid mypy [misc] on dunder access +] + +[tool.ruff.lint.flake8-type-checking] +strict = true +runtime-evaluated-base-classes = ["pydantic.BaseModel"] +runtime-evaluated-decorators = ["pydantic.validate_call", "pydantic.dataclasses.dataclass"] + +[tool.ruff.lint.pep8-naming] +classmethod-decorators = [ + # Allow Pydantic's `@validator` decorator to trigger class method treatment. 
+ "pydantic.validator", +] + +[tool.ruff.lint.isort] +split-on-trailing-comma = false + +[tool.ruff.format] +docstring-code-format = true +skip-magic-trailing-comma = true + +[tool.ty.src] +exclude = ["tests", "code_to_optimize", "pie_test_set", "experiments"] + +[tool.hatch.version] +source = "uv-dynamic-versioning" + +[tool.uv] +workspace = { members = ["codeflash-benchmark"] } + +[tool.uv.sources] +codeflash-benchmark = { workspace = true } + +[tool.uv-dynamic-versioning] +enable = true +style = "pep440" +vcs = "git" + +[tool.hatch.build.hooks.version] +path = "codeflash/version.py" +template = """# These version placeholders will be replaced by uv-dynamic-versioning during build. +__version__ = "{version}" +""" + + +#[tool.hatch.build.hooks.custom] +#path = "codeflash/update_license_version.py" + + +[tool.codeflash] +# All paths are relative to this pyproject.toml's directory. +module-root = "codeflash" +tests-root = "tests" +benchmarks-root = "tests/benchmarks" +ignore-paths = [] +formatter-cmds = ["disabled"] + +[tool.pytest.ini_options] +filterwarnings = [ + "ignore::pytest.PytestCollectionWarning", +] +markers = [ + "ci_skip: mark test to skip in CI environment", +] + + +[build-system] +requires = ["hatchling", "uv-dynamic-versioning"] +build-backend = "hatchling.build" + diff --git a/tests/scripts/end_to_end_test_utilities.py b/tests/scripts/end_to_end_test_utilities.py index 7611f228b..12259b339 100644 --- a/tests/scripts/end_to_end_test_utilities.py +++ b/tests/scripts/end_to_end_test_utilities.py @@ -141,7 +141,7 @@ def run_codeflash_command( def build_command( cwd: pathlib.Path, config: TestConfig, test_root: pathlib.Path, benchmarks_root: pathlib.Path | None = None ) -> list[str]: - repo_root = pathlib.Path(__file__).resolve().parent.parent.parent + repo_root = pathlib.Path(__file__).parent.parent.parent python_path = os.path.relpath(repo_root / "codeflash" / "main.py", cwd) base_command = ["uv", "run", "--no-project", python_path, "--file", 
config.file_path, "--no-pr"] @@ -149,7 +149,7 @@ def build_command( if config.function_name: base_command.extend(["--function", config.function_name]) - # Check if codeflash config exists (pyproject.toml or codeflash.toml) - if so, don't override it + # Check if config exists (pyproject.toml or codeflash.toml) - if so, don't override it has_codeflash_config = (cwd / "codeflash.toml").exists() if not has_codeflash_config: pyproject_path = cwd / "pyproject.toml" @@ -158,7 +158,7 @@ def build_command( pyproject_data = tomllib.load(f) has_codeflash_config = "tool" in pyproject_data and "codeflash" in pyproject_data["tool"] - # Only pass --tests-root and --module-root if they're not configured + # Only pass --tests-root and --module-root if they're not configured in config files if not has_codeflash_config: base_command.extend(["--tests-root", str(test_root), "--module-root", str(cwd)]) @@ -222,7 +222,7 @@ def validate_output(stdout: str, return_code: int, expected_improvement_pct: int return False if config.expected_unit_test_files is not None: - # Match the per-function test discovery message from function_optimizer.py + # Match the per-function discovery message from function_optimizer.py # Format: "Discovered X existing unit test files, Y replay test files, and Z concolic..." 
unit_test_files_match = re.search(r"Discovered (\d+) existing unit test files?", stdout) if not unit_test_files_match: diff --git a/tests/test_add_needed_imports_from_module.py b/tests/test_add_needed_imports_from_module.py index 03d62cdc8..345b966dc 100644 --- a/tests/test_add_needed_imports_from_module.py +++ b/tests/test_add_needed_imports_from_module.py @@ -493,3 +493,37 @@ def my_function(): return helper """ assert result == expected_result + + +def test_module_input_preserves_comment_position_after_imports() -> None: + from codeflash.languages.python.context.code_context_extractor import parse_code_and_prune_cst + from codeflash.models.models import CodeContextType + + src_code = """from __future__ import annotations +import re + +# Comment about PATTERN. +PATTERN = re.compile(r"test") + +def parse(): + return PATTERN.findall("") +""" + pruned_module = parse_code_and_prune_cst(src_code, CodeContextType.READ_WRITABLE, {"parse"}) + + with tempfile.TemporaryDirectory() as tmpdir: + project_root = Path(tmpdir) + file_path = project_root / "mod.py" + file_path.write_text(src_code) + + result = add_needed_imports_from_module(src_code, pruned_module, file_path, file_path, project_root) + + expected = """from __future__ import annotations +import re + +# Comment about PATTERN. 
+PATTERN = re.compile(r"test") + +def parse(): + return PATTERN.findall("") +""" + assert result == expected diff --git a/tests/test_async_run_and_parse_tests.py b/tests/test_async_run_and_parse_tests.py index 01328081c..e9d85bf68 100644 --- a/tests/test_async_run_and_parse_tests.py +++ b/tests/test_async_run_and_parse_tests.py @@ -118,8 +118,8 @@ async def test_async_sort(): test_env=test_env, test_files=func_optimizer.test_files, optimization_iteration=0, - min_outer_loops=1, - max_outer_loops=1, + pytest_min_loops=1, + pytest_max_loops=1, testing_time=0.1, ) @@ -244,8 +244,8 @@ async def test_async_class_sort(): test_env=test_env, test_files=func_optimizer.test_files, optimization_iteration=0, - min_outer_loops=1, - max_outer_loops=1, + pytest_min_loops=1, + pytest_max_loops=1, testing_time=0.1, ) @@ -369,8 +369,8 @@ async def test_async_perf(): test_env=test_env, test_files=func_optimizer.test_files, optimization_iteration=0, - min_outer_loops=1, - max_outer_loops=1, + pytest_min_loops=1, + pytest_max_loops=1, testing_time=0.1, ) @@ -489,8 +489,8 @@ async def async_error_function(lst): test_env=test_env, test_files=func_optimizer.test_files, optimization_iteration=0, - min_outer_loops=1, - max_outer_loops=1, + pytest_min_loops=1, + pytest_max_loops=1, testing_time=0.1, ) @@ -594,8 +594,8 @@ async def test_async_multi(): test_env=test_env, test_files=func_optimizer.test_files, optimization_iteration=0, - min_outer_loops=2, - max_outer_loops=5, + pytest_min_loops=2, + pytest_max_loops=5, testing_time=0.2, ) @@ -714,8 +714,8 @@ async def test_async_edge_cases(): test_env=test_env, test_files=func_optimizer.test_files, optimization_iteration=0, - min_outer_loops=1, - max_outer_loops=1, + pytest_min_loops=1, + pytest_max_loops=1, testing_time=0.1, ) @@ -860,8 +860,8 @@ def test_sync_sort(): test_env=test_env, test_files=func_optimizer.test_files, optimization_iteration=0, - min_outer_loops=1, - max_outer_loops=1, + pytest_min_loops=1, + pytest_max_loops=1, 
testing_time=0.1, ) @@ -1035,8 +1035,8 @@ async def test_mixed_sorting(): test_env=test_env, test_files=func_optimizer.test_files, optimization_iteration=0, - min_outer_loops=1, - max_outer_loops=1, + pytest_min_loops=1, + pytest_max_loops=1, testing_time=0.1, ) diff --git a/tests/test_code_context_extractor.py b/tests/test_code_context_extractor.py index 2d87fbf24..eacaafe82 100644 --- a/tests/test_code_context_extractor.py +++ b/tests/test_code_context_extractor.py @@ -1,5 +1,6 @@ from __future__ import annotations +import ast import sys import tempfile from argparse import Namespace @@ -8,17 +9,17 @@ import pytest -from codeflash.languages.python.static_analysis.code_extractor import GlobalAssignmentCollector, add_global_assignments -from codeflash.languages.python.static_analysis.code_replacer import replace_functions_and_add_imports from codeflash.discovery.functions_to_optimize import FunctionToOptimize from codeflash.languages.python.context.code_context_extractor import ( - collect_names_from_annotation, + collect_type_names_from_annotation, enrich_testgen_context, - extract_classes_from_type_hint, - extract_imports_for_class, + extract_init_stub_from_class, + extract_parameter_type_constructors, get_code_optimization_context, - resolve_transitive_type_deps, + resolve_instance_class_name, ) +from codeflash.languages.python.static_analysis.code_extractor import GlobalAssignmentCollector, add_global_assignments +from codeflash.languages.python.static_analysis.code_replacer import replace_functions_and_add_imports from codeflash.models.models import CodeString, CodeStringsMarkdown, FunctionParent from codeflash.optimization.optimizer import Optimizer @@ -104,6 +105,7 @@ def test_code_replacement10() -> None: ```python:{file_path.relative_to(file_path.parent)} from __future__ import annotations + class HelperClass: def __init__(self, name): self.name = name @@ -164,6 +166,7 @@ def test_class_method_dependencies() -> None: from __future__ import annotations from 
collections import defaultdict + class Graph: def __init__(self, vertices): self.graph = defaultdict(list) @@ -243,6 +246,7 @@ def test_bubble_sort_helper() -> None: ```python:code_to_optimize/code_directories/retriever/bubble_sort_with_math.py import math + def sorter(arr): arr.sort() x = math.sqrt(2) @@ -252,6 +256,7 @@ def sorter(arr): ```python:code_to_optimize/code_directories/retriever/bubble_sort_imported.py from bubble_sort_with_math import sorter + def sort_from_another_file(arr): sorted_arr = sorter(arr) return sorted_arr @@ -1180,6 +1185,7 @@ def test_repo_helper() -> None: ```python:{path_to_utils.relative_to(project_root)} import math + class DataProcessor: def __init__(self, default_prefix: str = "PREFIX_"): @@ -1200,6 +1206,7 @@ def add_prefix(self, data: str, prefix: str = "PREFIX_") -> str: from globals import API_URL from utils import DataProcessor + def fetch_and_process_data(): # Use the global variable for the request response = requests.get(API_URL) @@ -1279,6 +1286,7 @@ def test_repo_helper_of_helper() -> None: import math from transform_utils import DataTransformer + class DataProcessor: def __init__(self, default_prefix: str = "PREFIX_"): @@ -1299,6 +1307,7 @@ def transform_data(self, data: str) -> str: from globals import API_URL from utils import DataProcessor + def fetch_and_transform_data(): # Use the global variable for the request response = requests.get(API_URL) @@ -1387,6 +1396,7 @@ def transform_using_own_method(self, data): import math from transform_utils import DataTransformer + class DataProcessor: def __init__(self, default_prefix: str = "PREFIX_"): @@ -1467,6 +1477,7 @@ def transform_using_same_file_function(self, data): import math from transform_utils import DataTransformer + class DataProcessor: def __init__(self, default_prefix: str = "PREFIX_"): @@ -1598,6 +1609,7 @@ def test_repo_helper_circular_dependency() -> None: import math from transform_utils import DataTransformer + class DataProcessor: def __init__(self, 
default_prefix: str = "PREFIX_"): @@ -1612,6 +1624,7 @@ def circular_dependency(self, data: str) -> str: ```python:{path_to_transform_utils.relative_to(project_root)} from code_to_optimize.code_directories.retriever.utils import DataProcessor + class DataTransformer: def __init__(self): self.data = None @@ -1744,6 +1757,7 @@ def test_direct_module_import() -> None: import math from transform_utils import DataTransformer + class DataProcessor: \"\"\"A class for processing data.\"\"\" @@ -1787,6 +1801,7 @@ def function_to_optimize(): from globals import API_URL from utils import DataProcessor + def fetch_and_transform_data(): # Use the global variable for the request response = requests.get(API_URL) @@ -3383,7 +3398,6 @@ def will_fit(self, chunk: PreChunk) -> bool: assert "class Element" in extracted_code, "Should contain Element class definition" assert "def __init__" in extracted_code, "Should contain __init__ method" assert "element_id" in extracted_code, "Should contain constructor parameter" - assert "import abc" in extracted_code, "Should include necessary imports for base class" def test_enrich_testgen_context_skips_existing_definitions(tmp_path: Path) -> None: @@ -3564,9 +3578,6 @@ def get_config(self) -> LLMConfig: assert "class LLMConfig" in all_extracted_code, "Should contain LLMConfig class definition" assert "class LLMConfigBase" in all_extracted_code, "Should contain LLMConfigBase class definition" - # Verify imports are included for dataclass-related items - assert "from dataclasses import" in all_extracted_code, "Should include dataclasses import" - def test_enrich_testgen_context_extracts_imports_for_decorated_classes(tmp_path: Path) -> None: """Test that extract_imports_for_class includes decorator and type annotation imports.""" @@ -3606,169 +3617,6 @@ def create_config() -> Config: # The extracted code should include the decorator assert "@dataclass" in extracted_code, "Should include @dataclass decorator" - # The imports should include dataclass 
and field - assert "from dataclasses import" in extracted_code, "Should include dataclasses import for decorator" - - -class TestCollectNamesFromAnnotation: - """Tests for the collect_names_from_annotation helper function.""" - - def test_simple_name(self): - """Test extracting a simple type name.""" - import ast - - code = "def f(x: MyClass): pass" - annotation = ast.parse(code).body[0].args.args[0].annotation - names: set[str] = set() - collect_names_from_annotation(annotation, names) - assert "MyClass" in names - - def test_subscript_type(self): - """Test extracting names from generic types like List[int].""" - import ast - - code = "def f(x: List[int]): pass" - annotation = ast.parse(code).body[0].args.args[0].annotation - names: set[str] = set() - collect_names_from_annotation(annotation, names) - assert "List" in names - assert "int" in names - - def test_optional_type(self): - """Test extracting names from Optional[MyClass].""" - import ast - - code = "def f(x: Optional[MyClass]): pass" - annotation = ast.parse(code).body[0].args.args[0].annotation - names: set[str] = set() - collect_names_from_annotation(annotation, names) - assert "Optional" in names - assert "MyClass" in names - - def test_union_type_with_pipe(self): - """Test extracting names from union types with | syntax.""" - import ast - - code = "def f(x: int | str | None): pass" - annotation = ast.parse(code).body[0].args.args[0].annotation - names: set[str] = set() - collect_names_from_annotation(annotation, names) - # int | str | None becomes BinOp nodes - assert "int" in names - assert "str" in names - - def test_nested_generic_types(self): - """Test extracting names from nested generics like Dict[str, List[MyClass]].""" - import ast - - code = "def f(x: Dict[str, List[MyClass]]): pass" - annotation = ast.parse(code).body[0].args.args[0].annotation - names: set[str] = set() - collect_names_from_annotation(annotation, names) - assert "Dict" in names - assert "str" in names - assert "List" in 
names - assert "MyClass" in names - - def test_tuple_annotation(self): - """Test extracting names from tuple type hints.""" - import ast - - code = "def f(x: tuple[int, str, MyClass]): pass" - annotation = ast.parse(code).body[0].args.args[0].annotation - names: set[str] = set() - collect_names_from_annotation(annotation, names) - assert "tuple" in names - assert "int" in names - assert "str" in names - assert "MyClass" in names - - -class TestExtractImportsForClass: - """Tests for the extract_imports_for_class helper function.""" - - def test_extracts_base_class_imports(self): - """Test that base class imports are extracted.""" - import ast - - module_source = """from abc import ABC -from mypackage import BaseClass - -class MyClass(BaseClass, ABC): - pass -""" - tree = ast.parse(module_source) - class_node = next(n for n in ast.walk(tree) if isinstance(n, ast.ClassDef)) - result = extract_imports_for_class(tree, class_node, module_source) - assert "from abc import ABC" in result - assert "from mypackage import BaseClass" in result - - def test_extracts_decorator_imports(self): - """Test that decorator imports are extracted.""" - import ast - - module_source = """from dataclasses import dataclass -from functools import lru_cache - -@dataclass -class MyClass: - name: str -""" - tree = ast.parse(module_source) - class_node = next(n for n in ast.walk(tree) if isinstance(n, ast.ClassDef)) - result = extract_imports_for_class(tree, class_node, module_source) - assert "from dataclasses import dataclass" in result - - def test_extracts_type_annotation_imports(self): - """Test that type annotation imports are extracted.""" - import ast - - module_source = """from typing import Optional, List -from mypackage.models import Config - -@dataclass -class MyClass: - config: Optional[Config] - items: List[str] -""" - tree = ast.parse(module_source) - class_node = next(n for n in ast.walk(tree) if isinstance(n, ast.ClassDef)) - result = extract_imports_for_class(tree, class_node, 
module_source) - assert "from typing import Optional, List" in result - assert "from mypackage.models import Config" in result - - def test_extracts_field_function_imports(self): - """Test that field() function imports are extracted for dataclasses.""" - import ast - - module_source = """from dataclasses import dataclass, field -from typing import List - -@dataclass -class MyClass: - items: List[str] = field(default_factory=list) -""" - tree = ast.parse(module_source) - class_node = next(n for n in ast.walk(tree) if isinstance(n, ast.ClassDef)) - result = extract_imports_for_class(tree, class_node, module_source) - assert "from dataclasses import dataclass, field" in result - - def test_no_duplicate_imports(self): - """Test that duplicate imports are not included.""" - import ast - - module_source = """from typing import Optional - -@dataclass -class MyClass: - field1: Optional[str] - field2: Optional[int] -""" - tree = ast.parse(module_source) - class_node = next(n for n in ast.walk(tree) if isinstance(n, ast.ClassDef)) - result = extract_imports_for_class(tree, class_node, module_source) - # Should only have one import line even though Optional is used twice - assert result.count("from typing import Optional") == 1 def test_enrich_testgen_context_multiple_decorators(tmp_path: Path) -> None: @@ -3909,8 +3757,8 @@ def get_router_config(self) -> RouterConfig: assert "model_list: list" in all_extracted_code, "Should include model_list field from Router" -def test_enrich_testgen_context_extracts_userdict(tmp_path: Path) -> None: - """Extracts __init__ from collections.UserDict when a class inherits from it.""" +def test_enrich_testgen_context_skips_stdlib_userdict(tmp_path: Path) -> None: + """Skips stdlib classes like collections.UserDict.""" code = """from collections import UserDict class MyCustomDict(UserDict): @@ -3922,20 +3770,7 @@ class MyCustomDict(UserDict): context = CodeStringsMarkdown(code_strings=[CodeString(code=code, file_path=code_path)]) result = 
enrich_testgen_context(context, tmp_path) - assert len(result.code_strings) == 1 - code_string = result.code_strings[0] - - expected_code = """\ -class UserDict: - def __init__(self, dict=None, /, **kwargs): - self.data = {} - if dict is not None: - self.update(dict) - if kwargs: - self.update(kwargs) -""" - assert code_string.code == expected_code - assert code_string.file_path.as_posix().endswith("collections/__init__.py") + assert len(result.code_strings) == 0, "Should not extract stdlib classes" def test_enrich_testgen_context_skips_unresolvable_base_classes(tmp_path: Path) -> None: @@ -3969,32 +3804,24 @@ def test_enrich_testgen_context_skips_builtin_base_classes(tmp_path: Path) -> No def test_enrich_testgen_context_deduplicates(tmp_path: Path) -> None: - """Extracts the same external base class only once even when inherited multiple times.""" - code = """from collections import UserDict - -class MyDict1(UserDict): - pass + """Extracts the same project class only once even when imported multiple times.""" + package_dir = tmp_path / "mypkg" + package_dir.mkdir() + (package_dir / "__init__.py").write_text("", encoding="utf-8") + (package_dir / "base.py").write_text( + "class Base:\n def __init__(self, x: int):\n self.x = x\n", + encoding="utf-8", + ) -class MyDict2(UserDict): - pass -""" - code_path = tmp_path / "mydicts.py" + code = "from mypkg.base import Base\n\nclass A(Base):\n pass\n\nclass B(Base):\n pass\n" + code_path = package_dir / "children.py" code_path.write_text(code, encoding="utf-8") context = CodeStringsMarkdown(code_strings=[CodeString(code=code, file_path=code_path)]) result = enrich_testgen_context(context, tmp_path) assert len(result.code_strings) == 1 - expected_code = """\ -class UserDict: - def __init__(self, dict=None, /, **kwargs): - self.data = {} - if dict is not None: - self.update(dict) - if kwargs: - self.update(kwargs) -""" - assert result.code_strings[0].code == expected_code + assert "class Base" in result.code_strings[0].code 
def test_enrich_testgen_context_empty_when_no_inheritance(tmp_path: Path) -> None: @@ -4077,6 +3904,7 @@ def reify_channel_message(data: dict) -> MessageIn: import enum import typing as t + class MessageKind(enum.StrEnum): ASK_FOR_CLIPBOARD_RESPONSE = "ask-for-clipboard-response" BEGIN_EXFILTRATION = "begin-exfiltration" @@ -4121,18 +3949,17 @@ def reify_channel_message(data: dict) -> MessageIn: def test_testgen_context_includes_external_base_inits(tmp_path: Path) -> None: - """Test that external base class __init__ methods are included in testgen context. - - This covers line 65 in code_context_extractor.py where external_base_inits.code_strings - are appended to the testgen context when a class inherits from an external library. - """ - code = """from collections import UserDict + """Test that base class definitions from project modules are included in testgen context.""" + package_dir = tmp_path / "mypkg" + package_dir.mkdir() + (package_dir / "__init__.py").write_text("", encoding="utf-8") + (package_dir / "base.py").write_text( + "class BaseDict:\n def __init__(self, data=None):\n self.data = data or {}\n", + encoding="utf-8", + ) -class MyCustomDict(UserDict): - def target_method(self): - return self.data -""" - file_path = tmp_path / "test_code.py" + code = "from mypkg.base import BaseDict\n\nclass MyCustomDict(BaseDict):\n def target_method(self):\n return self.data\n" + file_path = package_dir / "test_code.py" file_path.write_text(code, encoding="utf-8") func_to_optimize = FunctionToOptimize( @@ -4143,11 +3970,10 @@ def target_method(self): code_ctx = get_code_optimization_context(function_to_optimize=func_to_optimize, project_root_path=tmp_path) - # The testgen context should include the UserDict __init__ method testgen_context = code_ctx.testgen_context.markdown - assert "class UserDict:" in testgen_context, "UserDict class should be in testgen context" - assert "def __init__" in testgen_context, "UserDict __init__ should be in testgen context" - assert 
"self.data = {}" in testgen_context, "UserDict __init__ body should be included" + assert "class BaseDict" in testgen_context, "BaseDict class should be in testgen context" + assert "def __init__" in testgen_context, "BaseDict __init__ should be in testgen context" + assert "self.data" in testgen_context, "BaseDict __init__ body should be included" def test_testgen_raises_when_exceeds_limit(tmp_path: Path) -> None: @@ -4178,26 +4004,24 @@ def target_function(): def test_enrich_testgen_context_attribute_base(tmp_path: Path) -> None: - """Test handling of base class accessed as module.ClassName (ast.Attribute). - - This covers line 616 in code_context_extractor.py. - """ - # Use the standard import style which the code actually handles - code = """from collections import UserDict + """Test handling of base class in a project module.""" + package_dir = tmp_path / "mypkg" + package_dir.mkdir() + (package_dir / "__init__.py").write_text("", encoding="utf-8") + (package_dir / "base.py").write_text( + "class CustomDict:\n def __init__(self, data=None):\n self.data = data or {}\n", + encoding="utf-8", + ) -class MyDict(UserDict): - def custom_method(self): - return self.data -""" - code_path = tmp_path / "mydict.py" + code = "from mypkg.base import CustomDict\n\nclass MyDict(CustomDict):\n def custom_method(self):\n return self.data\n" + code_path = package_dir / "mydict.py" code_path.write_text(code, encoding="utf-8") context = CodeStringsMarkdown(code_strings=[CodeString(code=code, file_path=code_path)]) result = enrich_testgen_context(context, tmp_path) - # Should extract UserDict __init__ assert len(result.code_strings) == 1 - assert "class UserDict:" in result.code_strings[0].code + assert "class CustomDict" in result.code_strings[0].code assert "def __init__" in result.code_strings[0].code @@ -4223,58 +4047,6 @@ class MyProtocol(Protocol): assert isinstance(result.code_strings, list) -def test_collect_names_from_annotation_attribute(tmp_path: Path) -> None: - """Test 
collect_names_from_annotation handles ast.Attribute annotations. - - This covers line 756 in code_context_extractor.py. - """ - # Use __import__ to avoid polluting the test file's detected imports - ast_mod = __import__("ast") - - # Parse code with type annotation using attribute access - code = "x: typing.List[int] = []" - tree = ast_mod.parse(code) - names: set[str] = set() - - # Find the annotation node - for node in ast_mod.walk(tree): - if isinstance(node, ast_mod.AnnAssign) and node.annotation: - collect_names_from_annotation(node.annotation, names) - break - - assert "typing" in names - - -def test_extract_imports_for_class_decorator_call_attribute(tmp_path: Path) -> None: - """Test extract_imports_for_class handles decorator calls with attribute access. - - This covers lines 707-708 in code_context_extractor.py. - """ - ast_mod = __import__("ast") - - code = """ -import functools - -@functools.lru_cache(maxsize=128) -class CachedClass: - pass -""" - tree = ast_mod.parse(code) - - # Find the class node - class_node = None - for node in ast_mod.walk(tree): - if isinstance(node, ast_mod.ClassDef): - class_node = node - break - - assert class_node is not None - result = extract_imports_for_class(tree, class_node, code) - - # Should include the functools import - assert "functools" in result - - def test_annotated_assignment_in_read_writable(tmp_path: Path) -> None: """Test that annotated assignments used by target function are in read-writable context. 
@@ -4404,7 +4176,7 @@ def target_method(self): def test_enrich_testgen_context_extracts_click_option(tmp_path: Path) -> None: - """Extracts __init__ from click.Option when directly imported.""" + """click.Option re-exports via __init__.py so jedi resolves the module but not the class directly.""" code = """from click import Option def my_func(opt: Option) -> None: @@ -4416,11 +4188,10 @@ def my_func(opt: Option) -> None: context = CodeStringsMarkdown(code_strings=[CodeString(code=code, file_path=code_path)]) result = enrich_testgen_context(context, tmp_path) - assert len(result.code_strings) == 1 - code_string = result.code_strings[0] - assert "class Option:" in code_string.code - assert "def __init__" in code_string.code - assert code_string.file_path is not None and "click" in code_string.file_path.as_posix() + # click re-exports Option from click.core via __init__.py; jedi resolves + # the module to __init__.py where Option is not defined as a ClassDef, + # so enrich_testgen_context cannot extract it. + assert isinstance(result.code_strings, list) def test_enrich_testgen_context_extracts_project_class_defs(tmp_path: Path) -> None: @@ -4501,10 +4272,8 @@ def my_func() -> None: assert result.code_strings == [] -def test_enrich_testgen_context_skips_object_init(tmp_path: Path) -> None: - """Skips classes whose __init__ is just object.__init__ (trivial).""" - # enum.Enum has a metaclass-based __init__, but individual enum members - # effectively use object.__init__. Use a class we know has object.__init__. +def test_enrich_testgen_context_skips_stdlib(tmp_path: Path) -> None: + """Skips stdlib classes like QName.""" code = """from xml.etree.ElementTree import QName def my_func(q: QName) -> None: @@ -4516,9 +4285,7 @@ def my_func(q: QName) -> None: context = CodeStringsMarkdown(code_strings=[CodeString(code=code, file_path=code_path)]) result = enrich_testgen_context(context, tmp_path) - # QName has its own __init__, so it should be included if it's in site-packages. 
- # But since it's stdlib (not site-packages), it should be skipped. - assert result.code_strings == [] + assert result.code_strings == [], "Should not extract stdlib classes" def test_enrich_testgen_context_empty_when_no_imports(tmp_path: Path) -> None: @@ -4535,150 +4302,566 @@ def test_enrich_testgen_context_empty_when_no_imports(tmp_path: Path) -> None: assert result.code_strings == [] -# --- Tests for extract_classes_from_type_hint --- +# --- Integration tests for transitive resolution in enrich_testgen_context --- -def test_extract_classes_from_type_hint_plain_class() -> None: - """Extracts a plain class directly.""" - from click import Option +def test_enrich_testgen_context_transitive_deps(tmp_path: Path) -> None: + """Transitive deps require the class to be resolvable in the target module.""" + package_dir = tmp_path / "mypkg" + package_dir.mkdir() + (package_dir / "__init__.py").write_text("", encoding="utf-8") - result = extract_classes_from_type_hint(Option) - assert Option in result + (package_dir / "types.py").write_text( + "class Command:\n def __init__(self, name: str):\n self.name = name\n", encoding="utf-8" + ) + (package_dir / "ctx.py").write_text( + "from mypkg.types import Command\n\nclass Context:\n def __init__(self, cmd: Command):\n self.cmd = cmd\n", + encoding="utf-8", + ) + code = "from mypkg.ctx import Context\n\ndef my_func(ctx: Context) -> None:\n pass\n" + code_path = package_dir / "main.py" + code_path.write_text(code, encoding="utf-8") -def test_extract_classes_from_type_hint_optional() -> None: - """Unwraps Optional[X] to find X.""" - from typing import Optional + context = CodeStringsMarkdown(code_strings=[CodeString(code=code, file_path=code_path)]) + result = enrich_testgen_context(context, tmp_path) - from click import Option + class_names = {cs.code.split("\n")[0].replace("class ", "").rstrip(":") for cs in result.code_strings} + assert "Context" in class_names - result = extract_classes_from_type_hint(Optional[Option]) - 
assert Option in result +def test_enrich_testgen_context_no_infinite_loops(tmp_path: Path) -> None: + """Handles classes with circular type references without infinite loops.""" + package_dir = tmp_path / "mypkg" + package_dir.mkdir() + (package_dir / "__init__.py").write_text("", encoding="utf-8") -def test_extract_classes_from_type_hint_union() -> None: - """Unwraps Union[X, Y] to find both X and Y.""" - from typing import Union + # Create circular references: Context references Command, Command references Context + (package_dir / "core.py").write_text( + "class Command:\n def __init__(self, name: str):\n self.name = name\n\n" + "class Context:\n def __init__(self, cmd: Command):\n self.cmd = cmd\n", + encoding="utf-8", + ) - from click import Command, Option + code = "from mypkg.core import Context\n\ndef my_func(ctx: Context) -> None:\n pass\n" + code_path = package_dir / "main.py" + code_path.write_text(code, encoding="utf-8") - result = extract_classes_from_type_hint(Union[Option, Command]) - assert Option in result - assert Command in result + context = CodeStringsMarkdown(code_strings=[CodeString(code=code, file_path=code_path)]) + result = enrich_testgen_context(context, tmp_path) + # Should complete without hanging + assert len(result.code_strings) >= 1 -def test_extract_classes_from_type_hint_list() -> None: - """Unwraps List[X] to find X.""" - from typing import List - from click import Option +def test_enrich_testgen_context_no_duplicate_stubs(tmp_path: Path) -> None: + """Does not emit duplicate stubs for the same class name.""" + code = """from click import Context - result = extract_classes_from_type_hint(List[Option]) - assert Option in result +def my_func(ctx: Context) -> None: + pass +""" + code_path = tmp_path / "myfunc.py" + code_path.write_text(code, encoding="utf-8") + context = CodeStringsMarkdown(code_strings=[CodeString(code=code, file_path=code_path)]) + result = enrich_testgen_context(context, tmp_path) -def 
test_extract_classes_from_type_hint_filters_builtins() -> None: - """Filters out builtins like str, int, None.""" - from typing import Optional + class_names = [cs.code.split("\n")[0].replace("class ", "").rstrip(":") for cs in result.code_strings] + assert len(class_names) == len(set(class_names)), f"Duplicate class stubs found: {class_names}" - result = extract_classes_from_type_hint(Optional[str]) - assert len(result) == 0 +# --- Tests for collect_type_names_from_annotation --- -def test_extract_classes_from_type_hint_callable() -> None: - """Handles bare Callable without error.""" - from typing import Callable - result = extract_classes_from_type_hint(Callable) - assert isinstance(result, list) +def test_collect_type_names_simple() -> None: + tree = ast.parse("def f(x: Foo): pass") + func = tree.body[0] + assert isinstance(func, ast.FunctionDef) + ann = func.args.args[0].annotation + assert collect_type_names_from_annotation(ann) == {"Foo"} -def test_extract_classes_from_type_hint_callable_with_args() -> None: - """Unwraps Callable[[X], Y] to find classes.""" - from typing import Callable +def test_collect_type_names_generic() -> None: + tree = ast.parse("def f(x: list[Foo]): pass") + func = tree.body[0] + assert isinstance(func, ast.FunctionDef) + ann = func.args.args[0].annotation + names = collect_type_names_from_annotation(ann) + assert "Foo" in names + assert "list" in names - from click import Context - result = extract_classes_from_type_hint(Callable[[Context], None]) - assert Context in result +def test_collect_type_names_optional() -> None: + tree = ast.parse("def f(x: Optional[Foo]): pass") + func = tree.body[0] + assert isinstance(func, ast.FunctionDef) + ann = func.args.args[0].annotation + names = collect_type_names_from_annotation(ann) + assert "Optional" in names + assert "Foo" in names -# --- Tests for resolve_transitive_type_deps --- +def test_collect_type_names_union_pipe() -> None: + tree = ast.parse("def f(x: Foo | Bar): pass") + func = 
tree.body[0] + assert isinstance(func, ast.FunctionDef) + ann = func.args.args[0].annotation + names = collect_type_names_from_annotation(ann) + assert names == {"Foo", "Bar"} -def test_resolve_transitive_type_deps_click_context() -> None: - """click.Context.__init__ references Command, which should be found.""" - from click import Command, Context +def test_collect_type_names_none_annotation() -> None: + assert collect_type_names_from_annotation(None) == set() - deps = resolve_transitive_type_deps(Context) - dep_names = {cls.__name__ for cls in deps} - assert "Command" in dep_names or Command in deps +def test_collect_type_names_attribute_skipped() -> None: + tree = ast.parse("def f(x: module.Foo): pass") + func = tree.body[0] + assert isinstance(func, ast.FunctionDef) + ann = func.args.args[0].annotation + assert collect_type_names_from_annotation(ann) == set() -def test_resolve_transitive_type_deps_handles_failure_gracefully() -> None: - """Returns empty list for a class where get_type_hints fails.""" - class BadClass: - def __init__(self, x: NonexistentType) -> None: # type: ignore[name-defined] # noqa: F821 - pass +# --- Tests for extract_init_stub_from_class --- - result = resolve_transitive_type_deps(BadClass) - assert result == [] +def test_extract_init_stub_basic() -> None: + source = """ +class MyClass: + def __init__(self, name: str, value: int = 0): + self.name = name + self.value = value +""" + tree = ast.parse(source) + stub = extract_init_stub_from_class("MyClass", source, tree) + assert stub is not None + assert "class MyClass:" in stub + assert "def __init__(self, name: str, value: int = 0):" in stub + assert "self.name = name" in stub + assert "self.value = value" in stub + + +def test_extract_init_stub_no_init() -> None: + source = """ +class NoInit: + x = 10 + def other(self): + pass +""" + tree = ast.parse(source) + stub = extract_init_stub_from_class("NoInit", source, tree) + assert stub is None -# --- Integration tests for transitive 
resolution in enrich_testgen_context --- +def test_extract_init_stub_class_not_found() -> None: + source = """ +class Other: + def __init__(self): + pass +""" + tree = ast.parse(source) + stub = extract_init_stub_from_class("Missing", source, tree) + assert stub is None -def test_enrich_testgen_context_transitive_deps(tmp_path: Path) -> None: - """Extracts transitive type dependencies from __init__ annotations.""" - code = """from click import Context -def my_func(ctx: Context) -> None: +# --- Tests for extract_parameter_type_constructors --- + + +def test_extract_parameter_type_constructors_project_type(tmp_path: Path) -> None: + # Create a module with a class + pkg = tmp_path / "mypkg" + pkg.mkdir() + (pkg / "__init__.py").write_text("", encoding="utf-8") + (pkg / "models.py").write_text( + """ +class Widget: + def __init__(self, size: int, color: str = "red"): + self.size = size + self.color = color +""", + encoding="utf-8", + ) + + # Create the FTO file that uses Widget + (pkg / "processor.py").write_text( + """from mypkg.models import Widget + +def process(w: Widget) -> str: + return str(w) +""", + encoding="utf-8", + ) + + fto = FunctionToOptimize( + function_name="process", file_path=(pkg / "processor.py").resolve(), starting_line=3, ending_line=4 + ) + result = extract_parameter_type_constructors(fto, tmp_path.resolve(), set()) + assert len(result.code_strings) == 1 + code = result.code_strings[0].code + assert "class Widget:" in code + assert "def __init__" in code + assert "size" in code + + +def test_extract_parameter_type_constructors_excludes_builtins(tmp_path: Path) -> None: + pkg = tmp_path / "mypkg" + pkg.mkdir() + (pkg / "__init__.py").write_text("", encoding="utf-8") + (pkg / "func.py").write_text( + """ +def my_func(x: int, y: str, z: list) -> None: pass +""", + encoding="utf-8", + ) + + fto = FunctionToOptimize( + function_name="my_func", file_path=(pkg / "func.py").resolve(), starting_line=2, ending_line=3 + ) + result = 
extract_parameter_type_constructors(fto, tmp_path.resolve(), set()) + assert len(result.code_strings) == 0 + + +def test_extract_parameter_type_constructors_skips_existing_classes(tmp_path: Path) -> None: + pkg = tmp_path / "mypkg" + pkg.mkdir() + (pkg / "__init__.py").write_text("", encoding="utf-8") + (pkg / "models.py").write_text( + """ +class Widget: + def __init__(self, size: int): + self.size = size +""", + encoding="utf-8", + ) + (pkg / "processor.py").write_text( + """from mypkg.models import Widget + +def process(w: Widget) -> str: + return str(w) +""", + encoding="utf-8", + ) + + fto = FunctionToOptimize( + function_name="process", file_path=(pkg / "processor.py").resolve(), starting_line=3, ending_line=4 + ) + # Widget is already in the context — should not be duplicated + result = extract_parameter_type_constructors(fto, tmp_path.resolve(), {"Widget"}) + assert len(result.code_strings) == 0 + + +def test_extract_parameter_type_constructors_no_init(tmp_path: Path) -> None: + pkg = tmp_path / "mypkg" + pkg.mkdir() + (pkg / "__init__.py").write_text("", encoding="utf-8") + (pkg / "models.py").write_text( + """ +class Config: + x = 10 +""", + encoding="utf-8", + ) + (pkg / "processor.py").write_text( + """from mypkg.models import Config + +def process(c: Config) -> str: + return str(c) +""", + encoding="utf-8", + ) + + fto = FunctionToOptimize( + function_name="process", file_path=(pkg / "processor.py").resolve(), starting_line=3, ending_line=4 + ) + result = extract_parameter_type_constructors(fto, tmp_path.resolve(), set()) + assert len(result.code_strings) == 0 + + +# --- Tests for resolve_instance_class_name --- + + +def test_resolve_instance_class_name_direct_call() -> None: + source = "config = MyConfig(debug=True)" + tree = ast.parse(source) + assert resolve_instance_class_name("config", tree) == "MyConfig" + + +def test_resolve_instance_class_name_annotated() -> None: + source = "config: MyConfig = load()" + tree = ast.parse(source) + assert 
resolve_instance_class_name("config", tree) == "MyConfig" + + +def test_resolve_instance_class_name_factory_method() -> None: + source = "config = MyConfig.from_env()" + tree = ast.parse(source) + assert resolve_instance_class_name("config", tree) == "MyConfig" + + +def test_resolve_instance_class_name_no_match() -> None: + source = "x = 42" + tree = ast.parse(source) + assert resolve_instance_class_name("x", tree) is None + + +def test_resolve_instance_class_name_missing_variable() -> None: + source = "config = MyConfig()" + tree = ast.parse(source) + assert resolve_instance_class_name("other", tree) is None + + +# --- Tests for enhanced extract_init_stub_from_class --- + + +def test_extract_init_stub_includes_post_init() -> None: + source = """\ +class MyDataclass: + def __init__(self, x: int): + self.x = x + def __post_init__(self): + self.y = self.x * 2 """ - code_path = tmp_path / "myfunc.py" - code_path.write_text(code, encoding="utf-8") + tree = ast.parse(source) + stub = extract_init_stub_from_class("MyDataclass", source, tree) + assert stub is not None + assert "class MyDataclass:" in stub + assert "def __init__" in stub + assert "def __post_init__" in stub + assert "self.y = self.x * 2" in stub - context = CodeStringsMarkdown(code_strings=[CodeString(code=code, file_path=code_path)]) - result = enrich_testgen_context(context, tmp_path) - class_names = {cs.code.split("\n")[0].replace("class ", "").rstrip(":") for cs in result.code_strings} - assert "Context" in class_names - # Command is a transitive dep via Context.__init__ - assert "Command" in class_names +def test_extract_init_stub_includes_properties() -> None: + source = """\ +class MyClass: + def __init__(self, name: str): + self._name = name + @property + def name(self) -> str: + return self._name +""" + tree = ast.parse(source) + stub = extract_init_stub_from_class("MyClass", source, tree) + assert stub is not None + assert "def __init__" in stub + assert "@property" in stub + assert "def name" in 
stub + + +def test_extract_init_stub_property_only_class() -> None: + source = """\ +class ReadOnly: + @property + def value(self) -> int: + return 42 +""" + tree = ast.parse(source) + stub = extract_init_stub_from_class("ReadOnly", source, tree) + assert stub is not None + assert "class ReadOnly:" in stub + assert "@property" in stub + assert "def value" in stub -def test_enrich_testgen_context_no_infinite_loops(tmp_path: Path) -> None: - """Handles classes with circular type references without infinite loops.""" - # click.Context references Command, and Command references Context back - # This should terminate without issues due to the processed_classes set - code = """from click import Context +# --- Tests for enrich_testgen_context resolving instances --- -def my_func(ctx: Context) -> None: - pass + +def test_enrich_testgen_context_resolves_instance_to_class(tmp_path: Path) -> None: + package_dir = tmp_path / "mypkg" + package_dir.mkdir() + (package_dir / "__init__.py").write_text("", encoding="utf-8") + + config_module = """\ +class AppConfig: + def __init__(self, debug: bool = False): + self.debug = debug + + @property + def log_level(self) -> str: + return "DEBUG" if self.debug else "INFO" + +app_config = AppConfig(debug=True) """ - code_path = tmp_path / "myfunc.py" - code_path.write_text(code, encoding="utf-8") + (package_dir / "config.py").write_text(config_module, encoding="utf-8") - context = CodeStringsMarkdown(code_strings=[CodeString(code=code, file_path=code_path)]) + consumer_code = """\ +from mypkg.config import app_config + +def get_log_level() -> str: + return app_config.log_level +""" + consumer_path = package_dir / "consumer.py" + consumer_path.write_text(consumer_code, encoding="utf-8") + + context = CodeStringsMarkdown(code_strings=[CodeString(code=consumer_code, file_path=consumer_path)]) result = enrich_testgen_context(context, tmp_path) - # Should complete without hanging; just verify we got results assert len(result.code_strings) >= 1 + 
combined = "\n".join(cs.code for cs in result.code_strings) + assert "class AppConfig:" in combined + assert "@property" in combined + +def test_extract_parameter_type_constructors_isinstance_single(tmp_path: Path) -> None: + """isinstance(x, SomeType) in function body should be picked up.""" + pkg = tmp_path / "mypkg" + pkg.mkdir() + (pkg / "__init__.py").write_text("", encoding="utf-8") + (pkg / "models.py").write_text( + "class Widget:\n def __init__(self, size: int):\n self.size = size\n", + encoding="utf-8", + ) + (pkg / "processor.py").write_text( + "from mypkg.models import Widget\n\ndef check(x) -> bool:\n return isinstance(x, Widget)\n", + encoding="utf-8", + ) + fto = FunctionToOptimize( + function_name="check", file_path=(pkg / "processor.py").resolve(), starting_line=3, ending_line=4 + ) + result = extract_parameter_type_constructors(fto, tmp_path.resolve(), set()) + assert len(result.code_strings) == 1 + assert "class Widget:" in result.code_strings[0].code + assert "__init__" in result.code_strings[0].code + + +def test_extract_parameter_type_constructors_isinstance_tuple(tmp_path: Path) -> None: + """isinstance(x, (TypeA, TypeB)) should pick up both types.""" + pkg = tmp_path / "mypkg" + pkg.mkdir() + (pkg / "__init__.py").write_text("", encoding="utf-8") + (pkg / "models.py").write_text( + "class Alpha:\n def __init__(self, a: int):\n self.a = a\n\n" + "class Beta:\n def __init__(self, b: str):\n self.b = b\n", + encoding="utf-8", + ) + (pkg / "processor.py").write_text( + "from mypkg.models import Alpha, Beta\n\ndef check(x) -> bool:\n return isinstance(x, (Alpha, Beta))\n", + encoding="utf-8", + ) + fto = FunctionToOptimize( + function_name="check", file_path=(pkg / "processor.py").resolve(), starting_line=3, ending_line=4 + ) + result = extract_parameter_type_constructors(fto, tmp_path.resolve(), set()) + assert len(result.code_strings) == 2 + combined = "\n".join(cs.code for cs in result.code_strings) + assert "class Alpha:" in combined + assert 
"class Beta:" in combined + + +def test_extract_parameter_type_constructors_type_is_pattern(tmp_path: Path) -> None: + """type(x) is SomeType pattern should be picked up.""" + pkg = tmp_path / "mypkg" + pkg.mkdir() + (pkg / "__init__.py").write_text("", encoding="utf-8") + (pkg / "models.py").write_text( + "class Gadget:\n def __init__(self, val: float):\n self.val = val\n", + encoding="utf-8", + ) + (pkg / "processor.py").write_text( + "from mypkg.models import Gadget\n\ndef check(x) -> bool:\n return type(x) is Gadget\n", + encoding="utf-8", + ) + fto = FunctionToOptimize( + function_name="check", file_path=(pkg / "processor.py").resolve(), starting_line=3, ending_line=4 + ) + result = extract_parameter_type_constructors(fto, tmp_path.resolve(), set()) + assert len(result.code_strings) == 1 + assert "class Gadget:" in result.code_strings[0].code -def test_enrich_testgen_context_no_duplicate_stubs(tmp_path: Path) -> None: - """Does not emit duplicate stubs for the same class name.""" - code = """from click import Context +def test_extract_parameter_type_constructors_base_classes(tmp_path: Path) -> None: + """Base classes of enclosing class should be picked up for methods.""" + pkg = tmp_path / "mypkg" + pkg.mkdir() + (pkg / "__init__.py").write_text("", encoding="utf-8") + (pkg / "base.py").write_text( + "class BaseProcessor:\n def __init__(self, config: str):\n self.config = config\n", + encoding="utf-8", + ) + (pkg / "child.py").write_text( + "from mypkg.base import BaseProcessor\n\nclass ChildProcessor(BaseProcessor):\n" + " def process(self) -> str:\n return self.config\n", + encoding="utf-8", + ) + fto = FunctionToOptimize( + function_name="process", + file_path=(pkg / "child.py").resolve(), + starting_line=4, + ending_line=5, + parents=[FunctionParent(name="ChildProcessor", type="ClassDef")], + ) + result = extract_parameter_type_constructors(fto, tmp_path.resolve(), set()) + assert len(result.code_strings) == 1 + assert "class BaseProcessor:" in 
result.code_strings[0].code -def my_func(ctx: Context) -> None: - pass -""" - code_path = tmp_path / "myfunc.py" - code_path.write_text(code, encoding="utf-8") - context = CodeStringsMarkdown(code_strings=[CodeString(code=code, file_path=code_path)]) +def test_extract_parameter_type_constructors_isinstance_builtins_excluded(tmp_path: Path) -> None: + """Isinstance with builtins (int, str, etc.) should not produce stubs.""" + pkg = tmp_path / "mypkg" + pkg.mkdir() + (pkg / "__init__.py").write_text("", encoding="utf-8") + (pkg / "func.py").write_text( + "def check(x) -> bool:\n return isinstance(x, (int, str, float))\n", + encoding="utf-8", + ) + fto = FunctionToOptimize( + function_name="check", file_path=(pkg / "func.py").resolve(), starting_line=1, ending_line=2 + ) + result = extract_parameter_type_constructors(fto, tmp_path.resolve(), set()) + assert len(result.code_strings) == 0 + + +def test_extract_parameter_type_constructors_transitive(tmp_path: Path) -> None: + """Transitive extraction: if Widget.__init__ takes a Config, Config's stub should also appear.""" + pkg = tmp_path / "mypkg" + pkg.mkdir() + (pkg / "__init__.py").write_text("", encoding="utf-8") + (pkg / "config.py").write_text( + "class Config:\n def __init__(self, debug: bool = False):\n self.debug = debug\n", + encoding="utf-8", + ) + (pkg / "models.py").write_text( + "from mypkg.config import Config\n\n" + "class Widget:\n def __init__(self, cfg: Config):\n self.cfg = cfg\n", + encoding="utf-8", + ) + (pkg / "processor.py").write_text( + "from mypkg.models import Widget\n\ndef process(w: Widget) -> str:\n return str(w)\n", + encoding="utf-8", + ) + fto = FunctionToOptimize( + function_name="process", file_path=(pkg / "processor.py").resolve(), starting_line=3, ending_line=4 + ) + result = extract_parameter_type_constructors(fto, tmp_path.resolve(), set()) + combined = "\n".join(cs.code for cs in result.code_strings) + assert "class Widget:" in combined + assert "class Config:" in combined + + + 
+ +def test_enrich_testgen_context_third_party_uses_stubs(tmp_path: Path) -> None: + """Third-party classes should produce compact __init__ stubs, not full class source.""" + # Use a real third-party package (pydantic) so jedi can actually resolve it + context_code = ( + "from pydantic import BaseModel\n\n" + "class MyModel(BaseModel):\n" + " name: str\n\n" + "def process(m: MyModel) -> str:\n" + " return m.name\n" + ) + consumer_path = tmp_path / "consumer.py" + consumer_path.write_text(context_code, encoding="utf-8") + + context = CodeStringsMarkdown(code_strings=[CodeString(code=context_code, file_path=consumer_path)]) result = enrich_testgen_context(context, tmp_path) - class_names = [cs.code.split("\n")[0].replace("class ", "").rstrip(":") for cs in result.code_strings] - assert len(class_names) == len(set(class_names)), f"Duplicate class stubs found: {class_names}" + # BaseModel lives in site-packages so should get stub treatment (compact __init__), + # not the full class definition with hundreds of methods + for cs in result.code_strings: + if "BaseModel" in cs.code: + assert "class BaseModel:" in cs.code + assert "__init__" in cs.code + # Full BaseModel has many methods; stubs should only have __init__/properties + assert "model_dump" not in cs.code + break diff --git a/tests/test_code_deduplication.py b/tests/test_code_deduplication.py index deea25f93..3cb266785 100644 --- a/tests/test_code_deduplication.py +++ b/tests/test_code_deduplication.py @@ -1,4 +1,4 @@ -from codeflash.code_utils.deduplicate_code import are_codes_duplicate, normalize_code +from codeflash.languages.python.normalizer import normalize_python_code as normalize_code def test_deduplicate1(): @@ -23,7 +23,7 @@ def compute_sum(numbers): """ assert normalize_code(code1) == normalize_code(code2) - assert are_codes_duplicate(code1, code2) + assert normalize_code(code1) == normalize_code(code2) # Example 3: Same function and parameter names, different local variables (should match) code3 = """ 
@@ -43,7 +43,7 @@ def calculate_sum(numbers): """ assert normalize_code(code3) == normalize_code(code4) - assert are_codes_duplicate(code3, code4) + assert normalize_code(code3) == normalize_code(code4) # Example 4: Nested functions and classes (preserving names) code5 = """ diff --git a/tests/test_code_replacement.py b/tests/test_code_replacement.py index 77d9108ab..f1bf48043 100644 --- a/tests/test_code_replacement.py +++ b/tests/test_code_replacement.py @@ -11,7 +11,6 @@ from codeflash.languages.python.static_analysis.code_replacer import ( AddRequestArgument, AutouseFixtureModifier, - OptimFunctionCollector, PytestMarkAdder, is_zero_diff, replace_functions_and_add_imports, @@ -19,7 +18,7 @@ ) from codeflash.discovery.functions_to_optimize import FunctionToOptimize from codeflash.models.models import CodeOptimizationContext, CodeStringsMarkdown, FunctionParent, FunctionSource -from codeflash.optimization.function_optimizer import FunctionOptimizer +from codeflash.languages.python.function_optimizer import PythonFunctionOptimizer from codeflash.verification.verification_utils import TestConfig os.environ["CODEFLASH_API_KEY"] = "cf-test-key" @@ -55,7 +54,7 @@ def sorter(arr): test_framework="pytest", pytest_cmd="pytest", ) - func_optimizer = FunctionOptimizer(function_to_optimize=func, test_cfg=test_config) + func_optimizer = PythonFunctionOptimizer(function_to_optimize=func, test_cfg=test_config) code_context: CodeOptimizationContext = func_optimizer.get_code_optimization_context().unwrap() original_helper_code: dict[Path, str] = {} helper_function_paths = {hf.file_path for hf in code_context.helper_functions} @@ -808,6 +807,7 @@ def test_code_replacement10() -> None: get_code_output = """# file: test_code_replacement.py from __future__ import annotations + class HelperClass: def __init__(self, name): self.name = name @@ -834,7 +834,7 @@ def main_method(self): test_framework="pytest", pytest_cmd="pytest", ) - func_optimizer = 
FunctionOptimizer(function_to_optimize=func_top_optimize, test_cfg=test_config) + func_optimizer = PythonFunctionOptimizer(function_to_optimize=func_top_optimize, test_cfg=test_config) code_context = func_optimizer.get_code_optimization_context().unwrap() assert code_context.testgen_context.flat.rstrip() == get_code_output.rstrip() @@ -1745,7 +1745,7 @@ def new_function2(value): test_framework="pytest", pytest_cmd="pytest", ) - func_optimizer = FunctionOptimizer(function_to_optimize=func, test_cfg=test_config) + func_optimizer = PythonFunctionOptimizer(function_to_optimize=func, test_cfg=test_config) code_context: CodeOptimizationContext = func_optimizer.get_code_optimization_context().unwrap() original_helper_code: dict[Path, str] = {} helper_function_paths = {hf.file_path for hf in code_context.helper_functions} @@ -1824,7 +1824,7 @@ def new_function2(value): test_framework="pytest", pytest_cmd="pytest", ) - func_optimizer = FunctionOptimizer(function_to_optimize=func, test_cfg=test_config) + func_optimizer = PythonFunctionOptimizer(function_to_optimize=func, test_cfg=test_config) code_context: CodeOptimizationContext = func_optimizer.get_code_optimization_context().unwrap() original_helper_code: dict[Path, str] = {} helper_function_paths = {hf.file_path for hf in code_context.helper_functions} @@ -1904,7 +1904,7 @@ def new_function2(value): test_framework="pytest", pytest_cmd="pytest", ) - func_optimizer = FunctionOptimizer(function_to_optimize=func, test_cfg=test_config) + func_optimizer = PythonFunctionOptimizer(function_to_optimize=func, test_cfg=test_config) code_context: CodeOptimizationContext = func_optimizer.get_code_optimization_context().unwrap() original_helper_code: dict[Path, str] = {} helper_function_paths = {hf.file_path for hf in code_context.helper_functions} @@ -1983,7 +1983,7 @@ def new_function2(value): test_framework="pytest", pytest_cmd="pytest", ) - func_optimizer = FunctionOptimizer(function_to_optimize=func, test_cfg=test_config) + 
func_optimizer = PythonFunctionOptimizer(function_to_optimize=func, test_cfg=test_config) code_context: CodeOptimizationContext = func_optimizer.get_code_optimization_context().unwrap() original_helper_code: dict[Path, str] = {} helper_function_paths = {hf.file_path for hf in code_context.helper_functions} @@ -2063,7 +2063,7 @@ def new_function2(value): test_framework="pytest", pytest_cmd="pytest", ) - func_optimizer = FunctionOptimizer(function_to_optimize=func, test_cfg=test_config) + func_optimizer = PythonFunctionOptimizer(function_to_optimize=func, test_cfg=test_config) code_context: CodeOptimizationContext = func_optimizer.get_code_optimization_context().unwrap() original_helper_code: dict[Path, str] = {} helper_function_paths = {hf.file_path for hf in code_context.helper_functions} @@ -2153,7 +2153,7 @@ def new_function2(value): test_framework="pytest", pytest_cmd="pytest", ) - func_optimizer = FunctionOptimizer(function_to_optimize=func, test_cfg=test_config) + func_optimizer = PythonFunctionOptimizer(function_to_optimize=func, test_cfg=test_config) code_context: CodeOptimizationContext = func_optimizer.get_code_optimization_context().unwrap() original_helper_code: dict[Path, str] = {} helper_function_paths = {hf.file_path for hf in code_context.helper_functions} @@ -3453,7 +3453,7 @@ def hydrate_input_text_actions_with_field_names( test_framework="pytest", pytest_cmd="pytest", ) - func_optimizer = FunctionOptimizer(function_to_optimize=func, test_cfg=test_config) + func_optimizer = PythonFunctionOptimizer(function_to_optimize=func, test_cfg=test_config) code_context: CodeOptimizationContext = func_optimizer.get_code_optimization_context().unwrap() original_helper_code: dict[Path, str] = {} @@ -3476,142 +3476,6 @@ def hydrate_input_text_actions_with_field_names( assert new_code == expected -# OptimFunctionCollector async function tests -def test_optim_function_collector_with_async_functions(): - """Test OptimFunctionCollector correctly collects async 
functions.""" - import libcst as cst - - source_code = """ -def sync_function(): - return "sync" - -async def async_function(): - return "async" - -class TestClass: - def sync_method(self): - return "sync_method" - - async def async_method(self): - return "async_method" -""" - - tree = cst.parse_module(source_code) - collector = OptimFunctionCollector( - function_names={ - (None, "sync_function"), - (None, "async_function"), - ("TestClass", "sync_method"), - ("TestClass", "async_method"), - }, - preexisting_objects=None, - ) - tree.visit(collector) - - # Should collect both sync and async functions - assert len(collector.modified_functions) == 4 - assert (None, "sync_function") in collector.modified_functions - assert (None, "async_function") in collector.modified_functions - assert ("TestClass", "sync_method") in collector.modified_functions - assert ("TestClass", "async_method") in collector.modified_functions - - -def test_optim_function_collector_new_async_functions(): - """Test OptimFunctionCollector identifies new async functions not in preexisting objects.""" - import libcst as cst - - source_code = """ -def existing_function(): - return "existing" - -async def new_async_function(): - return "new_async" - -def new_sync_function(): - return "new_sync" - -class ExistingClass: - async def new_class_async_method(self): - return "new_class_async" -""" - - # Only existing_function is in preexisting objects - preexisting_objects = {("existing_function", ())} - - tree = cst.parse_module(source_code) - collector = OptimFunctionCollector( - function_names=set(), # Not looking for specific functions - preexisting_objects=preexisting_objects, - ) - tree.visit(collector) - - # Should identify new functions (both sync and async) - assert len(collector.new_functions) == 2 - function_names = [func.name.value for func in collector.new_functions] - assert "new_async_function" in function_names - assert "new_sync_function" in function_names - - # Should identify new class 
methods - assert "ExistingClass" in collector.new_class_functions - assert len(collector.new_class_functions["ExistingClass"]) == 1 - assert collector.new_class_functions["ExistingClass"][0].name.value == "new_class_async_method" - - -def test_optim_function_collector_mixed_scenarios(): - """Test OptimFunctionCollector with complex mix of sync/async functions and classes.""" - import libcst as cst - - source_code = """ -# Global functions -def global_sync(): - pass - -async def global_async(): - pass - -class ParentClass: - def __init__(self): - pass - - def sync_method(self): - pass - - async def async_method(self): - pass - -class ChildClass: - async def child_async_method(self): - pass - - def child_sync_method(self): - pass -""" - - # Looking for specific functions - function_names = { - (None, "global_sync"), - (None, "global_async"), - ("ParentClass", "sync_method"), - ("ParentClass", "async_method"), - ("ChildClass", "child_async_method"), - } - - tree = cst.parse_module(source_code) - collector = OptimFunctionCollector(function_names=function_names, preexisting_objects=None) - tree.visit(collector) - - # Should collect all specified functions (mix of sync and async) - assert len(collector.modified_functions) == 5 - assert (None, "global_sync") in collector.modified_functions - assert (None, "global_async") in collector.modified_functions - assert ("ParentClass", "sync_method") in collector.modified_functions - assert ("ParentClass", "async_method") in collector.modified_functions - assert ("ChildClass", "child_async_method") in collector.modified_functions - - # Should collect __init__ method - assert "ParentClass" in collector.modified_init_functions - - def test_is_zero_diff_async_sleep(): original_code = """ import time diff --git a/tests/test_codeflash_capture.py b/tests/test_codeflash_capture.py index b488935bb..be61fcefa 100644 --- a/tests/test_codeflash_capture.py +++ b/tests/test_codeflash_capture.py @@ -7,8 +7,8 @@ from 
codeflash.code_utils.code_utils import get_run_tmp_file from codeflash.code_utils.compat import SAFE_SYS_EXECUTABLE from codeflash.discovery.functions_to_optimize import FunctionToOptimize +from codeflash.languages.python.function_optimizer import PythonFunctionOptimizer from codeflash.models.models import FunctionParent, TestFile, TestFiles, TestingMode, TestType, VerificationType -from codeflash.optimization.function_optimizer import FunctionOptimizer from codeflash.verification.equivalence import compare_test_results from codeflash.verification.instrument_codeflash_capture import instrument_codeflash_capture from codeflash.verification.test_runner import execute_test_subprocess @@ -459,7 +459,7 @@ def __init__(self, x=2): file_path=sample_code_path, parents=[FunctionParent(name="MyClass", type="ClassDef")], ) - func_optimizer = FunctionOptimizer(function_to_optimize=fto, test_cfg=test_config) + func_optimizer = PythonFunctionOptimizer(function_to_optimize=fto, test_cfg=test_config) func_optimizer.test_files = TestFiles( test_files=[ TestFile( @@ -475,8 +475,8 @@ def __init__(self, x=2): test_env=test_env, test_files=func_optimizer.test_files, optimization_iteration=0, - min_outer_loops=1, - max_outer_loops=1, + pytest_min_loops=1, + pytest_max_loops=1, testing_time=0.1, ) assert len(test_results) == 3 @@ -508,8 +508,8 @@ def __init__(self, x=2): test_env=test_env, test_files=func_optimizer.test_files, optimization_iteration=0, - min_outer_loops=1, - max_outer_loops=1, + pytest_min_loops=1, + pytest_max_loops=1, testing_time=0.1, ) match, _ = compare_test_results(test_results, test_results2) @@ -582,7 +582,7 @@ def __init__(self, *args, **kwargs): file_path=sample_code_path, parents=[FunctionParent(name="MyClass", type="ClassDef")], ) - func_optimizer = FunctionOptimizer(function_to_optimize=fto, test_cfg=test_config) + func_optimizer = PythonFunctionOptimizer(function_to_optimize=fto, test_cfg=test_config) func_optimizer.test_files = TestFiles( test_files=[ 
TestFile( @@ -598,8 +598,8 @@ def __init__(self, *args, **kwargs): test_env=test_env, test_files=func_optimizer.test_files, optimization_iteration=0, - min_outer_loops=1, - max_outer_loops=1, + pytest_min_loops=1, + pytest_max_loops=1, testing_time=0.1, ) assert len(test_results) == 3 @@ -632,8 +632,8 @@ def __init__(self, *args, **kwargs): test_env=test_env, test_files=func_optimizer.test_files, optimization_iteration=0, - min_outer_loops=1, - max_outer_loops=1, + pytest_min_loops=1, + pytest_max_loops=1, testing_time=0.1, ) @@ -709,7 +709,7 @@ def __init__(self, x=2): file_path=sample_code_path, parents=[FunctionParent(name="MyClass", type="ClassDef")], ) - func_optimizer = FunctionOptimizer(function_to_optimize=fto, test_cfg=test_config) + func_optimizer = PythonFunctionOptimizer(function_to_optimize=fto, test_cfg=test_config) func_optimizer.test_files = TestFiles( test_files=[ TestFile( @@ -725,8 +725,8 @@ def __init__(self, x=2): test_env=test_env, test_files=func_optimizer.test_files, optimization_iteration=0, - min_outer_loops=1, - max_outer_loops=1, + pytest_min_loops=1, + pytest_max_loops=1, testing_time=0.1, ) @@ -761,8 +761,8 @@ def __init__(self, x=2): test_env=test_env, test_files=func_optimizer.test_files, optimization_iteration=0, - min_outer_loops=1, - max_outer_loops=1, + pytest_min_loops=1, + pytest_max_loops=1, testing_time=0.1, ) @@ -872,7 +872,7 @@ def another_helper(self): file_path=fto_file_path, parents=[FunctionParent(name="MyClass", type="ClassDef")], ) - func_optimizer = FunctionOptimizer(function_to_optimize=fto, test_cfg=test_config) + func_optimizer = PythonFunctionOptimizer(function_to_optimize=fto, test_cfg=test_config) func_optimizer.test_files = TestFiles( test_files=[ TestFile( @@ -889,8 +889,8 @@ def another_helper(self): test_env=test_env, test_files=func_optimizer.test_files, optimization_iteration=0, - min_outer_loops=1, - max_outer_loops=1, + pytest_min_loops=1, + pytest_max_loops=1, testing_time=0.1, ) @@ -910,8 +910,8 @@ 
def another_helper(self): test_env=test_env, test_files=func_optimizer.test_files, optimization_iteration=0, - min_outer_loops=1, - max_outer_loops=1, + pytest_min_loops=1, + pytest_max_loops=1, testing_time=0.1, ) @@ -1021,7 +1021,7 @@ def another_helper(self): test_framework="pytest", pytest_cmd="pytest", ) - func_optimizer = FunctionOptimizer(function_to_optimize=fto, test_cfg=test_config) + func_optimizer = PythonFunctionOptimizer(function_to_optimize=fto, test_cfg=test_config) func_optimizer.test_files = TestFiles( test_files=[ TestFile( @@ -1049,13 +1049,13 @@ def another_helper(self): test_env=test_env, test_files=func_optimizer.test_files, optimization_iteration=0, - min_outer_loops=1, - max_outer_loops=1, + pytest_min_loops=1, + pytest_max_loops=1, testing_time=0.1, ) # Remove instrumentation - FunctionOptimizer.write_code_and_helpers(candidate_fto_code, candidate_helper_code, fto.file_path) + PythonFunctionOptimizer.write_code_and_helpers(candidate_fto_code, candidate_helper_code, fto.file_path) assert len(test_results.test_results) == 4 assert test_results[0].id.test_function_name == "test_helper_classes" @@ -1101,12 +1101,12 @@ def target_function(self): test_env=test_env, test_files=func_optimizer.test_files, optimization_iteration=0, - min_outer_loops=1, - max_outer_loops=1, + pytest_min_loops=1, + pytest_max_loops=1, testing_time=0.1, ) # Remove instrumentation - FunctionOptimizer.write_code_and_helpers(candidate_fto_code, candidate_helper_code, fto.file_path) + PythonFunctionOptimizer.write_code_and_helpers(candidate_fto_code, candidate_helper_code, fto.file_path) # Now, this fto_code mutates the instance so it should fail mutated_fto_code = """ @@ -1140,12 +1140,12 @@ def target_function(self): test_env=test_env, test_files=func_optimizer.test_files, optimization_iteration=0, - min_outer_loops=1, - max_outer_loops=1, + pytest_min_loops=1, + pytest_max_loops=1, testing_time=0.1, ) # Remove instrumentation - 
FunctionOptimizer.write_code_and_helpers(candidate_fto_code, candidate_helper_code, fto.file_path) + PythonFunctionOptimizer.write_code_and_helpers(candidate_fto_code, candidate_helper_code, fto.file_path) match, _ = compare_test_results(test_results, mutated_test_results) assert not match @@ -1179,12 +1179,12 @@ def target_function(self): test_env=test_env, test_files=func_optimizer.test_files, optimization_iteration=0, - min_outer_loops=1, - max_outer_loops=1, + pytest_min_loops=1, + pytest_max_loops=1, testing_time=0.1, ) # Remove instrumentation - FunctionOptimizer.write_code_and_helpers(candidate_fto_code, candidate_helper_code, fto.file_path) + PythonFunctionOptimizer.write_code_and_helpers(candidate_fto_code, candidate_helper_code, fto.file_path) match, _ = compare_test_results(test_results, no_helper1_test_results) assert match @@ -1446,7 +1446,7 @@ def calculate_portfolio_metrics( test_framework="pytest", pytest_cmd="pytest", ) - func_optimizer = FunctionOptimizer(function_to_optimize=fto, test_cfg=test_config) + func_optimizer = PythonFunctionOptimizer(function_to_optimize=fto, test_cfg=test_config) func_optimizer.test_files = TestFiles( test_files=[ TestFile( @@ -1471,13 +1471,13 @@ def calculate_portfolio_metrics( test_env=test_env, test_files=func_optimizer.test_files, optimization_iteration=0, - min_outer_loops=1, - max_outer_loops=1, + pytest_min_loops=1, + pytest_max_loops=1, testing_time=0.1, ) # Remove instrumentation - FunctionOptimizer.write_code_and_helpers(candidate_fto_code, candidate_helper_code, fto.file_path) + PythonFunctionOptimizer.write_code_and_helpers(candidate_fto_code, candidate_helper_code, fto.file_path) # Now, let's say we optimize the code and make changes. 
new_fto_code = """import math @@ -1538,12 +1538,12 @@ def risk_adjusted_return(return_val, weight): test_env=test_env, test_files=func_optimizer.test_files, optimization_iteration=0, - min_outer_loops=1, - max_outer_loops=1, + pytest_min_loops=1, + pytest_max_loops=1, testing_time=0.1, ) # Remove instrumentation - FunctionOptimizer.write_code_and_helpers(candidate_fto_code, candidate_helper_code, fto.file_path) + PythonFunctionOptimizer.write_code_and_helpers(candidate_fto_code, candidate_helper_code, fto.file_path) matched, diffs = compare_test_results(test_results, modified_test_results) assert not matched @@ -1601,12 +1601,12 @@ def calculate_portfolio_metrics( test_env=test_env, test_files=func_optimizer.test_files, optimization_iteration=0, - min_outer_loops=1, - max_outer_loops=1, + pytest_min_loops=1, + pytest_max_loops=1, testing_time=0.1, ) # Remove instrumentation - FunctionOptimizer.write_code_and_helpers(candidate_fto_code, candidate_helper_code, fto.file_path) + PythonFunctionOptimizer.write_code_and_helpers(candidate_fto_code, candidate_helper_code, fto.file_path) matched, diffs = compare_test_results(test_results, modified_test_results_2) # now the test should match and no diffs should be found assert len(diffs) == 0 @@ -1671,7 +1671,7 @@ def __init__(self, x, y): file_path=sample_code_path, parents=[FunctionParent(name="SlotsClass", type="ClassDef")], ) - func_optimizer = FunctionOptimizer(function_to_optimize=fto, test_cfg=test_config) + func_optimizer = PythonFunctionOptimizer(function_to_optimize=fto, test_cfg=test_config) func_optimizer.test_files = TestFiles( test_files=[ TestFile( @@ -1687,8 +1687,8 @@ def __init__(self, x, y): test_env=test_env, test_files=func_optimizer.test_files, optimization_iteration=0, - min_outer_loops=1, - max_outer_loops=1, + pytest_min_loops=1, + pytest_max_loops=1, testing_time=0.1, ) diff --git a/tests/test_comparator.py b/tests/test_comparator.py index 100e385fd..28eeb8490 100644 --- a/tests/test_comparator.py 
+++ b/tests/test_comparator.py @@ -417,6 +417,312 @@ class Color4(IntFlag): assert not comparator(id1, id3) +def test_itertools_count() -> None: + import itertools + + # Equal: same start and step (default step=1) + assert comparator(itertools.count(0), itertools.count(0)) + assert comparator(itertools.count(5), itertools.count(5)) + assert comparator(itertools.count(0, 1), itertools.count(0, 1)) + assert comparator(itertools.count(10, 3), itertools.count(10, 3)) + + # Equal: negative start and step + assert comparator(itertools.count(-5, -2), itertools.count(-5, -2)) + + # Equal: float start and step + assert comparator(itertools.count(0.5, 0.1), itertools.count(0.5, 0.1)) + + # Not equal: different start + assert not comparator(itertools.count(0), itertools.count(1)) + assert not comparator(itertools.count(5), itertools.count(10)) + + # Not equal: different step + assert not comparator(itertools.count(0, 1), itertools.count(0, 2)) + assert not comparator(itertools.count(0, 1), itertools.count(0, -1)) + + # Not equal: different type + assert not comparator(itertools.count(0), 0) + assert not comparator(itertools.count(0), [0, 1, 2]) + + # Equal after partial consumption (both advanced to the same state) + a = itertools.count(0) + b = itertools.count(0) + next(a) + next(b) + assert comparator(a, b) + + # Not equal after different consumption + a = itertools.count(0) + b = itertools.count(0) + next(a) + assert not comparator(a, b) + + # Works inside containers + assert comparator([itertools.count(0)], [itertools.count(0)]) + assert comparator({"key": itertools.count(5, 2)}, {"key": itertools.count(5, 2)}) + assert not comparator([itertools.count(0)], [itertools.count(1)]) + + +def test_itertools_repeat() -> None: + import itertools + + # Equal: infinite repeat + assert comparator(itertools.repeat(5), itertools.repeat(5)) + assert comparator(itertools.repeat("hello"), itertools.repeat("hello")) + + # Equal: bounded repeat + assert comparator(itertools.repeat(5, 3), 
itertools.repeat(5, 3)) + assert comparator(itertools.repeat(None, 10), itertools.repeat(None, 10)) + + # Not equal: different value + assert not comparator(itertools.repeat(5), itertools.repeat(6)) + assert not comparator(itertools.repeat(5, 3), itertools.repeat(6, 3)) + + # Not equal: different count + assert not comparator(itertools.repeat(5, 3), itertools.repeat(5, 4)) + + # Not equal: bounded vs infinite + assert not comparator(itertools.repeat(5), itertools.repeat(5, 3)) + + # Not equal: different type + assert not comparator(itertools.repeat(5), 5) + assert not comparator(itertools.repeat(5), [5]) + + # Equal after partial consumption + a = itertools.repeat(5, 5) + b = itertools.repeat(5, 5) + next(a) + next(b) + assert comparator(a, b) + + # Not equal after different consumption + a = itertools.repeat(5, 5) + b = itertools.repeat(5, 5) + next(a) + assert not comparator(a, b) + + # Works inside containers + assert comparator([itertools.repeat(5, 3)], [itertools.repeat(5, 3)]) + assert not comparator([itertools.repeat(5, 3)], [itertools.repeat(5, 4)]) + + +def test_itertools_cycle() -> None: + import itertools + + # Equal: same sequence + assert comparator(itertools.cycle([1, 2, 3]), itertools.cycle([1, 2, 3])) + assert comparator(itertools.cycle("abc"), itertools.cycle("abc")) + + # Not equal: different sequence + assert not comparator(itertools.cycle([1, 2, 3]), itertools.cycle([1, 2, 4])) + assert not comparator(itertools.cycle([1, 2, 3]), itertools.cycle([1, 2])) + + # Not equal: different type + assert not comparator(itertools.cycle([1, 2, 3]), [1, 2, 3]) + + # Equal after same partial consumption + a = itertools.cycle([1, 2, 3]) + b = itertools.cycle([1, 2, 3]) + next(a) + next(b) + assert comparator(a, b) + + # Not equal after different consumption + a = itertools.cycle([1, 2, 3]) + b = itertools.cycle([1, 2, 3]) + next(a) + assert not comparator(a, b) + + # Equal after consuming a full cycle + a = itertools.cycle([1, 2, 3]) + b = itertools.cycle([1, 
2, 3]) + for _ in range(3): + next(a) + next(b) + assert comparator(a, b) + + # Equal at same position across different full-cycle counts + a = itertools.cycle([1, 2, 3]) + b = itertools.cycle([1, 2, 3]) + for _ in range(4): + next(a) + for _ in range(7): + next(b) + # Both at position 1 within the cycle (4%3 == 7%3 == 1) + assert comparator(a, b) + + # Works inside containers + assert comparator([itertools.cycle([1, 2])], [itertools.cycle([1, 2])]) + assert not comparator([itertools.cycle([1, 2])], [itertools.cycle([1, 3])]) + + +def test_itertools_chain() -> None: + import itertools + + assert comparator(itertools.chain([1, 2], [3, 4]), itertools.chain([1, 2], [3, 4])) + assert not comparator(itertools.chain([1, 2], [3, 4]), itertools.chain([1, 2], [3, 5])) + assert comparator(itertools.chain.from_iterable([[1, 2], [3]]), itertools.chain.from_iterable([[1, 2], [3]])) + assert comparator(itertools.chain(), itertools.chain()) + assert not comparator(itertools.chain([1]), itertools.chain([1, 2])) + + +def test_itertools_islice() -> None: + import itertools + + assert comparator(itertools.islice(range(10), 5), itertools.islice(range(10), 5)) + assert not comparator(itertools.islice(range(10), 5), itertools.islice(range(10), 6)) + assert comparator(itertools.islice(range(10), 2, 5), itertools.islice(range(10), 2, 5)) + assert not comparator(itertools.islice(range(10), 2, 5), itertools.islice(range(10), 2, 6)) + + +def test_itertools_product() -> None: + import itertools + + assert comparator(itertools.product("AB", repeat=2), itertools.product("AB", repeat=2)) + assert not comparator(itertools.product("AB", repeat=2), itertools.product("AC", repeat=2)) + assert comparator(itertools.product([1, 2], [3, 4]), itertools.product([1, 2], [3, 4])) + assert not comparator(itertools.product([1, 2], [3, 4]), itertools.product([1, 2], [3, 5])) + + +def test_itertools_permutations_combinations() -> None: + import itertools + + assert comparator(itertools.permutations("ABC", 2), 
itertools.permutations("ABC", 2)) + assert not comparator(itertools.permutations("ABC", 2), itertools.permutations("ABD", 2)) + assert comparator(itertools.combinations("ABCD", 2), itertools.combinations("ABCD", 2)) + assert not comparator(itertools.combinations("ABCD", 2), itertools.combinations("ABCD", 3)) + assert comparator( + itertools.combinations_with_replacement("ABC", 2), + itertools.combinations_with_replacement("ABC", 2), + ) + assert not comparator( + itertools.combinations_with_replacement("ABC", 2), + itertools.combinations_with_replacement("ABD", 2), + ) + + +def test_itertools_accumulate() -> None: + import itertools + + assert comparator(itertools.accumulate([1, 2, 3, 4]), itertools.accumulate([1, 2, 3, 4])) + assert not comparator(itertools.accumulate([1, 2, 3, 4]), itertools.accumulate([1, 2, 3, 5])) + assert comparator(itertools.accumulate([1, 2, 3], initial=10), itertools.accumulate([1, 2, 3], initial=10)) + assert not comparator(itertools.accumulate([1, 2, 3], initial=10), itertools.accumulate([1, 2, 3], initial=0)) + + +def test_itertools_filtering() -> None: + import itertools + + # compress + assert comparator( + itertools.compress("ABCDEF", [1, 0, 1, 0, 1, 1]), + itertools.compress("ABCDEF", [1, 0, 1, 0, 1, 1]), + ) + assert not comparator( + itertools.compress("ABCDEF", [1, 0, 1, 0, 1, 1]), + itertools.compress("ABCDEF", [1, 1, 1, 0, 1, 1]), + ) + + # dropwhile + assert comparator( + itertools.dropwhile(lambda x: x < 5, [1, 4, 6, 4, 1]), + itertools.dropwhile(lambda x: x < 5, [1, 4, 6, 4, 1]), + ) + assert not comparator( + itertools.dropwhile(lambda x: x < 5, [1, 4, 6, 4, 1]), + itertools.dropwhile(lambda x: x < 5, [1, 4, 7, 4, 1]), + ) + + # takewhile + assert comparator( + itertools.takewhile(lambda x: x < 5, [1, 4, 6, 4, 1]), + itertools.takewhile(lambda x: x < 5, [1, 4, 6, 4, 1]), + ) + assert not comparator( + itertools.takewhile(lambda x: x < 5, [1, 4, 6, 4, 1]), + itertools.takewhile(lambda x: x < 5, [1, 3, 6, 4, 1]), + ) + + # 
filterfalse + assert comparator( + itertools.filterfalse(lambda x: x % 2, range(10)), + itertools.filterfalse(lambda x: x % 2, range(10)), + ) + + +def test_itertools_starmap() -> None: + import itertools + + assert comparator( + itertools.starmap(pow, [(2, 3), (3, 2), (10, 0)]), + itertools.starmap(pow, [(2, 3), (3, 2), (10, 0)]), + ) + assert not comparator( + itertools.starmap(pow, [(2, 3), (3, 2)]), + itertools.starmap(pow, [(2, 3), (3, 3)]), + ) + + +def test_itertools_zip_longest() -> None: + import itertools + + assert comparator( + itertools.zip_longest("AB", "xyz", fillvalue="-"), + itertools.zip_longest("AB", "xyz", fillvalue="-"), + ) + assert not comparator( + itertools.zip_longest("AB", "xyz", fillvalue="-"), + itertools.zip_longest("AB", "xyz", fillvalue="*"), + ) + + +def test_itertools_groupby() -> None: + import itertools + + assert comparator(itertools.groupby("AAABBBCC"), itertools.groupby("AAABBBCC")) + assert not comparator(itertools.groupby("AAABBBCC"), itertools.groupby("AAABBCC")) + assert comparator(itertools.groupby([]), itertools.groupby([])) + + # With key function + assert comparator( + itertools.groupby([1, 1, 2, 2, 3], key=lambda x: x), + itertools.groupby([1, 1, 2, 2, 3], key=lambda x: x), + ) + + +@pytest.mark.skipif(sys.version_info < (3, 10), reason="itertools.pairwise requires Python 3.10+") +def test_itertools_pairwise() -> None: + import itertools + + assert comparator(itertools.pairwise([1, 2, 3, 4]), itertools.pairwise([1, 2, 3, 4])) + assert not comparator(itertools.pairwise([1, 2, 3, 4]), itertools.pairwise([1, 2, 3, 5])) + + +@pytest.mark.skipif(sys.version_info < (3, 12), reason="itertools.batched requires Python 3.12+") +def test_itertools_batched() -> None: + import itertools + + assert comparator(itertools.batched("ABCDEFG", 3), itertools.batched("ABCDEFG", 3)) + assert not comparator(itertools.batched("ABCDEFG", 3), itertools.batched("ABCDEFG", 2)) + + +def test_itertools_in_containers() -> None: + import itertools + 
+ # Itertools objects nested in dicts/lists + assert comparator( + {"a": itertools.chain([1], [2]), "b": itertools.islice(range(5), 3)}, + {"a": itertools.chain([1], [2]), "b": itertools.islice(range(5), 3)}, + ) + assert not comparator( + [itertools.product("AB", repeat=2)], + [itertools.product("AC", repeat=2)], + ) + + # Different itertools types should not match + assert not comparator(itertools.chain([1, 2]), itertools.islice([1, 2], 2)) + + def test_numpy(): try: import numpy as np @@ -5216,3 +5522,67 @@ def test_python_tempfile_pattern_regex(self): assert PYTHON_TEMPFILE_PATTERN.search("/tmp/tmp123456/") assert not PYTHON_TEMPFILE_PATTERN.search("/tmp/mydir/file.txt") assert not PYTHON_TEMPFILE_PATTERN.search("/home/tmp123/file.txt") + + +@pytest.mark.skipif(sys.version_info < (3, 10), reason="types.UnionType requires Python 3.10+") +class TestUnionType: + def test_union_type_equal(self): + assert comparator(int | str, int | str) + + def test_union_type_not_equal(self): + assert not comparator(int | str, int | float) + + def test_union_type_order_independent(self): + assert comparator(int | str, str | int) + + def test_union_type_multiple_args(self): + assert comparator(int | str | float, int | str | float) + + def test_union_type_in_list(self): + assert comparator([int | str, 1], [int | str, 1]) + + def test_union_type_in_dict(self): + assert comparator({"key": int | str}, {"key": int | str}) + + def test_union_type_vs_none(self): + assert not comparator(int | str, None) + + +class SlotsOnly: + __slots__ = ("x", "y") + + def __init__(self, x, y): + self.x = x + self.y = y + + +class SlotsInherited(SlotsOnly): + __slots__ = ("z",) + + def __init__(self, x, y, z): + super().__init__(x, y) + self.z = z + + +class TestSlotsObjects: + def test_slots_equal(self): + assert comparator(SlotsOnly(1, 2), SlotsOnly(1, 2)) + + def test_slots_not_equal(self): + assert not comparator(SlotsOnly(1, 2), SlotsOnly(1, 3)) + + def test_slots_inherited_equal(self): + assert 
comparator(SlotsInherited(1, 2, 3), SlotsInherited(1, 2, 3)) + + def test_slots_inherited_not_equal(self): + assert not comparator(SlotsInherited(1, 2, 3), SlotsInherited(1, 2, 4)) + + def test_slots_nested(self): + a = SlotsOnly(SlotsOnly(1, 2), [3, 4]) + b = SlotsOnly(SlotsOnly(1, 2), [3, 4]) + assert comparator(a, b) + + def test_slots_nested_not_equal(self): + a = SlotsOnly(SlotsOnly(1, 2), [3, 4]) + b = SlotsOnly(SlotsOnly(1, 9), [3, 4]) + assert not comparator(a, b) diff --git a/tests/test_existing_tests_source_for.py b/tests/test_existing_tests_source_for.py index 2afa30eb8..69b96e0a9 100644 --- a/tests/test_existing_tests_source_for.py +++ b/tests/test_existing_tests_source_for.py @@ -294,6 +294,7 @@ class MockTestConfig: """Mocks codeflash.verification.verification_utils.TestConfig""" tests_root: Path + tests_project_rootdir: Path = Path(".") @contextlib.contextmanager diff --git a/tests/test_function_dependencies.py b/tests/test_function_dependencies.py index 988f60b7b..ad39262a7 100644 --- a/tests/test_function_dependencies.py +++ b/tests/test_function_dependencies.py @@ -5,7 +5,7 @@ from codeflash.discovery.functions_to_optimize import FunctionToOptimize from codeflash.either import is_successful from codeflash.models.models import FunctionParent -from codeflash.optimization.function_optimizer import FunctionOptimizer +from codeflash.languages.python.function_optimizer import PythonFunctionOptimizer from codeflash.verification.verification_utils import TestConfig @@ -132,7 +132,7 @@ def test_class_method_dependencies() -> None: starting_line=None, ending_line=None, ) - func_optimizer = FunctionOptimizer( + func_optimizer = PythonFunctionOptimizer( function_to_optimize=function_to_optimize, test_cfg=TestConfig( tests_root=file_path, @@ -163,6 +163,7 @@ def test_class_method_dependencies() -> None: == """# file: test_function_dependencies.py from collections import defaultdict + class Graph: def __init__(self, vertices): self.graph = defaultdict(list) @@ 
-201,7 +202,7 @@ def test_recursive_function_context() -> None: starting_line=None, ending_line=None, ) - func_optimizer = FunctionOptimizer( + func_optimizer = PythonFunctionOptimizer( function_to_optimize=function_to_optimize, test_cfg=TestConfig( tests_root=file_path, diff --git a/tests/test_function_discovery.py b/tests/test_function_discovery.py index 3232d8be2..db2eb54a7 100644 --- a/tests/test_function_discovery.py +++ b/tests/test_function_discovery.py @@ -680,8 +680,14 @@ def test_in_dunder_tests(): # Combine all discovered functions all_functions = {} - for discovered in [discovered_source, discovered_test, discovered_test_underscore, - discovered_spec, discovered_tests_dir, discovered_dunder_tests]: + for discovered in [ + discovered_source, + discovered_test, + discovered_test_underscore, + discovered_spec, + discovered_tests_dir, + discovered_dunder_tests, + ]: all_functions.update(discovered) # Test Case 1: tests_root == module_root (overlapping case) @@ -781,9 +787,7 @@ def test_filter_functions_strict_string_matching(): # Strict check: exactly these 3 files should remain (those with 'test' as substring only) expected_files = {contest_file, latest_file, attestation_file} - assert set(filtered.keys()) == expected_files, ( - f"Expected files {expected_files}, got {set(filtered.keys())}" - ) + assert set(filtered.keys()) == expected_files, f"Expected files {expected_files}, got {set(filtered.keys())}" # Strict check: each file should have exactly 1 function with the expected name assert [fn.function_name for fn in filtered[contest_file]] == ["run_contest"], ( @@ -871,9 +875,7 @@ def test_filter_functions_test_directory_patterns(): # Strict check: exactly these 2 files should remain (those in non-test directories) expected_files = {contest_file, latest_file} - assert set(filtered.keys()) == expected_files, ( - f"Expected files {expected_files}, got {set(filtered.keys())}" - ) + assert set(filtered.keys()) == expected_files, f"Expected files 
{expected_files}, got {set(filtered.keys())}" # Strict check: each file should have exactly 1 function with the expected name assert [fn.function_name for fn in filtered[contest_file]] == ["get_scores"], ( @@ -936,9 +938,7 @@ def test_filter_functions_non_overlapping_tests_root(): # Strict check: exactly these 2 files should remain (both in src/, not in tests/) expected_files = {source_file, test_in_src} - assert set(filtered.keys()) == expected_files, ( - f"Expected files {expected_files}, got {set(filtered.keys())}" - ) + assert set(filtered.keys()) == expected_files, f"Expected files {expected_files}, got {set(filtered.keys())}" # Strict check: each file should have exactly 1 function with the expected name assert [fn.function_name for fn in filtered[source_file]] == ["process"], ( @@ -1047,20 +1047,15 @@ def test_deep_copy(): ) root_functions = [fn.function_name for fn in filtered.get(root_source_file, [])] - assert root_functions == ["main"], ( - f"Expected ['main'], got {root_functions}" - ) + assert root_functions == ["main"], f"Expected ['main'], got {root_functions}" # Strict check: exactly 3 functions (2 from utils.py + 1 from main.py) assert count == 3, ( - f"Expected exactly 3 functions, got {count}. " - f"Some source files may have been incorrectly filtered." + f"Expected exactly 3 functions, got {count}. Some source files may have been incorrectly filtered." 
) # Verify test file was properly filtered (should not be in results) - assert test_file not in filtered, ( - f"Test file {test_file} should have been filtered but wasn't" - ) + assert test_file not in filtered, f"Test file {test_file} should have been filtered but wasn't" def test_filter_functions_typescript_project_in_tests_folder(): @@ -1214,9 +1209,7 @@ def sample_data(): # source_file and file_in_test_dir should remain # test_prefix_file, conftest_file, and test_in_subdir should be filtered expected_files = {source_file, file_in_test_dir} - assert set(filtered.keys()) == expected_files, ( - f"Expected {expected_files}, got {set(filtered.keys())}" - ) + assert set(filtered.keys()) == expected_files, f"Expected {expected_files}, got {set(filtered.keys())}" assert count == 2, f"Expected exactly 2 functions, got {count}" @@ -1266,7 +1259,8 @@ def helper_method(self): """) support = PythonSupport() - functions = support.discover_functions(fixture_file) + source = fixture_file.read_text(encoding="utf-8") + functions = support.discover_functions(source, fixture_file) function_names = [fn.function_name for fn in functions] assert "regular_function" in function_names diff --git a/tests/test_get_helper_code.py b/tests/test_get_helper_code.py index 1772f25fd..875263a1a 100644 --- a/tests/test_get_helper_code.py +++ b/tests/test_get_helper_code.py @@ -7,7 +7,7 @@ from codeflash.discovery.functions_to_optimize import FunctionToOptimize from codeflash.either import is_successful from codeflash.models.models import FunctionParent, get_code_block_splitter -from codeflash.optimization.function_optimizer import FunctionOptimizer +from codeflash.languages.python.function_optimizer import PythonFunctionOptimizer from codeflash.optimization.optimizer import Optimizer from codeflash.verification.verification_utils import TestConfig @@ -233,7 +233,7 @@ def __call__(self, *args: _P.args, **kwargs: _P.kwargs) -> _R: test_framework="pytest", pytest_cmd="pytest", ) - func_optimizer = 
FunctionOptimizer(function_to_optimize=function_to_optimize, test_cfg=test_config) + func_optimizer = PythonFunctionOptimizer(function_to_optimize=function_to_optimize, test_cfg=test_config) with open(file_path) as f: original_code = f.read() ctx_result = func_optimizer.get_code_optimization_context() @@ -404,7 +404,7 @@ def test_bubble_sort_deps() -> None: test_framework="pytest", pytest_cmd="pytest", ) - func_optimizer = FunctionOptimizer(function_to_optimize=function_to_optimize, test_cfg=test_config) + func_optimizer = PythonFunctionOptimizer(function_to_optimize=function_to_optimize, test_cfg=test_config) with open(file_path) as f: original_code = f.read() ctx_result = func_optimizer.get_code_optimization_context() @@ -427,6 +427,7 @@ def dep2_swap(arr, j): from code_to_optimize.bubble_sort_dep1_helper import dep1_comparer from code_to_optimize.bubble_sort_dep2_swap import dep2_swap + def sorter_deps(arr): for i in range(len(arr)): for j in range(len(arr) - 1): diff --git a/tests/test_get_read_only_code.py b/tests/test_get_read_only_code.py index c6de2cc27..73db3d5cb 100644 --- a/tests/test_get_read_only_code.py +++ b/tests/test_get_read_only_code.py @@ -23,7 +23,7 @@ class TestClass: class_var = "value" """ - output = parse_code_and_prune_cst(dedent(code), CodeContextType.READ_ONLY, {"TestClass.target_method"}, set()) + output = parse_code_and_prune_cst(dedent(code), CodeContextType.READ_ONLY, {"TestClass.target_method"}, set()).code assert dedent(expected).strip() == output.strip() @@ -47,7 +47,7 @@ def __str__(self): return f"Value: {self.x}" """ - output = parse_code_and_prune_cst(dedent(code), CodeContextType.READ_ONLY, {"TestClass.target_method"}, set()) + output = parse_code_and_prune_cst(dedent(code), CodeContextType.READ_ONLY, {"TestClass.target_method"}, set()).code assert dedent(expected).strip() == output.strip() @@ -75,7 +75,7 @@ def __str__(self): output = parse_code_and_prune_cst( dedent(code), CodeContextType.READ_ONLY, 
{"TestClass.target_method"}, set(), remove_docstrings=True - ) + ).code assert dedent(expected).strip() == output.strip() @@ -102,7 +102,7 @@ def __str__(self): output = parse_code_and_prune_cst( dedent(code), CodeContextType.READ_ONLY, {"TestClass.target_method"}, set(), remove_docstrings=True - ) + ).code assert dedent(expected).strip() == output.strip() @@ -131,7 +131,7 @@ def __str__(self): output = parse_code_and_prune_cst( dedent(code), CodeContextType.READ_ONLY, {"TestClass.target_method"}, set(), remove_docstrings=True - ) + ).code assert dedent(expected).strip() == output.strip() @@ -171,7 +171,7 @@ class TestClass: \"\"\"Class docstring.\"\"\" """ - output = parse_code_and_prune_cst(dedent(code), CodeContextType.READ_ONLY, {"TestClass.target_method"}, set()) + output = parse_code_and_prune_cst(dedent(code), CodeContextType.READ_ONLY, {"TestClass.target_method"}, set()).code assert dedent(expected).strip() == output.strip() @@ -190,7 +190,7 @@ def class_method(cls, param: int = 42) -> None: expected = """""" - output = parse_code_and_prune_cst(dedent(code), CodeContextType.READ_ONLY, {"TestClass.target_method"}, set()) + output = parse_code_and_prune_cst(dedent(code), CodeContextType.READ_ONLY, {"TestClass.target_method"}, set()).code assert dedent(expected).strip() == output.strip() @@ -212,7 +212,7 @@ def __init__(self): output = parse_code_and_prune_cst( dedent(code), CodeContextType.READ_ONLY, {"TestClass.target1", "TestClass.target2"}, set() - ) + ).code assert dedent(expected).strip() == output.strip() @@ -232,7 +232,7 @@ class TestClass: var2: str """ - output = parse_code_and_prune_cst(dedent(code), CodeContextType.READ_ONLY, {"TestClass.target_method"}, set()) + output = parse_code_and_prune_cst(dedent(code), CodeContextType.READ_ONLY, {"TestClass.target_method"}, set()).code assert dedent(expected).strip() == output.strip() @@ -254,7 +254,7 @@ class TestClass: var2: str """ - output = parse_code_and_prune_cst(dedent(code), 
CodeContextType.READ_ONLY, {"TestClass.target_method"}, set()) + output = parse_code_and_prune_cst(dedent(code), CodeContextType.READ_ONLY, {"TestClass.target_method"}, set()).code assert dedent(expected).strip() == output.strip() @@ -280,7 +280,7 @@ class TestClass: continue """ - output = parse_code_and_prune_cst(dedent(code), CodeContextType.READ_ONLY, {"TestClass.target_method"}, set()) + output = parse_code_and_prune_cst(dedent(code), CodeContextType.READ_ONLY, {"TestClass.target_method"}, set()).code assert dedent(expected).strip() == output.strip() @@ -316,7 +316,7 @@ class TestClass: var2: str """ - output = parse_code_and_prune_cst(dedent(code), CodeContextType.READ_ONLY, {"TestClass.target_method"}, set()) + output = parse_code_and_prune_cst(dedent(code), CodeContextType.READ_ONLY, {"TestClass.target_method"}, set()).code assert dedent(expected).strip() == output.strip() @@ -331,7 +331,7 @@ def some_function(): expected = """""" - output = parse_code_and_prune_cst(dedent(code), CodeContextType.READ_ONLY, {"target_function"}, set()) + output = parse_code_and_prune_cst(dedent(code), CodeContextType.READ_ONLY, {"target_function"}, set()).code assert dedent(expected).strip() == output.strip() @@ -350,7 +350,7 @@ def some_function(): x = 5 """ - output = parse_code_and_prune_cst(dedent(code), CodeContextType.READ_ONLY, {"target_function"}, set()) + output = parse_code_and_prune_cst(dedent(code), CodeContextType.READ_ONLY, {"target_function"}, set()).code assert dedent(expected).strip() == output.strip() @@ -377,7 +377,7 @@ def some_function(): z = 10 """ - output = parse_code_and_prune_cst(dedent(code), CodeContextType.READ_ONLY, {"target_function"}, set()) + output = parse_code_and_prune_cst(dedent(code), CodeContextType.READ_ONLY, {"target_function"}, set()).code assert dedent(expected).strip() == output.strip() @@ -412,7 +412,7 @@ class PlatformClass: platform = "other" """ - output = parse_code_and_prune_cst(dedent(code), CodeContextType.READ_ONLY, 
{"PlatformClass.target_method"}, set()) + output = parse_code_and_prune_cst(dedent(code), CodeContextType.READ_ONLY, {"PlatformClass.target_method"}, set()).code assert dedent(expected).strip() == output.strip() @@ -471,7 +471,7 @@ class TestClass: error_type = "cleanup" """ - output = parse_code_and_prune_cst(dedent(code), CodeContextType.READ_ONLY, {"TestClass.target_method"}, set()) + output = parse_code_and_prune_cst(dedent(code), CodeContextType.READ_ONLY, {"TestClass.target_method"}, set()).code assert dedent(expected).strip() == output.strip() @@ -524,7 +524,7 @@ class TestClass: context = "cleanup" """ - output = parse_code_and_prune_cst(dedent(code), CodeContextType.READ_ONLY, {"TestClass.target_method"}, set()) + output = parse_code_and_prune_cst(dedent(code), CodeContextType.READ_ONLY, {"TestClass.target_method"}, set()).code assert dedent(expected).strip() == output.strip() @@ -573,7 +573,7 @@ class TestClass: status = "cancelled" """ - output = parse_code_and_prune_cst(dedent(code), CodeContextType.READ_ONLY, {"TestClass.target_method"}, set()) + output = parse_code_and_prune_cst(dedent(code), CodeContextType.READ_ONLY, {"TestClass.target_method"}, set()).code assert dedent(expected).strip() == output.strip() @@ -675,7 +675,7 @@ def __str__(self) -> str: output = parse_code_and_prune_cst( dedent(code), CodeContextType.READ_ONLY, {"DataProcessor.target_method", "ResultHandler.target_method"}, set() - ) + ).code assert dedent(expected).strip() == output.strip() @@ -768,5 +768,5 @@ def __str__(self) -> str: {"DataProcessor.target_method", "ResultHandler.target_method"}, set(), remove_docstrings=True, - ) + ).code assert dedent(expected).strip() == output.strip() diff --git a/tests/test_get_read_writable_code.py b/tests/test_get_read_writable_code.py index c6bbdd04b..c4fb7d7aa 100644 --- a/tests/test_get_read_writable_code.py +++ b/tests/test_get_read_writable_code.py @@ -13,7 +13,7 @@ def target_function(): y = 2 return x + y """ - result = 
parse_code_and_prune_cst(dedent(code), CodeContextType.READ_WRITABLE, {"target_function"}) + result = parse_code_and_prune_cst(dedent(code), CodeContextType.READ_WRITABLE, {"target_function"}).code expected = dedent(""" def target_function(): @@ -32,7 +32,7 @@ def target_function(self): y = 2 return x + y """ - result = parse_code_and_prune_cst(dedent(code), CodeContextType.READ_WRITABLE, {"MyClass.target_function"}) + result = parse_code_and_prune_cst(dedent(code), CodeContextType.READ_WRITABLE, {"MyClass.target_function"}).code expected = dedent(""" class MyClass: @@ -56,7 +56,7 @@ def target_method(self): def other_method(self): print("this should be excluded") """ - result = parse_code_and_prune_cst(dedent(code), CodeContextType.READ_WRITABLE, {"MyClass.target_method"}) + result = parse_code_and_prune_cst(dedent(code), CodeContextType.READ_WRITABLE, {"MyClass.target_method"}).code expected = dedent(""" class MyClass: @@ -80,7 +80,7 @@ class Inner: def not_findable(self): return 42 """ - result = parse_code_and_prune_cst(dedent(code), CodeContextType.READ_WRITABLE, {"Outer.target_method"}) + result = parse_code_and_prune_cst(dedent(code), CodeContextType.READ_WRITABLE, {"Outer.target_method"}).code expected = dedent(""" class Outer: @@ -100,7 +100,7 @@ def method1(self): def target_function(): return 42 """ - result = parse_code_and_prune_cst(dedent(code), CodeContextType.READ_WRITABLE, {"target_function"}) + result = parse_code_and_prune_cst(dedent(code), CodeContextType.READ_WRITABLE, {"target_function"}).code expected = dedent(""" def target_function(): @@ -123,7 +123,7 @@ class ClassC: def process(self): return "C" """ - result = parse_code_and_prune_cst(dedent(code), CodeContextType.READ_WRITABLE, {"ClassA.process", "ClassC.process"}) + result = parse_code_and_prune_cst(dedent(code), CodeContextType.READ_WRITABLE, {"ClassA.process", "ClassC.process"}).code expected = dedent(""" class ClassA: @@ -148,7 +148,7 @@ class ErrorClass: def handle_error(self): 
print("error") """ - result = parse_code_and_prune_cst(dedent(code), CodeContextType.READ_WRITABLE, {"TargetClass.target_method"}) + result = parse_code_and_prune_cst(dedent(code), CodeContextType.READ_WRITABLE, {"TargetClass.target_method"}).code expected = dedent(""" try: @@ -175,7 +175,7 @@ def other_method(self): def target_method(self): return f"Value: {self.x}" """ - result = parse_code_and_prune_cst(dedent(code), CodeContextType.READ_WRITABLE, {"MyClass.target_method"}) + result = parse_code_and_prune_cst(dedent(code), CodeContextType.READ_WRITABLE, {"MyClass.target_method"}).code expected = dedent(""" class MyClass: @@ -200,7 +200,7 @@ def other_method(self): def target_method(self): return f"Value: {self.x}" """ - result = parse_code_and_prune_cst(dedent(code), CodeContextType.READ_WRITABLE, {"MyClass.target_method"}) + result = parse_code_and_prune_cst(dedent(code), CodeContextType.READ_WRITABLE, {"MyClass.target_method"}).code expected = dedent(""" class MyClass: @@ -221,7 +221,7 @@ class Inner: def target(self): pass """ - result = parse_code_and_prune_cst(dedent(code), CodeContextType.READ_WRITABLE, {"MyClass.Inner.target"}) + result = parse_code_and_prune_cst(dedent(code), CodeContextType.READ_WRITABLE, {"MyClass.Inner.target"}).code expected = dedent(""" class MyClass: def method(self): @@ -266,5 +266,55 @@ def target_function(self) -> None: var2 = "test" """ - output = parse_code_and_prune_cst(dedent(code), CodeContextType.READ_WRITABLE, {"target_function"}) + output = parse_code_and_prune_cst(dedent(code), CodeContextType.READ_WRITABLE, {"target_function"}).code assert dedent(expected).strip() == output.strip() + + +def test_comment_between_imports_and_variable_preserves_position() -> None: + code = """ + from __future__ import annotations + + import re + from dataclasses import dataclass, field + + # NOTE: This comment documents the constant below. + # It should stay right above SOME_RE, not jump to the top of the file. 
+ SOME_RE = re.compile(r"^pattern", re.MULTILINE) + + + @dataclass(slots=True) + class Item: + name: str + value: int + children: list[Item] = field(default_factory=list) + + + def parse(text: str) -> list[Item]: + root = Item(name="root", value=0) + for m in SOME_RE.finditer(text): + root.children.append(Item(name=m.group(), value=1)) + return root.children + """ + + expected = """ + # NOTE: This comment documents the constant below. + # It should stay right above SOME_RE, not jump to the top of the file. + SOME_RE = re.compile(r"^pattern", re.MULTILINE) + + + @dataclass(slots=True) + class Item: + name: str + value: int + children: list[Item] = field(default_factory=list) + + + def parse(text: str) -> list[Item]: + root = Item(name="root", value=0) + for m in SOME_RE.finditer(text): + root.children.append(Item(name=m.group(), value=1)) + return root.children + """ + + result = parse_code_and_prune_cst(dedent(code), CodeContextType.READ_WRITABLE, {"parse"}).code + assert result.strip() == dedent(expected).strip() diff --git a/tests/test_get_testgen_code.py b/tests/test_get_testgen_code.py index 01c3ae153..42af2d742 100644 --- a/tests/test_get_testgen_code.py +++ b/tests/test_get_testgen_code.py @@ -13,7 +13,7 @@ def target_function(): y = 2 return x + y """ - result = parse_code_and_prune_cst(dedent(code), CodeContextType.TESTGEN, {"target_function"}, set()) + result = parse_code_and_prune_cst(dedent(code), CodeContextType.TESTGEN, {"target_function"}, set()).code expected = """ def target_function(): @@ -44,7 +44,7 @@ def target_method(self): print("This should be included") """ - output = parse_code_and_prune_cst(dedent(code), CodeContextType.TESTGEN, {"TestClass.target_method"}, set()) + output = parse_code_and_prune_cst(dedent(code), CodeContextType.TESTGEN, {"TestClass.target_method"}, set()).code assert dedent(expected).strip() == output.strip() @@ -73,7 +73,7 @@ def target_method(self): print("include me") """ - output = 
parse_code_and_prune_cst(dedent(code), CodeContextType.TESTGEN, {"TestClass.target_method"}, set()) + output = parse_code_and_prune_cst(dedent(code), CodeContextType.TESTGEN, {"TestClass.target_method"}, set()).code assert dedent(expected).strip() == output.strip() @@ -107,7 +107,7 @@ def target_method(self): output = parse_code_and_prune_cst( dedent(code), CodeContextType.TESTGEN, {"TestClass.target_method"}, set(), remove_docstrings=True - ) + ).code assert dedent(expected).strip() == output.strip() @@ -139,7 +139,7 @@ def target_method(self): output = parse_code_and_prune_cst( dedent(code), CodeContextType.TESTGEN, {"TestClass.target_method"}, set(), remove_docstrings=True - ) + ).code assert dedent(expected).strip() == output.strip() @@ -181,7 +181,7 @@ def target_method(self) -> str: return "value" """ - output = parse_code_and_prune_cst(dedent(code), CodeContextType.TESTGEN, {"TestClass.target_method"}, set()) + output = parse_code_and_prune_cst(dedent(code), CodeContextType.TESTGEN, {"TestClass.target_method"}, set()).code assert dedent(expected).strip() == output.strip() @@ -215,7 +215,7 @@ def __init__(self): output = parse_code_and_prune_cst( dedent(code), CodeContextType.TESTGEN, {"TestClass.target1", "TestClass.target2"}, set() - ) + ).code assert dedent(expected).strip() == output.strip() @@ -238,7 +238,7 @@ def target_method(self) -> None: self.var2 = "test" """ - output = parse_code_and_prune_cst(dedent(code), CodeContextType.TESTGEN, {"TestClass.target_method"}, set()) + output = parse_code_and_prune_cst(dedent(code), CodeContextType.TESTGEN, {"TestClass.target_method"}, set()).code assert dedent(expected).strip() == output.strip() @@ -263,7 +263,7 @@ def target_method(self) -> None: self.var2 = "test" """ - output = parse_code_and_prune_cst(dedent(code), CodeContextType.TESTGEN, {"TestClass.target_method"}, set()) + output = parse_code_and_prune_cst(dedent(code), CodeContextType.TESTGEN, {"TestClass.target_method"}, set()).code assert 
dedent(expected).strip() == output.strip() @@ -304,7 +304,7 @@ def target_method(self): print("other") """ - output = parse_code_and_prune_cst(dedent(code), CodeContextType.TESTGEN, {"PlatformClass.target_method"}, set()) + output = parse_code_and_prune_cst(dedent(code), CodeContextType.TESTGEN, {"PlatformClass.target_method"}, set()).code assert dedent(expected).strip() == output.strip() @@ -333,7 +333,7 @@ def handle_error(self): print("error") """ - output = parse_code_and_prune_cst(dedent(code), CodeContextType.TESTGEN, {"TargetClass.target_method"}, set()) + output = parse_code_and_prune_cst(dedent(code), CodeContextType.TESTGEN, {"TargetClass.target_method"}, set()).code assert dedent(expected).strip() == output.strip() @@ -355,7 +355,7 @@ def target_function(self) -> None: x = 5 """ - output = parse_code_and_prune_cst(dedent(code), CodeContextType.TESTGEN, {"target_function"}, set()) + output = parse_code_and_prune_cst(dedent(code), CodeContextType.TESTGEN, {"target_function"}, set()).code assert dedent(expected).strip() == output.strip() @@ -385,7 +385,7 @@ def target_function(self) -> None: z = 10 """ - output = parse_code_and_prune_cst(dedent(code), CodeContextType.TESTGEN, {"target_function"}, set()) + output = parse_code_and_prune_cst(dedent(code), CodeContextType.TESTGEN, {"target_function"}, set()).code assert dedent(expected).strip() == output.strip() @@ -416,7 +416,7 @@ def process(self): output = parse_code_and_prune_cst( dedent(code), CodeContextType.TESTGEN, {"ClassA.process", "ClassC.process"}, set() - ) + ).code assert dedent(expected).strip() == output.strip() @@ -477,7 +477,7 @@ def target_method(self): print("cleanup") """ - output = parse_code_and_prune_cst(dedent(code), CodeContextType.TESTGEN, {"TestClass.target_method"}, set()) + output = parse_code_and_prune_cst(dedent(code), CodeContextType.TESTGEN, {"TestClass.target_method"}, set()).code assert dedent(expected).strip() == output.strip() @@ -532,7 +532,7 @@ async def 
target_method(self): await self.cleanup() """ - output = parse_code_and_prune_cst(dedent(code), CodeContextType.TESTGEN, {"TestClass.target_method"}, set()) + output = parse_code_and_prune_cst(dedent(code), CodeContextType.TESTGEN, {"TestClass.target_method"}, set()).code assert dedent(expected).strip() == output.strip() @@ -659,7 +659,7 @@ def target_method(self, key: str) -> None: output = parse_code_and_prune_cst( dedent(code), CodeContextType.TESTGEN, {"DataProcessor.target_method", "ResultHandler.target_method"}, set() - ) + ).code assert dedent(expected).strip() == output.strip() @@ -765,5 +765,5 @@ def target_method(self, key: str) -> None: {"DataProcessor.target_method", "ResultHandler.target_method"}, set(), remove_docstrings=True, - ) + ).code assert dedent(expected).strip() == output.strip() diff --git a/tests/test_init_javascript.py b/tests/test_init_javascript.py index 87509cbad..59c38c547 100644 --- a/tests/test_init_javascript.py +++ b/tests/test_init_javascript.py @@ -1,6 +1,8 @@ """Tests for JavaScript/TypeScript project initialization and package manager detection.""" +import json from pathlib import Path +from unittest.mock import patch import pytest @@ -8,6 +10,7 @@ JsPackageManager, determine_js_package_manager, get_package_install_command, + should_modify_package_json_config, ) @@ -281,3 +284,67 @@ def test_different_package_name(self, tmp_project: Path) -> None: result = get_package_install_command(tmp_project, "typescript", dev=True) assert result == ["pnpm", "add", "typescript", "--save-dev"] + + +class TestShouldModifySkipConfirm: + """Tests for should_modify_package_json_config with skip_confirm.""" + + def test_should_modify_skip_confirm_no_config(self, tmp_project: Path, monkeypatch: pytest.MonkeyPatch) -> None: + """With skip_confirm and no codeflash config, should return (True, None).""" + monkeypatch.chdir(tmp_project) + (tmp_project / "package.json").write_text(json.dumps({"name": "test"})) + + should_modify, config = 
should_modify_package_json_config(skip_confirm=True) + + assert should_modify is True + assert config is None + + def test_should_modify_skip_confirm_with_valid_config( + self, tmp_project: Path, monkeypatch: pytest.MonkeyPatch + ) -> None: + """With skip_confirm and valid config, should return (False, config) — no reconfigure.""" + monkeypatch.chdir(tmp_project) + codeflash_config = {"moduleRoot": "."} + (tmp_project / "package.json").write_text( + json.dumps({"name": "test", "codeflash": codeflash_config}) + ) + + should_modify, config = should_modify_package_json_config(skip_confirm=True) + + assert should_modify is False + assert config == codeflash_config + + def test_should_modify_skip_confirm_with_invalid_config( + self, tmp_project: Path, monkeypatch: pytest.MonkeyPatch + ) -> None: + """With skip_confirm and invalid config (bad moduleRoot), should return (True, None).""" + monkeypatch.chdir(tmp_project) + codeflash_config = {"moduleRoot": "/nonexistent/path/that/does/not/exist"} + (tmp_project / "package.json").write_text( + json.dumps({"name": "test", "codeflash": codeflash_config}) + ) + + should_modify, config = should_modify_package_json_config(skip_confirm=True) + + assert should_modify is True + assert config is None + + +class TestCollectJsSetupInfoSkipConfirm: + """Tests for collect_js_setup_info with skip_confirm.""" + + def test_collect_js_setup_info_skip_confirm(self, tmp_project: Path, monkeypatch: pytest.MonkeyPatch) -> None: + """skip_confirm should return defaults without any interactive prompts.""" + monkeypatch.chdir(tmp_project) + (tmp_project / "package.json").write_text(json.dumps({"name": "test"})) + + from codeflash.cli_cmds.init_javascript import ProjectLanguage, collect_js_setup_info + + # Should not call any prompt functions + with patch("codeflash.cli_cmds.init_javascript.inquirer") as mock_inquirer: + setup_info = collect_js_setup_info(ProjectLanguage.JAVASCRIPT, skip_confirm=True) + mock_inquirer.prompt.assert_not_called() + + 
assert setup_info.module_root_override is None + assert setup_info.formatter_override is None + assert setup_info.git_remote == "origin" diff --git a/tests/test_instrument_all_and_run.py b/tests/test_instrument_all_and_run.py index 1dee9479c..a00f74e14 100644 --- a/tests/test_instrument_all_and_run.py +++ b/tests/test_instrument_all_and_run.py @@ -165,8 +165,8 @@ def test_sort(): test_env=test_env, test_files=func_optimizer.test_files, optimization_iteration=0, - min_outer_loops=1, - max_outer_loops=1, + pytest_min_loops=1, + pytest_max_loops=1, testing_time=0.1, ) @@ -210,8 +210,8 @@ def test_sort(): test_env=test_env, test_files=func_optimizer.test_files, optimization_iteration=0, - min_outer_loops=1, - max_outer_loops=1, + pytest_min_loops=1, + pytest_max_loops=1, testing_time=0.1, ) out_str = """codeflash stdout: Sorting list @@ -342,8 +342,8 @@ def test_sort(): test_env=test_env, test_files=func_optimizer.test_files, optimization_iteration=0, - min_outer_loops=1, - max_outer_loops=1, + pytest_min_loops=1, + pytest_max_loops=1, testing_time=0.1, ) assert len(test_results) == 4 @@ -388,8 +388,8 @@ def test_sort(): test_env=test_env, test_files=func_optimizer.test_files, optimization_iteration=0, - min_outer_loops=1, - max_outer_loops=1, + pytest_min_loops=1, + pytest_max_loops=1, testing_time=0.1, ) @@ -452,8 +452,8 @@ def sorter(self, arr): test_env=test_env, test_files=func_optimizer.test_files, optimization_iteration=0, - min_outer_loops=1, - max_outer_loops=1, + pytest_min_loops=1, + pytest_max_loops=1, testing_time=0.1, ) assert len(new_test_results) == 4 @@ -612,8 +612,8 @@ def test_sort(): test_env=test_env, test_files=func_optimizer.test_files, optimization_iteration=0, - min_outer_loops=1, - max_outer_loops=1, + pytest_min_loops=1, + pytest_max_loops=1, testing_time=0.1, ) assert len(test_results) == 2 @@ -655,8 +655,8 @@ def test_sort(): test_env=test_env, test_files=func_optimizer.test_files, optimization_iteration=0, - min_outer_loops=1, - 
max_outer_loops=1, + pytest_min_loops=1, + pytest_max_loops=1, testing_time=0.1, ) @@ -783,8 +783,8 @@ def test_sort(): test_env=test_env, test_files=func_optimizer.test_files, optimization_iteration=0, - min_outer_loops=1, - max_outer_loops=1, + pytest_min_loops=1, + pytest_max_loops=1, testing_time=0.1, ) assert len(test_results) == 2 @@ -826,8 +826,8 @@ def test_sort(): test_env=test_env, test_files=func_optimizer.test_files, optimization_iteration=0, - min_outer_loops=1, - max_outer_loops=1, + pytest_min_loops=1, + pytest_max_loops=1, testing_time=0.1, ) diff --git a/tests/test_instrument_line_profiler.py b/tests/test_instrument_line_profiler.py index 9b1716481..e34d8a722 100644 --- a/tests/test_instrument_line_profiler.py +++ b/tests/test_instrument_line_profiler.py @@ -5,7 +5,7 @@ from codeflash.languages.python.static_analysis.line_profile_utils import add_decorator_imports, contains_jit_decorator from codeflash.discovery.functions_to_optimize import FunctionToOptimize from codeflash.models.models import CodeOptimizationContext -from codeflash.optimization.function_optimizer import FunctionOptimizer +from codeflash.languages.python.function_optimizer import PythonFunctionOptimizer from codeflash.verification.verification_utils import TestConfig @@ -22,7 +22,7 @@ def test_add_decorator_imports_helper_in_class(): pytest_cmd="pytest", ) func = FunctionToOptimize(function_name="sort_classmethod", parents=[], file_path=code_path) - func_optimizer = FunctionOptimizer(function_to_optimize=func, test_cfg=test_config) + func_optimizer = PythonFunctionOptimizer(function_to_optimize=func, test_cfg=test_config) os.chdir(run_cwd) # func_optimizer = pass try: @@ -94,7 +94,7 @@ def test_add_decorator_imports_helper_in_nested_class(): pytest_cmd="pytest", ) func = FunctionToOptimize(function_name="sort_classmethod", parents=[], file_path=code_path) - func_optimizer = FunctionOptimizer(function_to_optimize=func, test_cfg=test_config) + func_optimizer = 
PythonFunctionOptimizer(function_to_optimize=func, test_cfg=test_config) os.chdir(run_cwd) # func_optimizer = pass try: @@ -143,7 +143,7 @@ def test_add_decorator_imports_nodeps(): pytest_cmd="pytest", ) func = FunctionToOptimize(function_name="sorter", parents=[], file_path=code_path) - func_optimizer = FunctionOptimizer(function_to_optimize=func, test_cfg=test_config) + func_optimizer = PythonFunctionOptimizer(function_to_optimize=func, test_cfg=test_config) os.chdir(run_cwd) # func_optimizer = pass try: @@ -194,7 +194,7 @@ def test_add_decorator_imports_helper_outside(): pytest_cmd="pytest", ) func = FunctionToOptimize(function_name="sorter_deps", parents=[], file_path=code_path) - func_optimizer = FunctionOptimizer(function_to_optimize=func, test_cfg=test_config) + func_optimizer = PythonFunctionOptimizer(function_to_optimize=func, test_cfg=test_config) os.chdir(run_cwd) # func_optimizer = pass try: @@ -271,7 +271,7 @@ def __init__(self, arr): pytest_cmd="pytest", ) func = FunctionToOptimize(function_name="sorter", parents=[], file_path=code_write_path) - func_optimizer = FunctionOptimizer(function_to_optimize=func, test_cfg=test_config) + func_optimizer = PythonFunctionOptimizer(function_to_optimize=func, test_cfg=test_config) os.chdir(run_cwd) # func_optimizer = pass try: diff --git a/tests/test_instrument_tests.py b/tests/test_instrument_tests.py index f172b5159..d5670d55f 100644 --- a/tests/test_instrument_tests.py +++ b/tests/test_instrument_tests.py @@ -15,8 +15,9 @@ FunctionImportedAsVisitor, inject_profiling_into_existing_test, ) -from codeflash.languages.python.static_analysis.line_profile_utils import add_decorator_imports from codeflash.discovery.functions_to_optimize import FunctionToOptimize +from codeflash.languages.python.function_optimizer import PythonFunctionOptimizer +from codeflash.languages.python.static_analysis.line_profile_utils import add_decorator_imports from codeflash.models.models import ( CodeOptimizationContext, CodePosition, @@ 
-27,7 +28,6 @@ TestsInFile, TestType, ) -from codeflash.optimization.function_optimizer import FunctionOptimizer from codeflash.verification.verification_utils import TestConfig codeflash_wrap_string = """def codeflash_wrap(codeflash_wrapped, codeflash_test_module_name, codeflash_test_class_name, codeflash_test_name, codeflash_function_name, codeflash_line_id, codeflash_loop_index, codeflash_cur, codeflash_con, *args, **kwargs): @@ -434,7 +434,7 @@ def test_sort(): test_framework="pytest", pytest_cmd="pytest", ) - func_optimizer = FunctionOptimizer(function_to_optimize=func, test_cfg=test_config) + func_optimizer = PythonFunctionOptimizer(function_to_optimize=func, test_cfg=test_config) test_env = os.environ.copy() test_env["CODEFLASH_TEST_ITERATION"] = "0" test_env["CODEFLASH_LOOP_INDEX"] = "1" @@ -454,8 +454,8 @@ def test_sort(): test_env=test_env, test_files=test_files, optimization_iteration=0, - min_outer_loops=1, - max_outer_loops=1, + pytest_min_loops=1, + pytest_max_loops=1, testing_time=0.1, ) assert test_results[0].id.function_getting_tested == "sorter" @@ -489,8 +489,8 @@ def test_sort(): test_env=test_env, test_files=test_files, optimization_iteration=0, - min_outer_loops=1, - max_outer_loops=1, + pytest_min_loops=1, + pytest_max_loops=1, testing_time=0.1, ) assert test_results_perf[0].id.function_getting_tested == "sorter" @@ -541,8 +541,8 @@ def test_sort(): test_env=test_env, test_files=test_files, optimization_iteration=0, - min_outer_loops=1, - max_outer_loops=1, + pytest_min_loops=1, + pytest_max_loops=1, testing_time=0.1, line_profiler_output_file=line_profiler_output_file, ) @@ -695,14 +695,14 @@ def test_sort_parametrized(input, expected_output): test_framework="pytest", pytest_cmd="pytest", ) - func_optimizer = FunctionOptimizer(function_to_optimize=func, test_cfg=test_config) + func_optimizer = PythonFunctionOptimizer(function_to_optimize=func, test_cfg=test_config) test_results, coverage_data = func_optimizer.run_and_parse_tests( 
testing_type=TestingMode.BEHAVIOR, test_env=test_env, test_files=test_files, optimization_iteration=0, - min_outer_loops=1, - max_outer_loops=1, + pytest_min_loops=1, + pytest_max_loops=1, testing_time=0.1, ) assert test_results[0].id.function_getting_tested == "sorter" @@ -755,8 +755,8 @@ def test_sort_parametrized(input, expected_output): test_env=test_env, test_files=test_files, optimization_iteration=0, - min_outer_loops=1, - max_outer_loops=1, + pytest_min_loops=1, + pytest_max_loops=1, testing_time=0.1, ) assert test_results_perf[0].id.function_getting_tested == "sorter" @@ -812,8 +812,8 @@ def test_sort_parametrized(input, expected_output): test_env=test_env, test_files=test_files, optimization_iteration=0, - min_outer_loops=1, - max_outer_loops=1, + pytest_min_loops=1, + pytest_max_loops=1, testing_time=0.1, line_profiler_output_file=line_profiler_output_file, ) @@ -984,14 +984,14 @@ def test_sort_parametrized_loop(input, expected_output): test_framework="pytest", pytest_cmd="pytest", ) - func_optimizer = FunctionOptimizer(function_to_optimize=func, test_cfg=test_config) + func_optimizer = PythonFunctionOptimizer(function_to_optimize=func, test_cfg=test_config) test_results, coverage_data = func_optimizer.run_and_parse_tests( testing_type=TestingMode.BEHAVIOR, test_env=test_env, test_files=test_files, optimization_iteration=0, - min_outer_loops=1, - max_outer_loops=1, + pytest_min_loops=1, + pytest_max_loops=1, testing_time=0.1, ) assert test_results[0].id.function_getting_tested == "sorter" @@ -1081,8 +1081,8 @@ def test_sort_parametrized_loop(input, expected_output): test_env=test_env, test_files=test_files, optimization_iteration=0, - min_outer_loops=1, - max_outer_loops=1, + pytest_min_loops=1, + pytest_max_loops=1, testing_time=0.1, ) @@ -1171,8 +1171,8 @@ def test_sort_parametrized_loop(input, expected_output): test_env=test_env, test_files=test_files, optimization_iteration=0, - min_outer_loops=1, - max_outer_loops=1, + pytest_min_loops=1, + 
pytest_max_loops=1, testing_time=0.1, line_profiler_output_file=line_profiler_output_file, ) @@ -1341,14 +1341,14 @@ def test_sort(): test_framework="pytest", pytest_cmd="pytest", ) - func_optimizer = FunctionOptimizer(function_to_optimize=func, test_cfg=test_config) + func_optimizer = PythonFunctionOptimizer(function_to_optimize=func, test_cfg=test_config) test_results, coverage_data = func_optimizer.run_and_parse_tests( testing_type=TestingMode.BEHAVIOR, test_env=test_env, test_files=test_files, optimization_iteration=0, - min_outer_loops=1, - max_outer_loops=1, + pytest_min_loops=1, + pytest_max_loops=1, testing_time=0.1, ) assert test_results[0].id.function_getting_tested == "sorter" @@ -1389,8 +1389,8 @@ def test_sort(): test_env=test_env, test_files=test_files, optimization_iteration=0, - min_outer_loops=1, - max_outer_loops=1, + pytest_min_loops=1, + pytest_max_loops=1, testing_time=0.1, ) assert test_results[0].id.function_getting_tested == "sorter" @@ -1453,8 +1453,8 @@ def test_sort(): test_env=test_env, test_files=test_files, optimization_iteration=0, - min_outer_loops=1, - max_outer_loops=1, + pytest_min_loops=1, + pytest_max_loops=1, testing_time=0.1, line_profiler_output_file=line_profiler_output_file, ) @@ -1723,14 +1723,14 @@ def test_sort(self): test_framework="unittest", pytest_cmd="pytest", ) - func_optimizer = FunctionOptimizer(function_to_optimize=func, test_cfg=test_config) + func_optimizer = PythonFunctionOptimizer(function_to_optimize=func, test_cfg=test_config) test_results, coverage_data = func_optimizer.run_and_parse_tests( testing_type=TestingMode.BEHAVIOR, test_env=test_env, test_files=test_files, optimization_iteration=0, - min_outer_loops=1, - max_outer_loops=1, + pytest_min_loops=1, + pytest_max_loops=1, testing_time=0.1, ) assert test_results[0].id.function_getting_tested == "sorter" @@ -1779,8 +1779,8 @@ def test_sort(self): test_env=test_env, test_files=test_files, optimization_iteration=0, - min_outer_loops=1, - 
max_outer_loops=1, + pytest_min_loops=1, + pytest_max_loops=1, testing_time=0.1, ) assert test_results[0].id.function_getting_tested == "sorter" @@ -1973,14 +1973,14 @@ def test_sort(self, input, expected_output): test_framework="unittest", pytest_cmd="pytest", ) - func_optimizer = FunctionOptimizer(function_to_optimize=func, test_cfg=test_config) + func_optimizer = PythonFunctionOptimizer(function_to_optimize=func, test_cfg=test_config) test_results, coverage_data = func_optimizer.run_and_parse_tests( testing_type=TestingMode.BEHAVIOR, test_env=test_env, test_files=test_files, optimization_iteration=0, - min_outer_loops=1, - max_outer_loops=1, + pytest_min_loops=1, + pytest_max_loops=1, testing_time=0.1, ) assert test_results[0].id.function_getting_tested == "sorter" @@ -2034,8 +2034,8 @@ def test_sort(self, input, expected_output): test_env=test_env, test_files=test_files, optimization_iteration=0, - min_outer_loops=1, - max_outer_loops=1, + pytest_min_loops=1, + pytest_max_loops=1, testing_time=0.1, ) assert test_results[0].id.function_getting_tested == "sorter" @@ -2229,14 +2229,14 @@ def test_sort(self): test_framework="unittest", pytest_cmd="pytest", ) - func_optimizer = FunctionOptimizer(function_to_optimize=func, test_cfg=test_config) + func_optimizer = PythonFunctionOptimizer(function_to_optimize=func, test_cfg=test_config) test_results, coverage_data = func_optimizer.run_and_parse_tests( test_env=test_env, testing_type=TestingMode.BEHAVIOR, test_files=test_files, optimization_iteration=0, - min_outer_loops=1, - max_outer_loops=1, + pytest_min_loops=1, + pytest_max_loops=1, testing_time=0.1, ) assert test_results[0].id.function_getting_tested == "sorter" @@ -2290,8 +2290,8 @@ def test_sort(self): testing_type=TestingMode.PERFORMANCE, test_files=test_files, optimization_iteration=0, - min_outer_loops=1, - max_outer_loops=1, + pytest_min_loops=1, + pytest_max_loops=1, testing_time=0.1, ) assert test_results[0].id.function_getting_tested == "sorter" @@ 
-2481,14 +2481,14 @@ def test_sort(self, input, expected_output): test_framework="unittest", pytest_cmd="pytest", ) - func_optimizer = FunctionOptimizer(function_to_optimize=f, test_cfg=test_config) + func_optimizer = PythonFunctionOptimizer(function_to_optimize=f, test_cfg=test_config) test_results, coverage_data = func_optimizer.run_and_parse_tests( testing_type=TestingMode.BEHAVIOR, test_env=test_env, test_files=test_files, optimization_iteration=0, - min_outer_loops=1, - max_outer_loops=1, + pytest_min_loops=1, + pytest_max_loops=1, testing_time=0.1, ) assert test_results[0].id.function_getting_tested == "sorter" @@ -2574,8 +2574,8 @@ def test_sort(self, input, expected_output): test_env=test_env, test_files=test_files, optimization_iteration=0, - min_outer_loops=1, - max_outer_loops=1, + pytest_min_loops=1, + pytest_max_loops=1, testing_time=0.1, ) assert test_results[0].id.function_getting_tested == "sorter" @@ -3144,7 +3144,7 @@ def test_sleepfunc_sequence_short(n, expected_total_sleep_time): test_framework="pytest", pytest_cmd="pytest", ) - func_optimizer = FunctionOptimizer(function_to_optimize=func, test_cfg=test_config) + func_optimizer = PythonFunctionOptimizer(function_to_optimize=func, test_cfg=test_config) test_files = TestFiles( test_files=[ TestFile( @@ -3160,8 +3160,8 @@ def test_sleepfunc_sequence_short(n, expected_total_sleep_time): test_env=test_env, test_files=test_files, optimization_iteration=0, - min_outer_loops=2, - max_outer_loops=2, + pytest_min_loops=2, + pytest_max_loops=2, testing_time=0.1, ) @@ -3279,14 +3279,14 @@ def test_sleepfunc_sequence_short(self, n, expected_total_sleep_time): test_framework="unittest", pytest_cmd="pytest", ) - func_optimizer = FunctionOptimizer(function_to_optimize=func, test_cfg=test_config) + func_optimizer = PythonFunctionOptimizer(function_to_optimize=func, test_cfg=test_config) test_results, coverage_data = func_optimizer.run_and_parse_tests( testing_type=TestingMode.PERFORMANCE, test_env=test_env, 
test_files=test_files, optimization_iteration=0, - min_outer_loops=1, - max_outer_loops=1, + pytest_min_loops=1, + pytest_max_loops=1, testing_time=0.1, ) diff --git a/tests/test_instrumentation_run_results_aiservice.py b/tests/test_instrumentation_run_results_aiservice.py index 0c3cb37aa..4879cc93a 100644 --- a/tests/test_instrumentation_run_results_aiservice.py +++ b/tests/test_instrumentation_run_results_aiservice.py @@ -177,8 +177,8 @@ def test_single_element_list(): test_env=test_env, test_files=test_files, optimization_iteration=0, - min_outer_loops=1, - max_outer_loops=1, + pytest_min_loops=1, + pytest_max_loops=1, testing_time=0.1, ) assert test_results[0].id.function_getting_tested == "sorter" @@ -217,8 +217,8 @@ def sorter(self, arr): test_env=test_env, test_files=test_files, optimization_iteration=0, - min_outer_loops=1, - max_outer_loops=1, + pytest_min_loops=1, + pytest_max_loops=1, testing_time=0.1, ) # assert test_results_mutated_attr[0].return_value[1]["self"].x == 1 TODO: add self as input to function @@ -318,8 +318,8 @@ def test_single_element_list(): test_env=test_env, test_files=test_files, optimization_iteration=0, - min_outer_loops=1, - max_outer_loops=1, + pytest_min_loops=1, + pytest_max_loops=1, testing_time=0.1, ) # Verify instance_state result, which checks instance state right after __init__, using codeflash_capture @@ -395,8 +395,8 @@ def sorter(self, arr): test_env=test_env, test_files=test_files, optimization_iteration=0, - min_outer_loops=1, - max_outer_loops=1, + pytest_min_loops=1, + pytest_max_loops=1, testing_time=0.1, ) # assert test_results_mutated_attr[0].return_value[0]["self"].x == 1 TODO: add self as input @@ -449,8 +449,8 @@ def sorter(self, arr): test_env=test_env, test_files=test_files, optimization_iteration=0, - min_outer_loops=1, - max_outer_loops=1, + pytest_min_loops=1, + pytest_max_loops=1, testing_time=0.1, ) assert test_results_new_attr[0].id.function_getting_tested == "BubbleSorter.__init__" diff --git 
a/tests/test_java_tests_project_rootdir.py b/tests/test_java_tests_project_rootdir.py index 9aa2f3163..8274964a0 100644 --- a/tests/test_java_tests_project_rootdir.py +++ b/tests/test_java_tests_project_rootdir.py @@ -5,7 +5,7 @@ from codeflash.discovery.discover_unit_tests import discover_unit_tests from codeflash.languages.base import Language -from codeflash.languages.current import set_current_language +from codeflash.languages.current import reset_current_language, set_current_language from codeflash.verification.verification_utils import TestConfig @@ -32,15 +32,18 @@ def test_java_tests_project_rootdir_set_to_tests_root(tmp_path): mock_java_function.language = "java" file_to_funcs = {Path("dummy.java"): [mock_java_function]} - # Mock is_python() to return False and is_java() to return True - # These are imported from codeflash.languages - with patch("codeflash.languages.is_python", return_value=False), \ - patch("codeflash.languages.is_java", return_value=True), \ - patch("codeflash.discovery.discover_unit_tests.discover_tests_for_language") as mock_discover: - mock_discover.return_value = ({}, 0, 0) - - # Call discover_unit_tests - discover_unit_tests(test_cfg, file_to_funcs_to_optimize=file_to_funcs) + # Set current language to Java so is_python() returns False and + # current_language_support() returns JavaSupport with its + # adjust_test_config_for_discovery implementation + set_current_language(Language.JAVA) + try: + with patch("codeflash.discovery.discover_unit_tests.discover_tests_for_language") as mock_discover: + mock_discover.return_value = ({}, 0, 0) + + # Call discover_unit_tests + discover_unit_tests(test_cfg, file_to_funcs_to_optimize=file_to_funcs) + finally: + reset_current_language() # Verify that tests_project_rootdir was updated to tests_root assert test_cfg.tests_project_rootdir == tests_root, ( diff --git a/tests/test_languages/test_code_context_extraction.py b/tests/test_languages/test_code_context_extraction.py index 
b7b12a69c..5c411b037 100644 --- a/tests/test_languages/test_code_context_extraction.py +++ b/tests/test_languages/test_code_context_extraction.py @@ -20,12 +20,15 @@ from __future__ import annotations +from unittest.mock import MagicMock + import pytest from codeflash.discovery.functions_to_optimize import FunctionToOptimize from codeflash.languages.base import Language from codeflash.languages.javascript.support import JavaScriptSupport, TypeScriptSupport -from codeflash.languages.python.context.code_context_extractor import get_code_optimization_context_for_language +from codeflash.languages.javascript.function_optimizer import JavaScriptFunctionOptimizer +from codeflash.verification.verification_utils import TestConfig @pytest.fixture @@ -61,7 +64,8 @@ def test_simple_function_no_dependencies(self, js_support, temp_project): file_path = temp_project / "math.js" file_path.write_text(code, encoding="utf-8") - functions = js_support.discover_functions(file_path) + source = file_path.read_text(encoding="utf-8") + functions = js_support.discover_functions(source, file_path) assert len(functions) == 1 func = functions[0] @@ -87,7 +91,8 @@ def test_arrow_function_with_implicit_return(self, js_support, temp_project): file_path = temp_project / "math.js" file_path.write_text(code, encoding="utf-8") - functions = js_support.discover_functions(file_path) + source = file_path.read_text(encoding="utf-8") + functions = js_support.discover_functions(source, file_path) assert len(functions) == 1 func = functions[0] assert func.function_name == "multiply" @@ -121,7 +126,8 @@ def test_function_with_simple_jsdoc(self, js_support, temp_project): file_path = temp_project / "math.js" file_path.write_text(code, encoding="utf-8") - functions = js_support.discover_functions(file_path) + source = file_path.read_text(encoding="utf-8") + functions = js_support.discover_functions(source, file_path) func = functions[0] context = js_support.extract_code_context(func, temp_project, 
temp_project) @@ -173,7 +179,8 @@ def test_function_with_complex_jsdoc_types(self, js_support, temp_project): file_path = temp_project / "processor.js" file_path.write_text(code, encoding="utf-8") - functions = js_support.discover_functions(file_path) + source = file_path.read_text(encoding="utf-8") + functions = js_support.discover_functions(source, file_path) func = functions[0] context = js_support.extract_code_context(func, temp_project, temp_project) @@ -243,7 +250,8 @@ def test_class_with_jsdoc_on_class_and_methods(self, js_support, temp_project): file_path = temp_project / "cache.js" file_path.write_text(code, encoding="utf-8") - functions = js_support.discover_functions(file_path) + source = file_path.read_text(encoding="utf-8") + functions = js_support.discover_functions(source, file_path) get_or_compute = next(f for f in functions if f.function_name == "getOrCompute") context = js_support.extract_code_context(get_or_compute, temp_project, temp_project) @@ -339,7 +347,8 @@ def test_jsdoc_with_typedef_and_callback(self, js_support, temp_project): file_path = temp_project / "validator.js" file_path.write_text(code, encoding="utf-8") - functions = js_support.discover_functions(file_path) + source = file_path.read_text(encoding="utf-8") + functions = js_support.discover_functions(source, file_path) func = next(f for f in functions if f.function_name == "validateUserData") context = js_support.extract_code_context(func, temp_project, temp_project) @@ -429,7 +438,8 @@ def test_function_with_multiple_complex_constants(self, js_support, temp_project file_path = temp_project / "api.js" file_path.write_text(code, encoding="utf-8") - functions = js_support.discover_functions(file_path) + source = file_path.read_text(encoding="utf-8") + functions = js_support.discover_functions(source, file_path) func = next(f for f in functions if f.function_name == "fetchWithRetry") context = js_support.extract_code_context(func, temp_project, temp_project) @@ -515,7 +525,8 @@ def 
test_function_with_regex_and_template_constants(self, js_support, temp_proje file_path = temp_project / "validation.js" file_path.write_text(code, encoding="utf-8") - functions = js_support.discover_functions(file_path) + source = file_path.read_text(encoding="utf-8") + functions = js_support.discover_functions(source, file_path) func = functions[0] context = js_support.extract_code_context(func, temp_project, temp_project) @@ -578,7 +589,8 @@ def test_function_with_chain_of_helpers(self, js_support, temp_project): file_path = temp_project / "processor.js" file_path.write_text(code, encoding="utf-8") - functions = js_support.discover_functions(file_path) + source = file_path.read_text(encoding="utf-8") + functions = js_support.discover_functions(source, file_path) process_func = next(f for f in functions if f.function_name == "processUserInput") context = js_support.extract_code_context(process_func, temp_project, temp_project) @@ -633,7 +645,8 @@ def test_function_with_multiple_unrelated_helpers(self, js_support, temp_project file_path = temp_project / "report.js" file_path.write_text(code, encoding="utf-8") - functions = js_support.discover_functions(file_path) + source = file_path.read_text(encoding="utf-8") + functions = js_support.discover_functions(source, file_path) report_func = next(f for f in functions if f.function_name == "generateReport") context = js_support.extract_code_context(report_func, temp_project, temp_project) @@ -731,7 +744,8 @@ def test_graph_topological_sort(self, js_support, temp_project): file_path = temp_project / "graph.js" file_path.write_text(code, encoding="utf-8") - functions = js_support.discover_functions(file_path) + source = file_path.read_text(encoding="utf-8") + functions = js_support.discover_functions(source, file_path) topo_sort = next(f for f in functions if f.function_name == "topologicalSort") context = js_support.extract_code_context(topo_sort, temp_project, temp_project) @@ -819,7 +833,8 @@ def 
test_class_method_using_nested_helper_class(self, js_support, temp_project): file_path = temp_project / "classes.js" file_path.write_text(code, encoding="utf-8") - functions = js_support.discover_functions(file_path) + source = file_path.read_text(encoding="utf-8") + functions = js_support.discover_functions(source, file_path) main_method = next(f for f in functions if f.function_name == "mainMethod" and f.class_name == "MainClass") context = js_support.extract_code_context(main_method, temp_project, temp_project) @@ -875,7 +890,8 @@ def test_helper_from_another_file_commonjs(self, js_support, temp_project): main_path = temp_project / "bubble_sort_imported.js" main_path.write_text(main_code, encoding="utf-8") - functions = js_support.discover_functions(main_path) + source = main_path.read_text(encoding="utf-8") + functions = js_support.discover_functions(source, main_path) main_func = next(f for f in functions if f.function_name == "sortFromAnotherFile") context = js_support.extract_code_context(main_func, temp_project, temp_project) @@ -926,7 +942,8 @@ def test_helper_from_another_file_esm(self, js_support, temp_project): main_path = temp_project / "main.js" main_path.write_text(main_code, encoding="utf-8") - functions = js_support.discover_functions(main_path) + source = main_path.read_text(encoding="utf-8") + functions = js_support.discover_functions(source, main_path) process_func = next(f for f in functions if f.function_name == "processNumber") context = js_support.extract_code_context(process_func, temp_project, temp_project) @@ -992,7 +1009,8 @@ def test_chained_imports_across_three_files(self, js_support, temp_project): main_path = temp_project / "main.js" main_path.write_text(main_code, encoding="utf-8") - functions = js_support.discover_functions(main_path) + source = main_path.read_text(encoding="utf-8") + functions = js_support.discover_functions(source, main_path) handle_func = next(f for f in functions if f.function_name == "handleUserInput") context 
= js_support.extract_code_context(handle_func, temp_project, temp_project) @@ -1043,7 +1061,8 @@ def test_function_with_complex_generic_types(self, ts_support, temp_project): file_path = temp_project / "entity.ts" file_path.write_text(code, encoding="utf-8") - functions = ts_support.discover_functions(file_path) + source = file_path.read_text(encoding="utf-8") + functions = ts_support.discover_functions(source, file_path) func = functions[0] context = ts_support.extract_code_context(func, temp_project, temp_project) @@ -1133,7 +1152,8 @@ def test_class_with_private_fields_and_typed_methods(self, ts_support, temp_proj file_path = temp_project / "cache.ts" file_path.write_text(code, encoding="utf-8") - functions = ts_support.discover_functions(file_path) + source = file_path.read_text(encoding="utf-8") + functions = ts_support.discover_functions(source, file_path) get_method = next(f for f in functions if f.function_name == "get") context = ts_support.extract_code_context(get_method, temp_project, temp_project) @@ -1217,7 +1237,8 @@ def test_typescript_with_type_imports(self, ts_support, temp_project): service_path = temp_project / "service.ts" service_path.write_text(service_code, encoding="utf-8") - functions = ts_support.discover_functions(service_path) + source = service_path.read_text(encoding="utf-8") + functions = ts_support.discover_functions(source, service_path) func = next(f for f in functions if f.function_name == "createUser") context = ts_support.extract_code_context(func, temp_project, temp_project) @@ -1271,7 +1292,8 @@ def test_self_recursive_factorial(self, js_support, temp_project): file_path = temp_project / "math.js" file_path.write_text(code, encoding="utf-8") - functions = js_support.discover_functions(file_path) + source = file_path.read_text(encoding="utf-8") + functions = js_support.discover_functions(source, file_path) func = functions[0] context = js_support.extract_code_context(func, temp_project, temp_project) @@ -1301,7 +1323,8 @@ def 
test_mutually_recursive_even_odd(self, js_support, temp_project): file_path = temp_project / "parity.js" file_path.write_text(code, encoding="utf-8") - functions = js_support.discover_functions(file_path) + source = file_path.read_text(encoding="utf-8") + functions = js_support.discover_functions(source, file_path) is_even = next(f for f in functions if f.function_name == "isEven") context = js_support.extract_code_context(is_even, temp_project, temp_project) @@ -1319,12 +1342,15 @@ def test_mutually_recursive_even_odd(self, js_support, temp_project): assert helper_names == ["isOdd"] # Verify helper source - assert context.helper_functions[0].source_code == """\ + assert ( + context.helper_functions[0].source_code + == """\ export function isOdd(n) { if (n === 0) return false; return isEven(n - 1); } """ + ) def test_complex_recursive_tree_traversal(self, js_support, temp_project): """Test complex recursive tree traversal with multiple recursive calls.""" @@ -1363,7 +1389,8 @@ def test_complex_recursive_tree_traversal(self, js_support, temp_project): file_path = temp_project / "tree.js" file_path.write_text(code, encoding="utf-8") - functions = js_support.discover_functions(file_path) + source = file_path.read_text(encoding="utf-8") + functions = js_support.discover_functions(source, file_path) collect_func = next(f for f in functions if f.function_name == "collectAllValues") context = js_support.extract_code_context(collect_func, temp_project, temp_project) @@ -1428,7 +1455,8 @@ def test_async_function_chain(self, js_support, temp_project): file_path = temp_project / "api.js" file_path.write_text(code, encoding="utf-8") - functions = js_support.discover_functions(file_path) + source = file_path.read_text(encoding="utf-8") + functions = js_support.discover_functions(source, file_path) profile_func = next(f for f in functions if f.function_name == "fetchUserProfile") context = js_support.extract_code_context(profile_func, temp_project, temp_project) @@ -1483,7 
+1511,8 @@ def test_extract_and_replace_class_method(self, js_support, temp_project): file_path = temp_project / "counter.js" file_path.write_text(original_source, encoding="utf-8") - functions = js_support.discover_functions(file_path) + source = file_path.read_text(encoding="utf-8") + functions = js_support.discover_functions(source, file_path) increment_func = next(fn for fn in functions if fn.function_name == "increment") # Step 1: Extract code context @@ -1563,7 +1592,8 @@ def test_function_with_complex_destructuring(self, js_support, temp_project): file_path = temp_project / "api.js" file_path.write_text(code, encoding="utf-8") - functions = js_support.discover_functions(file_path) + source = file_path.read_text(encoding="utf-8") + functions = js_support.discover_functions(source, file_path) func = functions[0] context = js_support.extract_code_context(func, temp_project, temp_project) @@ -1605,7 +1635,8 @@ def test_generator_function(self, js_support, temp_project): file_path = temp_project / "generators.js" file_path.write_text(code, encoding="utf-8") - functions = js_support.discover_functions(file_path) + source = file_path.read_text(encoding="utf-8") + functions = js_support.discover_functions(source, file_path) range_func = next(f for f in functions if f.function_name == "range") context = js_support.extract_code_context(range_func, temp_project, temp_project) @@ -1640,7 +1671,8 @@ def test_function_with_computed_property_names(self, js_support, temp_project): file_path = temp_project / "user.js" file_path.write_text(code, encoding="utf-8") - functions = js_support.discover_functions(file_path) + source = file_path.read_text(encoding="utf-8") + functions = js_support.discover_functions(source, file_path) func = functions[0] context = js_support.extract_code_context(func, temp_project, temp_project) @@ -1790,7 +1822,8 @@ def test_with_tricky_helpers(self, ts_support, temp_project): file_path.write_text(code, encoding="utf-8") target_func = 
"sendSlackMessage" - functions = ts_support.discover_functions(file_path) + source = file_path.read_text(encoding="utf-8") + functions = ts_support.discover_functions(source, file_path) func_info = next(f for f in functions if f.function_name == target_func) fto = FunctionToOptimize( function_name=target_func, @@ -1804,9 +1837,11 @@ def test_with_tricky_helpers(self, ts_support, temp_project): language="typescript", ) - ctx = get_code_optimization_context_for_language( - fto, temp_project + test_config = TestConfig( + tests_root=temp_project, tests_project_rootdir=temp_project, project_root_path=temp_project ) + func_optimizer = JavaScriptFunctionOptimizer(function_to_optimize=fto, test_cfg=test_config, aiservice_client=MagicMock()) + ctx = func_optimizer.get_code_optimization_context().unwrap() # The read_writable_code should contain the target function AND helper functions expected_read_writable = """```typescript:slack_util.ts @@ -1899,7 +1934,6 @@ def test_with_tricky_helpers(self, ts_support, temp_project): assert ctx.read_only_context_code == expected_read_only - class TestContextProperties: """Tests for CodeContext object properties.""" @@ -1913,7 +1947,8 @@ def test_javascript_context_has_correct_language(self, js_support, temp_project) file_path = temp_project / "test.js" file_path.write_text(code, encoding="utf-8") - functions = js_support.discover_functions(file_path) + source = file_path.read_text(encoding="utf-8") + functions = js_support.discover_functions(source, file_path) context = js_support.extract_code_context(functions[0], temp_project, temp_project) assert context.language == Language.JAVASCRIPT @@ -1932,7 +1967,8 @@ def test_typescript_context_has_javascript_language(self, ts_support, temp_proje file_path = temp_project / "test.ts" file_path.write_text(code, encoding="utf-8") - functions = ts_support.discover_functions(file_path) + source = file_path.read_text(encoding="utf-8") + functions = ts_support.discover_functions(source, file_path) 
context = ts_support.extract_code_context(functions[0], temp_project, temp_project) # TypeScript uses JavaScript language enum @@ -1974,7 +2010,8 @@ def test_all_class_methods_produce_valid_syntax(self, js_support, temp_project): file_path = temp_project / "calculator.js" file_path.write_text(code, encoding="utf-8") - functions = js_support.discover_functions(file_path) + source = file_path.read_text(encoding="utf-8") + functions = js_support.discover_functions(source, file_path) for func in functions: if func.function_name != "constructor": diff --git a/tests/test_languages/test_java/test_comparison_decision.py b/tests/test_languages/test_java/test_comparison_decision.py index 9bbf55eeb..1228b3481 100644 --- a/tests/test_languages/test_java/test_comparison_decision.py +++ b/tests/test_languages/test_java/test_comparison_decision.py @@ -207,30 +207,24 @@ class TestDecisionPointDocumentation: these tests will alert us so we can update our understanding. """ - def test_decision_point_exists_in_function_optimizer(self): - """Verify the decision logic pattern exists in function_optimizer.py source. + def test_decision_point_exists_in_java_function_optimizer(self): + """Verify the comparison decision logic exists in JavaFunctionOptimizer. - The comparison decision at lines ~2816-2836 checks: - 1. if not is_python() -> enters non-Python path - 2. original_sqlite.exists() and candidate_sqlite.exists() -> SQLite path - 3. else -> fail with error (strict correctness) + After refactoring to protocol dispatch, the comparison routing lives in + JavaFunctionOptimizer.compare_candidate_results which checks: + 1. original_sqlite.exists() and candidate_sqlite.exists() -> SQLite path + 2. else -> fallback to pass/fail comparison This is a canary test: if the pattern is refactored, this test fails to alert that the routing logic has changed. 
""" - import codeflash.optimization.function_optimizer as fo_module + import codeflash.languages.java.function_optimizer as java_fo_module - source = inspect.getsource(fo_module) - - # Verify the non-Python branch exists - assert "if not is_python():" in source, ( - "Decision point 'if not is_python():' not found in function_optimizer.py. " - "The comparison routing logic may have been refactored." - ) + source = inspect.getsource(java_fo_module) # Verify SQLite file existence check assert "original_sqlite.exists()" in source, ( - "SQLite existence check 'original_sqlite.exists()' not found. " + "SQLite existence check 'original_sqlite.exists()' not found in JavaFunctionOptimizer. " "The SQLite comparison routing may have been refactored." ) diff --git a/tests/test_languages/test_java/test_instrumentation.py b/tests/test_languages/test_java/test_instrumentation.py index e0d6de086..5f67f125d 100644 --- a/tests/test_languages/test_java/test_instrumentation.py +++ b/tests/test_languages/test_java/test_instrumentation.py @@ -2189,8 +2189,8 @@ def test_run_and_parse_behavior_mode(self, java_project): test_env=test_env, test_files=func_optimizer.test_files, optimization_iteration=0, - min_outer_loops=1, - max_outer_loops=1, + pytest_min_loops=1, + pytest_max_loops=1, testing_time=0.1, ) @@ -2340,8 +2340,8 @@ def test_run_and_parse_performance_mode(self, java_project): test_env=test_env, test_files=func_optimizer.test_files, optimization_iteration=0, - min_outer_loops=1, - max_outer_loops=1, # Only 1 outer loop (Maven invocation) + pytest_min_loops=1, + pytest_max_loops=1, # Only 1 outer loop (Maven invocation) testing_time=1.0, ) @@ -2464,8 +2464,8 @@ def test_run_and_parse_multiple_test_methods(self, java_project): test_env=test_env, test_files=func_optimizer.test_files, optimization_iteration=0, - min_outer_loops=1, - max_outer_loops=1, + pytest_min_loops=1, + pytest_max_loops=1, testing_time=0.1, ) @@ -2566,8 +2566,8 @@ def test_run_and_parse_failing_test(self, 
java_project): test_env=test_env, test_files=func_optimizer.test_files, optimization_iteration=0, - min_outer_loops=1, - max_outer_loops=1, + pytest_min_loops=1, + pytest_max_loops=1, testing_time=0.1, ) @@ -2751,8 +2751,8 @@ def test_behavior_mode_writes_to_sqlite(self, java_project): test_env=test_env, test_files=func_optimizer.test_files, optimization_iteration=0, - min_outer_loops=1, - max_outer_loops=1, + pytest_min_loops=1, + pytest_max_loops=1, testing_time=0.1, ) diff --git a/tests/test_languages/test_java/test_integration.py b/tests/test_languages/test_java/test_integration.py index d0820e38e..5e0de4ba2 100644 --- a/tests/test_languages/test_java/test_integration.py +++ b/tests/test_languages/test_java/test_integration.py @@ -6,17 +6,11 @@ from codeflash.languages.base import FunctionFilterCriteria, Language from codeflash.languages.java import ( - JavaSupport, - detect_build_tool, detect_java_project, discover_functions, discover_functions_from_source, discover_test_methods, - discover_tests, extract_code_context, - find_helper_functions, - find_test_root, - format_java_code, get_java_analyzer, get_java_support, is_java_project, @@ -196,7 +190,8 @@ def test_full_optimization_cycle(self, support, tmp_path: Path): """) # 1. 
Discover functions - functions = support.discover_functions(src_file) + source = src_file.read_text(encoding="utf-8") + functions = support.discover_functions(source, src_file) assert len(functions) == 1 assert functions[0].function_name == "reverse" diff --git a/tests/test_languages/test_java/test_java_test_paths.py b/tests/test_languages/test_java/test_java_test_paths.py index 2a9256f9c..9c39ccf9c 100644 --- a/tests/test_languages/test_java/test_java_test_paths.py +++ b/tests/test_languages/test_java/test_java_test_paths.py @@ -1,14 +1,9 @@ """Tests for Java test path handling in FunctionOptimizer.""" from pathlib import Path -from unittest.mock import MagicMock, patch +from unittest.mock import MagicMock -import pytest - -from codeflash.languages.java.test_runner import ( - _extract_source_dirs_from_pom, - _path_to_class_name, -) +from codeflash.languages.java.test_runner import _extract_source_dirs_from_pom, _path_to_class_name class TestGetJavaSourcesRoot: @@ -16,15 +11,15 @@ class TestGetJavaSourcesRoot: def _create_mock_optimizer(self, tests_root: str): """Create a mock FunctionOptimizer with the given tests_root.""" - from codeflash.optimization.function_optimizer import FunctionOptimizer + from codeflash.languages.java.function_optimizer import JavaFunctionOptimizer # Create a minimal mock - mock_optimizer = MagicMock(spec=FunctionOptimizer) + mock_optimizer = MagicMock(spec=JavaFunctionOptimizer) mock_optimizer.test_cfg = MagicMock() mock_optimizer.test_cfg.tests_root = Path(tests_root) # Bind the actual method to the mock - mock_optimizer._get_java_sources_root = lambda: FunctionOptimizer._get_java_sources_root(mock_optimizer) + mock_optimizer._get_java_sources_root = lambda: JavaFunctionOptimizer._get_java_sources_root(mock_optimizer) return mock_optimizer @@ -97,15 +92,15 @@ class TestFixJavaTestPathsIntegration: def _create_mock_optimizer(self, tests_root: str): """Create a mock FunctionOptimizer with the given tests_root.""" - from 
codeflash.optimization.function_optimizer import FunctionOptimizer + from codeflash.languages.java.function_optimizer import JavaFunctionOptimizer - mock_optimizer = MagicMock(spec=FunctionOptimizer) + mock_optimizer = MagicMock(spec=JavaFunctionOptimizer) mock_optimizer.test_cfg = MagicMock() mock_optimizer.test_cfg.tests_root = Path(tests_root) # Bind the actual methods - mock_optimizer._get_java_sources_root = lambda: FunctionOptimizer._get_java_sources_root(mock_optimizer) - mock_optimizer._fix_java_test_paths = lambda behavior_source, perf_source, used_paths: FunctionOptimizer._fix_java_test_paths(mock_optimizer, behavior_source, perf_source, used_paths) + mock_optimizer._get_java_sources_root = lambda: JavaFunctionOptimizer._get_java_sources_root(mock_optimizer) + mock_optimizer._fix_java_test_paths = lambda behavior_source, perf_source, used_paths: JavaFunctionOptimizer._fix_java_test_paths(mock_optimizer, behavior_source, perf_source, used_paths) return mock_optimizer diff --git a/tests/test_languages/test_java/test_line_profiler_integration.py b/tests/test_languages/test_java/test_line_profiler_integration.py index 46662b2d5..fe3ef9cfd 100644 --- a/tests/test_languages/test_java/test_line_profiler_integration.py +++ b/tests/test_languages/test_java/test_line_profiler_integration.py @@ -484,7 +484,7 @@ def run_spin_timer_profiled(tmppath: Path, spin_durations_ns: list[int]) -> dict agent_arg = profiler.build_javaagent_arg(config_path) result = subprocess.run( - ["javac", str(java_file)], + ["javac", "--release", "11", str(java_file)], capture_output=True, text=True, cwd=str(tmppath), diff --git a/tests/test_languages/test_java/test_replacement.py b/tests/test_languages/test_java/test_replacement.py index f1424361a..ad4c4cd29 100644 --- a/tests/test_languages/test_java/test_replacement.py +++ b/tests/test_languages/test_java/test_replacement.py @@ -9,29 +9,20 @@ import pytest -from codeflash.languages.python.static_analysis.code_replacer import ( - 
replace_function_definitions_for_language, - replace_function_definitions_in_module, -) -from codeflash.models.function_types import FunctionParent -from codeflash.languages.base import Language -from codeflash.languages import current as language_current +from codeflash.languages.code_replacer import replace_function_definitions_for_language +from codeflash.languages.java.support import JavaSupport from codeflash.models.models import CodeStringsMarkdown @pytest.fixture -def java_language_context(): - """Set the current language to Java for the duration of the test.""" - original_language = language_current._current_language - language_current._current_language = Language.JAVA - yield - language_current._current_language = original_language +def java_support(): + return JavaSupport() class TestReplaceFunctionDefinitionsInModule: - """Tests for replace_function_definitions_in_module with Java.""" + """Tests for replace_function_definitions_for_language with Java (basic cases).""" - def test_replace_simple_method(self, tmp_path: Path, java_language_context): + def test_replace_simple_method(self, tmp_path: Path, java_support: JavaSupport): """Test replacing a simple method in a Java class.""" java_file = tmp_path / "Calculator.java" original_code = """public class Calculator { @@ -52,12 +43,12 @@ def test_replace_simple_method(self, tmp_path: Path, java_language_context): optimized_code = CodeStringsMarkdown.parse_markdown_code(optimized_markdown, expected_language="java") - result = replace_function_definitions_in_module( + result = replace_function_definitions_for_language( function_names=["add"], optimized_code=optimized_code, module_abspath=java_file, - preexisting_objects=set(), project_root_path=tmp_path, + lang_support=java_support, ) assert result is True @@ -70,7 +61,7 @@ def test_replace_simple_method(self, tmp_path: Path, java_language_context): """ assert new_code == expected - def test_replace_method_preserves_other_methods(self, tmp_path: Path, 
java_language_context): + def test_replace_method_preserves_other_methods(self, tmp_path: Path, java_support: JavaSupport): """Test that replacing one method preserves other methods.""" java_file = tmp_path / "Calculator.java" original_code = """public class Calculator { @@ -107,12 +98,12 @@ def test_replace_method_preserves_other_methods(self, tmp_path: Path, java_langu optimized_code = CodeStringsMarkdown.parse_markdown_code(optimized_markdown, expected_language="java") - result = replace_function_definitions_in_module( + result = replace_function_definitions_for_language( function_names=["add"], optimized_code=optimized_code, module_abspath=java_file, - preexisting_objects=set(), project_root_path=tmp_path, + lang_support=java_support, ) assert result is True @@ -133,7 +124,7 @@ def test_replace_method_preserves_other_methods(self, tmp_path: Path, java_langu """ assert new_code == expected - def test_replace_method_with_javadoc(self, tmp_path: Path, java_language_context): + def test_replace_method_with_javadoc(self, tmp_path: Path, java_support: JavaSupport): """Test replacing a method that has Javadoc comments.""" java_file = tmp_path / "MathUtils.java" original_code = """public class MathUtils { @@ -174,12 +165,12 @@ def test_replace_method_with_javadoc(self, tmp_path: Path, java_language_context optimized_code = CodeStringsMarkdown.parse_markdown_code(optimized_markdown, expected_language="java") - result = replace_function_definitions_in_module( + result = replace_function_definitions_for_language( function_names=["factorial"], optimized_code=optimized_code, module_abspath=java_file, - preexisting_objects=set(), project_root_path=tmp_path, + lang_support=java_support, ) assert result is True @@ -202,7 +193,7 @@ def test_replace_method_with_javadoc(self, tmp_path: Path, java_language_context """ assert new_code == expected - def test_no_change_when_code_identical(self, tmp_path: Path, java_language_context): + def test_no_change_when_code_identical(self, 
tmp_path: Path, java_support: JavaSupport): """Test that no change is made when optimized code is identical.""" java_file = tmp_path / "Identity.java" original_code = """public class Identity { @@ -223,12 +214,12 @@ def test_no_change_when_code_identical(self, tmp_path: Path, java_language_conte optimized_code = CodeStringsMarkdown.parse_markdown_code(optimized_markdown, expected_language="java") - result = replace_function_definitions_in_module( + result = replace_function_definitions_for_language( function_names=["getValue"], optimized_code=optimized_code, module_abspath=java_file, - preexisting_objects=set(), project_root_path=tmp_path, + lang_support=java_support, ) assert result is False @@ -239,7 +230,7 @@ def test_no_change_when_code_identical(self, tmp_path: Path, java_language_conte class TestReplaceFunctionDefinitionsForLanguage: """Tests for replace_function_definitions_for_language with Java.""" - def test_replace_static_method(self, tmp_path: Path): + def test_replace_static_method(self, tmp_path: Path, java_support: JavaSupport): """Test replacing a static method.""" java_file = tmp_path / "Utils.java" original_code = """public class Utils { @@ -265,6 +256,7 @@ def test_replace_static_method(self, tmp_path: Path): optimized_code=optimized_code, module_abspath=java_file, project_root_path=tmp_path, + lang_support=java_support, ) assert result is True @@ -277,7 +269,7 @@ def test_replace_static_method(self, tmp_path: Path): """ assert new_code == expected - def test_replace_method_with_annotations(self, tmp_path: Path): + def test_replace_method_with_annotations(self, tmp_path: Path, java_support: JavaSupport): """Test replacing a method with annotations.""" java_file = tmp_path / "Service.java" original_code = """public class Service { @@ -305,6 +297,7 @@ def test_replace_method_with_annotations(self, tmp_path: Path): optimized_code=optimized_code, module_abspath=java_file, project_root_path=tmp_path, + lang_support=java_support, ) assert result is 
True @@ -318,7 +311,7 @@ def test_replace_method_with_annotations(self, tmp_path: Path): """ assert new_code == expected - def test_replace_method_in_interface(self, tmp_path: Path): + def test_replace_method_in_interface(self, tmp_path: Path, java_support: JavaSupport): """Test replacing a default method in an interface.""" java_file = tmp_path / "Processor.java" original_code = """public interface Processor { @@ -344,6 +337,7 @@ def test_replace_method_in_interface(self, tmp_path: Path): optimized_code=optimized_code, module_abspath=java_file, project_root_path=tmp_path, + lang_support=java_support, ) assert result is True @@ -356,7 +350,7 @@ def test_replace_method_in_interface(self, tmp_path: Path): """ assert new_code == expected - def test_replace_method_in_enum(self, tmp_path: Path): + def test_replace_method_in_enum(self, tmp_path: Path, java_support: JavaSupport): """Test replacing a method in an enum.""" java_file = tmp_path / "Color.java" original_code = """public enum Color { @@ -386,6 +380,7 @@ def test_replace_method_in_enum(self, tmp_path: Path): optimized_code=optimized_code, module_abspath=java_file, project_root_path=tmp_path, + lang_support=java_support, ) assert result is True @@ -400,7 +395,7 @@ def test_replace_method_in_enum(self, tmp_path: Path): """ assert new_code == expected - def test_replace_generic_method(self, tmp_path: Path): + def test_replace_generic_method(self, tmp_path: Path, java_support: JavaSupport): """Test replacing a method with generics.""" java_file = tmp_path / "Container.java" original_code = """import java.util.List; @@ -440,6 +435,7 @@ def test_replace_generic_method(self, tmp_path: Path): optimized_code=optimized_code, module_abspath=java_file, project_root_path=tmp_path, + lang_support=java_support, ) assert result is True @@ -457,7 +453,7 @@ def test_replace_generic_method(self, tmp_path: Path): """ assert new_code == expected - def test_replace_method_with_throws(self, tmp_path: Path): + def 
test_replace_method_with_throws(self, tmp_path: Path, java_support: JavaSupport): """Test replacing a method with throws clause.""" java_file = tmp_path / "FileReader.java" original_code = """import java.io.IOException; @@ -491,6 +487,7 @@ def test_replace_method_with_throws(self, tmp_path: Path): optimized_code=optimized_code, module_abspath=java_file, project_root_path=tmp_path, + lang_support=java_support, ) assert result is True @@ -511,7 +508,7 @@ def test_replace_method_with_throws(self, tmp_path: Path): class TestRealWorldOptimizationScenarios: """Real-world optimization scenarios with complete valid Java code.""" - def test_optimize_string_concatenation(self, tmp_path: Path): + def test_optimize_string_concatenation(self, tmp_path: Path, java_support: JavaSupport): """Test optimizing string concatenation to StringBuilder.""" java_file = tmp_path / "StringJoiner.java" original_code = """public class StringJoiner { @@ -545,6 +542,7 @@ def test_optimize_string_concatenation(self, tmp_path: Path): optimized_code=optimized_code, module_abspath=java_file, project_root_path=tmp_path, + lang_support=java_support, ) assert result is True @@ -561,7 +559,7 @@ def test_optimize_string_concatenation(self, tmp_path: Path): """ assert new_code == expected - def test_optimize_list_iteration(self, tmp_path: Path): + def test_optimize_list_iteration(self, tmp_path: Path, java_support: JavaSupport): """Test optimizing list iteration with streams.""" java_file = tmp_path / "ListProcessor.java" original_code = """import java.util.List; @@ -595,6 +593,7 @@ def test_optimize_list_iteration(self, tmp_path: Path): optimized_code=optimized_code, module_abspath=java_file, project_root_path=tmp_path, + lang_support=java_support, ) assert result is True @@ -609,7 +608,7 @@ def test_optimize_list_iteration(self, tmp_path: Path): """ assert new_code == expected - def test_optimize_null_checks(self, tmp_path: Path): + def test_optimize_null_checks(self, tmp_path: Path, java_support: 
JavaSupport): """Test optimizing null checks with Objects utility.""" java_file = tmp_path / "NullChecker.java" original_code = """public class NullChecker { @@ -643,6 +642,7 @@ def test_optimize_null_checks(self, tmp_path: Path): optimized_code=optimized_code, module_abspath=java_file, project_root_path=tmp_path, + lang_support=java_support, ) assert result is True @@ -655,7 +655,7 @@ def test_optimize_null_checks(self, tmp_path: Path): """ assert new_code == expected - def test_optimize_collection_creation(self, tmp_path: Path): + def test_optimize_collection_creation(self, tmp_path: Path, java_support: JavaSupport): """Test optimizing collection creation with factory methods.""" java_file = tmp_path / "CollectionFactory.java" original_code = """import java.util.ArrayList; @@ -691,6 +691,7 @@ def test_optimize_collection_creation(self, tmp_path: Path): optimized_code=optimized_code, module_abspath=java_file, project_root_path=tmp_path, + lang_support=java_support, ) assert result is True @@ -710,7 +711,7 @@ def test_optimize_collection_creation(self, tmp_path: Path): class TestMultipleClassesAndMethods: """Tests for files with multiple classes or multiple methods being optimized.""" - def test_replace_method_in_first_class(self, tmp_path: Path): + def test_replace_method_in_first_class(self, tmp_path: Path, java_support: JavaSupport): """Test replacing a method in the first class when multiple classes exist.""" java_file = tmp_path / "MultiClass.java" original_code = """public class Calculator { @@ -748,6 +749,7 @@ class Helper {{ optimized_code=optimized_code, module_abspath=java_file, project_root_path=tmp_path, + lang_support=java_support, ) assert result is True @@ -766,7 +768,7 @@ class Helper { """ assert new_code == expected - def test_replace_multiple_methods(self, tmp_path: Path): + def test_replace_multiple_methods(self, tmp_path: Path, java_support: JavaSupport): """Test replacing multiple methods in the same class.""" java_file = tmp_path / 
"MathOps.java" original_code = """public class MathOps { @@ -808,6 +810,7 @@ def test_replace_multiple_methods(self, tmp_path: Path): optimized_code=optimized_code, module_abspath=java_file, project_root_path=tmp_path, + lang_support=java_support, ) assert result is True @@ -832,7 +835,7 @@ def test_replace_multiple_methods(self, tmp_path: Path): class TestNestedClasses: """Tests for nested class scenarios.""" - def test_replace_method_in_nested_class(self, tmp_path: Path): + def test_replace_method_in_nested_class(self, tmp_path: Path, java_support: JavaSupport): """Test replacing a method in a nested class.""" java_file = tmp_path / "Outer.java" original_code = """public class Outer { @@ -870,6 +873,7 @@ def test_replace_method_in_nested_class(self, tmp_path: Path): optimized_code=optimized_code, module_abspath=java_file, project_root_path=tmp_path, + lang_support=java_support, ) assert result is True @@ -892,7 +896,7 @@ def test_replace_method_in_nested_class(self, tmp_path: Path): class TestPreservesStructure: """Tests that verify code structure is preserved during replacement.""" - def test_preserves_fields_and_constructors(self, tmp_path: Path): + def test_preserves_fields_and_constructors(self, tmp_path: Path, java_support: JavaSupport): """Test that fields and constructors are preserved.""" java_file = tmp_path / "Counter.java" original_code = """public class Counter { @@ -937,6 +941,7 @@ def test_preserves_fields_and_constructors(self, tmp_path: Path): optimized_code=optimized_code, module_abspath=java_file, project_root_path=tmp_path, + lang_support=java_support, ) assert result is True @@ -961,7 +966,7 @@ def test_preserves_fields_and_constructors(self, tmp_path: Path): class TestEdgeCases: """Edge cases and error handling tests.""" - def test_empty_optimized_code_returns_false(self, tmp_path: Path): + def test_empty_optimized_code_returns_false(self, tmp_path: Path, java_support: JavaSupport): """Test that empty optimized code returns False.""" 
java_file = tmp_path / "Empty.java" original_code = """public class Empty { @@ -982,13 +987,14 @@ def test_empty_optimized_code_returns_false(self, tmp_path: Path): optimized_code=optimized_code, module_abspath=java_file, project_root_path=tmp_path, + lang_support=java_support, ) assert result is False new_code = java_file.read_text(encoding="utf-8") assert new_code == original_code - def test_function_not_found_returns_false(self, tmp_path: Path): + def test_function_not_found_returns_false(self, tmp_path: Path, java_support: JavaSupport): """Test that function not found returns False.""" java_file = tmp_path / "NotFound.java" original_code = """public class NotFound { @@ -1014,11 +1020,12 @@ def test_function_not_found_returns_false(self, tmp_path: Path): optimized_code=optimized_code, module_abspath=java_file, project_root_path=tmp_path, + lang_support=java_support, ) assert result is False - def test_unicode_in_code(self, tmp_path: Path): + def test_unicode_in_code(self, tmp_path: Path, java_support: JavaSupport): """Test handling of unicode characters in code.""" java_file = tmp_path / "Unicode.java" original_code = """public class Unicode { @@ -1044,6 +1051,7 @@ def test_unicode_in_code(self, tmp_path: Path): optimized_code=optimized_code, module_abspath=java_file, project_root_path=tmp_path, + lang_support=java_support, ) assert result is True @@ -1060,7 +1068,7 @@ def test_unicode_in_code(self, tmp_path: Path): class TestOptimizationWithStaticFields: """Tests for optimizations that add new static fields to the class.""" - def test_add_static_lookup_table(self, tmp_path: Path): + def test_add_static_lookup_table(self, tmp_path: Path, java_support: JavaSupport): """Test optimization that adds a static lookup table.""" java_file = tmp_path / "Buffer.java" original_code = """public class Buffer { @@ -1099,6 +1107,7 @@ def test_add_static_lookup_table(self, tmp_path: Path): optimized_code=optimized_code, module_abspath=java_file, project_root_path=tmp_path, + 
lang_support=java_support, ) assert result is True @@ -1119,7 +1128,7 @@ def test_add_static_lookup_table(self, tmp_path: Path): """ assert new_code == expected - def test_add_precomputed_array(self, tmp_path: Path): + def test_add_precomputed_array(self, tmp_path: Path, java_support: JavaSupport): """Test optimization that adds a precomputed static array.""" java_file = tmp_path / "Encoder.java" original_code = """public class Encoder { @@ -1156,6 +1165,7 @@ def test_add_precomputed_array(self, tmp_path: Path): optimized_code=optimized_code, module_abspath=java_file, project_root_path=tmp_path, + lang_support=java_support, ) assert result is True @@ -1178,7 +1188,7 @@ def test_add_precomputed_array(self, tmp_path: Path): """ assert new_code == expected - def test_preserve_existing_fields(self, tmp_path: Path): + def test_preserve_existing_fields(self, tmp_path: Path, java_support: JavaSupport): """Test that existing fields are preserved when adding new ones.""" java_file = tmp_path / "Calculator.java" original_code = """public class Calculator { @@ -1229,6 +1239,7 @@ def test_preserve_existing_fields(self, tmp_path: Path): optimized_code=optimized_code, module_abspath=java_file, project_root_path=tmp_path, + lang_support=java_support, ) assert result is True @@ -1263,7 +1274,7 @@ def test_preserve_existing_fields(self, tmp_path: Path): class TestOptimizationWithHelperMethods: """Tests for optimizations that add new helper methods.""" - def test_add_private_helper_method(self, tmp_path: Path): + def test_add_private_helper_method(self, tmp_path: Path, java_support: JavaSupport): """Test optimization that adds a private helper method.""" java_file = tmp_path / "StringUtils.java" original_code = """public class StringUtils { @@ -1310,6 +1321,7 @@ def test_add_private_helper_method(self, tmp_path: Path): optimized_code=optimized_code, module_abspath=java_file, project_root_path=tmp_path, + lang_support=java_support, ) assert result is True @@ -1332,7 +1344,7 @@ def 
test_add_private_helper_method(self, tmp_path: Path): """ assert new_code == expected - def test_add_multiple_helpers(self, tmp_path: Path): + def test_add_multiple_helpers(self, tmp_path: Path, java_support: JavaSupport): """Test optimization that adds multiple helper methods.""" java_file = tmp_path / "MathUtils.java" original_code = """public class MathUtils { @@ -1372,6 +1384,7 @@ def test_add_multiple_helpers(self, tmp_path: Path): optimized_code=optimized_code, module_abspath=java_file, project_root_path=tmp_path, + lang_support=java_support, ) assert result is True @@ -1396,7 +1409,7 @@ def test_add_multiple_helpers(self, tmp_path: Path): class TestOptimizationWithFieldsAndHelpers: """Tests for optimizations that add both static fields and helper methods.""" - def test_add_field_and_helper_together(self, tmp_path: Path): + def test_add_field_and_helper_together(self, tmp_path: Path, java_support: JavaSupport): """Test optimization that adds both a static field and helper method.""" java_file = tmp_path / "Fibonacci.java" original_code = """public class Fibonacci { @@ -1438,6 +1451,7 @@ def test_add_field_and_helper_together(self, tmp_path: Path): optimized_code=optimized_code, module_abspath=java_file, project_root_path=tmp_path, + lang_support=java_support, ) assert result is True @@ -1464,7 +1478,7 @@ def test_add_field_and_helper_together(self, tmp_path: Path): """ assert new_code == expected - def test_real_world_bytes_to_hex_optimization(self, tmp_path: Path): + def test_real_world_bytes_to_hex_optimization(self, tmp_path: Path, java_support: JavaSupport): """Test the actual bytesToHexString optimization pattern from aerospike.""" java_file = tmp_path / "Buffer.java" original_code = """package com.example; @@ -1523,6 +1537,7 @@ def test_real_world_bytes_to_hex_optimization(self, tmp_path: Path): optimized_code=optimized_code, module_abspath=java_file, project_root_path=tmp_path, + lang_support=java_support, ) assert result is True @@ -1560,7 +1575,7 @@ 
def test_real_world_bytes_to_hex_optimization(self, tmp_path: Path): class TestOverloadedMethods: """Tests for handling overloaded methods (same name, different signatures).""" - def test_replace_specific_overload_by_line_number(self, tmp_path: Path): + def test_replace_specific_overload_by_line_number(self, tmp_path: Path, java_support: JavaSupport): """Test replacing a specific overload when multiple exist.""" java_file = tmp_path / "Buffer.java" original_code = """public final class Buffer { @@ -1606,7 +1621,7 @@ def test_replace_specific_overload_by_line_number(self, tmp_path: Path): optimized_code = CodeStringsMarkdown.parse_markdown_code(optimized_markdown, expected_language="java") # Create FunctionToOptimize with line info for the 3-arg version (lines 13-18) - from codeflash.discovery.functions_to_optimize import FunctionToOptimize, FunctionParent + from codeflash.discovery.functions_to_optimize import FunctionParent, FunctionToOptimize function_to_optimize = FunctionToOptimize( function_name="bytesToHexString", @@ -1623,6 +1638,7 @@ def test_replace_specific_overload_by_line_number(self, tmp_path: Path): optimized_code=optimized_code, module_abspath=java_file, project_root_path=tmp_path, + lang_support=java_support, function_to_optimize=function_to_optimize, ) @@ -1668,7 +1684,7 @@ class TestWrongMethodNameGeneration: source file unchanged. """ - def test_standalone_wrong_method_name_leaves_source_unchanged(self, tmp_path): + def test_standalone_wrong_method_name_leaves_source_unchanged(self, tmp_path, java_support): """Standalone generated method with wrong name must not replace the target. Reproduces the Unpacker.unpackObjectMap bug: the LLM was asked to optimise @@ -1676,7 +1692,7 @@ def test_standalone_wrong_method_name_leaves_source_unchanged(self, tmp_path): Applying that would create a duplicate ``unpackMap`` and delete ``unpackObjectMap``, causing compilation failures. 
""" - from codeflash.discovery.functions_to_optimize import FunctionToOptimize + from codeflash.discovery.functions_to_optimize import FunctionParent, FunctionToOptimize java_file = tmp_path / "Unpacker.java" original_code = """\ @@ -1712,7 +1728,7 @@ def test_standalone_wrong_method_name_leaves_source_unchanged(self, tmp_path): is_method=True, ) - result = replace_function_definitions_for_language( + result = java_support.replace_function_definitions( function_names=["unpackObjectMap"], optimized_code=optimized_code, module_abspath=java_file, @@ -1724,14 +1740,14 @@ def test_standalone_wrong_method_name_leaves_source_unchanged(self, tmp_path): assert result is False assert java_file.read_text(encoding="utf-8") == original_code - def test_class_wrapper_with_wrong_target_method_leaves_source_unchanged(self, tmp_path): + def test_class_wrapper_with_wrong_target_method_leaves_source_unchanged(self, tmp_path, java_support): """Class-wrapped generated code missing the target method must not modify source. Reproduces the Command.estimateKeySize bug: the LLM generated a class that contained only ``sizeTxn`` (a helper) and did not include ``estimateKeySize`` (the target). Applying it would duplicate ``sizeTxn`` in the source. """ - from codeflash.discovery.functions_to_optimize import FunctionToOptimize + from codeflash.discovery.functions_to_optimize import FunctionParent, FunctionToOptimize java_file = tmp_path / "Command.java" original_code = """\ @@ -1769,7 +1785,7 @@ def test_class_wrapper_with_wrong_target_method_leaves_source_unchanged(self, tm is_method=True, ) - result = replace_function_definitions_for_language( + result = java_support.replace_function_definitions( function_names=["estimateKeySize"], optimized_code=optimized_code, module_abspath=java_file, @@ -1793,7 +1809,7 @@ class TestAnonymousInnerClassMethods: enclosing method scope. 
""" - def test_anonymous_iterator_methods_not_hoisted_to_class(self, tmp_path): + def test_anonymous_iterator_methods_not_hoisted_to_class(self, tmp_path, java_support): """Reproduces the LuaMap.keySetIterator bug. The LLM optimised ``keySetIterator`` by returning an anonymous @@ -1801,7 +1817,7 @@ def test_anonymous_iterator_methods_not_hoisted_to_class(self, tmp_path): Those three methods must remain inside the anonymous class body and must NOT be added as top-level members of the outer class. """ - from codeflash.discovery.functions_to_optimize import FunctionToOptimize + from codeflash.discovery.functions_to_optimize import FunctionParent, FunctionToOptimize java_file = tmp_path / "LuaMap.java" original_code = """\ @@ -1878,7 +1894,7 @@ def test_anonymous_iterator_methods_not_hoisted_to_class(self, tmp_path): is_method=True, ) - result = replace_function_definitions_for_language( + result = java_support.replace_function_definitions( function_names=["keySetIterator"], optimized_code=optimized_code, module_abspath=java_file, diff --git a/tests/test_languages/test_java/test_run_and_parse.py b/tests/test_languages/test_java/test_run_and_parse.py index 35c644a48..e51ac11e6 100644 --- a/tests/test_languages/test_java/test_run_and_parse.py +++ b/tests/test_languages/test_java/test_run_and_parse.py @@ -278,8 +278,8 @@ def test_behavior_single_test_method(self, java_project): test_env=test_env, test_files=func_optimizer.test_files, optimization_iteration=0, - min_outer_loops=1, - max_outer_loops=2, + pytest_min_loops=1, + pytest_max_loops=2, testing_time=0.1, ) @@ -358,8 +358,8 @@ def test_behavior_multiple_test_methods(self, java_project): test_env=test_env, test_files=func_optimizer.test_files, optimization_iteration=0, - min_outer_loops=1, - max_outer_loops=2, + pytest_min_loops=1, + pytest_max_loops=2, testing_time=0.1, ) @@ -436,7 +436,7 @@ def _setup_precise_waiter_project(self, java_project): (src_dir / "PreciseWaiter.java").write_text(PRECISE_WAITER_JAVA, 
encoding="utf-8") return project_root, src_dir, test_dir - def _instrument_and_run(self, project_root, src_dir, test_dir, test_source, test_filename, inner_iterations=2): + def _instrument_and_run(self, project_root, src_dir, test_dir, test_source, test_filename): """Instrument a performance test and run it, returning test_results.""" test_file = test_dir / test_filename test_file.write_text(test_source, encoding="utf-8") @@ -474,15 +474,15 @@ def _instrument_and_run(self, project_root, src_dir, test_dir, test_source, test test_env = os.environ.copy() test_env["CODEFLASH_TEST_ITERATION"] = "0" + test_env["CODEFLASH_INNER_ITERATIONS"] = "2" test_results, _ = func_optimizer.run_and_parse_tests( testing_type=TestingMode.PERFORMANCE, test_env=test_env, test_files=func_optimizer.test_files, optimization_iteration=0, - min_outer_loops=2, - max_outer_loops=2, - inner_iterations=inner_iterations, + pytest_min_loops=2, + pytest_max_loops=2, testing_time=0.0, ) return test_results @@ -498,7 +498,6 @@ def test_performance_inner_loop_count_and_timing(self, java_project): test_dir, self.PRECISE_WAITER_TEST, "PreciseWaiterTest.java", - inner_iterations=2, ) # 2 outer loops × 2 inner iterations = 4 total results @@ -590,7 +589,6 @@ def test_performance_multiple_test_methods_inner_loop(self, java_project): test_dir, multi_test_source, "PreciseWaiterMultiTest.java", - inner_iterations=2, ) # 2 test methods × 2 outer loops × 2 inner iterations = 8 total results diff --git a/tests/test_languages/test_java/test_support.py b/tests/test_languages/test_java/test_support.py index 0d646eb9c..b5cba5ab4 100644 --- a/tests/test_languages/test_java/test_support.py +++ b/tests/test_languages/test_java/test_support.py @@ -5,7 +5,7 @@ import pytest from codeflash.languages.base import Language, LanguageSupport -from codeflash.languages.java.support import JavaSupport, get_java_support +from codeflash.languages.java.support import get_java_support class TestJavaSupportProtocol: @@ -56,7 +56,8 @@ 
def test_discover_functions(self, support, tmp_path: Path): } """) - functions = support.discover_functions(java_file) + source = java_file.read_text(encoding="utf-8") + functions = support.discover_functions(source, java_file) assert len(functions) == 1 assert functions[0].function_name == "add" assert functions[0].language == Language.JAVA @@ -130,5 +131,6 @@ def test_discover_functions_from_fixture(self, support, java_fixture_path: Path) if not calculator_file.exists(): pytest.skip("Calculator.java not found") - functions = support.discover_functions(calculator_file) + source = calculator_file.read_text(encoding="utf-8") + functions = support.discover_functions(source, calculator_file) assert len(functions) > 0 diff --git a/tests/test_languages/test_java_e2e.py b/tests/test_languages/test_java_e2e.py index c01865048..68ac24bce 100644 --- a/tests/test_languages/test_java_e2e.py +++ b/tests/test_languages/test_java_e2e.py @@ -89,12 +89,11 @@ def java_project_dir(self): def test_extract_code_context_for_java(self, java_project_dir): """Test extracting code context for a Java method.""" - from codeflash.languages.python.context.code_context_extractor import get_code_optimization_context - from codeflash.languages import current as lang_current + from codeflash.languages import get_language_support from codeflash.languages.base import Language + from codeflash.languages.java.function_optimizer import JavaFunctionOptimizer - # Force set language to Java for proper context extraction routing - lang_current._current_language = Language.JAVA + lang_support = get_language_support(Language.JAVA) sort_file = java_project_dir / "src" / "main" / "java" / "com" / "example" / "BubbleSort.java" if not sort_file.exists(): @@ -107,8 +106,11 @@ def test_extract_code_context_for_java(self, java_project_dir): bubble_func = next((f for f in func_list if f.function_name == "bubbleSort"), None) assert bubble_func is not None - # Extract code context - context = 
get_code_optimization_context(bubble_func, java_project_dir) + # Extract code context via Java language support + code_context = lang_support.extract_code_context(bubble_func, java_project_dir, java_project_dir) + context = JavaFunctionOptimizer._build_optimization_context( + code_context, bubble_func.file_path, bubble_func.language, java_project_dir + ) # Verify context structure assert context.read_writable_code is not None diff --git a/tests/test_languages/test_javascript_e2e.py b/tests/test_languages/test_javascript_e2e.py index 7b7e8503b..c5bb722bc 100644 --- a/tests/test_languages/test_javascript_e2e.py +++ b/tests/test_languages/test_javascript_e2e.py @@ -107,10 +107,8 @@ def test_extract_code_context_for_javascript(self, js_project_dir): """Test extracting code context for a JavaScript function.""" skip_if_js_not_supported() from codeflash.discovery.functions_to_optimize import find_all_functions_in_file - from codeflash.languages import current as lang_current - from codeflash.languages.python.context.code_context_extractor import get_code_optimization_context - - lang_current._current_language = Language.JAVASCRIPT + from codeflash.languages import get_language_support + from codeflash.languages.javascript.function_optimizer import JavaScriptFunctionOptimizer fib_file = js_project_dir / "fibonacci.js" if not fib_file.exists(): @@ -122,7 +120,11 @@ def test_extract_code_context_for_javascript(self, js_project_dir): fib_func = next((f for f in func_list if f.function_name == "fibonacci"), None) assert fib_func is not None - context = get_code_optimization_context(fib_func, js_project_dir) + js_support = get_language_support(Language.JAVASCRIPT) + code_context = js_support.extract_code_context(fib_func, js_project_dir, js_project_dir) + context = JavaScriptFunctionOptimizer._build_optimization_context( + code_context, fib_file, "javascript", js_project_dir + ) assert context.read_writable_code is not None assert context.read_writable_code.language == 
"javascript" diff --git a/tests/test_languages/test_javascript_optimization_flow.py b/tests/test_languages/test_javascript_optimization_flow.py index 89631565b..22c2ab6bc 100644 --- a/tests/test_languages/test_javascript_optimization_flow.py +++ b/tests/test_languages/test_javascript_optimization_flow.py @@ -71,10 +71,8 @@ def test_code_context_preserves_language(self, tmp_path): """Verify language is preserved in code context extraction.""" skip_if_js_not_supported() from codeflash.discovery.functions_to_optimize import find_all_functions_in_file - from codeflash.languages import current as lang_current - from codeflash.languages.python.context.code_context_extractor import get_code_optimization_context - - lang_current._current_language = Language.TYPESCRIPT + from codeflash.languages import get_language_support + from codeflash.languages.javascript.function_optimizer import JavaScriptFunctionOptimizer ts_file = tmp_path / "utils.ts" ts_file.write_text(""" @@ -86,7 +84,11 @@ def test_code_context_preserves_language(self, tmp_path): functions = find_all_functions_in_file(ts_file) func = functions[ts_file][0] - context = get_code_optimization_context(func, tmp_path) + ts_support = get_language_support(Language.TYPESCRIPT) + code_context = ts_support.extract_code_context(func, tmp_path, tmp_path) + context = JavaScriptFunctionOptimizer._build_optimization_context( + code_context, ts_file, "typescript", tmp_path + ) assert context.read_writable_code is not None assert context.read_writable_code.language == "typescript" @@ -373,10 +375,7 @@ def test_get_code_optimization_context_javascript(self, js_project): """Test get_code_optimization_context for JavaScript.""" skip_if_js_not_supported() from codeflash.discovery.functions_to_optimize import find_all_functions_in_file - from codeflash.languages import current as lang_current - from codeflash.optimization.function_optimizer import FunctionOptimizer - - lang_current._current_language = Language.JAVASCRIPT + from 
codeflash.languages.javascript.function_optimizer import JavaScriptFunctionOptimizer src_file = js_project / "utils.js" functions = find_all_functions_in_file(src_file) @@ -398,7 +397,7 @@ def test_get_code_optimization_context_javascript(self, js_project): pytest_cmd="jest", ) - optimizer = FunctionOptimizer( + optimizer = JavaScriptFunctionOptimizer( function_to_optimize=func_to_optimize, test_cfg=test_config, aiservice_client=MagicMock(), @@ -415,10 +414,7 @@ def test_get_code_optimization_context_typescript(self, ts_project): """Test get_code_optimization_context for TypeScript.""" skip_if_js_not_supported() from codeflash.discovery.functions_to_optimize import find_all_functions_in_file - from codeflash.languages import current as lang_current - from codeflash.optimization.function_optimizer import FunctionOptimizer - - lang_current._current_language = Language.TYPESCRIPT + from codeflash.languages.javascript.function_optimizer import JavaScriptFunctionOptimizer src_file = ts_project / "utils.ts" functions = find_all_functions_in_file(src_file) @@ -440,7 +436,7 @@ def test_get_code_optimization_context_typescript(self, ts_project): pytest_cmd="vitest", ) - optimizer = FunctionOptimizer( + optimizer = JavaScriptFunctionOptimizer( function_to_optimize=func_to_optimize, test_cfg=test_config, aiservice_client=MagicMock(), @@ -461,10 +457,7 @@ def test_helper_functions_have_correct_language_javascript(self, tmp_path): """Verify helper functions have language='javascript' for .js files.""" skip_if_js_not_supported() from codeflash.discovery.functions_to_optimize import find_all_functions_in_file - from codeflash.languages import current as lang_current - from codeflash.optimization.function_optimizer import FunctionOptimizer - - lang_current._current_language = Language.JAVASCRIPT + from codeflash.languages.javascript.function_optimizer import JavaScriptFunctionOptimizer # Create a file with helper functions src_file = tmp_path / "main.js" @@ -499,7 +492,7 @@ def 
test_helper_functions_have_correct_language_javascript(self, tmp_path): pytest_cmd="jest", ) - optimizer = FunctionOptimizer( + optimizer = JavaScriptFunctionOptimizer( function_to_optimize=func_to_optimize, test_cfg=test_config, aiservice_client=MagicMock(), @@ -515,10 +508,7 @@ def test_helper_functions_have_correct_language_typescript(self, tmp_path): """Verify helper functions have language='typescript' for .ts files.""" skip_if_js_not_supported() from codeflash.discovery.functions_to_optimize import find_all_functions_in_file - from codeflash.languages import current as lang_current - from codeflash.optimization.function_optimizer import FunctionOptimizer - - lang_current._current_language = Language.TYPESCRIPT + from codeflash.languages.javascript.function_optimizer import JavaScriptFunctionOptimizer # Create a file with helper functions src_file = tmp_path / "main.ts" @@ -551,7 +541,7 @@ def test_helper_functions_have_correct_language_typescript(self, tmp_path): pytest_cmd="vitest", ) - optimizer = FunctionOptimizer( + optimizer = JavaScriptFunctionOptimizer( function_to_optimize=func_to_optimize, test_cfg=test_config, aiservice_client=MagicMock(), diff --git a/tests/test_languages/test_javascript_run_and_parse.py b/tests/test_languages/test_javascript_run_and_parse.py index 4222b001c..3781cc637 100644 --- a/tests/test_languages/test_javascript_run_and_parse.py +++ b/tests/test_languages/test_javascript_run_and_parse.py @@ -16,8 +16,6 @@ Tests will be skipped if dependencies are not available. 
""" -import os -import shutil import subprocess from pathlib import Path from unittest.mock import MagicMock @@ -26,7 +24,7 @@ from codeflash.discovery.functions_to_optimize import FunctionToOptimize from codeflash.languages.base import Language -from codeflash.models.models import FunctionParent, TestFile, TestFiles, TestType, TestingMode +from codeflash.models.models import FunctionParent from codeflash.verification.verification_utils import TestConfig @@ -58,13 +56,7 @@ def install_dependencies(project_dir: Path) -> bool: if has_node_modules(project_dir): return True try: - result = subprocess.run( - ["npm", "install"], - cwd=project_dir, - capture_output=True, - text=True, - timeout=120 - ) + result = subprocess.run(["npm", "install"], cwd=project_dir, capture_output=True, text=True, timeout=120) return result.returncode == 0 except Exception: return False @@ -82,6 +74,7 @@ def skip_if_js_not_supported(): """Skip test if JavaScript/TypeScript languages are not supported.""" try: from codeflash.languages import get_language_support + get_language_support(Language.JAVASCRIPT) except Exception as e: pytest.skip(f"JavaScript/TypeScript language support not available: {e}") @@ -157,8 +150,8 @@ def test_instrument_javascript_test_file(self, js_project_dir): """Test that JavaScript test instrumentation module can be imported.""" skip_if_js_not_supported() from codeflash.languages import get_language_support + # Verify the instrumentation module can be imported - from codeflash.languages.javascript.instrument import inject_profiling_into_existing_js_test # Get JavaScript support js_support = get_language_support(Language.JAVASCRIPT) @@ -272,8 +265,8 @@ def test_instrument_typescript_test_file(self, ts_project_dir): """Test that TypeScript test instrumentation module can be imported.""" skip_if_js_not_supported() from codeflash.languages import get_language_support + # Verify the instrumentation module can be imported - from codeflash.languages.javascript.instrument 
import inject_profiling_into_existing_js_test test_file = ts_project_dir / "tests" / "math.test.ts" @@ -356,10 +349,7 @@ def test_function_optimizer_run_and_parse_typescript(self, vitest_project): """ skip_if_js_not_supported() from codeflash.discovery.functions_to_optimize import find_all_functions_in_file - from codeflash.languages import current as lang_current - from codeflash.optimization.function_optimizer import FunctionOptimizer - - lang_current._current_language = Language.TYPESCRIPT + from codeflash.languages.javascript.function_optimizer import JavaScriptFunctionOptimizer # Find the fibonacci function fib_file = vitest_project / "fibonacci.ts" @@ -389,10 +379,8 @@ def test_function_optimizer_run_and_parse_typescript(self, vitest_project): ) # Create optimizer - func_optimizer = FunctionOptimizer( - function_to_optimize=func, - test_cfg=test_config, - aiservice_client=MagicMock(), + func_optimizer = JavaScriptFunctionOptimizer( + function_to_optimize=func, test_cfg=test_config, aiservice_client=MagicMock() ) # Get code context - this should work @@ -419,8 +407,8 @@ def test_timing_marker_format(self): # The marker format used by codeflash for JavaScript # Start marker: !$######{tag}######$! # End marker: !######{tag}:{duration}######! - start_pattern = r'!\$######(.+?)######\$!' - end_pattern = r'!######(.+?):(\d+)######!' + start_pattern = r"!\$######(.+?)######\$!" + end_pattern = r"!######(.+?):(\d+)######!" start_marker = "!$######test/math.test.ts:TestMath.test_add:add:1:0_0######$!" end_marker = "!######test/math.test.ts:TestMath.test_add:add:1:0_0:12345######!" 
@@ -472,6 +460,7 @@ def test_parse_vitest_junit_xml(self, tmp_path): # Parse the XML import xml.etree.ElementTree as ET + tree = ET.parse(junit_xml) root = tree.getroot() @@ -504,6 +493,7 @@ def test_parse_jest_junit_xml(self, tmp_path): # Parse the XML import xml.etree.ElementTree as ET + tree = ET.parse(junit_xml) root = tree.getroot() diff --git a/tests/test_languages/test_javascript_support.py b/tests/test_languages/test_javascript_support.py index 8a7f9afe1..800e01a29 100644 --- a/tests/test_languages/test_javascript_support.py +++ b/tests/test_languages/test_javascript_support.py @@ -52,7 +52,7 @@ def test_discover_simple_function(self, js_support): """) f.flush() - functions = js_support.discover_functions(Path(f.name)) + functions = js_support.discover_functions(Path(f.name).read_text(encoding="utf-8"), Path(f.name)) assert len(functions) == 1 assert functions[0].function_name == "add" @@ -76,7 +76,7 @@ def test_discover_multiple_functions(self, js_support): """) f.flush() - functions = js_support.discover_functions(Path(f.name)) + functions = js_support.discover_functions(Path(f.name).read_text(encoding="utf-8"), Path(f.name)) assert len(functions) == 3 names = {func.function_name for func in functions} @@ -94,7 +94,7 @@ def test_discover_arrow_function(self, js_support): """) f.flush() - functions = js_support.discover_functions(Path(f.name)) + functions = js_support.discover_functions(Path(f.name).read_text(encoding="utf-8"), Path(f.name)) assert len(functions) == 2 names = {func.function_name for func in functions} @@ -114,7 +114,7 @@ def test_discover_function_without_return_excluded(self, js_support): """) f.flush() - functions = js_support.discover_functions(Path(f.name)) + functions = js_support.discover_functions(Path(f.name).read_text(encoding="utf-8"), Path(f.name)) # Only the function with return should be discovered assert len(functions) == 1 @@ -136,7 +136,7 @@ def test_discover_class_methods(self, js_support): """) f.flush() - functions = 
js_support.discover_functions(Path(f.name)) + functions = js_support.discover_functions(Path(f.name).read_text(encoding="utf-8"), Path(f.name)) assert len(functions) == 2 for func in functions: @@ -157,7 +157,7 @@ def test_discover_async_functions(self, js_support): """) f.flush() - functions = js_support.discover_functions(Path(f.name)) + functions = js_support.discover_functions(Path(f.name).read_text(encoding="utf-8"), Path(f.name)) assert len(functions) == 2 @@ -182,7 +182,7 @@ def test_discover_with_filter_exclude_async(self, js_support): f.flush() criteria = FunctionFilterCriteria(include_async=False) - functions = js_support.discover_functions(Path(f.name), criteria) + functions = js_support.discover_functions(Path(f.name).read_text(encoding="utf-8"), Path(f.name), criteria) assert len(functions) == 1 assert functions[0].function_name == "syncFunc" @@ -204,7 +204,7 @@ def test_discover_with_filter_exclude_methods(self, js_support): f.flush() criteria = FunctionFilterCriteria(include_methods=False) - functions = js_support.discover_functions(Path(f.name), criteria) + functions = js_support.discover_functions(Path(f.name).read_text(encoding="utf-8"), Path(f.name), criteria) assert len(functions) == 1 assert functions[0].function_name == "standalone" @@ -224,7 +224,7 @@ def test_discover_line_numbers(self, js_support): """) f.flush() - functions = js_support.discover_functions(Path(f.name)) + functions = js_support.discover_functions(Path(f.name).read_text(encoding="utf-8"), Path(f.name)) func1 = next(f for f in functions if f.function_name == "func1") func2 = next(f for f in functions if f.function_name == "func2") @@ -246,7 +246,7 @@ def test_discover_generator_function(self, js_support): """) f.flush() - functions = js_support.discover_functions(Path(f.name)) + functions = js_support.discover_functions(Path(f.name).read_text(encoding="utf-8"), Path(f.name)) assert len(functions) == 1 assert functions[0].function_name == "numberGenerator" @@ -257,14 +257,14 
@@ def test_discover_invalid_file_returns_empty(self, js_support): f.write("this is not valid javascript {{{{") f.flush() - functions = js_support.discover_functions(Path(f.name)) + functions = js_support.discover_functions(Path(f.name).read_text(encoding="utf-8"), Path(f.name)) # Tree-sitter is lenient, so it may still parse partial code # The important thing is it doesn't crash assert isinstance(functions, list) def test_discover_nonexistent_file_returns_empty(self, js_support): """Test that nonexistent file returns empty list.""" - functions = js_support.discover_functions(Path("/nonexistent/file.js")) + functions = js_support.discover_functions("", Path("/nonexistent/file.js")) assert functions == [] def test_discover_function_expression(self, js_support): @@ -277,7 +277,7 @@ def test_discover_function_expression(self, js_support): """) f.flush() - functions = js_support.discover_functions(Path(f.name)) + functions = js_support.discover_functions(Path(f.name).read_text(encoding="utf-8"), Path(f.name)) assert len(functions) == 1 assert functions[0].function_name == "add" @@ -296,7 +296,7 @@ def test_discover_immediately_invoked_function_excluded(self, js_support): """) f.flush() - functions = js_support.discover_functions(Path(f.name)) + functions = js_support.discover_functions(Path(f.name).read_text(encoding="utf-8"), Path(f.name)) # Only the named function should be discovered assert len(functions) == 1 @@ -507,7 +507,7 @@ def test_extract_with_helper(self, js_support): file_path = Path(f.name) # First discover functions to get accurate line numbers - functions = js_support.discover_functions(file_path) + functions = js_support.discover_functions(file_path.read_text(encoding="utf-8"), file_path) main_func = next(f for f in functions if f.function_name == "main") context = js_support.extract_code_context(main_func, file_path.parent, file_path.parent) @@ -535,7 +535,7 @@ def test_discover_and_replace_workflow(self, js_support): file_path = Path(f.name) # 
Discover - functions = js_support.discover_functions(file_path) + functions = js_support.discover_functions(file_path.read_text(encoding="utf-8"), file_path) assert len(functions) == 1 func = functions[0] assert func.function_name == "fibonacci" @@ -584,7 +584,7 @@ def test_multiple_classes_and_functions(self, js_support): f.flush() file_path = Path(f.name) - functions = js_support.discover_functions(file_path) + functions = js_support.discover_functions(file_path.read_text(encoding="utf-8"), file_path) # Should find 4 functions assert len(functions) == 4 @@ -623,7 +623,7 @@ def test_jsx_file(self, js_support): f.flush() file_path = Path(f.name) - functions = js_support.discover_functions(file_path) + functions = js_support.discover_functions(file_path.read_text(encoding="utf-8"), file_path) # Should find both components names = {f.function_name for f in functions} @@ -653,7 +653,7 @@ def test_find_jest_tests(self, js_support): f.flush() file_path = Path(f.name) - source = file_path.read_text() + source = file_path.read_text(encoding="utf-8") from codeflash.languages.javascript.treesitter import get_analyzer_for_file analyzer = get_analyzer_for_file(file_path) @@ -687,7 +687,7 @@ def test_extract_class_method_wraps_in_class(self, js_support): file_path = Path(f.name) # Discover the method - functions = js_support.discover_functions(file_path) + functions = js_support.discover_functions(file_path.read_text(encoding="utf-8"), file_path) add_method = next(f for f in functions if f.function_name == "add") # Extract code context @@ -725,7 +725,7 @@ def test_extract_class_method_with_jsdoc(self, js_support): f.flush() file_path = Path(f.name) - functions = js_support.discover_functions(file_path) + functions = js_support.discover_functions(file_path.read_text(encoding="utf-8"), file_path) add_method = next(f for f in functions if f.function_name == "add") context = js_support.extract_code_context(add_method, file_path.parent, file_path.parent) @@ -763,7 +763,7 @@ def 
test_extract_class_method_syntax_valid(self, js_support): f.flush() file_path = Path(f.name) - functions = js_support.discover_functions(file_path) + functions = js_support.discover_functions(file_path.read_text(encoding="utf-8"), file_path) fib_method = next(f for f in functions if f.function_name == "fibonacci") context = js_support.extract_code_context(fib_method, file_path.parent, file_path.parent) @@ -802,7 +802,7 @@ def test_extract_nested_class_method(self, js_support): f.flush() file_path = Path(f.name) - functions = js_support.discover_functions(file_path) + functions = js_support.discover_functions(file_path.read_text(encoding="utf-8"), file_path) add_method = next((f for f in functions if f.function_name == "add"), None) if add_method: @@ -832,7 +832,7 @@ def test_extract_async_class_method(self, js_support): f.flush() file_path = Path(f.name) - functions = js_support.discover_functions(file_path) + functions = js_support.discover_functions(file_path.read_text(encoding="utf-8"), file_path) fetch_method = next(f for f in functions if f.function_name == "fetchData") context = js_support.extract_code_context(fetch_method, file_path.parent, file_path.parent) @@ -865,7 +865,7 @@ def test_extract_static_class_method(self, js_support): f.flush() file_path = Path(f.name) - functions = js_support.discover_functions(file_path) + functions = js_support.discover_functions(file_path.read_text(encoding="utf-8"), file_path) add_method = next((f for f in functions if f.function_name == "add"), None) if add_method: @@ -894,7 +894,7 @@ def test_extract_class_method_without_class_jsdoc(self, js_support): f.flush() file_path = Path(f.name) - functions = js_support.discover_functions(file_path) + functions = js_support.discover_functions(file_path.read_text(encoding="utf-8"), file_path) method = next(f for f in functions if f.function_name == "simpleMethod") context = js_support.extract_code_context(method, file_path.parent, file_path.parent) @@ -1079,7 +1079,7 @@ def 
test_class_with_constructor(self, js_support): f.flush() file_path = Path(f.name) - functions = js_support.discover_functions(file_path) + functions = js_support.discover_functions(file_path.read_text(encoding="utf-8"), file_path) # Should find constructor and increment names = {f.function_name for f in functions} @@ -1109,7 +1109,7 @@ def test_class_with_getters_setters(self, js_support): f.flush() file_path = Path(f.name) - functions = js_support.discover_functions(file_path) + functions = js_support.discover_functions(file_path.read_text(encoding="utf-8"), file_path) # Should find at least greet names = {f.function_name for f in functions} @@ -1137,7 +1137,7 @@ def test_class_extending_another(self, js_support): f.flush() file_path = Path(f.name) - functions = js_support.discover_functions(file_path) + functions = js_support.discover_functions(file_path.read_text(encoding="utf-8"), file_path) # Find Dog's fetch method fetch_method = next((f for f in functions if f.function_name == "fetch" and f.class_name == "Dog"), None) @@ -1172,7 +1172,7 @@ def test_class_with_private_method(self, js_support): f.flush() file_path = Path(f.name) - functions = js_support.discover_functions(file_path) + functions = js_support.discover_functions(file_path.read_text(encoding="utf-8"), file_path) # Should at least find publicMethod names = {f.function_name for f in functions} @@ -1192,7 +1192,7 @@ def test_commonjs_class_export(self, js_support): f.flush() file_path = Path(f.name) - functions = js_support.discover_functions(file_path) + functions = js_support.discover_functions(file_path.read_text(encoding="utf-8"), file_path) add_method = next(f for f in functions if f.function_name == "add") context = js_support.extract_code_context(add_method, file_path.parent, file_path.parent) @@ -1212,7 +1212,7 @@ def test_es_module_class_export(self, js_support): f.flush() file_path = Path(f.name) - functions = js_support.discover_functions(file_path) + functions = 
js_support.discover_functions(file_path.read_text(encoding="utf-8"), file_path) # Find the add method add_method = next((f for f in functions if f.function_name == "add"), None) @@ -1265,7 +1265,7 @@ def test_extract_context_then_replace_method(self, js_support): f.flush() file_path = Path(f.name) - functions = js_support.discover_functions(file_path) + functions = js_support.discover_functions(file_path.read_text(encoding="utf-8"), file_path) increment_func = next(fn for fn in functions if fn.function_name == "increment") # Step 1: Extract code context (includes constructor for AI context) @@ -1362,7 +1362,7 @@ def test_typescript_extract_context_then_replace_method(self): f.flush() file_path = Path(f.name) - functions = ts_support.discover_functions(file_path) + functions = ts_support.discover_functions(file_path.read_text(encoding="utf-8"), file_path) get_name_func = next(fn for fn in functions if fn.function_name == "getName") # Step 1: Extract code context (includes fields and constructor) @@ -1462,7 +1462,7 @@ def test_extract_replace_preserves_other_methods(self, js_support): f.flush() file_path = Path(f.name) - functions = js_support.discover_functions(file_path) + functions = js_support.discover_functions(file_path.read_text(encoding="utf-8"), file_path) add_func = next(fn for fn in functions if fn.function_name == "add") # Extract context for add @@ -1546,7 +1546,7 @@ def test_extract_static_method_then_replace(self, js_support): f.flush() file_path = Path(f.name) - functions = js_support.discover_functions(file_path) + functions = js_support.discover_functions(file_path.read_text(encoding="utf-8"), file_path) add_func = next(fn for fn in functions if fn.function_name == "add") # Extract context diff --git a/tests/test_languages/test_javascript_test_discovery.py b/tests/test_languages/test_javascript_test_discovery.py index df697d482..d9da2f9b3 100644 --- a/tests/test_languages/test_javascript_test_discovery.py +++ 
b/tests/test_languages/test_javascript_test_discovery.py @@ -53,7 +53,7 @@ def test_discover_tests_basic(self, js_support): """) # Discover functions first - functions = js_support.discover_functions(source_file) + functions = js_support.discover_functions(source_file.read_text(encoding="utf-8"), source_file) assert len(functions) == 1 # Discover tests @@ -90,7 +90,7 @@ def test_discover_tests_spec_suffix(self, js_support): }); """) - functions = js_support.discover_functions(source_file) + functions = js_support.discover_functions(source_file.read_text(encoding="utf-8"), source_file) tests = js_support.discover_tests(tmpdir, functions) assert len(tests) > 0 @@ -124,7 +124,7 @@ def test_discover_tests_in_tests_directory(self, js_support): }); """) - functions = js_support.discover_functions(source_file) + functions = js_support.discover_functions(source_file.read_text(encoding="utf-8"), source_file) tests = js_support.discover_tests(tmpdir, functions) assert len(tests) > 0 @@ -170,7 +170,7 @@ def test_discover_tests_nested_describe(self, js_support): }); """) - functions = js_support.discover_functions(source_file) + functions = js_support.discover_functions(source_file.read_text(encoding="utf-8"), source_file) tests = js_support.discover_tests(tmpdir, functions) assert len(tests) > 0 @@ -208,7 +208,7 @@ def test_discover_tests_with_it_block(self, js_support): }); """) - functions = js_support.discover_functions(source_file) + functions = js_support.discover_functions(source_file.read_text(encoding="utf-8"), source_file) tests = js_support.discover_tests(tmpdir, functions) assert len(tests) > 0 @@ -242,7 +242,7 @@ def test_discover_tests_es_module_import(self, js_support): }); """) - functions = js_support.discover_functions(source_file) + functions = js_support.discover_functions(source_file.read_text(encoding="utf-8"), source_file) tests = js_support.discover_tests(tmpdir, functions) assert len(tests) > 0 @@ -270,7 +270,7 @@ def 
test_discover_tests_default_export(self, js_support): }); """) - functions = js_support.discover_functions(source_file) + functions = js_support.discover_functions(source_file.read_text(encoding="utf-8"), source_file) tests = js_support.discover_tests(tmpdir, functions) assert len(tests) > 0 @@ -316,7 +316,7 @@ def test_discover_tests_class_methods(self, js_support): }); """) - functions = js_support.discover_functions(source_file) + functions = js_support.discover_functions(source_file.read_text(encoding="utf-8"), source_file) tests = js_support.discover_tests(tmpdir, functions) # Should find tests for class methods @@ -363,7 +363,7 @@ def test_discover_tests_multi_level_directories(self, js_support): }); """) - functions = js_support.discover_functions(source_file) + functions = js_support.discover_functions(source_file.read_text(encoding="utf-8"), source_file) tests = js_support.discover_tests(tmpdir, functions) assert len(tests) > 0 @@ -399,7 +399,7 @@ def test_discover_tests_async_functions(self, js_support): }); """) - functions = js_support.discover_functions(source_file) + functions = js_support.discover_functions(source_file.read_text(encoding="utf-8"), source_file) tests = js_support.discover_tests(tmpdir, functions) assert len(tests) > 0 @@ -436,7 +436,7 @@ def test_discover_tests_jsx_component(self, js_support): }); """) - functions = js_support.discover_functions(source_file) + functions = js_support.discover_functions(source_file.read_text(encoding="utf-8"), source_file) tests = js_support.discover_tests(tmpdir, functions) # JSX tests should be discovered @@ -466,7 +466,7 @@ def test_discover_tests_no_matching_tests(self, js_support): }); """) - functions = js_support.discover_functions(source_file) + functions = js_support.discover_functions(source_file.read_text(encoding="utf-8"), source_file) tests = js_support.discover_tests(tmpdir, functions) # Should not find tests for our function @@ -502,7 +502,7 @@ def 
test_discover_tests_function_name_in_source(self, js_support): }); """) - functions = js_support.discover_functions(source_file) + functions = js_support.discover_functions(source_file.read_text(encoding="utf-8"), source_file) tests = js_support.discover_tests(tmpdir, functions) # Should find tests for isEmail @@ -546,7 +546,7 @@ def test_discover_tests_multiple_test_files(self, js_support): }); """) - functions = js_support.discover_functions(source_file) + functions = js_support.discover_functions(source_file.read_text(encoding="utf-8"), source_file) tests = js_support.discover_tests(tmpdir, functions) assert len(tests) > 0 @@ -574,7 +574,7 @@ def test_discover_tests_template_literal_names(self, js_support): }); """) - functions = js_support.discover_functions(source_file) + functions = js_support.discover_functions(source_file.read_text(encoding="utf-8"), source_file) tests = js_support.discover_tests(tmpdir, functions) # May or may not find depending on template literal handling @@ -605,7 +605,7 @@ def test_discover_tests_aliased_import(self, js_support): }); """) - functions = js_support.discover_functions(source_file) + functions = js_support.discover_functions(source_file.read_text(encoding="utf-8"), source_file) tests = js_support.discover_tests(tmpdir, functions) # Should still find tests since original name is imported @@ -626,7 +626,7 @@ def test_find_basic_tests(self, js_support): f.flush() file_path = Path(f.name) - source = file_path.read_text() + source = file_path.read_text(encoding="utf-8") from codeflash.languages.javascript.treesitter import get_analyzer_for_file analyzer = get_analyzer_for_file(file_path) @@ -651,7 +651,7 @@ def test_find_describe_blocks(self, js_support): f.flush() file_path = Path(f.name) - source = file_path.read_text() + source = file_path.read_text(encoding="utf-8") from codeflash.languages.javascript.treesitter import get_analyzer_for_file analyzer = get_analyzer_for_file(file_path) @@ -675,7 +675,7 @@ def 
test_find_nested_describe_blocks(self, js_support): f.flush() file_path = Path(f.name) - source = file_path.read_text() + source = file_path.read_text(encoding="utf-8") from codeflash.languages.javascript.treesitter import get_analyzer_for_file analyzer = get_analyzer_for_file(file_path) @@ -699,7 +699,7 @@ def test_find_tests_with_skip(self, js_support): f.flush() file_path = Path(f.name) - source = file_path.read_text() + source = file_path.read_text(encoding="utf-8") from codeflash.languages.javascript.treesitter import get_analyzer_for_file analyzer = get_analyzer_for_file(file_path) @@ -720,7 +720,7 @@ def test_find_tests_with_only(self, js_support): f.flush() file_path = Path(f.name) - source = file_path.read_text() + source = file_path.read_text(encoding="utf-8") from codeflash.languages.javascript.treesitter import get_analyzer_for_file analyzer = get_analyzer_for_file(file_path) @@ -738,7 +738,7 @@ def test_find_tests_with_single_quotes(self, js_support): f.flush() file_path = Path(f.name) - source = file_path.read_text() + source = file_path.read_text(encoding="utf-8") from codeflash.languages.javascript.treesitter import get_analyzer_for_file analyzer = get_analyzer_for_file(file_path) @@ -757,7 +757,7 @@ def test_find_tests_with_double_quotes(self, js_support): f.flush() file_path = Path(f.name) - source = file_path.read_text() + source = file_path.read_text(encoding="utf-8") from codeflash.languages.javascript.treesitter import get_analyzer_for_file analyzer = get_analyzer_for_file(file_path) @@ -773,7 +773,7 @@ def test_find_tests_empty_file(self, js_support): f.flush() file_path = Path(f.name) - source = file_path.read_text() + source = file_path.read_text(encoding="utf-8") from codeflash.languages.javascript.treesitter import get_analyzer_for_file analyzer = get_analyzer_for_file(file_path) @@ -806,7 +806,7 @@ def test_require_named_import(self, js_support): }); """) - functions = js_support.discover_functions(source_file) + functions = 
js_support.discover_functions(source_file.read_text(encoding="utf-8"), source_file) tests = js_support.discover_tests(tmpdir, functions) # funcA should have tests @@ -833,7 +833,7 @@ def test_es_module_named_import(self, js_support): }); """) - functions = js_support.discover_functions(source_file) + functions = js_support.discover_functions(source_file.read_text(encoding="utf-8"), source_file) tests = js_support.discover_tests(tmpdir, functions) # funcX should have tests @@ -859,7 +859,7 @@ def test_default_import(self, js_support): }); """) - functions = js_support.discover_functions(source_file) + functions = js_support.discover_functions(source_file.read_text(encoding="utf-8"), source_file) tests = js_support.discover_tests(tmpdir, functions) assert len(tests) > 0 @@ -896,7 +896,7 @@ def test_comments_in_test_file(self, js_support): */ """) - functions = js_support.discover_functions(source_file) + functions = js_support.discover_functions(source_file.read_text(encoding="utf-8"), source_file) tests = js_support.discover_tests(tmpdir, functions) assert len(tests) > 0 @@ -921,7 +921,7 @@ def test_test_file_with_syntax_error(self, js_support): }); """) - functions = js_support.discover_functions(source_file) + functions = js_support.discover_functions(source_file.read_text(encoding="utf-8"), source_file) # Should not crash tests = js_support.discover_tests(tmpdir, functions) assert isinstance(tests, dict) @@ -949,7 +949,7 @@ def test_function_with_same_name_as_jest_api(self, js_support): }); """) - functions = js_support.discover_functions(source_file) + functions = js_support.discover_functions(source_file.read_text(encoding="utf-8"), source_file) tests = js_support.discover_tests(tmpdir, functions) # Should still work despite naming conflicts @@ -966,7 +966,7 @@ def test_empty_test_directory(self, js_support): module.exports = { lonelyFunc }; """) - functions = js_support.discover_functions(source_file) + functions = 
js_support.discover_functions(source_file.read_text(encoding="utf-8"), source_file) tests = js_support.discover_tests(tmpdir, functions) # Should return empty dict, not crash @@ -1001,7 +1001,7 @@ def test_circular_imports(self, js_support): }); """) - functions_a = js_support.discover_functions(file_a) + functions_a = js_support.discover_functions(file_a.read_text(encoding="utf-8"), file_a) tests = js_support.discover_tests(tmpdir, functions_a) # Should handle circular imports gracefully @@ -1047,7 +1047,7 @@ def test_find_test_each_array(self, js_support): f.flush() file_path = Path(f.name) - source = file_path.read_text() + source = file_path.read_text(encoding="utf-8") from codeflash.languages.javascript.treesitter import get_analyzer_for_file analyzer = get_analyzer_for_file(file_path) @@ -1073,7 +1073,7 @@ def test_find_describe_each(self, js_support): f.flush() file_path = Path(f.name) - source = file_path.read_text() + source = file_path.read_text(encoding="utf-8") from codeflash.languages.javascript.treesitter import get_analyzer_for_file analyzer = get_analyzer_for_file(file_path) @@ -1098,7 +1098,7 @@ def test_find_it_each(self, js_support): f.flush() file_path = Path(f.name) - source = file_path.read_text() + source = file_path.read_text(encoding="utf-8") from codeflash.languages.javascript.treesitter import get_analyzer_for_file analyzer = get_analyzer_for_file(file_path) @@ -1174,7 +1174,7 @@ def test_full_discovery_workflow(self, js_support): """) # Discover functions - functions = js_support.discover_functions(source_file) + functions = js_support.discover_functions(source_file.read_text(encoding="utf-8"), source_file) assert len(functions) == 3 # Discover tests @@ -1242,7 +1242,7 @@ def test_discovery_with_fixtures(self, js_support): }); """) - functions = js_support.discover_functions(source_file) + functions = js_support.discover_functions(source_file.read_text(encoding="utf-8"), source_file) tests = js_support.discover_tests(tmpdir, functions) 
assert len(tests) > 0 @@ -1280,7 +1280,7 @@ def test_test_file_imports_different_module(self, js_support): """) # Discover functions from moduleB - functions_b = js_support.discover_functions(source_b) + functions_b = js_support.discover_functions(source_b.read_text(encoding="utf-8"), source_b) tests = js_support.discover_tests(tmpdir, functions_b) # funcB should not have any tests since test file doesn't import it @@ -1312,7 +1312,7 @@ def test_test_file_imports_only_specific_function(self, js_support): }); """) - functions = js_support.discover_functions(source_file) + functions = js_support.discover_functions(source_file.read_text(encoding="utf-8"), source_file) tests = js_support.discover_tests(tmpdir, functions) # Check that tests were found @@ -1340,7 +1340,7 @@ def test_function_name_as_string_not_import(self, js_support): }); """) - functions = js_support.discover_functions(source_file) + functions = js_support.discover_functions(source_file.read_text(encoding="utf-8"), source_file) tests = js_support.discover_tests(tmpdir, functions) # Current implementation may still match on string occurrence @@ -1367,7 +1367,7 @@ def test_module_import_with_method_access(self, js_support): }); """) - functions = js_support.discover_functions(source_file) + functions = js_support.discover_functions(source_file.read_text(encoding="utf-8"), source_file) tests = js_support.discover_tests(tmpdir, functions) # Should find tests since 'calculate' appears in source @@ -1399,7 +1399,7 @@ def test_class_method_discovery_via_class_import(self, js_support): }); """) - functions = js_support.discover_functions(source_file) + functions = js_support.discover_functions(source_file.read_text(encoding="utf-8"), source_file) tests = js_support.discover_tests(tmpdir, functions) # Should find tests for class methods @@ -1432,7 +1432,7 @@ def test_nested_module_structure(self, js_support): }); """) - functions = js_support.discover_functions(source_file) + functions = 
js_support.discover_functions(source_file.read_text(encoding="utf-8"), source_file) tests = js_support.discover_tests(tmpdir, functions) assert len(tests) > 0 @@ -1456,7 +1456,7 @@ def test_dynamic_test_names(self, js_support): f.flush() file_path = Path(f.name) - source = file_path.read_text() + source = file_path.read_text(encoding="utf-8") from codeflash.languages.javascript.treesitter import get_analyzer_for_file analyzer = get_analyzer_for_file(file_path) @@ -1484,7 +1484,7 @@ def test_conditional_tests(self, js_support): f.flush() file_path = Path(f.name) - source = file_path.read_text() + source = file_path.read_text(encoding="utf-8") from codeflash.languages.javascript.treesitter import get_analyzer_for_file analyzer = get_analyzer_for_file(file_path) @@ -1508,7 +1508,7 @@ def test_test_with_timeout(self, js_support): f.flush() file_path = Path(f.name) - source = file_path.read_text() + source = file_path.read_text(encoding="utf-8") from codeflash.languages.javascript.treesitter import get_analyzer_for_file analyzer = get_analyzer_for_file(file_path) @@ -1531,7 +1531,7 @@ def test_todo_tests(self, js_support): f.flush() file_path = Path(f.name) - source = file_path.read_text() + source = file_path.read_text(encoding="utf-8") from codeflash.languages.javascript.treesitter import get_analyzer_for_file analyzer = get_analyzer_for_file(file_path) @@ -1554,7 +1554,7 @@ def test_concurrent_tests(self, js_support): f.flush() file_path = Path(f.name) - source = file_path.read_text() + source = file_path.read_text(encoding="utf-8") from codeflash.languages.javascript.treesitter import get_analyzer_for_file analyzer = get_analyzer_for_file(file_path) @@ -1597,7 +1597,7 @@ def test_multiple_functions_same_file_different_tests(self, js_support): }); """) - functions = js_support.discover_functions(source_file) + functions = js_support.discover_functions(source_file.read_text(encoding="utf-8"), source_file) tests = js_support.discover_tests(tmpdir, functions) # All 
three functions should be discovered @@ -1628,7 +1628,7 @@ def test_test_in_wrong_describe_still_discovered(self, js_support): }); """) - functions = js_support.discover_functions(source_file) + functions = js_support.discover_functions(source_file.read_text(encoding="utf-8"), source_file) tests = js_support.discover_tests(tmpdir, functions) # Should still find tests @@ -1653,7 +1653,7 @@ def test_mocha_bdd_style(self, js_support): f.flush() file_path = Path(f.name) - source = file_path.read_text() + source = file_path.read_text(encoding="utf-8") from codeflash.languages.javascript.treesitter import get_analyzer_for_file analyzer = get_analyzer_for_file(file_path) @@ -1684,7 +1684,7 @@ def test_context_block(self, js_support): f.flush() file_path = Path(f.name) - source = file_path.read_text() + source = file_path.read_text(encoding="utf-8") from codeflash.languages.javascript.treesitter import get_analyzer_for_file analyzer = get_analyzer_for_file(file_path) @@ -1712,7 +1712,7 @@ def test_class_method_qualified_name(self, js_support): module.exports = { Calculator }; """) - functions = js_support.discover_functions(source_file) + functions = js_support.discover_functions(source_file.read_text(encoding="utf-8"), source_file) # Check qualified names include class add_func = next((f for f in functions if f.function_name == "add"), None) @@ -1737,7 +1737,7 @@ class Inner { module.exports = { Outer }; """) - functions = js_support.discover_functions(source_file) + functions = js_support.discover_functions(source_file.read_text(encoding="utf-8"), source_file) # Should find at least the Outer class method assert any(f.class_name == "Outer" for f in functions) diff --git a/tests/test_languages/test_javascript_test_runner.py b/tests/test_languages/test_javascript_test_runner.py index 905ef24a8..5f8c67b60 100644 --- a/tests/test_languages/test_javascript_test_runner.py +++ b/tests/test_languages/test_javascript_test_runner.py @@ -728,3 +728,371 @@ def 
test_get_jest_config_for_project_without_bundler(self): # Verify codeflash configs were NOT created assert not (tmpdir_path / "jest.codeflash.config.js").exists() assert not (tmpdir_path / "tsconfig.codeflash.json").exists() + + +class TestBundledJestReporter: + """Tests for the bundled codeflash/jest-reporter. + + Verifies that: + 1. The reporter JS file exists in the runtime package + 2. Jest commands reference 'codeflash/jest-reporter' (not jest-junit) + 3. The reporter produces valid JUnit XML + 4. The CODEFLASH_JEST_REPORTER constant is correct + """ + + def test_reporter_js_file_exists(self): + """The jest-reporter.js file must exist in the runtime directory.""" + reporter_path = Path(__file__).resolve().parents[2] / "packages" / "codeflash" / "runtime" / "jest-reporter.js" + assert reporter_path.exists(), f"jest-reporter.js not found at {reporter_path}" + + def test_reporter_constant_value(self): + """CODEFLASH_JEST_REPORTER should be 'codeflash/jest-reporter'.""" + from codeflash.languages.javascript.test_runner import CODEFLASH_JEST_REPORTER + + assert CODEFLASH_JEST_REPORTER == "codeflash/jest-reporter" + + def test_behavioral_command_uses_bundled_reporter(self): + """run_jest_behavioral_tests should use codeflash/jest-reporter in --reporters flag.""" + from codeflash.languages.javascript.test_runner import run_jest_behavioral_tests + from codeflash.models.models import TestFile, TestFiles + from codeflash.models.test_type import TestType + + with tempfile.TemporaryDirectory() as tmpdir: + tmpdir_path = Path(tmpdir) + (tmpdir_path / "package.json").write_text('{"name": "test"}') + test_dir = tmpdir_path / "test" + test_dir.mkdir() + test_file = test_dir / "test_func.test.js" + test_file.write_text("// test") + + mock_test_files = TestFiles( + test_files=[ + TestFile( + original_file_path=test_file, + instrumented_behavior_file_path=test_file, + benchmarking_file_path=test_file, + test_type=TestType.GENERATED_REGRESSION, + ), + ] + ) + + with 
patch("subprocess.run") as mock_run: + mock_result = MagicMock() + mock_result.stdout = "" + mock_result.stderr = "" + mock_result.returncode = 1 + mock_run.return_value = mock_result + + try: + run_jest_behavioral_tests( + test_paths=mock_test_files, + test_env={}, + cwd=tmpdir_path, + project_root=tmpdir_path, + ) + except Exception: + pass + + if mock_run.called: + cmd = mock_run.call_args[0][0] + reporter_args = [a for a in cmd if "--reporters=" in a and "jest-reporter" in a] + assert len(reporter_args) == 1, f"Expected exactly one codeflash/jest-reporter flag, got: {reporter_args}" + assert reporter_args[0] == "--reporters=codeflash/jest-reporter" + # Must NOT reference jest-junit + jest_junit_args = [a for a in cmd if "jest-junit" in a] + assert len(jest_junit_args) == 0, f"Should not reference jest-junit: {jest_junit_args}" + + def test_benchmarking_command_uses_bundled_reporter(self): + """run_jest_benchmarking_tests should use codeflash/jest-reporter.""" + from codeflash.languages.javascript.test_runner import run_jest_benchmarking_tests + from codeflash.models.models import TestFile, TestFiles + from codeflash.models.test_type import TestType + + with tempfile.TemporaryDirectory() as tmpdir: + tmpdir_path = Path(tmpdir) + (tmpdir_path / "package.json").write_text('{"name": "test"}') + test_dir = tmpdir_path / "test" + test_dir.mkdir() + test_file = test_dir / "test_func__perf.test.js" + test_file.write_text("// test") + + mock_test_files = TestFiles( + test_files=[ + TestFile( + original_file_path=test_file, + instrumented_behavior_file_path=test_file, + benchmarking_file_path=test_file, + test_type=TestType.GENERATED_REGRESSION, + ), + ] + ) + + with patch("subprocess.run") as mock_run: + mock_result = MagicMock() + mock_result.stdout = "" + mock_result.stderr = "" + mock_result.returncode = 1 + mock_run.return_value = mock_result + + try: + run_jest_benchmarking_tests( + test_paths=mock_test_files, + test_env={}, + cwd=tmpdir_path, + 
project_root=tmpdir_path, + ) + except Exception: + pass + + if mock_run.called: + cmd = mock_run.call_args[0][0] + reporter_args = [a for a in cmd if "--reporters=codeflash/jest-reporter" in a] + assert len(reporter_args) == 1 + + def test_line_profile_command_uses_bundled_reporter(self): + """run_jest_line_profile_tests should use codeflash/jest-reporter.""" + from codeflash.languages.javascript.test_runner import run_jest_line_profile_tests + from codeflash.models.models import TestFile, TestFiles + from codeflash.models.test_type import TestType + + with tempfile.TemporaryDirectory() as tmpdir: + tmpdir_path = Path(tmpdir) + (tmpdir_path / "package.json").write_text('{"name": "test"}') + test_dir = tmpdir_path / "test" + test_dir.mkdir() + test_file = test_dir / "test_func__line.test.js" + test_file.write_text("// test") + + mock_test_files = TestFiles( + test_files=[ + TestFile( + original_file_path=test_file, + instrumented_behavior_file_path=test_file, + benchmarking_file_path=test_file, + test_type=TestType.GENERATED_REGRESSION, + ), + ] + ) + + with patch("subprocess.run") as mock_run: + mock_result = MagicMock() + mock_result.stdout = "" + mock_result.stderr = "" + mock_result.returncode = 1 + mock_run.return_value = mock_result + + try: + run_jest_line_profile_tests( + test_paths=mock_test_files, + test_env={}, + cwd=tmpdir_path, + project_root=tmpdir_path, + ) + except Exception: + pass + + if mock_run.called: + cmd = mock_run.call_args[0][0] + reporter_args = [a for a in cmd if "--reporters=codeflash/jest-reporter" in a] + assert len(reporter_args) == 1 + + def test_reporter_produces_valid_junit_xml(self): + """The reporter JS should produce JUnit XML parseable by junitparser.""" + import subprocess + + reporter_path = Path(__file__).resolve().parents[2] / "packages" / "codeflash" / "runtime" / "jest-reporter.js" + + with tempfile.TemporaryDirectory() as tmpdir: + output_file = Path(tmpdir) / "results.xml" + + # Create a Node.js script that exercises 
the reporter with mock data + test_script = Path(tmpdir) / "test_reporter.js" + # Use forward slashes to avoid Windows backslash escape issues in JS strings + test_script.write_text(f""" +// Set env vars BEFORE requiring reporter (matches real Jest behavior) +process.env.JEST_JUNIT_OUTPUT_FILE = '{output_file.as_posix()}'; +process.env.JEST_JUNIT_CLASSNAME = '{{filepath}}'; +process.env.JEST_JUNIT_SUITE_NAME = '{{filepath}}'; +process.env.JEST_JUNIT_ADD_FILE_ATTRIBUTE = 'true'; +process.env.JEST_JUNIT_INCLUDE_CONSOLE_OUTPUT = 'true'; + +const Reporter = require('{reporter_path.as_posix()}'); + +// Mock Jest globalConfig +const globalConfig = {{ rootDir: '/tmp/project' }}; +const reporter = new Reporter(globalConfig, {{}}); + +// Mock test results (matches Jest's aggregatedResults structure) +const results = {{ + testResults: [ + {{ + testFilePath: '/tmp/project/test/math.test.js', + displayName: 'math tests', + console: [{{ type: 'log', message: 'CODEFLASH_START test1' }}], + testResults: [ + {{ + fullName: 'math > adds numbers', + title: 'adds numbers', + status: 'passed', + duration: 12, + }}, + {{ + fullName: 'math > handles failure', + title: 'handles failure', + status: 'failed', + duration: 5, + failureMessages: ['Expected 4 but got 5'], + }}, + {{ + fullName: 'math > skipped test', + title: 'skipped test', + status: 'pending', + duration: 0, + }}, + ], + }}, + ], +}}; + +// Simulate onTestFileResult for console capture +reporter.onTestFileResult(null, results.testResults[0], null); + +// Simulate onRunComplete +reporter.onRunComplete([], results); + +console.log('OK'); +""") + + result = subprocess.run( + ["node", str(test_script)], + capture_output=True, + text=True, + timeout=10, + ) + + assert result.returncode == 0, f"Reporter script failed: {result.stderr}" + assert output_file.exists(), "Reporter did not create output file" + + xml_content = output_file.read_text() + + # Verify basic XML structure + assert '" in xml_content + + # Verify system-out with 
console output + assert "" in xml_content + assert "CODEFLASH_START" in xml_content + + # Verify it's parseable by junitparser (our actual parser) + from junitparser import JUnitXml + + parsed = JUnitXml.fromfile(str(output_file)) + suites = list(parsed) + assert len(suites) == 1 + testcases = list(suites[0]) + assert len(testcases) == 3 + + def test_reporter_export_in_package_json(self): + """package.json should export codeflash/jest-reporter.""" + import json + + pkg_path = Path(__file__).resolve().parents[2] / "packages" / "codeflash" / "package.json" + with pkg_path.open() as f: + pkg = json.load(f) + + exports = pkg.get("exports", {}) + assert "./jest-reporter" in exports, "Missing ./jest-reporter export in package.json" + assert exports["./jest-reporter"]["require"] == "./runtime/jest-reporter.js" + + + +class TestUnsupportedFrameworkError: + """Tests for clear error on unsupported test frameworks.""" + + def test_unknown_framework_raises_error_behavioral(self): + """run_behavioral_tests should raise NotImplementedError for unknown frameworks.""" + from codeflash.languages.javascript.support import JavaScriptSupport + + support = JavaScriptSupport() + with pytest.raises(NotImplementedError, match="not yet supported"): + support.run_behavioral_tests( + test_paths=MagicMock(), + test_env={}, + cwd=Path("."), + test_framework="tap", + ) + + def test_unknown_framework_raises_error_benchmarking(self): + """run_benchmarking_tests should raise NotImplementedError for unknown frameworks.""" + from codeflash.languages.javascript.support import JavaScriptSupport + + support = JavaScriptSupport() + with pytest.raises(NotImplementedError, match="not yet supported"): + support.run_benchmarking_tests( + test_paths=MagicMock(), + test_env={}, + cwd=Path("."), + test_framework="tap", + ) + + def test_unknown_framework_raises_error_line_profile(self): + """run_line_profile_tests should raise NotImplementedError for unknown frameworks.""" + from 
codeflash.languages.javascript.support import JavaScriptSupport + + support = JavaScriptSupport() + with pytest.raises(NotImplementedError, match="not yet supported"): + support.run_line_profile_tests( + test_paths=MagicMock(), + test_env={}, + cwd=Path("."), + test_framework="tap", + ) + + def test_jest_framework_does_not_raise_not_implemented(self): + """jest framework should NOT raise NotImplementedError.""" + from codeflash.languages.javascript.support import JavaScriptSupport + + support = JavaScriptSupport() + try: + support.run_behavioral_tests( + test_paths=MagicMock(), + test_env={}, + cwd=Path("."), + test_framework="jest", + ) + except NotImplementedError: + pytest.fail("jest framework should not raise NotImplementedError") + except Exception: + pass # Other exceptions are fine — Jest isn't installed in test env + + def test_mocha_framework_does_not_raise_not_implemented(self): + """mocha framework should NOT raise NotImplementedError.""" + from codeflash.languages.javascript.support import JavaScriptSupport + + support = JavaScriptSupport() + try: + support.run_behavioral_tests( + test_paths=MagicMock(), + test_env={}, + cwd=Path("."), + test_framework="mocha", + ) + except NotImplementedError: + pytest.fail("mocha framework should not raise NotImplementedError") + except Exception: + pass # Other exceptions are fine — Mocha isn't installed in test env diff --git a/tests/test_languages/test_js_code_extractor.py b/tests/test_languages/test_js_code_extractor.py index a21f15e2e..424fdbe8c 100644 --- a/tests/test_languages/test_js_code_extractor.py +++ b/tests/test_languages/test_js_code_extractor.py @@ -13,7 +13,7 @@ from codeflash.languages.javascript.support import JavaScriptSupport, TypeScriptSupport from codeflash.languages.registry import get_language_support from codeflash.models.models import FunctionParent -from codeflash.optimization.function_optimizer import FunctionOptimizer +from codeflash.languages.javascript.function_optimizer import 
JavaScriptFunctionOptimizer from codeflash.verification.verification_utils import TestConfig FIXTURES_DIR = Path(__file__).parent / "fixtures" @@ -37,7 +37,7 @@ def js_support(self): def test_discover_class_methods(self, js_support, cjs_project): """Test that class methods are discovered correctly.""" calculator_file = cjs_project / "calculator.js" - functions = js_support.discover_functions(calculator_file) + functions = js_support.discover_functions(calculator_file.read_text(encoding="utf-8"), calculator_file) method_names = {f.function_name for f in functions} @@ -47,17 +47,19 @@ def test_discover_class_methods(self, js_support, cjs_project): def test_class_method_has_correct_parent(self, js_support, cjs_project): """Test parent class information for methods.""" calculator_file = cjs_project / "calculator.js" - functions = js_support.discover_functions(calculator_file) + functions = js_support.discover_functions(calculator_file.read_text(encoding="utf-8"), calculator_file) for func in functions: # All methods should belong to Calculator class assert func.is_method is True, f"{func.function_name} should be a method" - assert func.class_name == "Calculator", f"{func.function_name} should belong to Calculator, got {func.class_name}" + assert func.class_name == "Calculator", ( + f"{func.function_name} should belong to Calculator, got {func.class_name}" + ) def test_extract_permutation_code(self, js_support, cjs_project): """Test permutation method code extraction.""" calculator_file = cjs_project / "calculator.js" - functions = js_support.discover_functions(calculator_file) + functions = js_support.discover_functions(calculator_file.read_text(encoding="utf-8"), calculator_file) permutation_func = next(f for f in functions if f.function_name == "permutation") @@ -93,7 +95,7 @@ class Calculator { def test_extract_context_includes_direct_helpers(self, js_support, cjs_project): """Test that direct helper functions are included in context.""" calculator_file = 
cjs_project / "calculator.js" - functions = js_support.discover_functions(calculator_file) + functions = js_support.discover_functions(calculator_file.read_text(encoding="utf-8"), calculator_file) permutation_func = next(f for f in functions if f.function_name == "permutation") @@ -129,7 +131,7 @@ def test_extract_context_includes_direct_helpers(self, js_support, cjs_project): def test_extract_compound_interest_code(self, js_support, cjs_project): """Test calculateCompoundInterest code extraction.""" calculator_file = cjs_project / "calculator.js" - functions = js_support.discover_functions(calculator_file) + functions = js_support.discover_functions(calculator_file.read_text(encoding="utf-8"), calculator_file) compound_func = next(f for f in functions if f.function_name == "calculateCompoundInterest") @@ -175,7 +177,7 @@ class Calculator { def test_extract_compound_interest_helpers(self, js_support, cjs_project): """Test helper extraction for calculateCompoundInterest.""" calculator_file = cjs_project / "calculator.js" - functions = js_support.discover_functions(calculator_file) + functions = js_support.discover_functions(calculator_file.read_text(encoding="utf-8"), calculator_file) compound_func = next(f for f in functions if f.function_name == "calculateCompoundInterest") @@ -235,7 +237,7 @@ def test_extract_compound_interest_helpers(self, js_support, cjs_project): def test_extract_context_includes_imports(self, js_support, cjs_project): """Test import statement extraction.""" calculator_file = cjs_project / "calculator.js" - functions = js_support.discover_functions(calculator_file) + functions = js_support.discover_functions(calculator_file.read_text(encoding="utf-8"), calculator_file) compound_func = next(f for f in functions if f.function_name == "calculateCompoundInterest") @@ -256,7 +258,7 @@ def test_extract_context_includes_imports(self, js_support, cjs_project): def test_extract_static_method(self, js_support, cjs_project): """Test static method 
extraction (quickAdd).""" calculator_file = cjs_project / "calculator.js" - functions = js_support.discover_functions(calculator_file) + functions = js_support.discover_functions(calculator_file.read_text(encoding="utf-8"), calculator_file) quick_add_func = next(f for f in functions if f.function_name == "quickAdd") @@ -315,7 +317,7 @@ def js_support(self): def test_discover_esm_methods(self, js_support, esm_project): """Test method discovery in ESM project.""" calculator_file = esm_project / "calculator.js" - functions = js_support.discover_functions(calculator_file) + functions = js_support.discover_functions(calculator_file.read_text(encoding="utf-8"), calculator_file) method_names = {f.function_name for f in functions} @@ -326,7 +328,7 @@ def test_discover_esm_methods(self, js_support, esm_project): def test_esm_permutation_extraction(self, js_support, esm_project): """Test permutation method extraction in ESM.""" calculator_file = esm_project / "calculator.js" - functions = js_support.discover_functions(calculator_file) + functions = js_support.discover_functions(calculator_file.read_text(encoding="utf-8"), calculator_file) permutation_func = next(f for f in functions if f.function_name == "permutation") @@ -376,7 +378,7 @@ class Calculator { def test_esm_compound_interest_extraction(self, js_support, esm_project): """Test calculateCompoundInterest extraction in ESM with import syntax.""" calculator_file = esm_project / "calculator.js" - functions = js_support.discover_functions(calculator_file) + functions = js_support.discover_functions(calculator_file.read_text(encoding="utf-8"), calculator_file) compound_func = next(f for f in functions if f.function_name == "calculateCompoundInterest") @@ -502,7 +504,7 @@ def test_typescript_support_properties(self, ts_support): def test_discover_ts_methods(self, ts_support, ts_project): """Test method discovery in TypeScript.""" calculator_file = ts_project / "calculator.ts" - functions = 
ts_support.discover_functions(calculator_file) + functions = ts_support.discover_functions(calculator_file.read_text(encoding="utf-8"), calculator_file) method_names = {f.function_name for f in functions} @@ -513,7 +515,7 @@ def test_discover_ts_methods(self, ts_support, ts_project): def test_ts_permutation_extraction(self, ts_support, ts_project): """Test permutation method extraction in TypeScript.""" calculator_file = ts_project / "calculator.ts" - functions = ts_support.discover_functions(calculator_file) + functions = ts_support.discover_functions(calculator_file.read_text(encoding="utf-8"), calculator_file) permutation_func = next(f for f in functions if f.function_name == "permutation") @@ -566,7 +568,7 @@ class Calculator { def test_ts_compound_interest_extraction(self, ts_support, ts_project): """Test calculateCompoundInterest extraction in TypeScript.""" calculator_file = ts_project / "calculator.ts" - functions = ts_support.discover_functions(calculator_file) + functions = ts_support.discover_functions(calculator_file.read_text(encoding="utf-8"), calculator_file) compound_func = next(f for f in functions if f.function_name == "calculateCompoundInterest") @@ -676,7 +678,7 @@ def test_standalone_function(self, js_support, tmp_path): test_file = tmp_path / "standalone.js" test_file.write_text(source) - functions = js_support.discover_functions(test_file) + functions = js_support.discover_functions(test_file.read_text(encoding="utf-8"), test_file) func = next(f for f in functions if f.function_name == "standalone") context = js_support.extract_code_context(function=func, project_root=tmp_path, module_root=tmp_path) @@ -709,7 +711,7 @@ def test_external_package_excluded(self, js_support, tmp_path): test_file = tmp_path / "processor.js" test_file.write_text(source) - functions = js_support.discover_functions(test_file) + functions = js_support.discover_functions(test_file.read_text(encoding="utf-8"), test_file) func = next(f for f in functions if 
f.function_name == "processArray") context = js_support.extract_code_context(function=func, project_root=tmp_path, module_root=tmp_path) @@ -744,7 +746,7 @@ def test_recursive_function(self, js_support, tmp_path): test_file = tmp_path / "recursive.js" test_file.write_text(source) - functions = js_support.discover_functions(test_file) + functions = js_support.discover_functions(test_file.read_text(encoding="utf-8"), test_file) func = next(f for f in functions if f.function_name == "fibonacci") context = js_support.extract_code_context(function=func, project_root=tmp_path, module_root=tmp_path) @@ -777,7 +779,7 @@ def test_arrow_function_helper(self, js_support, tmp_path): test_file = tmp_path / "arrow.js" test_file.write_text(source) - functions = js_support.discover_functions(test_file) + functions = js_support.discover_functions(test_file.read_text(encoding="utf-8"), test_file) func = next(f for f in functions if f.function_name == "processValue") context = js_support.extract_code_context(function=func, project_root=tmp_path, module_root=tmp_path) @@ -835,7 +837,7 @@ def test_method_extraction_includes_constructor(self, js_support, tmp_path): test_file = tmp_path / "counter.js" test_file.write_text(source) - functions = js_support.discover_functions(test_file) + functions = js_support.discover_functions(test_file.read_text(encoding="utf-8"), test_file) increment_func = next(f for f in functions if f.function_name == "increment") context = js_support.extract_code_context(function=increment_func, project_root=tmp_path, module_root=tmp_path) @@ -874,7 +876,7 @@ def test_method_extraction_class_without_constructor(self, js_support, tmp_path) test_file = tmp_path / "math_utils.js" test_file.write_text(source) - functions = js_support.discover_functions(test_file) + functions = js_support.discover_functions(test_file.read_text(encoding="utf-8"), test_file) add_func = next(f for f in functions if f.function_name == "add") context = 
js_support.extract_code_context(function=add_func, project_root=tmp_path, module_root=tmp_path) @@ -910,7 +912,7 @@ def test_typescript_method_extraction_includes_fields(self, ts_support, tmp_path test_file = tmp_path / "user.ts" test_file.write_text(source) - functions = ts_support.discover_functions(test_file) + functions = ts_support.discover_functions(test_file.read_text(encoding="utf-8"), test_file) get_name_func = next(f for f in functions if f.function_name == "getName") context = ts_support.extract_code_context(function=get_name_func, project_root=tmp_path, module_root=tmp_path) @@ -949,7 +951,7 @@ def test_typescript_fields_only_no_constructor(self, ts_support, tmp_path): test_file = tmp_path / "config.ts" test_file.write_text(source) - functions = ts_support.discover_functions(test_file) + functions = ts_support.discover_functions(test_file.read_text(encoding="utf-8"), test_file) get_url_func = next(f for f in functions if f.function_name == "getUrl") context = ts_support.extract_code_context(function=get_url_func, project_root=tmp_path, module_root=tmp_path) @@ -990,7 +992,7 @@ def test_constructor_with_jsdoc(self, js_support, tmp_path): test_file = tmp_path / "logger.js" test_file.write_text(source) - functions = js_support.discover_functions(test_file) + functions = js_support.discover_functions(test_file.read_text(encoding="utf-8"), test_file) get_prefix_func = next(f for f in functions if f.function_name == "getPrefix") context = js_support.extract_code_context(function=get_prefix_func, project_root=tmp_path, module_root=tmp_path) @@ -1032,7 +1034,7 @@ def test_static_method_includes_constructor(self, js_support, tmp_path): test_file = tmp_path / "factory.js" test_file.write_text(source) - functions = js_support.discover_functions(test_file) + functions = js_support.discover_functions(test_file.read_text(encoding="utf-8"), test_file) create_func = next(f for f in functions if f.function_name == "create") context = 
js_support.extract_code_context(function=create_func, project_root=tmp_path, module_root=tmp_path) @@ -1074,7 +1076,7 @@ def test_function_optimizer_workflow(self, cjs_project): js_support = get_language_support("javascript") calculator_file = cjs_project / "calculator.js" - functions = js_support.discover_functions(calculator_file) + functions = js_support.discover_functions(calculator_file.read_text(encoding="utf-8"), calculator_file) target = next(f for f in functions if f.function_name == "permutation") parents = [FunctionParent(name=p.name, type=p.type) for p in target.parents] @@ -1099,7 +1101,7 @@ def test_function_optimizer_workflow(self, cjs_project): pytest_cmd="jest", ) - func_optimizer = FunctionOptimizer( + func_optimizer = JavaScriptFunctionOptimizer( function_to_optimize=func, test_cfg=test_config, aiservice_client=MagicMock() ) result = func_optimizer.get_code_optimization_context() @@ -1182,7 +1184,7 @@ def test_extract_same_file_interface_from_parameter(self, ts_support, tmp_path): test_file = tmp_path / "geometry.ts" test_file.write_text(source) - functions = ts_support.discover_functions(test_file) + functions = ts_support.discover_functions(test_file.read_text(encoding="utf-8"), test_file) distance_func = next(f for f in functions if f.function_name == "distance") context = ts_support.extract_code_context(function=distance_func, project_root=tmp_path, module_root=tmp_path) @@ -1224,7 +1226,7 @@ def test_extract_same_file_enum_from_parameter(self, ts_support, tmp_path): test_file = tmp_path / "status.ts" test_file.write_text(source) - functions = ts_support.discover_functions(test_file) + functions = ts_support.discover_functions(test_file.read_text(encoding="utf-8"), test_file) process_func = next(f for f in functions if f.function_name == "processStatus") context = ts_support.extract_code_context(function=process_func, project_root=tmp_path, module_root=tmp_path) @@ -1259,7 +1261,7 @@ def 
test_extract_same_file_type_alias_from_return_type(self, ts_support, tmp_pat test_file = tmp_path / "compute.ts" test_file.write_text(source) - functions = ts_support.discover_functions(test_file) + functions = ts_support.discover_functions(test_file.read_text(encoding="utf-8"), test_file) compute_func = next(f for f in functions if f.function_name == "compute") context = ts_support.extract_code_context(function=compute_func, project_root=tmp_path, module_root=tmp_path) @@ -1301,7 +1303,7 @@ def test_extract_class_field_types(self, ts_support, tmp_path): test_file = tmp_path / "service.ts" test_file.write_text(source) - functions = ts_support.discover_functions(test_file) + functions = ts_support.discover_functions(test_file.read_text(encoding="utf-8"), test_file) get_timeout_func = next(f for f in functions if f.function_name == "getTimeout") context = ts_support.extract_code_context( @@ -1332,7 +1334,7 @@ def test_primitive_types_not_included(self, ts_support, tmp_path): test_file = tmp_path / "add.ts" test_file.write_text(source) - functions = ts_support.discover_functions(test_file) + functions = ts_support.discover_functions(test_file.read_text(encoding="utf-8"), test_file) add_func = next(f for f in functions if f.function_name == "add") context = ts_support.extract_code_context(function=add_func, project_root=tmp_path, module_root=tmp_path) @@ -1363,7 +1365,7 @@ def test_extract_multiple_types(self, ts_support, tmp_path): test_file = tmp_path / "rect.ts" test_file.write_text(source) - functions = ts_support.discover_functions(test_file) + functions = ts_support.discover_functions(test_file.read_text(encoding="utf-8"), test_file) create_rect_func = next(f for f in functions if f.function_name == "createRect") context = ts_support.extract_code_context( @@ -1409,7 +1411,7 @@ def test_extract_imported_type_definition(self, ts_support, ts_types_project): } """) - functions = ts_support.discover_functions(geometry_file) + functions = 
ts_support.discover_functions(geometry_file.read_text(encoding="utf-8"), geometry_file) calc_distance_func = next(f for f in functions if f.function_name == "calculateDistance") context = ts_support.extract_code_context( @@ -1460,7 +1462,7 @@ def test_type_with_jsdoc_included(self, ts_support, tmp_path): test_file = tmp_path / "user.ts" test_file.write_text(source) - functions = ts_support.discover_functions(test_file) + functions = ts_support.discover_functions(test_file.read_text(encoding="utf-8"), test_file) greet_func = next(f for f in functions if f.function_name == "greetUser") context = ts_support.extract_code_context(function=greet_func, project_root=tmp_path, module_root=tmp_path) diff --git a/tests/test_languages/test_js_code_replacer.py b/tests/test_languages/test_js_code_replacer.py index 5700c4bfd..5ed2a903f 100644 --- a/tests/test_languages/test_js_code_replacer.py +++ b/tests/test_languages/test_js_code_replacer.py @@ -7,6 +7,7 @@ - ES Modules (import/export) syntax - TypeScript import handling """ + from __future__ import annotations import shutil @@ -14,8 +15,8 @@ import pytest -from codeflash.languages.python.static_analysis.code_replacer import replace_function_definitions_for_language from codeflash.languages.base import Language +from codeflash.languages.code_replacer import replace_function_definitions_for_language from codeflash.languages.current import set_current_language from codeflash.languages.javascript.module_system import ( ModuleSystem, @@ -25,7 +26,6 @@ ensure_module_system_compatibility, get_import_statement, ) - from codeflash.languages.javascript.support import JavaScriptSupport, TypeScriptSupport from codeflash.models.models import CodeStringsMarkdown @@ -50,7 +50,6 @@ def temp_project(tmp_path): return project_root - FIXTURES_DIR = Path(__file__).parent / "fixtures" @@ -308,7 +307,9 @@ class TestTsJestSkipsConversion: When ts-jest is installed, it handles module interoperability internally, so we skip conversion to avoid 
breaking valid imports. """ - def __init__(self): + + @pytest.fixture(autouse=True) + def _set_language(self): set_current_language(Language.TYPESCRIPT) def test_commonjs_not_converted_when_ts_jest_installed(self, tmp_path): @@ -751,6 +752,7 @@ def test_esm_fixture_conversion_removes_import(self, esm_project): f"import statements should be converted to require.\nFound import lines: {import_lines}" ) + class TestSimpleFunctionReplacement: """Tests for simple function body replacement with strict assertions.""" @@ -764,7 +766,8 @@ def test_replace_simple_function_body(self, js_support, temp_project): file_path = temp_project / "math.js" file_path.write_text(original_source, encoding="utf-8") - functions = js_support.discover_functions(file_path) + source = file_path.read_text(encoding="utf-8") + functions = js_support.discover_functions(source, file_path) func = functions[0] # Optimized version with different body @@ -800,7 +803,8 @@ def test_replace_function_with_multiple_statements(self, js_support, temp_projec file_path = temp_project / "processor.js" file_path.write_text(original_source, encoding="utf-8") - functions = js_support.discover_functions(file_path) + source = file_path.read_text(encoding="utf-8") + functions = js_support.discover_functions(source, file_path) func = functions[0] # Optimized version using map @@ -839,7 +843,8 @@ def test_replace_preserves_surrounding_code(self, js_support, temp_project): file_path = temp_project / "module.js" file_path.write_text(original_source, encoding="utf-8") - functions = js_support.discover_functions(file_path) + source = file_path.read_text(encoding="utf-8") + functions = js_support.discover_functions(source, file_path) target_func = next(f for f in functions if f.function_name == "targetFunction") optimized_code = """\ @@ -891,7 +896,8 @@ def test_replace_class_method_body(self, js_support, temp_project): file_path = temp_project / "calculator.js" file_path.write_text(original_source, encoding="utf-8") - 
functions = js_support.discover_functions(file_path) + source = file_path.read_text(encoding="utf-8") + functions = js_support.discover_functions(source, file_path) add_method = next(f for f in functions if f.function_name == "add") # Optimized version provided in class context @@ -954,7 +960,8 @@ def test_replace_method_calling_sibling_methods(self, js_support, temp_project): file_path = temp_project / "processor.js" file_path.write_text(original_source, encoding="utf-8") - functions = js_support.discover_functions(file_path) + source = file_path.read_text(encoding="utf-8") + functions = js_support.discover_functions(source, file_path) process_method = next(f for f in functions if f.function_name == "process") optimized_code = """\ @@ -1016,7 +1023,8 @@ def test_replace_preserves_jsdoc_above_function(self, js_support, temp_project): file_path = temp_project / "math.js" file_path.write_text(original_source, encoding="utf-8") - functions = js_support.discover_functions(file_path) + source = file_path.read_text(encoding="utf-8") + functions = js_support.discover_functions(source, file_path) func = functions[0] optimized_code = """\ @@ -1070,7 +1078,8 @@ def test_replace_class_method_with_jsdoc(self, js_support, temp_project): file_path = temp_project / "cache.js" file_path.write_text(original_source, encoding="utf-8") - functions = js_support.discover_functions(file_path) + source = file_path.read_text(encoding="utf-8") + functions = js_support.discover_functions(source, file_path) get_method = next(f for f in functions if f.function_name == "get") optimized_code = """\ @@ -1131,7 +1140,8 @@ def test_replace_async_function_body(self, js_support, temp_project): file_path = temp_project / "api.js" file_path.write_text(original_source, encoding="utf-8") - functions = js_support.discover_functions(file_path) + source = file_path.read_text(encoding="utf-8") + functions = js_support.discover_functions(source, file_path) func = functions[0] optimized_code = """\ @@ -1172,7 
+1182,8 @@ def test_replace_async_class_method(self, js_support, temp_project): file_path = temp_project / "client.js" file_path.write_text(original_source, encoding="utf-8") - functions = js_support.discover_functions(file_path) + source = file_path.read_text(encoding="utf-8") + functions = js_support.discover_functions(source, file_path) get_method = next(f for f in functions if f.function_name == "get") optimized_code = """\ @@ -1223,7 +1234,8 @@ def test_replace_generator_function_body(self, js_support, temp_project): file_path = temp_project / "generators.js" file_path.write_text(original_source, encoding="utf-8") - functions = js_support.discover_functions(file_path) + source = file_path.read_text(encoding="utf-8") + functions = js_support.discover_functions(source, file_path) func = functions[0] optimized_code = """\ @@ -1262,7 +1274,8 @@ def test_replace_typescript_function_with_types(self, ts_support, temp_project): file_path = temp_project / "processor.ts" file_path.write_text(original_source, encoding="utf-8") - functions = ts_support.discover_functions(file_path) + source = file_path.read_text(encoding="utf-8") + functions = ts_support.discover_functions(source, file_path) func = functions[0] optimized_code = """\ @@ -1303,7 +1316,8 @@ def test_replace_typescript_class_method_with_generics(self, ts_support, temp_pr file_path = temp_project / "container.ts" file_path.write_text(original_source, encoding="utf-8") - functions = ts_support.discover_functions(file_path) + source = file_path.read_text(encoding="utf-8") + functions = ts_support.discover_functions(source, file_path) get_all_method = next(f for f in functions if f.function_name == "getAll") optimized_code = """\ @@ -1356,7 +1370,8 @@ def test_replace_typescript_interface_typed_function(self, ts_support, temp_proj file_path = temp_project / "user.ts" file_path.write_text(original_source, encoding="utf-8") - functions = ts_support.discover_functions(file_path) + source = 
file_path.read_text(encoding="utf-8") + functions = ts_support.discover_functions(source, file_path) func = next(f for f in functions if f.function_name == "createUser") optimized_code = """\ @@ -1411,7 +1426,8 @@ def test_replace_function_with_nested_functions(self, js_support, temp_project): file_path = temp_project / "processor.js" file_path.write_text(original_source, encoding="utf-8") - functions = js_support.discover_functions(file_path) + source = file_path.read_text(encoding="utf-8") + functions = js_support.discover_functions(source, file_path) process_func = next(f for f in functions if f.function_name == "processItems") optimized_code = """\ @@ -1458,7 +1474,8 @@ def test_replace_multiple_methods_sequentially(self, js_support, temp_project): file_path.write_text(original_source, encoding="utf-8") # First replacement: sum method - functions = js_support.discover_functions(file_path) + source = file_path.read_text(encoding="utf-8") + functions = js_support.discover_functions(source, file_path) sum_method = next(f for f in functions if f.function_name == "sum") optimized_sum = """\ @@ -1505,7 +1522,8 @@ def test_replace_function_with_complex_destructuring(self, js_support, temp_proj file_path = temp_project / "config.js" file_path.write_text(original_source, encoding="utf-8") - functions = js_support.discover_functions(file_path) + source = file_path.read_text(encoding="utf-8") + functions = js_support.discover_functions(source, file_path) func = functions[0] optimized_code = """\ @@ -1544,7 +1562,8 @@ def test_replace_minimal_function_body(self, js_support, temp_project): file_path = temp_project / "minimal.js" file_path.write_text(original_source, encoding="utf-8") - functions = js_support.discover_functions(file_path) + source = file_path.read_text(encoding="utf-8") + functions = js_support.discover_functions(source, file_path) func = functions[0] optimized_code = """\ @@ -1571,7 +1590,8 @@ def test_replace_single_line_function(self, js_support, 
temp_project): file_path = temp_project / "utils.js" file_path.write_text(original_source, encoding="utf-8") - functions = js_support.discover_functions(file_path) + source = file_path.read_text(encoding="utf-8") + functions = js_support.discover_functions(source, file_path) func = functions[0] optimized_code = """\ @@ -1598,7 +1618,8 @@ def test_replace_function_with_special_characters_in_strings(self, js_support, t file_path = temp_project / "formatter.js" file_path.write_text(original_source, encoding="utf-8") - functions = js_support.discover_functions(file_path) + source = file_path.read_text(encoding="utf-8") + functions = js_support.discover_functions(source, file_path) func = functions[0] optimized_code = """\ @@ -1633,7 +1654,8 @@ def test_replace_function_with_regex(self, js_support, temp_project): file_path = temp_project / "validator.js" file_path.write_text(original_source, encoding="utf-8") - functions = js_support.discover_functions(file_path) + source = file_path.read_text(encoding="utf-8") + functions = js_support.discover_functions(source, file_path) func = functions[0] optimized_code = """\ @@ -1676,7 +1698,8 @@ def test_replace_exported_function_commonjs(self, js_support, temp_project): file_path = temp_project / "module.js" file_path.write_text(original_source, encoding="utf-8") - functions = js_support.discover_functions(file_path) + source = file_path.read_text(encoding="utf-8") + functions = js_support.discover_functions(source, file_path) main_func = next(f for f in functions if f.function_name == "main") optimized_code = """\ @@ -1719,7 +1742,8 @@ def test_replace_exported_function_esm(self, js_support, temp_project): file_path = temp_project / "module.js" file_path.write_text(original_source, encoding="utf-8") - functions = js_support.discover_functions(file_path) + source = file_path.read_text(encoding="utf-8") + functions = js_support.discover_functions(source, file_path) main_func = next(f for f in functions if f.function_name == 
"main") optimized_code = """\ @@ -1750,20 +1774,16 @@ def test_all_replacements_produce_valid_syntax(self, js_support, temp_project): """Test that various replacements all produce valid JavaScript.""" test_cases = [ # (original, optimized, description) - ( - "export function f(x) { return x + 1; }", - "export function f(x) { return ++x; }", - "increment replacement" - ), + ("export function f(x) { return x + 1; }", "export function f(x) { return ++x; }", "increment replacement"), ( "export function f(arr) { return arr.length > 0; }", "export function f(arr) { return !!arr.length; }", - "boolean conversion" + "boolean conversion", ), ( "export function f(a, b) { if (a) { return a; } return b; }", "export function f(a, b) { return a || b; }", - "logical OR replacement" + "logical OR replacement", ), ] @@ -1771,7 +1791,8 @@ def test_all_replacements_produce_valid_syntax(self, js_support, temp_project): file_path = temp_project / f"test_{i}.js" file_path.write_text(original, encoding="utf-8") - functions = js_support.discover_functions(file_path) + source = file_path.read_text(encoding="utf-8") + functions = js_support.discover_functions(source, file_path) func = functions[0] result = js_support.replace_function(original, func, optimized) @@ -1875,7 +1896,8 @@ def test_code_replacer_for_class_method(ts_support, temp_project): target_func = "findDuplicates" parent_class = "DataProcessor" - functions = ts_support.discover_functions(file_path) + source = file_path.read_text(encoding="utf-8") + functions = ts_support.discover_functions(source, file_path) # find function target_func_info = None for func in functions: @@ -1920,11 +1942,15 @@ class DataProcessor { ``` """ code_markdown = CodeStringsMarkdown.parse_markdown_code(new_code) - replaced = replace_function_definitions_for_language([f"{parent_class}.{target_func}"], code_markdown, file_path, temp_project) + replaced = replace_function_definitions_for_language( + [f"{parent_class}.{target_func}"], code_markdown, 
file_path, temp_project, lang_support=ts_support + ) assert replaced new_code = file_path.read_text() - assert new_code == """/** + assert ( + new_code + == """/** * DataProcessor class - demonstrates class method optimization in TypeScript. * Contains intentionally inefficient implementations for optimization testing. */ @@ -2015,7 +2041,7 @@ class DataProcessor { } } """ - + ) class TestNewVariableFromOptimizedCode: @@ -2030,9 +2056,9 @@ def test_new_bound_method_variable_added_after_referenced_constant(self, ts_supp 1. Add the new variable after the constant it references 2. Replace the function with the optimized version """ - from codeflash.models.models import CodeStringsMarkdown, CodeString + from codeflash.models.models import CodeString, CodeStringsMarkdown - original_source = '''\ + original_source = """\ const CODEFLASH_EMPLOYEE_GITHUB_IDS = new Set([ "1234", ]); @@ -2040,43 +2066,34 @@ def test_new_bound_method_variable_added_after_referenced_constant(self, ts_supp export function isCodeflashEmployee(userId: string): boolean { return CODEFLASH_EMPLOYEE_GITHUB_IDS.has(userId); } -''' +""" file_path = temp_project / "auth.ts" file_path.write_text(original_source, encoding="utf-8") # Optimized code introduces a bound method variable for performance - optimized_code = '''const _has: (id: string) => boolean = CODEFLASH_EMPLOYEE_GITHUB_IDS.has.bind( + optimized_code = """const _has: (id: string) => boolean = CODEFLASH_EMPLOYEE_GITHUB_IDS.has.bind( CODEFLASH_EMPLOYEE_GITHUB_IDS ); export function isCodeflashEmployee(userId: string): boolean { return _has(userId); } -''' +""" code_markdown = CodeStringsMarkdown( - code_strings=[ - CodeString( - code=optimized_code, - file_path=Path("auth.ts"), - language="typescript" - ) - ], - language="typescript" + code_strings=[CodeString(code=optimized_code, file_path=Path("auth.ts"), language="typescript")], + language="typescript", ) replaced = replace_function_definitions_for_language( - ["isCodeflashEmployee"], - 
code_markdown, - file_path, - temp_project, + ["isCodeflashEmployee"], code_markdown, file_path, temp_project, lang_support=ts_support ) assert replaced result = file_path.read_text() # Expected result for strict equality check - expected_result = '''\ + expected_result = """\ const CODEFLASH_EMPLOYEE_GITHUB_IDS = new Set([ "1234", ]); @@ -2088,11 +2105,9 @@ def test_new_bound_method_variable_added_after_referenced_constant(self, ts_supp export function isCodeflashEmployee(userId: string): boolean { return _has(userId); } -''' +""" assert result == expected_result, ( - f"Result does not match expected output.\n" - f"Expected:\n{expected_result}\n\n" - f"Got:\n{result}" + f"Result does not match expected output.\nExpected:\n{expected_result}\n\nGot:\n{result}" ) @@ -2113,7 +2128,7 @@ def test_imported_interface_not_added_as_declaration(self, ts_support, temp_proj contains the TreeNode interface definition (from read-only context), the replacement should NOT add the interface to the original file. 
""" - from codeflash.models.models import CodeStringsMarkdown, CodeString + from codeflash.models.models import CodeString, CodeStringsMarkdown # Original source imports TreeNode original_source = """\ @@ -2163,20 +2178,13 @@ def test_imported_interface_not_added_as_declaration(self, ts_support, temp_proj code_markdown = CodeStringsMarkdown( code_strings=[ - CodeString( - code=optimized_code_with_interface, - file_path=Path("helpers.ts"), - language="typescript" - ) + CodeString(code=optimized_code_with_interface, file_path=Path("helpers.ts"), language="typescript") ], - language="typescript" + language="typescript", ) replace_function_definitions_for_language( - ["getNearestAbove"], - code_markdown, - file_path, - temp_project, + ["getNearestAbove"], code_markdown, file_path, temp_project, lang_support=ts_support ) result = file_path.read_text() @@ -2203,7 +2211,7 @@ def test_imported_interface_not_added_as_declaration(self, ts_support, temp_proj def test_multiple_imported_types_not_duplicated(self, ts_support, temp_project): """Test that multiple imported types are not duplicated.""" - from codeflash.models.models import CodeStringsMarkdown, CodeString + from codeflash.models.models import CodeString, CodeStringsMarkdown original_source = """\ import type { TreeNode, NodeSpace } from "./constants"; @@ -2235,21 +2243,12 @@ def test_multiple_imported_types_not_duplicated(self, ts_support, temp_project): """ code_markdown = CodeStringsMarkdown( - code_strings=[ - CodeString( - code=optimized_code, - file_path=Path("processor.ts"), - language="typescript" - ) - ], - language="typescript" + code_strings=[CodeString(code=optimized_code, file_path=Path("processor.ts"), language="typescript")], + language="typescript", ) replace_function_definitions_for_language( - ["processNode"], - code_markdown, - file_path, - temp_project, + ["processNode"], code_markdown, file_path, temp_project, lang_support=ts_support ) result = file_path.read_text() diff --git 
a/tests/test_languages/test_language_parity.py b/tests/test_languages/test_language_parity.py index 2b2035c84..2747e6892 100644 --- a/tests/test_languages/test_language_parity.py +++ b/tests/test_languages/test_language_parity.py @@ -345,8 +345,8 @@ def test_simple_function_discovery(self, python_support, js_support): py_file = write_temp_file(SIMPLE_FUNCTION.python, ".py") js_file = write_temp_file(SIMPLE_FUNCTION.javascript, ".js") - py_funcs = python_support.discover_functions(py_file) - js_funcs = js_support.discover_functions(js_file) + py_funcs = python_support.discover_functions(py_file.read_text(encoding="utf-8"), py_file) + js_funcs = js_support.discover_functions(js_file.read_text(encoding="utf-8"), js_file) # Both should find exactly one function assert len(py_funcs) == 1, f"Python found {len(py_funcs)}, expected 1" @@ -365,8 +365,8 @@ def test_multiple_functions_discovery(self, python_support, js_support): py_file = write_temp_file(MULTIPLE_FUNCTIONS.python, ".py") js_file = write_temp_file(MULTIPLE_FUNCTIONS.javascript, ".js") - py_funcs = python_support.discover_functions(py_file) - js_funcs = js_support.discover_functions(js_file) + py_funcs = python_support.discover_functions(py_file.read_text(encoding="utf-8"), py_file) + js_funcs = js_support.discover_functions(js_file.read_text(encoding="utf-8"), js_file) # Both should find 3 functions assert len(py_funcs) == 3, f"Python found {len(py_funcs)}, expected 3" @@ -384,8 +384,8 @@ def test_functions_without_return_excluded(self, python_support, js_support): py_file = write_temp_file(WITH_AND_WITHOUT_RETURN.python, ".py") js_file = write_temp_file(WITH_AND_WITHOUT_RETURN.javascript, ".js") - py_funcs = python_support.discover_functions(py_file) - js_funcs = js_support.discover_functions(js_file) + py_funcs = python_support.discover_functions(py_file.read_text(encoding="utf-8"), py_file) + js_funcs = js_support.discover_functions(js_file.read_text(encoding="utf-8"), js_file) # Both should find only 1 
function (the one with return) assert len(py_funcs) == 1, f"Python found {len(py_funcs)}, expected 1" @@ -400,8 +400,8 @@ def test_class_methods_discovery(self, python_support, js_support): py_file = write_temp_file(CLASS_METHODS.python, ".py") js_file = write_temp_file(CLASS_METHODS.javascript, ".js") - py_funcs = python_support.discover_functions(py_file) - js_funcs = js_support.discover_functions(js_file) + py_funcs = python_support.discover_functions(py_file.read_text(encoding="utf-8"), py_file) + js_funcs = js_support.discover_functions(js_file.read_text(encoding="utf-8"), js_file) # Both should find 2 methods assert len(py_funcs) == 2, f"Python found {len(py_funcs)}, expected 2" @@ -421,8 +421,8 @@ def test_async_functions_discovery(self, python_support, js_support): py_file = write_temp_file(ASYNC_FUNCTIONS.python, ".py") js_file = write_temp_file(ASYNC_FUNCTIONS.javascript, ".js") - py_funcs = python_support.discover_functions(py_file) - js_funcs = js_support.discover_functions(js_file) + py_funcs = python_support.discover_functions(py_file.read_text(encoding="utf-8"), py_file) + js_funcs = js_support.discover_functions(js_file.read_text(encoding="utf-8"), js_file) # Both should find 2 functions assert len(py_funcs) == 2, f"Python found {len(py_funcs)}, expected 2" @@ -440,32 +440,23 @@ def test_async_functions_discovery(self, python_support, js_support): assert js_sync.is_async is False, "JavaScript sync function should have is_async=False" def test_nested_functions_discovery(self, python_support, js_support): - """Both should discover nested functions with parent info.""" + """Python skips nested functions; JavaScript discovers them with parent info.""" py_file = write_temp_file(NESTED_FUNCTIONS.python, ".py") js_file = write_temp_file(NESTED_FUNCTIONS.javascript, ".js") - py_funcs = python_support.discover_functions(py_file) - js_funcs = js_support.discover_functions(js_file) + py_funcs = 
python_support.discover_functions(py_file.read_text(encoding="utf-8"), py_file) + js_funcs = js_support.discover_functions(js_file.read_text(encoding="utf-8"), js_file) - # Both should find 2 functions (outer and inner) - assert len(py_funcs) == 2, f"Python found {len(py_funcs)}, expected 2" - assert len(js_funcs) == 2, f"JavaScript found {len(js_funcs)}, expected 2" + # Python skips nested functions — only outer is discovered + assert len(py_funcs) == 1, f"Python found {len(py_funcs)}, expected 1" + assert py_funcs[0].function_name == "outer" - # Check names - py_names = {f.function_name for f in py_funcs} + # JavaScript discovers both + assert len(js_funcs) == 2, f"JavaScript found {len(js_funcs)}, expected 2" js_names = {f.function_name for f in js_funcs} - - assert py_names == {"outer", "inner"}, f"Python found {py_names}" assert js_names == {"outer", "inner"}, f"JavaScript found {js_names}" - # Check parent info for inner function - py_inner = next(f for f in py_funcs if f.function_name == "inner") js_inner = next(f for f in js_funcs if f.function_name == "inner") - - assert len(py_inner.parents) >= 1, "Python inner should have parent info" - assert py_inner.parents[0].name == "outer", "Python inner's parent should be outer" - - # JavaScript nested function parent check assert len(js_inner.parents) >= 1, "JavaScript inner should have parent info" assert js_inner.parents[0].name == "outer", "JavaScript inner's parent should be outer" @@ -474,8 +465,8 @@ def test_static_methods_discovery(self, python_support, js_support): py_file = write_temp_file(STATIC_METHODS.python, ".py") js_file = write_temp_file(STATIC_METHODS.javascript, ".js") - py_funcs = python_support.discover_functions(py_file) - js_funcs = js_support.discover_functions(js_file) + py_funcs = python_support.discover_functions(py_file.read_text(encoding="utf-8"), py_file) + js_funcs = js_support.discover_functions(js_file.read_text(encoding="utf-8"), js_file) # Both should find 1 function assert 
len(py_funcs) == 1, f"Python found {len(py_funcs)}, expected 1" @@ -492,8 +483,8 @@ def test_complex_file_discovery(self, python_support, js_support): py_file = write_temp_file(COMPLEX_FILE.python, ".py") js_file = write_temp_file(COMPLEX_FILE.javascript, ".js") - py_funcs = python_support.discover_functions(py_file) - js_funcs = js_support.discover_functions(js_file) + py_funcs = python_support.discover_functions(py_file.read_text(encoding="utf-8"), py_file) + js_funcs = js_support.discover_functions(js_file.read_text(encoding="utf-8"), js_file) # Both should find 4 functions assert len(py_funcs) == 4, f"Python found {len(py_funcs)}, expected 4" @@ -524,8 +515,8 @@ def test_filter_exclude_async(self, python_support, js_support): criteria = FunctionFilterCriteria(include_async=False) - py_funcs = python_support.discover_functions(py_file, criteria) - js_funcs = js_support.discover_functions(js_file, criteria) + py_funcs = python_support.discover_functions(py_file.read_text(encoding="utf-8"), py_file, criteria) + js_funcs = js_support.discover_functions(js_file.read_text(encoding="utf-8"), js_file, criteria) # Both should find only 1 function (the sync one) assert len(py_funcs) == 1, f"Python found {len(py_funcs)}, expected 1" @@ -542,8 +533,8 @@ def test_filter_exclude_methods(self, python_support, js_support): criteria = FunctionFilterCriteria(include_methods=False) - py_funcs = python_support.discover_functions(py_file, criteria) - js_funcs = js_support.discover_functions(js_file, criteria) + py_funcs = python_support.discover_functions(py_file.read_text(encoding="utf-8"), py_file, criteria) + js_funcs = js_support.discover_functions(js_file.read_text(encoding="utf-8"), js_file, criteria) # Both should find only 1 function (standalone) assert len(py_funcs) == 1, f"Python found {len(py_funcs)}, expected 1" @@ -554,11 +545,11 @@ def test_filter_exclude_methods(self, python_support, js_support): assert js_funcs[0].function_name == "standalone" def 
test_nonexistent_file_returns_empty(self, python_support, js_support): - """Both should return empty list for nonexistent files.""" - py_funcs = python_support.discover_functions(Path("/nonexistent/file.py")) - js_funcs = js_support.discover_functions(Path("/nonexistent/file.js")) - + """Both languages return empty list for empty source.""" + py_funcs = python_support.discover_functions("", Path("/nonexistent/file.py")) assert py_funcs == [] + + js_funcs = js_support.discover_functions("", Path("/nonexistent/file.js")) assert js_funcs == [] def test_line_numbers_captured(self, python_support, js_support): @@ -566,8 +557,8 @@ def test_line_numbers_captured(self, python_support, js_support): py_file = write_temp_file(SIMPLE_FUNCTION.python, ".py") js_file = write_temp_file(SIMPLE_FUNCTION.javascript, ".js") - py_funcs = python_support.discover_functions(py_file) - js_funcs = js_support.discover_functions(js_file) + py_funcs = python_support.discover_functions(py_file.read_text(encoding="utf-8"), py_file) + js_funcs = js_support.discover_functions(js_file.read_text(encoding="utf-8"), js_file) # Both should have start_line and end_line assert py_funcs[0].starting_line is not None @@ -917,8 +908,8 @@ def test_discover_and_replace_workflow(self, python_support, js_support): js_file = write_temp_file(js_original, ".js") # Discover - py_funcs = python_support.discover_functions(py_file) - js_funcs = js_support.discover_functions(js_file) + py_funcs = python_support.discover_functions(py_file.read_text(encoding="utf-8"), py_file) + js_funcs = js_support.discover_functions(js_file.read_text(encoding="utf-8"), js_file) assert len(py_funcs) == 1 assert len(js_funcs) == 1 @@ -969,8 +960,8 @@ def test_function_info_fields_populated(self, python_support, js_support): py_file = write_temp_file(CLASS_METHODS.python, ".py") js_file = write_temp_file(CLASS_METHODS.javascript, ".js") - py_funcs = python_support.discover_functions(py_file) - js_funcs = 
js_support.discover_functions(js_file) + py_funcs = python_support.discover_functions(py_file.read_text(encoding="utf-8"), py_file) + js_funcs = js_support.discover_functions(js_file.read_text(encoding="utf-8"), js_file) for py_func in py_funcs: # Check all expected fields are populated @@ -1003,7 +994,7 @@ def test_arrow_functions_unique_to_js(self, js_support): export const identity = x => x; """ js_file = write_temp_file(js_code, ".js") - funcs = js_support.discover_functions(js_file) + funcs = js_support.discover_functions(js_file.read_text(encoding="utf-8"), js_file) # Should find all arrow functions names = {f.function_name for f in funcs} @@ -1030,8 +1021,8 @@ def number_generator(): py_file = write_temp_file(py_code, ".py") js_file = write_temp_file(js_code, ".js") - py_funcs = python_support.discover_functions(py_file) - js_funcs = js_support.discover_functions(js_file) + py_funcs = python_support.discover_functions(py_file.read_text(encoding="utf-8"), py_file) + js_funcs = js_support.discover_functions(js_file.read_text(encoding="utf-8"), js_file) # Both should find the generator assert len(py_funcs) == 1, f"Python found {len(py_funcs)} generators" @@ -1054,7 +1045,7 @@ def multi_decorated(): return 3 """ py_file = write_temp_file(py_code, ".py") - funcs = python_support.discover_functions(py_file) + funcs = python_support.discover_functions(py_file.read_text(encoding="utf-8"), py_file) # Should find all functions regardless of decorators names = {f.function_name for f in funcs} @@ -1074,7 +1065,7 @@ def test_function_expressions_js(self, js_support): }; """ js_file = write_temp_file(js_code, ".js") - funcs = js_support.discover_functions(js_file) + funcs = js_support.discover_functions(js_file.read_text(encoding="utf-8"), js_file) # Should find function expressions names = {f.function_name for f in funcs} @@ -1094,8 +1085,8 @@ def test_empty_file(self, python_support, js_support): py_file = write_temp_file("", ".py") js_file = write_temp_file("", ".js") 
- py_funcs = python_support.discover_functions(py_file) - js_funcs = js_support.discover_functions(js_file) + py_funcs = python_support.discover_functions(py_file.read_text(encoding="utf-8"), py_file) + js_funcs = js_support.discover_functions(js_file.read_text(encoding="utf-8"), js_file) assert py_funcs == [] assert js_funcs == [] @@ -1119,8 +1110,8 @@ def test_file_with_only_comments(self, python_support, js_support): py_file = write_temp_file(py_code, ".py") js_file = write_temp_file(js_code, ".js") - py_funcs = python_support.discover_functions(py_file) - js_funcs = js_support.discover_functions(js_file) + py_funcs = python_support.discover_functions(py_file.read_text(encoding="utf-8"), py_file) + js_funcs = js_support.discover_functions(js_file.read_text(encoding="utf-8"), js_file) assert py_funcs == [] assert js_funcs == [] @@ -1139,8 +1130,8 @@ def greeting(): py_file = write_temp_file(py_code, ".py") js_file = write_temp_file(js_code, ".js") - py_funcs = python_support.discover_functions(py_file) - js_funcs = js_support.discover_functions(js_file) + py_funcs = python_support.discover_functions(py_file.read_text(encoding="utf-8"), py_file) + js_funcs = js_support.discover_functions(js_file.read_text(encoding="utf-8"), js_file) assert len(py_funcs) == 1 assert len(js_funcs) == 1 diff --git a/tests/test_languages/test_mocha_runner.py b/tests/test_languages/test_mocha_runner.py new file mode 100644 index 000000000..283ff995a --- /dev/null +++ b/tests/test_languages/test_mocha_runner.py @@ -0,0 +1,502 @@ +"""Tests for Mocha test runner functionality.""" + +import json +import tempfile +from pathlib import Path +from unittest.mock import MagicMock, patch + +import pytest +from junitparser import JUnitXml + + +class TestMochaJsonToJunitXml: + """Tests for converting Mocha JSON reporter output to JUnit XML.""" + + def test_passing_tests(self): + from codeflash.languages.javascript.mocha_runner import mocha_json_to_junit_xml + + mocha_json = json.dumps( + { + 
"stats": {"tests": 2, "passes": 2, "failures": 0, "duration": 50}, + "tests": [ + { + "title": "should add numbers", + "fullTitle": "math should add numbers", + "duration": 20, + "err": {}, + }, + { + "title": "should subtract numbers", + "fullTitle": "math should subtract numbers", + "duration": 30, + "err": {}, + }, + ], + "passes": [], + "failures": [], + "pending": [], + } + ) + + with tempfile.TemporaryDirectory() as tmpdir: + output_file = Path(tmpdir) / "results.xml" + mocha_json_to_junit_xml(mocha_json, output_file) + + assert output_file.exists() + xml = JUnitXml.fromfile(str(output_file)) + total_tests = sum(suite.tests for suite in xml) + assert total_tests == 2 + + def test_failing_tests(self): + from codeflash.languages.javascript.mocha_runner import mocha_json_to_junit_xml + + mocha_json = json.dumps( + { + "stats": {"tests": 1, "passes": 0, "failures": 1, "duration": 10}, + "tests": [ + { + "title": "should fail", + "fullTitle": "errors should fail", + "duration": 10, + "err": { + "message": "expected 1 to equal 2", + "stack": "AssertionError: expected 1 to equal 2\n at Context.", + }, + }, + ], + "passes": [], + "failures": [], + "pending": [], + } + ) + + with tempfile.TemporaryDirectory() as tmpdir: + output_file = Path(tmpdir) / "results.xml" + mocha_json_to_junit_xml(mocha_json, output_file) + + assert output_file.exists() + xml = JUnitXml.fromfile(str(output_file)) + total_failures = sum(suite.failures for suite in xml) + assert total_failures == 1 + + def test_pending_tests(self): + from codeflash.languages.javascript.mocha_runner import mocha_json_to_junit_xml + + mocha_json = json.dumps( + { + "stats": {"tests": 1, "passes": 0, "failures": 0, "pending": 1, "duration": 0}, + "tests": [ + { + "title": "should be pending", + "fullTitle": "todo should be pending", + "duration": 0, + "pending": True, + "err": {}, + }, + ], + "passes": [], + "failures": [], + "pending": [], + } + ) + + with tempfile.TemporaryDirectory() as tmpdir: + output_file = 
Path(tmpdir) / "results.xml" + mocha_json_to_junit_xml(mocha_json, output_file) + + assert output_file.exists() + xml = JUnitXml.fromfile(str(output_file)) + # Should parse without error and have the test + total_tests = sum(suite.tests for suite in xml) + assert total_tests == 1 + + def test_invalid_json_writes_empty_xml(self): + from codeflash.languages.javascript.mocha_runner import mocha_json_to_junit_xml + + with tempfile.TemporaryDirectory() as tmpdir: + output_file = Path(tmpdir) / "results.xml" + mocha_json_to_junit_xml("not valid json {{{", output_file) + + assert output_file.exists() + content = output_file.read_text() + assert " None: original_helper = helper_file.read_text("utf-8") js_support = get_language_support("javascript") - functions = js_support.discover_functions(main_file) + functions = js_support.discover_functions(main_file.read_text(encoding="utf-8"), main_file) target = None for func in functions: if func.function_name == "calculateStats": @@ -135,7 +135,7 @@ def test_js_replcement() -> None: project_root_path=root_dir, pytest_cmd="jest", ) - func_optimizer = FunctionOptimizer( + func_optimizer = JavaScriptFunctionOptimizer( function_to_optimize=func, test_cfg=test_config, aiservice_client=MagicMock() ) result = func_optimizer.get_code_optimization_context() diff --git a/tests/test_languages/test_python_support.py b/tests/test_languages/test_python_support.py index e4755cf8e..bd1106ab4 100644 --- a/tests/test_languages/test_python_support.py +++ b/tests/test_languages/test_python_support.py @@ -49,7 +49,7 @@ def add(a, b): """) f.flush() - functions = python_support.discover_functions(Path(f.name)) + functions = python_support.discover_functions(Path(f.name).read_text(encoding="utf-8"), Path(f.name)) assert len(functions) == 1 assert functions[0].function_name == "add" @@ -70,7 +70,7 @@ def multiply(a, b): """) f.flush() - functions = python_support.discover_functions(Path(f.name)) + functions = 
python_support.discover_functions(Path(f.name).read_text(encoding="utf-8"), Path(f.name)) assert len(functions) == 3 names = {func.function_name for func in functions} @@ -88,7 +88,7 @@ def without_return(): """) f.flush() - functions = python_support.discover_functions(Path(f.name)) + functions = python_support.discover_functions(Path(f.name).read_text(encoding="utf-8"), Path(f.name)) # Only the function with return should be discovered assert len(functions) == 1 @@ -107,7 +107,7 @@ def multiply(self, a, b): """) f.flush() - functions = python_support.discover_functions(Path(f.name)) + functions = python_support.discover_functions(Path(f.name).read_text(encoding="utf-8"), Path(f.name)) assert len(functions) == 2 for func in functions: @@ -126,7 +126,7 @@ def sync_function(): """) f.flush() - functions = python_support.discover_functions(Path(f.name)) + functions = python_support.discover_functions(Path(f.name).read_text(encoding="utf-8"), Path(f.name)) assert len(functions) == 2 @@ -137,7 +137,7 @@ def sync_function(): assert sync_func.is_async is False def test_discover_nested_functions(self, python_support): - """Test discovering nested functions.""" + """Test that nested functions are excluded — only top-level and class-level functions are discovered.""" with tempfile.NamedTemporaryFile(suffix=".py", mode="w", delete=False) as f: f.write(""" def outer(): @@ -147,18 +147,11 @@ def inner(): """) f.flush() - functions = python_support.discover_functions(Path(f.name)) + functions = python_support.discover_functions(Path(f.name).read_text(encoding="utf-8"), Path(f.name)) - # Both outer and inner should be discovered - assert len(functions) == 2 - names = {func.function_name for func in functions} - assert names == {"outer", "inner"} - - # Inner should have outer as parent - inner = next(f for f in functions if f.function_name == "inner") - assert len(inner.parents) == 1 - assert inner.parents[0].name == "outer" - assert inner.parents[0].type == "FunctionDef" + # 
Only outer should be discovered; inner is nested and skipped + assert len(functions) == 1 + assert functions[0].function_name == "outer" def test_discover_static_method(self, python_support): """Test discovering static methods.""" @@ -171,7 +164,7 @@ def helper(x): """) f.flush() - functions = python_support.discover_functions(Path(f.name)) + functions = python_support.discover_functions(Path(f.name).read_text(encoding="utf-8"), Path(f.name)) assert len(functions) == 1 assert functions[0].function_name == "helper" @@ -190,7 +183,9 @@ def sync_func(): f.flush() criteria = FunctionFilterCriteria(include_async=False) - functions = python_support.discover_functions(Path(f.name), criteria) + functions = python_support.discover_functions( + Path(f.name).read_text(encoding="utf-8"), Path(f.name), criteria + ) assert len(functions) == 1 assert functions[0].function_name == "sync_func" @@ -209,7 +204,9 @@ def method(self): f.flush() criteria = FunctionFilterCriteria(include_methods=False) - functions = python_support.discover_functions(Path(f.name), criteria) + functions = python_support.discover_functions( + Path(f.name).read_text(encoding="utf-8"), Path(f.name), criteria + ) assert len(functions) == 1 assert functions[0].function_name == "standalone" @@ -227,7 +224,7 @@ def func2(): """) f.flush() - functions = python_support.discover_functions(Path(f.name)) + functions = python_support.discover_functions(Path(f.name).read_text(encoding="utf-8"), Path(f.name)) func1 = next(f for f in functions if f.function_name == "func1") func2 = next(f for f in functions if f.function_name == "func2") @@ -237,18 +234,20 @@ def func2(): assert func2.starting_line == 4 assert func2.ending_line == 7 - def test_discover_invalid_file_returns_empty(self, python_support): - """Test that invalid Python file returns empty list.""" + def test_discover_invalid_file_raises(self, python_support): + """Test that invalid Python file raises a parse error.""" + from libcst._exceptions import 
ParserSyntaxError + with tempfile.NamedTemporaryFile(suffix=".py", mode="w", delete=False) as f: f.write("this is not valid python {{{{") f.flush() - functions = python_support.discover_functions(Path(f.name)) - assert functions == [] + with pytest.raises(ParserSyntaxError): + python_support.discover_functions(Path(f.name).read_text(encoding="utf-8"), Path(f.name)) - def test_discover_nonexistent_file_returns_empty(self, python_support): - """Test that nonexistent file returns empty list.""" - functions = python_support.discover_functions(Path("/nonexistent/file.py")) + def test_discover_empty_source_returns_empty(self, python_support): + """Test that empty source returns empty list.""" + functions = python_support.discover_functions("", Path("/nonexistent/file.py")) assert functions == [] @@ -500,7 +499,7 @@ def test_discover_and_replace_workflow(self, python_support): file_path = Path(f.name) # Discover - functions = python_support.discover_functions(file_path) + functions = python_support.discover_functions(file_path.read_text(encoding="utf-8"), file_path) assert len(functions) == 1 func = functions[0] assert func.function_name == "fibonacci" @@ -541,7 +540,7 @@ def standalone(): f.flush() file_path = Path(f.name) - functions = python_support.discover_functions(file_path) + functions = python_support.discover_functions(file_path.read_text(encoding="utf-8"), file_path) # Should find 4 functions assert len(functions) == 4 @@ -584,12 +583,7 @@ def process(value): return helper_function(value) + 1 """) - func = FunctionToOptimize( - function_name="helper_function", - file_path=source_file, - starting_line=1, - ending_line=2, - ) + func = FunctionToOptimize(function_name="helper_function", file_path=source_file, starting_line=1, ending_line=2) refs = python_support.find_references(func, project_root=tmp_path) @@ -646,12 +640,7 @@ def test_find_references_no_references(python_support, tmp_path): return 42 """) - func = FunctionToOptimize( - 
function_name="isolated_function", - file_path=source_file, - starting_line=1, - ending_line=2, - ) + func = FunctionToOptimize(function_name="isolated_function", file_path=source_file, starting_line=1, ending_line=2) refs = python_support.find_references(func, project_root=tmp_path) @@ -668,10 +657,7 @@ def test_find_references_nonexistent_function(python_support, tmp_path): """) func = FunctionToOptimize( - function_name="nonexistent_function", - file_path=source_file, - starting_line=1, - ending_line=2, + function_name="nonexistent_function", file_path=source_file, starting_line=1, ending_line=2 ) refs = python_support.find_references(func, project_root=tmp_path) diff --git a/tests/test_languages/test_treesitter_utils.py b/tests/test_languages/test_treesitter_utils.py index 15dd1219b..8774fa0e3 100644 --- a/tests/test_languages/test_treesitter_utils.py +++ b/tests/test_languages/test_treesitter_utils.py @@ -821,3 +821,153 @@ def test_strapi_traverse_entity_pattern(self, ts_analyzer): # createVisitorUtils is NOT wrapped, so not exported via default is_utils_exported, _ = ts_analyzer.is_function_exported(code, "createVisitorUtils") assert is_utils_exported is False + + +class TestNamedExportConstArrow: + """Tests for const arrow functions exported via named export clause. + + Pattern: const joinBy = () => {}; export { joinBy }; + This is common in TypeScript codebases like Strapi. 
+ """ + + @pytest.fixture + def ts_analyzer(self): + return TreeSitterAnalyzer(TreeSitterLanguage.TYPESCRIPT) + + @pytest.fixture + def js_analyzer(self): + return TreeSitterAnalyzer(TreeSitterLanguage.JAVASCRIPT) + + def test_named_export_const_arrow(self, ts_analyzer): + """const arrow function exported via separate export { } clause.""" + code = """const joinBy = (arr: string[], separator: string) => { + return arr.join(separator); +}; + +export { joinBy };""" + + functions = ts_analyzer.find_functions(code) + joinBy = next((f for f in functions if f.name == "joinBy"), None) + assert joinBy is not None + assert joinBy.is_exported is True + + def test_named_export_alias(self, ts_analyzer): + """export { foo as bar } — foo should be marked as exported.""" + code = """const foo = (x: number) => { + return x * 2; +}; + +export { foo as bar };""" + + functions = ts_analyzer.find_functions(code) + foo = next((f for f in functions if f.name == "foo"), None) + assert foo is not None + assert foo.is_exported is True + + def test_named_export_multiple(self, ts_analyzer): + """Multiple functions in a single export clause.""" + code = """const a = () => { return 1; }; +const b = () => { return 2; }; +const c = () => { return 3; }; + +export { a, b };""" + + functions = ts_analyzer.find_functions(code) + a = next((f for f in functions if f.name == "a"), None) + b = next((f for f in functions if f.name == "b"), None) + c = next((f for f in functions if f.name == "c"), None) + assert a is not None and a.is_exported is True + assert b is not None and b.is_exported is True + assert c is not None and c.is_exported is False + + def test_named_export_function_declaration(self, js_analyzer): + """Regular function declarations exported via export { }.""" + code = """function processData(data) { + return data; +} + +export { processData };""" + + functions = js_analyzer.find_functions(code) + f = next((f for f in functions if f.name == "processData"), None) + assert f is not None + 
assert f.is_exported is True + + def test_is_function_exported_with_named_export(self, ts_analyzer): + """is_function_exported should detect named export clause.""" + code = """const joinBy = (arr: string[], separator: string) => { + return arr.join(separator); +}; + +export { joinBy };""" + + is_exported, name = ts_analyzer.is_function_exported(code, "joinBy") + assert is_exported is True + + +class TestCjsReexportObjectMethods: + """Tests for CJS re-export of object containing methods. + + Pattern: const utils = { match() {} }; module.exports = utils; + This is common in Node.js libraries like Moleculer. + """ + + @pytest.fixture + def js_analyzer(self): + return TreeSitterAnalyzer(TreeSitterLanguage.JAVASCRIPT) + + def test_cjs_reexport_object_methods(self, js_analyzer): + """module.exports = varName where varName is object with methods.""" + code = """const utils = { + match(text, pattern) { + return text.match(pattern); + }, + slugify(str) { + return str.toLowerCase(); + } +}; + +module.exports = utils;""" + + is_exported, name = js_analyzer.is_function_exported(code, "match") + assert is_exported is True + + is_exported2, _ = js_analyzer.is_function_exported(code, "slugify") + assert is_exported2 is True + + def test_cjs_reexport_shorthand_props(self, js_analyzer): + """module.exports = varName where object has shorthand properties.""" + code = """function match(text, pattern) { + return text.match(pattern); +} + +const utils = { match }; +module.exports = utils;""" + + is_exported, _ = js_analyzer.is_function_exported(code, "match") + assert is_exported is True + + def test_cjs_reexport_pair_props(self, js_analyzer): + """module.exports = varName where object has key: value pairs.""" + code = """function myMatch(text, pattern) { + return text.match(pattern); +} + +const utils = { match: myMatch }; +module.exports = utils;""" + + is_exported, _ = js_analyzer.is_function_exported(code, "match") + assert is_exported is True + + def 
test_cjs_reexport_nonexistent_prop(self, js_analyzer): + """A function not in the re-exported object should not be exported.""" + code = """function helper() { return 1; } + +const utils = { + match(text) { return text; } +}; + +module.exports = utils;""" + + is_exported, _ = js_analyzer.is_function_exported(code, "helper") + assert is_exported is False diff --git a/tests/test_languages/test_typescript_code_extraction.py b/tests/test_languages/test_typescript_code_extraction.py index b344a2492..4089049ed 100644 --- a/tests/test_languages/test_typescript_code_extraction.py +++ b/tests/test_languages/test_typescript_code_extraction.py @@ -13,7 +13,7 @@ import pytest -from codeflash.languages.base import FunctionInfo, Language, ParentInfo +from codeflash.languages.base import Language from codeflash.languages.javascript.support import TypeScriptSupport @@ -126,14 +126,13 @@ def test_extract_simple_function(self, ts_support): f.flush() file_path = Path(f.name) - functions = ts_support.discover_functions(file_path) + source = file_path.read_text(encoding="utf-8") + functions = ts_support.discover_functions(source, file_path) assert len(functions) == 1 assert functions[0].function_name == "add" # Extract code context - code_context = ts_support.extract_code_context( - functions[0], file_path.parent, file_path.parent - ) + code_context = ts_support.extract_code_context(functions[0], file_path.parent, file_path.parent) # Verify extracted code is valid assert ts_support.validate_syntax(code_context.target_code) is True @@ -164,14 +163,13 @@ def test_extract_async_function_with_template_literal(self, ts_support): f.flush() file_path = Path(f.name) - functions = ts_support.discover_functions(file_path) + source = file_path.read_text(encoding="utf-8") + functions = ts_support.discover_functions(source, file_path) assert len(functions) == 1 assert functions[0].function_name == "execMongoEval" # Extract code context - code_context = ts_support.extract_code_context( - 
functions[0], file_path.parent, file_path.parent - ) + code_context = ts_support.extract_code_context(functions[0], file_path.parent, file_path.parent) # Verify extracted code is valid assert ts_support.validate_syntax(code_context.target_code) is True @@ -215,14 +213,13 @@ def test_extract_function_with_complex_try_catch(self, ts_support): f.flush() file_path = Path(f.name) - functions = ts_support.discover_functions(file_path) + source = file_path.read_text(encoding="utf-8") + functions = ts_support.discover_functions(source, file_path) assert len(functions) == 1 assert functions[0].function_name == "figureOutContentsPath" # Extract code context - code_context = ts_support.extract_code_context( - functions[0], file_path.parent, file_path.parent - ) + code_context = ts_support.extract_code_context(functions[0], file_path.parent, file_path.parent) # Verify extracted code is valid assert ts_support.validate_syntax(code_context.target_code) is True @@ -246,12 +243,11 @@ def test_extracted_code_includes_imports(self, ts_support): f.flush() file_path = Path(f.name) - functions = ts_support.discover_functions(file_path) + source = file_path.read_text(encoding="utf-8") + functions = ts_support.discover_functions(source, file_path) assert len(functions) == 1 - code_context = ts_support.extract_code_context( - functions[0], file_path.parent, file_path.parent - ) + code_context = ts_support.extract_code_context(functions[0], file_path.parent, file_path.parent) # Check that imports are captured assert len(code_context.imports) > 0 @@ -278,12 +274,11 @@ def test_extracted_code_includes_global_variables(self, ts_support): f.flush() file_path = Path(f.name) - functions = ts_support.discover_functions(file_path) + source = file_path.read_text(encoding="utf-8") + functions = ts_support.discover_functions(source, file_path) assert len(functions) == 1 - code_context = ts_support.extract_code_context( - functions[0], file_path.parent, file_path.parent - ) + code_context = 
ts_support.extract_code_context(functions[0], file_path.parent, file_path.parent) # Verify extracted code is valid assert ts_support.validate_syntax(code_context.target_code) is True @@ -324,7 +319,8 @@ def test_private_helper_method_inside_class_wrapper(self, ts_support): file_path = Path(f.name) # Discover the 'post' method - functions = ts_support.discover_functions(file_path) + source = file_path.read_text(encoding="utf-8") + functions = ts_support.discover_functions(source, file_path) post_method = None for func in functions: if func.function_name == "post": @@ -334,9 +330,7 @@ def test_private_helper_method_inside_class_wrapper(self, ts_support): assert post_method is not None, "post method should be discovered" # Extract code context - code_context = ts_support.extract_code_context( - post_method, file_path.parent, file_path.parent - ) + code_context = ts_support.extract_code_context(post_method, file_path.parent, file_path.parent) # The extracted code should be syntactically valid assert ts_support.validate_syntax(code_context.target_code) is True, ( @@ -352,9 +346,7 @@ def test_private_helper_method_inside_class_wrapper(self, ts_support): # Check that addEndpoint appears BEFORE the closing brace of the class class_end_index = code_context.target_code.rfind("}") add_endpoint_index = code_context.target_code.find("addEndpoint") - assert add_endpoint_index < class_end_index, ( - "addEndpoint should be inside the class wrapper" - ) + assert add_endpoint_index < class_end_index, "addEndpoint should be inside the class wrapper" def test_multiple_private_helpers_inside_class(self, ts_support): """Test that multiple private helpers are all included inside the class.""" @@ -386,7 +378,8 @@ def test_multiple_private_helpers_inside_class(self, ts_support): file_path = Path(f.name) # Discover the 'addRoute' method - functions = ts_support.discover_functions(file_path) + source = file_path.read_text(encoding="utf-8") + functions = ts_support.discover_functions(source, 
file_path) add_route_method = None for func in functions: if func.function_name == "addRoute": @@ -395,9 +388,7 @@ def test_multiple_private_helpers_inside_class(self, ts_support): assert add_route_method is not None - code_context = ts_support.extract_code_context( - add_route_method, file_path.parent, file_path.parent - ) + code_context = ts_support.extract_code_context(add_route_method, file_path.parent, file_path.parent) # Should be valid TypeScript assert ts_support.validate_syntax(code_context.target_code) is True @@ -424,7 +415,8 @@ def test_same_class_helpers_filtered_from_helper_list(self, ts_support): f.flush() file_path = Path(f.name) - functions = ts_support.discover_functions(file_path) + source = file_path.read_text(encoding="utf-8") + functions = ts_support.discover_functions(source, file_path) add_method = None for func in functions: if func.function_name == "add": @@ -433,18 +425,14 @@ def test_same_class_helpers_filtered_from_helper_list(self, ts_support): assert add_method is not None - code_context = ts_support.extract_code_context( - add_method, file_path.parent, file_path.parent - ) + code_context = ts_support.extract_code_context(add_method, file_path.parent, file_path.parent) # 'compute' should be in target_code (inside class) assert "compute" in code_context.target_code # 'compute' should NOT be in helper_functions (would be duplicate) helper_names = [h.name for h in code_context.helper_functions] - assert "compute" not in helper_names, ( - "Same-class helper 'compute' should not be in helper_functions list" - ) + assert "compute" not in helper_names, "Same-class helper 'compute' should not be in helper_functions list" class TestTypeScriptLanguageProperties: diff --git a/tests/test_languages/test_typescript_e2e.py b/tests/test_languages/test_typescript_e2e.py index 87dc81269..49cf07a63 100644 --- a/tests/test_languages/test_typescript_e2e.py +++ b/tests/test_languages/test_typescript_e2e.py @@ -124,10 +124,8 @@ def 
test_extract_code_context_for_typescript(self, ts_project_dir): """Test extracting code context for a TypeScript function.""" skip_if_ts_not_supported() from codeflash.discovery.functions_to_optimize import find_all_functions_in_file - from codeflash.languages import current as lang_current - from codeflash.languages.python.context.code_context_extractor import get_code_optimization_context - - lang_current._current_language = Language.TYPESCRIPT + from codeflash.languages import get_language_support + from codeflash.languages.javascript.function_optimizer import JavaScriptFunctionOptimizer fib_file = ts_project_dir / "fibonacci.ts" if not fib_file.exists(): @@ -139,7 +137,11 @@ def test_extract_code_context_for_typescript(self, ts_project_dir): fib_func = next((f for f in func_list if f.function_name == "fibonacci"), None) assert fib_func is not None - context = get_code_optimization_context(fib_func, ts_project_dir) + ts_support = get_language_support(Language.TYPESCRIPT) + code_context = ts_support.extract_code_context(fib_func, ts_project_dir, ts_project_dir) + context = JavaScriptFunctionOptimizer._build_optimization_context( + code_context, fib_file, "typescript", ts_project_dir + ) assert context.read_writable_code is not None # Critical: language should be "typescript", not "javascript" diff --git a/tests/test_languages/test_vitest_e2e.py b/tests/test_languages/test_vitest_e2e.py index fc3c285a4..03d57dfe3 100644 --- a/tests/test_languages/test_vitest_e2e.py +++ b/tests/test_languages/test_vitest_e2e.py @@ -118,11 +118,9 @@ def test_extract_code_context_for_typescript(self, vitest_project_dir): """Test extracting code context for a TypeScript function.""" skip_if_js_not_supported() from codeflash.discovery.functions_to_optimize import find_all_functions_in_file - from codeflash.languages import current as lang_current + from codeflash.languages import get_language_support from codeflash.languages.base import Language - from 
codeflash.languages.python.context.code_context_extractor import get_code_optimization_context - - lang_current._current_language = Language.TYPESCRIPT + from codeflash.languages.javascript.function_optimizer import JavaScriptFunctionOptimizer fib_file = vitest_project_dir / "fibonacci.ts" if not fib_file.exists(): @@ -134,7 +132,11 @@ def test_extract_code_context_for_typescript(self, vitest_project_dir): fib_func = next((f for f in func_list if f.function_name == "fibonacci"), None) assert fib_func is not None - context = get_code_optimization_context(fib_func, vitest_project_dir) + ts_support = get_language_support(Language.TYPESCRIPT) + code_context = ts_support.extract_code_context(fib_func, vitest_project_dir, vitest_project_dir) + context = JavaScriptFunctionOptimizer._build_optimization_context( + code_context, fib_file, "typescript", vitest_project_dir + ) assert context.read_writable_code is not None assert context.read_writable_code.language == "typescript" diff --git a/tests/test_lru_cache_clear.py b/tests/test_lru_cache_clear.py index 43c08a0ed..83ab3ccfe 100644 --- a/tests/test_lru_cache_clear.py +++ b/tests/test_lru_cache_clear.py @@ -1,10 +1,18 @@ +import os +import sys import types from typing import NoReturn +from unittest.mock import patch import pytest from _pytest.config import Config -from codeflash.verification.pytest_plugin import PytestLoops +from codeflash.verification.pytest_plugin import ( + InvalidTimeParameterError, + PytestLoops, + get_runtime_from_stdout, + should_stop, +) @pytest.fixture @@ -15,39 +23,301 @@ def pytest_loops_instance(pytestconfig: Config) -> PytestLoops: @pytest.fixture def mock_item() -> type: class MockItem: - def __init__(self, function: types.FunctionType) -> None: + def __init__(self, function: types.FunctionType, name: str = "test_func", cls: type = None, module: types.ModuleType = None) -> None: self.function = function + self.name = name + self.cls = cls + self.module = module return MockItem -def 
create_mock_module(module_name: str, source_code: str) -> types.ModuleType: +def create_mock_module(module_name: str, source_code: str, register: bool = False) -> types.ModuleType: module = types.ModuleType(module_name) exec(source_code, module.__dict__) # noqa: S102 + if register: + sys.modules[module_name] = module return module -def test_clear_lru_caches_function(pytest_loops_instance: PytestLoops, mock_item: type) -> None: - source_code = """ +def mock_session(**kwargs): + """Create a mock session with config options.""" + defaults = { + "codeflash_hours": 0, + "codeflash_minutes": 0, + "codeflash_seconds": 10, + "codeflash_delay": 0.0, + "codeflash_loops": 1, + "codeflash_min_loops": 1, + "codeflash_max_loops": 100_000, + } + defaults.update(kwargs) + + class Option: + pass + + option = Option() + for k, v in defaults.items(): + setattr(option, k, v) + + class MockConfig: + pass + + config = MockConfig() + config.option = option + + class MockSession: + pass + + session = MockSession() + session.config = config + return session + + +# --- get_runtime_from_stdout --- + + +class TestGetRuntimeFromStdout: + def test_valid_payload(self) -> None: + assert get_runtime_from_stdout("!######test_func:12345######!") == 12345 + + def test_valid_payload_with_surrounding_text(self) -> None: + assert get_runtime_from_stdout("some output\n!######mod.func:99999######!\nmore output") == 99999 + + def test_empty_string(self) -> None: + assert get_runtime_from_stdout("") is None + + def test_no_markers(self) -> None: + assert get_runtime_from_stdout("just some output") is None + + def test_missing_end_marker(self) -> None: + assert get_runtime_from_stdout("!######test:123") is None + + def test_missing_start_marker(self) -> None: + assert get_runtime_from_stdout("test:123######!") is None + + def test_no_colon_in_payload(self) -> None: + assert get_runtime_from_stdout("!######nocolon######!") is None + + def test_non_integer_value(self) -> None: + assert 
get_runtime_from_stdout("!######test:notanumber######!") is None + + def test_multiple_markers_uses_last(self) -> None: + stdout = "!######first:111######! middle !######second:222######!" + assert get_runtime_from_stdout(stdout) == 222 + + +# --- should_stop --- + + +class TestShouldStop: + def test_not_enough_data_for_window(self) -> None: + assert should_stop([100, 100], window=5, min_window_size=3) is False + + def test_below_min_window_size(self) -> None: + assert should_stop([100, 100], window=2, min_window_size=5) is False + + def test_stable_runtimes_stops(self) -> None: + runtimes = [1000000] * 10 + assert should_stop(runtimes, window=5, min_window_size=3, center_rel_tol=0.01, spread_rel_tol=0.01) is True + + def test_unstable_runtimes_continues(self) -> None: + runtimes = [100, 200, 100, 200, 100] + assert should_stop(runtimes, window=5, min_window_size=3, center_rel_tol=0.01, spread_rel_tol=0.01) is False + + def test_zero_runtimes_raises(self) -> None: + # All-zero runtimes cause ZeroDivisionError in median check. + # In practice the caller guards with best_runtime_until_now > 0. 
+ runtimes = [0, 0, 0, 0, 0] + with pytest.raises(ZeroDivisionError): + should_stop(runtimes, window=5, min_window_size=3) + + def test_even_window_median(self) -> None: + # Even window: median is average of two middle values + runtimes = [1000, 1000, 1001, 1001] + assert should_stop(runtimes, window=4, min_window_size=2, center_rel_tol=0.01, spread_rel_tol=0.01) is True + + def test_centered_but_spread_too_large(self) -> None: + # All close to median but spread exceeds tolerance + runtimes = [1000, 1050, 1000, 1050, 1000] + assert should_stop(runtimes, window=5, min_window_size=3, center_rel_tol=0.1, spread_rel_tol=0.001) is False + + +# --- _set_nodeid --- + + +class TestSetNodeid: + def test_appends_count_to_plain_nodeid(self, pytest_loops_instance: PytestLoops) -> None: + result = pytest_loops_instance._set_nodeid("test_module.py::test_func", 3) # noqa: SLF001 + assert result == "test_module.py::test_func[ 3 ]" + assert os.environ["CODEFLASH_LOOP_INDEX"] == "3" + + def test_replaces_existing_count(self, pytest_loops_instance: PytestLoops) -> None: + result = pytest_loops_instance._set_nodeid("test_module.py::test_func[ 1 ]", 5) # noqa: SLF001 + assert result == "test_module.py::test_func[ 5 ]" + + def test_replaces_only_loop_pattern(self, pytest_loops_instance: PytestLoops) -> None: + # Parametrize brackets like [param0] should not be replaced + result = pytest_loops_instance._set_nodeid("test_mod.py::test_func[param0]", 2) # noqa: SLF001 + assert result == "test_mod.py::test_func[param0][ 2 ]" + + +# --- _get_total_time --- + + +class TestGetTotalTime: + def test_seconds_only(self, pytest_loops_instance: PytestLoops) -> None: + session = mock_session(codeflash_seconds=30) + assert pytest_loops_instance._get_total_time(session) == 30 # noqa: SLF001 + + def test_mixed_units(self, pytest_loops_instance: PytestLoops) -> None: + session = mock_session(codeflash_hours=1, codeflash_minutes=30, codeflash_seconds=45) + assert 
pytest_loops_instance._get_total_time(session) == 3600 + 1800 + 45 # noqa: SLF001 + + def test_zero_time_is_valid(self, pytest_loops_instance: PytestLoops) -> None: + session = mock_session(codeflash_hours=0, codeflash_minutes=0, codeflash_seconds=0) + assert pytest_loops_instance._get_total_time(session) == 0 # noqa: SLF001 + + def test_negative_time_raises(self, pytest_loops_instance: PytestLoops) -> None: + session = mock_session(codeflash_hours=0, codeflash_minutes=0, codeflash_seconds=-1) + with pytest.raises(InvalidTimeParameterError): + pytest_loops_instance._get_total_time(session) # noqa: SLF001 + + +# --- _timed_out --- + + +class TestTimedOut: + def test_exceeds_max_loops(self, pytest_loops_instance: PytestLoops) -> None: + session = mock_session(codeflash_max_loops=10, codeflash_min_loops=1, codeflash_seconds=9999) + assert pytest_loops_instance._timed_out(session, start_time=0, count=10) is True # noqa: SLF001 + + def test_below_min_loops_never_times_out(self, pytest_loops_instance: PytestLoops) -> None: + session = mock_session(codeflash_max_loops=100_000, codeflash_min_loops=50, codeflash_seconds=0) + # Even with 0 seconds budget, count < min_loops means not timed out + assert pytest_loops_instance._timed_out(session, start_time=0, count=5) is False # noqa: SLF001 + + def test_above_min_loops_and_time_exceeded(self, pytest_loops_instance: PytestLoops) -> None: + session = mock_session(codeflash_max_loops=100_000, codeflash_min_loops=1, codeflash_seconds=1) + # start_time far in the past → time exceeded + assert pytest_loops_instance._timed_out(session, start_time=0, count=2) is True # noqa: SLF001 + + +# --- _get_delay_time --- + + +class TestGetDelayTime: + def test_returns_configured_delay(self, pytest_loops_instance: PytestLoops) -> None: + session = mock_session(codeflash_delay=0.5) + assert pytest_loops_instance._get_delay_time(session) == 0.5 # noqa: SLF001 + + +# --- pytest_runtest_logreport --- + + +class TestRunTestLogReport: + def 
test_skipped_when_stability_check_disabled(self, pytestconfig: Config) -> None: + instance = PytestLoops(pytestconfig) + instance.enable_stability_check = False + + class MockReport: + when = "call" + passed = True + capstdout = "!######func:12345######!" + nodeid = "test::func" + + instance.pytest_runtest_logreport(MockReport()) + assert instance.runtime_data_by_test_case == {} + + def test_records_runtime_on_passed_call(self, pytestconfig: Config) -> None: + instance = PytestLoops(pytestconfig) + instance.enable_stability_check = True + + class MockReport: + when = "call" + passed = True + capstdout = "!######func:12345######!" + nodeid = "test::func [ 1 ]" + + instance.pytest_runtest_logreport(MockReport()) + assert "test::func" in instance.runtime_data_by_test_case + assert instance.runtime_data_by_test_case["test::func"] == [12345] + + def test_ignores_non_call_phase(self, pytestconfig: Config) -> None: + instance = PytestLoops(pytestconfig) + instance.enable_stability_check = True + + class MockReport: + when = "setup" + passed = True + capstdout = "!######func:12345######!" 
+ nodeid = "test::func" + + instance.pytest_runtest_logreport(MockReport()) + assert instance.runtime_data_by_test_case == {} + + +# --- pytest_runtest_setup / teardown --- + + +class TestRunTestSetupTeardown: + def test_setup_sets_env_vars(self, pytest_loops_instance: PytestLoops, mock_item: type) -> None: + module = types.ModuleType("my_test_module") + + class MyTestClass: + pass + + item = mock_item(lambda: None, name="test_something[param1]", cls=MyTestClass, module=module) + pytest_loops_instance.pytest_runtest_setup(item) + + assert os.environ["CODEFLASH_TEST_MODULE"] == "my_test_module" + assert os.environ["CODEFLASH_TEST_CLASS"] == "MyTestClass" + assert os.environ["CODEFLASH_TEST_FUNCTION"] == "test_something" + + def test_setup_no_class(self, pytest_loops_instance: PytestLoops, mock_item: type) -> None: + module = types.ModuleType("my_test_module") + item = mock_item(lambda: None, name="test_plain", cls=None, module=module) + pytest_loops_instance.pytest_runtest_setup(item) + + assert os.environ["CODEFLASH_TEST_CLASS"] == "" + + def test_teardown_clears_env_vars(self, pytest_loops_instance: PytestLoops, mock_item: type) -> None: + os.environ["CODEFLASH_TEST_MODULE"] = "leftover" + os.environ["CODEFLASH_TEST_CLASS"] = "leftover" + os.environ["CODEFLASH_TEST_FUNCTION"] = "leftover" + + item = mock_item(lambda: None) + pytest_loops_instance.pytest_runtest_teardown(item) + + assert "CODEFLASH_TEST_MODULE" not in os.environ + assert "CODEFLASH_TEST_CLASS" not in os.environ + assert "CODEFLASH_TEST_FUNCTION" not in os.environ + + +# --- _clear_lru_caches --- + + +class TestClearLruCaches: + def test_clears_lru_cached_function(self, pytest_loops_instance: PytestLoops, mock_item: type) -> None: + source_code = """ import functools @functools.lru_cache(maxsize=None) def my_func(x): return x * 2 -my_func(10) # miss the cache -my_func(10) # hit the cache +my_func(10) +my_func(10) """ - mock_module = create_mock_module("test_module_func", source_code) - item = 
mock_item(mock_module.my_func) - pytest_loops_instance._clear_lru_caches(item) # noqa: SLF001 - assert mock_module.my_func.cache_info().hits == 0 - assert mock_module.my_func.cache_info().misses == 0 - assert mock_module.my_func.cache_info().currsize == 0 + mock_module = create_mock_module("test_module_func", source_code) + item = mock_item(mock_module.my_func) + pytest_loops_instance._clear_lru_caches(item) # noqa: SLF001 + assert mock_module.my_func.cache_info().hits == 0 + assert mock_module.my_func.cache_info().misses == 0 + assert mock_module.my_func.cache_info().currsize == 0 - -def test_clear_lru_caches_class_method(pytest_loops_instance: PytestLoops, mock_item: type) -> None: - source_code = """ + def test_clears_class_method_cache(self, pytest_loops_instance: PytestLoops, mock_item: type) -> None: + source_code = """ import functools class MyClass: @@ -56,32 +326,137 @@ def my_method(self, x): return x * 3 obj = MyClass() -obj.my_method(5) # Pre-populate the cache -obj.my_method(5) # Hit the cache +obj.my_method(5) +obj.my_method(5) # """ - mock_module = create_mock_module("test_module_class", source_code) - item = mock_item(mock_module.MyClass.my_method) - pytest_loops_instance._clear_lru_caches(item) # noqa: SLF001 - assert mock_module.MyClass.my_method.cache_info().hits == 0 - assert mock_module.MyClass.my_method.cache_info().misses == 0 - assert mock_module.MyClass.my_method.cache_info().currsize == 0 + mock_module = create_mock_module("test_module_class", source_code) + item = mock_item(mock_module.MyClass.my_method) + pytest_loops_instance._clear_lru_caches(item) # noqa: SLF001 + assert mock_module.MyClass.my_method.cache_info().hits == 0 + assert mock_module.MyClass.my_method.cache_info().misses == 0 + assert mock_module.MyClass.my_method.cache_info().currsize == 0 + + def test_handles_exception_in_cache_clear(self, pytest_loops_instance: PytestLoops, mock_item: type) -> None: + class BrokenCache: + def cache_clear(self) -> NoReturn: + msg = "Cache 
clearing failed!" + raise ValueError(msg) + item = mock_item(BrokenCache()) + pytest_loops_instance._clear_lru_caches(item) # noqa: SLF001 -def test_clear_lru_caches_exception_handling(pytest_loops_instance: PytestLoops, mock_item: type) -> None: - """Test that exceptions during clearing are handled.""" + def test_handles_no_cache(self, pytest_loops_instance: PytestLoops, mock_item: type) -> None: + def no_cache_func(x: int) -> int: + return x - class BrokenCache: - def cache_clear(self) -> NoReturn: - msg = "Cache clearing failed!" - raise ValueError(msg) + item = mock_item(no_cache_func) + pytest_loops_instance._clear_lru_caches(item) # noqa: SLF001 - item = mock_item(BrokenCache()) - pytest_loops_instance._clear_lru_caches(item) # noqa: SLF001 + def test_clears_module_level_caches_via_sys_modules(self, pytest_loops_instance: PytestLoops, mock_item: type) -> None: + module_name = "_cf_test_module_scan" + source_code = """ +import functools + +@functools.lru_cache(maxsize=None) +def cached_a(x): + return x + 1 +@functools.lru_cache(maxsize=None) +def cached_b(x): + return x + 2 + +def plain_func(x): + return x + +cached_a(1) +cached_a(1) +cached_b(2) +cached_b(2) +""" + mock_module = create_mock_module(module_name, source_code, register=True) + try: + item = mock_item(mock_module.plain_func) + pytest_loops_instance._clear_lru_caches(item) # noqa: SLF001 + + assert mock_module.cached_a.cache_info().currsize == 0 + assert mock_module.cached_b.cache_info().currsize == 0 + finally: + sys.modules.pop(module_name, None) + + def test_skips_protected_modules(self, pytest_loops_instance: PytestLoops, mock_item: type) -> None: + module_name = "_cf_test_protected" + source_code = """ +import functools + +@functools.lru_cache(maxsize=None) +def user_func(x): + return x +""" + mock_module = create_mock_module(module_name, source_code, register=True) + try: + mock_module.os_exists = os.path.exists + item = mock_item(mock_module.user_func) + 
pytest_loops_instance._clear_lru_caches(item) # noqa: SLF001 + finally: + sys.modules.pop(module_name, None) + + def test_caches_scan_result(self, pytest_loops_instance: PytestLoops, mock_item: type) -> None: + module_name = "_cf_test_cache_reuse" + source_code = """ +import functools + +@functools.lru_cache(maxsize=None) +def cached_fn(x): + return x +""" + mock_module = create_mock_module(module_name, source_code, register=True) + try: + item = mock_item(mock_module.cached_fn) + + pytest_loops_instance._clear_lru_caches(item) # noqa: SLF001 + assert module_name in pytest_loops_instance._module_clearables # noqa: SLF001 + + mock_module.cached_fn(42) + assert mock_module.cached_fn.cache_info().currsize == 1 + + with patch("codeflash.verification.pytest_plugin.inspect.getmembers") as mock_getmembers: + pytest_loops_instance._clear_lru_caches(item) # noqa: SLF001 + mock_getmembers.assert_not_called() + + assert mock_module.cached_fn.cache_info().currsize == 0 + finally: + sys.modules.pop(module_name, None) + + def test_handles_wrapped_function(self, pytest_loops_instance: PytestLoops, mock_item: type) -> None: + module_name = "_cf_test_wrapped" + source_code = """ +import functools + +@functools.lru_cache(maxsize=None) +def inner(x): + return x + +def wrapper(x): + return inner(x) + +wrapper.__wrapped__ = inner +wrapper.__module__ = __name__ + +inner(1) +inner(1) +""" + mock_module = create_mock_module(module_name, source_code, register=True) + try: + item = mock_item(mock_module.wrapper) + pytest_loops_instance._clear_lru_caches(item) # noqa: SLF001 + assert mock_module.inner.cache_info().currsize == 0 + finally: + sys.modules.pop(module_name, None) -def test_clear_lru_caches_no_cache(pytest_loops_instance: PytestLoops, mock_item: type) -> None: - def no_cache_func(x: int) -> int: - return x + def test_handles_function_without_module(self, pytest_loops_instance: PytestLoops, mock_item: type) -> None: + def func() -> None: + pass - item = mock_item(no_cache_func) - 
pytest_loops_instance._clear_lru_caches(item) # noqa: SLF001 + func.__module__ = None # type: ignore[assignment] + item = mock_item(func) + pytest_loops_instance._clear_lru_caches(item) # noqa: SLF001 diff --git a/tests/test_multi_file_code_replacement.py b/tests/test_multi_file_code_replacement.py index 37dce437f..82256001a 100644 --- a/tests/test_multi_file_code_replacement.py +++ b/tests/test_multi_file_code_replacement.py @@ -1,8 +1,8 @@ from pathlib import Path from codeflash.discovery.functions_to_optimize import FunctionToOptimize -from codeflash.models.models import CodeOptimizationContext, CodeStringsMarkdown, FunctionParent -from codeflash.optimization.function_optimizer import FunctionOptimizer +from codeflash.languages.python.function_optimizer import PythonFunctionOptimizer +from codeflash.models.models import CodeOptimizationContext, CodeStringsMarkdown from codeflash.verification.verification_utils import TestConfig @@ -106,7 +106,7 @@ def _get_string_usage(text: str) -> Usage: test_framework="pytest", pytest_cmd="pytest", ) - func_optimizer = FunctionOptimizer(function_to_optimize=func, test_cfg=test_config) + func_optimizer = PythonFunctionOptimizer(function_to_optimize=func, test_cfg=test_config) code_context: CodeOptimizationContext = func_optimizer.get_code_optimization_context().unwrap() original_helper_code: dict[Path, str] = {} @@ -165,82 +165,3 @@ def _estimate_string_tokens(content: str | Sequence[UserContent]) -> int: assert new_code.rstrip() == original_main.rstrip() # No Change assert new_helper_code.rstrip() == expected_helper.rstrip() - - -def test_optimized_code_for_different_file_not_applied_to_current_file() -> None: - """Test that optimized code for one file is not incorrectly applied to a different file. - - This reproduces the bug from PR #1309 where optimized code for `formatter.py` - was incorrectly applied to `support.py`, causing `normalize_java_code` to be - duplicated. 
The bug was in `get_optimized_code_for_module` which had a fallback - that applied a single code block to ANY file being processed. - - The scenario: - 1. `support.py` imports `normalize_java_code` from `formatter.py` - 2. AI returns optimized code with a single code block for `formatter.py` - 3. BUG: When processing `support.py`, the fallback applies `formatter.py`'s code - 4. EXPECTED: No code should be applied to `support.py` since the paths don't match - """ - from codeflash.languages.python.static_analysis.code_extractor import find_preexisting_objects - from codeflash.languages.python.static_analysis.code_replacer import replace_function_definitions_in_module - from codeflash.models.models import CodeStringsMarkdown - - root_dir = Path(__file__).parent.parent.resolve() - - # Create support.py - the file that imports the helper - support_file = (root_dir / "code_to_optimize/temp_pr1309_support.py").resolve() - original_support = '''from temp_pr1309_formatter import normalize_java_code - - -class JavaSupport: - """Support class for Java operations.""" - - def normalize_code(self, source: str) -> str: - """Normalize code for deduplication.""" - return normalize_java_code(source) -''' - support_file.write_text(original_support, encoding="utf-8") - - # AI returns optimized code for formatter.py ONLY (with explicit path) - # This simulates what happens when the AI optimizes the helper function - optimized_markdown = '''```python:code_to_optimize/temp_pr1309_formatter.py -def normalize_java_code(source: str) -> str: - """Optimized version with fast-path.""" - if not source: - return "" - return "\\n".join(line.strip() for line in source.splitlines() if line.strip()) -``` -''' - - preexisting_objects = find_preexisting_objects(original_support) - - # Process support.py with the optimized code that's meant for formatter.py - replace_function_definitions_in_module( - function_names=["JavaSupport.normalize_code"], - 
optimized_code=CodeStringsMarkdown.parse_markdown_code(optimized_markdown), - module_abspath=support_file, - preexisting_objects=preexisting_objects, - project_root_path=root_dir, - ) - - new_support_code = support_file.read_text(encoding="utf-8") - - # Cleanup - support_file.unlink(missing_ok=True) - - # CRITICAL: support.py should NOT have normalize_java_code defined! - # The optimized code was for formatter.py, not support.py. - def_count = new_support_code.count("def normalize_java_code") - assert def_count == 0, ( - f"Bug: normalize_java_code was incorrectly added to support.py!\n" - f"Found {def_count} definition(s) when there should be 0.\n" - f"The optimized code was for formatter.py, not support.py.\n" - f"Resulting code:\n{new_support_code}" - ) - - # The file should remain unchanged since no code matched its path - assert new_support_code.strip() == original_support.strip(), ( - f"support.py was modified when it shouldn't have been.\n" - f"Original:\n{original_support}\n" - f"New:\n{new_support_code}" - ) diff --git a/tests/test_parse_line_profile_test_output.py b/tests/test_parse_line_profile_test_output.py index b694b39a7..e9ce3ef00 100644 --- a/tests/test_parse_line_profile_test_output.py +++ b/tests/test_parse_line_profile_test_output.py @@ -4,7 +4,7 @@ from codeflash.languages import set_current_language from codeflash.languages.base import Language -from codeflash.verification.parse_line_profile_test_output import parse_line_profile_results +from codeflash.languages.java.line_profiler import JavaLineProfiler def test_parse_line_profile_results_non_python_java_json(): @@ -42,7 +42,7 @@ def test_parse_line_profile_results_non_python_java_json(): } profile_file.write_text(json.dumps(profile_data), encoding="utf-8") - results, _ = parse_line_profile_results(profile_file) + results = JavaLineProfiler.parse_results(profile_file) assert results["unit"] == 1e-9 assert results["str_out"] == ( @@ -56,4 +56,3 @@ def 
test_parse_line_profile_results_non_python_java_json(): ) assert (source_file.as_posix(), 3, "Util.java") in results["timings"] assert results["timings"][(source_file.as_posix(), 3, "Util.java")] == [(3, 6, 1000), (4, 6, 2000)] - diff --git a/tests/test_pickle_patcher.py b/tests/test_pickle_patcher.py index 127fe8a07..9d05da9d8 100644 --- a/tests/test_pickle_patcher.py +++ b/tests/test_pickle_patcher.py @@ -397,8 +397,8 @@ def test_run_and_parse_picklepatch() -> None: test_env=test_env, test_files=func_optimizer.test_files, optimization_iteration=0, - min_outer_loops=1, - max_outer_loops=1, + pytest_min_loops=1, + pytest_max_loops=1, testing_time=1.0, ) assert len(test_results_unused_socket) == 1 @@ -428,8 +428,8 @@ def bubble_sort_with_unused_socket(data_container): test_env=test_env, test_files=func_optimizer.test_files, optimization_iteration=0, - min_outer_loops=1, - max_outer_loops=1, + pytest_min_loops=1, + pytest_max_loops=1, testing_time=1.0, ) assert len(optimized_test_results_unused_socket) == 1 @@ -483,8 +483,8 @@ def bubble_sort_with_unused_socket(data_container): test_env=test_env, test_files=func_optimizer.test_files, optimization_iteration=0, - min_outer_loops=1, - max_outer_loops=1, + pytest_min_loops=1, + pytest_max_loops=1, testing_time=1.0, ) assert len(test_results_used_socket) == 1 @@ -518,8 +518,8 @@ def bubble_sort_with_used_socket(data_container): test_env=test_env, test_files=func_optimizer.test_files, optimization_iteration=0, - min_outer_loops=1, - max_outer_loops=1, + pytest_min_loops=1, + pytest_max_loops=1, testing_time=1.0, ) assert len(test_results_used_socket) == 1 diff --git a/tests/test_test_runner.py b/tests/test_test_runner.py index 51d13b18b..eb20812d6 100644 --- a/tests/test_test_runner.py +++ b/tests/test_test_runner.py @@ -3,9 +3,9 @@ from pathlib import Path from codeflash.code_utils.code_utils import ImportErrorPattern +from codeflash.languages import current_language_support from codeflash.models.models import TestFile, 
TestFiles, TestType from codeflash.verification.parse_test_output import parse_test_xml -from codeflash.verification.test_runner import run_behavioral_tests from codeflash.verification.verification_utils import TestConfig @@ -48,8 +48,8 @@ def test_sort(self): test_files=[TestFile(instrumented_behavior_file_path=test_file_path, test_type=TestType.EXISTING_UNIT_TEST)] ) test_file_path.write_text(code, encoding="utf-8") - result_file, process, _, _ = run_behavioral_tests( - test_files, test_framework=config.test_framework, cwd=Path(config.project_root_path), test_env=test_env + result_file, process, _, _ = current_language_support().run_behavioral_tests( + test_paths=test_files, test_env=test_env, cwd=Path(config.project_root_path) ) results = parse_test_xml(result_file, test_files, config, process) assert results[0].did_pass, "Test did not pass as expected" @@ -89,13 +89,8 @@ def test_sort(): test_files=[TestFile(instrumented_behavior_file_path=test_file_path, test_type=TestType.EXISTING_UNIT_TEST)] ) test_file_path.write_text(code, encoding="utf-8") - result_file, process, _, _ = run_behavioral_tests( - test_files, - test_framework=config.test_framework, - cwd=Path(config.project_root_path), - test_env=test_env, - pytest_timeout=1, - pytest_target_runtime_seconds=1, + result_file, process, _, _ = current_language_support().run_behavioral_tests( + test_paths=test_files, test_env=test_env, cwd=Path(config.project_root_path), timeout=1 ) results = parse_test_xml( test_xml_file_path=result_file, test_files=test_files, test_config=config, run_result=process @@ -136,13 +131,8 @@ def test_sort(): test_files=[TestFile(instrumented_behavior_file_path=test_file_path, test_type=TestType.EXISTING_UNIT_TEST)] ) test_file_path.write_text(code, encoding="utf-8") - result_file, process, _, _ = run_behavioral_tests( - test_files, - test_framework=config.test_framework, - cwd=Path(config.project_root_path), - test_env=test_env, - pytest_timeout=1, - pytest_target_runtime_seconds=1, 
+ result_file, process, _, _ = current_language_support().run_behavioral_tests( + test_paths=test_files, test_env=test_env, cwd=Path(config.project_root_path), timeout=1 ) results = parse_test_xml( test_xml_file_path=result_file, test_files=test_files, test_config=config, run_result=process diff --git a/tests/test_unused_helper_revert.py b/tests/test_unused_helper_revert.py index 2a4efae3d..ba5740d5a 100644 --- a/tests/test_unused_helper_revert.py +++ b/tests/test_unused_helper_revert.py @@ -10,8 +10,8 @@ detect_unused_helper_functions, revert_unused_helper_functions, ) +from codeflash.languages.python.function_optimizer import PythonFunctionOptimizer from codeflash.models.models import CodeStringsMarkdown -from codeflash.optimization.function_optimizer import FunctionOptimizer from codeflash.verification.verification_utils import TestConfig @@ -83,7 +83,7 @@ def helper_function_2(x): ) # Create function optimizer - optimizer = FunctionOptimizer( + optimizer = PythonFunctionOptimizer( function_to_optimize=function_to_optimize, test_cfg=test_cfg, function_to_optimize_source_code=main_file.read_text(), @@ -194,7 +194,7 @@ def helper_function_2(x): ) # Create function optimizer - optimizer = FunctionOptimizer( + optimizer = PythonFunctionOptimizer( function_to_optimize=function_to_optimize, test_cfg=test_cfg, function_to_optimize_source_code=main_file.read_text(), @@ -269,7 +269,7 @@ def helper_function_2(x): ) # Create function optimizer - optimizer = FunctionOptimizer( + optimizer = PythonFunctionOptimizer( function_to_optimize=function_to_optimize, test_cfg=test_cfg, function_to_optimize_source_code=main_file.read_text(), @@ -365,7 +365,7 @@ def entrypoint_function(n): ) # Create function optimizer - optimizer = FunctionOptimizer( + optimizer = PythonFunctionOptimizer( function_to_optimize=function_to_optimize, test_cfg=test_cfg, function_to_optimize_source_code=main_file.read_text(), @@ -559,7 +559,7 @@ def helper_method_2(self, x): ) # Create function optimizer - 
optimizer = FunctionOptimizer( + optimizer = PythonFunctionOptimizer( function_to_optimize=function_to_optimize, test_cfg=test_cfg, function_to_optimize_source_code=main_file.read_text(), @@ -710,7 +710,7 @@ def process_data(self, n): ) # Create function optimizer - optimizer = FunctionOptimizer( + optimizer = PythonFunctionOptimizer( function_to_optimize=function_to_optimize, test_cfg=test_cfg, function_to_optimize_source_code=main_file.read_text(), @@ -895,7 +895,7 @@ def local_helper(self, x): ) # Create function optimizer - optimizer = FunctionOptimizer( + optimizer = PythonFunctionOptimizer( function_to_optimize=function_to_optimize, test_cfg=test_cfg, function_to_optimize_source_code=main_file.read_text(), @@ -1051,7 +1051,7 @@ def entrypoint_function(n): ) # Create function optimizer - optimizer = FunctionOptimizer( + optimizer = PythonFunctionOptimizer( function_to_optimize=function_to_optimize, test_cfg=test_cfg, function_to_optimize_source_code=main_file.read_text(), @@ -1215,7 +1215,7 @@ def entrypoint_function(n): ) # Create function optimizer - optimizer = FunctionOptimizer( + optimizer = PythonFunctionOptimizer( function_to_optimize=function_to_optimize, test_cfg=test_cfg, function_to_optimize_source_code=main_file.read_text(), @@ -1442,7 +1442,7 @@ def calculate_class(cls, n): ) # Create function optimizer - optimizer = FunctionOptimizer( + optimizer = PythonFunctionOptimizer( function_to_optimize=function_to_optimize, test_cfg=test_cfg, function_to_optimize_source_code=main_file.read_text(), @@ -1576,7 +1576,7 @@ async def async_entrypoint(n): ) # Create function optimizer - optimizer = FunctionOptimizer( + optimizer = PythonFunctionOptimizer( function_to_optimize=function_to_optimize, test_cfg=test_cfg, function_to_optimize_source_code=main_file.read_text(), @@ -1664,7 +1664,7 @@ def sync_entrypoint(n): function_to_optimize = FunctionToOptimize(file_path=main_file, function_name="sync_entrypoint", parents=[]) # Create function optimizer - optimizer 
= FunctionOptimizer( + optimizer = PythonFunctionOptimizer( function_to_optimize=function_to_optimize, test_cfg=test_cfg, function_to_optimize_source_code=main_file.read_text(), @@ -1773,7 +1773,7 @@ async def mixed_entrypoint(n): ) # Create function optimizer - optimizer = FunctionOptimizer( + optimizer = PythonFunctionOptimizer( function_to_optimize=function_to_optimize, test_cfg=test_cfg, function_to_optimize_source_code=main_file.read_text(), @@ -1874,7 +1874,7 @@ def sync_helper_method(self, x): ) # Create function optimizer - optimizer = FunctionOptimizer( + optimizer = PythonFunctionOptimizer( function_to_optimize=function_to_optimize, test_cfg=test_cfg, function_to_optimize_source_code=main_file.read_text(), @@ -1960,7 +1960,7 @@ async def async_entrypoint(n): ) # Create function optimizer - optimizer = FunctionOptimizer( + optimizer = PythonFunctionOptimizer( function_to_optimize=function_to_optimize, test_cfg=test_cfg, function_to_optimize_source_code=main_file.read_text(), @@ -2039,7 +2039,7 @@ def gcd_recursive(a: int, b: int) -> int: function_to_optimize = FunctionToOptimize(file_path=main_file, function_name="gcd_recursive", parents=[]) # Create function optimizer - optimizer = FunctionOptimizer( + optimizer = PythonFunctionOptimizer( function_to_optimize=function_to_optimize, test_cfg=test_cfg, function_to_optimize_source_code=main_file.read_text(), @@ -2152,7 +2152,7 @@ async def async_entrypoint_with_generators(n): ) # Create function optimizer - optimizer = FunctionOptimizer( + optimizer = PythonFunctionOptimizer( function_to_optimize=function_to_optimize, test_cfg=test_cfg, function_to_optimize_source_code=main_file.read_text(), diff --git a/tests/test_worktree.py b/tests/test_worktree.py index 9bc66691e..75de860fd 100644 --- a/tests/test_worktree.py +++ b/tests/test_worktree.py @@ -61,9 +61,9 @@ def test_mirror_paths_for_worktree_mode(monkeypatch: pytest.MonkeyPatch): assert optimizer.args.test_project_root == worktree_dir assert 
optimizer.args.module_root == worktree_dir / "codeflash" # tests_root is configured as "codeflash" in pyproject.toml - assert optimizer.args.tests_root == worktree_dir / "codeflash" + assert optimizer.args.tests_root == worktree_dir / "tests" assert optimizer.args.file == worktree_dir / "codeflash/optimization/optimizer.py" - assert optimizer.test_cfg.tests_root == worktree_dir / "codeflash" + assert optimizer.test_cfg.tests_root == worktree_dir / "tests" assert optimizer.test_cfg.project_root_path == worktree_dir # same as project_root assert optimizer.test_cfg.tests_project_rootdir == worktree_dir # same as test_project_root diff --git a/uv.lock b/uv.lock index 321dab6b8..ad21f63b7 100644 --- a/uv.lock +++ b/uv.lock @@ -209,15 +209,15 @@ wheels = [ [[package]] name = "blessed" -version = "1.30.0" +version = "1.32.0" source = { registry = "https://pypi.org/simple" } dependencies = [ { name = "jinxed", marker = "sys_platform == 'win32'" }, { name = "wcwidth" }, ] -sdist = { url = "https://files.pythonhosted.org/packages/dd/19/e926a0dbbf93c7aeb15d4dfff0d0e3de02653b3ba540b687307d0819c1ff/blessed-1.30.0.tar.gz", hash = "sha256:4d547019d7b40fc5420ea2ba2bc180fdccc31d6715298e2b49ffa7b020d44667", size = 13948932, upload-time = "2026-02-06T19:40:23.541Z" } +sdist = { url = "https://files.pythonhosted.org/packages/e6/0c/658dea9ba35fcea19e6feaa8ba0d2dbf8cac9aeaa1f9ab1d77d36f534757/blessed-1.32.0.tar.gz", hash = "sha256:d4090e9908cf86bea15a5275845c8bfc69c4c34eb6d22de07c65d26f1e54a918", size = 13979999, upload-time = "2026-02-28T20:59:01.815Z" } wheels = [ - { url = "https://files.pythonhosted.org/packages/64/b0/8d87c7c8015ce8d4b2c5ee7a82a1d955f10138322c4f0cb387d7d2c1b2e7/blessed-1.30.0-py3-none-any.whl", hash = "sha256:4061a9f10dd22798716c2548ba36385af6a29d856c897f367c6ccc927e0b3a5a", size = 98399, upload-time = "2026-02-06T19:40:20.815Z" }, + { url = 
"https://files.pythonhosted.org/packages/f6/47/de8f185a1f537fdb5117fcde7050472b8cde3561179e9a68e1a566a6e6c6/blessed-1.32.0-py3-none-any.whl", hash = "sha256:c6fdc18838491ebc7f0460234917eff4e172074934f5f80e82672417bd74be70", size = 111172, upload-time = "2026-02-28T20:58:58.59Z" }, ] [[package]] @@ -269,11 +269,11 @@ wheels = [ [[package]] name = "certifi" -version = "2026.1.4" +version = "2026.2.25" source = { registry = "https://pypi.org/simple" } -sdist = { url = "https://files.pythonhosted.org/packages/e0/2d/a891ca51311197f6ad14a7ef42e2399f36cf2f9bd44752b3dc4eab60fdc5/certifi-2026.1.4.tar.gz", hash = "sha256:ac726dd470482006e014ad384921ed6438c457018f4b3d204aea4281258b2120", size = 154268, upload-time = "2026-01-04T02:42:41.825Z" } +sdist = { url = "https://files.pythonhosted.org/packages/af/2d/7bf41579a8986e348fa033a31cdd0e4121114f6bce2457e8876010b092dd/certifi-2026.2.25.tar.gz", hash = "sha256:e887ab5cee78ea814d3472169153c2d12cd43b14bd03329a39a9c6e2e80bfba7", size = 155029, upload-time = "2026-02-25T02:54:17.342Z" } wheels = [ - { url = "https://files.pythonhosted.org/packages/e6/ad/3cc14f097111b4de0040c83a525973216457bbeeb63739ef1ed275c1c021/certifi-2026.1.4-py3-none-any.whl", hash = "sha256:9943707519e4add1115f44c2bc244f782c0249876bf51b6599fee1ffbedd685c", size = 152900, upload-time = "2026-01-04T02:42:40.15Z" }, + { url = "https://files.pythonhosted.org/packages/9a/3c/c17fb3ca2d9c3acff52e30b309f538586f9f5b9c9cf454f3845fc9af4881/certifi-2026.2.25-py3-none-any.whl", hash = "sha256:027692e4402ad994f1c42e52a4997a9763c646b73e4096e4d5d6db8af1d6f0fa", size = 153684, upload-time = "2026-02-25T02:54:15.766Z" }, ] [[package]] @@ -436,14 +436,14 @@ dependencies = [ { name = "crosshair-tool", marker = "python_full_version < '3.15'" }, { name = "dill" }, { name = "filelock", version = "3.19.1", source = { registry = "https://pypi.org/simple" }, marker = "python_full_version < '3.10'" }, - { name = "filelock", version = "3.24.3", source = { registry = 
"https://pypi.org/simple" }, marker = "python_full_version >= '3.10'" }, + { name = "filelock", version = "3.25.0", source = { registry = "https://pypi.org/simple" }, marker = "python_full_version >= '3.10'" }, { name = "gitpython" }, { name = "humanize", version = "4.13.0", source = { registry = "https://pypi.org/simple" }, marker = "python_full_version < '3.10'" }, { name = "humanize", version = "4.15.0", source = { registry = "https://pypi.org/simple" }, marker = "python_full_version >= '3.10'" }, { name = "inquirer", version = "3.4.0", source = { registry = "https://pypi.org/simple" }, marker = "python_full_version < '3.9.2'" }, { name = "inquirer", version = "3.4.1", source = { registry = "https://pypi.org/simple" }, marker = "python_full_version >= '3.9.2'" }, { name = "isort", version = "6.1.0", source = { registry = "https://pypi.org/simple" }, marker = "python_full_version < '3.10'" }, - { name = "isort", version = "8.0.0", source = { registry = "https://pypi.org/simple" }, marker = "python_full_version >= '3.10'" }, + { name = "isort", version = "8.0.1", source = { registry = "https://pypi.org/simple" }, marker = "python_full_version >= '3.10'" }, { name = "jedi" }, { name = "junitparser" }, { name = "libcst" }, @@ -453,7 +453,7 @@ dependencies = [ { name = "platformdirs", version = "4.4.0", source = { registry = "https://pypi.org/simple" }, marker = "python_full_version < '3.10'" }, { name = "platformdirs", version = "4.9.2", source = { registry = "https://pypi.org/simple" }, marker = "python_full_version >= '3.10'" }, { name = "posthog", version = "6.9.3", source = { registry = "https://pypi.org/simple" }, marker = "python_full_version < '3.10'" }, - { name = "posthog", version = "7.9.3", source = { registry = "https://pypi.org/simple" }, marker = "python_full_version >= '3.10'" }, + { name = "posthog", version = "7.9.4", source = { registry = "https://pypi.org/simple" }, marker = "python_full_version >= '3.10'" }, { name = "pydantic" }, { name = 
"pygls" }, { name = "pytest", version = "8.4.2", source = { registry = "https://pypi.org/simple" }, marker = "python_full_version < '3.10'" }, @@ -494,8 +494,10 @@ dev = [ { name = "types-openpyxl" }, { name = "types-pexpect" }, { name = "types-pygments" }, - { name = "types-python-dateutil" }, - { name = "types-regex" }, + { name = "types-python-dateutil", version = "2.9.0.20260124", source = { registry = "https://pypi.org/simple" }, marker = "python_full_version < '3.10'" }, + { name = "types-python-dateutil", version = "2.9.0.20260302", source = { registry = "https://pypi.org/simple" }, marker = "python_full_version >= '3.10'" }, + { name = "types-regex", version = "2026.1.15.20260116", source = { registry = "https://pypi.org/simple" }, marker = "python_full_version < '3.10'" }, + { name = "types-regex", version = "2026.2.28.20260301", source = { registry = "https://pypi.org/simple" }, marker = "python_full_version >= '3.10'" }, { name = "types-requests" }, { name = "types-six" }, { name = "types-unidiff" }, @@ -507,7 +509,7 @@ tests = [ { name = "eval-type-backport" }, { name = "jax", version = "0.4.30", source = { registry = "https://pypi.org/simple" }, marker = "python_full_version < '3.10'" }, { name = "jax", version = "0.6.2", source = { registry = "https://pypi.org/simple" }, marker = "python_full_version == '3.10.*'" }, - { name = "jax", version = "0.9.0.1", source = { registry = "https://pypi.org/simple" }, marker = "python_full_version >= '3.11'" }, + { name = "jax", version = "0.9.1", source = { registry = "https://pypi.org/simple" }, marker = "python_full_version >= '3.11'" }, { name = "numba", version = "0.60.0", source = { registry = "https://pypi.org/simple" }, marker = "python_full_version < '3.10'" }, { name = "numba", version = "0.64.0", source = { registry = "https://pypi.org/simple" }, marker = "python_full_version >= '3.10'" }, { name = "numpy", version = "2.0.2", source = { registry = "https://pypi.org/simple" }, marker = 
"python_full_version < '3.10'" }, @@ -520,7 +522,7 @@ tests = [ { name = "pyrsistent" }, { name = "scipy", version = "1.13.1", source = { registry = "https://pypi.org/simple" }, marker = "python_full_version < '3.10'" }, { name = "scipy", version = "1.15.3", source = { registry = "https://pypi.org/simple" }, marker = "python_full_version == '3.10.*'" }, - { name = "scipy", version = "1.17.0", source = { registry = "https://pypi.org/simple" }, marker = "python_full_version >= '3.11'" }, + { name = "scipy", version = "1.17.1", source = { registry = "https://pypi.org/simple" }, marker = "python_full_version >= '3.11'" }, { name = "tensorflow", marker = "python_full_version >= '3.10'" }, { name = "torch", version = "2.8.0", source = { registry = "https://pypi.org/simple" }, marker = "python_full_version < '3.10'" }, { name = "torch", version = "2.10.0", source = { registry = "https://pypi.org/simple" }, marker = "python_full_version >= '3.10'" }, @@ -947,10 +949,10 @@ wheels = [ [[package]] name = "cuda-pathfinder" -version = "1.3.4" +version = "1.4.0" source = { registry = "https://pypi.org/simple" } wheels = [ - { url = "https://files.pythonhosted.org/packages/b8/5e/db279a3bfbd18d59d0598922a3b3c1454908d0969e8372260afec9736376/cuda_pathfinder-1.3.4-py3-none-any.whl", hash = "sha256:fb983f6e0d43af27ef486e14d5989b5f904ef45cedf40538bfdcbffa6bb01fb2", size = 30878, upload-time = "2026-02-11T18:50:31.008Z" }, + { url = "https://files.pythonhosted.org/packages/ff/60/d8f1dbfb7f06b94c662e98c95189e6f39b817da638bc8fcea0d003f89e5d/cuda_pathfinder-1.4.0-py3-none-any.whl", hash = "sha256:437079ca59e7b61ae439ecc501d69ed87b3accc34d58153ef1e54815e2c2e118", size = 38406, upload-time = "2026-02-25T22:13:00.807Z" }, ] [[package]] @@ -1070,7 +1072,7 @@ wheels = [ [[package]] name = "filelock" -version = "3.24.3" +version = "3.25.0" source = { registry = "https://pypi.org/simple" } resolution-markers = [ "python_full_version >= '3.14' and sys_platform == 'win32'", @@ -1087,9 +1089,9 @@ 
resolution-markers = [ "python_full_version == '3.11.*' and sys_platform != 'emscripten' and sys_platform != 'win32'", "python_full_version == '3.10.*'", ] -sdist = { url = "https://files.pythonhosted.org/packages/73/92/a8e2479937ff39185d20dd6a851c1a63e55849e447a55e798cc2e1f49c65/filelock-3.24.3.tar.gz", hash = "sha256:011a5644dc937c22699943ebbfc46e969cdde3e171470a6e40b9533e5a72affa", size = 37935, upload-time = "2026-02-19T00:48:20.543Z" } +sdist = { url = "https://files.pythonhosted.org/packages/77/18/a1fd2231c679dcb9726204645721b12498aeac28e1ad0601038f94b42556/filelock-3.25.0.tar.gz", hash = "sha256:8f00faf3abf9dc730a1ffe9c354ae5c04e079ab7d3a683b7c32da5dd05f26af3", size = 40158, upload-time = "2026-03-01T15:08:45.916Z" } wheels = [ - { url = "https://files.pythonhosted.org/packages/9c/0f/5d0c71a1aefeb08efff26272149e07ab922b64f46c63363756224bd6872e/filelock-3.24.3-py3-none-any.whl", hash = "sha256:426e9a4660391f7f8a810d71b0555bce9008b0a1cc342ab1f6947d37639e002d", size = 24331, upload-time = "2026-02-19T00:48:18.465Z" }, + { url = "https://files.pythonhosted.org/packages/f9/0b/de6f54d4a8bedfe8645c41497f3c18d749f0bd3218170c667bf4b81d0cdd/filelock-3.25.0-py3-none-any.whl", hash = "sha256:5ccf8069f7948f494968fc0713c10e5c182a9c9d9eef3a636307a20c2490f047", size = 26427, upload-time = "2026-03-01T15:08:44.593Z" }, ] [[package]] @@ -1185,73 +1187,73 @@ wheels = [ [[package]] name = "grpcio" -version = "1.78.1" +version = "1.78.0" source = { registry = "https://pypi.org/simple" } dependencies = [ { name = "typing-extensions", marker = "python_full_version >= '3.10'" }, ] -sdist = { url = "https://files.pythonhosted.org/packages/1f/de/de568532d9907552700f80dcec38219d8d298ad9e71f5e0a095abaf2761e/grpcio-1.78.1.tar.gz", hash = "sha256:27c625532d33ace45d57e775edf1982e183ff8641c72e4e91ef7ba667a149d72", size = 12835760, upload-time = "2026-02-20T01:16:10.869Z" } -wheels = [ - { url = 
"https://files.pythonhosted.org/packages/44/30/0534b643dafd54824769d6260b89c71d518e4ef8b5ad16b84d1ae9272978/grpcio-1.78.1-cp310-cp310-linux_armv7l.whl", hash = "sha256:4393bef64cf26dc07cd6f18eaa5170ae4eebaafd4418e7e3a59ca9526a6fa30b", size = 5947661, upload-time = "2026-02-20T01:12:34.922Z" }, - { url = "https://files.pythonhosted.org/packages/4a/f8/f678566655ab822da0f713789555e7eddca7ef93da99f480c63de3aa94b4/grpcio-1.78.1-cp310-cp310-macosx_11_0_universal2.whl", hash = "sha256:917047c19cd120b40aab9a4b8a22e9ce3562f4a1343c0d62b3cd2d5199da3d67", size = 11819948, upload-time = "2026-02-20T01:12:39.709Z" }, - { url = "https://files.pythonhosted.org/packages/ff/0b/a4b4210d946055f4e5a8430f2802202ae8f831b4b00d36d55055c5cf4b6a/grpcio-1.78.1-cp310-cp310-manylinux2014_aarch64.manylinux_2_17_aarch64.whl", hash = "sha256:ff7de398bb3528d44d17e6913a7cfe639e3b15c65595a71155322df16978c5e1", size = 6519850, upload-time = "2026-02-20T01:12:42.715Z" }, - { url = "https://files.pythonhosted.org/packages/ea/d9/a1e657a73000a71fa75ec7140ff3a8dc32eb3427560620e477c6a2735527/grpcio-1.78.1-cp310-cp310-manylinux2014_i686.manylinux_2_17_i686.whl", hash = "sha256:15f6e636d1152667ddb4022b37534c161c8477274edb26a0b65b215dd0a81e97", size = 7198654, upload-time = "2026-02-20T01:12:46.164Z" }, - { url = "https://files.pythonhosted.org/packages/aa/28/a61c5bdf53c1638e657bb5eebb93c789837820e1fdb965145f05eccc2994/grpcio-1.78.1-cp310-cp310-manylinux2014_x86_64.manylinux_2_17_x86_64.whl", hash = "sha256:27b5cb669603efb7883a882275db88b6b5d6b6c9f0267d5846ba8699b7ace338", size = 6727238, upload-time = "2026-02-20T01:12:48.472Z" }, - { url = "https://files.pythonhosted.org/packages/9d/3e/aa143d0687801986a29d85788c96089449f36651cd4e2a493737ae0c5be9/grpcio-1.78.1-cp310-cp310-musllinux_1_2_aarch64.whl", hash = "sha256:86edb3966778fa05bfdb333688fde5dc9079f9e2a9aa6a5c42e9564b7656ba04", size = 7300960, upload-time = "2026-02-20T01:12:51.139Z" }, - { url = 
"https://files.pythonhosted.org/packages/30/d3/53e0f26b46417f28d14b5951fc6a1eff79c08c8a339e967c0a19ec7cf9e9/grpcio-1.78.1-cp310-cp310-musllinux_1_2_i686.whl", hash = "sha256:849cc62eb989bc3be5629d4f3acef79be0d0ff15622201ed251a86d17fef6494", size = 8285274, upload-time = "2026-02-20T01:12:53.315Z" }, - { url = "https://files.pythonhosted.org/packages/29/d0/e0e9fd477ce86c07ed1ed1d5c34790f050b6d58bfde77b02b36e23f8b235/grpcio-1.78.1-cp310-cp310-musllinux_1_2_x86_64.whl", hash = "sha256:9a00992d6fafe19d648b9ccb4952200c50d8e36d0cce8cf026c56ed3fdc28465", size = 7726620, upload-time = "2026-02-20T01:12:56.498Z" }, - { url = "https://files.pythonhosted.org/packages/5e/b5/e138a9f7810d196081b2e047c378ca12358c5906d79c42ddec41bb43d528/grpcio-1.78.1-cp310-cp310-win32.whl", hash = "sha256:f8759a1347f3b4f03d9a9d4ce8f9f31ad5e5d0144ba06ccfb1ffaeb0ba4c1e20", size = 4076778, upload-time = "2026-02-20T01:12:59.098Z" }, - { url = "https://files.pythonhosted.org/packages/4e/95/9b02316b85731df0943a635ca6d02f155f673c4f17e60be0c4892a6eb051/grpcio-1.78.1-cp310-cp310-win_amd64.whl", hash = "sha256:e840405a3f1249509892be2399f668c59b9d492068a2cf326d661a8c79e5e747", size = 4798925, upload-time = "2026-02-20T01:13:03.186Z" }, - { url = "https://files.pythonhosted.org/packages/bf/1e/ad774af3b2c84f49c6d8c4a7bea4c40f02268ea8380630c28777edda463b/grpcio-1.78.1-cp311-cp311-linux_armv7l.whl", hash = "sha256:3a8aa79bc6e004394c0abefd4b034c14affda7b66480085d87f5fbadf43b593b", size = 5951132, upload-time = "2026-02-20T01:13:05.942Z" }, - { url = "https://files.pythonhosted.org/packages/48/9d/ad3c284bedd88c545e20675d98ae904114d8517a71b0efc0901e9166628f/grpcio-1.78.1-cp311-cp311-macosx_11_0_universal2.whl", hash = "sha256:8e1fcb419da5811deb47b7749b8049f7c62b993ba17822e3c7231e3e0ba65b79", size = 11831052, upload-time = "2026-02-20T01:13:09.604Z" }, - { url = 
"https://files.pythonhosted.org/packages/6d/08/20d12865e47242d03c3ade9bb2127f5b4aded964f373284cfb357d47c5ac/grpcio-1.78.1-cp311-cp311-manylinux2014_aarch64.manylinux_2_17_aarch64.whl", hash = "sha256:b071dccac245c32cd6b1dd96b722283b855881ca0bf1c685cf843185f5d5d51e", size = 6524749, upload-time = "2026-02-20T01:13:21.692Z" }, - { url = "https://files.pythonhosted.org/packages/c6/53/a8b72f52b253ec0cfdf88a13e9236a9d717c332b8aa5f0ba9e4699e94b55/grpcio-1.78.1-cp311-cp311-manylinux2014_i686.manylinux_2_17_i686.whl", hash = "sha256:d6fb962947e4fe321eeef3be1ba5ba49d32dea9233c825fcbade8e858c14aaf4", size = 7198995, upload-time = "2026-02-20T01:13:24.275Z" }, - { url = "https://files.pythonhosted.org/packages/13/3c/ac769c8ded1bcb26bb119fb472d3374b481b3cf059a0875db9fc77139c17/grpcio-1.78.1-cp311-cp311-manylinux2014_x86_64.manylinux_2_17_x86_64.whl", hash = "sha256:a6afd191551fd72e632367dfb083e33cd185bf9ead565f2476bba8ab864ae496", size = 6730770, upload-time = "2026-02-20T01:13:26.522Z" }, - { url = "https://files.pythonhosted.org/packages/dc/c3/2275ef4cc5b942314321f77d66179be4097ff484e82ca34bf7baa5b1ddbc/grpcio-1.78.1-cp311-cp311-musllinux_1_2_aarch64.whl", hash = "sha256:b2acd83186305c0802dbc4d81ed0ec2f3e8658d7fde97cfba2f78d7372f05b89", size = 7305036, upload-time = "2026-02-20T01:13:30.923Z" }, - { url = "https://files.pythonhosted.org/packages/91/cb/3c2aa99e12cbbfc72c2ed8aa328e6041709d607d668860380e6cd00ba17d/grpcio-1.78.1-cp311-cp311-musllinux_1_2_i686.whl", hash = "sha256:5380268ab8513445740f1f77bd966d13043d07e2793487e61fd5b5d0935071eb", size = 8288641, upload-time = "2026-02-20T01:13:39.42Z" }, - { url = "https://files.pythonhosted.org/packages/0d/b2/21b89f492260ac645775d9973752ca873acfd0609d6998e9d3065a21ea2f/grpcio-1.78.1-cp311-cp311-musllinux_1_2_x86_64.whl", hash = "sha256:389b77484959bdaad6a2b7dda44d7d1228381dd669a03f5660392aa0e9385b22", size = 7730967, upload-time = "2026-02-20T01:13:41.697Z" }, - { url = 
"https://files.pythonhosted.org/packages/24/03/6b89eddf87fdffb8fa9d37375d44d3a798f4b8116ac363a5f7ca84caa327/grpcio-1.78.1-cp311-cp311-win32.whl", hash = "sha256:9dee66d142f4a8cca36b5b98a38f006419138c3c89e72071747f8fca415a6d8f", size = 4076680, upload-time = "2026-02-20T01:13:43.781Z" }, - { url = "https://files.pythonhosted.org/packages/a7/a8/204460b1bc1dff9862e98f56a2d14be3c4171f929f8eaf8c4517174b4270/grpcio-1.78.1-cp311-cp311-win_amd64.whl", hash = "sha256:43b930cf4f9c4a2262bb3e5d5bc40df426a72538b4f98e46f158b7eb112d2d70", size = 4801074, upload-time = "2026-02-20T01:13:46.315Z" }, - { url = "https://files.pythonhosted.org/packages/ab/ed/d2eb9d27fded1a76b2a80eb9aa8b12101da7e41ce2bac0ad3651e88a14ae/grpcio-1.78.1-cp312-cp312-linux_armv7l.whl", hash = "sha256:41e4605c923e0e9a84a2718e4948a53a530172bfaf1a6d1ded16ef9c5849fca2", size = 5913389, upload-time = "2026-02-20T01:13:49.005Z" }, - { url = "https://files.pythonhosted.org/packages/69/1b/40034e9ab010eeb3fa41ec61d8398c6dbf7062f3872c866b8f72700e2522/grpcio-1.78.1-cp312-cp312-macosx_11_0_universal2.whl", hash = "sha256:39da1680d260c0c619c3b5fa2dc47480ca24d5704c7a548098bca7de7f5dd17f", size = 11811839, upload-time = "2026-02-20T01:13:51.839Z" }, - { url = "https://files.pythonhosted.org/packages/b4/69/fe16ef2979ea62b8aceb3a3f1e7a8bbb8b717ae2a44b5899d5d426073273/grpcio-1.78.1-cp312-cp312-manylinux2014_aarch64.manylinux_2_17_aarch64.whl", hash = "sha256:b5d5881d72a09b8336a8f874784a8eeffacde44a7bc1a148bce5a0243a265ef0", size = 6475805, upload-time = "2026-02-20T01:13:55.423Z" }, - { url = "https://files.pythonhosted.org/packages/5b/1e/069e0a9062167db18446917d7c00ae2e91029f96078a072bedc30aaaa8c3/grpcio-1.78.1-cp312-cp312-manylinux2014_i686.manylinux_2_17_i686.whl", hash = "sha256:888ceb7821acd925b1c90f0cdceaed1386e69cfe25e496e0771f6c35a156132f", size = 7169955, upload-time = "2026-02-20T01:13:59.553Z" }, - { url = 
"https://files.pythonhosted.org/packages/38/fc/44a57e2bb4a755e309ee4e9ed2b85c9af93450b6d3118de7e69410ee05fa/grpcio-1.78.1-cp312-cp312-manylinux2014_x86_64.manylinux_2_17_x86_64.whl", hash = "sha256:8942bdfc143b467c264b048862090c4ba9a0223c52ae28c9ae97754361372e42", size = 6690767, upload-time = "2026-02-20T01:14:02.31Z" }, - { url = "https://files.pythonhosted.org/packages/b8/87/21e16345d4c75046d453916166bc72a3309a382c8e97381ec4b8c1a54729/grpcio-1.78.1-cp312-cp312-musllinux_1_2_aarch64.whl", hash = "sha256:716a544969660ed609164aff27b2effd3ff84e54ac81aa4ce77b1607ca917d22", size = 7266846, upload-time = "2026-02-20T01:14:12.974Z" }, - { url = "https://files.pythonhosted.org/packages/11/df/d6261983f9ca9ef4d69893765007a9a3211b91d9faf85a2591063df381c7/grpcio-1.78.1-cp312-cp312-musllinux_1_2_i686.whl", hash = "sha256:4d50329b081c223d444751076bb5b389d4f06c2b32d51b31a1e98172e6cecfb9", size = 8253522, upload-time = "2026-02-20T01:14:17.407Z" }, - { url = "https://files.pythonhosted.org/packages/de/7c/4f96a0ff113c5d853a27084d7590cd53fdb05169b596ea9f5f27f17e021e/grpcio-1.78.1-cp312-cp312-musllinux_1_2_x86_64.whl", hash = "sha256:7e836778c13ff70edada16567e8da0c431e8818eaae85b80d11c1ba5782eccbb", size = 7698070, upload-time = "2026-02-20T01:14:20.032Z" }, - { url = "https://files.pythonhosted.org/packages/17/3c/7b55c0b5af88fbeb3d0c13e25492d3ace41ac9dbd0f5f8f6c0fb613b6706/grpcio-1.78.1-cp312-cp312-win32.whl", hash = "sha256:07eb016ea7444a22bef465cce045512756956433f54450aeaa0b443b8563b9ca", size = 4066474, upload-time = "2026-02-20T01:14:22.602Z" }, - { url = "https://files.pythonhosted.org/packages/5d/17/388c12d298901b0acf10b612b650692bfed60e541672b1d8965acbf2d722/grpcio-1.78.1-cp312-cp312-win_amd64.whl", hash = "sha256:02b82dcd2fa580f5e82b4cf62ecde1b3c7cc9ba27b946421200706a6e5acaf85", size = 4797537, upload-time = "2026-02-20T01:14:25.444Z" }, - { url = 
"https://files.pythonhosted.org/packages/df/72/754754639cfd16ad04619e1435a518124b2d858e5752225376f9285d4c51/grpcio-1.78.1-cp313-cp313-linux_armv7l.whl", hash = "sha256:2b7ad2981550ce999e25ce3f10c8863f718a352a2fd655068d29ea3fd37b4907", size = 5919437, upload-time = "2026-02-20T01:14:29.403Z" }, - { url = "https://files.pythonhosted.org/packages/5c/84/6267d1266f8bc335d3a8b7ccf981be7de41e3ed8bd3a49e57e588212b437/grpcio-1.78.1-cp313-cp313-macosx_11_0_universal2.whl", hash = "sha256:409bfe22220889b9906739910a0ee4c197a967c21b8dd14b4b06dd477f8819ce", size = 11803701, upload-time = "2026-02-20T01:14:32.624Z" }, - { url = "https://files.pythonhosted.org/packages/f3/56/c9098e8b920a54261cd605bbb040de0cde1ca4406102db0aa2c0b11d1fb4/grpcio-1.78.1-cp313-cp313-manylinux2014_aarch64.manylinux_2_17_aarch64.whl", hash = "sha256:34b6cb16f4b67eeb5206250dc5b4d5e8e3db939535e58efc330e4c61341554bd", size = 6479416, upload-time = "2026-02-20T01:14:35.926Z" }, - { url = "https://files.pythonhosted.org/packages/86/cf/5d52024371ee62658b7ed72480200524087528844ec1b65265bbcd31c974/grpcio-1.78.1-cp313-cp313-manylinux2014_i686.manylinux_2_17_i686.whl", hash = "sha256:39d21fd30d38a5afb93f0e2e71e2ec2bd894605fb75d41d5a40060c2f98f8d11", size = 7174087, upload-time = "2026-02-20T01:14:39.98Z" }, - { url = "https://files.pythonhosted.org/packages/31/e6/5e59551afad4279e27335a6d60813b8aa3ae7b14fb62cea1d329a459c118/grpcio-1.78.1-cp313-cp313-manylinux2014_x86_64.manylinux_2_17_x86_64.whl", hash = "sha256:09fbd4bcaadb6d8604ed1504b0bdf7ac18e48467e83a9d930a70a7fefa27e862", size = 6692881, upload-time = "2026-02-20T01:14:42.466Z" }, - { url = "https://files.pythonhosted.org/packages/db/8f/940062de2d14013c02f51b079eb717964d67d46f5d44f22038975c9d9576/grpcio-1.78.1-cp313-cp313-musllinux_1_2_aarch64.whl", hash = "sha256:db681513a1bdd879c0b24a5a6a70398da5eaaba0e077a306410dc6008426847a", size = 7269092, upload-time = "2026-02-20T01:14:45.826Z" }, - { url = 
"https://files.pythonhosted.org/packages/09/87/9db657a4b5f3b15560ec591db950bc75a1a2f9e07832578d7e2b23d1a7bd/grpcio-1.78.1-cp313-cp313-musllinux_1_2_i686.whl", hash = "sha256:f81816faa426da461e9a597a178832a351d6f1078102590a4b32c77d251b71eb", size = 8252037, upload-time = "2026-02-20T01:14:48.57Z" }, - { url = "https://files.pythonhosted.org/packages/e2/37/b980e0265479ec65e26b6e300a39ceac33ecb3f762c2861d4bac990317cf/grpcio-1.78.1-cp313-cp313-musllinux_1_2_x86_64.whl", hash = "sha256:ffbb760df1cd49e0989f9826b2fd48930700db6846ac171eaff404f3cfbe5c28", size = 7695243, upload-time = "2026-02-20T01:14:51.376Z" }, - { url = "https://files.pythonhosted.org/packages/98/46/5fc42c100ab702fa1ea41a75c890c563c3f96432b4a287d5a6369654f323/grpcio-1.78.1-cp313-cp313-win32.whl", hash = "sha256:1a56bf3ee99af5cf32d469de91bf5de79bdac2e18082b495fc1063ea33f4f2d0", size = 4065329, upload-time = "2026-02-20T01:14:53.952Z" }, - { url = "https://files.pythonhosted.org/packages/b0/da/806d60bb6611dfc16cf463d982bd92bd8b6bd5f87dfac66b0a44dfe20995/grpcio-1.78.1-cp313-cp313-win_amd64.whl", hash = "sha256:8991c2add0d8505178ff6c3ae54bd9386279e712be82fa3733c54067aae9eda1", size = 4797637, upload-time = "2026-02-20T01:14:57.276Z" }, - { url = "https://files.pythonhosted.org/packages/96/3a/2d2ec4d2ce2eb9d6a2b862630a0d9d4ff4239ecf1474ecff21442a78612a/grpcio-1.78.1-cp314-cp314-linux_armv7l.whl", hash = "sha256:d101fe49b1e0fb4a7aa36ed0c3821a0f67a5956ef572745452d2cd790d723a3f", size = 5920256, upload-time = "2026-02-20T01:15:00.23Z" }, - { url = "https://files.pythonhosted.org/packages/9c/92/dccb7d087a1220ed358753945230c1ddeeed13684b954cb09db6758f1271/grpcio-1.78.1-cp314-cp314-macosx_11_0_universal2.whl", hash = "sha256:5ce1855e8cfc217cdf6bcfe0cf046d7cf81ddcc3e6894d6cfd075f87a2d8f460", size = 11813749, upload-time = "2026-02-20T01:15:03.312Z" }, - { url = 
"https://files.pythonhosted.org/packages/ef/47/c20e87f87986da9998f30f14776ce27e61f02482a3a030ffe265089342c6/grpcio-1.78.1-cp314-cp314-manylinux2014_aarch64.manylinux_2_17_aarch64.whl", hash = "sha256:cd26048d066b51f39fe9206e2bcc2cea869a5e5b2d13c8d523f4179193047ebd", size = 6488739, upload-time = "2026-02-20T01:15:14.349Z" }, - { url = "https://files.pythonhosted.org/packages/a6/c2/088bd96e255133d7d87c3eed0d598350d16cde1041bdbe2bb065967aaf91/grpcio-1.78.1-cp314-cp314-manylinux2014_i686.manylinux_2_17_i686.whl", hash = "sha256:4b8d7fda614cf2af0f73bbb042f3b7fee2ecd4aea69ec98dbd903590a1083529", size = 7173096, upload-time = "2026-02-20T01:15:17.687Z" }, - { url = "https://files.pythonhosted.org/packages/60/ce/168db121073a03355ce3552b3b1f790b5ded62deffd7d98c5f642b9d3d81/grpcio-1.78.1-cp314-cp314-manylinux2014_x86_64.manylinux_2_17_x86_64.whl", hash = "sha256:656a5bd142caeb8b1efe1fe0b4434ecc7781f44c97cfc7927f6608627cf178c0", size = 6693861, upload-time = "2026-02-20T01:15:20.911Z" }, - { url = "https://files.pythonhosted.org/packages/ae/d0/90b30ec2d9425215dd56922d85a90babbe6ee7e8256ba77d866b9c0d3aba/grpcio-1.78.1-cp314-cp314-musllinux_1_2_aarch64.whl", hash = "sha256:99550e344482e3c21950c034f74668fccf8a546d50c1ecb4f717543bbdc071ba", size = 7278083, upload-time = "2026-02-20T01:15:23.698Z" }, - { url = "https://files.pythonhosted.org/packages/c1/fb/73f9ba0b082bcd385d46205095fd9c917754685885b28fce3741e9f54529/grpcio-1.78.1-cp314-cp314-musllinux_1_2_i686.whl", hash = "sha256:8f27683ca68359bd3f0eb4925824d71e538f84338b3ae337ead2ae43977d7541", size = 8252546, upload-time = "2026-02-20T01:15:26.517Z" }, - { url = "https://files.pythonhosted.org/packages/85/c5/6a89ea3cb5db6c3d9ed029b0396c49f64328c0cf5d2630ffeed25711920a/grpcio-1.78.1-cp314-cp314-musllinux_1_2_x86_64.whl", hash = "sha256:a40515b69ac50792f9b8ead260f194ba2bb3285375b6c40c7ff938f14c3df17d", size = 7696289, upload-time = "2026-02-20T01:15:29.718Z" }, - { url = 
"https://files.pythonhosted.org/packages/3d/05/63a7495048499ef437b4933d32e59b7f737bd5368ad6fb2479e2bd83bf2c/grpcio-1.78.1-cp314-cp314-win32.whl", hash = "sha256:2c473b54ef1618f4fb85e82ff4994de18143b74efc088b91b5a935a3a45042ba", size = 4142186, upload-time = "2026-02-20T01:15:32.786Z" }, - { url = "https://files.pythonhosted.org/packages/1c/ce/adfe7e5f701d503be7778291757452e3fab6b19acf51917c79f5d1cf7f8a/grpcio-1.78.1-cp314-cp314-win_amd64.whl", hash = "sha256:e2a6b33d1050dce2c6f563c5caf7f7cbeebf7fba8cde37ffe3803d50526900d1", size = 4932000, upload-time = "2026-02-20T01:15:36.127Z" }, - { url = "https://files.pythonhosted.org/packages/66/3a/0195cdf3f4fcde27fe82e2ec93913bf6575e7c7449b006bb5eff1fa75faf/grpcio-1.78.1-cp39-cp39-linux_armv7l.whl", hash = "sha256:559f58b6823e1abc38f82e157800aff649146f8906f7998c356cd48ae274d512", size = 5949570, upload-time = "2026-02-20T01:15:39.478Z" }, - { url = "https://files.pythonhosted.org/packages/b4/4a/59741882c26c4d21a9af0b3552262711e3e9b0c4eb67696568366790cfc2/grpcio-1.78.1-cp39-cp39-macosx_11_0_universal2.whl", hash = "sha256:36aeff5ba8aaf70ceb2cbf6cbba9ad6beef715ad744841f3e0cd977ec02e5966", size = 11825370, upload-time = "2026-02-20T01:15:42.432Z" }, - { url = "https://files.pythonhosted.org/packages/31/a9/a62a0b0fe9bc5fe2cce031c0df5746115296ffd35e5eb075f04c2460c378/grpcio-1.78.1-cp39-cp39-manylinux2014_aarch64.manylinux_2_17_aarch64.whl", hash = "sha256:0fa9943d4c7f4a14a9a876153a4e8ee2bb20a410b65c09f31510b2a42271f41b", size = 6521350, upload-time = "2026-02-20T01:15:46.334Z" }, - { url = "https://files.pythonhosted.org/packages/ad/37/39c1ac921df29b530d56a67457195d5883462360771eaf635399390cf680/grpcio-1.78.1-cp39-cp39-manylinux2014_i686.manylinux_2_17_i686.whl", hash = "sha256:75fa92c47d048d696f12b81a775316fca68385ffc6e6cb1ed1d76c8562579f74", size = 7198980, upload-time = "2026-02-20T01:15:49.779Z" }, - { url = 
"https://files.pythonhosted.org/packages/ab/ce/12062fc4d702e274a11bfa6e76ef87d0da38cb49872f62c24dac178aedd5/grpcio-1.78.1-cp39-cp39-manylinux2014_x86_64.manylinux_2_17_x86_64.whl", hash = "sha256:ca6aebae928383e971d5eace4f1a217fd7aadaf18d5ddd3163d80354105e9068", size = 6727055, upload-time = "2026-02-20T01:15:52.38Z" }, - { url = "https://files.pythonhosted.org/packages/ab/28/33a96519cf0315fe065e028a8241e6cf15e175df3a58e902890f112556b3/grpcio-1.78.1-cp39-cp39-musllinux_1_2_aarch64.whl", hash = "sha256:5572c5dd1e43dbb452b466be9794f77e3502bdb6aa6a1a7feca72c98c5085ca7", size = 7298944, upload-time = "2026-02-20T01:15:55.624Z" }, - { url = "https://files.pythonhosted.org/packages/3b/f3/fd420ef1e0fef3202f5a2f83264dc9f030f3547dcc9cf42c53294de33237/grpcio-1.78.1-cp39-cp39-musllinux_1_2_i686.whl", hash = "sha256:e49e720cd6b092504ec7bb2f60eb459aaaf4ce0e5fe20521c201b179e93b5d5d", size = 8285531, upload-time = "2026-02-20T01:15:58.957Z" }, - { url = "https://files.pythonhosted.org/packages/60/43/808c927e5fe8d82eba42c38e6b5bfb53f82c182baee3f35e70992ba05580/grpcio-1.78.1-cp39-cp39-musllinux_1_2_x86_64.whl", hash = "sha256:ebeec1383aed86530a5f39646984e92d6596c050629982ac54eeb4e2f6ead668", size = 7724167, upload-time = "2026-02-20T01:16:02.439Z" }, - { url = "https://files.pythonhosted.org/packages/34/c4/c91ad78f61b274405fcdc2430cf16da8f31cc1ccf82c9e97573c603f5e91/grpcio-1.78.1-cp39-cp39-win32.whl", hash = "sha256:263307118791bc350f4642749a9c8c2d13fec496228ab11070973e568c256bfd", size = 4077361, upload-time = "2026-02-20T01:16:05.053Z" }, - { url = "https://files.pythonhosted.org/packages/a0/4a/bbb2eeb77dab12e1b8d1a3a19af37aa783913b64f67340a9f65bde2bd1af/grpcio-1.78.1-cp39-cp39-win_amd64.whl", hash = "sha256:13937b28986f45fee342806b07c6344db785ad74a549ebcb00c659142973556f", size = 4800213, upload-time = "2026-02-20T01:16:07.75Z" }, +sdist = { url = "https://files.pythonhosted.org/packages/06/8a/3d098f35c143a89520e568e6539cc098fcd294495910e359889ce8741c84/grpcio-1.78.0.tar.gz", 
hash = "sha256:7382b95189546f375c174f53a5fa873cef91c4b8005faa05cc5b3beea9c4f1c5", size = 12852416, upload-time = "2026-02-06T09:57:18.093Z" } +wheels = [ + { url = "https://files.pythonhosted.org/packages/5a/a8/690a085b4d1fe066130de97a87de32c45062cf2ecd218df9675add895550/grpcio-1.78.0-cp310-cp310-linux_armv7l.whl", hash = "sha256:7cc47943d524ee0096f973e1081cb8f4f17a4615f2116882a5f1416e4cfe92b5", size = 5946986, upload-time = "2026-02-06T09:54:34.043Z" }, + { url = "https://files.pythonhosted.org/packages/c7/1b/e5213c5c0ced9d2d92778d30529ad5bb2dcfb6c48c4e2d01b1f302d33d64/grpcio-1.78.0-cp310-cp310-macosx_11_0_universal2.whl", hash = "sha256:c3f293fdc675ccba4db5a561048cca627b5e7bd1c8a6973ffedabe7d116e22e2", size = 11816533, upload-time = "2026-02-06T09:54:37.04Z" }, + { url = "https://files.pythonhosted.org/packages/18/37/1ba32dccf0a324cc5ace744c44331e300b000a924bf14840f948c559ede7/grpcio-1.78.0-cp310-cp310-manylinux2014_aarch64.manylinux_2_17_aarch64.whl", hash = "sha256:10a9a644b5dd5aec3b82b5b0b90d41c0fa94c85ef42cb42cf78a23291ddb5e7d", size = 6519964, upload-time = "2026-02-06T09:54:40.268Z" }, + { url = "https://files.pythonhosted.org/packages/ed/f5/c0e178721b818072f2e8b6fde13faaba942406c634009caf065121ce246b/grpcio-1.78.0-cp310-cp310-manylinux2014_i686.manylinux_2_17_i686.whl", hash = "sha256:4c5533d03a6cbd7f56acfc9cfb44ea64f63d29091e40e44010d34178d392d7eb", size = 7198058, upload-time = "2026-02-06T09:54:42.389Z" }, + { url = "https://files.pythonhosted.org/packages/5b/b2/40d43c91ae9cd667edc960135f9f08e58faa1576dc95af29f66ec912985f/grpcio-1.78.0-cp310-cp310-manylinux2014_x86_64.manylinux_2_17_x86_64.whl", hash = "sha256:ff870aebe9a93a85283837801d35cd5f8814fe2ad01e606861a7fb47c762a2b7", size = 6727212, upload-time = "2026-02-06T09:54:44.91Z" }, + { url = "https://files.pythonhosted.org/packages/ed/88/9da42eed498f0efcfcd9156e48ae63c0cde3bea398a16c99fb5198c885b6/grpcio-1.78.0-cp310-cp310-musllinux_1_2_aarch64.whl", hash = 
"sha256:391e93548644e6b2726f1bb84ed60048d4bcc424ce5e4af0843d28ca0b754fec", size = 7300845, upload-time = "2026-02-06T09:54:47.562Z" }, + { url = "https://files.pythonhosted.org/packages/23/3f/1c66b7b1b19a8828890e37868411a6e6925df5a9030bfa87ab318f34095d/grpcio-1.78.0-cp310-cp310-musllinux_1_2_i686.whl", hash = "sha256:df2c8f3141f7cbd112a6ebbd760290b5849cda01884554f7c67acc14e7b1758a", size = 8284605, upload-time = "2026-02-06T09:54:50.475Z" }, + { url = "https://files.pythonhosted.org/packages/94/c4/ca1bd87394f7b033e88525384b4d1e269e8424ab441ea2fba1a0c5b50986/grpcio-1.78.0-cp310-cp310-musllinux_1_2_x86_64.whl", hash = "sha256:bd8cb8026e5f5b50498a3c4f196f57f9db344dad829ffae16b82e4fdbaea2813", size = 7726672, upload-time = "2026-02-06T09:54:53.11Z" }, + { url = "https://files.pythonhosted.org/packages/41/09/f16e487d4cc65ccaf670f6ebdd1a17566b965c74fc3d93999d3b2821e052/grpcio-1.78.0-cp310-cp310-win32.whl", hash = "sha256:f8dff3d9777e5d2703a962ee5c286c239bf0ba173877cc68dc02c17d042e29de", size = 4076715, upload-time = "2026-02-06T09:54:55.549Z" }, + { url = "https://files.pythonhosted.org/packages/2a/32/4ce60d94e242725fd3bcc5673c04502c82a8e87b21ea411a63992dc39f8f/grpcio-1.78.0-cp310-cp310-win_amd64.whl", hash = "sha256:94f95cf5d532d0e717eed4fc1810e8e6eded04621342ec54c89a7c2f14b581bf", size = 4799157, upload-time = "2026-02-06T09:54:59.838Z" }, + { url = "https://files.pythonhosted.org/packages/86/c7/d0b780a29b0837bf4ca9580904dfb275c1fc321ded7897d620af7047ec57/grpcio-1.78.0-cp311-cp311-linux_armv7l.whl", hash = "sha256:2777b783f6c13b92bd7b716667452c329eefd646bfb3f2e9dabea2e05dbd34f6", size = 5951525, upload-time = "2026-02-06T09:55:01.989Z" }, + { url = "https://files.pythonhosted.org/packages/c5/b1/96920bf2ee61df85a9503cb6f733fe711c0ff321a5a697d791b075673281/grpcio-1.78.0-cp311-cp311-macosx_11_0_universal2.whl", hash = "sha256:9dca934f24c732750389ce49d638069c3892ad065df86cb465b3fa3012b70c9e", size = 11830418, upload-time = "2026-02-06T09:55:04.462Z" }, + { url = 
"https://files.pythonhosted.org/packages/83/0c/7c1528f098aeb75a97de2bae18c530f56959fb7ad6c882db45d9884d6edc/grpcio-1.78.0-cp311-cp311-manylinux2014_aarch64.manylinux_2_17_aarch64.whl", hash = "sha256:459ab414b35f4496138d0ecd735fed26f1318af5e52cb1efbc82a09f0d5aa911", size = 6524477, upload-time = "2026-02-06T09:55:07.111Z" }, + { url = "https://files.pythonhosted.org/packages/8d/52/e7c1f3688f949058e19a011c4e0dec973da3d0ae5e033909677f967ae1f4/grpcio-1.78.0-cp311-cp311-manylinux2014_i686.manylinux_2_17_i686.whl", hash = "sha256:082653eecbdf290e6e3e2c276ab2c54b9e7c299e07f4221872380312d8cf395e", size = 7198266, upload-time = "2026-02-06T09:55:10.016Z" }, + { url = "https://files.pythonhosted.org/packages/e5/61/8ac32517c1e856677282c34f2e7812d6c328fa02b8f4067ab80e77fdc9c9/grpcio-1.78.0-cp311-cp311-manylinux2014_x86_64.manylinux_2_17_x86_64.whl", hash = "sha256:85f93781028ec63f383f6bc90db785a016319c561cc11151fbb7b34e0d012303", size = 6730552, upload-time = "2026-02-06T09:55:12.207Z" }, + { url = "https://files.pythonhosted.org/packages/bd/98/b8ee0158199250220734f620b12e4a345955ac7329cfd908d0bf0fda77f0/grpcio-1.78.0-cp311-cp311-musllinux_1_2_aarch64.whl", hash = "sha256:f12857d24d98441af6a1d5c87442d624411db486f7ba12550b07788f74b67b04", size = 7304296, upload-time = "2026-02-06T09:55:15.044Z" }, + { url = "https://files.pythonhosted.org/packages/bd/0f/7b72762e0d8840b58032a56fdbd02b78fc645b9fa993d71abf04edbc54f4/grpcio-1.78.0-cp311-cp311-musllinux_1_2_i686.whl", hash = "sha256:5397fff416b79e4b284959642a4e95ac4b0f1ece82c9993658e0e477d40551ec", size = 8288298, upload-time = "2026-02-06T09:55:17.276Z" }, + { url = "https://files.pythonhosted.org/packages/24/ae/ae4ce56bc5bb5caa3a486d60f5f6083ac3469228faa734362487176c15c5/grpcio-1.78.0-cp311-cp311-musllinux_1_2_x86_64.whl", hash = "sha256:fbe6e89c7ffb48518384068321621b2a69cab509f58e40e4399fdd378fa6d074", size = 7730953, upload-time = "2026-02-06T09:55:19.545Z" }, + { url = 
"https://files.pythonhosted.org/packages/b5/6e/8052e3a28eb6a820c372b2eb4b5e32d195c661e137d3eca94d534a4cfd8a/grpcio-1.78.0-cp311-cp311-win32.whl", hash = "sha256:6092beabe1966a3229f599d7088b38dfc8ffa1608b5b5cdda31e591e6500f856", size = 4076503, upload-time = "2026-02-06T09:55:21.521Z" }, + { url = "https://files.pythonhosted.org/packages/08/62/f22c98c5265dfad327251fa2f840b591b1df5f5e15d88b19c18c86965b27/grpcio-1.78.0-cp311-cp311-win_amd64.whl", hash = "sha256:1afa62af6e23f88629f2b29ec9e52ec7c65a7176c1e0a83292b93c76ca882558", size = 4799767, upload-time = "2026-02-06T09:55:24.107Z" }, + { url = "https://files.pythonhosted.org/packages/4e/f4/7384ed0178203d6074446b3c4f46c90a22ddf7ae0b3aee521627f54cfc2a/grpcio-1.78.0-cp312-cp312-linux_armv7l.whl", hash = "sha256:f9ab915a267fc47c7e88c387a3a28325b58c898e23d4995f765728f4e3dedb97", size = 5913985, upload-time = "2026-02-06T09:55:26.832Z" }, + { url = "https://files.pythonhosted.org/packages/81/ed/be1caa25f06594463f685b3790b320f18aea49b33166f4141bfdc2bfb236/grpcio-1.78.0-cp312-cp312-macosx_11_0_universal2.whl", hash = "sha256:3f8904a8165ab21e07e58bf3e30a73f4dffc7a1e0dbc32d51c61b5360d26f43e", size = 11811853, upload-time = "2026-02-06T09:55:29.224Z" }, + { url = "https://files.pythonhosted.org/packages/24/a7/f06d151afc4e64b7e3cc3e872d331d011c279aaab02831e40a81c691fb65/grpcio-1.78.0-cp312-cp312-manylinux2014_aarch64.manylinux_2_17_aarch64.whl", hash = "sha256:859b13906ce098c0b493af92142ad051bf64c7870fa58a123911c88606714996", size = 6475766, upload-time = "2026-02-06T09:55:31.825Z" }, + { url = "https://files.pythonhosted.org/packages/8a/a8/4482922da832ec0082d0f2cc3a10976d84a7424707f25780b82814aafc0a/grpcio-1.78.0-cp312-cp312-manylinux2014_i686.manylinux_2_17_i686.whl", hash = "sha256:b2342d87af32790f934a79c3112641e7b27d63c261b8b4395350dad43eff1dc7", size = 7170027, upload-time = "2026-02-06T09:55:34.7Z" }, + { url = 
"https://files.pythonhosted.org/packages/54/bf/f4a3b9693e35d25b24b0b39fa46d7d8a3c439e0a3036c3451764678fec20/grpcio-1.78.0-cp312-cp312-manylinux2014_x86_64.manylinux_2_17_x86_64.whl", hash = "sha256:12a771591ae40bc65ba67048fa52ef4f0e6db8279e595fd349f9dfddeef571f9", size = 6690766, upload-time = "2026-02-06T09:55:36.902Z" }, + { url = "https://files.pythonhosted.org/packages/c7/b9/521875265cc99fe5ad4c5a17010018085cae2810a928bf15ebe7d8bcd9cc/grpcio-1.78.0-cp312-cp312-musllinux_1_2_aarch64.whl", hash = "sha256:185dea0d5260cbb2d224c507bf2a5444d5abbb1fa3594c1ed7e4c709d5eb8383", size = 7266161, upload-time = "2026-02-06T09:55:39.824Z" }, + { url = "https://files.pythonhosted.org/packages/05/86/296a82844fd40a4ad4a95f100b55044b4f817dece732bf686aea1a284147/grpcio-1.78.0-cp312-cp312-musllinux_1_2_i686.whl", hash = "sha256:51b13f9aed9d59ee389ad666b8c2214cc87b5de258fa712f9ab05f922e3896c6", size = 8253303, upload-time = "2026-02-06T09:55:42.353Z" }, + { url = "https://files.pythonhosted.org/packages/f3/e4/ea3c0caf5468537f27ad5aab92b681ed7cc0ef5f8c9196d3fd42c8c2286b/grpcio-1.78.0-cp312-cp312-musllinux_1_2_x86_64.whl", hash = "sha256:fd5f135b1bd58ab088930b3c613455796dfa0393626a6972663ccdda5b4ac6ce", size = 7698222, upload-time = "2026-02-06T09:55:44.629Z" }, + { url = "https://files.pythonhosted.org/packages/d7/47/7f05f81e4bb6b831e93271fb12fd52ba7b319b5402cbc101d588f435df00/grpcio-1.78.0-cp312-cp312-win32.whl", hash = "sha256:94309f498bcc07e5a7d16089ab984d42ad96af1d94b5a4eb966a266d9fcabf68", size = 4066123, upload-time = "2026-02-06T09:55:47.644Z" }, + { url = "https://files.pythonhosted.org/packages/ad/e7/d6914822c88aa2974dbbd10903d801a28a19ce9cd8bad7e694cbbcf61528/grpcio-1.78.0-cp312-cp312-win_amd64.whl", hash = "sha256:9566fe4ababbb2610c39190791e5b829869351d14369603702e890ef3ad2d06e", size = 4797657, upload-time = "2026-02-06T09:55:49.86Z" }, + { url = 
"https://files.pythonhosted.org/packages/05/a9/8f75894993895f361ed8636cd9237f4ab39ef87fd30db17467235ed1c045/grpcio-1.78.0-cp313-cp313-linux_armv7l.whl", hash = "sha256:ce3a90455492bf8bfa38e56fbbe1dbd4f872a3d8eeaf7337dc3b1c8aa28c271b", size = 5920143, upload-time = "2026-02-06T09:55:52.035Z" }, + { url = "https://files.pythonhosted.org/packages/55/06/0b78408e938ac424100100fd081189451b472236e8a3a1f6500390dc4954/grpcio-1.78.0-cp313-cp313-macosx_11_0_universal2.whl", hash = "sha256:2bf5e2e163b356978b23652c4818ce4759d40f4712ee9ec5a83c4be6f8c23a3a", size = 11803926, upload-time = "2026-02-06T09:55:55.494Z" }, + { url = "https://files.pythonhosted.org/packages/88/93/b59fe7832ff6ae3c78b813ea43dac60e295fa03606d14d89d2e0ec29f4f3/grpcio-1.78.0-cp313-cp313-manylinux2014_aarch64.manylinux_2_17_aarch64.whl", hash = "sha256:8f2ac84905d12918e4e55a16da17939eb63e433dc11b677267c35568aa63fc84", size = 6478628, upload-time = "2026-02-06T09:55:58.533Z" }, + { url = "https://files.pythonhosted.org/packages/ed/df/e67e3734527f9926b7d9c0dde6cd998d1d26850c3ed8eeec81297967ac67/grpcio-1.78.0-cp313-cp313-manylinux2014_i686.manylinux_2_17_i686.whl", hash = "sha256:b58f37edab4a3881bc6c9bca52670610e0c9ca14e2ea3cf9debf185b870457fb", size = 7173574, upload-time = "2026-02-06T09:56:01.786Z" }, + { url = "https://files.pythonhosted.org/packages/a6/62/cc03fffb07bfba982a9ec097b164e8835546980aec25ecfa5f9c1a47e022/grpcio-1.78.0-cp313-cp313-manylinux2014_x86_64.manylinux_2_17_x86_64.whl", hash = "sha256:735e38e176a88ce41840c21bb49098ab66177c64c82426e24e0082500cc68af5", size = 6692639, upload-time = "2026-02-06T09:56:04.529Z" }, + { url = "https://files.pythonhosted.org/packages/bf/9a/289c32e301b85bdb67d7ec68b752155e674ee3ba2173a1858f118e399ef3/grpcio-1.78.0-cp313-cp313-musllinux_1_2_aarch64.whl", hash = "sha256:2045397e63a7a0ee7957c25f7dbb36ddc110e0cfb418403d110c0a7a68a844e9", size = 7268838, upload-time = "2026-02-06T09:56:08.397Z" }, + { url = 
"https://files.pythonhosted.org/packages/0e/79/1be93f32add280461fa4773880196572563e9c8510861ac2da0ea0f892b6/grpcio-1.78.0-cp313-cp313-musllinux_1_2_i686.whl", hash = "sha256:a9f136fbafe7ccf4ac7e8e0c28b31066e810be52d6e344ef954a3a70234e1702", size = 8251878, upload-time = "2026-02-06T09:56:10.914Z" }, + { url = "https://files.pythonhosted.org/packages/65/65/793f8e95296ab92e4164593674ae6291b204bb5f67f9d4a711489cd30ffa/grpcio-1.78.0-cp313-cp313-musllinux_1_2_x86_64.whl", hash = "sha256:748b6138585379c737adc08aeffd21222abbda1a86a0dca2a39682feb9196c20", size = 7695412, upload-time = "2026-02-06T09:56:13.593Z" }, + { url = "https://files.pythonhosted.org/packages/1c/9f/1e233fe697ecc82845942c2822ed06bb522e70d6771c28d5528e4c50f6a4/grpcio-1.78.0-cp313-cp313-win32.whl", hash = "sha256:271c73e6e5676afe4fc52907686670c7cea22ab2310b76a59b678403ed40d670", size = 4064899, upload-time = "2026-02-06T09:56:15.601Z" }, + { url = "https://files.pythonhosted.org/packages/4d/27/d86b89e36de8a951501fb06a0f38df19853210f341d0b28f83f4aa0ffa08/grpcio-1.78.0-cp313-cp313-win_amd64.whl", hash = "sha256:f2d4e43ee362adfc05994ed479334d5a451ab7bc3f3fee1b796b8ca66895acb4", size = 4797393, upload-time = "2026-02-06T09:56:17.882Z" }, + { url = "https://files.pythonhosted.org/packages/29/f2/b56e43e3c968bfe822fa6ce5bca10d5c723aa40875b48791ce1029bb78c7/grpcio-1.78.0-cp314-cp314-linux_armv7l.whl", hash = "sha256:e87cbc002b6f440482b3519e36e1313eb5443e9e9e73d6a52d43bd2004fcfd8e", size = 5920591, upload-time = "2026-02-06T09:56:20.758Z" }, + { url = "https://files.pythonhosted.org/packages/5d/81/1f3b65bd30c334167bfa8b0d23300a44e2725ce39bba5b76a2460d85f745/grpcio-1.78.0-cp314-cp314-macosx_11_0_universal2.whl", hash = "sha256:c41bc64626db62e72afec66b0c8a0da76491510015417c127bfc53b2fe6d7f7f", size = 11813685, upload-time = "2026-02-06T09:56:24.315Z" }, + { url = 
"https://files.pythonhosted.org/packages/0e/1c/bbe2f8216a5bd3036119c544d63c2e592bdf4a8ec6e4a1867592f4586b26/grpcio-1.78.0-cp314-cp314-manylinux2014_aarch64.manylinux_2_17_aarch64.whl", hash = "sha256:8dfffba826efcf366b1e3ccc37e67afe676f290e13a3b48d31a46739f80a8724", size = 6487803, upload-time = "2026-02-06T09:56:27.367Z" }, + { url = "https://files.pythonhosted.org/packages/16/5c/a6b2419723ea7ddce6308259a55e8e7593d88464ce8db9f4aa857aba96fa/grpcio-1.78.0-cp314-cp314-manylinux2014_i686.manylinux_2_17_i686.whl", hash = "sha256:74be1268d1439eaaf552c698cdb11cd594f0c49295ae6bb72c34ee31abbe611b", size = 7173206, upload-time = "2026-02-06T09:56:29.876Z" }, + { url = "https://files.pythonhosted.org/packages/df/1e/b8801345629a415ea7e26c83d75eb5dbe91b07ffe5210cc517348a8d4218/grpcio-1.78.0-cp314-cp314-manylinux2014_x86_64.manylinux_2_17_x86_64.whl", hash = "sha256:be63c88b32e6c0f1429f1398ca5c09bc64b0d80950c8bb7807d7d7fb36fb84c7", size = 6693826, upload-time = "2026-02-06T09:56:32.305Z" }, + { url = "https://files.pythonhosted.org/packages/34/84/0de28eac0377742679a510784f049738a80424b17287739fc47d63c2439e/grpcio-1.78.0-cp314-cp314-musllinux_1_2_aarch64.whl", hash = "sha256:3c586ac70e855c721bda8f548d38c3ca66ac791dc49b66a8281a1f99db85e452", size = 7277897, upload-time = "2026-02-06T09:56:34.915Z" }, + { url = "https://files.pythonhosted.org/packages/ca/9c/ad8685cfe20559a9edb66f735afdcb2b7d3de69b13666fdfc542e1916ebd/grpcio-1.78.0-cp314-cp314-musllinux_1_2_i686.whl", hash = "sha256:35eb275bf1751d2ffbd8f57cdbc46058e857cf3971041521b78b7db94bdaf127", size = 8252404, upload-time = "2026-02-06T09:56:37.553Z" }, + { url = "https://files.pythonhosted.org/packages/3c/05/33a7a4985586f27e1de4803887c417ec7ced145ebd069bc38a9607059e2b/grpcio-1.78.0-cp314-cp314-musllinux_1_2_x86_64.whl", hash = "sha256:207db540302c884b8848036b80db352a832b99dfdf41db1eb554c2c2c7800f65", size = 7696837, upload-time = "2026-02-06T09:56:40.173Z" }, + { url = 
"https://files.pythonhosted.org/packages/73/77/7382241caf88729b106e49e7d18e3116216c778e6a7e833826eb96de22f7/grpcio-1.78.0-cp314-cp314-win32.whl", hash = "sha256:57bab6deef2f4f1ca76cc04565df38dc5713ae6c17de690721bdf30cb1e0545c", size = 4142439, upload-time = "2026-02-06T09:56:43.258Z" }, + { url = "https://files.pythonhosted.org/packages/48/b2/b096ccce418882fbfda4f7496f9357aaa9a5af1896a9a7f60d9f2b275a06/grpcio-1.78.0-cp314-cp314-win_amd64.whl", hash = "sha256:dce09d6116df20a96acfdbf85e4866258c3758180e8c49845d6ba8248b6d0bbb", size = 4929852, upload-time = "2026-02-06T09:56:45.885Z" }, + { url = "https://files.pythonhosted.org/packages/58/6c/40a4bba2c753ea8eeb8d776a31e9c54f4e506edf36db93a3db5456725294/grpcio-1.78.0-cp39-cp39-linux_armv7l.whl", hash = "sha256:86f85dd7c947baa707078a236288a289044836d4b640962018ceb9cd1f899af5", size = 5947902, upload-time = "2026-02-06T09:56:48.469Z" }, + { url = "https://files.pythonhosted.org/packages/c0/4c/ed7664a37a7008be41204c77e0d88bbc4ac531bcf0c27668cd066f9ff6e2/grpcio-1.78.0-cp39-cp39-macosx_11_0_universal2.whl", hash = "sha256:de8cb00d1483a412a06394b8303feec5dcb3b55f81d83aa216dbb6a0b86a94f5", size = 11824772, upload-time = "2026-02-06T09:56:51.264Z" }, + { url = "https://files.pythonhosted.org/packages/9a/5b/45a5c23ba3c4a0f51352366d9b25369a2a51163ab1c93482cb8408726617/grpcio-1.78.0-cp39-cp39-manylinux2014_aarch64.manylinux_2_17_aarch64.whl", hash = "sha256:e888474dee2f59ff68130f8a397792d8cb8e17e6b3434339657ba4ee90845a8c", size = 6521579, upload-time = "2026-02-06T09:56:54.967Z" }, + { url = "https://files.pythonhosted.org/packages/9a/e3/392e647d918004231e3d1c780ed125c48939bfc8f845adb8b5820410da3e/grpcio-1.78.0-cp39-cp39-manylinux2014_i686.manylinux_2_17_i686.whl", hash = "sha256:86ce2371bfd7f212cf60d8517e5e854475c2c43ce14aa910e136ace72c6db6c1", size = 7199330, upload-time = "2026-02-06T09:56:57.611Z" }, + { url = 
"https://files.pythonhosted.org/packages/68/2f/42a52d78bdbdb3f1310ed690a3511cd004740281ca75d300b7bd6d9d3de3/grpcio-1.78.0-cp39-cp39-manylinux2014_x86_64.manylinux_2_17_x86_64.whl", hash = "sha256:b0c689c02947d636bc7fab3e30cc3a3445cca99c834dfb77cd4a6cabfc1c5597", size = 6726696, upload-time = "2026-02-06T09:57:00.357Z" }, + { url = "https://files.pythonhosted.org/packages/0f/83/b3d932a4fbb2dce3056f6df2926fc2d3ddc5d5acbafbec32c84033cf3f23/grpcio-1.78.0-cp39-cp39-musllinux_1_2_aarch64.whl", hash = "sha256:ce7599575eeb25c0f4dc1be59cada6219f3b56176f799627f44088b21381a28a", size = 7299076, upload-time = "2026-02-06T09:57:04.124Z" }, + { url = "https://files.pythonhosted.org/packages/ba/d9/70ea1be55efaf91fd19f7258b1292772a8226cf1b0e237717fba671073cb/grpcio-1.78.0-cp39-cp39-musllinux_1_2_i686.whl", hash = "sha256:684083fd383e9dc04c794adb838d4faea08b291ce81f64ecd08e4577c7398adf", size = 8284493, upload-time = "2026-02-06T09:57:06.746Z" }, + { url = "https://files.pythonhosted.org/packages/d0/2f/3dddccf49e3e75564655b84175fca092d3efd81d2979fc89c4b1c1d879dc/grpcio-1.78.0-cp39-cp39-musllinux_1_2_x86_64.whl", hash = "sha256:ab399ef5e3cd2a721b1038a0f3021001f19c5ab279f145e1146bb0b9f1b2b12c", size = 7724340, upload-time = "2026-02-06T09:57:09.453Z" }, + { url = "https://files.pythonhosted.org/packages/79/ae/dfdb3183141db787a9363078a98764675996a7c2448883153091fd7c8527/grpcio-1.78.0-cp39-cp39-win32.whl", hash = "sha256:f3d6379493e18ad4d39537a82371c5281e153e963cecb13f953ebac155756525", size = 4077641, upload-time = "2026-02-06T09:57:11.881Z" }, + { url = "https://files.pythonhosted.org/packages/aa/aa/694b2f505345cfdd234cffb2525aa379a81695e6c02fd40d7e9193e871c6/grpcio-1.78.0-cp39-cp39-win_amd64.whl", hash = "sha256:5361a0630a7fdb58a6a97638ab70e1dae2893c4d08d7aba64ded28bb9e7a29df", size = 4799428, upload-time = "2026-02-06T09:57:14.493Z" }, ] [[package]] @@ -1577,7 +1579,7 @@ wheels = [ [[package]] name = "isort" -version = "8.0.0" +version = "8.0.1" source = { registry = 
"https://pypi.org/simple" } resolution-markers = [ "python_full_version >= '3.14' and sys_platform == 'win32'", @@ -1594,9 +1596,9 @@ resolution-markers = [ "python_full_version == '3.11.*' and sys_platform != 'emscripten' and sys_platform != 'win32'", "python_full_version == '3.10.*'", ] -sdist = { url = "https://files.pythonhosted.org/packages/bf/e3/e72b0b3a85f24cf5fc2cd8e92b996592798f896024c5cdf3709232e6e377/isort-8.0.0.tar.gz", hash = "sha256:fddea59202f231e170e52e71e3510b99c373b6e571b55d9c7b31b679c0fed47c", size = 769482, upload-time = "2026-02-19T16:31:59.716Z" } +sdist = { url = "https://files.pythonhosted.org/packages/ef/7c/ec4ab396d31b3b395e2e999c8f46dec78c5e29209fac49d1f4dace04041d/isort-8.0.1.tar.gz", hash = "sha256:171ac4ff559cdc060bcfff550bc8404a486fee0caab245679c2abe7cb253c78d", size = 769592, upload-time = "2026-02-28T10:08:20.685Z" } wheels = [ - { url = "https://files.pythonhosted.org/packages/74/ea/cf3aad99dd12c026e2d6835d559efb6fc50ccfd5b46d42d5fec2608b116a/isort-8.0.0-py3-none-any.whl", hash = "sha256:184916a933041c7cf718787f7e52064f3c06272aff69a5cb4dc46497bd8911d9", size = 89715, upload-time = "2026-02-19T16:31:57.745Z" }, + { url = "https://files.pythonhosted.org/packages/3e/95/c7c34aa53c16353c56d0b802fba48d5f5caa2cdee7958acbcb795c830416/isort-8.0.1-py3-none-any.whl", hash = "sha256:28b89bc70f751b559aeca209e6120393d43fbe2490de0559662be7a9787e3d75", size = 89733, upload-time = "2026-02-28T10:08:19.466Z" }, ] [[package]] @@ -1641,7 +1643,7 @@ wheels = [ [[package]] name = "jax" -version = "0.9.0.1" +version = "0.9.1" source = { registry = "https://pypi.org/simple" } resolution-markers = [ "python_full_version >= '3.14' and sys_platform == 'win32'", @@ -1658,15 +1660,15 @@ resolution-markers = [ "python_full_version == '3.11.*' and sys_platform != 'emscripten' and sys_platform != 'win32'", ] dependencies = [ - { name = "jaxlib", version = "0.9.0.1", source = { registry = "https://pypi.org/simple" }, marker = "python_full_version >= '3.11'" }, + { 
name = "jaxlib", version = "0.9.1", source = { registry = "https://pypi.org/simple" }, marker = "python_full_version >= '3.11'" }, { name = "ml-dtypes", marker = "python_full_version >= '3.11'" }, { name = "numpy", version = "2.4.2", source = { registry = "https://pypi.org/simple" }, marker = "python_full_version >= '3.11'" }, { name = "opt-einsum", marker = "python_full_version >= '3.11'" }, - { name = "scipy", version = "1.17.0", source = { registry = "https://pypi.org/simple" }, marker = "python_full_version >= '3.11'" }, + { name = "scipy", version = "1.17.1", source = { registry = "https://pypi.org/simple" }, marker = "python_full_version >= '3.11'" }, ] -sdist = { url = "https://files.pythonhosted.org/packages/52/40/f85d1feadd8f793fc1bfab726272523ef34b27302b55861ea872ec774019/jax-0.9.0.1.tar.gz", hash = "sha256:e395253449d74354fa813ff9e245acb6e42287431d8a01ff33d92e9ee57d36bd", size = 2534795, upload-time = "2026-02-05T18:47:33.088Z" } +sdist = { url = "https://files.pythonhosted.org/packages/25/4d/f45853fdc2b811e78b866d5f80b8a21a848278361f66c066706132f415cf/jax-0.9.1.tar.gz", hash = "sha256:ce1b82477ee192f0b1d9801b095aa0cf3839bc1fe0cbc071c961a24b3ff30361", size = 2625994, upload-time = "2026-03-02T11:24:18.382Z" } wheels = [ - { url = "https://files.pythonhosted.org/packages/57/1e/63ac22ec535e08129e16cb71b7eeeb8816c01d627ea1bc9105e925a71da0/jax-0.9.0.1-py3-none-any.whl", hash = "sha256:3baeaec6dc853394c272eb38a35ffba1972d67cf55d07a76bdb913bcd867e2ca", size = 2955477, upload-time = "2026-02-05T18:45:22.885Z" }, + { url = "https://files.pythonhosted.org/packages/80/e4/88778c6a23b65224e5088e68fd0924e5bde2196a26e76edb3ea3543fed6a/jax-0.9.1-py3-none-any.whl", hash = "sha256:d11cb53d362912253013e8c4d6926cb9f3a4b59ab5b25a7dc08123567067d088", size = 3062162, upload-time = "2026-03-02T11:22:05.089Z" }, ] [[package]] @@ -1740,7 +1742,7 @@ wheels = [ [[package]] name = "jaxlib" -version = "0.9.0.1" +version = "0.9.1" source = { registry = "https://pypi.org/simple" } 
resolution-markers = [ "python_full_version >= '3.14' and sys_platform == 'win32'", @@ -1759,31 +1761,31 @@ resolution-markers = [ dependencies = [ { name = "ml-dtypes", marker = "python_full_version >= '3.11'" }, { name = "numpy", version = "2.4.2", source = { registry = "https://pypi.org/simple" }, marker = "python_full_version >= '3.11'" }, - { name = "scipy", version = "1.17.0", source = { registry = "https://pypi.org/simple" }, marker = "python_full_version >= '3.11'" }, -] -wheels = [ - { url = "https://files.pythonhosted.org/packages/b0/fd/040321b0f4303ec7b558d69488c6130b1697c33d88dab0a0d2ccd2e0817c/jaxlib-0.9.0.1-cp311-cp311-macosx_11_0_arm64.whl", hash = "sha256:5ff2c550dab210278ed3a3b96454b19108a02e0795625be56dca5a181c9833c9", size = 56092920, upload-time = "2026-02-05T18:46:20.873Z" }, - { url = "https://files.pythonhosted.org/packages/e9/76/a558cd5e2ac8a2c16fe7f7e429dd5749cef48bc1a89941bb5b72bd3d7de3/jaxlib-0.9.0.1-cp311-cp311-manylinux_2_27_aarch64.whl", hash = "sha256:c4ac3cfd7aaacc37f37a6a332ee009dee39e3b5081bb4b473f410583436be553", size = 74767780, upload-time = "2026-02-05T18:46:23.917Z" }, - { url = "https://files.pythonhosted.org/packages/87/49/f72fb26e2feb100fd84d297a17111364b15d5979843f62b7539cd120f9bb/jaxlib-0.9.0.1-cp311-cp311-manylinux_2_27_x86_64.whl", hash = "sha256:dc95ee32ae2bd4ed947ad0218fd6576b50a60ce45b60714d7ff2fd9fa195ed9e", size = 80323754, upload-time = "2026-02-05T18:46:27.405Z" }, - { url = "https://files.pythonhosted.org/packages/55/fc/fa3c07d833a60cfb928f7a727fef25059e2e9af1dbc5d09821ad3a728292/jaxlib-0.9.0.1-cp311-cp311-win_amd64.whl", hash = "sha256:2ed35e3300caa228c42897d8fbe961d6e03b797717e44eccbd3a788b5ac5c623", size = 60483840, upload-time = "2026-02-05T18:46:30.606Z" }, - { url = "https://files.pythonhosted.org/packages/c8/76/e89fd547f292663d8ce11b3247cd653a220e0d3cedbdbd094f0a8460d735/jaxlib-0.9.0.1-cp312-cp312-macosx_11_0_arm64.whl", hash = "sha256:3707bf0a58410da7c053c15ec6efee1fe12e70361416e055e4109b8041f4119b", 
size = 56104032, upload-time = "2026-02-05T18:46:33.606Z" }, - { url = "https://files.pythonhosted.org/packages/c1/92/40d4f0acecb3d6f7078b9eb468e524778a3497d0882c7ecf80509c10b7d3/jaxlib-0.9.0.1-cp312-cp312-manylinux_2_27_aarch64.whl", hash = "sha256:5ea8ebd62165b6f18f89b02fab749e02f5c584c2a1c703f04592d4d803f9e981", size = 74769175, upload-time = "2026-02-05T18:46:36.767Z" }, - { url = "https://files.pythonhosted.org/packages/1d/89/0dd938e6ed65ee994a49351a13aceaea46235ffbc1db5444d9ba3a279814/jaxlib-0.9.0.1-cp312-cp312-manylinux_2_27_x86_64.whl", hash = "sha256:e0e4a0a24ef98ec021b913991fbda09aeb96481b1bc0e5300a0339aad216b226", size = 80339748, upload-time = "2026-02-05T18:46:40.148Z" }, - { url = "https://files.pythonhosted.org/packages/bb/02/265e5ccadd65fee2f0716431573d9e512e5c6aecb23f478a7a92053cf219/jaxlib-0.9.0.1-cp312-cp312-win_amd64.whl", hash = "sha256:08733d1431238a7cf9108338ab7be898b97181cba0eef53f2f9fd3de17d20adb", size = 60508788, upload-time = "2026-02-05T18:46:43.209Z" }, - { url = "https://files.pythonhosted.org/packages/f0/8d/f5a78b4d2a08e2d358e01527a3617af2df67c70231029ce1bdbb814219ff/jaxlib-0.9.0.1-cp313-cp313-macosx_11_0_arm64.whl", hash = "sha256:e857cafdd12e18493d96d4a290ed31aa9d99a0dc3056b4b42974c0f342c9bb0c", size = 56103168, upload-time = "2026-02-05T18:46:46.481Z" }, - { url = "https://files.pythonhosted.org/packages/47/c3/fd3a9e2f02c1a04a1a00ff74adb6dd09e34040587bbb1b51b0176151dfa1/jaxlib-0.9.0.1-cp313-cp313-manylinux_2_27_aarch64.whl", hash = "sha256:b73b85f927d9b006f07622d5676092eab916645c4804fed6568da5fb4a541dfc", size = 74768692, upload-time = "2026-02-05T18:46:49.571Z" }, - { url = "https://files.pythonhosted.org/packages/d9/48/34923a6add7dda5fb8f30409a98b638f0dbd2d9571dbbf73db958eaec44a/jaxlib-0.9.0.1-cp313-cp313-manylinux_2_27_x86_64.whl", hash = "sha256:54dd2d34c6bec4f099f888a2f7895069a47c3ba86aaa77b0b78e9c3f9ef948f1", size = 80337646, upload-time = "2026-02-05T18:46:53.299Z" }, - { url = 
"https://files.pythonhosted.org/packages/a8/a9/629bed81406902653973d57de5af92842c7da63dfa8fcd84ee490c62ee94/jaxlib-0.9.0.1-cp313-cp313-win_amd64.whl", hash = "sha256:27db7fbc49938f819f2a93fefef0bdc25bd523b499ab4d8a71ed8915c037c0b4", size = 60508306, upload-time = "2026-02-05T18:46:56.441Z" }, - { url = "https://files.pythonhosted.org/packages/45/e3/6943589aaa58d9934838e00c6149dd1fc81e0c8555e9fcc9f527648faf5c/jaxlib-0.9.0.1-cp313-cp313t-macosx_11_0_arm64.whl", hash = "sha256:9312fcfb4c5586802c08bc1b3b2419e48aa2a4cd1356251fe791ad71edc2da2a", size = 56210697, upload-time = "2026-02-05T18:46:59.642Z" }, - { url = "https://files.pythonhosted.org/packages/7e/ff/39479759b71f1d281b77050184759ac76dfd23a3ae75132ef92d168099c5/jaxlib-0.9.0.1-cp313-cp313t-manylinux_2_27_aarch64.whl", hash = "sha256:b536512cf84a0cb031196d6d5233f7093745e87eb416e45ad96fbb764b2befed", size = 74882879, upload-time = "2026-02-05T18:47:02.708Z" }, - { url = "https://files.pythonhosted.org/packages/87/0d/e41eeddd761110d733688d6493defe776440c8f3d114419a8ecaef55601f/jaxlib-0.9.0.1-cp313-cp313t-manylinux_2_27_x86_64.whl", hash = "sha256:c4dc8828bb236532033717061d132906075452556b12d1ff6ccc10e569435dfe", size = 80438424, upload-time = "2026-02-05T18:47:06.437Z" }, - { url = "https://files.pythonhosted.org/packages/fd/ec/54b1251cea5c74a2f0d22106f5d1c7dc9e7b6a000d6a81a88deffa34c6fe/jaxlib-0.9.0.1-cp314-cp314-macosx_11_0_arm64.whl", hash = "sha256:43272e52e5c89dbc4f02c7ccb6ffa5d587a09ac8db5163cb0c43e125b7075129", size = 56101484, upload-time = "2026-02-05T18:47:09.46Z" }, - { url = "https://files.pythonhosted.org/packages/29/ce/91ba780439aa1e6bae964ea641169e8b9c9349c175fcb1a723b96ba54313/jaxlib-0.9.0.1-cp314-cp314-manylinux_2_27_aarch64.whl", hash = "sha256:82348cee1521d6123038c4c3beeafa2076c8f4ae29a233b8abff9d6dc8b44145", size = 74789558, upload-time = "2026-02-05T18:47:12.394Z" }, - { url = 
"https://files.pythonhosted.org/packages/ce/9b/3d7baca233c378b01fa445c9f63b260f592249ff69950baf893cea631b10/jaxlib-0.9.0.1-cp314-cp314-manylinux_2_27_x86_64.whl", hash = "sha256:e61e88032eeb31339c72ead9ed60c6153cd2222512624caadea67c350c78432e", size = 80343053, upload-time = "2026-02-05T18:47:16.042Z" }, - { url = "https://files.pythonhosted.org/packages/92/5d/80efe5295133d5114fb7b0f27bdf82bc7a2308356dde6ba77c2afbaa3a36/jaxlib-0.9.0.1-cp314-cp314-win_amd64.whl", hash = "sha256:abd9f127d23705105683448781914f17898b2b6591a051b259e6b947d4dcb93f", size = 62826248, upload-time = "2026-02-05T18:47:19.986Z" }, - { url = "https://files.pythonhosted.org/packages/f9/a9/f72578daa6af9bed9bda75b842c97581b31a577d7b2072daf8ba3d5a8156/jaxlib-0.9.0.1-cp314-cp314t-macosx_11_0_arm64.whl", hash = "sha256:5b01a75fbac8098cc985f6f1690bfb62f98b0785c84199287e0baaae50fa4238", size = 56209722, upload-time = "2026-02-05T18:47:23.193Z" }, - { url = "https://files.pythonhosted.org/packages/95/ea/eefb118305dd5e1b0ad8d942f2bf43616c964d89fe491bec8628173da24d/jaxlib-0.9.0.1-cp314-cp314t-manylinux_2_27_aarch64.whl", hash = "sha256:76f23cbb109e673ea7a90781aca3e02a0c72464410c019fe14fba3c044f2b778", size = 74881382, upload-time = "2026-02-05T18:47:26.703Z" }, - { url = "https://files.pythonhosted.org/packages/0a/aa/a42fb912fd1f9c83e22dc2577cdfbf1a1b07d6660532cb44724db7a7c479/jaxlib-0.9.0.1-cp314-cp314t-manylinux_2_27_x86_64.whl", hash = "sha256:f80d30dedce96c73a7f5dcb79c4c827a1bde2304f502a56ce7e7f723df2a5398", size = 80438052, upload-time = "2026-02-05T18:47:30.039Z" }, + { name = "scipy", version = "1.17.1", source = { registry = "https://pypi.org/simple" }, marker = "python_full_version >= '3.11'" }, +] +wheels = [ + { url = "https://files.pythonhosted.org/packages/3f/c8/ebba6a5cd16b080a7cdbb0002b5cd5aa2775b3fb5b66bdc8e1b6f3572a03/jaxlib-0.9.1-cp311-cp311-macosx_11_0_arm64.whl", hash = "sha256:2976b09c3a0b97403912e9e2a88893c4c6e6629974e2941943e9a1ff4e3db08c", size = 57971840, upload-time = 
"2026-03-02T11:23:02.3Z" }, + { url = "https://files.pythonhosted.org/packages/26/04/b42037bfa38e5506a02a17a42abb494a601b78f02a8756dc1745cd3efb56/jaxlib-0.9.1-cp311-cp311-manylinux_2_27_aarch64.whl", hash = "sha256:dc1085450dfd582d648426a65e5bd87f7f2f14dad66a6bda4aace26471f450bf", size = 76830011, upload-time = "2026-03-02T11:23:05.706Z" }, + { url = "https://files.pythonhosted.org/packages/48/7c/8ebb7f5a487641b7292f039f3c56a0189d0d9a262fca6705fb40c3663f5e/jaxlib-0.9.1-cp311-cp311-manylinux_2_27_x86_64.whl", hash = "sha256:97239348cd95d5b3356f475fa837408e4ca0df26455409eaee5f8d42f4449c75", size = 82461550, upload-time = "2026-03-02T11:23:09.357Z" }, + { url = "https://files.pythonhosted.org/packages/bc/1b/bc1d41fb12e21449364fb76dd50f37b7e22048685f657796f2aadf76157a/jaxlib-0.9.1-cp311-cp311-win_amd64.whl", hash = "sha256:266661be43c6db0daaf76c6492f9d03600c922d5b115456e14a4b6f8b94ba82b", size = 62144638, upload-time = "2026-03-02T11:23:12.837Z" }, + { url = "https://files.pythonhosted.org/packages/8f/06/59b1da0a3b2450a4abbf66cbb3bbfe0b14f9723b1f8997c0178db3549e54/jaxlib-0.9.1-cp312-cp312-macosx_11_0_arm64.whl", hash = "sha256:cea7f98a1a558fab5cf8f569e5567a3c288667dd223261adaeb9645c37e4ad8b", size = 57980807, upload-time = "2026-03-02T11:23:16.042Z" }, + { url = "https://files.pythonhosted.org/packages/f8/b9/e0419783cbff9fa3bbc053dbe130f9051f60de4f424f650d70aae7f3bdf1/jaxlib-0.9.1-cp312-cp312-manylinux_2_27_aarch64.whl", hash = "sha256:f80e8aead3461683657027e14e814e5bdd00be8ce8e05c0a5db86403db297c2e", size = 76828062, upload-time = "2026-03-02T11:23:19.202Z" }, + { url = "https://files.pythonhosted.org/packages/53/6b/b381bda5850f5611822d791cd25dfe36efda2688a68c4dda0f8a92c36dec/jaxlib-0.9.1-cp312-cp312-manylinux_2_27_x86_64.whl", hash = "sha256:e2ab8c97be30354a34e64d17066df0fce7d1d0f40f7a48eded19e9e837896f5d", size = 82472923, upload-time = "2026-03-02T11:23:23.352Z" }, + { url = 
"https://files.pythonhosted.org/packages/6c/e9/e4dc1f699b894651f3d3ed6622c3c113c21003c2ed832ab00ed62055062b/jaxlib-0.9.1-cp312-cp312-win_amd64.whl", hash = "sha256:836b78e16bb06d984c41ae0605e96ef031b720191b489a0c09f7185dcabcbed0", size = 62164632, upload-time = "2026-03-02T11:23:28.285Z" }, + { url = "https://files.pythonhosted.org/packages/08/18/fee700125fe4367c75be1d0f300d13069f5ed119a635ea9199de4b4bc9dc/jaxlib-0.9.1-cp313-cp313-macosx_11_0_arm64.whl", hash = "sha256:e9915bcaa9ffefd40cd3fdb08a83b16b79f1f3c9ba187884f5b442ad2a47ffd1", size = 57982624, upload-time = "2026-03-02T11:23:31.412Z" }, + { url = "https://files.pythonhosted.org/packages/fd/5f/d4a79d6802f3cef02773852453d9528569dd0896964117d4401658828aba/jaxlib-0.9.1-cp313-cp313-manylinux_2_27_aarch64.whl", hash = "sha256:9e88c35248b37d5219423ff8ddca60c6a561e665ded5c4fcbc61f0763e03f1e3", size = 76828438, upload-time = "2026-03-02T11:23:34.793Z" }, + { url = "https://files.pythonhosted.org/packages/3e/2e/d84cafbd07e8cdc7701d9f840f4eea0cfcf3487a99ada14507702172da14/jaxlib-0.9.1-cp313-cp313-manylinux_2_27_x86_64.whl", hash = "sha256:da60d967b4ac2084a3e3535ad982392894dd6bdf79c9a56978aba08404a58c82", size = 82473711, upload-time = "2026-03-02T11:23:38.356Z" }, + { url = "https://files.pythonhosted.org/packages/45/e6/4d09ec33a5d096c541025272dc31a36aa9d9a5752b37e05193b23c125810/jaxlib-0.9.1-cp313-cp313-win_amd64.whl", hash = "sha256:7ec6e2f43be6e1ae9321efe9a98affcd8acbe0e1fe59aba1d307ba0462752988", size = 62164682, upload-time = "2026-03-02T11:23:41.761Z" }, + { url = "https://files.pythonhosted.org/packages/8a/be/7d810371aa3bdf30882df60965c15773b8990c90e350a650e366e6dedbaa/jaxlib-0.9.1-cp313-cp313t-macosx_11_0_arm64.whl", hash = "sha256:872e5917ad20cfde85ce6d50a6dffb205ce551d5c691532f0f07e30c34bbb6c3", size = 58092440, upload-time = "2026-03-02T11:23:46.233Z" }, + { url = 
"https://files.pythonhosted.org/packages/e9/63/0f5acacd3bd6906f2e1f730ceeafac4afc5cc612f43be4820785608cb951/jaxlib-0.9.1-cp313-cp313t-manylinux_2_27_aarch64.whl", hash = "sha256:469f08a30f6b541557e29c5de61ea6df16ac0ef9225879373bb2b332f1b27d14", size = 76949185, upload-time = "2026-03-02T11:23:49.378Z" }, + { url = "https://files.pythonhosted.org/packages/91/c5/a4dee13627d913c7bd0cf29b7f5c1d6a2605760d08a7cff952f9098ebb61/jaxlib-0.9.1-cp313-cp313t-manylinux_2_27_x86_64.whl", hash = "sha256:2e2225b80689610cbb472822dadf7cc200aa4bdac813112a3f6e074d96b1458c", size = 82584273, upload-time = "2026-03-02T11:23:52.762Z" }, + { url = "https://files.pythonhosted.org/packages/a4/b0/f2c9caa6f545d4ecc1eab528c68c9191e40087f1bc79a6da2e29c6416510/jaxlib-0.9.1-cp314-cp314-macosx_11_0_arm64.whl", hash = "sha256:3071bf493f6f48207c56b1e9a5bf895e2acebc5bd40f6f35458e76eb8bf210c7", size = 57984052, upload-time = "2026-03-02T11:23:55.766Z" }, + { url = "https://files.pythonhosted.org/packages/0c/e7/237ec5f4cd07420ef50d79a048b769664dbe306e31bdb10f9dcb9accabe9/jaxlib-0.9.1-cp314-cp314-manylinux_2_27_aarch64.whl", hash = "sha256:531dff9fae7aea14449ee544cc1415880cc8a346a9287d347dbd1b2b51d8aabd", size = 76846925, upload-time = "2026-03-02T11:23:59.18Z" }, + { url = "https://files.pythonhosted.org/packages/76/fe/67d2c414b0860d42f4a20b1fadbe7aeffb1b3d885efebd7aedf22a4bc2a2/jaxlib-0.9.1-cp314-cp314-manylinux_2_27_x86_64.whl", hash = "sha256:2287a1c891b152c52eb9b73925f57cde01be35d2bab4dad9673d3c83c5982ca8", size = 82484342, upload-time = "2026-03-02T11:24:02.541Z" }, + { url = "https://files.pythonhosted.org/packages/54/0d/a8e27c1c434e489883c1182bd52de27775b8a78013de62e6eabf80991df5/jaxlib-0.9.1-cp314-cp314-win_amd64.whl", hash = "sha256:61160d686e6a4703ef30a6a3aa199c934e6359f42d0aa1c0f9c475d3953b9459", size = 64553355, upload-time = "2026-03-02T11:24:05.976Z" }, + { url = 
"https://files.pythonhosted.org/packages/fa/4a/e5cb3a32320da2e9496c66045a4e19e16597c92a6496dd493b630585c219/jaxlib-0.9.1-cp314-cp314t-macosx_11_0_arm64.whl", hash = "sha256:5ac3db6b164a8a5b473c77ad9da4f43937d309a27f5cb2f38932930b26e42c68", size = 58096335, upload-time = "2026-03-02T11:24:09.01Z" }, + { url = "https://files.pythonhosted.org/packages/50/d2/35ecc2e92065ac035a954fcb4b752baa72747dcc3a3466525c42c4404958/jaxlib-0.9.1-cp314-cp314t-manylinux_2_27_aarch64.whl", hash = "sha256:30fe58e8e4e105dffe364a6f0dccca16d93433576d4a015babc83339ca7f1f38", size = 76948543, upload-time = "2026-03-02T11:24:12.026Z" }, + { url = "https://files.pythonhosted.org/packages/ba/cb/a8de776aee88f42937d07472953cf7980e45f5fb30aa9d5ee652b4acc771/jaxlib-0.9.1-cp314-cp314t-manylinux_2_27_x86_64.whl", hash = "sha256:6b6654a20d54e7cc77d1d54c33f1db851ef9d70bb112b627776178221036e720", size = 82585090, upload-time = "2026-03-02T11:24:15.783Z" }, ] [[package]] @@ -2079,53 +2081,66 @@ wheels = [ [[package]] name = "line-profiler" -version = "5.0.1" +version = "5.0.2" source = { registry = "https://pypi.org/simple" } dependencies = [ { name = "tomli", marker = "python_full_version < '3.11'" }, + { name = "typing-extensions" }, ] -sdist = { url = "https://files.pythonhosted.org/packages/bf/2a/498665a424404a560b2c6b3a3ea8b4304dbe493ccc3d01a6866c7a38890e/line_profiler-5.0.1.tar.gz", hash = "sha256:3e56c5eee51aa8b82a09d8a35ab19f4100ee45d70eb676c2c58deedcc06c34b1", size = 406557, upload-time = "2026-02-07T05:06:49.814Z" } -wheels = [ - { url = "https://files.pythonhosted.org/packages/7e/68/0a52f3868aca7722938094003bda3f05a36a5ac72a3faa3b468fb939ffc4/line_profiler-5.0.1-cp310-cp310-macosx_10_9_universal2.whl", hash = "sha256:64f099711f4752bc4e3dae762b971ef3016ad7572507db4b22a9a7bb0f4fd05f", size = 657258, upload-time = "2026-02-07T05:05:15.094Z" }, - { url = 
"https://files.pythonhosted.org/packages/4a/00/8609a3774a221aa4c48c3d5f3ecf63194e44c931b74a3bad6637057f07c4/line_profiler-5.0.1-cp310-cp310-macosx_10_9_x86_64.whl", hash = "sha256:7c55ae9dd54deda74ddb79a60818a59c32d53e87eb5628eab53183123aca6c53", size = 514405, upload-time = "2026-02-07T05:05:17.253Z" }, - { url = "https://files.pythonhosted.org/packages/ee/1b/208adab75d25140c6ba4469da3e4d8bf51bb65a0e9e5b04f24dd0e6dadc7/line_profiler-5.0.1-cp310-cp310-macosx_11_0_arm64.whl", hash = "sha256:e6502518f7d7c1241b8d72fce275a8d8ac08081335a1bd99ad69fadaf044784d", size = 502984, upload-time = "2026-02-07T05:05:18.866Z" }, - { url = "https://files.pythonhosted.org/packages/1e/d9/fbc770fa6df84ea32580dae6c46447c07831fac97f3e5e5f3f6182c7d5ab/line_profiler-5.0.1-cp310-cp310-manylinux_2_24_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:8b9dd773b1bde3f9864bb302e8bb78a9b35573448e1b837e8a6d2740580ff18e", size = 1505200, upload-time = "2026-02-07T05:05:20.428Z" }, - { url = "https://files.pythonhosted.org/packages/1f/12/77c03fcb93b0d206b785ed45f461b29195bdd9cfd609ced3cdfb654287b3/line_profiler-5.0.1-cp310-cp310-musllinux_1_2_x86_64.whl", hash = "sha256:8b28c8902092e1dbc1aa35929e7b5472a5bdb32da1fbd3570c5e78376a71ee86", size = 2530747, upload-time = "2026-02-07T05:05:21.501Z" }, - { url = "https://files.pythonhosted.org/packages/96/3d/a001ec8c4154cbfd949bd570036163e8a7dbeca84a8a82c03cf33919bdcd/line_profiler-5.0.1-cp310-cp310-win_amd64.whl", hash = "sha256:3b8bf20a9a15029e833361d6c8bed4397c806725a80a2bfb457ce1d60a918dfe", size = 485432, upload-time = "2026-02-07T05:05:22.617Z" }, - { url = "https://files.pythonhosted.org/packages/f9/55/0a74021f3ecfe71be86b3263f98890a28902ed0715a841507ac2eb0316db/line_profiler-5.0.1-cp311-cp311-macosx_10_9_universal2.whl", hash = "sha256:8103e12337802850af69ad74fa2d540cb24b35905cab5d093e4d5a88f89d7305", size = 656106, upload-time = "2026-02-07T05:05:56.919Z" }, - { url = 
"https://files.pythonhosted.org/packages/a1/e4/63b961fe4ce9cd9b05a4710858b32c537ad8364ed84ec52b1a463733b8b9/line_profiler-5.0.1-cp311-cp311-macosx_10_9_x86_64.whl", hash = "sha256:029abeda3bedf2205fd9e9f9d35b38d369cc33d5581d875aa27c80b03facd95e", size = 513554, upload-time = "2026-02-07T05:05:59.044Z" }, - { url = "https://files.pythonhosted.org/packages/a5/2b/0c15fe6ae98340a8315f76a289720b3db7cfd2b43581f07771b39ac59a69/line_profiler-5.0.1-cp311-cp311-macosx_11_0_arm64.whl", hash = "sha256:fc5e42b471316fe55fb52f3dd048a359652d3715e302707a4342844ade009166", size = 502698, upload-time = "2026-02-07T05:06:00.11Z" }, - { url = "https://files.pythonhosted.org/packages/1e/9c/2b0ede405364e23a5ec45100a6c053db40afff36b17d2778541e16766cae/line_profiler-5.0.1-cp311-cp311-manylinux_2_24_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:e5fe36bf67e5114b56956017cbdb3e14851afa047aee06a6249c7e4524985d30", size = 1546113, upload-time = "2026-02-07T05:06:01.144Z" }, - { url = "https://files.pythonhosted.org/packages/8e/c8/80bd62dd8fd4d594cb9bc12f40ade5222c5e18a7073f2003091d53ee264a/line_profiler-5.0.1-cp311-cp311-musllinux_1_2_x86_64.whl", hash = "sha256:daab2aa2a1c67e7706ab43e13b544eb7c0e2321d7a0646e0380745361e2477ce", size = 2570825, upload-time = "2026-02-07T05:06:02.782Z" }, - { url = "https://files.pythonhosted.org/packages/20/75/87a0b452a42783848a82ca67a390f920a5844ef0db092f9029cc42933a72/line_profiler-5.0.1-cp311-cp311-win_amd64.whl", hash = "sha256:1a717a5eed30311982b8e707eda30384c5532ccbd557d57e40a1dbc5588667c3", size = 486002, upload-time = "2026-02-07T05:06:04.085Z" }, - { url = "https://files.pythonhosted.org/packages/54/79/0bf2de84d3680318bf85f3375fe0c296c6d4b1ed02dcad686fa09ced8df1/line_profiler-5.0.1-cp311-cp311-win_arm64.whl", hash = "sha256:6d4b626c948be1d7742ea2314261eccfc4b9f7dfb2adae8ece4409776a9e2511", size = 470516, upload-time = "2026-02-07T05:06:05.227Z" }, - { url = 
"https://files.pythonhosted.org/packages/fa/8e/bd5b0cc87203ff280cf01ef65b263472983adad5a0f710cf191e292fc3df/line_profiler-5.0.1-cp312-cp312-macosx_10_13_universal2.whl", hash = "sha256:b9b58a4d805ea11a0ea18c75a21b9d3bc1bb69d1f7d9282625386b8b47689b3b", size = 652481, upload-time = "2026-02-07T05:06:06.329Z" }, - { url = "https://files.pythonhosted.org/packages/a0/26/01d65c99809cdec0566c3f86b4cefec6ba558b261f75dac0b856a1570d7e/line_profiler-5.0.1-cp312-cp312-macosx_10_13_x86_64.whl", hash = "sha256:a5401dfe1dcd6f01d0f35feff02c96ebd73d2e45058e39ba935e822bde33f191", size = 511256, upload-time = "2026-02-07T05:06:07.847Z" }, - { url = "https://files.pythonhosted.org/packages/1e/4d/5862629dc59f8154eae76ac0ea2a69c0d11b0b79483957f3c1c6a1af9896/line_profiler-5.0.1-cp312-cp312-macosx_11_0_arm64.whl", hash = "sha256:a3350cfe27fa71082ac3d773d905b5cff7584a7923a36ea894a4619c0eb40116", size = 501428, upload-time = "2026-02-07T05:06:08.889Z" }, - { url = "https://files.pythonhosted.org/packages/81/1d/adda8aff5cc3e1d8687a128593a562fbf28d650513674aa773381068ce95/line_profiler-5.0.1-cp312-cp312-manylinux_2_24_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:348f34f54d68dcb249124d6b6275cbfcaea33920aecdb2f7d536d395abbaeda7", size = 1489869, upload-time = "2026-02-07T05:06:10.034Z" }, - { url = "https://files.pythonhosted.org/packages/96/6e/a5f92fb2451982ea49dd1bbc1b4a308aaeda81320583b3593731bc8654e8/line_profiler-5.0.1-cp312-cp312-musllinux_1_2_x86_64.whl", hash = "sha256:4344e66d853824be0f0fa5d99ba6014cb093334e178fac942870bc4a4dd4c146", size = 2501328, upload-time = "2026-02-07T05:06:12.101Z" }, - { url = "https://files.pythonhosted.org/packages/5f/79/cd66262b78a9f1e6ccd7452f331237c3489fb93191f95fe0b9c4cdac4733/line_profiler-5.0.1-cp312-cp312-win_amd64.whl", hash = "sha256:87d2295feaa5ac933e672d1c5ac5b83e2a1f7ebce25d290f81a7aabb1d46ac1f", size = 484346, upload-time = "2026-02-07T05:06:13.847Z" }, - { url = 
"https://files.pythonhosted.org/packages/cb/ba/ec80db0e0b2a46832127f5de5cd6d059d60aeb0daf2a2eddd7a05ff092da/line_profiler-5.0.1-cp312-cp312-win_arm64.whl", hash = "sha256:d3c93c975c1ccbc77db82579e9ec65897d783d53c3456cd2a8a582cae7cb5b81", size = 467828, upload-time = "2026-02-07T05:06:14.896Z" }, - { url = "https://files.pythonhosted.org/packages/35/83/23b24ceb224f89725c2baa0be1b889ea9eec84b4ec3835c8f7ff62abf918/line_profiler-5.0.1-cp313-cp313-macosx_10_13_universal2.whl", hash = "sha256:6100734916900f6c0ee5ba1cae05e7860c53aac4cd7a016faefd50092be22a14", size = 648194, upload-time = "2026-02-07T05:06:16.59Z" }, - { url = "https://files.pythonhosted.org/packages/8e/ec/6e71a59baf77b95c38ac07dc6e622f46674a526ea9dbd348ac310c24b358/line_profiler-5.0.1-cp313-cp313-macosx_10_13_x86_64.whl", hash = "sha256:ef083f93bbb8cd8e7fa49b07e09245195a9be47755e7e353fb526aee9d983427", size = 509348, upload-time = "2026-02-07T05:06:18.195Z" }, - { url = "https://files.pythonhosted.org/packages/46/29/ce75d7e9c07e72ffa513424881d0509a559a21a433f462fb197604a0e4ce/line_profiler-5.0.1-cp313-cp313-macosx_11_0_arm64.whl", hash = "sha256:0b74a89eba20a20222bf6220e017244849cb675125a0e9e7ade5411af3d6c246", size = 499198, upload-time = "2026-02-07T05:06:19.72Z" }, - { url = "https://files.pythonhosted.org/packages/90/ae/3bccce627f42151b2bd7389ef1304b9255e38d6c79ae23fbd8c33600ea45/line_profiler-5.0.1-cp313-cp313-manylinux_2_24_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:6f8a74fb63ff4cb1358fa9378daa853396f5868d5c81cad88d17b1f48a761f04", size = 1488964, upload-time = "2026-02-07T05:06:20.86Z" }, - { url = "https://files.pythonhosted.org/packages/ff/24/0940490a9be8e19ed097da03463547c5a7e066b8612e208e005fd440c3e2/line_profiler-5.0.1-cp313-cp313-musllinux_1_2_x86_64.whl", hash = "sha256:7460781af7e8754850c5dc8b6f1d0133d48aa3a24723cfe9d445dd27d42a798d", size = 2500824, upload-time = "2026-02-07T05:06:22.19Z" }, - { url = 
"https://files.pythonhosted.org/packages/3e/53/f73fc9515d3919c9733b88fc9d51b81dba50d74da9e8f357a72ed5c503b7/line_profiler-5.0.1-cp313-cp313-win_amd64.whl", hash = "sha256:20174e74b142d13ccb8836ebabfa1ca4e2cde4d0961f3ee078a3cc64f2832bd6", size = 484852, upload-time = "2026-02-07T05:06:23.467Z" }, - { url = "https://files.pythonhosted.org/packages/44/c5/2cdf45c274410632c15a28075ccc865e13b2dd5ae3b11a25313cf8e0d8af/line_profiler-5.0.1-cp313-cp313-win_arm64.whl", hash = "sha256:39ed06465de1dc1eccf1df7dcd90aa100af3f55472ef25fa6c8bd228d8d5f819", size = 467511, upload-time = "2026-02-07T05:06:24.588Z" }, - { url = "https://files.pythonhosted.org/packages/dd/76/f857c647597bca495dcba3f7edaf986516bde919152f19c71bef47a546fa/line_profiler-5.0.1-cp314-cp314-macosx_10_13_universal2.whl", hash = "sha256:8c0cd9f38eaddb506990080593381f276b1942c416e415a933632c4943895df3", size = 653895, upload-time = "2026-02-07T05:06:25.724Z" }, - { url = "https://files.pythonhosted.org/packages/ad/3c/7688dff38a2bdcf66b990f5d7c496ca41dc63171a3e03a6049488842f786/line_profiler-5.0.1-cp314-cp314-macosx_10_13_x86_64.whl", hash = "sha256:4e901d75109f12a1a65edc2352401875cd51b69bf91537a9555c7691fdc0dd46", size = 514383, upload-time = "2026-02-07T05:06:26.976Z" }, - { url = "https://files.pythonhosted.org/packages/93/4a/79513220bc2c4fa2a4e7468b89e18b917e82bc7ea1e7be1b924412f9cd20/line_profiler-5.0.1-cp314-cp314-macosx_11_0_arm64.whl", hash = "sha256:5630d495f16babd812f4ef5cba90cf3cf3cc06b10a24f9becfb76a64e511bcbd", size = 505430, upload-time = "2026-02-07T05:06:28.216Z" }, - { url = "https://files.pythonhosted.org/packages/8e/f4/012446292f1fee6c4a5b7ebf3d5de7741550b8b3e781186a32c333ced1fa/line_profiler-5.0.1-cp314-cp314-manylinux_2_24_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:4d9c0b8d01eddb99ed76f53e2f81cce8ceff68e751370af2bd1fd276fb17570e", size = 1480761, upload-time = "2026-02-07T05:06:29.297Z" }, - { url = 
"https://files.pythonhosted.org/packages/cd/e1/48aefe03d27a32b93ffec6aaaab1e0f5d5b94e0a44b3ddf0929c9eeef50c/line_profiler-5.0.1-cp314-cp314-musllinux_1_2_x86_64.whl", hash = "sha256:4b76d6f7ab6d2b3018bea10172bbe105624d14f63bde9549c393502ca4ea9fb5", size = 2500278, upload-time = "2026-02-07T05:06:30.782Z" }, - { url = "https://files.pythonhosted.org/packages/5d/f8/0959ab4ff46a99c9db6d90de90d08bff6d3277fc4b80c9fb5d04300af798/line_profiler-5.0.1-cp314-cp314-win_amd64.whl", hash = "sha256:9bb97a77d8d5ffa8bf0193c5ee4d804dc8360244861f104c10c9e58c95721066", size = 491243, upload-time = "2026-02-07T05:06:32.087Z" }, - { url = "https://files.pythonhosted.org/packages/54/6d/91e7e2390c064233c1e64de8d82059212814c29b46f33f554bc7fe0a2711/line_profiler-5.0.1-cp314-cp314-win_arm64.whl", hash = "sha256:c3a4807a30adda81ac246744bf42bff8cc54bcbbe5e3bfff4523b171349c5059", size = 475314, upload-time = "2026-02-07T05:06:33.358Z" }, - { url = "https://files.pythonhosted.org/packages/be/ed/0a0c4a2bb84de941e52a46642341552c721d091e0a4d7be5138849de4902/line_profiler-5.0.1-cp39-cp39-macosx_10_9_universal2.whl", hash = "sha256:92b724b3668755967a2174c20b56e7a69ce46aea1935f1605bc7f5f5ed672f15", size = 658806, upload-time = "2026-02-07T05:06:42.141Z" }, - { url = "https://files.pythonhosted.org/packages/ad/79/b7a36d46cff3f4d17d18e8c3d6e8275ac05559952e25dc4c95e8c4cf7337/line_profiler-5.0.1-cp39-cp39-macosx_10_9_x86_64.whl", hash = "sha256:75eba02f688601a9ef23d709787c2e2e26f8c46de9b883d60227ef391dd8c513", size = 515234, upload-time = "2026-02-07T05:06:43.211Z" }, - { url = "https://files.pythonhosted.org/packages/88/af/a8aaf394f1a15df4cbcfabc228c215dc014082a864f38d4b074fc63caef8/line_profiler-5.0.1-cp39-cp39-macosx_11_0_arm64.whl", hash = "sha256:3b3b551c90eff946c38933c76c4c77e2904c403a20dc9eb467b756042066e6a4", size = 503766, upload-time = "2026-02-07T05:06:44.303Z" }, - { url = 
"https://files.pythonhosted.org/packages/6d/37/c0c27f093a2352fa5d491a0404beb8b8ea1a56a8e88d61081160ef284da3/line_profiler-5.0.1-cp39-cp39-manylinux_2_24_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:15c53434bd2885938a46eee75373d5a5fef724803578a2262076ce4693032c6d", size = 1501758, upload-time = "2026-02-07T05:06:45.403Z" }, - { url = "https://files.pythonhosted.org/packages/d7/55/c2160db00c0c07a044f6f29034bb441c5c3eb29e907590a823cdfede8ad3/line_profiler-5.0.1-cp39-cp39-musllinux_1_2_x86_64.whl", hash = "sha256:ac96cc3c3946a9bfbb723843a0eceeed3295d633fe65960e3ed096d31b065eab", size = 2524435, upload-time = "2026-02-07T05:06:46.716Z" }, - { url = "https://files.pythonhosted.org/packages/e6/92/262533d5bb1fa81da52d1a6d2dc828c05a578fe4ed4506fb6feaa00f14d6/line_profiler-5.0.1-cp39-cp39-win_amd64.whl", hash = "sha256:f7f17946e5cf2cdcf8406656bebc0ba8fb9550b4a0558bce52e2b8e2c047d1a3", size = 486001, upload-time = "2026-02-07T05:06:48.63Z" }, +sdist = { url = "https://files.pythonhosted.org/packages/03/b6/6d18ad201417a9c5168995541d0fd7981b5652b2b34f6e46a3a93c0f1beb/line_profiler-5.0.2.tar.gz", hash = "sha256:8d8a990c84c64bcde45af22af502d17bc0ae107be405ce41bba92af5c39c0000", size = 407075, upload-time = "2026-02-23T23:31:20.698Z" } +wheels = [ + { url = "https://files.pythonhosted.org/packages/22/9c/d2ba5e1f7da98e3dff9c333dd914c284cd733827987e7ed6a039c7fc008c/line_profiler-5.0.2-cp310-cp310-macosx_10_9_universal2.whl", hash = "sha256:1ab5d8de6d3c0381b477cc73dde9c36b6c52ba1928d6daba85ac9e790a3f0086", size = 651703, upload-time = "2026-02-23T23:29:45.092Z" }, + { url = "https://files.pythonhosted.org/packages/1b/20/9f99d89ff0ad56e5e6190262ce16a8b2dad1f23b9dc0bc4da608fd42c16f/line_profiler-5.0.2-cp310-cp310-macosx_10_9_x86_64.whl", hash = "sha256:b45da5408286b5395ccb707d1cb2b5aeeb8828466cd2f62e8ab2d7cfb0e1b38c", size = 508846, upload-time = "2026-02-23T23:29:47.341Z" }, + { url = 
"https://files.pythonhosted.org/packages/5a/44/04d4f21dd1ffca9911402a8cc0ded6f9d89dfd5d3f2a0498704502e5b9af/line_profiler-5.0.2-cp310-cp310-macosx_11_0_arm64.whl", hash = "sha256:f6163a43474584db9da495d00869d51a66fdef3962ec3df76f998b1a89308123", size = 497428, upload-time = "2026-02-23T23:29:49.008Z" }, + { url = "https://files.pythonhosted.org/packages/bd/61/7e4658db06e3e3c41713bc600fc26e22607b08969d6044dd640ca26613b1/line_profiler-5.0.2-cp310-cp310-manylinux_2_24_aarch64.manylinux_2_28_aarch64.whl", hash = "sha256:a7759e9e4688ed1be1e674dee599500ba47f2f2c76f903184df615352bc182a8", size = 1486964, upload-time = "2026-02-23T23:29:50.849Z" }, + { url = "https://files.pythonhosted.org/packages/03/2e/5507cf3190052906ba9fe77477cf655d446a9c517a41c290050e05cd1fec/line_profiler-5.0.2-cp310-cp310-manylinux_2_24_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:e0f8b3b766bde49ca8c1e26b5ff9013358435106d11bc4838764e117d2bcd3ed", size = 1499663, upload-time = "2026-02-23T23:29:53.578Z" }, + { url = "https://files.pythonhosted.org/packages/c0/d9/cb19fb7b899f30d2261bb792d8f9d1e0b6ba5b4f3fc94b45136fd412562a/line_profiler-5.0.2-cp310-cp310-musllinux_1_2_aarch64.whl", hash = "sha256:42d4bef2ce9f2e2cc6b872034ef4bf2e18ec44f3a8a09bdf91232a74abe4074c", size = 2442445, upload-time = "2026-02-23T23:29:55.952Z" }, + { url = "https://files.pythonhosted.org/packages/83/5d/9b1d142f41ae23b6da22954ec42c367491bcad356c3507f291c101522e88/line_profiler-5.0.2-cp310-cp310-musllinux_1_2_x86_64.whl", hash = "sha256:7dd5942d091541803b38ebc4c9d7f375d43b93e41e811f2449a022fd5b24d283", size = 2525207, upload-time = "2026-02-23T23:29:57.671Z" }, + { url = "https://files.pythonhosted.org/packages/68/a8/d9ac8429b74b1e30e22e2d51bc5d534864dd276ba856ea8ca32fca1f1198/line_profiler-5.0.2-cp310-cp310-win_amd64.whl", hash = "sha256:c22d1d8ee371e92149b5d9578e78072bcb36435adb62e84bf3bb0c173e14c6f6", size = 479862, upload-time = "2026-02-23T23:29:59.178Z" }, + { url = 
"https://files.pythonhosted.org/packages/d1/cd/a92661cb24987d0a4cf86f7ec9f6a0f74ea981c520b6458275d41b11ec0a/line_profiler-5.0.2-cp311-cp311-macosx_10_9_universal2.whl", hash = "sha256:602370a9bb8d020ea28ddac3bb7fd4331c91d495e6e81d5f75752cbb2f2bb802", size = 650547, upload-time = "2026-02-23T23:30:00.593Z" }, + { url = "https://files.pythonhosted.org/packages/7e/77/5489458f8cc01ea00cdf25bc6fa74e748a30e7b758275cc98b485b59de91/line_profiler-5.0.2-cp311-cp311-macosx_10_9_x86_64.whl", hash = "sha256:26956eef9668f641c9c6f9adb6b1b236252a73405e238452768812af14f9a145", size = 507989, upload-time = "2026-02-23T23:30:01.848Z" }, + { url = "https://files.pythonhosted.org/packages/cc/4f/14aace66e067fb5a774580bc71b348c323c91a4e2ac223a98822de67ecda/line_profiler-5.0.2-cp311-cp311-macosx_11_0_arm64.whl", hash = "sha256:64376a726009b7842706a3fef2a6dba051f0bf5a98cbbcafa4c23d6c83bac53c", size = 497137, upload-time = "2026-02-23T23:30:03.515Z" }, + { url = "https://files.pythonhosted.org/packages/52/48/ea92cc96538a192fdf74249f28ad9e9c0526743b12817f920985e7fdbbe2/line_profiler-5.0.2-cp311-cp311-manylinux_2_24_aarch64.manylinux_2_28_aarch64.whl", hash = "sha256:68432878d7598916f02be3fbb83e3a4727443cfd96af9cbea05cc1ae9749ed82", size = 1526617, upload-time = "2026-02-23T23:30:05.211Z" }, + { url = "https://files.pythonhosted.org/packages/2f/8d/c73544fba5683a50d7579f21614942d9223e2dd618986be56d5beff561e5/line_profiler-5.0.2-cp311-cp311-manylinux_2_24_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:7956e79526dc4898ca63dd8e3f435ff674b35d7e06457bfff883efae3ecb8359", size = 1540576, upload-time = "2026-02-23T23:30:07.234Z" }, + { url = "https://files.pythonhosted.org/packages/bf/33/a3255c143148e5886179b689b0413bb0b7edbd6af1531db577f8bf8b69fd/line_profiler-5.0.2-cp311-cp311-musllinux_1_2_aarch64.whl", hash = "sha256:29370b9d239c0e68ea017bbf2b582beed6122a439ef97a8e38228708b52ba595", size = 2480641, upload-time = "2026-02-23T23:30:08.638Z" }, + { url = 
"https://files.pythonhosted.org/packages/fb/c8/d398438f9af55c4bd799c15c6a9fc5d349c36ef461edce8ed3569768c964/line_profiler-5.0.2-cp311-cp311-musllinux_1_2_x86_64.whl", hash = "sha256:f47682cb1cef2a3b3865e59fdaf2f486196476fa508ddcdd838e3484626c2a68", size = 2565285, upload-time = "2026-02-23T23:30:10.015Z" }, + { url = "https://files.pythonhosted.org/packages/74/08/663f3dd52ebebcb98cddca9ca4f4b51fcd43ce2c8c6b676de28e2ad6f384/line_profiler-5.0.2-cp311-cp311-win_amd64.whl", hash = "sha256:b74416342261d0434e2ae0ff074ec0ecf0ea4e66ec2db5a95dd8b0ec7f2b1a8b", size = 480440, upload-time = "2026-02-23T23:30:11.588Z" }, + { url = "https://files.pythonhosted.org/packages/5a/c4/12ac6f1c139780301d23b9f64ccb160366063124e91134fcccfb24d8b9b7/line_profiler-5.0.2-cp311-cp311-win_arm64.whl", hash = "sha256:f0d51ddb054d38a37607b335475c8be9fae4152b01873d1fc1d6b6a317b66398", size = 464965, upload-time = "2026-02-23T23:30:13.262Z" }, + { url = "https://files.pythonhosted.org/packages/99/92/fb766e6355118d2a681c18525d4c005c146ec44b064ccfd70f4529d8d260/line_profiler-5.0.2-cp312-cp312-macosx_10_13_universal2.whl", hash = "sha256:256c1d5e84a93254dbe656d0486322190cc68f6b517544edef17a9f00167e680", size = 646920, upload-time = "2026-02-23T23:30:14.692Z" }, + { url = "https://files.pythonhosted.org/packages/7c/9d/3583c1cdc740206de9e4734bdcf377d649b89ea876bc36001d95b3dea67d/line_profiler-5.0.2-cp312-cp312-macosx_10_13_x86_64.whl", hash = "sha256:892f0cd9967b101ce7528be2d388616037c73cb27830effd7493fa021165c622", size = 505695, upload-time = "2026-02-23T23:30:16.375Z" }, + { url = "https://files.pythonhosted.org/packages/27/60/412476a1d09beac783d11d3bbf85fe6c1e3d50058e3c28967fee59c46649/line_profiler-5.0.2-cp312-cp312-macosx_11_0_arm64.whl", hash = "sha256:d2d02735843c14337dae3e80d95a732b4657ef759def75162ef97a1aa7466aac", size = 495859, upload-time = "2026-02-23T23:30:18.036Z" }, + { url = 
"https://files.pythonhosted.org/packages/37/75/65028bad08264fd8f9c3f0fd405c539ff552c2d1cf2a00965157ad148973/line_profiler-5.0.2-cp312-cp312-manylinux_2_24_aarch64.manylinux_2_28_aarch64.whl", hash = "sha256:0d2e166a86dc9c78c349ee18b592b98ebfb9dae615f63fc77cce5f5f751a6ad0", size = 1464882, upload-time = "2026-02-23T23:30:19.273Z" }, + { url = "https://files.pythonhosted.org/packages/0d/6c/2d0286f67e6bb2b00ae23f9af6df18bfc6bb1ac5d803a8f46bd3eb22a8f1/line_profiler-5.0.2-cp312-cp312-manylinux_2_24_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:5a870b68af1539d718d030f4c4726d35cff4b14ab605147e65222933c5c0e10e", size = 1484331, upload-time = "2026-02-23T23:30:20.571Z" }, + { url = "https://files.pythonhosted.org/packages/4e/a4/b01359733214a1a85c5f86f3953b07deb61b267efa0328e8d436a1ad80ea/line_profiler-5.0.2-cp312-cp312-musllinux_1_2_aarch64.whl", hash = "sha256:fe8cd787caa2a02ca7e138832fa4cab1f198377eaf6e5e8263e8b7506157c454", size = 2411802, upload-time = "2026-02-23T23:30:21.995Z" }, + { url = "https://files.pythonhosted.org/packages/d1/f4/1fa91206a6c50091cf614fdd5c9d349eb3a57d23f5eb8be8fffe7e0525b9/line_profiler-5.0.2-cp312-cp312-musllinux_1_2_x86_64.whl", hash = "sha256:70ff915ade9e3ec38ff043ff093b590bbb3055e6fc8b311e0fe14cd78fb2a7f7", size = 2495790, upload-time = "2026-02-23T23:30:23.448Z" }, + { url = "https://files.pythonhosted.org/packages/87/18/d389c72dce6c8318c088a7c29ee8961a913c8a1c6469888b517e8f47ddaf/line_profiler-5.0.2-cp312-cp312-win_amd64.whl", hash = "sha256:026779b9dfca0f367174f5d34bcccffce2755db40a4389f0d8a531a2e3ca7cfc", size = 478790, upload-time = "2026-02-23T23:30:24.848Z" }, + { url = "https://files.pythonhosted.org/packages/3f/54/d171600a4190c07215090a88846ef0093b5bf34a81f8059115592dbb1354/line_profiler-5.0.2-cp312-cp312-win_arm64.whl", hash = "sha256:fe22b927f05a61a0149976bf0d22d8e56fa742ec89f3d72358db71a1f440c77b", size = 462269, upload-time = "2026-02-23T23:30:26.237Z" }, + { url = 
"https://files.pythonhosted.org/packages/a7/64/856b920e026fbd239df875ec05e63583f7bd7f250805215ab6e132da11d1/line_profiler-5.0.2-cp313-cp313-macosx_10_13_universal2.whl", hash = "sha256:016effba91d34d15229d41984e921a27f66a7b634f1d7adf6c57c743f3d6a0eb", size = 642642, upload-time = "2026-02-23T23:30:27.63Z" }, + { url = "https://files.pythonhosted.org/packages/3b/08/0a56fab0a36818af6ffc8073700db2f402db5a62477b69d938c19871d631/line_profiler-5.0.2-cp313-cp313-macosx_10_13_x86_64.whl", hash = "sha256:506e800dd408a8aafadf39ff4e4a1375ae7794910d00098f191520a2f390cb99", size = 503787, upload-time = "2026-02-23T23:30:29.226Z" }, + { url = "https://files.pythonhosted.org/packages/ed/9a/0ab45cf92b2c13261b475c440e18bb18d9497cc2ad5dfaf38c231c72b02b/line_profiler-5.0.2-cp313-cp313-macosx_11_0_arm64.whl", hash = "sha256:1e67f77bcb349a663cb22819f65621bcd2a39889524dd890d1d88f8736841b7b", size = 493631, upload-time = "2026-02-23T23:30:30.502Z" }, + { url = "https://files.pythonhosted.org/packages/fb/15/a5b603f0c7c795aa656a95e2a70d139dc499b5d153b6a3129bbba6b6f913/line_profiler-5.0.2-cp313-cp313-manylinux_2_24_aarch64.manylinux_2_28_aarch64.whl", hash = "sha256:f6b9d08e85fd48d254ae253e76dc72598e94200ef7002eb1ae0bab4cc9c5e41a", size = 1464022, upload-time = "2026-02-23T23:30:31.793Z" }, + { url = "https://files.pythonhosted.org/packages/27/6f/0f399c72eecaf8f8c00e84238b5786afc34d0a4ef5ad10c63c712715ba86/line_profiler-5.0.2-cp313-cp313-manylinux_2_24_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:31290e06ac25cd87fee46ebe979541d4ec7c8d6f15c5cbe5874a932b1cee95bb", size = 1483425, upload-time = "2026-02-23T23:30:33.15Z" }, + { url = "https://files.pythonhosted.org/packages/65/18/f4c642a29719a84d17ea8b58cd6e60943573a28228c30c568565ed5512aa/line_profiler-5.0.2-cp313-cp313-musllinux_1_2_aarch64.whl", hash = "sha256:1d7fbcc2dbd8534fc6f7d2b440076749b2235cdc525eb177fefafeaf7550373f", size = 2410276, upload-time = "2026-02-23T23:30:34.943Z" }, + { url = 
"https://files.pythonhosted.org/packages/90/33/701203686e7d27a545e3bbc8e81fffc7d091c42ed33564be4e72376ef45b/line_profiler-5.0.2-cp313-cp313-musllinux_1_2_x86_64.whl", hash = "sha256:55f04671f48afcd90858c18fbdb2509463c77d717ed5424664f096e902206b6b", size = 2495283, upload-time = "2026-02-23T23:30:36.616Z" }, + { url = "https://files.pythonhosted.org/packages/34/e1/59fe065f67ed1fb8f974a9e3434685af1fc1f6a154489f7ab0992eab1c73/line_profiler-5.0.2-cp313-cp313-win_amd64.whl", hash = "sha256:d2262d4bbbcf72bd430fc5763073792a0f1cb20e64de0f7ecf6e8ae16627d876", size = 479287, upload-time = "2026-02-23T23:30:38.152Z" }, + { url = "https://files.pythonhosted.org/packages/e9/83/89f6ae52fa77960404ee88fc078ee680e504bf1ab8724ac01430cee0f5a5/line_profiler-5.0.2-cp313-cp313-win_arm64.whl", hash = "sha256:abf755b020d91b639cbc563015eca381ca64e6bd27ee55ef9004a3a17b6d4dcf", size = 461960, upload-time = "2026-02-23T23:30:39.657Z" }, + { url = "https://files.pythonhosted.org/packages/c6/ae/43caf21edd10a7f5e138bdffcad01ade9a704462a923054402bbadbe5364/line_profiler-5.0.2-cp314-cp314-macosx_10_13_universal2.whl", hash = "sha256:a1cc30f3f7877fec826d0f40f400ee6c99239dc6a2f587b8d90d06a42d29c8a5", size = 648335, upload-time = "2026-02-23T23:30:41.042Z" }, + { url = "https://files.pythonhosted.org/packages/34/90/8a1fb985dc582d140fc92608dec3037a484c5f8ab99ae05c24031aa68000/line_profiler-5.0.2-cp314-cp314-macosx_10_13_x86_64.whl", hash = "sha256:f90923e1cc4ff8eda1d18e525089fca7bfd6dfe8817ec530a913a2c7444ba0fd", size = 508823, upload-time = "2026-02-23T23:30:42.16Z" }, + { url = "https://files.pythonhosted.org/packages/a4/01/855c55e195ac0aadb8ca4e4c65311f945ed02a2491b436bc33cee318d841/line_profiler-5.0.2-cp314-cp314-macosx_11_0_arm64.whl", hash = "sha256:cc3d0ecccb14f014d05b32f687d22adcb98bf59fdcc721e7a4330f0372a56f92", size = 499868, upload-time = "2026-02-23T23:30:44.188Z" }, + { url = 
"https://files.pythonhosted.org/packages/fc/48/fe73d6192a37637534366306a7871ef0f7ff5973bd87da082e4bf5ec0764/line_profiler-5.0.2-cp314-cp314-manylinux_2_24_aarch64.manylinux_2_28_aarch64.whl", hash = "sha256:5341f36e532e7ed28e323f5502a29b397b66a6708c6427a77f965148a2e5ddec", size = 1460660, upload-time = "2026-02-23T23:30:45.601Z" }, + { url = "https://files.pythonhosted.org/packages/49/1c/e1236e0f3c7ec1e19e74d61ac15143a7826b5767296de87bcf3aa26548a1/line_profiler-5.0.2-cp314-cp314-manylinux_2_24_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:4cce501f9d996b317b599c0ae99e3eb1bd447874ef8fef1da330b27f3a23eb50", size = 1475222, upload-time = "2026-02-23T23:30:47.014Z" }, + { url = "https://files.pythonhosted.org/packages/8e/ad/02302fd2a82949277036bc557ecebddb9bc6282b76a4da7660258fe82111/line_profiler-5.0.2-cp314-cp314-musllinux_1_2_aarch64.whl", hash = "sha256:b237d82fb792c3db7c80a8675d3c48993d4421b14d96ae602f7fe9ccf1f85903", size = 2413428, upload-time = "2026-02-23T23:30:48.828Z" }, + { url = "https://files.pythonhosted.org/packages/b8/c7/b3efe646c8b9fdc6fe26720860276c8a2bb745ffe30f5bcbc9726b975673/line_profiler-5.0.2-cp314-cp314-musllinux_1_2_x86_64.whl", hash = "sha256:74febeca89128a37a32e6500c99665943c0d11e6043f46ce95596d7d1e1732a7", size = 2494741, upload-time = "2026-02-23T23:30:50.755Z" }, + { url = "https://files.pythonhosted.org/packages/0e/ad/ddadd39eb92900f063f27e8f6d748c03dc2638873f07ebf3cee75f29711f/line_profiler-5.0.2-cp314-cp314-win_amd64.whl", hash = "sha256:d6ce98faff60d9552a30e233648a848682b5d664a7e09e9669163a8f01e28147", size = 485700, upload-time = "2026-02-23T23:30:52.373Z" }, + { url = "https://files.pythonhosted.org/packages/d0/45/a529f355eea8fb790fbdee0273d6c0049dba3232a36e82c30d849b00e996/line_profiler-5.0.2-cp314-cp314-win_arm64.whl", hash = "sha256:8be7cc5f4ed9ad87352129d1a494cf5ba7f0fced0472201d83ac9fbfa20f798b", size = 469781, upload-time = "2026-02-23T23:30:53.747Z" }, + { url = 
"https://files.pythonhosted.org/packages/5a/7b/d991a99d7e61f2dc91dd19fabac1cc73525b57cc00acff96c89a9092a164/line_profiler-5.0.2-cp39-cp39-macosx_10_9_universal2.whl", hash = "sha256:e9d958eef80f6ee1a6ca0c8d0b6eb51d6d028fe0ee416b1778e8f35c47338046", size = 653249, upload-time = "2026-02-23T23:31:07.855Z" }, + { url = "https://files.pythonhosted.org/packages/77/ef/2858b749383e68dc2976fa67074985fe6742d3965bf21f5c010c59a3b92c/line_profiler-5.0.2-cp39-cp39-macosx_10_9_x86_64.whl", hash = "sha256:805e019833291d4fce1c04802ab1c855b9444daa260bd126809019bd74ebb247", size = 509669, upload-time = "2026-02-23T23:31:09.157Z" }, + { url = "https://files.pythonhosted.org/packages/13/de/b414d1031e3742e51f5fc92abd8fabf1d20e806794ff742c3719e52e72a8/line_profiler-5.0.2-cp39-cp39-macosx_11_0_arm64.whl", hash = "sha256:2aa014d0dbd8b7969cd660e16922419dbdff8a78f16a5a459f784a112b1377dc", size = 498207, upload-time = "2026-02-23T23:31:10.333Z" }, + { url = "https://files.pythonhosted.org/packages/d7/7c/3080af1e015749795cd641b4cb3167a5d5c1f39fa726de61d136164f3abd/line_profiler-5.0.2-cp39-cp39-manylinux_2_24_aarch64.manylinux_2_28_aarch64.whl", hash = "sha256:2895dede79ec527d757b4664c37718c10fa17fcacce9bd5c58160eb3675da405", size = 1482476, upload-time = "2026-02-23T23:31:12.266Z" }, + { url = "https://files.pythonhosted.org/packages/b2/51/7537180a417a3a91481273a908b7f9e2ee052ca4d02a9f4ba68b36ec54b4/line_profiler-5.0.2-cp39-cp39-manylinux_2_24_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:d817ccfa338e0e25e0753e4e82764ad153df498d12cbf77cc173eaa5d1861a5c", size = 1496222, upload-time = "2026-02-23T23:31:14.035Z" }, + { url = "https://files.pythonhosted.org/packages/63/0e/3276d3f1f7f3f30bd9a6eba3952bf134fc7984b617be273ac91acb33c2d0/line_profiler-5.0.2-cp39-cp39-musllinux_1_2_aarch64.whl", hash = "sha256:d457871d99dcafc0beb9ee3ae5b89a9ea7c44c00de59d0782be6fc569f916890", size = 2439919, upload-time = "2026-02-23T23:31:15.496Z" }, + { url = 
"https://files.pythonhosted.org/packages/f2/57/0c146a3af2af42427fac034a8eba32e537f7969e8c38f00fa4a65b339c0b/line_profiler-5.0.2-cp39-cp39-musllinux_1_2_x86_64.whl", hash = "sha256:aadee364ba01354ddc5c9167c788bd210e9209262ca433121507f8aa6fc5ceed", size = 2518895, upload-time = "2026-02-23T23:31:17.434Z" }, + { url = "https://files.pythonhosted.org/packages/32/9f/228020e1bce6308723b5455e7de054428b9908b340b4c702dd2b3409f016/line_profiler-5.0.2-cp39-cp39-win_amd64.whl", hash = "sha256:2b70a38fe852d7c95eca105ec603a28ca6f0bd3c909f2cca9e7cca2bf19cb77e", size = 480441, upload-time = "2026-02-23T23:31:19.162Z" }, ] [[package]] @@ -3175,112 +3190,110 @@ wheels = [ [[package]] name = "optree" -version = "0.18.0" +version = "0.19.0" source = { registry = "https://pypi.org/simple" } dependencies = [ { name = "typing-extensions", marker = "python_full_version >= '3.10'" }, ] -sdist = { url = "https://files.pythonhosted.org/packages/83/8e/09d899ad531d50b79aa24e7558f604980fe4048350172e643bb1b9983aec/optree-0.18.0.tar.gz", hash = "sha256:3804fb6ddc923855db2dc4805b4524c66e00f1ef30b166be4aadd52822b13e06", size = 165178, upload-time = "2025-11-14T08:58:31.234Z" } -wheels = [ - { url = "https://files.pythonhosted.org/packages/b9/45/22dfc0a3a8ca568c2702d725cdce85e2e1767d4241f2ac7d28c518ce7019/optree-0.18.0-cp310-cp310-macosx_10_9_x86_64.whl", hash = "sha256:1f19867b02a547fc9f11d27c0413e7483cef89699e16f3b9e8af73a9b25e6061", size = 353272, upload-time = "2025-11-14T08:56:31.887Z" }, - { url = "https://files.pythonhosted.org/packages/73/ba/f89cd7fe3fca9f434b6cd6ba80716f1ad59f40453482c04d6887e5c516de/optree-0.18.0-cp310-cp310-macosx_11_0_arm64.whl", hash = "sha256:ad428ccdb2a40804919880dfe8d2a3021fd4418be15ea7ecb8434ab249badf9f", size = 330497, upload-time = "2025-11-14T08:56:33.447Z" }, - { url = "https://files.pythonhosted.org/packages/73/93/463a531b863bffae92d6d1c7857f655234f12ad46fe088bf5bd5cd37cd67/optree-0.18.0-cp310-cp310-manylinux_2_26_aarch64.manylinux_2_28_aarch64.whl", hash = 
"sha256:e058cc51d9d57b45801060af9f74765b95bedfc59fd6df1c7489ae0825126be5", size = 349724, upload-time = "2025-11-14T08:56:34.475Z" }, - { url = "https://files.pythonhosted.org/packages/b9/4f/7d54f0eeea24f5893422d65fce835d80e644ebac8a4570d762f994cfe97c/optree-0.18.0-cp310-cp310-manylinux_2_26_i686.manylinux_2_28_i686.whl", hash = "sha256:daab231cf768937ce4675376ea3e214d399116d9867a6737372c31c58630bdfc", size = 404190, upload-time = "2025-11-14T08:56:35.622Z" }, - { url = "https://files.pythonhosted.org/packages/66/af/044080368dc8ab809c5b089ad3132e7775043c5a8a165465df1f0c99dd62/optree-0.18.0-cp310-cp310-manylinux_2_26_ppc64le.manylinux_2_28_ppc64le.whl", hash = "sha256:ea357657143f364a764b63b2b1ce12d77156d48a1f32def990b696d755acb629", size = 401798, upload-time = "2025-11-14T08:56:37.001Z" }, - { url = "https://files.pythonhosted.org/packages/0b/97/c449712ccb50af3cb2608718e503e31b259aa55de40ee83bebb159b07593/optree-0.18.0-cp310-cp310-manylinux_2_26_s390x.manylinux_2_28_s390x.whl", hash = "sha256:f81f5340c8df50662abaf753ab07095901e40b934efb27da50032a4ae71c5a97", size = 397477, upload-time = "2025-11-14T08:56:38.298Z" }, - { url = "https://files.pythonhosted.org/packages/66/8e/bdda357f31e2b7c80b61b9785e6dea25e94c01e7237c7a8b1af38f369ee4/optree-0.18.0-cp310-cp310-manylinux_2_27_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:66f142c743732cd4e630ea84415f654a00c792793c7f80d4511167f0f89796a6", size = 386631, upload-time = "2025-11-14T08:56:39.707Z" }, - { url = "https://files.pythonhosted.org/packages/a8/2f/32575d5ac11993c19a6b60f6e15cafcc807adabf92caf06824bc3b5d50f9/optree-0.18.0-cp310-cp310-manylinux_2_39_riscv64.whl", hash = "sha256:55a2ccd121fccc9df961e982db2f4e8f2b4f7015e814ef70b1140514cdffe214", size = 347972, upload-time = "2025-11-14T08:56:40.656Z" }, - { url = "https://files.pythonhosted.org/packages/bf/7d/72bc9147ed59a6a1cab4e0c43a27d6542029a66c46c5301d05f990dd1991/optree-0.18.0-cp310-cp310-win32.whl", hash = 
"sha256:090a3f0ccafa0fe99d71e7d974ae52ff966ac26c409ec41f96556b96646054ef", size = 277698, upload-time = "2025-11-14T08:56:41.801Z" }, - { url = "https://files.pythonhosted.org/packages/ee/53/cf4e66ac4f134c2211a6f3f04e9d3a98effb6229316bcdadc3d3a9247362/optree-0.18.0-cp310-cp310-win_amd64.whl", hash = "sha256:0e0dbe995241efe70cfb522e89c1a7c968216926725a0e5e20cc72bd5d0311b1", size = 302673, upload-time = "2025-11-14T08:56:43.132Z" }, - { url = "https://files.pythonhosted.org/packages/aa/b2/66c12a8707722bad5f25cd917d528796544fc9045c5933532f5db071cb02/optree-0.18.0-cp311-cp311-macosx_10_9_x86_64.whl", hash = "sha256:80f28e4666aad66e5e20bdc2c47b5bf320250bb5407b3a39dfb1772787a7068f", size = 363110, upload-time = "2025-11-14T08:56:44.782Z" }, - { url = "https://files.pythonhosted.org/packages/d4/f6/5377f265a8dcd61edabf8b87b657d78fca9051eeaf311ed77f73b43526a9/optree-0.18.0-cp311-cp311-macosx_11_0_arm64.whl", hash = "sha256:72fa79be4d6515682417f103ae759a22345439eb1319886be936029215ee00dc", size = 337284, upload-time = "2025-11-14T08:56:46.039Z" }, - { url = "https://files.pythonhosted.org/packages/5e/d7/3045564c1183c7b7cfb32d11a6250fbe9f904f723c02c80a91f71c150b78/optree-0.18.0-cp311-cp311-manylinux_2_26_aarch64.manylinux_2_28_aarch64.whl", hash = "sha256:4cc92339899acb685ee718fd22b25069dfa7be038c63274c54481d54ccc2f9e2", size = 361562, upload-time = "2025-11-14T08:56:47.162Z" }, - { url = "https://files.pythonhosted.org/packages/bd/32/5a7b5a441d82b44c6d2b31ad14f7b1a4d3be7afcc38437d07762212bc9c6/optree-0.18.0-cp311-cp311-manylinux_2_26_i686.manylinux_2_28_i686.whl", hash = "sha256:1545c68299c0ce600e4ea1bc9112765dc4afe9a0b8ab43f955df6566bf78db42", size = 420660, upload-time = "2025-11-14T08:56:48.478Z" }, - { url = "https://files.pythonhosted.org/packages/0d/06/fc7aea4d6c72c4e0f42c157183b3e91c615fdc15da5a5e4e5f8b596a24f3/optree-0.18.0-cp311-cp311-manylinux_2_26_ppc64le.manylinux_2_28_ppc64le.whl", hash = "sha256:8a901666afc2d7a8d0c20decc8079763e3313457ee67210382162d90163c0007", 
size = 417586, upload-time = "2025-11-14T08:56:49.51Z" }, - { url = "https://files.pythonhosted.org/packages/95/cc/bb0607eb8d20cf80ea6b122c059954fb525bbbb7150d650fd87696e4d141/optree-0.18.0-cp311-cp311-manylinux_2_26_s390x.manylinux_2_28_s390x.whl", hash = "sha256:efd162e3bfc7812d75ebf2d0fb2783daee2407a92155af8a90650a6b0fa9342e", size = 413013, upload-time = "2025-11-14T08:56:50.842Z" }, - { url = "https://files.pythonhosted.org/packages/2a/6f/7f2238ec5e9d33e56252c30880bb8f44aec1415474b62b9e33b38594953d/optree-0.18.0-cp311-cp311-manylinux_2_27_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:b0986ff1267a3b44d3ed76c3efb8b7239371444143f6e0d79f9dd23dbe02c7f9", size = 400775, upload-time = "2025-11-14T08:56:52.249Z" }, - { url = "https://files.pythonhosted.org/packages/e5/42/f17e2977ecacdbca50c888731c4f0488f4d499fca1c48c3063bff0d5303b/optree-0.18.0-cp311-cp311-manylinux_2_39_riscv64.whl", hash = "sha256:382e5ca02cbd5b20d713d4da189a8613f828832e2af57ccbe04a9c6b0bd9497e", size = 359998, upload-time = "2025-11-14T08:56:53.321Z" }, - { url = "https://files.pythonhosted.org/packages/73/f2/f5c5758e5db9d1b52e7b7809dcf876f071f64ec03bbd6007eee633bf0027/optree-0.18.0-cp311-cp311-win32.whl", hash = "sha256:056894ce6242cd1c7fed71325a7d9f633b2d3b4420c52af48f6a0c4560d74ca1", size = 283573, upload-time = "2025-11-14T08:56:54.426Z" }, - { url = "https://files.pythonhosted.org/packages/d5/a3/91942b7e6e365f4e05d196dbbb52909aae11f1e2f4b4c8aee5b506f93877/optree-0.18.0-cp311-cp311-win_amd64.whl", hash = "sha256:86f5bf05ad236f666e5395e989d6ac2cbfd02556526703e6c6f0a594c7fa081f", size = 312223, upload-time = "2025-11-14T08:56:55.812Z" }, - { url = "https://files.pythonhosted.org/packages/64/af/a98364b1b27516942db07d9841d8ac2d7ba96039bcd8ed496f7b3f297dc4/optree-0.18.0-cp311-cp311-win_arm64.whl", hash = "sha256:9b1e7e8f9ddc85f05d542b74157bdb73ed0e49aded67d1775f721fcd6eb9be94", size = 317247, upload-time = "2025-11-14T08:56:57.795Z" }, - { url = 
"https://files.pythonhosted.org/packages/d7/9d/e32dba0876d6514b40cd30e57938e3605b7e07d827ac617f072ff64d9cff/optree-0.18.0-cp312-cp312-macosx_10_13_x86_64.whl", hash = "sha256:f02faeda66d531dc5f5356589afcf2a6bc41c8d00bc903efab60f9a2182b140d", size = 369756, upload-time = "2025-11-14T08:56:58.793Z" }, - { url = "https://files.pythonhosted.org/packages/d5/60/e643c3ab2cea904d24e56ab0060c905443ceac716e542392ad8db87f09ef/optree-0.18.0-cp312-cp312-macosx_11_0_arm64.whl", hash = "sha256:e28024e6e343353285cf99ae9c74210f0e89e47b2f0f3af7c72c4a9e89dc3ebc", size = 342794, upload-time = "2025-11-14T08:56:59.819Z" }, - { url = "https://files.pythonhosted.org/packages/79/af/c002cc41f6eba05fddcd6435e7c4ccc037a39a345de824d1a515386b52ee/optree-0.18.0-cp312-cp312-manylinux_2_26_aarch64.manylinux_2_28_aarch64.whl", hash = "sha256:057b983a9526645133553184bed7090bb07855df986abd9e99c456922045c6bc", size = 364436, upload-time = "2025-11-14T08:57:01.17Z" }, - { url = "https://files.pythonhosted.org/packages/c8/32/2049f9597ae75a6a1b9c872ffcc31d96ebebf62ff1b20160eb972aea5456/optree-0.18.0-cp312-cp312-manylinux_2_26_i686.manylinux_2_28_i686.whl", hash = "sha256:8a2003fab79694e04b5f260628511e441c248b46a9fc46138e2424038ac04ada", size = 425359, upload-time = "2025-11-14T08:57:02.509Z" }, - { url = "https://files.pythonhosted.org/packages/d0/14/523cfb3a139df0d2a064b38497c7ee6e49edac6e07c777d2069cffd8545c/optree-0.18.0-cp312-cp312-manylinux_2_26_ppc64le.manylinux_2_28_ppc64le.whl", hash = "sha256:02d9999840fabef85a6b22e757f336d5591f712f99c710d8b232d52e53115314", size = 421373, upload-time = "2025-11-14T08:57:03.953Z" }, - { url = "https://files.pythonhosted.org/packages/65/48/febb5976fa4f37a805809757b050d95effb25a17d3fec439349d18f5d451/optree-0.18.0-cp312-cp312-manylinux_2_26_s390x.manylinux_2_28_s390x.whl", hash = "sha256:79bbe14d6cad81f5840958589daa1b836864ada40031712a446dce8129917efd", size = 420798, upload-time = "2025-11-14T08:57:05.02Z" }, - { url = 
"https://files.pythonhosted.org/packages/02/9c/c38b861e504829e0dcbed2e2b5bc865a98fb7c655e2ddd2bf8e1fd6ae712/optree-0.18.0-cp312-cp312-manylinux_2_27_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:a74c45f04def041504bd21682eaf7f359f1a50dc7cf42b548b6f19aab50596bd", size = 408195, upload-time = "2025-11-14T08:57:05.983Z" }, - { url = "https://files.pythonhosted.org/packages/2e/71/ff4b053ad1242f3d0b8792caa786c1c0138c1fb3d0c0a3720ccc21725739/optree-0.18.0-cp312-cp312-manylinux_2_39_riscv64.whl", hash = "sha256:d2844478690b5892159df0b2500e9d146dc8d3aa5b44e4564d05787b7330eca3", size = 364997, upload-time = "2025-11-14T08:57:07.306Z" }, - { url = "https://files.pythonhosted.org/packages/5f/09/a71dbc096e8195d0e0fe990b394884d061938e0b918b61d16d48d817c0d5/optree-0.18.0-cp312-cp312-win32.whl", hash = "sha256:cfa2e16993ba47e671a4e7ee1ad805f67b8d6744eb30a9d27ea0b07b3b7a22ed", size = 286802, upload-time = "2025-11-14T08:57:08.674Z" }, - { url = "https://files.pythonhosted.org/packages/5d/95/a487d5c93dcb50d768c4cd7d17476b4ce4360c5943bb7251d1e26d38e5cf/optree-0.18.0-cp312-cp312-win_amd64.whl", hash = "sha256:89e81afb11792d13d3777b503c6f21ec17b1a3b7de69cde1ae2c5471bcdcd4a0", size = 312381, upload-time = "2025-11-14T08:57:09.98Z" }, - { url = "https://files.pythonhosted.org/packages/91/ba/0559af098b33103bc65ba9d0c38454e21c142279b913c0b87a89c59c520d/optree-0.18.0-cp312-cp312-win_arm64.whl", hash = "sha256:4eb146711d4cd0876bf93e0118d3e74050b6f633d756c269ce7cda907281b499", size = 315747, upload-time = "2025-11-14T08:57:11.033Z" }, - { url = "https://files.pythonhosted.org/packages/74/60/57874760770dba39e799c88505898b7441786cea24d78bfe0a171e893212/optree-0.18.0-cp313-cp313-android_24_arm64_v8a.whl", hash = "sha256:8d88c00c70b5914904feaf8f505f3512c2f3f4493dbbd93951fcdddc85dcfe8c", size = 876547, upload-time = "2025-11-14T08:57:12.492Z" }, - { url = 
"https://files.pythonhosted.org/packages/c8/bb/413263435c557193c436d977689d1c560a08e362f5bca29e3d62b093412a/optree-0.18.0-cp313-cp313-android_24_x86_64.whl", hash = "sha256:c8841d44f3648b0662e99fc39ef8c248726ddfb4d1bfce4bdba982e51bb7e3f8", size = 876759, upload-time = "2025-11-14T08:57:13.673Z" }, - { url = "https://files.pythonhosted.org/packages/00/6a/c0f03b83fe888af829591561af398bb7bbe1ea770c7e7475b4d464b4dd7c/optree-0.18.0-cp313-cp313-ios_13_0_arm64_iphoneos.whl", hash = "sha256:385bd727cc7bd3c01bd6204028ac2adce8a8f622c296053d9df434aa0e30b01f", size = 340330, upload-time = "2025-11-14T08:57:14.749Z" }, - { url = "https://files.pythonhosted.org/packages/10/e1/ea857ed58f36c7d2071aef8f67ca0c911e45ded8cb482636185e842550ae/optree-0.18.0-cp313-cp313-ios_13_0_arm64_iphonesimulator.whl", hash = "sha256:6fc9f8acde3bb561b2034e96079507fbe6d4624058fe204161eb8ef29f961296", size = 346098, upload-time = "2025-11-14T08:57:15.862Z" }, - { url = "https://files.pythonhosted.org/packages/5b/7c/9ed10c406028c6b215cd26be4a7afd711a323fd98f531432c1d2921f188b/optree-0.18.0-cp313-cp313-macosx_10_13_x86_64.whl", hash = "sha256:71ca2fcad8972ba56d6cfffbcd962f45f5d4bc04182f23d66154b38c2eb37de3", size = 372349, upload-time = "2025-11-14T08:57:16.935Z" }, - { url = "https://files.pythonhosted.org/packages/09/67/51acf67b1b9850e990a1a9b3fa0afcb5bbe9d645b0b6b8be5b3f2dca8f04/optree-0.18.0-cp313-cp313-macosx_11_0_arm64.whl", hash = "sha256:fa8e3878a1857761d64f08a23b32140d29754a53f85f7c87186ced2b5b1b49cb", size = 346522, upload-time = "2025-11-14T08:57:17.953Z" }, - { url = "https://files.pythonhosted.org/packages/24/22/ae957579e22d53d4d24de6bad0a3b3811612fd70a8ecd0c85c81253f22e3/optree-0.18.0-cp313-cp313-manylinux_2_26_aarch64.manylinux_2_28_aarch64.whl", hash = "sha256:27611c6c122745a003b5be7aedba49ef86e9fef46d743c234596de0bde6dc679", size = 368715, upload-time = "2025-11-14T08:57:19.392Z" }, - { url = 
"https://files.pythonhosted.org/packages/e1/6d/9a3399af22aea044a58e1734257b575b9b17eb67c2c6fcbbb194268e6946/optree-0.18.0-cp313-cp313-manylinux_2_26_i686.manylinux_2_28_i686.whl", hash = "sha256:cbb083a15ea968ad99e7da17d24632348d69e26534e83c69941f3020ed7536eb", size = 430189, upload-time = "2025-11-14T08:57:20.552Z" }, - { url = "https://files.pythonhosted.org/packages/94/07/9c63a8cad90993848ac6cae5162e2e40f62e9a0738cb522972662ef3c7ab/optree-0.18.0-cp313-cp313-manylinux_2_26_ppc64le.manylinux_2_28_ppc64le.whl", hash = "sha256:0d25941de1acba176305dbdeb931dea6143b30d64ebdc5bfea2bfc12ef9e2b0a", size = 424979, upload-time = "2025-11-14T08:57:21.96Z" }, - { url = "https://files.pythonhosted.org/packages/4a/82/0ab26372377ba1a422a6f38d8237bb2d061dcd23be85bc3ed77404f7b05c/optree-0.18.0-cp313-cp313-manylinux_2_26_s390x.manylinux_2_28_s390x.whl", hash = "sha256:1db0a6497203a13063a8f044ae751dd5d8253cb815359270c38de0e4c9f8bed5", size = 423201, upload-time = "2025-11-14T08:57:23.047Z" }, - { url = "https://files.pythonhosted.org/packages/19/68/0a761a4f1b2e56ffbf3f223e967074c1331404f6dfb2b2cda6ecf62f4653/optree-0.18.0-cp313-cp313-manylinux_2_27_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:328857d7a35129904b21164f6b0c2ff1d728ad1f5838589c5f437a16c94213c8", size = 414079, upload-time = "2025-11-14T08:57:24.205Z" }, - { url = "https://files.pythonhosted.org/packages/5d/80/29a767bff7413aa593075477a9d17a05d5098bfc0878c087e6b76a3b15df/optree-0.18.0-cp313-cp313-manylinux_2_39_riscv64.whl", hash = "sha256:9d4b9d8c7e9335120ecf222d817699d17de743ad118080fb40467c367f009143", size = 368196, upload-time = "2025-11-14T08:57:25.328Z" }, - { url = "https://files.pythonhosted.org/packages/16/b6/7dfeb866a56f478103faaede5488e55f03916fa707de716ead34dd6f2c3f/optree-0.18.0-cp313-cp313-win32.whl", hash = "sha256:8b9ad4a01a1346b11acc574b7f932dea1a7c7ab31d93546a7540a1f02b3e724a", size = 287207, upload-time = "2025-11-14T08:57:26.461Z" }, - { url = 
"https://files.pythonhosted.org/packages/7f/92/6e803aa6bf441fae18874f1953e656e179d402b7cbc00c33ae68f0b632db/optree-0.18.0-cp313-cp313-win_amd64.whl", hash = "sha256:5b75e32c191e4b8cf42a8aa854ed264df82936136c0bcad77be44605da41cdfc", size = 314929, upload-time = "2025-11-14T08:57:27.762Z" }, - { url = "https://files.pythonhosted.org/packages/bd/85/e59302d4286552d2694b118e6f5a886490cfd939751c2011b2d3638b2d02/optree-0.18.0-cp313-cp313-win_arm64.whl", hash = "sha256:8a4ca121b6fc6b04300fa225fe6c31897e424db0d92691875af326f8c4e1cead", size = 317459, upload-time = "2025-11-14T08:57:28.826Z" }, - { url = "https://files.pythonhosted.org/packages/44/c9/2009e027f500fb38920d349523dd06b5714687905be24fe06bab90082706/optree-0.18.0-cp313-cp313t-macosx_10_13_x86_64.whl", hash = "sha256:27b1d0cadcf4627c98abbbdce912dbc2243f5687f3c7df39963b793c89321c65", size = 415598, upload-time = "2025-11-14T08:57:29.936Z" }, - { url = "https://files.pythonhosted.org/packages/e2/d1/de1d6d8654d4765a439f27a155d098092ec8670039e2e0ec8383383a2fe7/optree-0.18.0-cp313-cp313t-macosx_11_0_arm64.whl", hash = "sha256:b8adc912ecb6e4fd9df227ded66efaa6702f46a98e1403554be3c9c51d0ca920", size = 387016, upload-time = "2025-11-14T08:57:31.071Z" }, - { url = "https://files.pythonhosted.org/packages/ec/30/6ce07f763b6c0d967a2d683a486eb4450ec053aeae9731133dba600232b2/optree-0.18.0-cp313-cp313t-manylinux_2_26_aarch64.manylinux_2_28_aarch64.whl", hash = "sha256:5bc1221068a58175e0ad62afc199893f77c653206673a5552992a604c66fb77e", size = 386289, upload-time = "2025-11-14T08:57:32.146Z" }, - { url = "https://files.pythonhosted.org/packages/76/26/14ed2ff6a69490754446910280a8d0195c489e9fe610d37046b254971627/optree-0.18.0-cp313-cp313t-manylinux_2_26_i686.manylinux_2_28_i686.whl", hash = "sha256:a479fa25b6e2430e530d00f0c27a55e15ecb9de8ad2d0aec3d40b680e2d6df64", size = 442286, upload-time = "2025-11-14T08:57:33.285Z" }, - { url = 
"https://files.pythonhosted.org/packages/f3/c7/50bd556ffc76a1cdac1b7460428dee62f8359b60ed07c9846eab0acb5696/optree-0.18.0-cp313-cp313t-manylinux_2_26_ppc64le.manylinux_2_28_ppc64le.whl", hash = "sha256:446c46c53cb8f13abcc0d7dd1989d59bb059953c122fe9901ef53de7fb38b33e", size = 438254, upload-time = "2025-11-14T08:57:34.358Z" }, - { url = "https://files.pythonhosted.org/packages/47/0e/bb9edf64e79f275e5f59fc3dcc49841147cff81598e99e56413523050506/optree-0.18.0-cp313-cp313t-manylinux_2_26_s390x.manylinux_2_28_s390x.whl", hash = "sha256:81e755124b77e766166c9d05206b90c68f234f425ad2e3c8a6c96f0db548c67b", size = 437817, upload-time = "2025-11-14T08:57:35.382Z" }, - { url = "https://files.pythonhosted.org/packages/be/c4/808b606f840cb53fca2a94cbe82ff26fe23965484dfc4fbb49b6232f990b/optree-0.18.0-cp313-cp313t-manylinux_2_27_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:7ae6945f68771b1389ee46a1778e779f4ad76bca9306f3e39eb397f9a0dd2753", size = 426692, upload-time = "2025-11-14T08:57:36.652Z" }, - { url = "https://files.pythonhosted.org/packages/1e/b7/4156ec100d5539400e85ec213e86e154c396efa6135be277de74e19748e2/optree-0.18.0-cp313-cp313t-manylinux_2_39_riscv64.whl", hash = "sha256:571b732229d7b2e7a2215f57586f8ec0140e07c0faea916e456cbbfa819e56cb", size = 387482, upload-time = "2025-11-14T08:57:37.806Z" }, - { url = "https://files.pythonhosted.org/packages/a0/3b/76a1b45688be72e37965aa467296ebbc743786492287d45907e045933625/optree-0.18.0-cp313-cp313t-win32.whl", hash = "sha256:3014537ff7e4e091ee46e57976f7d95c52f66a0e3eb5ebcbe0de0d924504b58e", size = 318347, upload-time = "2025-11-14T08:57:39.153Z" }, - { url = "https://files.pythonhosted.org/packages/b7/21/87a30a42f1c14365099dc2d656c73bef90a2becbaa1249eca09bf4d9277b/optree-0.18.0-cp313-cp313t-win_amd64.whl", hash = "sha256:a63df296fec376c5cd08298a85109db4a130f4cc8df15916fc92d44ef6068937", size = 351794, upload-time = "2025-11-14T08:57:40.247Z" }, - { url = 
"https://files.pythonhosted.org/packages/bc/34/37f409de017aa06ee98a01ddb8b93960bd29459f01f090cc461a250977d2/optree-0.18.0-cp313-cp313t-win_arm64.whl", hash = "sha256:9460cba62e941626beb75c99a803373b38a52136d5f1932fcdfdcede1df6f2ef", size = 351225, upload-time = "2025-11-14T08:57:41.582Z" }, - { url = "https://files.pythonhosted.org/packages/3e/f0/dabcb70f2065f782e4c2fac18bde75267d24aa5813b58e7ae9e045ecf9f0/optree-0.18.0-cp314-cp314-android_24_arm64_v8a.whl", hash = "sha256:5b126c34b459ef4f10f3a4d7d222416d9102b3c5a76b39f346c611792f144821", size = 876006, upload-time = "2025-11-14T08:57:42.676Z" }, - { url = "https://files.pythonhosted.org/packages/3c/da/6d524879da8892ea8a2562278d0aca06827e7c053015806c5853bb9c3bd8/optree-0.18.0-cp314-cp314-android_24_x86_64.whl", hash = "sha256:895f23a4cd8aee2c2464efdad2d9bde28a2aaabee634c96423a933f40e74a67e", size = 876251, upload-time = "2025-11-14T08:57:44.173Z" }, - { url = "https://files.pythonhosted.org/packages/54/f8/588807ec9c21bfec2fcf6b3e4f93abac62cad9bc0b8c0e248f1c30d9c160/optree-0.18.0-cp314-cp314-ios_13_0_arm64_iphoneos.whl", hash = "sha256:db00c604c1ae452f6092293bf230984d4f6cbb3ad905a9991e8cf680fd7d1523", size = 339800, upload-time = "2025-11-14T08:57:45.301Z" }, - { url = "https://files.pythonhosted.org/packages/b8/b9/a4214afaa44ff7d8b2c02ed058b318fcfd73af06daeac45d4845ef26d1b6/optree-0.18.0-cp314-cp314-ios_13_0_arm64_iphonesimulator.whl", hash = "sha256:7172b16e87c87160475275e4bfaa6e4067ccde184d2cca65ba25a402a8ed7758", size = 345613, upload-time = "2025-11-14T08:57:46.362Z" }, - { url = "https://files.pythonhosted.org/packages/8c/cd/31ca853e5f1e9002789de46e5263a3f23d9f9cb9fa490c8bf97fb02076c1/optree-0.18.0-cp314-cp314-macosx_10_15_x86_64.whl", hash = "sha256:5e669f98b9af9f66144c7ae09912d0367ac3182abe016f67cdd15cb45e13c923", size = 371117, upload-time = "2025-11-14T08:57:47.468Z" }, - { url = 
"https://files.pythonhosted.org/packages/66/e1/45193039b4432f4142eb978c847cd64533c4db7dc5dcdeb406ceac396961/optree-0.18.0-cp314-cp314-macosx_11_0_arm64.whl", hash = "sha256:0959bac58631e64e2ac6349cc284b37872c24f353b3d73b4682202a431f07d76", size = 346091, upload-time = "2025-11-14T08:57:48.604Z" }, - { url = "https://files.pythonhosted.org/packages/dc/33/8c6efe13c5cccb464bba868203649888dc875d2010c8a1acec0e9af88e37/optree-0.18.0-cp314-cp314-manylinux_2_26_aarch64.manylinux_2_28_aarch64.whl", hash = "sha256:cde70c97e4cc4e997e8fda2266e40a9bff7679c72ab4af6e15e81748a12882cc", size = 370191, upload-time = "2025-11-14T08:57:49.73Z" }, - { url = "https://files.pythonhosted.org/packages/f0/5e/0fffd06757494e88b3e5699f6df2da301dd9bf19a4f31c197c585dc5001e/optree-0.18.0-cp314-cp314-manylinux_2_26_i686.manylinux_2_28_i686.whl", hash = "sha256:9104fc8915890e7292e5833fc677e4749607c67aa3cf8884677267078201c2f3", size = 430059, upload-time = "2025-11-14T08:57:50.837Z" }, - { url = "https://files.pythonhosted.org/packages/bf/17/92d0dade6ff46aebad86ae23ac801251e7de18526eee216986de684c3375/optree-0.18.0-cp314-cp314-manylinux_2_26_ppc64le.manylinux_2_28_ppc64le.whl", hash = "sha256:1f674e34202383f8b42fa9335f13bedfb6b6f019c66e1f41034929e4be203423", size = 426169, upload-time = "2025-11-14T08:57:52.211Z" }, - { url = "https://files.pythonhosted.org/packages/e5/36/eedcfcd421801578119ff5fb6731cd50c65f57a729f6f76f8fe6f37d9939/optree-0.18.0-cp314-cp314-manylinux_2_26_s390x.manylinux_2_28_s390x.whl", hash = "sha256:1b75e083137f361377ff8d70df885ab3a1cf8980e4019e3f311237579adadb64", size = 426153, upload-time = "2025-11-14T08:57:53.591Z" }, - { url = "https://files.pythonhosted.org/packages/63/a6/0bf029f0bdd05f49548644267fc69574a7ca18735010a86d736e7a1ed03c/optree-0.18.0-cp314-cp314-manylinux_2_27_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:c1f20e8754abe312a701ee00d071ddd8502e9d97ca38fbc56204d14a9ffcb41c", size = 413576, upload-time = "2025-11-14T08:57:54.714Z" }, - { url = 
"https://files.pythonhosted.org/packages/5e/de/71c51bdf6053e6d7cbdf176eb30d7b5c5ad6180eb6e822d13b36b1edecff/optree-0.18.0-cp314-cp314-manylinux_2_39_riscv64.whl", hash = "sha256:289b184cc41dfc400a30db6207ec997884d14540aae2cba10cb88dc7ebaae2a1", size = 369173, upload-time = "2025-11-14T08:57:56.169Z" }, - { url = "https://files.pythonhosted.org/packages/d9/b0/ea9d562ca87f25bb90944eb411d1ac29ec6c7e38cebf2024e8124fd0e31d/optree-0.18.0-cp314-cp314-win32.whl", hash = "sha256:f5197f864630162f008f5dfad3fceef32553c0fa7639eee1b8e280d924ed678e", size = 292058, upload-time = "2025-11-14T08:57:57.184Z" }, - { url = "https://files.pythonhosted.org/packages/5e/0c/87c9ced927a4cda6b99959cc9341e0a1acb4cd6eb49d2ccd7ac57039c63e/optree-0.18.0-cp314-cp314-win_amd64.whl", hash = "sha256:2b5cfb5fc643f16d3a7d957807e55a937dce07566c49ccc4aa71b01064c56758", size = 322019, upload-time = "2025-11-14T08:57:58.203Z" }, - { url = "https://files.pythonhosted.org/packages/39/a8/481afd23d2e66fddc5891b1540778ebedae90e770fe44c68c9f3dbd9e321/optree-0.18.0-cp314-cp314-win_arm64.whl", hash = "sha256:89d5156f8a0a3792701e1c31473eb307f0b45696f48dc51d721f1bfe0c3a950f", size = 324966, upload-time = "2025-11-14T08:57:59.413Z" }, - { url = "https://files.pythonhosted.org/packages/e5/76/7ba344abd30ce4e3c29d50936a2f28341a772bcebec2948be9915f2a3ece/optree-0.18.0-cp314-cp314t-macosx_10_15_x86_64.whl", hash = "sha256:20536964ba2458f166c1e8ab25951e3fc0a5056b651bd08f16be99bb3ffed54a", size = 415280, upload-time = "2025-11-14T08:58:00.806Z" }, - { url = "https://files.pythonhosted.org/packages/89/27/90de0dcbfdaf82ce616eaa2193a540ec7b4dd1587a5ff0c6a7485c846dd6/optree-0.18.0-cp314-cp314t-macosx_11_0_arm64.whl", hash = "sha256:07c5f64783ad0f0f80e61c25f276ce79b47deda83ed7956a4a9af6385fe8f60d", size = 387087, upload-time = "2025-11-14T08:58:02.501Z" }, - { url = 
"https://files.pythonhosted.org/packages/d3/ff/91b9898b08b6f3187a4c99836648893f68d62f61a304b6f6ec61d3e27a77/optree-0.18.0-cp314-cp314t-manylinux_2_26_aarch64.manylinux_2_28_aarch64.whl", hash = "sha256:30a2636279bdc805c8e154a0f346bcf704626b831ff44724d305fb72c90b7389", size = 386244, upload-time = "2025-11-14T08:58:03.628Z" }, - { url = "https://files.pythonhosted.org/packages/ea/b2/d20c302926c6c18c379801a6532c0722f7f1a305b7d5712e437708ebdb42/optree-0.18.0-cp314-cp314t-manylinux_2_26_i686.manylinux_2_28_i686.whl", hash = "sha256:80d971060c888c3989132b7e75dfb50848636d41bc931af1b93fe2019fba469c", size = 442297, upload-time = "2025-11-14T08:58:04.887Z" }, - { url = "https://files.pythonhosted.org/packages/90/7d/015c58cf2b0aa0049ac33e1aa76b1fd4563551aeb9f83b10c2217668c355/optree-0.18.0-cp314-cp314t-manylinux_2_26_ppc64le.manylinux_2_28_ppc64le.whl", hash = "sha256:d569730b2647c51a5ee68d67198aa9a78c7a55563d57b8cc1ca8d8c8377e7621", size = 438180, upload-time = "2025-11-14T08:58:06.252Z" }, - { url = "https://files.pythonhosted.org/packages/38/83/1eea2619385bd3ecfda76bb563f4127dc8b4197dcb614eb3f9032c82c2a7/optree-0.18.0-cp314-cp314t-manylinux_2_26_s390x.manylinux_2_28_s390x.whl", hash = "sha256:c017539e1196ea08f20aea3a4c473f758149b851678edd3d15773b4326decf83", size = 437838, upload-time = "2025-11-14T08:58:07.359Z" }, - { url = "https://files.pythonhosted.org/packages/e6/99/c1b84be2143df01819818e8c5db0c284ce995a51134030352ade6d9d1d75/optree-0.18.0-cp314-cp314t-manylinux_2_27_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:51e2cd9ac7fecfd5f6f56ce69f4f805553c226a2744810175959eb408101513c", size = 426705, upload-time = "2025-11-14T08:58:08.426Z" }, - { url = "https://files.pythonhosted.org/packages/70/8a/e1da179a5ebfdb9e279ae655ec38f19e8893a36193203fd6022a31d573b4/optree-0.18.0-cp314-cp314t-manylinux_2_39_riscv64.whl", hash = "sha256:a5c213a291c798139ed9ff80aec4bfcd2ac8f001bc015a9cdeb78457e9687dd3", size = 387489, upload-time = "2025-11-14T08:58:09.459Z" }, - { url = 
"https://files.pythonhosted.org/packages/a0/f8/b1367b93290b9e1b99a5ad1bbedaf76da62cf81578f452a91bfef5cfd1bb/optree-0.18.0-cp314-cp314t-win32.whl", hash = "sha256:e4a468ae1541614b5aa7b4f00254bce005ab7572fbb1fc764af4ee17d90fde7b", size = 327239, upload-time = "2025-11-14T08:58:10.52Z" }, - { url = "https://files.pythonhosted.org/packages/35/84/295aa33e8530c72b45592714a5b07b23e178d2df44baa964c8a91226eac4/optree-0.18.0-cp314-cp314t-win_amd64.whl", hash = "sha256:94983b3aa31ee401d2ac77ba570a3157d83f9508cfbb006095a48770e0a1c5ca", size = 366546, upload-time = "2025-11-14T08:58:11.575Z" }, - { url = "https://files.pythonhosted.org/packages/db/67/65af89c4a64b13df70dcf9f09fc42623f490e5b4f4854577679e781c5c32/optree-0.18.0-cp314-cp314t-win_arm64.whl", hash = "sha256:b4da3223c5b4cf694822752d0fbb6bf34c3f41648af1bd1b443cc3d68cc55106", size = 358524, upload-time = "2025-11-14T08:58:12.967Z" }, - { url = "https://files.pythonhosted.org/packages/83/63/58efd0916d8ccddfb5f786aa67c68937512e11d066eb424a5a9451e72fb3/optree-0.18.0-cp39-cp39-macosx_10_9_x86_64.whl", hash = "sha256:f04286908654ffb05455254ebf72fe69473fc4560fc7ea49410df94dea6783a2", size = 353280, upload-time = "2025-11-14T08:58:14.205Z" }, - { url = "https://files.pythonhosted.org/packages/e5/53/84bcac1e05ea49a5bfa3d52712fb788b507c2248a9d4266939d4c1d0188a/optree-0.18.0-cp39-cp39-macosx_11_0_arm64.whl", hash = "sha256:421b839c7ff30df5791e66c89b2e9c2f68191dd6a5d6927c32bcc6b887090df8", size = 330590, upload-time = "2025-11-14T08:58:15.283Z" }, - { url = "https://files.pythonhosted.org/packages/79/1c/8782c71b73db1d217b18f62cda888f6f976662ed630caa61fe9dd6fa2e8a/optree-0.18.0-cp39-cp39-manylinux_2_26_aarch64.manylinux_2_28_aarch64.whl", hash = "sha256:d20765efa494a80a8fd91c4de8890f34de8e9f234da5516e8f34f55703cfb93d", size = 349539, upload-time = "2025-11-14T08:58:16.359Z" }, - { url = 
"https://files.pythonhosted.org/packages/b8/26/1c3b2a77a0dbd58d9488ddabd66bcf9745e9eb64922433ed42f0224752e8/optree-0.18.0-cp39-cp39-manylinux_2_26_i686.manylinux_2_28_i686.whl", hash = "sha256:30f95279188f6b9300e17c1557989baa991c2d6f519013bd8fea13462a0e6a45", size = 404850, upload-time = "2025-11-14T08:58:17.409Z" }, - { url = "https://files.pythonhosted.org/packages/e7/4e/2b05367bc3a86179772bd1545647521254d00a10b2b546c9abea54ad646e/optree-0.18.0-cp39-cp39-manylinux_2_26_ppc64le.manylinux_2_28_ppc64le.whl", hash = "sha256:bda4572392ac1dff3fc67b6d9a4b1084e1637972e8135ad3788b4ce7cf0a90f5", size = 402379, upload-time = "2025-11-14T08:58:18.508Z" }, - { url = "https://files.pythonhosted.org/packages/da/d4/04c5c3a978db8e8ab2f75c27cfdce69dd8098a4f19daf0de2288ba11745f/optree-0.18.0-cp39-cp39-manylinux_2_26_s390x.manylinux_2_28_s390x.whl", hash = "sha256:b45d7172c67fc8d2b69f77b384998b39793ee91f8b3b46c609297b781fb7eea5", size = 397642, upload-time = "2025-11-14T08:58:19.755Z" }, - { url = "https://files.pythonhosted.org/packages/25/f0/3d322678cf6e691e8c044153506b699c6886df4f13adb4d885cc8922cdf4/optree-0.18.0-cp39-cp39-manylinux_2_27_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:56bb19ff827c9a443202b52bf103705ce96ef14d045e0a30d0d7ee7dbcef6a0d", size = 386870, upload-time = "2025-11-14T08:58:20.79Z" }, - { url = "https://files.pythonhosted.org/packages/c6/94/da38e68e20fb34737d83808b2ad05422496f05ab98c550af6cebabdaadc4/optree-0.18.0-cp39-cp39-manylinux_2_39_riscv64.whl", hash = "sha256:b7aa0de08bbbfcef6e49c107f9f397f5d4742548500f16e3e6c5e0b9e4ff0faa", size = 347826, upload-time = "2025-11-14T08:58:22.008Z" }, - { url = "https://files.pythonhosted.org/packages/f2/fa/07a4b30a5e8b268c2f0e083812c568e3db9abf4392ea2d1611716737474a/optree-0.18.0-cp39-cp39-win32.whl", hash = "sha256:31539dec60af84e16e99574634811d38e34e1fb381f40d6f489a2e582bf41f03", size = 277949, upload-time = "2025-11-14T08:58:23.162Z" }, - { url = 
"https://files.pythonhosted.org/packages/ba/81/d419aa8ad240878e934e88546b51e0764faf0d68caf055fb3c53f90c2e16/optree-0.18.0-cp39-cp39-win_amd64.whl", hash = "sha256:7699957183f8d45402edd6266e175510317f5fcd7f0e623510f2eb7e1ebfc667", size = 309380, upload-time = "2025-11-14T08:58:24.229Z" }, - { url = "https://files.pythonhosted.org/packages/79/5d/3c654144031d4dc28442bca8343bc02d177ab5da4c38ed521cad59e5d4b0/optree-0.18.0-pp311-pypy311_pp73-macosx_10_15_x86_64.whl", hash = "sha256:30fefc84975ac41d9075993196c64ce0c240510f0539cff121d63b709e03846f", size = 371229, upload-time = "2025-11-14T08:58:25.702Z" }, - { url = "https://files.pythonhosted.org/packages/d2/da/4ddcf4fbb75c5779f58b1f85657209f5e38eb34d41c038502e1d0bdae68d/optree-0.18.0-pp311-pypy311_pp73-macosx_11_0_arm64.whl", hash = "sha256:ba23caafd0e0c911bb7eab54e5cf69644af864d153e4b2abdab83ff0ef357ba1", size = 346077, upload-time = "2025-11-14T08:58:26.762Z" }, - { url = "https://files.pythonhosted.org/packages/50/56/55e565d593d91a47ee70b91b693a763362aafac3bde06288036309c87949/optree-0.18.0-pp311-pypy311_pp73-manylinux_2_26_aarch64.manylinux_2_28_aarch64.whl", hash = "sha256:10f29662d637b80363dc620da46ddc58def7acf7935e20595b23e216ea912367", size = 363316, upload-time = "2025-11-14T08:58:27.888Z" }, - { url = "https://files.pythonhosted.org/packages/b0/c9/19d5a8d99934f80bfa02d503587a5697a374324f95ccc4c1656a8a74fc3b/optree-0.18.0-pp311-pypy311_pp73-manylinux_2_27_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:ff7326f36ed70d84c3fd62fb39bc6858f699640b8ab238c3cb8dafe1e200af59", size = 403560, upload-time = "2025-11-14T08:58:28.994Z" }, - { url = "https://files.pythonhosted.org/packages/13/61/016ff1dcf63b97bfd182af8705f156e128a1e3adfcd94a2283fe04cf95d7/optree-0.18.0-pp311-pypy311_pp73-win_amd64.whl", hash = "sha256:01b79aaee544adf5bfa573db32b943030dfeb9fd1c6e7a97aa417db56a8127e7", size = 314844, upload-time = "2025-11-14T08:58:30.146Z" }, +sdist = { url = 
"https://files.pythonhosted.org/packages/3d/63/7b078bc36d5a206c21b03565a818ede38ff0fbf014e92085ec467ef10adb/optree-0.19.0.tar.gz", hash = "sha256:bc1991a948590756409e76be4e29efd4a487a185056d35db6c67619c19ea27a1", size = 175199, upload-time = "2026-02-23T01:56:37.752Z" } +wheels = [ + { url = "https://files.pythonhosted.org/packages/93/7f/265404c1d3f8f6ea7ac9926e5d9cffb34e9e4650fc053df3b6157748c7d5/optree-0.19.0-cp310-cp310-macosx_10_9_x86_64.whl", hash = "sha256:ebf33aa7487b1b8f7a9c6ac6298f41614955b30124133de72b82ae1714e2451a", size = 392338, upload-time = "2026-02-23T01:54:38.226Z" }, + { url = "https://files.pythonhosted.org/packages/27/10/372f936347c1812de938377102d410f900a1e46fa770921b46013b4c0493/optree-0.19.0-cp310-cp310-macosx_11_0_arm64.whl", hash = "sha256:c5606d57a1601d8f2a9dcfa6ea774b91fe4eb4877a961040e8b61ef11244fafa", size = 363922, upload-time = "2026-02-23T01:54:39.907Z" }, + { url = "https://files.pythonhosted.org/packages/11/17/b967abd72f5c0bc6f94146ba861114ef93e87bb45a340532a0c22f5254bd/optree-0.19.0-cp310-cp310-manylinux_2_26_aarch64.manylinux_2_28_aarch64.whl", hash = "sha256:c1d2059b0005ff0d3ceff9d35b777f82014cde7e841f981b3518d419d76e814b", size = 383890, upload-time = "2026-02-23T01:54:40.948Z" }, + { url = "https://files.pythonhosted.org/packages/f8/a9/bd693caf9f249a9616ad31beca80f25d7ba43caadb2cffe10d2048b1e68e/optree-0.19.0-cp310-cp310-manylinux_2_26_i686.manylinux_2_28_i686.whl", hash = "sha256:66ac5ffb43e7db4d2035f6c7dff459601e047929dbee503cc83515ff07def236", size = 439934, upload-time = "2026-02-23T01:54:42.079Z" }, + { url = "https://files.pythonhosted.org/packages/05/d4/ed9d13ff27bce911ed51dca99dd75ec774e55d6634b363bd08a2b7be6e18/optree-0.19.0-cp310-cp310-manylinux_2_26_ppc64le.manylinux_2_28_ppc64le.whl", hash = "sha256:1058a0ece1dd8e2f1ed19e1b5f2a4195b08b248421e9a1ca643ce678cf78fdaf", size = 441453, upload-time = "2026-02-23T01:54:43.125Z" }, + { url = 
"https://files.pythonhosted.org/packages/f0/df/2c238f389b02843f4088d40f0af5bbf6a888e77067841f49588da6ea7c4a/optree-0.19.0-cp310-cp310-manylinux_2_26_s390x.manylinux_2_28_s390x.whl", hash = "sha256:36b81df071a4e6e9c4dcccf85405867bc12796ac270b665d7a1474e576f15b01", size = 436456, upload-time = "2026-02-23T01:54:44.143Z" }, + { url = "https://files.pythonhosted.org/packages/cb/08/eb95702a6ce8aba8cc5e58a8e47e4cd61c1c3639ac26914e8e84019edb7c/optree-0.19.0-cp310-cp310-manylinux_2_27_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:0230ed9ba60fbe97ee23c000850c54308009ba51a129a1060137f329f460e42e", size = 419682, upload-time = "2026-02-23T01:54:45.622Z" }, + { url = "https://files.pythonhosted.org/packages/5f/a3/e49cacd8a3b1ca32c98aff8c54bd37db99b8ffecd4967652dc84b4256dd5/optree-0.19.0-cp310-cp310-manylinux_2_39_riscv64.whl", hash = "sha256:1b0c90272aa51e5043e17455f0467eb3426b09d130187f901c9c0bab62b2490b", size = 384818, upload-time = "2026-02-23T01:54:46.958Z" }, + { url = "https://files.pythonhosted.org/packages/46/0e/73ab9446b5e40e408ee3526cc25c7a7ff4f75a251ed815711e7aa184ee48/optree-0.19.0-cp310-cp310-win32.whl", hash = "sha256:4fad3d69591eedaf75a8acb7505db844b1a06c998d41a3a3e6dd4d3b764b1b55", size = 297879, upload-time = "2026-02-23T01:54:48.038Z" }, + { url = "https://files.pythonhosted.org/packages/d7/c7/54a69ca41ce4ed2f10cf2e980386a629c740572ae58db79a9babe60dc28c/optree-0.19.0-cp310-cp310-win_amd64.whl", hash = "sha256:cf381c451ae2dd7a88552f208890e5e36399510fe53718cc15e8ef10c7a95732", size = 321784, upload-time = "2026-02-23T01:54:49.282Z" }, + { url = "https://files.pythonhosted.org/packages/ba/5e/5967a15ed63d98717815a51265bd73fb5ec5a4ca9938c59be909ef12f66f/optree-0.19.0-cp311-cp311-macosx_10_9_x86_64.whl", hash = "sha256:e245b03f4edc90971dcf01691775105084f433393e6ba19a6fd0f151d5f39b58", size = 408515, upload-time = "2026-02-23T01:54:50.888Z" }, + { url = 
"https://files.pythonhosted.org/packages/43/5b/4e062008bfd2f836079afec06dd9cba76a01010aaffa8eae0d1915fa0ef6/optree-0.19.0-cp311-cp311-macosx_11_0_arm64.whl", hash = "sha256:304ebd0449ace6ab0c18489baea1163a915b9aeb43a8635c6373ec09767fa1ba", size = 378935, upload-time = "2026-02-23T01:54:52.372Z" }, + { url = "https://files.pythonhosted.org/packages/bf/da/67424dad1834ba4bfe1ca16c1ca754e5fd32d7f8e7a488a70346c1bf1160/optree-0.19.0-cp311-cp311-manylinux_2_26_aarch64.manylinux_2_28_aarch64.whl", hash = "sha256:48366b810f0d37e2fa7bcb758d2a3198d62f0a4a60ac7e14287768345a4a95a9", size = 400165, upload-time = "2026-02-23T01:54:53.579Z" }, + { url = "https://files.pythonhosted.org/packages/65/af/f49c516de0ca47ce6a7a8f74499e63ba0a07744eae78958a2e6de0902fd6/optree-0.19.0-cp311-cp311-manylinux_2_26_i686.manylinux_2_28_i686.whl", hash = "sha256:b511556fdc5a558ccd65f841710e9103f120cb575735f87ddfa3728b4098e606", size = 460863, upload-time = "2026-02-23T01:54:55.718Z" }, + { url = "https://files.pythonhosted.org/packages/92/cb/01e0a0551d6934f1158a7f90f690b4a68c1a08ba519ea4ab113e4064e499/optree-0.19.0-cp311-cp311-manylinux_2_26_ppc64le.manylinux_2_28_ppc64le.whl", hash = "sha256:ba4925025a47b12237ff8acdcf8b9e972e7f36acafb291373d4de318a4b1b12c", size = 459863, upload-time = "2026-02-23T01:54:57.045Z" }, + { url = "https://files.pythonhosted.org/packages/b2/65/16b9c07e894a51e807404855e717e7e8221ce55f7fec3df46e9dc67c378c/optree-0.19.0-cp311-cp311-manylinux_2_26_s390x.manylinux_2_28_s390x.whl", hash = "sha256:a8d873ae944fb7d11797bb2ea2666379e78a4608e918d69a4c97fef4779f7da4", size = 459552, upload-time = "2026-02-23T01:54:58.159Z" }, + { url = "https://files.pythonhosted.org/packages/c3/34/3c9e11011c73457dc00e031713df6822eb0690cb849f479796073e070a56/optree-0.19.0-cp311-cp311-manylinux_2_27_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:d6437d23f2708fe0fcdba9d3d75a1b82022caf2c15248059b8bf7e563422fb71", size = 441096, upload-time = "2026-02-23T01:54:59.536Z" }, + { url = 
"https://files.pythonhosted.org/packages/9c/39/0932e49b76fc3354a9c39c8bb57b04661e83b28fa6c74be09b520d98915b/optree-0.19.0-cp311-cp311-manylinux_2_39_riscv64.whl", hash = "sha256:c2e01d07b2368f25932d5c6071d270a1d4db9e6cb2014eaafeded3d4c73ee406", size = 404747, upload-time = "2026-02-23T01:55:00.591Z" }, + { url = "https://files.pythonhosted.org/packages/ef/d4/11ba9195b65bd77f8955bad5aca8c5fbfb0b273e52c4e7b29b3416c587f0/optree-0.19.0-cp311-cp311-win32.whl", hash = "sha256:846d07372ebc2ca959e9660b28d7c59ae34a02aac2fb73d0f3ec3de79af2fcb0", size = 306775, upload-time = "2026-02-23T01:55:01.659Z" }, + { url = "https://files.pythonhosted.org/packages/d9/13/ee6a65060edde25517151eff8a9e1468e6fd65054fabd14a441161eeafd6/optree-0.19.0-cp311-cp311-win_amd64.whl", hash = "sha256:29d757b394641703098e1f9a62203c46a5f5cfa260cb8546bd05c8a2cdaf8754", size = 331592, upload-time = "2026-02-23T01:55:03.089Z" }, + { url = "https://files.pythonhosted.org/packages/cf/5e/3a4a66d0ecee599b948d2ef17010fcb9c111c69753a3911e4301005169a8/optree-0.19.0-cp311-cp311-win_arm64.whl", hash = "sha256:fd425b501420b437d5e925708898fae927660fd433836ac282adb20d54561c63", size = 343392, upload-time = "2026-02-23T01:55:04.201Z" }, + { url = "https://files.pythonhosted.org/packages/2d/bf/5cbbf61a27f94797c3d9786f6230223023a943b60f5e893d52368f10b8b1/optree-0.19.0-cp312-cp312-macosx_10_13_x86_64.whl", hash = "sha256:7ec4b2ce49622c6be2c8634712b6c63cc274835bac89a56e3ab2ca863a32ff4b", size = 418100, upload-time = "2026-02-23T01:55:05.282Z" }, + { url = "https://files.pythonhosted.org/packages/00/9e/65899e6470f5df289ccdbe9e228fb0cd0ae45ccda8e32c92d6efae1530ef/optree-0.19.0-cp312-cp312-macosx_11_0_arm64.whl", hash = "sha256:f0978603623b4b1f794f05f6bbed0645cb7e219f4a5a349b2a2bd4514d84ac82", size = 388582, upload-time = "2026-02-23T01:55:06.628Z" }, + { url = 
"https://files.pythonhosted.org/packages/d1/dc/f4826835be660181f1b4444ac92b51dda96d4634d3c2271e14598da7bf2a/optree-0.19.0-cp312-cp312-manylinux_2_26_aarch64.manylinux_2_28_aarch64.whl", hash = "sha256:8c9e52c50ed3f3f8b1cf4e47a20a7c5e77175b4f84b2ecf390a76f0d1dd91da6", size = 407457, upload-time = "2026-02-23T01:55:07.713Z" }, + { url = "https://files.pythonhosted.org/packages/ce/b0/89283ac1dd1ead3aa3d7a6b45a26846f457bded79a83b6828fc1ed9a6db3/optree-0.19.0-cp312-cp312-manylinux_2_26_i686.manylinux_2_28_i686.whl", hash = "sha256:3fe3e5f7a30a7d08ddba0a34e48f5483f6c4d7bb710375434ad3633170c73c48", size = 471230, upload-time = "2026-02-23T01:55:09.244Z" }, + { url = "https://files.pythonhosted.org/packages/2a/a2/47f620f87b0544b2e0eb0b3c661682bd0ea1c79f6e38f9147bc0f835c973/optree-0.19.0-cp312-cp312-manylinux_2_26_ppc64le.manylinux_2_28_ppc64le.whl", hash = "sha256:8315527e1f14a91173fe6871847da7b949048ec61ff8b3e507fc286e75b0aa3c", size = 469442, upload-time = "2026-02-23T01:55:10.387Z" }, + { url = "https://files.pythonhosted.org/packages/84/e9/b9ae18404135de53809fb994b754ac0eac838d8c4dfa8a10a811d8dec91d/optree-0.19.0-cp312-cp312-manylinux_2_26_s390x.manylinux_2_28_s390x.whl", hash = "sha256:938fb15d140ab65148f4e6975048facbef83a9210353fbedd471ac39e7544339", size = 468840, upload-time = "2026-02-23T01:55:11.419Z" }, + { url = "https://files.pythonhosted.org/packages/0a/e5/a77df15a62b37bb14c81b5757e2a0573f57e7c06d125a410ad2cd7cefb72/optree-0.19.0-cp312-cp312-manylinux_2_27_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:2b8209570340135a7e586c90f393f3c6359e8a49c40d783196721cc487e51d9c", size = 451408, upload-time = "2026-02-23T01:55:12.501Z" }, + { url = "https://files.pythonhosted.org/packages/8c/43/1aa431cee19cd98c4229e468767021f9a92195d9431857e28198a3a3ce2f/optree-0.19.0-cp312-cp312-manylinux_2_39_riscv64.whl", hash = "sha256:1397dc925026917531a43fda32054ae1e77e5ed9bf8284bcae6354c19c26e14a", size = 412544, upload-time = "2026-02-23T01:55:14.048Z" }, + { url = 
"https://files.pythonhosted.org/packages/5b/b9/b94fd3a116b80951d692a82f4135ae84b3d78bd1b092250aff76a3366138/optree-0.19.0-cp312-cp312-win32.whl", hash = "sha256:68f58e8f8b75c76c51e61e3dc2d9e94609bafb0e1a6459e6d525ced905cd9a74", size = 312033, upload-time = "2026-02-23T01:55:15.101Z" }, + { url = "https://files.pythonhosted.org/packages/9e/7f/31fa1b2311038bfc355ad6e4e4e63d028719cb67fb3ebe6fb76ff2124105/optree-0.19.0-cp312-cp312-win_amd64.whl", hash = "sha256:5c44ca0f579ed3e0ca777a5711d4a6c1b374feacf1bb4fe9cfe85297b0c8d237", size = 335374, upload-time = "2026-02-23T01:55:16.094Z" }, + { url = "https://files.pythonhosted.org/packages/09/86/863bc3f42f83113f5c6a5beaf4fec3c3481a76872f3244d0e64fb9ebd3b0/optree-0.19.0-cp312-cp312-win_arm64.whl", hash = "sha256:0461f796b4ade3fab519d821b0fa521f07e2af70206b76aac75fcfdc2e051fca", size = 345868, upload-time = "2026-02-23T01:55:18.006Z" }, + { url = "https://files.pythonhosted.org/packages/ee/61/d79c7eeb87e98d08bc8d95ed08dee83bedb4e55371a7d2ae3c874ec02608/optree-0.19.0-cp313-cp313-android_24_arm64_v8a.whl", hash = "sha256:1eea5b7be833c6d555d08ff68046d3dd2112dfb39e6f1eb09887ab6c617a6d64", size = 923043, upload-time = "2026-02-23T01:55:19.018Z" }, + { url = "https://files.pythonhosted.org/packages/2d/ed/e80504f65e7e80fdcd129258428d7976ea9f03bf9dad56a5293c44d563ad/optree-0.19.0-cp313-cp313-ios_13_0_arm64_iphoneos.whl", hash = "sha256:4d9cf9dfa0ac051e0ed82869d782f0affdbdb1daa5f2e851d37ea8625c60071a", size = 385597, upload-time = "2026-02-23T01:55:20.586Z" }, + { url = "https://files.pythonhosted.org/packages/65/e5/d1926a2f0e0240f6800ff385c8486879f7da0a5a030b7aa5d84e44e9c9ca/optree-0.19.0-cp313-cp313-ios_13_0_arm64_iphonesimulator.whl", hash = "sha256:43c4f8ba5755d56d046be2cb1380cbc362234ad93fd9933384c6dd7fdebe6c4a", size = 392265, upload-time = "2026-02-23T01:55:21.662Z" }, + { url = 
"https://files.pythonhosted.org/packages/61/88/9c598325e89bbed29b37a381ebb2b94f1d9d769c973b879b3e9766b4b16d/optree-0.19.0-cp313-cp313-macosx_10_13_x86_64.whl", hash = "sha256:36b1134680ee3f9768ede290da653e1604a8083bce69fef8fb4e46863346d5c8", size = 423763, upload-time = "2026-02-23T01:55:22.97Z" }, + { url = "https://files.pythonhosted.org/packages/6b/d2/fcba2a1826d362a64cb36ec9f675ed6dcddee47099948913122b0aafbe44/optree-0.19.0-cp313-cp313-macosx_11_0_arm64.whl", hash = "sha256:c9f7e7e7bf2ef011d0be1c2e87c96f5dc543dad1ac34430c2f606938c9ec5135", size = 392720, upload-time = "2026-02-23T01:55:24.107Z" }, + { url = "https://files.pythonhosted.org/packages/eb/43/5e6d51d8c203a79cff084efa9f04a745b8ef5cf4c86dbb127e7b192f14d9/optree-0.19.0-cp313-cp313-manylinux_2_26_aarch64.manylinux_2_28_aarch64.whl", hash = "sha256:bb5752f17afa017b08b0cbac8a383d4bb90035b353bef7a25fe03cda69a21d33", size = 411481, upload-time = "2026-02-23T01:55:25.215Z" }, + { url = "https://files.pythonhosted.org/packages/4b/dc/dc09347136876287b463b8599239d6fa338298fd322ac629817bd2f4def4/optree-0.19.0-cp313-cp313-manylinux_2_26_i686.manylinux_2_28_i686.whl", hash = "sha256:e9b6245993494b1aa54529eb7356aeefa6704c8b436e6e5f20b25c30f7af7620", size = 476695, upload-time = "2026-02-23T01:55:26.23Z" }, + { url = "https://files.pythonhosted.org/packages/ee/cc/5d2c9cf906bd3ae357e7221450bacefd0321d7b94e6171dec39552b346e6/optree-0.19.0-cp313-cp313-manylinux_2_26_ppc64le.manylinux_2_28_ppc64le.whl", hash = "sha256:7351a24b30568c963a92b19f543c9562b36b3222caed2a5ac3209ef910972bec", size = 471846, upload-time = "2026-02-23T01:55:27.288Z" }, + { url = "https://files.pythonhosted.org/packages/64/7f/75b10f88da994fc3da3dc1ab7d54bab7bd3a6fa5eb81b586f13f8bd6ab0e/optree-0.19.0-cp313-cp313-manylinux_2_26_s390x.manylinux_2_28_s390x.whl", hash = "sha256:2c6610a1d1d74af0f53c9bbabb7c265679a9a07e03783c8cc4a678ba3bb6f9a5", size = 473145, upload-time = "2026-02-23T01:55:28.941Z" }, + { url = 
"https://files.pythonhosted.org/packages/78/fc/753bf69b907652d54b7c6012ccb320d8c1a3161454e415331058b6f04246/optree-0.19.0-cp313-cp313-manylinux_2_27_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:37e07a5233be64329cbf41e20ab07c50da53bdc374109a2b376be49c4a34a37f", size = 456160, upload-time = "2026-02-23T01:55:30.515Z" }, + { url = "https://files.pythonhosted.org/packages/e2/a8/70640f9998438f50a0a1c57f2a12aac856cd937f2c4c4feef5a3cfe8e9c7/optree-0.19.0-cp313-cp313-manylinux_2_39_riscv64.whl", hash = "sha256:c23a25caff6b096b62379adb99e2c401805141497ebb8131f271a4c93f5ed5dc", size = 417116, upload-time = "2026-02-23T01:55:31.591Z" }, + { url = "https://files.pythonhosted.org/packages/ad/05/0b8bf4abf5d1a7cd9a19ba680e1ec64ad38eec3204e4e16a769e8aeaa4a2/optree-0.19.0-cp313-cp313-win32.whl", hash = "sha256:045cf112adaebc76c9c7cabde857c01babfc9fae8aa0a28d48f7c565fadf0cb9", size = 312101, upload-time = "2026-02-23T01:55:33.002Z" }, + { url = "https://files.pythonhosted.org/packages/b1/c7/9ce83f115d7f4a47741827a037067b9026c29996ad7913bc40277924c773/optree-0.19.0-cp313-cp313-win_amd64.whl", hash = "sha256:bc0c6c9f99fb90e3a20a8b94c219e6b03e585f65ab9a11c9acd1511a5f885f79", size = 337944, upload-time = "2026-02-23T01:55:34.3Z" }, + { url = "https://files.pythonhosted.org/packages/17/fd/97c27d6e51c8b958b29f5c7b4cdcae4f2e7c9ef5b5465be459811a48876b/optree-0.19.0-cp313-cp313-win_arm64.whl", hash = "sha256:48f492363fa0f9ffe5029d0ecafd2fa30ffe0d5d52c8dd414123f47b743bd42e", size = 347153, upload-time = "2026-02-23T01:55:35.331Z" }, + { url = "https://files.pythonhosted.org/packages/46/45/9a2f05b5d033482b58ca36df6f41b0b28af3ccfa43267a82254c973dcd14/optree-0.19.0-cp313-cp313t-macosx_10_13_x86_64.whl", hash = "sha256:d6362b9e9a0f4dd7c5b88debe182a90541aba7f1ad02d00922d01c4df4b3c933", size = 463985, upload-time = "2026-02-23T01:55:36.681Z" }, + { url = 
"https://files.pythonhosted.org/packages/20/b7/5d0a013c5461e0933ce7385a06eed625358de12216c80da935138e6af205/optree-0.19.0-cp313-cp313t-macosx_11_0_arm64.whl", hash = "sha256:381096a293d385fd3135e5c707bb7e58c584bc9bd50f458237b49da21a621df3", size = 431307, upload-time = "2026-02-23T01:55:37.754Z" }, + { url = "https://files.pythonhosted.org/packages/d6/2c/d3f2674411c8e3338e91e7446af239597ae6efd23f14e2039f29ced3d73e/optree-0.19.0-cp313-cp313t-manylinux_2_26_aarch64.manylinux_2_28_aarch64.whl", hash = "sha256:a9675007cc54371be544bb33fd7eb07b0773d88deacf8aa4cc72fa735c4a4d33", size = 426917, upload-time = "2026-02-23T01:55:39.122Z" }, + { url = "https://files.pythonhosted.org/packages/e9/e9/009964734f19d6996291e77f2c1da5d35a743defc4e89aefb01260e2f9d6/optree-0.19.0-cp313-cp313t-manylinux_2_26_i686.manylinux_2_28_i686.whl", hash = "sha256:406b355d6f29f99535efa97ea16eda70414968271a894c99f48cd91848723706", size = 490603, upload-time = "2026-02-23T01:55:40.123Z" }, + { url = "https://files.pythonhosted.org/packages/2b/4c/96706f855c6b623259e754f751020acfb3452e412f7c85330629ab4b9ecc/optree-0.19.0-cp313-cp313t-manylinux_2_26_ppc64le.manylinux_2_28_ppc64le.whl", hash = "sha256:d05e5bf6ce30258cda643ea50cc424038e5107905e9fc11d19a04453a8d2ee27", size = 486388, upload-time = "2026-02-23T01:55:41.746Z" }, + { url = "https://files.pythonhosted.org/packages/b5/e4/9b23a27c9bd211d22a2e55a5a66e62afe5c75ff98b81fc7d000d879e75e6/optree-0.19.0-cp313-cp313t-manylinux_2_26_s390x.manylinux_2_28_s390x.whl", hash = "sha256:b6e11479d98690fc9efd15d65195af37608269bb1e176b5a836b066440f9c52f", size = 489090, upload-time = "2026-02-23T01:55:42.913Z" }, + { url = "https://files.pythonhosted.org/packages/15/3b/462582f0050508f1ce0734f1dffd19078fb013fa12ccf0761c208ab6f756/optree-0.19.0-cp313-cp313t-manylinux_2_27_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:8d523ffc6d3e22851ed25bec806a6c78d68340259e79941059752209b07a75ec", size = 469601, upload-time = "2026-02-23T01:55:44.084Z" }, + { url = 
"https://files.pythonhosted.org/packages/d6/c6/843c6a33b700ef88407bd5840813e53c6986b6130d94c75c49ff7a2e31f9/optree-0.19.0-cp313-cp313t-manylinux_2_39_riscv64.whl", hash = "sha256:ca148527b6e5d59c25c733e66d4165fbcf85102f4ea10f096370fda533fe77d1", size = 436195, upload-time = "2026-02-23T01:55:45.147Z" }, + { url = "https://files.pythonhosted.org/packages/e3/ed/13f938444de70bec2ff0edef8917a08160d41436a3cad976e541d21747f5/optree-0.19.0-cp313-cp313t-win32.whl", hash = "sha256:40d067cf87e76ad21b8ee2e6ba0347c517c88c2ce7190d666b30b4057e4de5ba", size = 343123, upload-time = "2026-02-23T01:55:46.201Z" }, + { url = "https://files.pythonhosted.org/packages/e1/a2/5074dedbc1be5deca76fe57285ec3e7d5d475922572f92a90f3b3a4f21c5/optree-0.19.0-cp313-cp313t-win_amd64.whl", hash = "sha256:b133e1b9a30ec0bca3f875cfa68c2ce88c0b9e08b21f97f687bb669266411f4a", size = 376560, upload-time = "2026-02-23T01:55:47.58Z" }, + { url = "https://files.pythonhosted.org/packages/49/3a/ea23a29f63d8eadab4e030ebc1329906d44f631076cd1da4751388649960/optree-0.19.0-cp313-cp313t-win_arm64.whl", hash = "sha256:45184b3c73e2147b26b139f34f15c2111cde54b8893b1104a00281c3f283b209", size = 381649, upload-time = "2026-02-23T01:55:48.709Z" }, + { url = "https://files.pythonhosted.org/packages/81/46/643ea3d06c24d351888edfef387e611e550b64a14758169eaeb1d285e658/optree-0.19.0-cp314-cp314-android_24_arm64_v8a.whl", hash = "sha256:adf611b95d3159209c5d1eafcb2eb669733aaf75f9b6754f92d2d8b749192579", size = 921595, upload-time = "2026-02-23T01:55:49.779Z" }, + { url = "https://files.pythonhosted.org/packages/d7/10/8717b93d93fcc3c42a6ee0e0a1a222fe25bc749b32a9e353b039dab836ce/optree-0.19.0-cp314-cp314-ios_13_0_arm64_iphoneos.whl", hash = "sha256:bad7bb78baa83f950bb3c59b09d7ca93d30f6bb975a1a7ce8c5f3dfe65fc834d", size = 384552, upload-time = "2026-02-23T01:55:51.244Z" }, + { url = 
"https://files.pythonhosted.org/packages/a1/5e/8263600ef51ae2decb3e31776c810b8c6b5f8927697046c4434b17346d9d/optree-0.19.0-cp314-cp314-ios_13_0_arm64_iphonesimulator.whl", hash = "sha256:73f122e8acf2f1fd346e9c08f771bc1f7394359793fe632a8e1040733bdbcbec", size = 391280, upload-time = "2026-02-23T01:55:52.681Z" }, + { url = "https://files.pythonhosted.org/packages/04/3c/40774378ebf423d7f074dfd7169f0466eb9de734f0ea5fbb368eddcb1e49/optree-0.19.0-cp314-cp314-macosx_10_15_x86_64.whl", hash = "sha256:36e426e96b3e1773e879189b12c306b58ae70052efc4087e3f14545701c7ac35", size = 421408, upload-time = "2026-02-23T01:55:54.171Z" }, + { url = "https://files.pythonhosted.org/packages/08/67/2e19866a03a6e75eb62194a5b55e1e3154ca1517478c300232b0229f8c2a/optree-0.19.0-cp314-cp314-macosx_11_0_arm64.whl", hash = "sha256:d22b947603be4768c2bd73a59652c94d63465f928b3099e9035f9c48dfc61953", size = 391712, upload-time = "2026-02-23T01:55:55.249Z" }, + { url = "https://files.pythonhosted.org/packages/45/a5/7c059f643bc34c70cc5ebe63c82ae6c33b6b746219f96757d840ea1e2dcd/optree-0.19.0-cp314-cp314-manylinux_2_26_aarch64.manylinux_2_28_aarch64.whl", hash = "sha256:14cc72d0c3a3c0d0b13c66801f2adc6583a01f8499fd151caaa649aabb7f99b9", size = 413471, upload-time = "2026-02-23T01:55:56.371Z" }, + { url = "https://files.pythonhosted.org/packages/67/1a/2c5041cf476fb4b2a27f6644934ac2d079e3e4491f609cba411b3d890291/optree-0.19.0-cp314-cp314-manylinux_2_26_i686.manylinux_2_28_i686.whl", hash = "sha256:5369ac9584ef3fbb703699be694e84dbc78b730bd6d00c48c0c5a588617a1980", size = 477335, upload-time = "2026-02-23T01:55:57.478Z" }, + { url = "https://files.pythonhosted.org/packages/40/a0/abcd7bc3218e1108d253d6783f3e610f0ac3d0e63b2720bff94eb4ed4689/optree-0.19.0-cp314-cp314-manylinux_2_26_ppc64le.manylinux_2_28_ppc64le.whl", hash = "sha256:80b3dca5607f04316a9dcb2bb46df2f04abf4da71731bd4a53a1559c0bee6181", size = 473739, upload-time = "2026-02-23T01:55:58.842Z" }, + { url = 
"https://files.pythonhosted.org/packages/82/49/7983e66210c78965bc75e386c329ec34854370d337a9ebdc4c8aede3a0b3/optree-0.19.0-cp314-cp314-manylinux_2_26_s390x.manylinux_2_28_s390x.whl", hash = "sha256:1bb36da9b95b165c7b77fd3ff0af36a30b802cd1c020da3bcdc8aa029991c4ea", size = 475459, upload-time = "2026-02-23T01:55:59.882Z" }, + { url = "https://files.pythonhosted.org/packages/fe/16/00261f20f467b9e8950a76ec1749f01359bf47f2fc3dac5e206de99835c0/optree-0.19.0-cp314-cp314-manylinux_2_27_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:fb220bb85128c8de71aeffb9c38be817569e4bca413b38d5e0de11ba6471ef4a", size = 456859, upload-time = "2026-02-23T01:56:01.181Z" }, + { url = "https://files.pythonhosted.org/packages/18/31/5e78a451ba9a6ed4b0903b10080dc028e3c9b9c5797cce0ca73990fb5604/optree-0.19.0-cp314-cp314-manylinux_2_39_riscv64.whl", hash = "sha256:5d2b83a37f150f827b8b0bc2c486056f9b2203e7b0bee699d2ee96a36c090f3a", size = 418187, upload-time = "2026-02-23T01:56:02.474Z" }, + { url = "https://files.pythonhosted.org/packages/9c/03/1516cb4fdb753cd76e5dc595217f84df48372bdabe1a7fb740a5b2530f5c/optree-0.19.0-cp314-cp314-win32.whl", hash = "sha256:b0c23d50b7f6a7c80f642307c87eee841cf513239706f2f60bd9480304170054", size = 319744, upload-time = "2026-02-23T01:56:03.493Z" }, + { url = "https://files.pythonhosted.org/packages/7e/c3/587cc9aa8d4742cd690da79460081e7d834499e07e8b2bd2ccc4c66928df/optree-0.19.0-cp314-cp314-win_amd64.whl", hash = "sha256:ff773c852122cef6dcae68b5e252a20aaf5d2986f78e278d747e226e7829d44e", size = 345744, upload-time = "2026-02-23T01:56:04.898Z" }, + { url = "https://files.pythonhosted.org/packages/e6/9b/c17c74ef6b85ad1a2687de8a08d1b56e3a27154b4db6c3ef1e9c2c53a96c/optree-0.19.0-cp314-cp314-win_arm64.whl", hash = "sha256:259ac2a426816d53d576c143b8dca87176af45fc8efd5dfe09db50d74a2fa0a5", size = 355307, upload-time = "2026-02-23T01:56:06.115Z" }, + { url = 
"https://files.pythonhosted.org/packages/ef/4c/e881fb840cef2cead7582ee36c0e0348e66730cb2a2af1938338c72b1bf3/optree-0.19.0-cp314-cp314t-macosx_10_15_x86_64.whl", hash = "sha256:428fdc8cf5dc43fa32496be6aa84fc0d8f549f899062dd9dd0aa7e3aa7f77ae9", size = 463079, upload-time = "2026-02-23T01:56:07.234Z" }, + { url = "https://files.pythonhosted.org/packages/b6/6b/0a8538815abe28e4307dd98385d4991d36555b841b060df3295a8408b856/optree-0.19.0-cp314-cp314t-macosx_11_0_arm64.whl", hash = "sha256:d1b497032b5823a09625b118fd4df84199fb0895afb78af536d638ce7645beb6", size = 431401, upload-time = "2026-02-23T01:56:08.336Z" }, + { url = "https://files.pythonhosted.org/packages/71/0c/d70a513fa93dbaa0e3e8c9b218b3805efb7083369cd14e1340bd2c0bc910/optree-0.19.0-cp314-cp314t-manylinux_2_26_aarch64.manylinux_2_28_aarch64.whl", hash = "sha256:e5f05fecbca17b48451ba3455198cec9db20802c0ffbbba51eaeb421bd846a1c", size = 426111, upload-time = "2026-02-23T01:56:09.376Z" }, + { url = "https://files.pythonhosted.org/packages/77/04/bd30c9f4e694f7b6585f333208ac7894578c1fa30dc5c938f22155df7859/optree-0.19.0-cp314-cp314t-manylinux_2_26_i686.manylinux_2_28_i686.whl", hash = "sha256:a51d0ad4e9dd089f317c94d95b7fa360e87491324e2bfa83d9c4f18dd928d4e1", size = 489872, upload-time = "2026-02-23T01:56:10.538Z" }, + { url = "https://files.pythonhosted.org/packages/e5/17/aba83aa0e8bf31c00cdd3863c2a05854ce414426a69c094ae51210b76677/optree-0.19.0-cp314-cp314t-manylinux_2_26_ppc64le.manylinux_2_28_ppc64le.whl", hash = "sha256:108ab83937d91658ef96c4f70a6c76b36038754f4779907ee8f127780575740f", size = 485172, upload-time = "2026-02-23T01:56:11.629Z" }, + { url = "https://files.pythonhosted.org/packages/e0/da/52e684c42dc29d3b4d52f2029545742ef43e151cea112d9093d2ad164f53/optree-0.19.0-cp314-cp314t-manylinux_2_26_s390x.manylinux_2_28_s390x.whl", hash = "sha256:a39fdd614f46bcaf810b2bb1ed940e82b8a19e654bc325df0cc6554e25c3b7eb", size = 484506, upload-time = "2026-02-23T01:56:12.723Z" }, + { url = 
"https://files.pythonhosted.org/packages/2d/f7/0d41edf484e11ba5357f91dba8d85ce06ca9d840ac7d95e58b856a49b13b/optree-0.19.0-cp314-cp314t-manylinux_2_27_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:bfc1bcba22f182f39f1a80ae3ac511ebfa4daea62c3058edd021ce7a5cda3009", size = 468846, upload-time = "2026-02-23T01:56:13.826Z" }, + { url = "https://files.pythonhosted.org/packages/79/5e/a8f49cfd6c3ae0e59dcb1155cd49f1e5ba41889c9388360264c8369589c6/optree-0.19.0-cp314-cp314t-manylinux_2_39_riscv64.whl", hash = "sha256:afe595a052cc45d3addb6045f04a3ca7e1fb664de032ecbbb2bfd76dfe1fcb61", size = 433899, upload-time = "2026-02-23T01:56:14.889Z" }, + { url = "https://files.pythonhosted.org/packages/9a/1b/4105e562d86b2de7eb3f240164a7dd3948e268878a9ee8925bfe1ad1da4f/optree-0.19.0-cp314-cp314t-win32.whl", hash = "sha256:b15ab972e2133e70570259386684624a17128daab7fb353a0a7435e9dd2c7354", size = 351719, upload-time = "2026-02-23T01:56:15.946Z" }, + { url = "https://files.pythonhosted.org/packages/c4/43/bbc4c7a1f37f1a0ed6efe07a5c44b2835e81d1f6ce1cca6a395a2339e60f/optree-0.19.0-cp314-cp314t-win_amd64.whl", hash = "sha256:c90c15a80c325c2c6e03e20c95350df5db4591d35e8e4a35a40d2f865c260193", size = 391937, upload-time = "2026-02-23T01:56:17.04Z" }, + { url = "https://files.pythonhosted.org/packages/62/12/6758b43dbddc6911e3225a15ca686c913959fb63c267840b54f0002be503/optree-0.19.0-cp314-cp314t-win_arm64.whl", hash = "sha256:a1e7b358df8fc4b97a05380d446e87b08eac899c1f34d9846b9afa0be7f96bc7", size = 389259, upload-time = "2026-02-23T01:56:18.237Z" }, + { url = "https://files.pythonhosted.org/packages/42/22/e680b8ef78ce0bbeb8c25d1cd1d8569b14f781fcafe5576116861b18fd77/optree-0.19.0-cp39-cp39-macosx_10_9_x86_64.whl", hash = "sha256:2e9bbb064c0f61f7abb4784710190ffd4ab2d61bd7b5da9723bb183e90386095", size = 392556, upload-time = "2026-02-23T01:56:19.363Z" }, + { url = 
"https://files.pythonhosted.org/packages/86/b5/942972724075934c65400beef12b4eca62a3cca787b9a91934bad8e4d2e3/optree-0.19.0-cp39-cp39-macosx_11_0_arm64.whl", hash = "sha256:2ffcae609fa4947ffd70ed2ad5ff486cabc9999c3c9f431652319d76592cb5a5", size = 364043, upload-time = "2026-02-23T01:56:20.437Z" }, + { url = "https://files.pythonhosted.org/packages/0d/1a/f5dbb6ae339001fd42dae39572487dbb7cff26099cb480bcdb32c43dbc45/optree-0.19.0-cp39-cp39-manylinux_2_26_aarch64.manylinux_2_28_aarch64.whl", hash = "sha256:8b08c6c04fb459c983b2414b7aadb9c8c4d697ba208d0b38d130fa55649cf4f6", size = 384465, upload-time = "2026-02-23T01:56:21.617Z" }, + { url = "https://files.pythonhosted.org/packages/f2/b3/ccc4dac201af5d1f0c6bbcd9d0c282820cfe548edd44f8e072fb307f51ae/optree-0.19.0-cp39-cp39-manylinux_2_26_i686.manylinux_2_28_i686.whl", hash = "sha256:d6dfa10842d2007d8dccf7a1ee74a6e790b1e8a18eddd3987af4fc04b61821c7", size = 440071, upload-time = "2026-02-23T01:56:23Z" }, + { url = "https://files.pythonhosted.org/packages/61/bc/2ca98f463c7946a36ec11141f4aa10a52890c3beac267e555f7296b60051/optree-0.19.0-cp39-cp39-manylinux_2_26_ppc64le.manylinux_2_28_ppc64le.whl", hash = "sha256:f486f37a9e280b2964b4ddd9860f4be6d78fc7e35f03aa37bf4dcfe0bbba4c19", size = 442044, upload-time = "2026-02-23T01:56:24.098Z" }, + { url = "https://files.pythonhosted.org/packages/d1/40/585608cbb9641f2cd8f647245047ac1d88752e74705f2bba84bcfa4cba69/optree-0.19.0-cp39-cp39-manylinux_2_26_s390x.manylinux_2_28_s390x.whl", hash = "sha256:efbd1f4db6f63bffca48e4456295f735579a8f79d437c9ab1d1629106e809449", size = 435461, upload-time = "2026-02-23T01:56:25.384Z" }, + { url = "https://files.pythonhosted.org/packages/a3/1c/09b4619b1deff6111e5756cc2c890bf1883bc9438caddc53ecd24606e733/optree-0.19.0-cp39-cp39-manylinux_2_27_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:693d5de8225b1787679c7a39a4883d8c02923399a9c45098a1e5adae43beb6cc", size = 419973, upload-time = "2026-02-23T01:56:26.493Z" }, + { url = 
"https://files.pythonhosted.org/packages/7c/1f/f8ac6543d98994880fbf49c6eaa5c56ac05dbd697992b3656fb128353bc9/optree-0.19.0-cp39-cp39-manylinux_2_39_riscv64.whl", hash = "sha256:179759eb592e5eb81e26d7b6d0177ec6574b04e711dde9445170d21ea4ffa299", size = 385159, upload-time = "2026-02-23T01:56:27.948Z" }, + { url = "https://files.pythonhosted.org/packages/3f/1a/b2711552b30acd55177d3a08456bcbb82d7dcca883f4491f479436b7a3f7/optree-0.19.0-cp39-cp39-win32.whl", hash = "sha256:e209e47d558255aafa3315b2ee21cc684e7699a369fbf74c52d5e6006f7ef0c0", size = 297993, upload-time = "2026-02-23T01:56:29.299Z" }, + { url = "https://files.pythonhosted.org/packages/07/65/33434cabb5d728ef5b961093017f47c007c8944b7030b86751f47adc9cfd/optree-0.19.0-cp39-cp39-win_amd64.whl", hash = "sha256:7ada0574c52f5c50a54ae669eb9664896174ffcdcc796d1aa6d5fa6548107658", size = 327964, upload-time = "2026-02-23T01:56:30.698Z" }, + { url = "https://files.pythonhosted.org/packages/83/88/a31c2b7ee12f2c7a24a8d57aeecbbcd4c513152212aa1958fff736900033/optree-0.19.0-pp311-pypy311_pp73-macosx_10_15_x86_64.whl", hash = "sha256:2806e7baafc3c2985b71a5202fe3bde75838c7f09c0df0aa43a2e7ae12e65587", size = 411712, upload-time = "2026-02-23T01:56:31.829Z" }, + { url = "https://files.pythonhosted.org/packages/e7/db/33b61930bf2a879d9b114e63723ccf3965404d4fb5caffad5bee1a5be61c/optree-0.19.0-pp311-pypy311_pp73-macosx_11_0_arm64.whl", hash = "sha256:c39df2d664e3d54faaabfeda3ccfc4a768fc1314578b9ef741055387e13ef3bb", size = 384082, upload-time = "2026-02-23T01:56:32.962Z" }, + { url = "https://files.pythonhosted.org/packages/81/b1/64d04f0169feba3555434013ba2328019c2bde4d5d14600acfdcb91102a9/optree-0.19.0-pp311-pypy311_pp73-manylinux_2_26_aarch64.manylinux_2_28_aarch64.whl", hash = "sha256:8bdb37b57863f18fad5601af4b94e8bc0ac145040b0ecbbc87f93d27fd6a676e", size = 403543, upload-time = "2026-02-23T01:56:33.983Z" }, + { url = 
"https://files.pythonhosted.org/packages/39/5c/cc9683a8c9eb54ba48739b7c394503d7df7a22b9f4bd2463ace46bbec593/optree-0.19.0-pp311-pypy311_pp73-manylinux_2_27_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:6b279f6d5e1cfe681b8bc8678529048081fe5ddaf49333a8bb13f3fb978dfdea", size = 444272, upload-time = "2026-02-23T01:56:35.453Z" }, + { url = "https://files.pythonhosted.org/packages/10/a7/9eb8b63ea378274517233ba0f2f0091a4cf38d92f91c9b1e6e646af31a8b/optree-0.19.0-pp311-pypy311_pp73-win_amd64.whl", hash = "sha256:7bcb4e3e0b2b6db57e0882f0ba6b03a07d1489887e7e75d75c42b0e2be9f0019", size = 335393, upload-time = "2026-02-23T01:56:36.659Z" }, ] [[package]] @@ -3692,7 +3705,7 @@ wheels = [ [[package]] name = "posthog" -version = "7.9.3" +version = "7.9.4" source = { registry = "https://pypi.org/simple" } resolution-markers = [ "python_full_version >= '3.14' and sys_platform == 'win32'", @@ -3717,33 +3730,33 @@ dependencies = [ { name = "six", marker = "python_full_version >= '3.10'" }, { name = "typing-extensions", marker = "python_full_version >= '3.10'" }, ] -sdist = { url = "https://files.pythonhosted.org/packages/7e/06/bcffcd262c861695fbaa74490b872e37d6fc41d3dcc1a43207d20525522f/posthog-7.9.3.tar.gz", hash = "sha256:55f7580265d290936ac4c112a4e2031a41743be4f90d4183ac9f85b721ff13ae", size = 172336, upload-time = "2026-02-18T22:20:24.085Z" } +sdist = { url = "https://files.pythonhosted.org/packages/55/50/5c0d9232118fdc1434c1b7bbc1a14de5b310498ede09a7e2123ae1f5f8bd/posthog-7.9.4.tar.gz", hash = "sha256:50acc94ef6267d7030575d2ff54e89e748fac2e98525ac672aeb0423160f77cf", size = 172973, upload-time = "2026-02-25T15:28:47.065Z" } wheels = [ - { url = "https://files.pythonhosted.org/packages/11/7e/0e06a96823fa7c11ce73920e6ff77e82445db62ac4eae0b6f211edb4c4c2/posthog-7.9.3-py3-none-any.whl", hash = "sha256:2ddcacdef6c4afb124ebfcf27d7be58388943a7e24f8d4a51a52732c9b90bad6", size = 197819, upload-time = "2026-02-18T22:20:22.015Z" }, + { url = 
"https://files.pythonhosted.org/packages/df/6f/794a4e94e3640282e75013ce18e65f0a01afc8d71f733664b4a272f98bce/posthog-7.9.4-py3-none-any.whl", hash = "sha256:414125ddd7a48b9c67feb24d723df1f666af41ad10f8a9a8bbaf5e3b536a2e26", size = 198651, upload-time = "2026-02-25T15:28:45.398Z" }, ] [[package]] name = "prek" -version = "0.3.3" +version = "0.3.4" source = { registry = "https://pypi.org/simple" } -sdist = { url = "https://files.pythonhosted.org/packages/bf/f1/7613dc8347a33e40fc5b79eec6bc7d458d8bbc339782333d8433b665f86f/prek-0.3.3.tar.gz", hash = "sha256:117bd46ebeb39def24298ce021ccc73edcf697b81856fcff36d762dd56093f6f", size = 343697, upload-time = "2026-02-15T13:33:28.723Z" } +sdist = { url = "https://files.pythonhosted.org/packages/c6/51/2324eaad93a4b144853ca1c56da76f357d3a70c7b4fd6659e972d7bb8660/prek-0.3.4.tar.gz", hash = "sha256:56a74d02d8b7dfe3c774ecfcd8c1b4e5f1e1b84369043a8003e8e3a779fce72d", size = 356633, upload-time = "2026-02-28T03:47:13.452Z" } wheels = [ - { url = "https://files.pythonhosted.org/packages/2d/8b/dce13d2a3065fd1e8ffce593a0e51c4a79c3cde9c9a15dc0acc8d9d1573d/prek-0.3.3-py3-none-linux_armv6l.whl", hash = "sha256:e8629cac4bdb131be8dc6e5a337f0f76073ad34a8305f3fe2bc1ab6201ede0a4", size = 4644636, upload-time = "2026-02-15T13:33:43.609Z" }, - { url = "https://files.pythonhosted.org/packages/01/30/06ab4dbe7ce02a8ce833e92deb1d9a8e85ae9d40e33d1959a2070b7494c6/prek-0.3.3-py3-none-macosx_10_12_x86_64.whl", hash = "sha256:4b9e819b9e4118e1e785047b1c8bd9aec7e4d836ed034cb58b7db5bcaaf49437", size = 4651410, upload-time = "2026-02-15T13:33:34.277Z" }, - { url = "https://files.pythonhosted.org/packages/d4/fc/da3bc5cb38471e7192eda06b7a26b7c24ef83e82da2c1dbc145f2bf33640/prek-0.3.3-py3-none-macosx_11_0_arm64.whl", hash = "sha256:bf29db3b5657c083eb8444c25aadeeec5167dc492e9019e188f87932f01ea50a", size = 4273163, upload-time = "2026-02-15T13:33:42.106Z" }, - { url = 
"https://files.pythonhosted.org/packages/b4/74/47839395091e2937beced81a5dd2f8ea9c8239c853da8611aaf78ee21a8b/prek-0.3.3-py3-none-manylinux_2_17_aarch64.manylinux2014_aarch64.musllinux_1_1_aarch64.whl", hash = "sha256:ae09736149815b26e64a9d350ca05692bab32c2afdf2939114d3211aaad68a3e", size = 4631808, upload-time = "2026-02-15T13:33:20.076Z" }, - { url = "https://files.pythonhosted.org/packages/e2/89/3f5ef6f7c928c017cb63b029349d6bc03598ab7f6979d4a770ce02575f82/prek-0.3.3-py3-none-manylinux_2_17_armv7l.manylinux2014_armv7l.whl", hash = "sha256:856c2b55c51703c366bb4ce81c6a91102b70573a9fc8637db2ac61c66e4565f9", size = 4548959, upload-time = "2026-02-15T13:33:36.325Z" }, - { url = "https://files.pythonhosted.org/packages/b2/18/80002c4c4475f90ca025f27739a016927a0e5d905c60612fc95da1c56ab7/prek-0.3.3-py3-none-manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:3acdf13a018f685beaff0a71d4b0d2ccbab4eaa1aced6d08fd471c1a654183eb", size = 4862256, upload-time = "2026-02-15T13:33:37.754Z" }, - { url = "https://files.pythonhosted.org/packages/c5/25/648bf084c2468fa7cfcdbbe9e59956bbb31b81f36e113bc9107d80af26a7/prek-0.3.3-py3-none-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:0f035667a8bd0a77b2bfa2b2e125da8cb1793949e9eeef0d8daab7f8ac8b57fe", size = 5404486, upload-time = "2026-02-15T13:33:39.239Z" }, - { url = "https://files.pythonhosted.org/packages/8b/43/261fb60a11712a327da345912bd8b338dc5a050199de800faafa278a6133/prek-0.3.3-py3-none-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:d09b2ad14332eede441d977de08eb57fb3f61226ed5fd2ceb7aadf5afcdb6794", size = 4887513, upload-time = "2026-02-15T13:33:40.702Z" }, - { url = "https://files.pythonhosted.org/packages/c7/2c/581e757ee57ec6046b32e0ee25660fc734bc2622c319f57119c49c0cab58/prek-0.3.3-py3-none-manylinux_2_28_aarch64.whl", hash = "sha256:c0c3ffac16e37a9daba43a7e8316778f5809b70254be138761a8b5b9ef0df28e", size = 4632336, upload-time = "2026-02-15T13:33:25.867Z" }, - { url = 
"https://files.pythonhosted.org/packages/d5/d8/aa276ce5d11b77882da4102ca0cb7161095831105043ae7979bbfdcc3dc4/prek-0.3.3-py3-none-manylinux_2_31_riscv64.whl", hash = "sha256:a3dc7720b580c07c0386e17af2486a5b4bc2f6cc57034a288a614dcbc4abe555", size = 4679370, upload-time = "2026-02-15T13:33:22.247Z" }, - { url = "https://files.pythonhosted.org/packages/70/19/9d4fa7bde428e58d9f48a74290c08736d42aeb5690dcdccc7a713e34a449/prek-0.3.3-py3-none-musllinux_1_1_armv7l.whl", hash = "sha256:60e0fa15da5020a03df2ee40268145ec5b88267ec2141a205317ad4df8c992d6", size = 4540316, upload-time = "2026-02-15T13:33:24.088Z" }, - { url = "https://files.pythonhosted.org/packages/25/b5/973cce29257e0b47b16cc9b4c162772ea01dbb7c080791ea0c068e106e05/prek-0.3.3-py3-none-musllinux_1_1_i686.whl", hash = "sha256:553515da9586d9624dc42db32b744fdb91cf62b053753037a0cadb3c2d8d82a2", size = 4724566, upload-time = "2026-02-15T13:33:29.832Z" }, - { url = "https://files.pythonhosted.org/packages/d6/8b/ad8b2658895a8ed2b0bc630bf38686fe38b7ff2c619c58953a80e4de3048/prek-0.3.3-py3-none-musllinux_1_1_x86_64.whl", hash = "sha256:9512cf370e0d1496503463a4a65621480efb41b487841a9e9ff1661edf14b238", size = 4995072, upload-time = "2026-02-15T13:33:27.417Z" }, - { url = "https://files.pythonhosted.org/packages/fd/b7/0540c101c00882adb9d30319d22d8f879413598269ecc60235e41875efd4/prek-0.3.3-py3-none-win32.whl", hash = "sha256:b2b328c7c6dc14ccdc79785348589aa39850f47baff33d8f199f2dee80ff774c", size = 4293144, upload-time = "2026-02-15T13:33:46.013Z" }, - { url = "https://files.pythonhosted.org/packages/97/c7/e4f11da653093040efba2d835aa0995d78940aea30887287aeaebe34a545/prek-0.3.3-py3-none-win_amd64.whl", hash = "sha256:3d7d7acf7ca8db65ba0943c52326c898f84bab0b1c26a35c87e0d177f574ca5f", size = 4652761, upload-time = "2026-02-15T13:33:32.962Z" }, - { url = "https://files.pythonhosted.org/packages/11/e4/d99dec54c6a5fb2763488bff6078166383169a93f3af27d2edae88379a39/prek-0.3.3-py3-none-win_arm64.whl", hash = 
"sha256:8aa87ee7628cd74482c0dd6537a3def1f162b25cd642d78b1b35dd3e81817f60", size = 4367520, upload-time = "2026-02-15T13:33:31.664Z" }, + { url = "https://files.pythonhosted.org/packages/09/20/1a964cb72582307c2f1dc7f583caab90f42810ad41551e5220592406a4c3/prek-0.3.4-py3-none-linux_armv6l.whl", hash = "sha256:c35192d6e23fe7406bd2f333d1c7dab1a4b34ab9289789f453170f33550aa74d", size = 4641915, upload-time = "2026-02-28T03:47:03.772Z" }, + { url = "https://files.pythonhosted.org/packages/c5/cb/4a21f37102bac37e415b61818344aa85de8d29a581253afa7db8c08d5a33/prek-0.3.4-py3-none-macosx_10_12_x86_64.whl", hash = "sha256:6f784d78de72a8bbe58a5fe7bde787c364ae88f0aff5222c5c5c7287876c510a", size = 4649166, upload-time = "2026-02-28T03:47:06.164Z" }, + { url = "https://files.pythonhosted.org/packages/85/9c/a7c0d117a098d57931428bdb60fcb796e0ebc0478c59288017a2e22eca96/prek-0.3.4-py3-none-macosx_11_0_arm64.whl", hash = "sha256:50a43f522625e8c968e8c9992accf9e29017abad6c782d6d176b73145ad680b7", size = 4274422, upload-time = "2026-02-28T03:46:59.356Z" }, + { url = "https://files.pythonhosted.org/packages/59/84/81d06df1724d09266df97599a02543d82fde7dfaefd192f09d9b2ccb092f/prek-0.3.4-py3-none-manylinux_2_17_aarch64.manylinux2014_aarch64.musllinux_1_1_aarch64.whl", hash = "sha256:4bbb1d3912a88935f35c6ba4466b4242732e3e3a8c608623c708e83cea85de00", size = 4629873, upload-time = "2026-02-28T03:46:56.419Z" }, + { url = "https://files.pythonhosted.org/packages/09/cd/bb0aefa25cfacd8dbced75b9a9d9945707707867fa5635fb69ae1bbc2d88/prek-0.3.4-py3-none-manylinux_2_17_armv7l.manylinux2014_armv7l.whl", hash = "sha256:ca4d4134db8f6e8de3c418317becdf428957e3cab271807f475318105fd46d04", size = 4552507, upload-time = "2026-02-28T03:47:05.004Z" }, + { url = "https://files.pythonhosted.org/packages/9b/c0/578a7af4861afb64ec81c03bfdcc1bb3341bb61f2fff8a094ecf13987a56/prek-0.3.4-py3-none-manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:7fb6395f6eb76133bb1e11fc718db8144522466cdc2e541d05e7813d1bbcae7d", size = 
4865929, upload-time = "2026-02-28T03:47:09.231Z" }, + { url = "https://files.pythonhosted.org/packages/fc/48/f169406590028f7698ef2e1ff5bffd92ca05e017636c1163a2f5ef0f8275/prek-0.3.4-py3-none-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:aae17813239ddcb4ae7b38418de4d49afff740f48f8e0556029c96f58e350412", size = 5390286, upload-time = "2026-02-28T03:47:10.796Z" }, + { url = "https://files.pythonhosted.org/packages/05/c5/98a73fec052059c3ae06ce105bef67caca42334c56d84e9ef75df72ba152/prek-0.3.4-py3-none-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:10a621a690d9c127afc3d21c275030d364d1fbef3296c095068d3ae80a59546e", size = 4891028, upload-time = "2026-02-28T03:47:07.916Z" }, + { url = "https://files.pythonhosted.org/packages/a3/b4/029966e35e59b59c142be7e1d2208ad261709ac1a66aa4a3ce33c5b9f91f/prek-0.3.4-py3-none-manylinux_2_28_aarch64.whl", hash = "sha256:d978c31bc3b1f0b3d58895b7c6ac26f077e0ea846da54f46aeee4c7088b1b105", size = 4633986, upload-time = "2026-02-28T03:47:14.351Z" }, + { url = "https://files.pythonhosted.org/packages/1d/27/d122802555745b6940c99fcb41496001c192ddcdf56ec947ec10a0298e05/prek-0.3.4-py3-none-manylinux_2_31_riscv64.whl", hash = "sha256:a8e089a030f0a023c22a4bb2ec4ff3fcc153585d701cff67acbfca2f37e173ae", size = 4680722, upload-time = "2026-02-28T03:47:12.224Z" }, + { url = "https://files.pythonhosted.org/packages/34/40/92318c96b3a67b4e62ed82741016ede34d97ea9579d3cc1332b167632222/prek-0.3.4-py3-none-musllinux_1_1_armv7l.whl", hash = "sha256:8060c72b764f0b88112616763da9dd3a7c293e010f8520b74079893096160a2f", size = 4535623, upload-time = "2026-02-28T03:46:52.221Z" }, + { url = "https://files.pythonhosted.org/packages/df/f5/6b383d94e722637da4926b4f609d36fe432827bb6f035ad46ee02bde66b6/prek-0.3.4-py3-none-musllinux_1_1_i686.whl", hash = "sha256:65b23268456b5a763278d4e1ec532f2df33918f13ded85869a1ddff761eb9697", size = 4729879, upload-time = "2026-02-28T03:46:57.886Z" }, + { url = 
"https://files.pythonhosted.org/packages/79/f8/fdc705b807d813fd713ffa4f67f96741542ed1dafbb221206078c06f3df4/prek-0.3.4-py3-none-musllinux_1_1_x86_64.whl", hash = "sha256:3975c61139c7b3200e38dc3955e050b0f2615701d3deb9715696a902e850509e", size = 5001569, upload-time = "2026-02-28T03:47:00.892Z" }, + { url = "https://files.pythonhosted.org/packages/84/92/b007a41f58e8192a1e611a21b396ad870d51d7873b7af12068ebae7fc15f/prek-0.3.4-py3-none-win32.whl", hash = "sha256:37449ae82f4dc08b72e542401e3d7318f05d1163e87c31ab260a40f425d6516e", size = 4297057, upload-time = "2026-02-28T03:47:02.219Z" }, + { url = "https://files.pythonhosted.org/packages/bb/dc/bcb02de9b11461e8e0c7d3c8fdf8cfa15ac6efe73472a4375549ba5defd2/prek-0.3.4-py3-none-win_amd64.whl", hash = "sha256:60e9aa86ca65de963510ae28c5d94b9d7a97bcbaa6e4cdb5bf5083ed4c45dc71", size = 4655174, upload-time = "2026-02-28T03:46:53.749Z" }, + { url = "https://files.pythonhosted.org/packages/0b/86/98f5598569f4cd3de7161e266fab6a8981e65555f79d4704810c1502ad0a/prek-0.3.4-py3-none-win_arm64.whl", hash = "sha256:486bdae8f4512d3b4f6eb61b83e5b7595da2adca385af4b2b7823c0ab38d1827", size = 4367817, upload-time = "2026-02-28T03:46:55.264Z" }, ] [[package]] @@ -3760,19 +3773,17 @@ wheels = [ [[package]] name = "protobuf" -version = "6.33.5" +version = "7.34.0" source = { registry = "https://pypi.org/simple" } -sdist = { url = "https://files.pythonhosted.org/packages/ba/25/7c72c307aafc96fa87062aa6291d9f7c94836e43214d43722e86037aac02/protobuf-6.33.5.tar.gz", hash = "sha256:6ddcac2a081f8b7b9642c09406bc6a4290128fce5f471cddd165960bb9119e5c", size = 444465, upload-time = "2026-01-29T21:51:33.494Z" } +sdist = { url = "https://files.pythonhosted.org/packages/f2/00/04a2ab36b70a52d0356852979e08b44edde0435f2115dc66e25f2100f3ab/protobuf-7.34.0.tar.gz", hash = "sha256:3871a3df67c710aaf7bb8d214cc997342e63ceebd940c8c7fc65c9b3d697591a", size = 454726, upload-time = "2026-02-27T00:30:25.421Z" } wheels = [ - { url = 
"https://files.pythonhosted.org/packages/b1/79/af92d0a8369732b027e6d6084251dd8e782c685c72da161bd4a2e00fbabb/protobuf-6.33.5-cp310-abi3-win32.whl", hash = "sha256:d71b040839446bac0f4d162e758bea99c8251161dae9d0983a3b88dee345153b", size = 425769, upload-time = "2026-01-29T21:51:21.751Z" }, - { url = "https://files.pythonhosted.org/packages/55/75/bb9bc917d10e9ee13dee8607eb9ab963b7cf8be607c46e7862c748aa2af7/protobuf-6.33.5-cp310-abi3-win_amd64.whl", hash = "sha256:3093804752167bcab3998bec9f1048baae6e29505adaf1afd14a37bddede533c", size = 437118, upload-time = "2026-01-29T21:51:24.022Z" }, - { url = "https://files.pythonhosted.org/packages/a2/6b/e48dfc1191bc5b52950246275bf4089773e91cb5ba3592621723cdddca62/protobuf-6.33.5-cp39-abi3-macosx_10_9_universal2.whl", hash = "sha256:a5cb85982d95d906df1e2210e58f8e4f1e3cdc088e52c921a041f9c9a0386de5", size = 427766, upload-time = "2026-01-29T21:51:25.413Z" }, - { url = "https://files.pythonhosted.org/packages/4e/b1/c79468184310de09d75095ed1314b839eb2f72df71097db9d1404a1b2717/protobuf-6.33.5-cp39-abi3-manylinux2014_aarch64.whl", hash = "sha256:9b71e0281f36f179d00cbcb119cb19dec4d14a81393e5ea220f64b286173e190", size = 324638, upload-time = "2026-01-29T21:51:26.423Z" }, - { url = "https://files.pythonhosted.org/packages/c5/f5/65d838092fd01c44d16037953fd4c2cc851e783de9b8f02b27ec4ffd906f/protobuf-6.33.5-cp39-abi3-manylinux2014_s390x.whl", hash = "sha256:8afa18e1d6d20af15b417e728e9f60f3aa108ee76f23c3b2c07a2c3b546d3afd", size = 339411, upload-time = "2026-01-29T21:51:27.446Z" }, - { url = "https://files.pythonhosted.org/packages/9b/53/a9443aa3ca9ba8724fdfa02dd1887c1bcd8e89556b715cfbacca6b63dbec/protobuf-6.33.5-cp39-abi3-manylinux2014_x86_64.whl", hash = "sha256:cbf16ba3350fb7b889fca858fb215967792dc125b35c7976ca4818bee3521cf0", size = 323465, upload-time = "2026-01-29T21:51:28.925Z" }, - { url = "https://files.pythonhosted.org/packages/08/60/84d5f6dcda9165e4d6a56ac8433c9f40a8906bf2966150b8a0cfde097d78/protobuf-6.33.5-cp39-cp39-win32.whl", 
hash = "sha256:a3157e62729aafb8df6da2c03aa5c0937c7266c626ce11a278b6eb7963c4e37c", size = 425892, upload-time = "2026-01-29T21:51:30.382Z" }, - { url = "https://files.pythonhosted.org/packages/68/19/33d7dc2dc84439587fa1e21e1c0026c01ad2af0a62f58fd54002a7546307/protobuf-6.33.5-cp39-cp39-win_amd64.whl", hash = "sha256:8f04fa32763dcdb4973d537d6b54e615cc61108c7cb38fe59310c3192d29510a", size = 437137, upload-time = "2026-01-29T21:51:31.456Z" }, - { url = "https://files.pythonhosted.org/packages/57/bf/2086963c69bdac3d7cff1cc7ff79b8ce5ea0bec6797a017e1be338a46248/protobuf-6.33.5-py3-none-any.whl", hash = "sha256:69915a973dd0f60f31a08b8318b73eab2bd6a392c79184b3612226b0a3f8ec02", size = 170687, upload-time = "2026-01-29T21:51:32.557Z" }, + { url = "https://files.pythonhosted.org/packages/13/c4/6322ab5c8f279c4c358bc14eb8aefc0550b97222a39f04eb3c1af7a830fa/protobuf-7.34.0-cp310-abi3-macosx_10_9_universal2.whl", hash = "sha256:8e329966799f2c271d5e05e236459fe1cbfdb8755aaa3b0914fa60947ddea408", size = 429248, upload-time = "2026-02-27T00:30:14.924Z" }, + { url = "https://files.pythonhosted.org/packages/45/99/b029bbbc61e8937545da5b79aa405ab2d9cf307a728f8c9459ad60d7a481/protobuf-7.34.0-cp310-abi3-manylinux2014_aarch64.whl", hash = "sha256:9d7a5005fb96f3c1e64f397f91500b0eb371b28da81296ae73a6b08a5b76cdd6", size = 325753, upload-time = "2026-02-27T00:30:17.247Z" }, + { url = "https://files.pythonhosted.org/packages/cc/79/09f02671eb75b251c5550a1c48e7b3d4b0623efd7c95a15a50f6f9fc1e2e/protobuf-7.34.0-cp310-abi3-manylinux2014_s390x.whl", hash = "sha256:4a72a8ec94e7a9f7ef7fe818ed26d073305f347f8b3b5ba31e22f81fd85fca02", size = 340200, upload-time = "2026-02-27T00:30:18.672Z" }, + { url = "https://files.pythonhosted.org/packages/b5/57/89727baef7578897af5ed166735ceb315819f1c184da8c3441271dbcfde7/protobuf-7.34.0-cp310-abi3-manylinux2014_x86_64.whl", hash = "sha256:964cf977e07f479c0697964e83deda72bcbc75c3badab506fb061b352d991b01", size = 324268, upload-time = "2026-02-27T00:30:20.088Z" }, + { url = 
"https://files.pythonhosted.org/packages/1f/3e/38ff2ddee5cc946f575c9d8cc822e34bde205cf61acf8099ad88ef19d7d2/protobuf-7.34.0-cp310-abi3-win32.whl", hash = "sha256:f791ec509707a1d91bd02e07df157e75e4fb9fbdad12a81b7396201ec244e2e3", size = 426628, upload-time = "2026-02-27T00:30:21.555Z" }, + { url = "https://files.pythonhosted.org/packages/cb/71/7c32eaf34a61a1bae1b62a2ac4ffe09b8d1bb0cf93ad505f42040023db89/protobuf-7.34.0-cp310-abi3-win_amd64.whl", hash = "sha256:9f9079f1dde4e32342ecbd1c118d76367090d4aaa19da78230c38101c5b3dd40", size = 437901, upload-time = "2026-02-27T00:30:22.836Z" }, + { url = "https://files.pythonhosted.org/packages/a4/e7/14dc9366696dcb53a413449881743426ed289d687bcf3d5aee4726c32ebb/protobuf-7.34.0-py3-none-any.whl", hash = "sha256:e3b914dd77fa33fa06ab2baa97937746ab25695f389869afdf03e81f34e45dc7", size = 170716, upload-time = "2026-02-27T00:30:23.994Z" }, ] [[package]] @@ -4789,27 +4800,27 @@ wheels = [ [[package]] name = "ruff" -version = "0.15.2" -source = { registry = "https://pypi.org/simple" } -sdist = { url = "https://files.pythonhosted.org/packages/06/04/eab13a954e763b0606f460443fcbf6bb5a0faf06890ea3754ff16523dce5/ruff-0.15.2.tar.gz", hash = "sha256:14b965afee0969e68bb871eba625343b8673375f457af4abe98553e8bbb98342", size = 4558148, upload-time = "2026-02-19T22:32:20.271Z" } -wheels = [ - { url = "https://files.pythonhosted.org/packages/2f/70/3a4dc6d09b13cb3e695f28307e5d889b2e1a66b7af9c5e257e796695b0e6/ruff-0.15.2-py3-none-linux_armv6l.whl", hash = "sha256:120691a6fdae2f16d65435648160f5b81a9625288f75544dc40637436b5d3c0d", size = 10430565, upload-time = "2026-02-19T22:32:41.824Z" }, - { url = "https://files.pythonhosted.org/packages/71/0b/bb8457b56185ece1305c666dc895832946d24055be90692381c31d57466d/ruff-0.15.2-py3-none-macosx_10_12_x86_64.whl", hash = "sha256:a89056d831256099658b6bba4037ac6dd06f49d194199215befe2bb10457ea5e", size = 10820354, upload-time = "2026-02-19T22:32:07.366Z" }, - { url = 
"https://files.pythonhosted.org/packages/2d/c1/e0532d7f9c9e0b14c46f61b14afd563298b8b83f337b6789ddd987e46121/ruff-0.15.2-py3-none-macosx_11_0_arm64.whl", hash = "sha256:e36dee3a64be0ebd23c86ffa3aa3fd3ac9a712ff295e192243f814a830b6bd87", size = 10170767, upload-time = "2026-02-19T22:32:13.188Z" }, - { url = "https://files.pythonhosted.org/packages/47/e8/da1aa341d3af017a21c7a62fb5ec31d4e7ad0a93ab80e3a508316efbcb23/ruff-0.15.2-py3-none-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:a9fb47b6d9764677f8c0a193c0943ce9a05d6763523f132325af8a858eadc2b9", size = 10529591, upload-time = "2026-02-19T22:32:02.547Z" }, - { url = "https://files.pythonhosted.org/packages/93/74/184fbf38e9f3510231fbc5e437e808f0b48c42d1df9434b208821efcd8d6/ruff-0.15.2-py3-none-manylinux_2_17_armv7l.manylinux2014_armv7l.whl", hash = "sha256:f376990f9d0d6442ea9014b19621d8f2aaf2b8e39fdbfc79220b7f0c596c9b80", size = 10260771, upload-time = "2026-02-19T22:32:36.938Z" }, - { url = "https://files.pythonhosted.org/packages/05/ac/605c20b8e059a0bc4b42360414baa4892ff278cec1c91fff4be0dceedefd/ruff-0.15.2-py3-none-manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:2dcc987551952d73cbf5c88d9fdee815618d497e4df86cd4c4824cc59d5dd75f", size = 11045791, upload-time = "2026-02-19T22:32:31.642Z" }, - { url = "https://files.pythonhosted.org/packages/fd/52/db6e419908f45a894924d410ac77d64bdd98ff86901d833364251bd08e22/ruff-0.15.2-py3-none-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:42a47fd785cbe8c01b9ff45031af875d101b040ad8f4de7bbb716487c74c9a77", size = 11879271, upload-time = "2026-02-19T22:32:29.305Z" }, - { url = "https://files.pythonhosted.org/packages/3e/d8/7992b18f2008bdc9231d0f10b16df7dda964dbf639e2b8b4c1b4e91b83af/ruff-0.15.2-py3-none-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:cbe9f49354866e575b4c6943856989f966421870e85cd2ac94dccb0a9dcb2fea", size = 11303707, upload-time = "2026-02-19T22:32:22.492Z" }, - { url = 
"https://files.pythonhosted.org/packages/d7/02/849b46184bcfdd4b64cde61752cc9a146c54759ed036edd11857e9b8443b/ruff-0.15.2-py3-none-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:b7a672c82b5f9887576087d97be5ce439f04bbaf548ee987b92d3a7dede41d3a", size = 11149151, upload-time = "2026-02-19T22:32:44.234Z" }, - { url = "https://files.pythonhosted.org/packages/70/04/f5284e388bab60d1d3b99614a5a9aeb03e0f333847e2429bebd2aaa1feec/ruff-0.15.2-py3-none-manylinux_2_31_riscv64.whl", hash = "sha256:72ecc64f46f7019e2bcc3cdc05d4a7da958b629a5ab7033195e11a438403d956", size = 11091132, upload-time = "2026-02-19T22:32:24.691Z" }, - { url = "https://files.pythonhosted.org/packages/fa/ae/88d844a21110e14d92cf73d57363fab59b727ebeabe78009b9ccb23500af/ruff-0.15.2-py3-none-musllinux_1_2_aarch64.whl", hash = "sha256:8dcf243b15b561c655c1ef2f2b0050e5d50db37fe90115507f6ff37d865dc8b4", size = 10504717, upload-time = "2026-02-19T22:32:26.75Z" }, - { url = "https://files.pythonhosted.org/packages/64/27/867076a6ada7f2b9c8292884ab44d08fd2ba71bd2b5364d4136f3cd537e1/ruff-0.15.2-py3-none-musllinux_1_2_armv7l.whl", hash = "sha256:dab6941c862c05739774677c6273166d2510d254dac0695c0e3f5efa1b5585de", size = 10263122, upload-time = "2026-02-19T22:32:10.036Z" }, - { url = "https://files.pythonhosted.org/packages/e7/ef/faf9321d550f8ebf0c6373696e70d1758e20ccdc3951ad7af00c0956be7c/ruff-0.15.2-py3-none-musllinux_1_2_i686.whl", hash = "sha256:1b9164f57fc36058e9a6806eb92af185b0697c9fe4c7c52caa431c6554521e5c", size = 10735295, upload-time = "2026-02-19T22:32:39.227Z" }, - { url = "https://files.pythonhosted.org/packages/2f/55/e8089fec62e050ba84d71b70e7834b97709ca9b7aba10c1a0b196e493f97/ruff-0.15.2-py3-none-musllinux_1_2_x86_64.whl", hash = "sha256:80d24fcae24d42659db7e335b9e1531697a7102c19185b8dc4a028b952865fd8", size = 11241641, upload-time = "2026-02-19T22:32:34.617Z" }, - { url = 
"https://files.pythonhosted.org/packages/23/01/1c30526460f4d23222d0fabd5888868262fd0e2b71a00570ca26483cd993/ruff-0.15.2-py3-none-win32.whl", hash = "sha256:fd5ff9e5f519a7e1bd99cbe8daa324010a74f5e2ebc97c6242c08f26f3714f6f", size = 10507885, upload-time = "2026-02-19T22:32:15.635Z" }, - { url = "https://files.pythonhosted.org/packages/5c/10/3d18e3bbdf8fc50bbb4ac3cc45970aa5a9753c5cb51bf9ed9a3cd8b79fa3/ruff-0.15.2-py3-none-win_amd64.whl", hash = "sha256:d20014e3dfa400f3ff84830dfb5755ece2de45ab62ecea4af6b7262d0fb4f7c5", size = 11623725, upload-time = "2026-02-19T22:32:04.947Z" }, - { url = "https://files.pythonhosted.org/packages/6d/78/097c0798b1dab9f8affe73da9642bb4500e098cb27fd8dc9724816ac747b/ruff-0.15.2-py3-none-win_arm64.whl", hash = "sha256:cabddc5822acdc8f7b5527b36ceac55cc51eec7b1946e60181de8fe83ca8876e", size = 10941649, upload-time = "2026-02-19T22:32:18.108Z" }, +version = "0.15.4" +source = { registry = "https://pypi.org/simple" } +sdist = { url = "https://files.pythonhosted.org/packages/da/31/d6e536cdebb6568ae75a7f00e4b4819ae0ad2640c3604c305a0428680b0c/ruff-0.15.4.tar.gz", hash = "sha256:3412195319e42d634470cc97aa9803d07e9d5c9223b99bcb1518f0c725f26ae1", size = 4569550, upload-time = "2026-02-26T20:04:14.959Z" } +wheels = [ + { url = "https://files.pythonhosted.org/packages/f2/82/c11a03cfec3a4d26a0ea1e571f0f44be5993b923f905eeddfc397c13d360/ruff-0.15.4-py3-none-linux_armv6l.whl", hash = "sha256:a1810931c41606c686bae8b5b9a8072adac2f611bb433c0ba476acba17a332e0", size = 10453333, upload-time = "2026-02-26T20:04:20.093Z" }, + { url = "https://files.pythonhosted.org/packages/ce/5d/6a1f271f6e31dffb31855996493641edc3eef8077b883eaf007a2f1c2976/ruff-0.15.4-py3-none-macosx_10_12_x86_64.whl", hash = "sha256:5a1632c66672b8b4d3e1d1782859e98d6e0b4e70829530666644286600a33992", size = 10853356, upload-time = "2026-02-26T20:04:05.808Z" }, + { url = 
"https://files.pythonhosted.org/packages/b1/d8/0fab9f8842b83b1a9c2bf81b85063f65e93fb512e60effa95b0be49bfc54/ruff-0.15.4-py3-none-macosx_11_0_arm64.whl", hash = "sha256:a4386ba2cd6c0f4ff75252845906acc7c7c8e1ac567b7bc3d373686ac8c222ba", size = 10187434, upload-time = "2026-02-26T20:03:54.656Z" }, + { url = "https://files.pythonhosted.org/packages/85/cc/cc220fd9394eff5db8d94dec199eec56dd6c9f3651d8869d024867a91030/ruff-0.15.4-py3-none-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:b2496488bdfd3732747558b6f95ae427ff066d1fcd054daf75f5a50674411e75", size = 10535456, upload-time = "2026-02-26T20:03:52.738Z" }, + { url = "https://files.pythonhosted.org/packages/fa/0f/bced38fa5cf24373ec767713c8e4cadc90247f3863605fb030e597878661/ruff-0.15.4-py3-none-manylinux_2_17_armv7l.manylinux2014_armv7l.whl", hash = "sha256:3f1c4893841ff2d54cbda1b2860fa3260173df5ddd7b95d370186f8a5e66a4ac", size = 10287772, upload-time = "2026-02-26T20:04:08.138Z" }, + { url = "https://files.pythonhosted.org/packages/2b/90/58a1802d84fed15f8f281925b21ab3cecd813bde52a8ca033a4de8ab0e7a/ruff-0.15.4-py3-none-manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:820b8766bd65503b6c30aaa6331e8ef3a6e564f7999c844e9a547c40179e440a", size = 11049051, upload-time = "2026-02-26T20:04:03.53Z" }, + { url = "https://files.pythonhosted.org/packages/d2/ac/b7ad36703c35f3866584564dc15f12f91cb1a26a897dc2fd13d7cb3ae1af/ruff-0.15.4-py3-none-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:c9fb74bab47139c1751f900f857fa503987253c3ef89129b24ed375e72873e85", size = 11890494, upload-time = "2026-02-26T20:04:10.497Z" }, + { url = "https://files.pythonhosted.org/packages/93/3d/3eb2f47a39a8b0da99faf9c54d3eb24720add1e886a5309d4d1be73a6380/ruff-0.15.4-py3-none-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:f80c98765949c518142b3a50a5db89343aa90f2c2bf7799de9986498ae6176db", size = 11326221, upload-time = "2026-02-26T20:04:12.84Z" }, + { url = 
"https://files.pythonhosted.org/packages/ff/90/bf134f4c1e5243e62690e09d63c55df948a74084c8ac3e48a88468314da6/ruff-0.15.4-py3-none-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:451a2e224151729b3b6c9ffb36aed9091b2996fe4bdbd11f47e27d8f2e8888ec", size = 11168459, upload-time = "2026-02-26T20:04:00.969Z" }, + { url = "https://files.pythonhosted.org/packages/b5/e5/a64d27688789b06b5d55162aafc32059bb8c989c61a5139a36e1368285eb/ruff-0.15.4-py3-none-manylinux_2_31_riscv64.whl", hash = "sha256:a8f157f2e583c513c4f5f896163a93198297371f34c04220daf40d133fdd4f7f", size = 11104366, upload-time = "2026-02-26T20:03:48.099Z" }, + { url = "https://files.pythonhosted.org/packages/f1/f6/32d1dcb66a2559763fc3027bdd65836cad9eb09d90f2ed6a63d8e9252b02/ruff-0.15.4-py3-none-musllinux_1_2_aarch64.whl", hash = "sha256:917cc68503357021f541e69b35361c99387cdbbf99bd0ea4aa6f28ca99ff5338", size = 10510887, upload-time = "2026-02-26T20:03:45.771Z" }, + { url = "https://files.pythonhosted.org/packages/ff/92/22d1ced50971c5b6433aed166fcef8c9343f567a94cf2b9d9089f6aa80fe/ruff-0.15.4-py3-none-musllinux_1_2_armv7l.whl", hash = "sha256:e9737c8161da79fd7cfec19f1e35620375bd8b2a50c3e77fa3d2c16f574105cc", size = 10285939, upload-time = "2026-02-26T20:04:22.42Z" }, + { url = "https://files.pythonhosted.org/packages/e6/f4/7c20aec3143837641a02509a4668fb146a642fd1211846634edc17eb5563/ruff-0.15.4-py3-none-musllinux_1_2_i686.whl", hash = "sha256:291258c917539e18f6ba40482fe31d6f5ac023994ee11d7bdafd716f2aab8a68", size = 10765471, upload-time = "2026-02-26T20:03:58.924Z" }, + { url = "https://files.pythonhosted.org/packages/d0/09/6d2f7586f09a16120aebdff8f64d962d7c4348313c77ebb29c566cefc357/ruff-0.15.4-py3-none-musllinux_1_2_x86_64.whl", hash = "sha256:3f83c45911da6f2cd5936c436cf86b9f09f09165f033a99dcf7477e34041cbc3", size = 11263382, upload-time = "2026-02-26T20:04:24.424Z" }, + { url = 
"https://files.pythonhosted.org/packages/1b/fa/2ef715a1cd329ef47c1a050e10dee91a9054b7ce2fcfdd6a06d139afb7ec/ruff-0.15.4-py3-none-win32.whl", hash = "sha256:65594a2d557d4ee9f02834fcdf0a28daa8b3b9f6cb2cb93846025a36db47ef22", size = 10506664, upload-time = "2026-02-26T20:03:50.56Z" }, + { url = "https://files.pythonhosted.org/packages/d0/a8/c688ef7e29983976820d18710f955751d9f4d4eb69df658af3d006e2ba3e/ruff-0.15.4-py3-none-win_amd64.whl", hash = "sha256:04196ad44f0df220c2ece5b0e959c2f37c777375ec744397d21d15b50a75264f", size = 11651048, upload-time = "2026-02-26T20:04:17.191Z" }, + { url = "https://files.pythonhosted.org/packages/3e/0a/9e1be9035b37448ce2e68c978f0591da94389ade5a5abafa4cf99985d1b2/ruff-0.15.4-py3-none-win_arm64.whl", hash = "sha256:60d5177e8cfc70e51b9c5fad936c634872a74209f934c1e79107d11787ad5453", size = 10966776, upload-time = "2026-02-26T20:03:56.908Z" }, ] [[package]] @@ -4955,7 +4966,7 @@ wheels = [ [[package]] name = "scipy" -version = "1.17.0" +version = "1.17.1" source = { registry = "https://pypi.org/simple" } resolution-markers = [ "python_full_version >= '3.14' and sys_platform == 'win32'", @@ -4974,81 +4985,81 @@ resolution-markers = [ dependencies = [ { name = "numpy", version = "2.4.2", source = { registry = "https://pypi.org/simple" }, marker = "python_full_version >= '3.11'" }, ] -sdist = { url = "https://files.pythonhosted.org/packages/56/3e/9cca699f3486ce6bc12ff46dc2031f1ec8eb9ccc9a320fdaf925f1417426/scipy-1.17.0.tar.gz", hash = "sha256:2591060c8e648d8b96439e111ac41fd8342fdeff1876be2e19dea3fe8930454e", size = 30396830, upload-time = "2026-01-10T21:34:23.009Z" } -wheels = [ - { url = "https://files.pythonhosted.org/packages/1e/4b/c89c131aa87cad2b77a54eb0fb94d633a842420fa7e919dc2f922037c3d8/scipy-1.17.0-cp311-cp311-macosx_10_14_x86_64.whl", hash = "sha256:2abd71643797bd8a106dff97894ff7869eeeb0af0f7a5ce02e4227c6a2e9d6fd", size = 31381316, upload-time = "2026-01-10T21:24:33.42Z" }, - { url = 
"https://files.pythonhosted.org/packages/5e/5f/a6b38f79a07d74989224d5f11b55267714707582908a5f1ae854cf9a9b84/scipy-1.17.0-cp311-cp311-macosx_12_0_arm64.whl", hash = "sha256:ef28d815f4d2686503e5f4f00edc387ae58dfd7a2f42e348bb53359538f01558", size = 27966760, upload-time = "2026-01-10T21:24:38.911Z" }, - { url = "https://files.pythonhosted.org/packages/c1/20/095ad24e031ee8ed3c5975954d816b8e7e2abd731e04f8be573de8740885/scipy-1.17.0-cp311-cp311-macosx_14_0_arm64.whl", hash = "sha256:272a9f16d6bb4667e8b50d25d71eddcc2158a214df1b566319298de0939d2ab7", size = 20138701, upload-time = "2026-01-10T21:24:43.249Z" }, - { url = "https://files.pythonhosted.org/packages/89/11/4aad2b3858d0337756f3323f8960755704e530b27eb2a94386c970c32cbe/scipy-1.17.0-cp311-cp311-macosx_14_0_x86_64.whl", hash = "sha256:7204fddcbec2fe6598f1c5fdf027e9f259106d05202a959a9f1aecf036adc9f6", size = 22480574, upload-time = "2026-01-10T21:24:47.266Z" }, - { url = "https://files.pythonhosted.org/packages/85/bd/f5af70c28c6da2227e510875cadf64879855193a687fb19951f0f44cfd6b/scipy-1.17.0-cp311-cp311-manylinux_2_27_aarch64.manylinux_2_28_aarch64.whl", hash = "sha256:fc02c37a5639ee67d8fb646ffded6d793c06c5622d36b35cfa8fe5ececb8f042", size = 32862414, upload-time = "2026-01-10T21:24:52.566Z" }, - { url = "https://files.pythonhosted.org/packages/ef/df/df1457c4df3826e908879fe3d76bc5b6e60aae45f4ee42539512438cfd5d/scipy-1.17.0-cp311-cp311-manylinux_2_27_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:dac97a27520d66c12a34fd90a4fe65f43766c18c0d6e1c0a80f114d2260080e4", size = 35112380, upload-time = "2026-01-10T21:24:58.433Z" }, - { url = "https://files.pythonhosted.org/packages/5f/bb/88e2c16bd1dd4de19d80d7c5e238387182993c2fb13b4b8111e3927ad422/scipy-1.17.0-cp311-cp311-musllinux_1_2_aarch64.whl", hash = "sha256:ebb7446a39b3ae0fe8f416a9a3fdc6fba3f11c634f680f16a239c5187bc487c0", size = 34922676, upload-time = "2026-01-10T21:25:04.287Z" }, - { url = 
"https://files.pythonhosted.org/packages/02/ba/5120242cc735f71fc002cff0303d536af4405eb265f7c60742851e7ccfe9/scipy-1.17.0-cp311-cp311-musllinux_1_2_x86_64.whl", hash = "sha256:474da16199f6af66601a01546144922ce402cb17362e07d82f5a6cf8f963e449", size = 37507599, upload-time = "2026-01-10T21:25:09.851Z" }, - { url = "https://files.pythonhosted.org/packages/52/c8/08629657ac6c0da198487ce8cd3de78e02cfde42b7f34117d56a3fe249dc/scipy-1.17.0-cp311-cp311-win_amd64.whl", hash = "sha256:255c0da161bd7b32a6c898e7891509e8a9289f0b1c6c7d96142ee0d2b114c2ea", size = 36380284, upload-time = "2026-01-10T21:25:15.632Z" }, - { url = "https://files.pythonhosted.org/packages/6c/4a/465f96d42c6f33ad324a40049dfd63269891db9324aa66c4a1c108c6f994/scipy-1.17.0-cp311-cp311-win_arm64.whl", hash = "sha256:85b0ac3ad17fa3be50abd7e69d583d98792d7edc08367e01445a1e2076005379", size = 24370427, upload-time = "2026-01-10T21:25:20.514Z" }, - { url = "https://files.pythonhosted.org/packages/0b/11/7241a63e73ba5a516f1930ac8d5b44cbbfabd35ac73a2d08ca206df007c4/scipy-1.17.0-cp312-cp312-macosx_10_14_x86_64.whl", hash = "sha256:0d5018a57c24cb1dd828bcf51d7b10e65986d549f52ef5adb6b4d1ded3e32a57", size = 31364580, upload-time = "2026-01-10T21:25:25.717Z" }, - { url = "https://files.pythonhosted.org/packages/ed/1d/5057f812d4f6adc91a20a2d6f2ebcdb517fdbc87ae3acc5633c9b97c8ba5/scipy-1.17.0-cp312-cp312-macosx_12_0_arm64.whl", hash = "sha256:88c22af9e5d5a4f9e027e26772cc7b5922fab8bcc839edb3ae33de404feebd9e", size = 27969012, upload-time = "2026-01-10T21:25:30.921Z" }, - { url = "https://files.pythonhosted.org/packages/e3/21/f6ec556c1e3b6ec4e088da667d9987bb77cc3ab3026511f427dc8451187d/scipy-1.17.0-cp312-cp312-macosx_14_0_arm64.whl", hash = "sha256:f3cd947f20fe17013d401b64e857c6b2da83cae567adbb75b9dcba865abc66d8", size = 20140691, upload-time = "2026-01-10T21:25:34.802Z" }, - { url = 
"https://files.pythonhosted.org/packages/7a/fe/5e5ad04784964ba964a96f16c8d4676aa1b51357199014dce58ab7ec5670/scipy-1.17.0-cp312-cp312-macosx_14_0_x86_64.whl", hash = "sha256:e8c0b331c2c1f531eb51f1b4fc9ba709521a712cce58f1aa627bc007421a5306", size = 22463015, upload-time = "2026-01-10T21:25:39.277Z" }, - { url = "https://files.pythonhosted.org/packages/4a/69/7c347e857224fcaf32a34a05183b9d8a7aca25f8f2d10b8a698b8388561a/scipy-1.17.0-cp312-cp312-manylinux_2_27_aarch64.manylinux_2_28_aarch64.whl", hash = "sha256:5194c445d0a1c7a6c1a4a4681b6b7c71baad98ff66d96b949097e7513c9d6742", size = 32724197, upload-time = "2026-01-10T21:25:44.084Z" }, - { url = "https://files.pythonhosted.org/packages/d1/fe/66d73b76d378ba8cc2fe605920c0c75092e3a65ae746e1e767d9d020a75a/scipy-1.17.0-cp312-cp312-manylinux_2_27_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:9eeb9b5f5997f75507814ed9d298ab23f62cf79f5a3ef90031b1ee2506abdb5b", size = 35009148, upload-time = "2026-01-10T21:25:50.591Z" }, - { url = "https://files.pythonhosted.org/packages/af/07/07dec27d9dc41c18d8c43c69e9e413431d20c53a0339c388bcf72f353c4b/scipy-1.17.0-cp312-cp312-musllinux_1_2_aarch64.whl", hash = "sha256:40052543f7bbe921df4408f46003d6f01c6af109b9e2c8a66dd1cf6cf57f7d5d", size = 34798766, upload-time = "2026-01-10T21:25:59.41Z" }, - { url = "https://files.pythonhosted.org/packages/81/61/0470810c8a093cdacd4ba7504b8a218fd49ca070d79eca23a615f5d9a0b0/scipy-1.17.0-cp312-cp312-musllinux_1_2_x86_64.whl", hash = "sha256:0cf46c8013fec9d3694dc572f0b54100c28405d55d3e2cb15e2895b25057996e", size = 37405953, upload-time = "2026-01-10T21:26:07.75Z" }, - { url = "https://files.pythonhosted.org/packages/92/ce/672ed546f96d5d41ae78c4b9b02006cedd0b3d6f2bf5bb76ea455c320c28/scipy-1.17.0-cp312-cp312-win_amd64.whl", hash = "sha256:0937a0b0d8d593a198cededd4c439a0ea216a3f36653901ea1f3e4be949056f8", size = 36328121, upload-time = "2026-01-10T21:26:16.509Z" }, - { url = 
"https://files.pythonhosted.org/packages/9d/21/38165845392cae67b61843a52c6455d47d0cc2a40dd495c89f4362944654/scipy-1.17.0-cp312-cp312-win_arm64.whl", hash = "sha256:f603d8a5518c7426414d1d8f82e253e454471de682ce5e39c29adb0df1efb86b", size = 24314368, upload-time = "2026-01-10T21:26:23.087Z" }, - { url = "https://files.pythonhosted.org/packages/0c/51/3468fdfd49387ddefee1636f5cf6d03ce603b75205bf439bbf0e62069bfd/scipy-1.17.0-cp313-cp313-macosx_10_14_x86_64.whl", hash = "sha256:65ec32f3d32dfc48c72df4291345dae4f048749bc8d5203ee0a3f347f96c5ce6", size = 31344101, upload-time = "2026-01-10T21:26:30.25Z" }, - { url = "https://files.pythonhosted.org/packages/b2/9a/9406aec58268d437636069419e6977af953d1e246df941d42d3720b7277b/scipy-1.17.0-cp313-cp313-macosx_12_0_arm64.whl", hash = "sha256:1f9586a58039d7229ce77b52f8472c972448cded5736eaf102d5658bbac4c269", size = 27950385, upload-time = "2026-01-10T21:26:36.801Z" }, - { url = "https://files.pythonhosted.org/packages/4f/98/e7342709e17afdfd1b26b56ae499ef4939b45a23a00e471dfb5375eea205/scipy-1.17.0-cp313-cp313-macosx_14_0_arm64.whl", hash = "sha256:9fad7d3578c877d606b1150135c2639e9de9cecd3705caa37b66862977cc3e72", size = 20122115, upload-time = "2026-01-10T21:26:42.107Z" }, - { url = "https://files.pythonhosted.org/packages/fd/0e/9eeeb5357a64fd157cbe0302c213517c541cc16b8486d82de251f3c68ede/scipy-1.17.0-cp313-cp313-macosx_14_0_x86_64.whl", hash = "sha256:423ca1f6584fc03936972b5f7c06961670dbba9f234e71676a7c7ccf938a0d61", size = 22442402, upload-time = "2026-01-10T21:26:48.029Z" }, - { url = "https://files.pythonhosted.org/packages/c9/10/be13397a0e434f98e0c79552b2b584ae5bb1c8b2be95db421533bbca5369/scipy-1.17.0-cp313-cp313-manylinux_2_27_aarch64.manylinux_2_28_aarch64.whl", hash = "sha256:fe508b5690e9eaaa9467fc047f833af58f1152ae51a0d0aed67aa5801f4dd7d6", size = 32696338, upload-time = "2026-01-10T21:26:55.521Z" }, - { url = 
"https://files.pythonhosted.org/packages/63/1e/12fbf2a3bb240161651c94bb5cdd0eae5d4e8cc6eaeceb74ab07b12a753d/scipy-1.17.0-cp313-cp313-manylinux_2_27_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:6680f2dfd4f6182e7d6db161344537da644d1cf85cf293f015c60a17ecf08752", size = 34977201, upload-time = "2026-01-10T21:27:03.501Z" }, - { url = "https://files.pythonhosted.org/packages/19/5b/1a63923e23ccd20bd32156d7dd708af5bbde410daa993aa2500c847ab2d2/scipy-1.17.0-cp313-cp313-musllinux_1_2_aarch64.whl", hash = "sha256:eec3842ec9ac9de5917899b277428886042a93db0b227ebbe3a333b64ec7643d", size = 34777384, upload-time = "2026-01-10T21:27:11.423Z" }, - { url = "https://files.pythonhosted.org/packages/39/22/b5da95d74edcf81e540e467202a988c50fef41bd2011f46e05f72ba07df6/scipy-1.17.0-cp313-cp313-musllinux_1_2_x86_64.whl", hash = "sha256:d7425fcafbc09a03731e1bc05581f5fad988e48c6a861f441b7ab729a49a55ea", size = 37379586, upload-time = "2026-01-10T21:27:20.171Z" }, - { url = "https://files.pythonhosted.org/packages/b9/b6/8ac583d6da79e7b9e520579f03007cb006f063642afd6b2eeb16b890bf93/scipy-1.17.0-cp313-cp313-win_amd64.whl", hash = "sha256:87b411e42b425b84777718cc41516b8a7e0795abfa8e8e1d573bf0ef014f0812", size = 36287211, upload-time = "2026-01-10T21:28:43.122Z" }, - { url = "https://files.pythonhosted.org/packages/55/fb/7db19e0b3e52f882b420417644ec81dd57eeef1bd1705b6f689d8ff93541/scipy-1.17.0-cp313-cp313-win_arm64.whl", hash = "sha256:357ca001c6e37601066092e7c89cca2f1ce74e2a520ca78d063a6d2201101df2", size = 24312646, upload-time = "2026-01-10T21:28:49.893Z" }, - { url = "https://files.pythonhosted.org/packages/20/b6/7feaa252c21cc7aff335c6c55e1b90ab3e3306da3f048109b8b639b94648/scipy-1.17.0-cp313-cp313t-macosx_10_14_x86_64.whl", hash = "sha256:ec0827aa4d36cb79ff1b81de898e948a51ac0b9b1c43e4a372c0508c38c0f9a3", size = 31693194, upload-time = "2026-01-10T21:27:27.454Z" }, - { url = 
"https://files.pythonhosted.org/packages/76/bb/bbb392005abce039fb7e672cb78ac7d158700e826b0515cab6b5b60c26fb/scipy-1.17.0-cp313-cp313t-macosx_12_0_arm64.whl", hash = "sha256:819fc26862b4b3c73a60d486dbb919202f3d6d98c87cf20c223511429f2d1a97", size = 28365415, upload-time = "2026-01-10T21:27:34.26Z" }, - { url = "https://files.pythonhosted.org/packages/37/da/9d33196ecc99fba16a409c691ed464a3a283ac454a34a13a3a57c0d66f3a/scipy-1.17.0-cp313-cp313t-macosx_14_0_arm64.whl", hash = "sha256:363ad4ae2853d88ebcde3ae6ec46ccca903ea9835ee8ba543f12f575e7b07e4e", size = 20537232, upload-time = "2026-01-10T21:27:40.306Z" }, - { url = "https://files.pythonhosted.org/packages/56/9d/f4b184f6ddb28e9a5caea36a6f98e8ecd2a524f9127354087ce780885d83/scipy-1.17.0-cp313-cp313t-macosx_14_0_x86_64.whl", hash = "sha256:979c3a0ff8e5ba254d45d59ebd38cde48fce4f10b5125c680c7a4bfe177aab07", size = 22791051, upload-time = "2026-01-10T21:27:46.539Z" }, - { url = "https://files.pythonhosted.org/packages/9b/9d/025cccdd738a72140efc582b1641d0dd4caf2e86c3fb127568dc80444e6e/scipy-1.17.0-cp313-cp313t-manylinux_2_27_aarch64.manylinux_2_28_aarch64.whl", hash = "sha256:130d12926ae34399d157de777472bf82e9061c60cc081372b3118edacafe1d00", size = 32815098, upload-time = "2026-01-10T21:27:54.389Z" }, - { url = "https://files.pythonhosted.org/packages/48/5f/09b879619f8bca15ce392bfc1894bd9c54377e01d1b3f2f3b595a1b4d945/scipy-1.17.0-cp313-cp313t-manylinux_2_27_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:6e886000eb4919eae3a44f035e63f0fd8b651234117e8f6f29bad1cd26e7bc45", size = 35031342, upload-time = "2026-01-10T21:28:03.012Z" }, - { url = "https://files.pythonhosted.org/packages/f2/9a/f0f0a9f0aa079d2f106555b984ff0fbb11a837df280f04f71f056ea9c6e4/scipy-1.17.0-cp313-cp313t-musllinux_1_2_aarch64.whl", hash = "sha256:13c4096ac6bc31d706018f06a49abe0485f96499deb82066b94d19b02f664209", size = 34893199, upload-time = "2026-01-10T21:28:10.832Z" }, - { url = 
"https://files.pythonhosted.org/packages/90/b8/4f0f5cf0c5ea4d7548424e6533e6b17d164f34a6e2fb2e43ffebb6697b06/scipy-1.17.0-cp313-cp313t-musllinux_1_2_x86_64.whl", hash = "sha256:cacbaddd91fcffde703934897c5cd2c7cb0371fac195d383f4e1f1c5d3f3bd04", size = 37438061, upload-time = "2026-01-10T21:28:19.684Z" }, - { url = "https://files.pythonhosted.org/packages/f9/cc/2bd59140ed3b2fa2882fb15da0a9cb1b5a6443d67cfd0d98d4cec83a57ec/scipy-1.17.0-cp313-cp313t-win_amd64.whl", hash = "sha256:edce1a1cf66298cccdc48a1bdf8fb10a3bf58e8b58d6c3883dd1530e103f87c0", size = 36328593, upload-time = "2026-01-10T21:28:28.007Z" }, - { url = "https://files.pythonhosted.org/packages/13/1b/c87cc44a0d2c7aaf0f003aef2904c3d097b422a96c7e7c07f5efd9073c1b/scipy-1.17.0-cp313-cp313t-win_arm64.whl", hash = "sha256:30509da9dbec1c2ed8f168b8d8aa853bc6723fede1dbc23c7d43a56f5ab72a67", size = 24625083, upload-time = "2026-01-10T21:28:35.188Z" }, - { url = "https://files.pythonhosted.org/packages/1a/2d/51006cd369b8e7879e1c630999a19d1fbf6f8b5ed3e33374f29dc87e53b3/scipy-1.17.0-cp314-cp314-macosx_10_14_x86_64.whl", hash = "sha256:c17514d11b78be8f7e6331b983a65a7f5ca1fd037b95e27b280921fe5606286a", size = 31346803, upload-time = "2026-01-10T21:28:57.24Z" }, - { url = "https://files.pythonhosted.org/packages/d6/2e/2349458c3ce445f53a6c93d4386b1c4c5c0c540917304c01222ff95ff317/scipy-1.17.0-cp314-cp314-macosx_12_0_arm64.whl", hash = "sha256:4e00562e519c09da34c31685f6acc3aa384d4d50604db0f245c14e1b4488bfa2", size = 27967182, upload-time = "2026-01-10T21:29:04.107Z" }, - { url = "https://files.pythonhosted.org/packages/5e/7c/df525fbfa77b878d1cfe625249529514dc02f4fd5f45f0f6295676a76528/scipy-1.17.0-cp314-cp314-macosx_14_0_arm64.whl", hash = "sha256:f7df7941d71314e60a481e02d5ebcb3f0185b8d799c70d03d8258f6c80f3d467", size = 20139125, upload-time = "2026-01-10T21:29:10.179Z" }, - { url = 
"https://files.pythonhosted.org/packages/33/11/fcf9d43a7ed1234d31765ec643b0515a85a30b58eddccc5d5a4d12b5f194/scipy-1.17.0-cp314-cp314-macosx_14_0_x86_64.whl", hash = "sha256:aabf057c632798832f071a8dde013c2e26284043934f53b00489f1773b33527e", size = 22443554, upload-time = "2026-01-10T21:29:15.888Z" }, - { url = "https://files.pythonhosted.org/packages/80/5c/ea5d239cda2dd3d31399424967a24d556cf409fbea7b5b21412b0fd0a44f/scipy-1.17.0-cp314-cp314-manylinux_2_27_aarch64.manylinux_2_28_aarch64.whl", hash = "sha256:a38c3337e00be6fd8a95b4ed66b5d988bac4ec888fd922c2ea9fe5fb1603dd67", size = 32757834, upload-time = "2026-01-10T21:29:23.406Z" }, - { url = "https://files.pythonhosted.org/packages/b8/7e/8c917cc573310e5dc91cbeead76f1b600d3fb17cf0969db02c9cf92e3cfa/scipy-1.17.0-cp314-cp314-manylinux_2_27_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:00fb5f8ec8398ad90215008d8b6009c9db9fa924fd4c7d6be307c6f945f9cd73", size = 34995775, upload-time = "2026-01-10T21:29:31.915Z" }, - { url = "https://files.pythonhosted.org/packages/c5/43/176c0c3c07b3f7df324e7cdd933d3e2c4898ca202b090bd5ba122f9fe270/scipy-1.17.0-cp314-cp314-musllinux_1_2_aarch64.whl", hash = "sha256:f2a4942b0f5f7c23c7cd641a0ca1955e2ae83dedcff537e3a0259096635e186b", size = 34841240, upload-time = "2026-01-10T21:29:39.995Z" }, - { url = "https://files.pythonhosted.org/packages/44/8c/d1f5f4b491160592e7f084d997de53a8e896a3ac01cd07e59f43ca222744/scipy-1.17.0-cp314-cp314-musllinux_1_2_x86_64.whl", hash = "sha256:dbf133ced83889583156566d2bdf7a07ff89228fe0c0cb727f777de92092ec6b", size = 37394463, upload-time = "2026-01-10T21:29:48.723Z" }, - { url = "https://files.pythonhosted.org/packages/9f/ec/42a6657f8d2d087e750e9a5dde0b481fd135657f09eaf1cf5688bb23c338/scipy-1.17.0-cp314-cp314-win_amd64.whl", hash = "sha256:3625c631a7acd7cfd929e4e31d2582cf00f42fcf06011f59281271746d77e061", size = 37053015, upload-time = "2026-01-10T21:30:51.418Z" }, - { url = 
"https://files.pythonhosted.org/packages/27/58/6b89a6afd132787d89a362d443a7bddd511b8f41336a1ae47f9e4f000dc4/scipy-1.17.0-cp314-cp314-win_arm64.whl", hash = "sha256:9244608d27eafe02b20558523ba57f15c689357c85bdcfe920b1828750aa26eb", size = 24951312, upload-time = "2026-01-10T21:30:56.771Z" }, - { url = "https://files.pythonhosted.org/packages/e9/01/f58916b9d9ae0112b86d7c3b10b9e685625ce6e8248df139d0fcb17f7397/scipy-1.17.0-cp314-cp314t-macosx_10_14_x86_64.whl", hash = "sha256:2b531f57e09c946f56ad0b4a3b2abee778789097871fc541e267d2eca081cff1", size = 31706502, upload-time = "2026-01-10T21:29:56.326Z" }, - { url = "https://files.pythonhosted.org/packages/59/8e/2912a87f94a7d1f8b38aabc0faf74b82d3b6c9e22be991c49979f0eceed8/scipy-1.17.0-cp314-cp314t-macosx_12_0_arm64.whl", hash = "sha256:13e861634a2c480bd237deb69333ac79ea1941b94568d4b0efa5db5e263d4fd1", size = 28380854, upload-time = "2026-01-10T21:30:01.554Z" }, - { url = "https://files.pythonhosted.org/packages/bd/1c/874137a52dddab7d5d595c1887089a2125d27d0601fce8c0026a24a92a0b/scipy-1.17.0-cp314-cp314t-macosx_14_0_arm64.whl", hash = "sha256:eb2651271135154aa24f6481cbae5cc8af1f0dd46e6533fb7b56aa9727b6a232", size = 20552752, upload-time = "2026-01-10T21:30:05.93Z" }, - { url = "https://files.pythonhosted.org/packages/3f/f0/7518d171cb735f6400f4576cf70f756d5b419a07fe1867da34e2c2c9c11b/scipy-1.17.0-cp314-cp314t-macosx_14_0_x86_64.whl", hash = "sha256:c5e8647f60679790c2f5c76be17e2e9247dc6b98ad0d3b065861e082c56e078d", size = 22803972, upload-time = "2026-01-10T21:30:10.651Z" }, - { url = "https://files.pythonhosted.org/packages/7c/74/3498563a2c619e8a3ebb4d75457486c249b19b5b04a30600dfd9af06bea5/scipy-1.17.0-cp314-cp314t-manylinux_2_27_aarch64.manylinux_2_28_aarch64.whl", hash = "sha256:5fb10d17e649e1446410895639f3385fd2bf4c3c7dfc9bea937bddcbc3d7b9ba", size = 32829770, upload-time = "2026-01-10T21:30:16.359Z" }, - { url = 
"https://files.pythonhosted.org/packages/48/d1/7b50cedd8c6c9d6f706b4b36fa8544d829c712a75e370f763b318e9638c1/scipy-1.17.0-cp314-cp314t-manylinux_2_27_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:8547e7c57f932e7354a2319fab613981cde910631979f74c9b542bb167a8b9db", size = 35051093, upload-time = "2026-01-10T21:30:22.987Z" }, - { url = "https://files.pythonhosted.org/packages/e2/82/a2d684dfddb87ba1b3ea325df7c3293496ee9accb3a19abe9429bce94755/scipy-1.17.0-cp314-cp314t-musllinux_1_2_aarch64.whl", hash = "sha256:33af70d040e8af9d5e7a38b5ed3b772adddd281e3062ff23fec49e49681c38cf", size = 34909905, upload-time = "2026-01-10T21:30:28.704Z" }, - { url = "https://files.pythonhosted.org/packages/ef/5e/e565bd73991d42023eb82bb99e51c5b3d9e2c588ca9d4b3e2cc1d3ca62a6/scipy-1.17.0-cp314-cp314t-musllinux_1_2_x86_64.whl", hash = "sha256:f9eb55bb97d00f8b7ab95cb64f873eb0bf54d9446264d9f3609130381233483f", size = 37457743, upload-time = "2026-01-10T21:30:34.819Z" }, - { url = "https://files.pythonhosted.org/packages/58/a8/a66a75c3d8f1fb2b83f66007d6455a06a6f6cf5618c3dc35bc9b69dd096e/scipy-1.17.0-cp314-cp314t-win_amd64.whl", hash = "sha256:1ff269abf702f6c7e67a4b7aad981d42871a11b9dd83c58d2d2ea624efbd1088", size = 37098574, upload-time = "2026-01-10T21:30:40.782Z" }, - { url = "https://files.pythonhosted.org/packages/56/a5/df8f46ef7da168f1bc52cd86e09a9de5c6f19cc1da04454d51b7d4f43408/scipy-1.17.0-cp314-cp314t-win_arm64.whl", hash = "sha256:031121914e295d9791319a1875444d55079885bbae5bdc9c5e0f2ee5f09d34ff", size = 25246266, upload-time = "2026-01-10T21:30:45.923Z" }, +sdist = { url = "https://files.pythonhosted.org/packages/7a/97/5a3609c4f8d58b039179648e62dd220f89864f56f7357f5d4f45c29eb2cc/scipy-1.17.1.tar.gz", hash = "sha256:95d8e012d8cb8816c226aef832200b1d45109ed4464303e997c5b13122b297c0", size = 30573822, upload-time = "2026-02-23T00:26:24.851Z" } +wheels = [ + { url = 
"https://files.pythonhosted.org/packages/df/75/b4ce781849931fef6fd529afa6b63711d5a733065722d0c3e2724af9e40a/scipy-1.17.1-cp311-cp311-macosx_10_14_x86_64.whl", hash = "sha256:1f95b894f13729334fb990162e911c9e5dc1ab390c58aa6cbecb389c5b5e28ec", size = 31613675, upload-time = "2026-02-23T00:16:00.13Z" }, + { url = "https://files.pythonhosted.org/packages/f7/58/bccc2861b305abdd1b8663d6130c0b3d7cc22e8d86663edbc8401bfd40d4/scipy-1.17.1-cp311-cp311-macosx_12_0_arm64.whl", hash = "sha256:e18f12c6b0bc5a592ed23d3f7b891f68fd7f8241d69b7883769eb5d5dfb52696", size = 28162057, upload-time = "2026-02-23T00:16:09.456Z" }, + { url = "https://files.pythonhosted.org/packages/6d/ee/18146b7757ed4976276b9c9819108adbc73c5aad636e5353e20746b73069/scipy-1.17.1-cp311-cp311-macosx_14_0_arm64.whl", hash = "sha256:a3472cfbca0a54177d0faa68f697d8ba4c80bbdc19908c3465556d9f7efce9ee", size = 20334032, upload-time = "2026-02-23T00:16:17.358Z" }, + { url = "https://files.pythonhosted.org/packages/ec/e6/cef1cf3557f0c54954198554a10016b6a03b2ec9e22a4e1df734936bd99c/scipy-1.17.1-cp311-cp311-macosx_14_0_x86_64.whl", hash = "sha256:766e0dc5a616d026a3a1cffa379af959671729083882f50307e18175797b3dfd", size = 22709533, upload-time = "2026-02-23T00:16:25.791Z" }, + { url = "https://files.pythonhosted.org/packages/4d/60/8804678875fc59362b0fb759ab3ecce1f09c10a735680318ac30da8cd76b/scipy-1.17.1-cp311-cp311-manylinux_2_27_aarch64.manylinux_2_28_aarch64.whl", hash = "sha256:744b2bf3640d907b79f3fd7874efe432d1cf171ee721243e350f55234b4cec4c", size = 33062057, upload-time = "2026-02-23T00:16:36.931Z" }, + { url = "https://files.pythonhosted.org/packages/09/7d/af933f0f6e0767995b4e2d705a0665e454d1c19402aa7e895de3951ebb04/scipy-1.17.1-cp311-cp311-manylinux_2_27_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:43af8d1f3bea642559019edfe64e9b11192a8978efbd1539d7bc2aaa23d92de4", size = 35349300, upload-time = "2026-02-23T00:16:49.108Z" }, + { url = 
"https://files.pythonhosted.org/packages/b4/3d/7ccbbdcbb54c8fdc20d3b6930137c782a163fa626f0aef920349873421ba/scipy-1.17.1-cp311-cp311-musllinux_1_2_aarch64.whl", hash = "sha256:cd96a1898c0a47be4520327e01f874acfd61fb48a9420f8aa9f6483412ffa444", size = 35127333, upload-time = "2026-02-23T00:17:01.293Z" }, + { url = "https://files.pythonhosted.org/packages/e8/19/f926cb11c42b15ba08e3a71e376d816ac08614f769b4f47e06c3580c836a/scipy-1.17.1-cp311-cp311-musllinux_1_2_x86_64.whl", hash = "sha256:4eb6c25dd62ee8d5edf68a8e1c171dd71c292fdae95d8aeb3dd7d7de4c364082", size = 37741314, upload-time = "2026-02-23T00:17:12.576Z" }, + { url = "https://files.pythonhosted.org/packages/95/da/0d1df507cf574b3f224ccc3d45244c9a1d732c81dcb26b1e8a766ae271a8/scipy-1.17.1-cp311-cp311-win_amd64.whl", hash = "sha256:d30e57c72013c2a4fe441c2fcb8e77b14e152ad48b5464858e07e2ad9fbfceff", size = 36607512, upload-time = "2026-02-23T00:17:23.424Z" }, + { url = "https://files.pythonhosted.org/packages/68/7f/bdd79ceaad24b671543ffe0ef61ed8e659440eb683b66f033454dcee90eb/scipy-1.17.1-cp311-cp311-win_arm64.whl", hash = "sha256:9ecb4efb1cd6e8c4afea0daa91a87fbddbce1b99d2895d151596716c0b2e859d", size = 24599248, upload-time = "2026-02-23T00:17:34.561Z" }, + { url = "https://files.pythonhosted.org/packages/35/48/b992b488d6f299dbe3f11a20b24d3dda3d46f1a635ede1c46b5b17a7b163/scipy-1.17.1-cp312-cp312-macosx_10_14_x86_64.whl", hash = "sha256:35c3a56d2ef83efc372eaec584314bd0ef2e2f0d2adb21c55e6ad5b344c0dcb8", size = 31610954, upload-time = "2026-02-23T00:17:49.855Z" }, + { url = "https://files.pythonhosted.org/packages/b2/02/cf107b01494c19dc100f1d0b7ac3cc08666e96ba2d64db7626066cee895e/scipy-1.17.1-cp312-cp312-macosx_12_0_arm64.whl", hash = "sha256:fcb310ddb270a06114bb64bbe53c94926b943f5b7f0842194d585c65eb4edd76", size = 28172662, upload-time = "2026-02-23T00:18:01.64Z" }, + { url = 
"https://files.pythonhosted.org/packages/cf/a9/599c28631bad314d219cf9ffd40e985b24d603fc8a2f4ccc5ae8419a535b/scipy-1.17.1-cp312-cp312-macosx_14_0_arm64.whl", hash = "sha256:cc90d2e9c7e5c7f1a482c9875007c095c3194b1cfedca3c2f3291cdc2bc7c086", size = 20344366, upload-time = "2026-02-23T00:18:12.015Z" }, + { url = "https://files.pythonhosted.org/packages/35/f5/906eda513271c8deb5af284e5ef0206d17a96239af79f9fa0aebfe0e36b4/scipy-1.17.1-cp312-cp312-macosx_14_0_x86_64.whl", hash = "sha256:c80be5ede8f3f8eded4eff73cc99a25c388ce98e555b17d31da05287015ffa5b", size = 22704017, upload-time = "2026-02-23T00:18:21.502Z" }, + { url = "https://files.pythonhosted.org/packages/da/34/16f10e3042d2f1d6b66e0428308ab52224b6a23049cb2f5c1756f713815f/scipy-1.17.1-cp312-cp312-manylinux_2_27_aarch64.manylinux_2_28_aarch64.whl", hash = "sha256:e19ebea31758fac5893a2ac360fedd00116cbb7628e650842a6691ba7ca28a21", size = 32927842, upload-time = "2026-02-23T00:18:35.367Z" }, + { url = "https://files.pythonhosted.org/packages/01/8e/1e35281b8ab6d5d72ebe9911edcdffa3f36b04ed9d51dec6dd140396e220/scipy-1.17.1-cp312-cp312-manylinux_2_27_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:02ae3b274fde71c5e92ac4d54bc06c42d80e399fec704383dcd99b301df37458", size = 35235890, upload-time = "2026-02-23T00:18:49.188Z" }, + { url = "https://files.pythonhosted.org/packages/c5/5c/9d7f4c88bea6e0d5a4f1bc0506a53a00e9fcb198de372bfe4d3652cef482/scipy-1.17.1-cp312-cp312-musllinux_1_2_aarch64.whl", hash = "sha256:8a604bae87c6195d8b1045eddece0514d041604b14f2727bbc2b3020172045eb", size = 35003557, upload-time = "2026-02-23T00:18:54.74Z" }, + { url = "https://files.pythonhosted.org/packages/65/94/7698add8f276dbab7a9de9fb6b0e02fc13ee61d51c7c3f85ac28b65e1239/scipy-1.17.1-cp312-cp312-musllinux_1_2_x86_64.whl", hash = "sha256:f590cd684941912d10becc07325a3eeb77886fe981415660d9265c4c418d0bea", size = 37625856, upload-time = "2026-02-23T00:19:00.307Z" }, + { url = 
"https://files.pythonhosted.org/packages/a2/84/dc08d77fbf3d87d3ee27f6a0c6dcce1de5829a64f2eae85a0ecc1f0daa73/scipy-1.17.1-cp312-cp312-win_amd64.whl", hash = "sha256:41b71f4a3a4cab9d366cd9065b288efc4d4f3c0b37a91a8e0947fb5bd7f31d87", size = 36549682, upload-time = "2026-02-23T00:19:07.67Z" }, + { url = "https://files.pythonhosted.org/packages/bc/98/fe9ae9ffb3b54b62559f52dedaebe204b408db8109a8c66fdd04869e6424/scipy-1.17.1-cp312-cp312-win_arm64.whl", hash = "sha256:f4115102802df98b2b0db3cce5cb9b92572633a1197c77b7553e5203f284a5b3", size = 24547340, upload-time = "2026-02-23T00:19:12.024Z" }, + { url = "https://files.pythonhosted.org/packages/76/27/07ee1b57b65e92645f219b37148a7e7928b82e2b5dbeccecb4dff7c64f0b/scipy-1.17.1-cp313-cp313-macosx_10_14_x86_64.whl", hash = "sha256:5e3c5c011904115f88a39308379c17f91546f77c1667cea98739fe0fccea804c", size = 31590199, upload-time = "2026-02-23T00:19:17.192Z" }, + { url = "https://files.pythonhosted.org/packages/ec/ae/db19f8ab842e9b724bf5dbb7db29302a91f1e55bc4d04b1025d6d605a2c5/scipy-1.17.1-cp313-cp313-macosx_12_0_arm64.whl", hash = "sha256:6fac755ca3d2c3edcb22f479fceaa241704111414831ddd3bc6056e18516892f", size = 28154001, upload-time = "2026-02-23T00:19:22.241Z" }, + { url = "https://files.pythonhosted.org/packages/5b/58/3ce96251560107b381cbd6e8413c483bbb1228a6b919fa8652b0d4090e7f/scipy-1.17.1-cp313-cp313-macosx_14_0_arm64.whl", hash = "sha256:7ff200bf9d24f2e4d5dc6ee8c3ac64d739d3a89e2326ba68aaf6c4a2b838fd7d", size = 20325719, upload-time = "2026-02-23T00:19:26.329Z" }, + { url = "https://files.pythonhosted.org/packages/b2/83/15087d945e0e4d48ce2377498abf5ad171ae013232ae31d06f336e64c999/scipy-1.17.1-cp313-cp313-macosx_14_0_x86_64.whl", hash = "sha256:4b400bdc6f79fa02a4d86640310dde87a21fba0c979efff5248908c6f15fad1b", size = 22683595, upload-time = "2026-02-23T00:19:30.304Z" }, + { url = 
"https://files.pythonhosted.org/packages/b4/e0/e58fbde4a1a594c8be8114eb4aac1a55bcd6587047efc18a61eb1f5c0d30/scipy-1.17.1-cp313-cp313-manylinux_2_27_aarch64.manylinux_2_28_aarch64.whl", hash = "sha256:2b64ca7d4aee0102a97f3ba22124052b4bd2152522355073580bf4845e2550b6", size = 32896429, upload-time = "2026-02-23T00:19:35.536Z" }, + { url = "https://files.pythonhosted.org/packages/f5/5f/f17563f28ff03c7b6799c50d01d5d856a1d55f2676f537ca8d28c7f627cd/scipy-1.17.1-cp313-cp313-manylinux_2_27_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:581b2264fc0aa555f3f435a5944da7504ea3a065d7029ad60e7c3d1ae09c5464", size = 35203952, upload-time = "2026-02-23T00:19:42.259Z" }, + { url = "https://files.pythonhosted.org/packages/8d/a5/9afd17de24f657fdfe4df9a3f1ea049b39aef7c06000c13db1530d81ccca/scipy-1.17.1-cp313-cp313-musllinux_1_2_aarch64.whl", hash = "sha256:beeda3d4ae615106d7094f7e7cef6218392e4465cc95d25f900bebabfded0950", size = 34979063, upload-time = "2026-02-23T00:19:47.547Z" }, + { url = "https://files.pythonhosted.org/packages/8b/13/88b1d2384b424bf7c924f2038c1c409f8d88bb2a8d49d097861dd64a57b2/scipy-1.17.1-cp313-cp313-musllinux_1_2_x86_64.whl", hash = "sha256:6609bc224e9568f65064cfa72edc0f24ee6655b47575954ec6339534b2798369", size = 37598449, upload-time = "2026-02-23T00:19:53.238Z" }, + { url = "https://files.pythonhosted.org/packages/35/e5/d6d0e51fc888f692a35134336866341c08655d92614f492c6860dc45bb2c/scipy-1.17.1-cp313-cp313-win_amd64.whl", hash = "sha256:37425bc9175607b0268f493d79a292c39f9d001a357bebb6b88fdfaff13f6448", size = 36510943, upload-time = "2026-02-23T00:20:50.89Z" }, + { url = "https://files.pythonhosted.org/packages/2a/fd/3be73c564e2a01e690e19cc618811540ba5354c67c8680dce3281123fb79/scipy-1.17.1-cp313-cp313-win_arm64.whl", hash = "sha256:5cf36e801231b6a2059bf354720274b7558746f3b1a4efb43fcf557ccd484a87", size = 24545621, upload-time = "2026-02-23T00:20:55.871Z" }, + { url = 
"https://files.pythonhosted.org/packages/6f/6b/17787db8b8114933a66f9dcc479a8272e4b4da75fe03b0c282f7b0ade8cd/scipy-1.17.1-cp313-cp313t-macosx_10_14_x86_64.whl", hash = "sha256:d59c30000a16d8edc7e64152e30220bfbd724c9bbb08368c054e24c651314f0a", size = 31936708, upload-time = "2026-02-23T00:19:58.694Z" }, + { url = "https://files.pythonhosted.org/packages/38/2e/524405c2b6392765ab1e2b722a41d5da33dc5c7b7278184a8ad29b6cb206/scipy-1.17.1-cp313-cp313t-macosx_12_0_arm64.whl", hash = "sha256:010f4333c96c9bb1a4516269e33cb5917b08ef2166d5556ca2fd9f082a9e6ea0", size = 28570135, upload-time = "2026-02-23T00:20:03.934Z" }, + { url = "https://files.pythonhosted.org/packages/fd/c3/5bd7199f4ea8556c0c8e39f04ccb014ac37d1468e6cfa6a95c6b3562b76e/scipy-1.17.1-cp313-cp313t-macosx_14_0_arm64.whl", hash = "sha256:2ceb2d3e01c5f1d83c4189737a42d9cb2fc38a6eeed225e7515eef71ad301dce", size = 20741977, upload-time = "2026-02-23T00:20:07.935Z" }, + { url = "https://files.pythonhosted.org/packages/d9/b8/8ccd9b766ad14c78386599708eb745f6b44f08400a5fd0ade7cf89b6fc93/scipy-1.17.1-cp313-cp313t-macosx_14_0_x86_64.whl", hash = "sha256:844e165636711ef41f80b4103ed234181646b98a53c8f05da12ca5ca289134f6", size = 23029601, upload-time = "2026-02-23T00:20:12.161Z" }, + { url = "https://files.pythonhosted.org/packages/6d/a0/3cb6f4d2fb3e17428ad2880333cac878909ad1a89f678527b5328b93c1d4/scipy-1.17.1-cp313-cp313t-manylinux_2_27_aarch64.manylinux_2_28_aarch64.whl", hash = "sha256:158dd96d2207e21c966063e1635b1063cd7787b627b6f07305315dd73d9c679e", size = 33019667, upload-time = "2026-02-23T00:20:17.208Z" }, + { url = "https://files.pythonhosted.org/packages/f3/c3/2d834a5ac7bf3a0c806ad1508efc02dda3c8c61472a56132d7894c312dea/scipy-1.17.1-cp313-cp313t-manylinux_2_27_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:74cbb80d93260fe2ffa334efa24cb8f2f0f622a9b9febf8b483c0b865bfb3475", size = 35264159, upload-time = "2026-02-23T00:20:23.087Z" }, + { url = 
"https://files.pythonhosted.org/packages/4d/77/d3ed4becfdbd217c52062fafe35a72388d1bd82c2d0ba5ca19d6fcc93e11/scipy-1.17.1-cp313-cp313t-musllinux_1_2_aarch64.whl", hash = "sha256:dbc12c9f3d185f5c737d801da555fb74b3dcfa1a50b66a1a93e09190f41fab50", size = 35102771, upload-time = "2026-02-23T00:20:28.636Z" }, + { url = "https://files.pythonhosted.org/packages/bd/12/d19da97efde68ca1ee5538bb261d5d2c062f0c055575128f11a2730e3ac1/scipy-1.17.1-cp313-cp313t-musllinux_1_2_x86_64.whl", hash = "sha256:94055a11dfebe37c656e70317e1996dc197e1a15bbcc351bcdd4610e128fe1ca", size = 37665910, upload-time = "2026-02-23T00:20:34.743Z" }, + { url = "https://files.pythonhosted.org/packages/06/1c/1172a88d507a4baaf72c5a09bb6c018fe2ae0ab622e5830b703a46cc9e44/scipy-1.17.1-cp313-cp313t-win_amd64.whl", hash = "sha256:e30bdeaa5deed6bc27b4cc490823cd0347d7dae09119b8803ae576ea0ce52e4c", size = 36562980, upload-time = "2026-02-23T00:20:40.575Z" }, + { url = "https://files.pythonhosted.org/packages/70/b0/eb757336e5a76dfa7911f63252e3b7d1de00935d7705cf772db5b45ec238/scipy-1.17.1-cp313-cp313t-win_arm64.whl", hash = "sha256:a720477885a9d2411f94a93d16f9d89bad0f28ca23c3f8daa521e2dcc3f44d49", size = 24856543, upload-time = "2026-02-23T00:20:45.313Z" }, + { url = "https://files.pythonhosted.org/packages/cf/83/333afb452af6f0fd70414dc04f898647ee1423979ce02efa75c3b0f2c28e/scipy-1.17.1-cp314-cp314-macosx_10_14_x86_64.whl", hash = "sha256:a48a72c77a310327f6a3a920092fa2b8fd03d7deaa60f093038f22d98e096717", size = 31584510, upload-time = "2026-02-23T00:21:01.015Z" }, + { url = "https://files.pythonhosted.org/packages/ed/a6/d05a85fd51daeb2e4ea71d102f15b34fedca8e931af02594193ae4fd25f7/scipy-1.17.1-cp314-cp314-macosx_12_0_arm64.whl", hash = "sha256:45abad819184f07240d8a696117a7aacd39787af9e0b719d00285549ed19a1e9", size = 28170131, upload-time = "2026-02-23T00:21:05.888Z" }, + { url = 
"https://files.pythonhosted.org/packages/db/7b/8624a203326675d7746a254083a187398090a179335b2e4a20e2ddc46e83/scipy-1.17.1-cp314-cp314-macosx_14_0_arm64.whl", hash = "sha256:3fd1fcdab3ea951b610dc4cef356d416d5802991e7e32b5254828d342f7b7e0b", size = 20342032, upload-time = "2026-02-23T00:21:09.904Z" }, + { url = "https://files.pythonhosted.org/packages/c9/35/2c342897c00775d688d8ff3987aced3426858fd89d5a0e26e020b660b301/scipy-1.17.1-cp314-cp314-macosx_14_0_x86_64.whl", hash = "sha256:7bdf2da170b67fdf10bca777614b1c7d96ae3ca5794fd9587dce41eb2966e866", size = 22678766, upload-time = "2026-02-23T00:21:14.313Z" }, + { url = "https://files.pythonhosted.org/packages/ef/f2/7cdb8eb308a1a6ae1e19f945913c82c23c0c442a462a46480ce487fdc0ac/scipy-1.17.1-cp314-cp314-manylinux_2_27_aarch64.manylinux_2_28_aarch64.whl", hash = "sha256:adb2642e060a6549c343603a3851ba76ef0b74cc8c079a9a58121c7ec9fe2350", size = 32957007, upload-time = "2026-02-23T00:21:19.663Z" }, + { url = "https://files.pythonhosted.org/packages/0b/2e/7eea398450457ecb54e18e9d10110993fa65561c4f3add5e8eccd2b9cd41/scipy-1.17.1-cp314-cp314-manylinux_2_27_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:eee2cfda04c00a857206a4330f0c5e3e56535494e30ca445eb19ec624ae75118", size = 35221333, upload-time = "2026-02-23T00:21:25.278Z" }, + { url = "https://files.pythonhosted.org/packages/d9/77/5b8509d03b77f093a0d52e606d3c4f79e8b06d1d38c441dacb1e26cacf46/scipy-1.17.1-cp314-cp314-musllinux_1_2_aarch64.whl", hash = "sha256:d2650c1fb97e184d12d8ba010493ee7b322864f7d3d00d3f9bb97d9c21de4068", size = 35042066, upload-time = "2026-02-23T00:21:31.358Z" }, + { url = "https://files.pythonhosted.org/packages/f9/df/18f80fb99df40b4070328d5ae5c596f2f00fffb50167e31439e932f29e7d/scipy-1.17.1-cp314-cp314-musllinux_1_2_x86_64.whl", hash = "sha256:08b900519463543aa604a06bec02461558a6e1cef8fdbb8098f77a48a83c8118", size = 37612763, upload-time = "2026-02-23T00:21:37.247Z" }, + { url = 
"https://files.pythonhosted.org/packages/4b/39/f0e8ea762a764a9dc52aa7dabcfad51a354819de1f0d4652b6a1122424d6/scipy-1.17.1-cp314-cp314-win_amd64.whl", hash = "sha256:3877ac408e14da24a6196de0ddcace62092bfc12a83823e92e49e40747e52c19", size = 37290984, upload-time = "2026-02-23T00:22:35.023Z" }, + { url = "https://files.pythonhosted.org/packages/7c/56/fe201e3b0f93d1a8bcf75d3379affd228a63d7e2d80ab45467a74b494947/scipy-1.17.1-cp314-cp314-win_arm64.whl", hash = "sha256:f8885db0bc2bffa59d5c1b72fad7a6a92d3e80e7257f967dd81abb553a90d293", size = 25192877, upload-time = "2026-02-23T00:22:39.798Z" }, + { url = "https://files.pythonhosted.org/packages/96/ad/f8c414e121f82e02d76f310f16db9899c4fcde36710329502a6b2a3c0392/scipy-1.17.1-cp314-cp314t-macosx_10_14_x86_64.whl", hash = "sha256:1cc682cea2ae55524432f3cdff9e9a3be743d52a7443d0cba9017c23c87ae2f6", size = 31949750, upload-time = "2026-02-23T00:21:42.289Z" }, + { url = "https://files.pythonhosted.org/packages/7c/b0/c741e8865d61b67c81e255f4f0a832846c064e426636cd7de84e74d209be/scipy-1.17.1-cp314-cp314t-macosx_12_0_arm64.whl", hash = "sha256:2040ad4d1795a0ae89bfc7e8429677f365d45aa9fd5e4587cf1ea737f927b4a1", size = 28585858, upload-time = "2026-02-23T00:21:47.706Z" }, + { url = "https://files.pythonhosted.org/packages/ed/1b/3985219c6177866628fa7c2595bfd23f193ceebbe472c98a08824b9466ff/scipy-1.17.1-cp314-cp314t-macosx_14_0_arm64.whl", hash = "sha256:131f5aaea57602008f9822e2115029b55d4b5f7c070287699fe45c661d051e39", size = 20757723, upload-time = "2026-02-23T00:21:52.039Z" }, + { url = "https://files.pythonhosted.org/packages/c0/19/2a04aa25050d656d6f7b9e7b685cc83d6957fb101665bfd9369ca6534563/scipy-1.17.1-cp314-cp314t-macosx_14_0_x86_64.whl", hash = "sha256:9cdc1a2fcfd5c52cfb3045feb399f7b3ce822abdde3a193a6b9a60b3cb5854ca", size = 23043098, upload-time = "2026-02-23T00:21:56.185Z" }, + { url = 
"https://files.pythonhosted.org/packages/86/f1/3383beb9b5d0dbddd030335bf8a8b32d4317185efe495374f134d8be6cce/scipy-1.17.1-cp314-cp314t-manylinux_2_27_aarch64.manylinux_2_28_aarch64.whl", hash = "sha256:6e3dcd57ab780c741fde8dc68619de988b966db759a3c3152e8e9142c26295ad", size = 33030397, upload-time = "2026-02-23T00:22:01.404Z" }, + { url = "https://files.pythonhosted.org/packages/41/68/8f21e8a65a5a03f25a79165ec9d2b28c00e66dc80546cf5eb803aeeff35b/scipy-1.17.1-cp314-cp314t-manylinux_2_27_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:a9956e4d4f4a301ebf6cde39850333a6b6110799d470dbbb1e25326ac447f52a", size = 35281163, upload-time = "2026-02-23T00:22:07.024Z" }, + { url = "https://files.pythonhosted.org/packages/84/8d/c8a5e19479554007a5632ed7529e665c315ae7492b4f946b0deb39870e39/scipy-1.17.1-cp314-cp314t-musllinux_1_2_aarch64.whl", hash = "sha256:a4328d245944d09fd639771de275701ccadf5f781ba0ff092ad141e017eccda4", size = 35116291, upload-time = "2026-02-23T00:22:12.585Z" }, + { url = "https://files.pythonhosted.org/packages/52/52/e57eceff0e342a1f50e274264ed47497b59e6a4e3118808ee58ddda7b74a/scipy-1.17.1-cp314-cp314t-musllinux_1_2_x86_64.whl", hash = "sha256:a77cbd07b940d326d39a1d1b37817e2ee4d79cb30e7338f3d0cddffae70fcaa2", size = 37682317, upload-time = "2026-02-23T00:22:18.513Z" }, + { url = "https://files.pythonhosted.org/packages/11/2f/b29eafe4a3fbc3d6de9662b36e028d5f039e72d345e05c250e121a230dd4/scipy-1.17.1-cp314-cp314t-win_amd64.whl", hash = "sha256:eb092099205ef62cd1782b006658db09e2fed75bffcae7cc0d44052d8aa0f484", size = 37345327, upload-time = "2026-02-23T00:22:24.442Z" }, + { url = "https://files.pythonhosted.org/packages/07/39/338d9219c4e87f3e708f18857ecd24d22a0c3094752393319553096b98af/scipy-1.17.1-cp314-cp314t-win_arm64.whl", hash = "sha256:200e1050faffacc162be6a486a984a0497866ec54149a01270adc8a59b7c7d21", size = 25489165, upload-time = "2026-02-23T00:22:29.563Z" }, ] [[package]] name = "sentry-sdk" -version = "2.53.0" +version = "2.54.0" source = { registry = 
"https://pypi.org/simple" } dependencies = [ { name = "certifi" }, { name = "urllib3" }, ] -sdist = { url = "https://files.pythonhosted.org/packages/d3/06/66c8b705179bc54087845f28fd1b72f83751b6e9a195628e2e9af9926505/sentry_sdk-2.53.0.tar.gz", hash = "sha256:6520ef2c4acd823f28efc55e43eb6ce2e6d9f954a95a3aa96b6fd14871e92b77", size = 412369, upload-time = "2026-02-16T11:11:14.743Z" } +sdist = { url = "https://files.pythonhosted.org/packages/c8/e9/2e3a46c304e7fa21eaa70612f60354e32699c7102eb961f67448e222ad7c/sentry_sdk-2.54.0.tar.gz", hash = "sha256:2620c2575128d009b11b20f7feb81e4e4e8ae08ec1d36cbc845705060b45cc1b", size = 413813, upload-time = "2026-03-02T15:12:41.355Z" } wheels = [ - { url = "https://files.pythonhosted.org/packages/47/d4/2fdf854bc3b9c7f55219678f812600a20a138af2dd847d99004994eada8f/sentry_sdk-2.53.0-py2.py3-none-any.whl", hash = "sha256:46e1ed8d84355ae54406c924f6b290c3d61f4048625989a723fd622aab838899", size = 437908, upload-time = "2026-02-16T11:11:13.227Z" }, + { url = "https://files.pythonhosted.org/packages/53/39/be412cc86bc6247b8f69e9383d7950711bd86f8d0a4a4b0fe8fad685bc21/sentry_sdk-2.54.0-py2.py3-none-any.whl", hash = "sha256:fd74e0e281dcda63afff095d23ebcd6e97006102cdc8e78a29f19ecdf796a0de", size = 439198, upload-time = "2026-03-02T15:12:39.546Z" }, ] [[package]] @@ -5338,7 +5349,7 @@ resolution-markers = [ ] dependencies = [ { name = "cuda-bindings", marker = "python_full_version >= '3.10' and platform_machine == 'x86_64' and sys_platform == 'linux'" }, - { name = "filelock", version = "3.24.3", source = { registry = "https://pypi.org/simple" }, marker = "python_full_version >= '3.10'" }, + { name = "filelock", version = "3.25.0", source = { registry = "https://pypi.org/simple" }, marker = "python_full_version >= '3.10'" }, { name = "fsspec", version = "2026.2.0", source = { registry = "https://pypi.org/simple" }, marker = "python_full_version >= '3.10'" }, { name = "jinja2", marker = "python_full_version >= '3.10'" }, { name = "networkx", version 
= "3.4.2", source = { registry = "https://pypi.org/simple" }, marker = "python_full_version == '3.10.*'" }, @@ -5641,26 +5652,26 @@ wheels = [ [[package]] name = "ty" -version = "0.0.17" +version = "0.0.20" source = { registry = "https://pypi.org/simple" } -sdist = { url = "https://files.pythonhosted.org/packages/66/c3/41ae6346443eedb65b96761abfab890a48ce2aa5a8a27af69c5c5d99064d/ty-0.0.17.tar.gz", hash = "sha256:847ed6c120913e280bf9b54d8eaa7a1049708acb8824ad234e71498e8ad09f97", size = 5167209, upload-time = "2026-02-13T13:26:36.835Z" } +sdist = { url = "https://files.pythonhosted.org/packages/56/95/8de69bb98417227b01f1b1d743c819d6456c9fd140255b6124b05b17dfd6/ty-0.0.20.tar.gz", hash = "sha256:ebba6be7974c14efbb2a9adda6ac59848f880d7259f089dfa72a093039f1dcc6", size = 5262529, upload-time = "2026-03-02T15:51:36.587Z" } wheels = [ - { url = "https://files.pythonhosted.org/packages/c0/01/0ef15c22a1c54b0f728ceff3f62d478dbf8b0dcf8ff7b80b954f79584f3e/ty-0.0.17-py3-none-linux_armv6l.whl", hash = "sha256:64a9a16555cc8867d35c2647c2f1afbd3cae55f68fd95283a574d1bb04fe93e0", size = 10192793, upload-time = "2026-02-13T13:27:13.943Z" }, - { url = "https://files.pythonhosted.org/packages/0f/2c/f4c322d9cded56edc016b1092c14b95cf58c8a33b4787316ea752bb9418e/ty-0.0.17-py3-none-macosx_10_12_x86_64.whl", hash = "sha256:eb2dbd8acd5c5a55f4af0d479523e7c7265a88542efe73ed3d696eb1ba7b6454", size = 10051977, upload-time = "2026-02-13T13:26:57.741Z" }, - { url = "https://files.pythonhosted.org/packages/4c/a5/43746c1ff81e784f5fc303afc61fe5bcd85d0fcf3ef65cb2cef78c7486c7/ty-0.0.17-py3-none-macosx_11_0_arm64.whl", hash = "sha256:f18f5fd927bc628deb9ea2df40f06b5f79c5ccf355db732025a3e8e7152801f6", size = 9564639, upload-time = "2026-02-13T13:26:42.781Z" }, - { url = "https://files.pythonhosted.org/packages/d6/b8/280b04e14a9c0474af574f929fba2398b5e1c123c1e7735893b4cd73d13c/ty-0.0.17-py3-none-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = 
"sha256:5383814d1d7a5cc53b3b07661856bab04bb2aac7a677c8d33c55169acdaa83df", size = 10061204, upload-time = "2026-02-13T13:27:00.152Z" }, - { url = "https://files.pythonhosted.org/packages/2a/d7/493e1607d8dfe48288d8a768a2adc38ee27ef50e57f0af41ff273987cda0/ty-0.0.17-py3-none-manylinux_2_17_armv7l.manylinux2014_armv7l.whl", hash = "sha256:9c20423b8744b484f93e7bf2ef8a9724bca2657873593f9f41d08bd9f83444c9", size = 10013116, upload-time = "2026-02-13T13:26:34.543Z" }, - { url = "https://files.pythonhosted.org/packages/80/ef/22f3ed401520afac90dbdf1f9b8b7755d85b0d5c35c1cb35cf5bd11b59c2/ty-0.0.17-py3-none-manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:e6f5b1aba97db9af86517b911674b02f5bc310750485dc47603a105bd0e83ddd", size = 10533623, upload-time = "2026-02-13T13:26:31.449Z" }, - { url = "https://files.pythonhosted.org/packages/75/ce/744b15279a11ac7138832e3a55595706b4a8a209c9f878e3ab8e571d9032/ty-0.0.17-py3-none-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:488bce1a9bea80b851a97cd34c4d2ffcd69593d6c3f54a72ae02e5c6e47f3d0c", size = 11069750, upload-time = "2026-02-13T13:26:48.638Z" }, - { url = "https://files.pythonhosted.org/packages/f2/be/1133c91f15a0e00d466c24f80df486d630d95d1b2af63296941f7473812f/ty-0.0.17-py3-none-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:8df66b91ec84239420985ec215e7f7549bfda2ac036a3b3c065f119d1c06825a", size = 10870862, upload-time = "2026-02-13T13:26:54.715Z" }, - { url = "https://files.pythonhosted.org/packages/3e/4a/a2ed209ef215b62b2d3246e07e833081e07d913adf7e0448fc204be443d6/ty-0.0.17-py3-none-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:002139e807c53002790dfefe6e2f45ab0e04012e76db3d7c8286f96ec121af8f", size = 10628118, upload-time = "2026-02-13T13:26:45.439Z" }, - { url = "https://files.pythonhosted.org/packages/b3/0c/87476004cb5228e9719b98afffad82c3ef1f84334bde8527bcacba7b18cb/ty-0.0.17-py3-none-musllinux_1_2_aarch64.whl", hash = 
"sha256:6c4e01f05ce82e5d489ab3900ca0899a56c4ccb52659453780c83e5b19e2b64c", size = 10038185, upload-time = "2026-02-13T13:27:02.693Z" }, - { url = "https://files.pythonhosted.org/packages/46/4b/98f0b3ba9aef53c1f0305519536967a4aa793a69ed72677b0a625c5313ac/ty-0.0.17-py3-none-musllinux_1_2_armv7l.whl", hash = "sha256:2b226dd1e99c0d2152d218c7e440150d1a47ce3c431871f0efa073bbf899e881", size = 10047644, upload-time = "2026-02-13T13:27:05.474Z" }, - { url = "https://files.pythonhosted.org/packages/93/e0/06737bb80aa1a9103b8651d2eb691a7e53f1ed54111152be25f4a02745db/ty-0.0.17-py3-none-musllinux_1_2_i686.whl", hash = "sha256:8b11f1da7859e0ad69e84b3c5ef9a7b055ceed376a432fad44231bdfc48061c2", size = 10231140, upload-time = "2026-02-13T13:27:10.844Z" }, - { url = "https://files.pythonhosted.org/packages/7c/79/e2a606bd8852383ba9abfdd578f4a227bd18504145381a10a5f886b4e751/ty-0.0.17-py3-none-musllinux_1_2_x86_64.whl", hash = "sha256:c04e196809ff570559054d3e011425fd7c04161529eb551b3625654e5f2434cb", size = 10718344, upload-time = "2026-02-13T13:26:51.66Z" }, - { url = "https://files.pythonhosted.org/packages/c5/2d/2663984ac11de6d78f74432b8b14ba64d170b45194312852b7543cf7fd56/ty-0.0.17-py3-none-win32.whl", hash = "sha256:305b6ed150b2740d00a817b193373d21f0767e10f94ac47abfc3b2e5a5aec809", size = 9672932, upload-time = "2026-02-13T13:27:08.522Z" }, - { url = "https://files.pythonhosted.org/packages/de/b5/39be78f30b31ee9f5a585969930c7248354db90494ff5e3d0756560fb731/ty-0.0.17-py3-none-win_amd64.whl", hash = "sha256:531828267527aee7a63e972f54e5eee21d9281b72baf18e5c2850c6b862add83", size = 10542138, upload-time = "2026-02-13T13:27:17.084Z" }, - { url = "https://files.pythonhosted.org/packages/40/b7/f875c729c5d0079640c75bad2c7e5d43edc90f16ba242f28a11966df8f65/ty-0.0.17-py3-none-win_arm64.whl", hash = "sha256:de9810234c0c8d75073457e10a84825b9cd72e6629826b7f01c7a0b266ae25b1", size = 10023068, upload-time = "2026-02-13T13:26:39.637Z" }, + { url = 
"https://files.pythonhosted.org/packages/0b/2c/718abe48393e521bf852cd6b0f984766869b09c258d6e38a118768a91731/ty-0.0.20-py3-none-linux_armv6l.whl", hash = "sha256:7cc12769c169c9709a829c2248ee2826b7aae82e92caeac813d856f07c021eae", size = 10333656, upload-time = "2026-03-02T15:51:56.461Z" }, + { url = "https://files.pythonhosted.org/packages/41/0e/eb1c4cc4a12862e2327b72657bcebb10b7d9f17046f1bdcd6457a0211615/ty-0.0.20-py3-none-macosx_10_12_x86_64.whl", hash = "sha256:3b777c1bf13bc0a95985ebb8a324b8668a4a9b2e514dde5ccf09e4d55d2ff232", size = 10168505, upload-time = "2026-03-02T15:51:51.895Z" }, + { url = "https://files.pythonhosted.org/packages/89/7f/10230798e673f0dd3094dfd16e43bfd90e9494e7af6e8e7db516fb431ddf/ty-0.0.20-py3-none-macosx_11_0_arm64.whl", hash = "sha256:b2a4a7db48bf8cba30365001bc2cad7fd13c1a5aacdd704cc4b7925de8ca5eb3", size = 9678510, upload-time = "2026-03-02T15:51:48.451Z" }, + { url = "https://files.pythonhosted.org/packages/7a/3d/59d9159577494edd1728f7db77b51bb07884bd21384f517963114e3ab5f6/ty-0.0.20-py3-none-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:6846427b8b353a43483e9c19936dc6a25612573b44c8f7d983dfa317e7f00d4c", size = 10162926, upload-time = "2026-03-02T15:51:40.558Z" }, + { url = "https://files.pythonhosted.org/packages/9c/a8/b7273eec3e802f78eb913fbe0ce0c16ef263723173e06a5776a8359b2c66/ty-0.0.20-py3-none-manylinux_2_17_armv7l.manylinux2014_armv7l.whl", hash = "sha256:245ceef5bd88df366869385cf96411cb14696334f8daa75597cf7e41c3012eb8", size = 10171702, upload-time = "2026-03-02T15:51:44.069Z" }, + { url = "https://files.pythonhosted.org/packages/9f/32/5f1144f2f04a275109db06e3498450c4721554215b80ae73652ef412eeab/ty-0.0.20-py3-none-manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:c4d21d1cdf67a444d3c37583c17291ddba9382a9871021f3f5d5735e09e85efe", size = 10682552, upload-time = "2026-03-02T15:51:33.102Z" }, + { url = 
"https://files.pythonhosted.org/packages/6a/db/9f1f637310792f12bd6ed37d5fc8ab39ba1a9b0c6c55a33865e9f1cad840/ty-0.0.20-py3-none-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:bd4ffd907d1bd70e46af9e9a2f88622f215e1bf44658ea43b32c2c0b357299e4", size = 11242605, upload-time = "2026-03-02T15:51:34.895Z" }, + { url = "https://files.pythonhosted.org/packages/1a/68/cc9cae2e732fcfd20ccdffc508407905a023fc8493b8771c392d915528dc/ty-0.0.20-py3-none-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:b6594b58d8b0e9d16a22b3045fc1305db4b132c8d70c17784ab8c7a7cc986807", size = 10974655, upload-time = "2026-03-02T15:51:46.011Z" }, + { url = "https://files.pythonhosted.org/packages/1c/c1/b9e3e3f28fe63486331e653f6aeb4184af8b1fe80542fcf74d2dda40a93d/ty-0.0.20-py3-none-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:3662f890518ce6cf4d7568f57d03906912d2afbf948a01089a28e325b1ef198c", size = 10761325, upload-time = "2026-03-02T15:51:26.818Z" }, + { url = "https://files.pythonhosted.org/packages/39/9e/67db935bdedf219a00fb69ec5437ba24dab66e0f2e706dd54a4eca234b84/ty-0.0.20-py3-none-musllinux_1_2_aarch64.whl", hash = "sha256:0e3ffbae58f9f0d17cdc4ac6d175ceae560b7ed7d54f9ddfb1c9f31054bcdc2c", size = 10145793, upload-time = "2026-03-02T15:51:38.562Z" }, + { url = "https://files.pythonhosted.org/packages/c7/de/b0eb815d4dc5a819c7e4faddc2a79058611169f7eef07ccc006531ce228c/ty-0.0.20-py3-none-musllinux_1_2_armv7l.whl", hash = "sha256:176e52bc8bb00b0e84efd34583962878a447a3a0e34ecc45fd7097a37554261b", size = 10189640, upload-time = "2026-03-02T15:51:50.202Z" }, + { url = "https://files.pythonhosted.org/packages/b8/71/63734923965cbb70df1da3e93e4b8875434e326b89e9f850611122f279bf/ty-0.0.20-py3-none-musllinux_1_2_i686.whl", hash = "sha256:b2bc73025418e976ca4143dde71fb9025a90754a08ac03e6aa9b80d4bed1294b", size = 10370568, upload-time = "2026-03-02T15:51:42.295Z" }, + { url = 
"https://files.pythonhosted.org/packages/32/a0/a532c2048533347dff48e9ca98bd86d2c224356e101688a8edaf8d6973fb/ty-0.0.20-py3-none-musllinux_1_2_x86_64.whl", hash = "sha256:d52f7c9ec6e363e094b3c389c344d5a140401f14a77f0625e3f28c21918552f5", size = 10853999, upload-time = "2026-03-02T15:51:58.963Z" }, + { url = "https://files.pythonhosted.org/packages/48/88/36c652c658fe96658043e4abc8ea97801de6fb6e63ab50aaa82807bff1d8/ty-0.0.20-py3-none-win32.whl", hash = "sha256:c7d32bfe93f8fcaa52b6eef3f1b930fd7da410c2c94e96f7412c30cfbabf1d17", size = 9744206, upload-time = "2026-03-02T15:51:54.183Z" }, + { url = "https://files.pythonhosted.org/packages/ff/a7/a4a13bed1d7fd9d97aaa3c5bb5e6d3e9a689e6984806cbca2ab4c9233cac/ty-0.0.20-py3-none-win_amd64.whl", hash = "sha256:a5e10f40fc4a0a1cbcb740a4aad5c7ce35d79f030836ea3183b7a28f43170248", size = 10711999, upload-time = "2026-03-02T15:51:29.212Z" }, + { url = "https://files.pythonhosted.org/packages/8d/7e/6bfd748a9f4ff9267ed3329b86a0f02cdf6ab49f87bc36c8a164852f99fc/ty-0.0.20-py3-none-win_arm64.whl", hash = "sha256:53f7a5c12c960e71f160b734f328eff9a35d578af4b67a36b0bb5990ac5cdc27", size = 10150143, upload-time = "2026-03-02T15:51:31.283Z" }, ] [[package]] @@ -5698,11 +5709,39 @@ wheels = [ name = "types-docutils" version = "0.22.3.20251115" source = { registry = "https://pypi.org/simple" } +resolution-markers = [ + "python_full_version >= '3.9.2' and python_full_version < '3.10'", + "python_full_version < '3.9.2'", +] sdist = { url = "https://files.pythonhosted.org/packages/eb/d7/576ec24bf61a280f571e1f22284793adc321610b9bcfba1bf468cf7b334f/types_docutils-0.22.3.20251115.tar.gz", hash = "sha256:0f79ea6a7bd4d12d56c9f824a0090ffae0ea4204203eb0006392906850913e16", size = 56828, upload-time = "2025-11-15T02:59:57.371Z" } wheels = [ { url = "https://files.pythonhosted.org/packages/9c/01/61ac9eb38f1f978b47443dc6fd2e0a3b0f647c2da741ddad30771f1b2b6f/types_docutils-0.22.3.20251115-py3-none-any.whl", hash = 
"sha256:c6e53715b65395d00a75a3a8a74e352c669bc63959e65a207dffaa22f4a2ad6e", size = 91951, upload-time = "2025-11-15T02:59:56.413Z" }, ] +[[package]] +name = "types-docutils" +version = "0.22.3.20260223" +source = { registry = "https://pypi.org/simple" } +resolution-markers = [ + "python_full_version >= '3.14' and sys_platform == 'win32'", + "python_full_version >= '3.14' and sys_platform == 'emscripten'", + "python_full_version >= '3.14' and sys_platform != 'emscripten' and sys_platform != 'win32'", + "python_full_version == '3.13.*' and sys_platform == 'win32'", + "python_full_version == '3.13.*' and sys_platform == 'emscripten'", + "python_full_version == '3.13.*' and sys_platform != 'emscripten' and sys_platform != 'win32'", + "python_full_version == '3.12.*' and sys_platform == 'win32'", + "python_full_version == '3.11.*' and sys_platform == 'win32'", + "python_full_version == '3.12.*' and sys_platform == 'emscripten'", + "python_full_version == '3.11.*' and sys_platform == 'emscripten'", + "python_full_version == '3.12.*' and sys_platform != 'emscripten' and sys_platform != 'win32'", + "python_full_version == '3.11.*' and sys_platform != 'emscripten' and sys_platform != 'win32'", + "python_full_version == '3.10.*'", +] +sdist = { url = "https://files.pythonhosted.org/packages/80/33/92c0129283363e3b3ba270bf6a2b7d077d949d2f90afc4abaf6e73578563/types_docutils-0.22.3.20260223.tar.gz", hash = "sha256:e90e868da82df615ea2217cf36dff31f09660daa15fc0f956af53f89c1364501", size = 57230, upload-time = "2026-02-23T04:11:21.806Z" } +wheels = [ + { url = "https://files.pythonhosted.org/packages/ba/c7/a4ae6a75d5b07d63089d5c04d450a0de4a5d48ffcb84b95659b22d3885fe/types_docutils-0.22.3.20260223-py3-none-any.whl", hash = "sha256:cc2d6b7560a28e351903db0989091474aa619ad287843a018324baee9c4d9a8f", size = 91969, upload-time = "2026-02-23T04:11:20.966Z" }, +] + [[package]] name = "types-gevent" version = "24.11.0.20250401" @@ -5770,7 +5809,8 @@ name = "types-pygments" version = 
"2.19.0.20251121" source = { registry = "https://pypi.org/simple" } dependencies = [ - { name = "types-docutils" }, + { name = "types-docutils", version = "0.22.3.20251115", source = { registry = "https://pypi.org/simple" }, marker = "python_full_version < '3.10'" }, + { name = "types-docutils", version = "0.22.3.20260223", source = { registry = "https://pypi.org/simple" }, marker = "python_full_version >= '3.10'" }, ] sdist = { url = "https://files.pythonhosted.org/packages/90/3b/cd650700ce9e26b56bd1a6aa4af397bbbc1784e22a03971cb633cdb0b601/types_pygments-2.19.0.20251121.tar.gz", hash = "sha256:eef114fde2ef6265365522045eac0f8354978a566852f69e75c531f0553822b1", size = 18590, upload-time = "2025-11-21T03:03:46.623Z" } wheels = [ @@ -5781,11 +5821,39 @@ wheels = [ name = "types-python-dateutil" version = "2.9.0.20260124" source = { registry = "https://pypi.org/simple" } +resolution-markers = [ + "python_full_version >= '3.9.2' and python_full_version < '3.10'", + "python_full_version < '3.9.2'", +] sdist = { url = "https://files.pythonhosted.org/packages/fe/41/4f8eb1ce08688a9e3e23709ed07089ccdeaf95b93745bfb768c6da71197d/types_python_dateutil-2.9.0.20260124.tar.gz", hash = "sha256:7d2db9f860820c30e5b8152bfe78dbdf795f7d1c6176057424e8b3fdd1f581af", size = 16596, upload-time = "2026-01-24T03:18:42.975Z" } wheels = [ { url = "https://files.pythonhosted.org/packages/5a/c2/aa5e3f4103cc8b1dcf92432415dde75d70021d634ecfd95b2e913cf43e17/types_python_dateutil-2.9.0.20260124-py3-none-any.whl", hash = "sha256:f802977ae08bf2260142e7ca1ab9d4403772a254409f7bbdf652229997124951", size = 18266, upload-time = "2026-01-24T03:18:42.155Z" }, ] +[[package]] +name = "types-python-dateutil" +version = "2.9.0.20260302" +source = { registry = "https://pypi.org/simple" } +resolution-markers = [ + "python_full_version >= '3.14' and sys_platform == 'win32'", + "python_full_version >= '3.14' and sys_platform == 'emscripten'", + "python_full_version >= '3.14' and sys_platform != 'emscripten' and 
sys_platform != 'win32'", + "python_full_version == '3.13.*' and sys_platform == 'win32'", + "python_full_version == '3.13.*' and sys_platform == 'emscripten'", + "python_full_version == '3.13.*' and sys_platform != 'emscripten' and sys_platform != 'win32'", + "python_full_version == '3.12.*' and sys_platform == 'win32'", + "python_full_version == '3.11.*' and sys_platform == 'win32'", + "python_full_version == '3.12.*' and sys_platform == 'emscripten'", + "python_full_version == '3.11.*' and sys_platform == 'emscripten'", + "python_full_version == '3.12.*' and sys_platform != 'emscripten' and sys_platform != 'win32'", + "python_full_version == '3.11.*' and sys_platform != 'emscripten' and sys_platform != 'win32'", + "python_full_version == '3.10.*'", +] +sdist = { url = "https://files.pythonhosted.org/packages/06/7d/4eb84ea2d4ea72b14f180ed2a5c2e7ac3c8e9fd425f7d69a6516cf127f3b/types_python_dateutil-2.9.0.20260302.tar.gz", hash = "sha256:05a3580c790e6ccad228411ed45245ed739c81e78ba49b1cfdbeb075f42bcab0", size = 16885, upload-time = "2026-03-02T04:02:05.012Z" } +wheels = [ + { url = "https://files.pythonhosted.org/packages/ee/91/80dca6ca3da5078de2a808b648aec2a27c83b3dee1b832ae394a683ebe51/types_python_dateutil-2.9.0.20260302-py3-none-any.whl", hash = "sha256:6e7e65e190fb78c267e58a7426b00f0dd41a6dfb02c12aab910263cfa0bcc3ca", size = 18334, upload-time = "2026-03-02T04:02:04.01Z" }, +] + [[package]] name = "types-pytz" version = "2025.2.0.20251108" @@ -5799,11 +5867,39 @@ wheels = [ name = "types-regex" version = "2026.1.15.20260116" source = { registry = "https://pypi.org/simple" } +resolution-markers = [ + "python_full_version >= '3.9.2' and python_full_version < '3.10'", + "python_full_version < '3.9.2'", +] sdist = { url = "https://files.pythonhosted.org/packages/c4/1a/fefad12cbe6214303d30027933a3e521188d9f283e383a183d9fda5c62fb/types_regex-2026.1.15.20260116.tar.gz", hash = "sha256:7151a9bcc5bbf9ecfccf8335c451aca8204f5a0992e0622aafaf482876cee4f7", size = 12877, 
upload-time = "2026-01-16T03:21:49.461Z" } wheels = [ { url = "https://files.pythonhosted.org/packages/7c/d4/0d47227ea84365bea532dca287fe73cba985d6e1d3a31a71849a8aa91370/types_regex-2026.1.15.20260116-py3-none-any.whl", hash = "sha256:b20786eacbde2f2a261cbe7f5096f483da995488d196f81e585ffd2dffc555e0", size = 11099, upload-time = "2026-01-16T03:21:48.647Z" }, ] +[[package]] +name = "types-regex" +version = "2026.2.28.20260301" +source = { registry = "https://pypi.org/simple" } +resolution-markers = [ + "python_full_version >= '3.14' and sys_platform == 'win32'", + "python_full_version >= '3.14' and sys_platform == 'emscripten'", + "python_full_version >= '3.14' and sys_platform != 'emscripten' and sys_platform != 'win32'", + "python_full_version == '3.13.*' and sys_platform == 'win32'", + "python_full_version == '3.13.*' and sys_platform == 'emscripten'", + "python_full_version == '3.13.*' and sys_platform != 'emscripten' and sys_platform != 'win32'", + "python_full_version == '3.12.*' and sys_platform == 'win32'", + "python_full_version == '3.11.*' and sys_platform == 'win32'", + "python_full_version == '3.12.*' and sys_platform == 'emscripten'", + "python_full_version == '3.11.*' and sys_platform == 'emscripten'", + "python_full_version == '3.12.*' and sys_platform != 'emscripten' and sys_platform != 'win32'", + "python_full_version == '3.11.*' and sys_platform != 'emscripten' and sys_platform != 'win32'", + "python_full_version == '3.10.*'", +] +sdist = { url = "https://files.pythonhosted.org/packages/3a/ed/106958cb686316113b748ed4209fa363fd92b15759d5409c3930fed36606/types_regex-2026.2.28.20260301.tar.gz", hash = "sha256:644c231db3f368908320170c14905731a7ae5fabdac0f60f5d6d12ecdd3bc8dd", size = 13157, upload-time = "2026-03-01T04:11:13.559Z" } +wheels = [ + { url = "https://files.pythonhosted.org/packages/c7/bb/9bc26fcf5155bd25efeca35f8ba6bffb8b3c9da2baac8bf40067606418f3/types_regex-2026.2.28.20260301-py3-none-any.whl", hash = 
"sha256:7da7a1fe67528238176a5844fd435ca90617cf605341308686afbc579fdea5c0", size = 11130, upload-time = "2026-03-01T04:11:11.454Z" }, +] + [[package]] name = "types-requests" version = "2.32.4.20260107" @@ -5873,15 +5969,15 @@ wheels = [ [[package]] name = "typeshed-client" -version = "2.8.2" +version = "2.9.0" source = { registry = "https://pypi.org/simple" } dependencies = [ { name = "importlib-resources" }, { name = "typing-extensions" }, ] -sdist = { url = "https://files.pythonhosted.org/packages/fe/3e/4074d3505b4700a6bf13cb1bb2d1848bb8c78e902e3f9fe5916274c5d284/typeshed_client-2.8.2.tar.gz", hash = "sha256:9d8e29fb74574d87bf9a719f77131dc40f2aeea20e97d25d4a3dc2cc30debd31", size = 501617, upload-time = "2025-07-16T01:49:49.299Z" } +sdist = { url = "https://files.pythonhosted.org/packages/71/34/e9fcb7ebbace96b6ab0f397df47dad7e42d8819aa091bc6c4ea1e7f9226b/typeshed_client-2.9.0.tar.gz", hash = "sha256:9c2659a4ba11a9d8597d63770416b42c69861189bf861809f6443d329c84be3a", size = 521553, upload-time = "2026-03-01T18:25:57.658Z" } wheels = [ - { url = "https://files.pythonhosted.org/packages/11/db/e7474719e90062df673057e865f94f67da2d0b4f671d8051020c74962c77/typeshed_client-2.8.2-py3-none-any.whl", hash = "sha256:4cf886d976c777689cd31889f13abf5bfb7797c82519b07e5969e541380c75ee", size = 760467, upload-time = "2025-07-16T01:49:47.758Z" }, + { url = "https://files.pythonhosted.org/packages/86/50/42c0cadd4d62b0d98929db479346b7da6e4ab8346a4de39ed80176fb39b7/typeshed_client-2.9.0-py3-none-any.whl", hash = "sha256:9383660241a4864fd4af971e533b735bd8c5b3d2f88f7ac279e41699ebe1369c", size = 786547, upload-time = "2026-03-01T18:25:55.976Z" }, ] [[package]] @@ -5947,27 +6043,27 @@ wheels = [ [[package]] name = "uv" -version = "0.10.4" -source = { registry = "https://pypi.org/simple" } -sdist = { url = "https://files.pythonhosted.org/packages/c2/bb/dfd872ab6515e5609dc899acb65ccaf8cbedddefa3e34e8da0a5b3e13070/uv-0.10.4.tar.gz", hash = 
"sha256:b9ecf9f9145b95ddd6627b106e2e74f4204393b41bea2488079872699c03612e", size = 3875347, upload-time = "2026-02-17T22:01:22.28Z" } -wheels = [ - { url = "https://files.pythonhosted.org/packages/d8/a3/565e5e45b5006c108ccd596682768c00be988421a83be92193c90bd889e4/uv-0.10.4-py3-none-linux_armv6l.whl", hash = "sha256:97cd6856145dec1d50821468bb6a10c14f3d71015eb97bb657163c837b5ffe79", size = 22352134, upload-time = "2026-02-17T22:01:30.071Z" }, - { url = "https://files.pythonhosted.org/packages/3e/c6/b86f3fdcde9f270e6dc1ff631a4fe73971bf4162c4dd169c7621110361b8/uv-0.10.4-py3-none-macosx_10_12_x86_64.whl", hash = "sha256:44dd91ef224cfce2203716ecf244c3d3641269d1c99996aab852248caf2aeba4", size = 21417697, upload-time = "2026-02-17T22:01:51.162Z" }, - { url = "https://files.pythonhosted.org/packages/63/91/c4ddf7e55e05394967615050cc364a999157a44c008d0e1e9db2ed49a11c/uv-0.10.4-py3-none-macosx_11_0_arm64.whl", hash = "sha256:751959135a62f006ef51f3fcc5d02ec67986defa0424d470cce0918eede36a55", size = 20082236, upload-time = "2026-02-17T22:01:43.025Z" }, - { url = "https://files.pythonhosted.org/packages/25/92/606701b147d421ba2afe327d25f1ec5f59e519157b7e530d09cf61781d22/uv-0.10.4-py3-none-manylinux_2_17_aarch64.manylinux2014_aarch64.musllinux_1_1_aarch64.whl", hash = "sha256:c184891b496c5fa04a7e1396d7f1953f52c97a5635636330854ab68f9e8ec212", size = 21921200, upload-time = "2026-02-17T22:01:24.131Z" }, - { url = "https://files.pythonhosted.org/packages/c3/79/942e75d0920a9e4cac76257cd3e2c238f1963d7e45423793f92e84eaa480/uv-0.10.4-py3-none-manylinux_2_17_armv7l.manylinux2014_armv7l.musllinux_1_1_armv7l.whl", hash = "sha256:5b8a2170ecc700d82ed322fa056789ae2281353fef094e44f563c2f32ab8f438", size = 21974822, upload-time = "2026-02-17T22:01:45.337Z" }, - { url = "https://files.pythonhosted.org/packages/60/71/e5b1140c5c7296f935037a967717a82591522bbc93b4e67c4554dfbb4380/uv-0.10.4-py3-none-manylinux_2_17_armv7l.manylinux2014_armv7l.whl", hash = 
"sha256:decaf620106efa0d09ca27a8301dd83b8a5371e42649cd2704cfd11fe31af7d7", size = 21953309, upload-time = "2026-02-17T22:01:38.225Z" }, - { url = "https://files.pythonhosted.org/packages/70/a3/03ac1ff2058413c2c7d347f3b3396f291e192b096d2625a201c00bd962c6/uv-0.10.4-py3-none-manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:f7d1035db05ac5b94387395428bdcbfce685f6c8eb2b711b66a5a1b397111913", size = 23217053, upload-time = "2026-02-17T22:01:09.278Z" }, - { url = "https://files.pythonhosted.org/packages/e3/d5/9b02140e8ff29d9b575335662288493cdcde5f123337613c04613017cf23/uv-0.10.4-py3-none-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:e754f9c8fd7532a28da7deaa6e400de5e7b459f7846bd5320db215a074fa8664", size = 24053086, upload-time = "2026-02-17T22:01:32.722Z" }, - { url = "https://files.pythonhosted.org/packages/f8/80/7023e1b0f9180226f8c3aa3e207383671cb524eb8bbd8a8eecf1c0cfe867/uv-0.10.4-py3-none-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:d419ef8d4fbd5be0af952a60c76d4f6183acb827cc729095d11c63e7dfaec24c", size = 23121689, upload-time = "2026-02-17T22:01:26.835Z" }, - { url = "https://files.pythonhosted.org/packages/f2/b3/4b9580d62e1245df52e8516cf3e404ff39cc72634d2d749d47b1dada4161/uv-0.10.4-py3-none-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:82978155e571f2ac3dd57077bd746bfe41b65fa19accc3c92d1f09632cd36c63", size = 23136767, upload-time = "2026-02-17T22:01:40.729Z" }, - { url = "https://files.pythonhosted.org/packages/bd/4e/058976e2a5513f11954e09595a1821d5db1819e96e00bafded19c6a470e9/uv-0.10.4-py3-none-manylinux_2_28_aarch64.whl", hash = "sha256:8437e56a7d0f8ecd7421e8b84024dd8153179b8f1371ca1bd66b79fa7fb4c2c1", size = 22003202, upload-time = "2026-02-17T22:01:12.447Z" }, - { url = "https://files.pythonhosted.org/packages/41/c5/da0fc5b732f7dd1f99116ce19e3c1cae7dfa7d04528a0c38268f20643edf/uv-0.10.4-py3-none-manylinux_2_31_riscv64.whl", hash = 
"sha256:ff1c6a465ec035dfe2dfd745b2e85061f47ab3c5cc626eead491994c028eacc6", size = 22720004, upload-time = "2026-02-17T22:01:53.551Z" }, - { url = "https://files.pythonhosted.org/packages/71/17/13c24dd56c135553645c2c62543eba928e88479fdd2d8356fdf35a0113bc/uv-0.10.4-py3-none-musllinux_1_1_i686.whl", hash = "sha256:525dc49a02b78fcd77431f013f2c48b2a152e31808e792c0d1aee4600495a320", size = 22401692, upload-time = "2026-02-17T22:01:35.368Z" }, - { url = "https://files.pythonhosted.org/packages/9c/b2/7a5fdbc0bfd8364e6290457794127d5e766dbc6d44bb15d1a9e318bc356b/uv-0.10.4-py3-none-musllinux_1_1_x86_64.whl", hash = "sha256:7d514b30877fda6e83874ccbd1379e0249cfa064511c5858433edcf697d0d4e3", size = 23330968, upload-time = "2026-02-17T22:01:15.237Z" }, - { url = "https://files.pythonhosted.org/packages/d1/df/004e32be4cd24338422842dd93383f2df0be4554efb6872fef37997ff3ca/uv-0.10.4-py3-none-win32.whl", hash = "sha256:4aed1237847dbd694475c06e8608f2f5f6509181ac148ee35694400d382a3784", size = 21373394, upload-time = "2026-02-17T22:01:20.362Z" }, - { url = "https://files.pythonhosted.org/packages/31/dd/1900452678d46f6a649ab8167bededb02500b0561fc9f69e1f52607895c7/uv-0.10.4-py3-none-win_amd64.whl", hash = "sha256:4a1c595cf692fa611019a7ad9bf4b0757fccd0a3f838ca05e53db82912ddaa39", size = 23813606, upload-time = "2026-02-17T22:01:17.733Z" }, - { url = "https://files.pythonhosted.org/packages/7b/e8/c6ba7ceee3ec58d21156b4968449e6a12af15eea8d26308b3b3ffeef2baf/uv-0.10.4-py3-none-win_arm64.whl", hash = "sha256:28c59a02d7a648b75a9c2ea735773d9d357a1eee773b78593c275b0bef1a4b73", size = 22180241, upload-time = "2026-02-17T22:01:56.305Z" }, +version = "0.10.7" +source = { registry = "https://pypi.org/simple" } +sdist = { url = "https://files.pythonhosted.org/packages/7c/ec/b324a43b55fe59577505478a396cb1d2758487a2e2270c81ccfa4ac6c96d/uv-0.10.7.tar.gz", hash = "sha256:7c3b0133c2d6bd725d5a35ec5e109ebf0d75389943abe826f3d9ea6d6667a375", size = 3922193, upload-time = "2026-02-27T12:33:58.525Z" } +wheels = [ 
+ { url = "https://files.pythonhosted.org/packages/f3/1b/decff24553325561850d70b75c737076e6fcbcfbf233011a27a33f06e4d9/uv-0.10.7-py3-none-linux_armv6l.whl", hash = "sha256:6a0af6c7a90fd2053edfa2c8ee719078ea906a2d9f4798d3fb3c03378726209a", size = 22497542, upload-time = "2026-02-27T12:33:39.425Z" }, + { url = "https://files.pythonhosted.org/packages/fc/b5/51152c87921bc2576fecb982df4a02ac9cfd7fc934e28114a1232b99eed4/uv-0.10.7-py3-none-macosx_10_12_x86_64.whl", hash = "sha256:3b7db0cab77232a7c8856062904fc3b9db22383f1dec7e97a9588fb6c8470f6a", size = 21558860, upload-time = "2026-02-27T12:34:03.362Z" }, + { url = "https://files.pythonhosted.org/packages/5e/15/8365dc2ded350a4ee5fcbbf9b15195cb2b45855114f2a154b5effb6fa791/uv-0.10.7-py3-none-macosx_11_0_arm64.whl", hash = "sha256:d872d2ff9c9dfba989b5f05f599715bc0f19b94cd0dbf8ae4ad22f8879a66c8c", size = 20212775, upload-time = "2026-02-27T12:33:55.365Z" }, + { url = "https://files.pythonhosted.org/packages/53/a0/ccf25e897f3907b5a6fd899007ff9a80b5bbf151b3a75a375881005611fd/uv-0.10.7-py3-none-manylinux_2_17_aarch64.manylinux2014_aarch64.musllinux_1_1_aarch64.whl", hash = "sha256:d9b40d03693efda80a41e5d18ac997efdf1094b27fb75471c1a8f51a9ebeffb3", size = 22015584, upload-time = "2026-02-27T12:33:47.374Z" }, + { url = "https://files.pythonhosted.org/packages/fa/3a/5099747954e7774768572d30917bb6bda6b8d465d7a3c49c9bbf7af2a812/uv-0.10.7-py3-none-manylinux_2_17_armv7l.manylinux2014_armv7l.musllinux_1_1_armv7l.whl", hash = "sha256:e74fe4df9cf31fe84f20b84a0054874635077d31ce20e7de35ff0dd64d498d7b", size = 22100376, upload-time = "2026-02-27T12:34:06.169Z" }, + { url = "https://files.pythonhosted.org/packages/0c/1a/75897fd966b871803cf78019fa31757ced0d54af5ffd7f57bce8b01d64f3/uv-0.10.7-py3-none-manylinux_2_17_armv7l.manylinux2014_armv7l.whl", hash = "sha256:9c76659fc8bb618dd35cd83b2f479c6f880555a16630a454a251045c4c118ea4", size = 22105202, upload-time = "2026-02-27T12:34:16.972Z" }, + { url = 
"https://files.pythonhosted.org/packages/b5/1e/0b8caedd66ca911533e18fd051da79a213c792404138812c66043d529b9e/uv-0.10.7-py3-none-manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:1d160cceb9468024ca40dc57a180289dfd2024d98e42f2284b9ec44355723b0a", size = 23335601, upload-time = "2026-02-27T12:34:11.161Z" }, + { url = "https://files.pythonhosted.org/packages/69/94/b741af277e39a92e0da07fe48c338eee1429c2607e7a192e41345208bb24/uv-0.10.7-py3-none-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:c775975d891cb60cf10f00953e61e643fcb9a9139e94c9ef5c805fe36e90477f", size = 24152851, upload-time = "2026-02-27T12:33:33.904Z" }, + { url = "https://files.pythonhosted.org/packages/27/b2/da351ccd02f0fb1aec5f992b886bea1374cce44276a78904348e2669dd78/uv-0.10.7-py3-none-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:a709e75583231cc1f39567fb3d8d9b4077ff94a64046eb242726300144ed1a4a", size = 23276444, upload-time = "2026-02-27T12:33:36.891Z" }, + { url = "https://files.pythonhosted.org/packages/71/a9/2735cc9dc39457c9cf64d1ce2ba5a9a8ecbb103d0fb64b052bf33ba3d669/uv-0.10.7-py3-none-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:89de2504407dcf04aece914c6ca3b9d8e60cf9ff39a13031c1df1f7c040cea81", size = 23218464, upload-time = "2026-02-27T12:34:00.904Z" }, + { url = "https://files.pythonhosted.org/packages/20/5f/5f204e9c3f04f5fc844d2f98d80a7de64b6b304af869644ab478d909f6ff/uv-0.10.7-py3-none-manylinux_2_28_aarch64.whl", hash = "sha256:9945de1d11c4a5ad77e9c4f36f8b5f9e7c9c3c32999b8bc0e7e579145c3b641c", size = 22092562, upload-time = "2026-02-27T12:34:14.155Z" }, + { url = "https://files.pythonhosted.org/packages/dd/a4/16bebf106e3289a29cc1e1482d551c49bd220983e9b4bc5960142389ad3f/uv-0.10.7-py3-none-manylinux_2_31_riscv64.whl", hash = "sha256:dbe43527f478e2ffa420516aa465f82057763936bbea56f814fd054a9b7f961f", size = 22851312, upload-time = "2026-02-27T12:34:08.651Z" }, + { url = 
"https://files.pythonhosted.org/packages/d1/7a/953b1da589225d98ca8668412f665c3192f6deed2a0f4bb782b0df18f611/uv-0.10.7-py3-none-musllinux_1_1_i686.whl", hash = "sha256:c0783f327631141501bdc5f31dd2b4c748df7e7f5dc5cdbfc0fbb82da86cc9ca", size = 22543775, upload-time = "2026-02-27T12:33:30.935Z" }, + { url = "https://files.pythonhosted.org/packages/8b/67/e133afdabf76e43989448be1c2ef607f13afc32aa1ee9f6897115dec8417/uv-0.10.7-py3-none-musllinux_1_1_x86_64.whl", hash = "sha256:eba438899010522812d3497af586e6eedc94fa2b0ced028f51812f0c10aafb30", size = 23431187, upload-time = "2026-02-27T12:33:42.131Z" }, + { url = "https://files.pythonhosted.org/packages/ba/40/6ffb58ec88a33d6cbe9a606966f9558807f37a50f7be7dc756824df2d04c/uv-0.10.7-py3-none-win32.whl", hash = "sha256:b56d1818aafb2701d92e94f552126fe71d30a13f28712d99345ef5cafc53d874", size = 21524397, upload-time = "2026-02-27T12:33:44.579Z" }, + { url = "https://files.pythonhosted.org/packages/e3/1f/74f4d625db838f716a555908d41777b6357bacc141ddef117a01855e5ef9/uv-0.10.7-py3-none-win_amd64.whl", hash = "sha256:ad0d0ddd9f5407ad8699e3b20fe6c18406cd606336743e246b16914801cfd8b0", size = 23999929, upload-time = "2026-02-27T12:33:49.839Z" }, + { url = "https://files.pythonhosted.org/packages/48/4e/20cbfbcb1a0f48c5c1ca94f6baa0fa00754aafda365da9160c15e3b9c277/uv-0.10.7-py3-none-win_arm64.whl", hash = "sha256:edf732de80c1a9701180ef8c7a2fa926a995712e4a34ae8c025e090f797c2e0b", size = 22353084, upload-time = "2026-02-27T12:33:52.792Z" }, ] [[package]]