From e797064af9b20cdc893cc47ec859fcc6dc7c171e Mon Sep 17 00:00:00 2001 From: eleanorjboyd <26030610+eleanorjboyd@users.noreply.github.com> Date: Fri, 6 Feb 2026 13:41:10 -0800 Subject: [PATCH 1/9] Add project support for pytest execution --- docs/test-plan-project-based-execution.md | 511 +++++ plan-project-based-exec.md | 217 +++ src/client/testing/common/debugLauncher.ts | 139 +- src/client/testing/common/types.ts | 10 + .../common/projectTestExecution.ts | 239 +++ .../testing/testController/controller.ts | 12 + .../pytest/pytestExecutionAdapter.ts | 18 +- .../unittest/testExecutionAdapter.ts | 12 +- .../testing/common/debugLauncher.unit.test.ts | 1672 ++++++++++------- .../common/projectTestExecution.unit.test.ts | 773 ++++++++ .../pytestExecutionAdapter.unit.test.ts | 219 +++ 11 files changed, 3100 insertions(+), 722 deletions(-) create mode 100644 docs/test-plan-project-based-execution.md create mode 100644 plan-project-based-exec.md create mode 100644 src/client/testing/testController/common/projectTestExecution.ts create mode 100644 src/test/testing/testController/common/projectTestExecution.unit.test.ts diff --git a/docs/test-plan-project-based-execution.md b/docs/test-plan-project-based-execution.md new file mode 100644 index 000000000000..8de1f1ae66bc --- /dev/null +++ b/docs/test-plan-project-based-execution.md @@ -0,0 +1,511 @@ +# Test Plan: Project-Based Pytest Execution + +This document outlines the testing strategy for the project-based pytest execution feature, including scenarios, edge cases, and test implementations. + +## Table of Contents +1. [Overview](#overview) +2. [Test Architecture Summary](#test-architecture-summary) +3. [Unit Tests - New Functions](#unit-tests---new-functions) +4. [Unit Tests - Modified Functions](#unit-tests---modified-functions) +5. [Integration/Higher-Level Tests](#integrationhigher-level-tests) +6. [Edge Cases & Error Scenarios](#edge-cases--error-scenarios) +7. [Implementation Recommendations](#implementation-recommendations) + +--- + +## Overview + +The project-based execution feature introduces: +- **`projectTestExecution.ts`** - New file with execution orchestration functions +- **`pytestExecutionAdapter.ts`** - Modified to accept `ProjectAdapter` parameter +- **`debugLauncher.ts`** - New debug session isolation with unique markers +- **`controller.ts`** - Integration point calling `executeTestsForProjects()` + +--- + +## Test Architecture Summary + +### Existing Patterns to Reuse + +| Pattern | Location | Description | +|---------|----------|-------------| +| TypeMoq mocking | `pytestExecutionAdapter.unit.test.ts` | Mock services, exec factory, debug launcher | +| Sinon stubs for utilities | `workspaceTestAdapter.unit.test.ts` | Stub `util.*` functions | +| Deferred promises | `testCancellationRunAdapters.unit.test.ts` | Test async flows and cancellation | +| TestItem mocking | `testExecutionHandler.unit.test.ts` | Create mock test items with children | +| ProjectAdapter creation | `testProjectRegistry.unit.test.ts` | Mock Python projects and environments | +| Debug service mocking | `debugLauncher.unit.test.ts` | Mock `IDebugService`, session handling | + +### Testing Tools Used +- **Mocha** - Test framework (suite/test) +- **TypeMoq** - Interface mocking +- **Sinon** - Stubs, spies, fakes +- **Chai** - Assertions (expect/assert) + +--- + +## Unit Tests - New Functions + +### File: `projectTestExecution.unit.test.ts` (NEW) + +#### 1. 
`groupTestItemsByProject()`

**Function Signature:**
```typescript
groupTestItemsByProject(
    testItems: TestItem[],
    projects: ProjectAdapter[]
): Map<string, { project: ProjectAdapter; items: TestItem[] }>
```

**Test Cases:**

| Test Name | Scenario | Expected Behavior |
|-----------|----------|-------------------|
| `should group single item to single project` | 1 test item, 1 project | Map has 1 entry with 1 item |
| `should group multiple items to single project` | 3 items same project | Map has 1 entry with 3 items |
| `should group items across multiple projects` | 3 items, 2 projects | Map has 2 entries, items split correctly |
| `should return empty map when no test items` | 0 items, 2 projects | Empty map |
| `should handle items with no matching project` | Item outside all project paths | Item not included, logged as verbose |
| `should match to most specific project (longest path)` | Nested projects `/a` and `/a/b` | Item in `/a/b/test.py` → project `/a/b` |
| `should handle Windows paths` | `C:\workspace\project` paths | Correct grouping |

**Mock Setup:**
```typescript
// Create mock test items with URIs
function createMockTestItem(id: string, uri: Uri): TestItem {
    return {
        id,
        uri,
        children: { size: 0, forEach: () => {} }
    } as unknown as TestItem;
}

// Create mock ProjectAdapter
function createMockProject(projectPath: string): ProjectAdapter {
    return {
        projectUri: Uri.file(projectPath),
        projectName: path.basename(projectPath),
        // ... other required properties
    } as unknown as ProjectAdapter;
}
```

---

#### 2. `findProjectForTestItem()`

**Test Cases:**

| Test Name | Scenario | Expected |
|-----------|----------|----------|
| `should return undefined for item with no URI` | `item.uri = undefined` | `undefined` |
| `should return project when item path starts with project path` | `/proj/tests/test.py` → `/proj` | Returns project |
| `should return undefined when no project matches` | `/other/test.py` vs `/proj` | `undefined` |
| `should return most specific project for nested paths` | `/ws/a/b/test.py` with projects `/ws/a` and `/ws/a/b` | `/ws/a/b` project |
| `should handle exact path match` | Item at `/proj/test.py`, project at `/proj` | Returns project |

---

#### 3. `getTestCaseNodesRecursive()`

**Test Cases:**

| Test Name | Scenario | Expected |
|-----------|----------|----------|
| `should return single item when no children` | Leaf test case | `[item]` |
| `should return all leaf nodes from nested structure` | File → Class → Methods | All method nodes |
| `should handle deeply nested structure` | 4 levels deep | All leaf nodes |
| `should return the item itself when children collection is empty` | Item with `children.size = 0` | `[item]` |

**Mock Setup:**
```typescript
function createNestedTestItem(
    id: string,
    childIds: string[]
): TestItem {
    const children = new Map<string, TestItem>();
    childIds.forEach(cid => {
        children.set(cid, createMockTestItem(cid, Uri.file('/test.py')));
    });
    return {
        id,
        uri: Uri.file('/test.py'),
        children: {
            size: children.size,
            forEach: (cb: (item: TestItem) => void) => children.forEach(cb)
        }
    } as unknown as TestItem;
}
```

---

#### 4. 
`executeTestsForProject()` + +**Test Cases:** + +| Test Name | Scenario | Expected | +|-----------|----------|----------| +| `should call executionAdapter.runTests with correct parameters` | Normal execution | Adapter called with projectUri, testIds, project | +| `should mark all test items as started` | 3 test items | `runInstance.started()` called 3 times | +| `should collect testIds from resultResolver.vsIdToRunId` | Test items with mapped IDs | Correct IDs passed to adapter | +| `should handle empty testIds gracefully` | No mapped IDs found | Returns early, logs verbose | +| `should pass project to execution adapter` | Project-based mode | `project` parameter is the ProjectAdapter | + +--- + +#### 5. `executeTestsForProjects()` + +**Test Cases:** + +| Test Name | Scenario | Expected | +|-----------|----------|----------| +| `should execute tests for multiple projects in parallel` | 3 projects, 9 tests | All 3 executionAdapter.runTests called | +| `should skip execution if cancellation requested before start` | Token cancelled | No adapters called | +| `should skip project if cancellation requested mid-execution` | Cancel after 1st project | 2nd project skipped | +| `should handle empty projects array` | 0 projects | Returns early, logs error | +| `should setup coverage when profile kind is Coverage` | Coverage profile | `loadDetailedCoverage` set on profile | +| `should send telemetry for each project execution` | 2 projects | 2 telemetry events | +| `should continue other projects if one fails` | 1 project throws | Other projects still execute | +| `should not log cancellation as error` | Cancelled during execution | No error logged | + +--- + +#### 6. `setupCoverageForProject()` + +**Test Cases:** + +| Test Name | Scenario | Expected | +|-----------|----------|----------| +| `should set loadDetailedCoverage on profile` | Coverage profile kind | Function assigned | +| `should do nothing for non-coverage profile` | Run profile kind | No changes to profile | +| `should return details from project.resultResolver.detailedCoverageMap` | Coverage data exists | Returns coverage details | +| `should return empty array when no coverage data` | No data for file | Returns `[]` | + +--- + +## Unit Tests - Modified Functions + +### File: `pytestExecutionAdapter.unit.test.ts` (EXTEND) + +**New Test Cases:** + +| Test Name | Scenario | Expected | +|-----------|----------|----------| +| `should set PROJECT_ROOT_PATH env var when project provided` | Project-based execution | `PROJECT_ROOT_PATH` set to project.projectUri.fsPath | +| `should use project's Python environment when available` | Project with pythonEnv | `execService` created with project's env | +| `should pass debugSessionName in LaunchOptions for debug` | Debug mode with project | `debugSessionName` = project.projectName | +| `should fall back to execFactory when no project environment` | No project.pythonEnvironment | Uses execFactory.createActivatedEnvironment | + +**Mock Setup Addition:** +```typescript +const mockProject: ProjectAdapter = { + projectUri: Uri.file('/workspace/myproject'), + projectName: 'myproject (Python 3.11)', + pythonEnvironment: { + execInfo: { run: { executable: '/usr/bin/python3' } } + }, + // ... 
other fields +} as unknown as ProjectAdapter; + +// Test with project +adapter.runTests(uri, testIds, kind, testRun, execFactory, debugLauncher, undefined, mockProject); +``` + +--- + +### File: `debugLauncher.unit.test.ts` (EXTEND) + +**New Test Cases for Session Isolation:** + +| Test Name | Scenario | Expected | +|-----------|----------|----------| +| `should add unique session marker to launch config` | Any debug launch | `config[TEST_SESSION_MARKER_KEY]` is unique | +| `should only terminate matching session on callback` | Multiple sessions | Only session with matching marker terminates | +| `should use debugSessionName in config name when provided` | `options.debugSessionName` set | `config.name` includes session name | +| `should use pythonPath when provided` | `options.pythonPath` set | `config.python` = pythonPath | +| `should handle parallel debug sessions independently` | 2 concurrent launches | Each completes independently | +| `should dispose listener when session terminates` | Session ends | `onDidTerminateDebugSession` listener disposed | +| `should resolve deferred on matching session termination` | Correct session ends | Promise resolves | +| `should not resolve deferred on non-matching session termination` | Different session ends | Promise still pending | + +**Mock Setup for Parallel Sessions:** +```typescript +test('should handle parallel debug sessions independently', async () => { + const sessions: DebugSession[] = []; + let terminateCallback: (session: DebugSession) => void; + + debugService + .setup(d => d.startDebugging(typemoq.It.isAny(), typemoq.It.isAny(), undefined)) + .callback((_, config) => { + const mockSession = { + id: `session-${sessions.length}`, + configuration: config + }; + sessions.push(mockSession); + }) + .returns(() => Promise.resolve(true)); + + debugService + .setup(d => d.onDidTerminateDebugSession(typemoq.It.isAny())) + .callback((cb) => { terminateCallback = cb; }) + .returns(() => ({ dispose: () => {} })); + + // Launch two sessions in parallel + const launch1 = debugLauncher.launchDebugger(options1); + const launch2 = debugLauncher.launchDebugger(options2); + + // Terminate first session + terminateCallback(sessions[0]); + + // Verify only first resolved + await launch1; // Should resolve + // launch2 should still be pending +}); +``` + +--- + +## Integration/Higher-Level Tests + +### File: `projectBasedExecution.integration.test.ts` (NEW) + +These tests verify the complete flow from controller through to execution adapters. 
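
As a starting point, here is a minimal integration-style sketch of the multi-project routing flow. It assumes the helpers proposed under [Implementation Recommendations](#implementation-recommendations) (`createMockTestItem`, `createMockProjectAdapter`, `createMockDependencies` are sketches from that section, not existing utilities) and a TypeMoq `TestRun` mock; paths and IDs are illustrative:

```typescript
import * as sinon from 'sinon';
import * as typemoq from 'typemoq';
import { CancellationTokenSource, TestRun, TestRunRequest } from 'vscode';

test('should route each test item to its own project adapter', async () => {
    const projectA = createMockProjectAdapter({ projectPath: '/ws/a', projectName: 'a' });
    const projectB = createMockProjectAdapter({ projectPath: '/ws/b', projectName: 'b' });
    const itemA = createMockTestItem('idA', '/ws/a/tests/test_a.py');
    const itemB = createMockTestItem('idB', '/ws/b/tests/test_b.py');

    // Map VS Code item IDs to pytest run IDs so executeTestsForProject can collect them.
    projectA.resultResolver.vsIdToRunId.set('idA', 'tests/test_a.py::test_a');
    projectB.resultResolver.vsIdToRunId.set('idB', 'tests/test_b.py::test_b');

    const runInstance = typemoq.Mock.ofType<TestRun>();
    await executeTestsForProjects(
        [projectA, projectB],
        [itemA, itemB],
        runInstance.object,
        { profile: undefined } as unknown as TestRunRequest,
        new CancellationTokenSource().token,
        createMockDependencies(),
    );

    // Each project's adapter runs exactly once, with only its own test IDs.
    const runA = projectA.executionAdapter.runTests as sinon.SinonStub;
    const runB = projectB.executionAdapter.runTests as sinon.SinonStub;
    sinon.assert.calledOnce(runA);
    sinon.assert.calledOnce(runB);
    sinon.assert.calledWithMatch(runA, projectA.projectUri, ['tests/test_a.py::test_a']);
    sinon.assert.calledWithMatch(runB, projectB.projectUri, ['tests/test_b.py::test_b']);
});
```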
+ +#### Test Suite: Multi-Project Workspace Execution + +| Test Name | Scenario | Verifications | +|-----------|----------|---------------| +| `should discover and execute tests across 3 projects` | Multi-project workspace | Each project's adapter called with correct tests | +| `should use correct Python environment per project` | Projects with different Pythons | Environment matches project config | +| `should handle mixed test selection across projects` | 2 tests from proj1, 1 from proj2 | Correct grouping and execution | +| `should isolate results per project` | Results from multiple projects | ResultResolver receives per-project data | + +#### Test Suite: Debug Mode Multi-Project + +| Test Name | Scenario | Verifications | +|-----------|----------|---------------| +| `should launch separate debug session per project` | 2 projects in debug mode | 2 debug sessions started | +| `should name debug sessions with project names` | Debug with named projects | Session names include project names | +| `should allow stopping one session without affecting others` | Stop project A | Project B continues | +| `should handle debug session errors per project` | One project fails to debug | Other projects still debug | + +#### Test Suite: Cancellation Flow + +| Test Name | Scenario | Verifications | +|-----------|----------|---------------| +| `should cancel all projects when token cancelled` | Cancel mid-run | All projects stop gracefully | +| `should not start pending projects after cancellation` | Cancel after 1 project | Remaining projects not started | +| `should propagate cancellation to debug sessions` | Cancel during debug | Debug sessions terminate | +| `should cleanup named pipes on cancellation` | Cancel during execution | Server disposed, pipes cleaned | + +--- + +### File: `controller.unit.test.ts` (EXTEND) + +**New Test Cases:** + +| Test Name | Scenario | Expected | +|-----------|----------|----------| +| `should call executeTestsForProjects when projects registered` | Project-based mode | `executeTestsForProjects()` called | +| `should fall back to legacy execution when no projects` | Legacy mode | `workspaceTestAdapter.executeTests()` called | +| `should pass correct dependencies to executeTestsForProjects` | Valid deps | pythonExecFactory, debugLauncher, registry passed | + +--- + +## Edge Cases & Error Scenarios + +### Edge Case Matrix + +| Category | Edge Case | Test Location | Expected Behavior | +|----------|-----------|---------------|-------------------| +| **Empty Input** | No test items selected | `executeTestsForProjects` | Returns early, no errors | +| **Empty Input** | No projects in registry | `executeTestsForProjects` | Logs error, returns early | +| **Empty Input** | Test items with no URIs | `findProjectForTestItem` | Returns undefined, item skipped | +| **Path Matching** | Nested projects (parent/child) | `groupTestItemsByProject` | Uses most specific match | +| **Path Matching** | Sibling projects | `groupTestItemsByProject` | Correct assignment | +| **Path Matching** | Windows vs Unix paths | `findProjectForTestItem` | Handles both | +| **Cancellation** | Cancelled before start | `executeTestsForProjects` | Immediate return | +| **Cancellation** | Cancelled mid-project | `executeTestsForProject` | Stops gracefully | +| **Cancellation** | Cancelled during debug | `debugLauncher` | Session terminated | +| **Debug Sessions** | Multiple simultaneous | `debugLauncher` | Independent isolation | +| **Debug Sessions** | One fails to start | `executeTestsForProjects` | Others 
continue |
| **Debug Sessions** | Session terminated externally | `debugLauncher` | Deferred resolves |
| **Environment** | Project missing Python env | `pytestExecutionAdapter` | Falls back to workspace env |
| **Environment** | Invalid Python path | `pytestExecutionAdapter` | Error reported |
| **Results** | Mixed pass/fail across projects | `executeTestsForProjects` | All results processed |
| **Results** | One project times out | `executeTestsForProjects` | Others complete |

### Error Scenarios

| Error Type | Test | Expected Outcome |
|------------|------|------------------|
| Adapter throws exception | `executeTestsForProject` catches | Error logged, other projects continue |
| Debug launcher rejects | `executeTestsForProjects` | Error logged, not cancellation error |
| Named pipe fails | `pytestExecutionAdapter` | Test run fails gracefully |
| Result resolver not found | `executeTestsForProject` | Test IDs empty, returns early |

---

## Implementation Recommendations

### 1. New Test File Structure

```
src/test/testing/testController/
├── common/
│   ├── projectTestExecution.unit.test.ts    <-- NEW
│   ├── testProjectRegistry.unit.test.ts     (existing, extend if needed)
│   └── projectUtils.unit.test.ts            (existing)
├── pytest/
│   └── pytestExecutionAdapter.unit.test.ts  (extend)
├── debugLauncher.unit.test.ts               (extend in common/)
└── controller.unit.test.ts                  (extend)
```

### 2. Shared Test Utilities

Create a helper file for project-based test utilities:

```typescript
// src/test/testing/testController/common/projectTestHelpers.ts

import * as sinon from 'sinon';
import * as typemoq from 'typemoq';
import { FileCoverageDetail, TestItem, Uri } from 'vscode';
import { IPythonExecutionFactory } from '../../../../client/common/process/types';
import { ITestDebugLauncher } from '../../../../client/testing/common/types';
import { ProjectAdapter } from '../../../../client/testing/testController/common/projectAdapter';
import { ProjectExecutionDependencies } from '../../../../client/testing/testController/common/projectTestExecution';
import { TestProjectRegistry } from '../../../../client/testing/testController/common/testProjectRegistry';

export function createMockTestItem(id: string, uriPath: string, children?: TestItem[]): TestItem {
    const childMap = new Map<string, TestItem>();
    children?.forEach(c => childMap.set(c.id, c));

    return {
        id,
        uri: Uri.file(uriPath),
        children: {
            size: childMap.size,
            forEach: (cb: (item: TestItem) => void) => childMap.forEach(cb)
        }
    } as unknown as TestItem;
}

export function createMockProjectAdapter(config: {
    projectPath: string;
    projectName: string;
    pythonPath?: string;
    testProvider?: 'pytest' | 'unittest';
}): ProjectAdapter {
    return {
        projectUri: Uri.file(config.projectPath),
        projectName: config.projectName,
        testProvider: config.testProvider ?? 'pytest',
        pythonEnvironment: config.pythonPath ? {
            execInfo: { run: { executable: config.pythonPath } }
        } : undefined,
        executionAdapter: {
            runTests: sinon.stub().resolves()
        },
        resultResolver: {
            vsIdToRunId: new Map<string, string>(),
            detailedCoverageMap: new Map<string, FileCoverageDetail[]>()
        }
    } as unknown as ProjectAdapter;
}

export function createMockDependencies(): ProjectExecutionDependencies {
    return {
        projectRegistry: typemoq.Mock.ofType<TestProjectRegistry>().object,
        pythonExecFactory: typemoq.Mock.ofType<IPythonExecutionFactory>().object,
        debugLauncher: typemoq.Mock.ofType<ITestDebugLauncher>().object
    };
}
```

### 3. Test Priority Order

1. **HIGH PRIORITY** - Core logic tests:
    - `groupTestItemsByProject()` - All cases
    - `findProjectForTestItem()` - All cases
    - `executeTestsForProject()` - Basic flow
    - Debug session isolation tests

2. **MEDIUM PRIORITY** - Integration tests:
    - Multi-project execution flow
    - Cancellation propagation
    - Error handling

3. **LOWER PRIORITY** - Edge cases:
    - Windows path handling
    - Coverage setup
    - Telemetry verification

### 4. 
Mocking Strategy + +| Component | Mock Type | Reason | +|-----------|-----------|--------| +| `TestItem` | Custom object | Simple interface | +| `ProjectAdapter` | Custom object | Many optional fields | +| `TestRun` | TypeMoq | Verify method calls | +| `IPythonExecutionFactory` | TypeMoq | Interface with promises | +| `ITestDebugLauncher` | TypeMoq | Interface with callbacks | +| `IDebugService` | TypeMoq | Complex async behavior | +| Utility functions (`util.*`) | Sinon stub | Replace implementation | + +### 5. Async Testing Patterns + +```typescript +// Pattern for testing cancellation +test('should stop on cancellation', async () => { + const token = new CancellationTokenSource(); + const deferredExecution = createDeferred(); + + mockAdapter.runTests.callsFake(async () => { + token.cancel(); // Cancel during execution + await deferredExecution.promise; + }); + + // Should complete without hanging + await executeTestsForProjects(projects, items, runInstance, request, token.token, deps); + + // Verify correct behavior + expect(log).to.include('cancelled'); +}); + +// Pattern for parallel execution verification +test('should execute projects in parallel', async () => { + const executionOrder: string[] = []; + const deferreds = projects.map(() => createDeferred()); + + projects.forEach((p, i) => { + p.executionAdapter.runTests.callsFake(async () => { + executionOrder.push(`start-${i}`); + await deferreds[i].promise; + executionOrder.push(`end-${i}`); + }); + }); + + const executePromise = executeTestsForProjects(...); + + // All should have started before any completed + await new Promise(r => setTimeout(r, 10)); + expect(executionOrder).to.deep.equal(['start-0', 'start-1', 'start-2']); + + // Resolve all + deferreds.forEach(d => d.resolve()); + await executePromise; +}); +``` + +--- + +## Summary + +| Test Category | Estimated Test Count | Effort | +|---------------|---------------------|--------| +| `projectTestExecution.unit.test.ts` (new) | ~25 tests | Medium | +| `pytestExecutionAdapter.unit.test.ts` (extend) | ~5 tests | Low | +| `debugLauncher.unit.test.ts` (extend) | ~8 tests | Medium | +| `controller.unit.test.ts` (extend) | ~3 tests | Low | +| Integration tests (optional) | ~10 tests | High | +| **Total** | **~50 tests** | - | + +The primary focus should be on the new `projectTestExecution.unit.test.ts` file, as it contains all the new orchestration logic. The debug launcher session isolation tests are also critical since they fix a real bug. diff --git a/plan-project-based-exec.md b/plan-project-based-exec.md new file mode 100644 index 000000000000..3a3d2a9040f5 --- /dev/null +++ b/plan-project-based-exec.md @@ -0,0 +1,217 @@ +# Plan: Project-Based Pytest Execution + +## Overview + +This plan describes the implementation of **project-based test execution for pytest**, enabling multi-project workspace support where each Python project within a workspace can execute tests using its own Python environment. This builds on top of the project-based discovery work from PR #25760. 
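
For concreteness, the target scenario is a workspace such as the following (a hypothetical layout; names are illustrative), where each service is its own Python project with its own interpreter and test suite:

```
workspace/
├── service-api/          # Python 3.11 venv, pytest
│   ├── pyproject.toml
│   └── tests/
│       └── test_api.py
└── service-worker/       # Python 3.12 venv, pytest
    ├── pyproject.toml
    └── tests/
        └── test_worker.py
```

Today both services' tests would run under a single workspace interpreter; with this change each project runs under its own environment.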
+ +## Problem to Solve + +In a multi-project workspace (e.g., a monorepo with multiple Python services), users currently cannot: +- Run tests with the correct Python interpreter for each project +- Have separate test trees per project in the Test Explorer +- Properly handle nested projects (parent/child) + +## Architecture + +### Key Components to Add + +| Component | File | Purpose | +| ----------------------- | -------------------------------------------------------------------------------------------- | --------------------------------------------------------------------------- | +| **TestProjectRegistry** | [testProjectRegistry.ts](../src/client/testing/testController/common/testProjectRegistry.ts) | Registry that discovers and manages Python projects per workspace | +| **ProjectAdapter** | [projectAdapter.ts](../src/client/testing/testController/common/projectAdapter.ts) | Interface representing a single Python project with its test infrastructure | +| **projectUtils** | [projectUtils.ts](../src/client/testing/testController/common/projectUtils.ts) | Utility functions for project ID generation and adapter creation | + +### How It Works + +``` +┌─────────────────────────────────────────────────────────────────┐ +│ VS Code Workspace │ +│ ┌─────────────────────────────────────────────────────────────┐│ +│ │ TestController ││ +│ │ ┌───────────────────────────────────────────────────────┐ ││ +│ │ │ TestProjectRegistry │ ││ +│ │ │ ┌─────────────┐ ┌─────────────┐ ┌─────────────┐ │ ││ +│ │ │ │ ProjectA │ │ ProjectB │ │ ProjectC │ │ ││ +│ │ │ │ (Py 3.11) │ │ (Py 3.12) │ │ (Py 3.10) │ │ ││ +│ │ │ │ ┌─────────┐ │ │ ┌─────────┐ │ │ ┌─────────┐ │ │ ││ +│ │ │ │ │Discovery│ │ │ │Discovery│ │ │ │Discovery│ │ │ ││ +│ │ │ │ │Adapter │ │ │ │Adapter │ │ │ │Adapter │ │ │ ││ +│ │ │ │ ├─────────┤ │ │ ├─────────┤ │ │ ├─────────┤ │ │ ││ +│ │ │ │ │Execution│ │ │ │Execution│ │ │ │Execution│ │ │ ││ +│ │ │ │ │Adapter │ │ │ │Adapter │ │ │ │Adapter │ │ │ ││ +│ │ │ │ └─────────┘ │ │ └─────────┘ │ │ └─────────┘ │ │ ││ +│ │ │ └─────────────┘ └─────────────┘ └─────────────┘ │ ││ +│ │ └───────────────────────────────────────────────────────┘ ││ +│ └─────────────────────────────────────────────────────────────┘│ +└─────────────────────────────────────────────────────────────────┘ +``` + +### Execution Flow + +1. **User runs tests** → `TestRunRequest` with selected `TestItem`s arrives +2. **Controller** checks if project-based testing is enabled +3. **Group tests by project** → Tests are sorted by which `ProjectAdapter` they belong to (via URI matching) +4. **Execute per project** → Each project's `executionAdapter.runTests()` is called with: + - The project's Python environment + - `PROJECT_ROOT_PATH` environment variable set to project root +5. 
**Results collected** → Each project's `resultResolver` maps results back to test items + +### Required Changes by File + +#### Controller ([controller.ts](../src/client/testing/testController/controller.ts)) +- Add `TestProjectRegistry` integration +- New methods: `discoverForProject()`, `executeTestsForProjects()`, `groupTestItemsByProject()` +- Debug mode should handle multi-project scenarios by launching multiple debug sessions + +#### Pytest Execution Adapter ([pytestExecutionAdapter.ts](../src/client/testing/testController/pytest/pytestExecutionAdapter.ts)) +- Add `project?: ProjectAdapter` parameter to `runTests()` +- Set `PROJECT_ROOT_PATH` environment variable when project is provided +- Use project's Python environment instead of workspace environment +- Debug launches should use `pythonPath` from project when available + +#### Debug Launcher ([debugLauncher.ts](../src/client/testing/common/debugLauncher.ts)) +- Add optional `pythonPath` to `LaunchOptions` for project-specific interpreter +- Add optional `debugSessionName` to `LaunchOptions` for session identification +- Debug sessions should use explicit Python path when provided +- Use unique session markers to track individual debug sessions (avoids `activeDebugSession` race conditions) +- Properly dispose event handlers when debugging completes + +#### Python Side ([vscode_pytest/__init__.py](../python_files/vscode_pytest/__init__.py)) +- `get_test_root_path()` should return `PROJECT_ROOT_PATH` env var if set (otherwise cwd) +- Session node should use project root for test tree structure + +## Feature Behavior + +### Single Project Workspace +No change from existing behavior—tests run using the workspace's interpreter. + +### Multi-Project Workspace +- Each project has its own root node in Test Explorer +- Running tests uses the correct interpreter for each project +- Results are scoped to the correct project + +### Nested Projects +``` +workspace/ +└── parent-project/ + ├── tests/ + └── child-project/ + └── tests/ +``` +- Parent project discovery ignores child project via `--ignore` flags +- Execution receives specific test IDs, so no cross-contamination + +### Debug Mode +- **Single project**: Debug should proceed normally with project interpreter +- **Multiple projects**: Multiple debug sessions should be launched in parallel—one per project, each using its own interpreter +- **Session naming**: Each debug session includes the project name (e.g., "Debug Tests: alice (Python 3.11)") +- **Session isolation**: Each debug session is tracked independently using unique markers, so stopping one session doesn't affect others + +### Cancellation Handling + +Cancellation is handled at multiple levels to ensure proper cleanup across all parallel project executions: + +``` +┌─────────────────────────────────────────────────────────────────┐ +│ User Clicks "Stop" │ +│ │ │ +│ ▼ │ +│ CancellationToken fires │ +│ │ │ +│ ┌─────────────────┼─────────────────┐ │ +│ ▼ ▼ ▼ │ +│ ┌───────────┐ ┌───────────┐ ┌───────────┐ │ +│ │ Project A │ │ Project B │ │ Project C │ │ +│ │ Execution │ │ Execution │ │ Execution │ │ +│ └─────┬─────┘ └─────┬─────┘ └─────┬─────┘ │ +│ │ │ │ │ +│ ▼ ▼ ▼ │ +│ Kill subprocess Kill subprocess Kill subprocess │ +│ Close pipes Close pipes Close pipes │ +│ Resolve deferred Resolve deferred Resolve deferred │ +└─────────────────────────────────────────────────────────────────┘ +``` + +#### Cancellation Levels + +1. 
**Project execution level** ([projectTestExecution.ts](src/client/testing/testController/common/projectTestExecution.ts)) + - Early exit if cancelled before starting + - Checks cancellation before starting each project's execution + - Projects not yet started are skipped gracefully + +2. **Execution adapter level** ([pytestExecutionAdapter.ts](src/client/testing/testController/pytest/pytestExecutionAdapter.ts)) + - `runInstance.token.onCancellationRequested` kills the subprocess + - Named pipe server is closed via the callback + - Deferred promises resolve to unblock waiting code + +3. **Debug launcher level** ([debugLauncher.ts](src/client/testing/common/debugLauncher.ts)) + - Token cancellation resolves the deferred and invokes cleanup callback + - Session termination events are filtered to only react to the correct session + - Event handlers are disposed when debugging completes + +#### Multi-Session Debug Independence + +When debugging multiple projects simultaneously, each `launchDebugger()` call must track its own debug session independently. The implementation uses a unique marker system: + +```typescript +// Each debug session gets a unique marker in its configuration +const sessionMarker = `test-${Date.now()}-${random}`; +launchArgs[TEST_SESSION_MARKER_KEY] = sessionMarker; + +// When sessions start/terminate, we match by marker (not activeDebugSession) +onDidStartDebugSession((session) => { + if (session.configuration[TEST_SESSION_MARKER_KEY] === sessionMarker) { + ourSession = session; // Found our specific session + } +}); +``` + +This avoids race conditions where the global `activeDebugSession` could be overwritten by another concurrent session start. + +### Legacy Fallback +When Python Environments API is unavailable, the system falls back to single-workspace adapter mode. 
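
The fallback decision itself is a small guard in the controller's run handler; the following is condensed from the `controller.ts` change in this PR:

```typescript
// Use project-based execution only when the registry knows about projects
// for this workspace and pytest is the enabled provider.
if (this.projectRegistry.hasProjects(workspace.uri) && settings.testing.pytestEnabled) {
    const projects = this.projectRegistry.getProjectsArray(workspace.uri);
    await executeTestsForProjects(projects, testItems, runInstance, request, token, {
        projectRegistry: this.projectRegistry,
        pythonExecFactory: this.pythonExecFactory,
        debugLauncher: this.debugLauncher,
    });
    return;
}
// Otherwise continue down the legacy WorkspaceTestAdapter path.
```

When the guard fails (no registered projects, or pytest disabled), execution continues through the existing single-workspace adapter unchanged.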
+ +## Files to Change + +| Category | Files | +| ----------------------- | ------------------------------------------------------------------------------------------------------------ | +| **Core Implementation** | `controller.ts`, `testProjectRegistry.ts`, `projectAdapter.ts`, `projectUtils.ts`, `projectTestExecution.ts` | +| **Adapters** | `pytestExecutionAdapter.ts`, `pytestDiscoveryAdapter.ts`, `resultResolver.ts` | +| **Types** | `types.ts` (common), `types.ts` (testController) | +| **Debug** | `debugLauncher.ts` | +| **Python** | `vscode_pytest/__init__.py` | +| **Tests** | `controller.unit.test.ts`, `testProjectRegistry.unit.test.ts`, `projectUtils.unit.test.ts` | + +## Testing + +### Unit Tests to Add +- `testProjectRegistry.unit.test.ts` - Registry lifecycle, project discovery, nested projects +- `controller.unit.test.ts` - Controller integration, debug scenarios, test grouping +- `projectUtils.unit.test.ts` - Utility functions + +### Test Scenarios to Cover +| Scenario | Coverage | +| ----------------------------- | --------------------------------------------- | +| Single project workspace | Unit tests + legacy flows | +| Multi-project workspace | New controller unit tests | +| Nested projects | Discovery tests + ignore behavior | +| Debug mode (single project) | Existing debug tests | +| Debug mode (multi-project) | Session isolation, independent cancellation | +| Legacy fallback | Existing controller tests | +| Test cancellation | Cancellation at all levels (see above) | + +## Out of Scope +- **Unittest support**: Project-based unittest execution will be handled in a separate PR +- **End-to-end tests**: Manual testing will be required for full validation +- **Multi-project coverage aggregation**: Deferred to future work + +## Expected User Experience + +### Debugging Across Multiple Projects +When debugging tests spanning multiple projects: +- Multiple debug sessions should be launched simultaneously—one per project +- Each debug session should use the project's configured Python interpreter +- All projects' tests should run in debug mode in parallel +- Users should be able to switch between debug sessions in VS Code's debug panel +- **Stopping one debug session should NOT affect other running sessions** +- Each debug session is named with its project (e.g., "Debug Tests: alice (Python 3.11)") diff --git a/src/client/testing/common/debugLauncher.ts b/src/client/testing/common/debugLauncher.ts index c28535b30644..51e46f04196e 100644 --- a/src/client/testing/common/debugLauncher.ts +++ b/src/client/testing/common/debugLauncher.ts @@ -1,6 +1,6 @@ import { inject, injectable, named } from 'inversify'; import * as path from 'path'; -import { DebugConfiguration, l10n, Uri, WorkspaceFolder, DebugSession, DebugSessionOptions } from 'vscode'; +import { DebugConfiguration, l10n, Uri, WorkspaceFolder, DebugSession, DebugSessionOptions, Disposable } from 'vscode'; import { IApplicationShell, IDebugService } from '../../common/application/types'; import { EXTENSION_ROOT_DIR } from '../../common/constants'; import * as internalScripts from '../../common/process/internal/scripts'; @@ -18,6 +18,13 @@ import { showErrorMessage } from '../../common/vscodeApis/windowApis'; import { createDeferred } from '../../common/utils/async'; import { addPathToPythonpath } from './helpers'; +/** + * Key used to mark debug configurations with a unique session identifier. 
+ * This allows us to track which debug session belongs to which launchDebugger() call + * when multiple debug sessions are launched in parallel. + */ +const TEST_SESSION_MARKER_KEY = '__vscodeTestSessionMarker'; + @injectable() export class DebugLauncher implements ITestDebugLauncher { private readonly configService: IConfigurationService; @@ -31,6 +38,27 @@ export class DebugLauncher implements ITestDebugLauncher { this.configService = this.serviceContainer.get(IConfigurationService); } + /** + * Launches a debug session for test execution. + * + * **Cancellation handling:** + * Cancellation can occur from multiple sources, all properly handled: + * 1. **Pre-check**: If already cancelled before starting, returns immediately + * 2. **Token cancellation**: If the parent CancellationToken fires during debugging, + * the deferred resolves and the callback is invoked to clean up resources + * 3. **Session termination**: When the user stops debugging (via UI or completes), + * the onDidTerminateDebugSession event fires and we resolve + * + * **Multi-session support:** + * When debugging tests from multiple projects simultaneously, each launchDebugger() + * call needs to track its own debug session independently. We use a unique marker + * in the launch configuration to identify which session belongs to which call, + * avoiding race conditions with the global `activeDebugSession` property. + * + * @param options Launch configuration including test provider, args, and optional project info + * @param callback Called when the debug session ends (for cleanup like closing named pipes) + * @param sessionOptions VS Code debug session options (e.g., testRun association) + */ public async launchDebugger( options: LaunchOptions, callback?: () => void, @@ -38,18 +66,35 @@ export class DebugLauncher implements ITestDebugLauncher { ): Promise { const deferred = createDeferred(); let hasCallbackBeenCalled = false; + + // Collect disposables for cleanup when debugging completes + const disposables: Disposable[] = []; + + // Ensure callback is only invoked once, even if multiple termination paths fire + const callCallbackOnce = () => { + if (!hasCallbackBeenCalled) { + hasCallbackBeenCalled = true; + callback?.(); + } + }; + + // Early exit if already cancelled before we start if (options.token && options.token.isCancellationRequested) { - hasCallbackBeenCalled = true; - return undefined; + callCallbackOnce(); deferred.resolve(); - callback?.(); + return deferred.promise; } - options.token?.onCancellationRequested(() => { - deferred.resolve(); - callback?.(); - hasCallbackBeenCalled = true; - }); + // Listen for cancellation from the test run (e.g., user clicks stop in Test Explorer) + // This allows the caller to clean up resources even if the debug session is still running + if (options.token) { + disposables.push( + options.token.onCancellationRequested(() => { + deferred.resolve(); + callCallbackOnce(); + }), + ); + } const workspaceFolder = DebugLauncher.resolveWorkspaceFolder(options.cwd); const launchArgs = await this.getLaunchArgs( @@ -59,23 +104,54 @@ export class DebugLauncher implements ITestDebugLauncher { ); const debugManager = this.serviceContainer.get(IDebugService); - let activatedDebugSession: DebugSession | undefined; - debugManager.startDebugging(workspaceFolder, launchArgs, sessionOptions).then(() => { - // Save the debug session after it is started so we can check if it is the one that was terminated. 
- activatedDebugSession = debugManager.activeDebugSession; - }); - debugManager.onDidTerminateDebugSession((session) => { - traceVerbose(`Debug session terminated. sessionId: ${session.id}`); - // Only resolve no callback has been made and the session is the one that was started. - if ( - !hasCallbackBeenCalled && - activatedDebugSession !== undefined && - session.id === activatedDebugSession?.id - ) { - deferred.resolve(); - callback?.(); - } + // Generate a unique marker for this debug session. + // When multiple debug sessions start in parallel (e.g., debugging tests from + // multiple projects), we can't rely on debugManager.activeDebugSession because + // it's a global that could be overwritten by another concurrent session start. + // Instead, we embed a unique marker in our launch configuration and match it + // when the session starts to identify which session is ours. + const sessionMarker = `test-${Date.now()}-${Math.random().toString(36).substring(7)}`; + launchArgs[TEST_SESSION_MARKER_KEY] = sessionMarker; + + let ourSession: DebugSession | undefined; + + // Capture our specific debug session when it starts by matching the marker. + // This fires for ALL debug sessions, so we filter to only our marker. + disposables.push( + debugManager.onDidStartDebugSession((session) => { + if (session.configuration[TEST_SESSION_MARKER_KEY] === sessionMarker) { + ourSession = session; + traceVerbose(`[test-debug] Debug session started: ${session.name} (${session.id})`); + } + }), + ); + + // Handle debug session termination (user stops debugging, or tests complete). + // Only react to OUR session terminating - other parallel sessions should + // continue running independently. + disposables.push( + debugManager.onDidTerminateDebugSession((session) => { + if (ourSession && session.id === ourSession.id) { + traceVerbose(`[test-debug] Debug session terminated: ${session.name} (${session.id})`); + deferred.resolve(); + callCallbackOnce(); + } + }), + ); + + // Start the debug session + const started = await debugManager.startDebugging(workspaceFolder, launchArgs, sessionOptions); + if (!started) { + traceError('Failed to start debug session'); + deferred.resolve(); + callCallbackOnce(); + } + + // Clean up event subscriptions when debugging completes (success, failure, or cancellation) + deferred.promise.finally(() => { + disposables.forEach((d) => d.dispose()); }); + return deferred.promise; } @@ -108,6 +184,12 @@ export class DebugLauncher implements ITestDebugLauncher { subProcess: true, }; } + + // Use project name in debug session name if provided + if (options.debugSessionName) { + debugConfig.name = `Debug Tests: ${options.debugSessionName}`; + } + if (!debugConfig.rules) { debugConfig.rules = []; } @@ -257,6 +339,13 @@ export class DebugLauncher implements ITestDebugLauncher { // run via F5 style debugging. launchArgs.purpose = []; + // For project-based execution, use the explicit Python path if provided. + // This ensures debug sessions use the correct interpreter for each project. 
+ if (options.pythonPath) { + launchArgs.python = options.pythonPath; + traceVerbose(`[test-by-project] Debug session using explicit Python path: ${options.pythonPath}`); + } + return launchArgs; } diff --git a/src/client/testing/common/types.ts b/src/client/testing/common/types.ts index 562005386633..49034d9f5f6f 100644 --- a/src/client/testing/common/types.ts +++ b/src/client/testing/common/types.ts @@ -26,6 +26,16 @@ export type LaunchOptions = { pytestPort?: string; pytestUUID?: string; runTestIdsPort?: string; + /** + * Optional explicit Python path for project-based execution. + * When provided, debug sessions should use this interpreter instead of the workspace default. + */ + pythonPath?: string; + /** + * Optional name for the debug session (e.g., project name). + * Used to identify debug sessions in the VS Code debug panel. + */ + debugSessionName?: string; }; export enum TestFilter { diff --git a/src/client/testing/testController/common/projectTestExecution.ts b/src/client/testing/testController/common/projectTestExecution.ts new file mode 100644 index 000000000000..12652f6de23c --- /dev/null +++ b/src/client/testing/testController/common/projectTestExecution.ts @@ -0,0 +1,239 @@ +// Copyright (c) Microsoft Corporation. All rights reserved. +// Licensed under the MIT License. + +import { CancellationToken, FileCoverageDetail, TestItem, TestRun, TestRunProfileKind, TestRunRequest } from 'vscode'; +import { traceError, traceInfo, traceVerbose } from '../../../logging'; +import { sendTelemetryEvent } from '../../../telemetry'; +import { EventName } from '../../../telemetry/constants'; +import { IPythonExecutionFactory } from '../../../common/process/types'; +import { ITestDebugLauncher } from '../../common/types'; +import { ProjectAdapter } from './projectAdapter'; +import { TestProjectRegistry } from './testProjectRegistry'; +import { getProjectId } from './projectUtils'; + +/** + * Dependencies required for project-based test execution. + * Passed to execution functions to avoid tight coupling to the controller. + */ +export interface ProjectExecutionDependencies { + projectRegistry: TestProjectRegistry; + pythonExecFactory: IPythonExecutionFactory; + debugLauncher: ITestDebugLauncher; +} + +/** + * Executes tests for multiple projects within a workspace (project-based mode). + * Groups test items by their owning project and executes each project's tests + * using that project's Python environment. + * + * Cancellation is handled at multiple levels: + * 1. Before starting each project's execution (checked here) + * 2. 
Within each execution adapter (via runInstance.token)
 */
export async function executeTestsForProjects(
    projects: ProjectAdapter[],
    testItems: TestItem[],
    runInstance: TestRun,
    request: TestRunRequest,
    token: CancellationToken,
    deps: ProjectExecutionDependencies,
): Promise<void> {
    if (projects.length === 0) {
        traceError(`[test-by-project] No projects provided for execution`);
        return;
    }

    // Early exit if already cancelled
    if (token.isCancellationRequested) {
        traceInfo(`[test-by-project] Execution cancelled before starting`);
        return;
    }

    // Group test items by project
    const testsByProject = groupTestItemsByProject(testItems, projects);

    const isDebugMode = request.profile?.kind === TestRunProfileKind.Debug;
    traceInfo(`[test-by-project] Executing tests across ${testsByProject.size} project(s), debug=${isDebugMode}`);

    // Execute tests for each project in parallel
    // For debug mode, multiple debug sessions will be launched in parallel
    // Each execution respects cancellation via runInstance.token
    const executions = Array.from(testsByProject.entries()).map(async ([_projectId, { project, items }]) => {
        // Check for cancellation before starting each project
        if (token.isCancellationRequested) {
            traceInfo(`[test-by-project] Skipping ${project.projectName} - cancellation requested`);
            return;
        }

        if (items.length === 0) return;

        traceInfo(`[test-by-project] Executing ${items.length} test item(s) for project: ${project.projectName}`);

        sendTelemetryEvent(EventName.UNITTEST_RUN, undefined, {
            tool: 'pytest',
            debugging: isDebugMode,
        });

        // Setup coverage for this project if needed
        if (request.profile?.kind === TestRunProfileKind.Coverage) {
            setupCoverageForProject(request, project);
        }

        try {
            await executeTestsForProject(project, items, runInstance, request, deps);
        } catch (error) {
            // Don't log cancellation as an error
            if (!token.isCancellationRequested) {
                traceError(`[test-by-project] Execution failed for project ${project.projectName}:`, error);
            }
        }
    });

    await Promise.all(executions);

    if (token.isCancellationRequested) {
        traceInfo(`[test-by-project] Project executions cancelled`);
    } else {
        traceInfo(`[test-by-project] All project executions completed`);
    }
}

/**
 * Groups test items by their owning project based on file path matching.
 * Each test item's URI is matched against project root paths.
 */
export function groupTestItemsByProject(
    testItems: TestItem[],
    projects: ProjectAdapter[],
): Map<string, { project: ProjectAdapter; items: TestItem[] }> {
    const result = new Map<string, { project: ProjectAdapter; items: TestItem[] }>();

    // Initialize entries for all projects
    for (const project of projects) {
        result.set(getProjectId(project.projectUri), { project, items: [] });
    }

    // Assign each test item to its project
    for (const item of testItems) {
        const project = findProjectForTestItem(item, projects);
        if (project) {
            const entry = result.get(getProjectId(project.projectUri));
            if (entry) {
                entry.items.push(item);
            }
        } else {
            // If no project matches, log it
            traceVerbose(`[test-by-project] Could not match test item ${item.id} to a project`);
        }
    }

    // Remove projects with no test items
    for (const [projectId, entry] of result.entries()) {
        if (entry.items.length === 0) {
            result.delete(projectId);
        }
    }

    return result;
}

/**
 * Finds the project that owns a test item based on the test item's URI.
 * Returns the most specific (longest path) matching project. 
+ */ +export function findProjectForTestItem(item: TestItem, projects: ProjectAdapter[]): ProjectAdapter | undefined { + if (!item.uri) return undefined; + + const itemPath = item.uri.fsPath; + let bestMatch: ProjectAdapter | undefined; + let bestMatchLength = 0; + + for (const project of projects) { + const projectPath = project.projectUri.fsPath; + // Check if the item's path starts with the project's path + if (itemPath.startsWith(projectPath) && projectPath.length > bestMatchLength) { + bestMatch = project; + bestMatchLength = projectPath.length; + } + } + + return bestMatch; +} + +/** + * Executes tests for a single project using the project's Python environment. + */ +export async function executeTestsForProject( + project: ProjectAdapter, + testItems: TestItem[], + runInstance: TestRun, + request: TestRunRequest, + deps: ProjectExecutionDependencies, +): Promise { + const testCaseIds: string[] = []; + + // Mark items as started and collect test IDs + for (const item of testItems) { + // Recursively get test case nodes if this is a parent node + const testCaseNodes = getTestCaseNodesRecursive(item); + for (const node of testCaseNodes) { + runInstance.started(node); + const runId = project.resultResolver.vsIdToRunId.get(node.id); + if (runId) { + testCaseIds.push(runId); + } + } + } + + if (testCaseIds.length === 0) { + traceVerbose(`[test-by-project] No test IDs found for project ${project.projectName}`); + return; + } + + traceInfo(`[test-by-project] Running ${testCaseIds.length} test(s) for project: ${project.projectName}`); + + // Execute tests using the project's execution adapter + await project.executionAdapter.runTests( + project.projectUri, + testCaseIds, + request.profile?.kind, + runInstance, + deps.pythonExecFactory, + deps.debugLauncher, + undefined, // interpreter not needed, project has its own environment + project, + ); +} + +/** + * Recursively gets all test case nodes from a test item tree. + */ +export function getTestCaseNodesRecursive(item: TestItem): TestItem[] { + const results: TestItem[] = []; + if (item.children.size === 0) { + // This is a leaf node (test case) + results.push(item); + } else { + // Recursively get children + item.children.forEach((child) => { + results.push(...getTestCaseNodesRecursive(child)); + }); + } + return results; +} + +/** + * Sets up detailed coverage loading for a project. + */ +export function setupCoverageForProject(request: TestRunRequest, project: ProjectAdapter): void { + if (request.profile?.kind === TestRunProfileKind.Coverage) { + request.profile.loadDetailedCoverage = ( + _testRun: TestRun, + fileCoverage, + _token, + ): Thenable => { + const details = project.resultResolver.detailedCoverageMap.get(fileCoverage.uri.fsPath); + return Promise.resolve(details ?? 
[]); + }; + } +} diff --git a/src/client/testing/testController/controller.ts b/src/client/testing/testController/controller.ts index 036658e2af9e..9a694e3bbf55 100644 --- a/src/client/testing/testController/controller.ts +++ b/src/client/testing/testController/controller.ts @@ -45,6 +45,7 @@ import { IEnvironmentVariablesProvider } from '../../common/variables/types'; import { ProjectAdapter } from './common/projectAdapter'; import { TestProjectRegistry } from './common/testProjectRegistry'; import { createTestAdapters, getProjectId } from './common/projectUtils'; +import { executeTestsForProjects } from './common/projectTestExecution'; import { useEnvExtension, getEnvExtApi } from '../../envExt/api.internal'; import { DidChangePythonProjectsEventArgs, PythonProject } from '../../envExt/types'; @@ -784,6 +785,17 @@ export class PythonTestController implements ITestController, IExtensionSingleAc return; } + // Check if we're in project-based mode and should use project-specific execution + if (this.projectRegistry.hasProjects(workspace.uri) && settings.testing.pytestEnabled) { + const projects = this.projectRegistry.getProjectsArray(workspace.uri); + await executeTestsForProjects(projects, testItems, runInstance, request, token, { + projectRegistry: this.projectRegistry, + pythonExecFactory: this.pythonExecFactory, + debugLauncher: this.debugLauncher, + }); + return; + } + const testAdapter = this.testAdapters.get(workspace.uri) || (this.testAdapters.values().next().value as WorkspaceTestAdapter); diff --git a/src/client/testing/testController/pytest/pytestExecutionAdapter.ts b/src/client/testing/testController/pytest/pytestExecutionAdapter.ts index 6c950ec7e01b..99257238ee26 100644 --- a/src/client/testing/testController/pytest/pytestExecutionAdapter.ts +++ b/src/client/testing/testController/pytest/pytestExecutionAdapter.ts @@ -116,6 +116,16 @@ export class PytestTestExecutionAdapter implements ITestExecutionAdapter { if (profileKind && profileKind === TestRunProfileKind.Coverage) { mutableEnv.COVERAGE_ENABLED = 'True'; } + + // Set PROJECT_ROOT_PATH for project-based testing + // This tells the Python side where to root the test tree for multi-project workspaces + if (project) { + mutableEnv.PROJECT_ROOT_PATH = project.projectUri.fsPath; + traceInfo( + `[test-by-project] Setting PROJECT_ROOT_PATH=${project.projectUri.fsPath} for ${project.projectName}`, + ); + } + const debugBool = profileKind && profileKind === TestRunProfileKind.Debug; // Create the Python environment in which to execute the command. @@ -166,6 +176,10 @@ export class PytestTestExecutionAdapter implements ITestExecutionAdapter { testProvider: PYTEST_PROVIDER, runTestIdsPort: testIdsFileName, pytestPort: resultNamedPipeName, + // Pass explicit Python path for project-based debugging + pythonPath: project?.pythonEnvironment.execInfo?.run?.executable, + // Pass project name for debug session identification + debugSessionName: project?.projectName, }; const sessionOptions: DebugSessionOptions = { testRun: runInstance, @@ -179,7 +193,9 @@ export class PytestTestExecutionAdapter implements ITestExecutionAdapter { sessionOptions, ); } else if (useEnvExtension()) { - const pythonEnv = await getEnvironment(uri); + // For project-based execution, use the project's Python environment + // Otherwise, fall back to getting the environment from the URI + const pythonEnv = project?.pythonEnvironment ?? 
(await getEnvironment(uri)); if (pythonEnv) { const deferredTillExecClose: Deferred = utils.createTestingDeferred(); diff --git a/src/client/testing/testController/unittest/testExecutionAdapter.ts b/src/client/testing/testController/unittest/testExecutionAdapter.ts index 85f09cd0b1f5..967f9529ea2f 100644 --- a/src/client/testing/testController/unittest/testExecutionAdapter.ts +++ b/src/client/testing/testController/unittest/testExecutionAdapter.ts @@ -27,6 +27,7 @@ import { ITestDebugLauncher, LaunchOptions } from '../../common/types'; import { UNITTEST_PROVIDER } from '../../common/constants'; import * as utils from '../common/utils'; import { getEnvironment, runInBackground, useEnvExtension } from '../../../envExt/api.internal'; +import { PythonEnvironment } from '../../../pythonEnvironments/info'; import { ProjectAdapter } from '../common/projectAdapter'; /** @@ -47,9 +48,18 @@ export class UnittestTestExecutionAdapter implements ITestExecutionAdapter { runInstance: TestRun, executionFactory: IPythonExecutionFactory, debugLauncher?: ITestDebugLauncher, - _interpreter?: unknown, // Not used - kept for interface compatibility + interpreter?: PythonEnvironment, project?: ProjectAdapter, ): Promise { + // Note: project parameter is currently unused for unittest. + // Project-based unittest execution will be implemented in a future PR. + console.log( + 'interpreter, project parameters are currently unused in UnittestTestExecutionAdapter, they will be used in a future implementation of project-based unittest execution.:', + { + interpreter, + project, + }, + ); // deferredTillServerClose awaits named pipe server close const deferredTillServerClose: Deferred = utils.createTestingDeferred(); diff --git a/src/test/testing/common/debugLauncher.unit.test.ts b/src/test/testing/common/debugLauncher.unit.test.ts index 397ae03eafc2..f433fec0086b 100644 --- a/src/test/testing/common/debugLauncher.unit.test.ts +++ b/src/test/testing/common/debugLauncher.unit.test.ts @@ -1,695 +1,977 @@ -// Copyright (c) Microsoft Corporation. All rights reserved. -// Licensed under the MIT License. 
- -'use strict'; - -import { expect, use } from 'chai'; -import * as chaiAsPromised from 'chai-as-promised'; -import * as path from 'path'; -import * as sinon from 'sinon'; -import * as TypeMoq from 'typemoq'; -import * as fs from '../../../client/common/platform/fs-paths'; -import * as workspaceApis from '../../../client/common/vscodeApis/workspaceApis'; -import { CancellationTokenSource, DebugConfiguration, DebugSession, Uri, WorkspaceFolder } from 'vscode'; -import { IInvalidPythonPathInDebuggerService } from '../../../client/application/diagnostics/types'; -import { IApplicationShell, IDebugService } from '../../../client/common/application/types'; -import { EXTENSION_ROOT_DIR } from '../../../client/common/constants'; -import '../../../client/common/extensions'; -import { IConfigurationService, IPythonSettings } from '../../../client/common/types'; -import { PythonDebuggerTypeName } from '../../../client/debugger/constants'; -import { IDebugEnvironmentVariablesService } from '../../../client/debugger/extension/configuration/resolvers/helper'; -import { LaunchConfigurationResolver } from '../../../client/debugger/extension/configuration/resolvers/launch'; -import { DebugOptions } from '../../../client/debugger/types'; -import { IInterpreterService } from '../../../client/interpreter/contracts'; -import { IServiceContainer } from '../../../client/ioc/types'; -import { PythonEnvironment } from '../../../client/pythonEnvironments/info'; -import { DebugLauncher } from '../../../client/testing/common/debugLauncher'; -import { LaunchOptions } from '../../../client/testing/common/types'; -import { ITestingSettings } from '../../../client/testing/configuration/types'; -import { TestProvider } from '../../../client/testing/types'; -import { isOs, OSType } from '../../common'; -import { IEnvironmentActivationService } from '../../../client/interpreter/activation/types'; -import { createDeferred } from '../../../client/common/utils/async'; - -use(chaiAsPromised.default); - -suite('Unit Tests - Debug Launcher', () => { - let serviceContainer: TypeMoq.IMock; - let unitTestSettings: TypeMoq.IMock; - let debugLauncher: DebugLauncher; - let debugService: TypeMoq.IMock; - let settings: TypeMoq.IMock; - let debugEnvHelper: TypeMoq.IMock; - let interpreterService: TypeMoq.IMock; - let environmentActivationService: TypeMoq.IMock; - let getWorkspaceFolderStub: sinon.SinonStub; - let getWorkspaceFoldersStub: sinon.SinonStub; - let pathExistsStub: sinon.SinonStub; - let readFileStub: sinon.SinonStub; - const envVars = { FOO: 'BAR' }; - - setup(async () => { - environmentActivationService = TypeMoq.Mock.ofType(); - environmentActivationService - .setup((e) => e.getActivatedEnvironmentVariables(TypeMoq.It.isAny())) - .returns(() => Promise.resolve(envVars)); - interpreterService = TypeMoq.Mock.ofType(); - serviceContainer = TypeMoq.Mock.ofType(undefined, TypeMoq.MockBehavior.Strict); - const configService = TypeMoq.Mock.ofType(undefined, TypeMoq.MockBehavior.Strict); - serviceContainer - .setup((c) => c.get(TypeMoq.It.isValue(IConfigurationService))) - .returns(() => configService.object); - - debugService = TypeMoq.Mock.ofType(undefined, TypeMoq.MockBehavior.Strict); - serviceContainer.setup((c) => c.get(TypeMoq.It.isValue(IDebugService))).returns(() => debugService.object); - getWorkspaceFolderStub = sinon.stub(workspaceApis, 'getWorkspaceFolder'); - getWorkspaceFoldersStub = sinon.stub(workspaceApis, 'getWorkspaceFolders'); - pathExistsStub = sinon.stub(fs, 'pathExists'); - readFileStub = sinon.stub(fs, 
'readFile'); - - const appShell = TypeMoq.Mock.ofType(undefined, TypeMoq.MockBehavior.Strict); - appShell.setup((a) => a.showErrorMessage(TypeMoq.It.isAny())).returns(() => Promise.resolve(undefined)); - serviceContainer.setup((c) => c.get(TypeMoq.It.isValue(IApplicationShell))).returns(() => appShell.object); - - settings = TypeMoq.Mock.ofType(undefined, TypeMoq.MockBehavior.Strict); - configService.setup((c) => c.getSettings(TypeMoq.It.isAny())).returns(() => settings.object); - - unitTestSettings = TypeMoq.Mock.ofType(); - settings.setup((p) => p.testing).returns(() => unitTestSettings.object); - - debugEnvHelper = TypeMoq.Mock.ofType(undefined, TypeMoq.MockBehavior.Strict); - serviceContainer - .setup((c) => c.get(TypeMoq.It.isValue(IDebugEnvironmentVariablesService))) - .returns(() => debugEnvHelper.object); - - debugLauncher = new DebugLauncher(serviceContainer.object, getNewResolver(configService.object)); - }); - - teardown(() => { - sinon.restore(); - }); - - function getNewResolver(configService: IConfigurationService) { - const validator = TypeMoq.Mock.ofType( - undefined, - TypeMoq.MockBehavior.Strict, - ); - validator - .setup((v) => v.validatePythonPath(TypeMoq.It.isAny(), TypeMoq.It.isAny(), TypeMoq.It.isAny())) - .returns(() => Promise.resolve(true)); - return new LaunchConfigurationResolver( - validator.object, - configService, - debugEnvHelper.object, - interpreterService.object, - environmentActivationService.object, - ); - } - function setupDebugManager( - workspaceFolder: WorkspaceFolder, - expected: DebugConfiguration, - testProvider: TestProvider, - ) { - interpreterService - .setup((i) => i.getActiveInterpreter(TypeMoq.It.isAny())) - .returns(() => Promise.resolve(({ path: 'python' } as unknown) as PythonEnvironment)); - settings.setup((p) => p.envFile).returns(() => __filename); - const args = expected.args; - const debugArgs = testProvider === 'unittest' ? 
args.filter((item: string) => item !== '--debug') : args; - expected.args = debugArgs; - - debugEnvHelper - .setup((x) => x.getEnvironmentVariables(TypeMoq.It.isAny(), TypeMoq.It.isAny())) - .returns(() => Promise.resolve(expected.env)); - - const deferred = createDeferred(); - - debugService - .setup((d) => - d.startDebugging(TypeMoq.It.isValue(workspaceFolder), TypeMoq.It.isValue(expected), undefined), - ) - .returns((_wspc: WorkspaceFolder, _expectedParam: DebugConfiguration) => { - deferred.resolve(); - return Promise.resolve(undefined as any); - }); - - // create a fake debug session that the debug service will return on terminate - const fakeDebugSession = TypeMoq.Mock.ofType(); - fakeDebugSession.setup((ds) => ds.id).returns(() => 'id-val'); - const debugSessionInstance = fakeDebugSession.object; - - debugService - .setup((d) => d.activeDebugSession) - .returns(() => debugSessionInstance) - .verifiable(TypeMoq.Times.once()); - - debugService - .setup((d) => d.onDidTerminateDebugSession(TypeMoq.It.isAny())) - .returns((callback) => { - deferred.promise.then(() => { - callback(debugSessionInstance); - }); - return undefined as any; - }) - .verifiable(TypeMoq.Times.once()); - } - function createWorkspaceFolder(folderPath: string): WorkspaceFolder { - return { - index: 0, - name: path.basename(folderPath), - uri: Uri.file(folderPath), - }; - } - function getTestLauncherScript(testProvider: TestProvider, pythonTestAdapterRewriteExperiment?: boolean) { - if (!pythonTestAdapterRewriteExperiment) { - switch (testProvider) { - case 'unittest': { - return path.join(EXTENSION_ROOT_DIR, 'python_files', 'unittestadapter', 'execution.py'); - } - case 'pytest': { - return path.join(EXTENSION_ROOT_DIR, 'python_files', 'vscode_pytest', 'run_pytest_script.py'); - } - default: { - throw new Error(`Unknown test provider '${testProvider}'`); - } - } - } - } - - function getDefaultDebugConfig(): DebugConfiguration { - return { - name: 'Debug Unit Test', - type: PythonDebuggerTypeName, - request: 'launch', - console: 'internalConsole', - env: {}, - envFile: __filename, - stopOnEntry: false, - showReturnValue: true, - redirectOutput: true, - debugStdLib: false, - subProcess: true, - purpose: [], - }; - } - function setupSuccess( - options: LaunchOptions, - testProvider: TestProvider, - expected?: DebugConfiguration, - debugConfigs?: string | DebugConfiguration[], - ) { - const testLaunchScript = getTestLauncherScript(testProvider, false); - - const workspaceFolders = [createWorkspaceFolder(options.cwd), createWorkspaceFolder('five/six/seven')]; - getWorkspaceFoldersStub.returns(workspaceFolders); - getWorkspaceFolderStub.returns(workspaceFolders[0]); - - if (!debugConfigs) { - pathExistsStub.resolves(false); - } else { - pathExistsStub.resolves(true); - - if (typeof debugConfigs !== 'string') { - debugConfigs = JSON.stringify({ - version: '0.1.0', - configurations: debugConfigs, - }); - } - readFileStub.resolves(debugConfigs as string); - } - - if (!expected) { - expected = getDefaultDebugConfig(); - } - expected.rules = [{ path: path.join(EXTENSION_ROOT_DIR, 'python_files'), include: false }]; - expected.program = testLaunchScript; - expected.args = options.args; - - if (!expected.cwd) { - expected.cwd = workspaceFolders[0].uri.fsPath; - } - const pluginPath = path.join(EXTENSION_ROOT_DIR, 'python_files'); - const pythonPath = `${pluginPath}${path.delimiter}${expected.cwd}`; - expected.env.PYTHONPATH = pythonPath; - expected.env.TEST_RUN_PIPE = 'pytestPort'; - expected.env.RUN_TEST_IDS_PIPE = 
'runTestIdsPort'; - - // added by LaunchConfigurationResolver: - if (!expected.python) { - expected.python = 'python'; - } - if (!expected.clientOS) { - expected.clientOS = isOs(OSType.Windows) ? 'windows' : 'unix'; - } - if (!expected.debugAdapterPython) { - expected.debugAdapterPython = 'python'; - } - if (!expected.debugLauncherPython) { - expected.debugLauncherPython = 'python'; - } - expected.workspaceFolder = workspaceFolders[0].uri.fsPath; - expected.debugOptions = []; - if (expected.stopOnEntry) { - expected.debugOptions.push(DebugOptions.StopOnEntry); - } - if (expected.showReturnValue) { - expected.debugOptions.push(DebugOptions.ShowReturnValue); - } - if (expected.redirectOutput) { - expected.debugOptions.push(DebugOptions.RedirectOutput); - } - if (expected.subProcess) { - expected.debugOptions.push(DebugOptions.SubProcess); - } - if (isOs(OSType.Windows)) { - expected.debugOptions.push(DebugOptions.FixFilePathCase); - } - - setupDebugManager(workspaceFolders[0], expected, testProvider); - } - - const testProviders: TestProvider[] = ['pytest', 'unittest']; - - testProviders.forEach((testProvider) => { - const testTitleSuffix = `(Test Framework '${testProvider}')`; - - test(`Must launch debugger ${testTitleSuffix}`, async () => { - const options = { - cwd: 'one/two/three', - args: ['/one/two/three/testfile.py'], - testProvider, - runTestIdsPort: 'runTestIdsPort', - pytestPort: 'pytestPort', - }; - setupSuccess(options, testProvider); - - await debugLauncher.launchDebugger(options); - - try { - debugService.verifyAll(); - } catch (ex) { - console.log(ex); - } - }); - test(`Must launch debugger with arguments ${testTitleSuffix}`, async () => { - const options = { - cwd: 'one/two/three', - args: ['/one/two/three/testfile.py', '--debug', '1'], - testProvider, - runTestIdsPort: 'runTestIdsPort', - pytestPort: 'pytestPort', - }; - setupSuccess(options, testProvider); - - await debugLauncher.launchDebugger(options); - - debugService.verifyAll(); - }); - test(`Must not launch debugger if cancelled ${testTitleSuffix}`, async () => { - debugService - .setup((d) => d.startDebugging(TypeMoq.It.isAny(), TypeMoq.It.isAny(), TypeMoq.It.isAny())) - .returns(() => { - return Promise.resolve(undefined as any); - }) - .verifiable(TypeMoq.Times.never()); - - const cancellationToken = new CancellationTokenSource(); - cancellationToken.cancel(); - const token = cancellationToken.token; - const options: LaunchOptions = { - cwd: '', - args: [], - token, - testProvider, - runTestIdsPort: 'runTestIdsPort', - pytestPort: 'pytestPort', - }; - - await expect(debugLauncher.launchDebugger(options)).to.be.eventually.equal(undefined, 'not undefined'); - - debugService.verifyAll(); - }); - test(`Must throw an exception if there are no workspaces ${testTitleSuffix}`, async () => { - getWorkspaceFoldersStub.returns(undefined); - debugService - .setup((d) => d.startDebugging(TypeMoq.It.isAny(), TypeMoq.It.isAny())) - .returns(() => { - console.log('Debugging should not start'); - return Promise.resolve(undefined as any); - }) - .verifiable(TypeMoq.Times.never()); - - const options: LaunchOptions = { - cwd: '', - args: [], - testProvider, - runTestIdsPort: 'runTestIdsPort', - pytestPort: 'pytestPort', - }; - - await expect(debugLauncher.launchDebugger(options)).to.eventually.rejectedWith('Please open a workspace'); - - debugService.verifyAll(); - }); - }); - - test('Tries launch.json first', async () => { - const options: LaunchOptions = { - cwd: 'one/two/three', - args: ['/one/two/three/testfile.py'], - 
testProvider: 'unittest', - runTestIdsPort: 'runTestIdsPort', - pytestPort: 'pytestPort', - }; - const expected = getDefaultDebugConfig(); - expected.name = 'spam'; - setupSuccess(options, 'unittest', expected, [{ name: 'spam', type: PythonDebuggerTypeName, request: 'test' }]); - - await debugLauncher.launchDebugger(options); - - debugService.verifyAll(); - }); - - test('Use cwd value in settings if exist', async () => { - unitTestSettings.setup((p) => p.cwd).returns(() => 'path/to/settings/cwd'); - const options: LaunchOptions = { - cwd: 'one/two/three', - args: ['/one/two/three/testfile.py'], - testProvider: 'unittest', - runTestIdsPort: 'runTestIdsPort', - pytestPort: 'pytestPort', - }; - const expected = getDefaultDebugConfig(); - expected.cwd = 'path/to/settings/cwd'; - const pluginPath = path.join(EXTENSION_ROOT_DIR, 'python_files'); - const pythonPath = `${pluginPath}${path.delimiter}${expected.cwd}`; - expected.env.PYTHONPATH = pythonPath; - - setupSuccess(options, 'unittest', expected); - await debugLauncher.launchDebugger(options); - - debugService.verifyAll(); - }); - - test('Full debug config', async () => { - const options: LaunchOptions = { - cwd: 'one/two/three', - args: ['/one/two/three/testfile.py'], - testProvider: 'unittest', - runTestIdsPort: 'runTestIdsPort', - pytestPort: 'pytestPort', - }; - const expected = { - name: 'my tests', - type: PythonDebuggerTypeName, - request: 'launch', - python: 'some/dir/bin/py3', - debugAdapterPython: 'some/dir/bin/py3', - debugLauncherPython: 'some/dir/bin/py3', - stopOnEntry: true, - showReturnValue: true, - console: 'integratedTerminal', - cwd: 'some/dir', - env: { - PYTHONPATH: 'one/two/three', - SPAM: 'EGGS', - TEST_RUN_PIPE: 'pytestPort', - RUN_TEST_IDS_PIPE: 'runTestIdsPort', - }, - envFile: 'some/dir/.env', - redirectOutput: false, - debugStdLib: true, - // added by LaunchConfigurationResolver: - internalConsoleOptions: 'neverOpen', - subProcess: true, - purpose: [], - }; - setupSuccess(options, 'unittest', expected, [ - { - name: 'my tests', - type: PythonDebuggerTypeName, - request: 'test', - pythonPath: expected.python, - stopOnEntry: expected.stopOnEntry, - showReturnValue: expected.showReturnValue, - console: expected.console, - cwd: expected.cwd, - env: expected.env, - envFile: expected.envFile, - redirectOutput: expected.redirectOutput, - debugStdLib: expected.debugStdLib, - }, - ]); - - await debugLauncher.launchDebugger(options); - - debugService.verifyAll(); - }); - - test('Uses first entry', async () => { - const options: LaunchOptions = { - cwd: 'one/two/three', - args: ['/one/two/three/testfile.py'], - testProvider: 'unittest', - runTestIdsPort: 'runTestIdsPort', - pytestPort: 'pytestPort', - }; - const expected = getDefaultDebugConfig(); - expected.name = 'spam1'; - setupSuccess(options, 'unittest', expected, [ - { name: 'spam1', type: PythonDebuggerTypeName, request: 'test' }, - { name: 'spam2', type: PythonDebuggerTypeName, request: 'test' }, - { name: 'spam3', type: PythonDebuggerTypeName, request: 'test' }, - ]); - - await debugLauncher.launchDebugger(options); - - debugService.verifyAll(); - }); - - test('Handles bad JSON', async () => { - const options: LaunchOptions = { - cwd: 'one/two/three', - args: ['/one/two/three/testfile.py'], - testProvider: 'unittest', - runTestIdsPort: 'runTestIdsPort', - pytestPort: 'pytestPort', - }; - const expected = getDefaultDebugConfig(); - setupSuccess(options, 'unittest', expected, ']'); - - await debugLauncher.launchDebugger(options); - - debugService.verifyAll(); - }); - 
- const malformedFiles = [ - '// test 1', - '// test 2 \n\ - { \n\ - "name": "spam", \n\ - "type": "debugpy", \n\ - "request": "test" \n\ - } \n\ - ', - '// test 3 \n\ - [ \n\ - { \n\ - "name": "spam", \n\ - "type": "debugpy", \n\ - "request": "test" \n\ - } \n\ - ] \n\ - ', - '// test 4 \n\ - { \n\ - "configurations": [ \n\ - { \n\ - "name": "spam", \n\ - "type": "debugpy", \n\ - "request": "test" \n\ - } \n\ - ] \n\ - } \n\ - ', - ]; - for (const text of malformedFiles) { - const testID = text.split('\n')[0].substring(3).trim(); - test(`Handles malformed launch.json - ${testID}`, async () => { - const options: LaunchOptions = { - cwd: 'one/two/three', - args: ['/one/two/three/testfile.py'], - testProvider: 'unittest', - runTestIdsPort: 'runTestIdsPort', - pytestPort: 'pytestPort', - }; - const expected = getDefaultDebugConfig(); - setupSuccess(options, 'unittest', expected, text); - - await debugLauncher.launchDebugger(options); - - debugService.verifyAll(); - }); - } - - test('Handles bad debug config items', async () => { - const options: LaunchOptions = { - cwd: 'one/two/three', - args: ['/one/two/three/testfile.py'], - testProvider: 'unittest', - runTestIdsPort: 'runTestIdsPort', - pytestPort: 'pytestPort', - }; - const expected = getDefaultDebugConfig(); - - setupSuccess(options, 'unittest', expected, [ - {} as DebugConfiguration, - { name: 'spam1' } as DebugConfiguration, - { name: 'spam2', type: PythonDebuggerTypeName } as DebugConfiguration, - { name: 'spam3', request: 'test' } as DebugConfiguration, - { type: PythonDebuggerTypeName } as DebugConfiguration, - { type: PythonDebuggerTypeName, request: 'test' } as DebugConfiguration, - { request: 'test' } as DebugConfiguration, - ]); - - await debugLauncher.launchDebugger(options); - - debugService.verifyAll(); - }); - - test('Handles non-python debug configs', async () => { - const options: LaunchOptions = { - cwd: 'one/two/three', - args: ['/one/two/three/testfile.py'], - testProvider: 'unittest', - runTestIdsPort: 'runTestIdsPort', - pytestPort: 'pytestPort', - }; - const expected = getDefaultDebugConfig(); - setupSuccess(options, 'unittest', expected, [{ name: 'foo', type: 'other', request: 'bar' }]); - - await debugLauncher.launchDebugger(options); - - debugService.verifyAll(); - }); - - test('Handles bogus python debug configs', async () => { - const options: LaunchOptions = { - cwd: 'one/two/three', - args: ['/one/two/three/testfile.py'], - testProvider: 'unittest', - runTestIdsPort: 'runTestIdsPort', - pytestPort: 'pytestPort', - }; - const expected = getDefaultDebugConfig(); - setupSuccess(options, 'unittest', expected, [{ name: 'spam', type: PythonDebuggerTypeName, request: 'bogus' }]); - - await debugLauncher.launchDebugger(options); - - debugService.verifyAll(); - }); - - test('Handles non-test debug config', async () => { - const options: LaunchOptions = { - cwd: 'one/two/three', - args: ['/one/two/three/testfile.py'], - testProvider: 'unittest', - runTestIdsPort: 'runTestIdsPort', - pytestPort: 'pytestPort', - }; - const expected = getDefaultDebugConfig(); - setupSuccess(options, 'unittest', expected, [ - { name: 'spam', type: PythonDebuggerTypeName, request: 'launch' }, - { name: 'spam', type: PythonDebuggerTypeName, request: 'attach' }, - ]); - - await debugLauncher.launchDebugger(options); - - debugService.verifyAll(); - }); - - test('Handles mixed debug config', async () => { - const options: LaunchOptions = { - cwd: 'one/two/three', - args: ['/one/two/three/testfile.py'], - testProvider: 'unittest', - 
runTestIdsPort: 'runTestIdsPort', - pytestPort: 'pytestPort', - }; - const expected = getDefaultDebugConfig(); - expected.name = 'spam2'; - setupSuccess(options, 'unittest', expected, [ - { name: 'foo1', type: 'other', request: 'bar' }, - { name: 'foo2', type: 'other', request: 'bar' }, - { name: 'spam1', type: PythonDebuggerTypeName, request: 'launch' }, - { name: 'spam2', type: PythonDebuggerTypeName, request: 'test' }, - { name: 'spam3', type: PythonDebuggerTypeName, request: 'attach' }, - { name: 'xyz', type: 'another', request: 'abc' }, - ]); - - await debugLauncher.launchDebugger(options); - - debugService.verifyAll(); - }); - - test('Handles comments', async () => { - const options: LaunchOptions = { - cwd: 'one/two/three', - args: ['/one/two/three/testfile.py'], - testProvider: 'unittest', - runTestIdsPort: 'runTestIdsPort', - pytestPort: 'pytestPort', - }; - const expected = getDefaultDebugConfig(); - expected.name = 'spam'; - expected.stopOnEntry = true; - setupSuccess( - options, - 'unittest', - expected, - ' \n\ - { \n\ - "version": "0.1.0", \n\ - "configurations": [ \n\ - // my thing \n\ - { \n\ - // "test" debug config \n\ - "name": "spam", /* non-empty */ \n\ - "type": "debugpy", /* must be "python" */ \n\ - "request": "test", /* must be "test" */ \n\ - // extra stuff here: \n\ - "stopOnEntry": true \n\ - } \n\ - ] \n\ - } \n\ - ', - ); - - await debugLauncher.launchDebugger(options); - - debugService.verifyAll(); - }); - test('Ensure trailing commands in JSON are handled', async () => { - const workspaceFolder = { name: 'abc', index: 0, uri: Uri.file(__filename) }; - const filename = path.join(workspaceFolder.uri.fsPath, '.vscode', 'launch.json'); - const jsonc = '{"version":"1234", "configurations":[1,2,],}'; - pathExistsStub.resolves(true); - readFileStub.withArgs(filename).resolves(jsonc); - - const configs = await debugLauncher.readAllDebugConfigs(workspaceFolder); - - expect(configs).to.be.deep.equal([1, 2]); - }); - test('Ensure empty configuration is returned when launch.json cannot be parsed', async () => { - const workspaceFolder = { name: 'abc', index: 0, uri: Uri.file(__filename) }; - const filename = path.join(workspaceFolder.uri.fsPath, '.vscode', 'launch.json'); - const jsonc = '{"version":"1234"'; - - pathExistsStub.resolves(true); - readFileStub.withArgs(filename).resolves(jsonc); - - const configs = await debugLauncher.readAllDebugConfigs(workspaceFolder); - - expect(configs).to.be.deep.equal([]); - }); -}); +// // Copyright (c) Microsoft Corporation. All rights reserved. +// // Licensed under the MIT License. 
+ +// 'use strict'; + +// import { expect, use } from 'chai'; +// import * as chaiAsPromised from 'chai-as-promised'; +// import * as path from 'path'; +// import * as sinon from 'sinon'; +// import * as TypeMoq from 'typemoq'; +// import * as fs from '../../../client/common/platform/fs-paths'; +// import * as workspaceApis from '../../../client/common/vscodeApis/workspaceApis'; +// import { CancellationTokenSource, DebugConfiguration, DebugSession, Uri, WorkspaceFolder } from 'vscode'; +// import { IInvalidPythonPathInDebuggerService } from '../../../client/application/diagnostics/types'; +// import { IApplicationShell, IDebugService } from '../../../client/common/application/types'; +// import { EXTENSION_ROOT_DIR } from '../../../client/common/constants'; +// import '../../../client/common/extensions'; +// import { IConfigurationService, IPythonSettings } from '../../../client/common/types'; +// import { PythonDebuggerTypeName } from '../../../client/debugger/constants'; +// import { IDebugEnvironmentVariablesService } from '../../../client/debugger/extension/configuration/resolvers/helper'; +// import { LaunchConfigurationResolver } from '../../../client/debugger/extension/configuration/resolvers/launch'; +// import { DebugOptions } from '../../../client/debugger/types'; +// import { IInterpreterService } from '../../../client/interpreter/contracts'; +// import { IServiceContainer } from '../../../client/ioc/types'; +// import { PythonEnvironment } from '../../../client/pythonEnvironments/info'; +// import { DebugLauncher } from '../../../client/testing/common/debugLauncher'; +// import { LaunchOptions } from '../../../client/testing/common/types'; +// import { ITestingSettings } from '../../../client/testing/configuration/types'; +// import { TestProvider } from '../../../client/testing/types'; +// import { isOs, OSType } from '../../common'; +// import { IEnvironmentActivationService } from '../../../client/interpreter/activation/types'; +// import { createDeferred } from '../../../client/common/utils/async'; + +// use(chaiAsPromised.default); + +// suite('Unit Tests - Debug Launcher', () => { +// let serviceContainer: TypeMoq.IMock; +// let unitTestSettings: TypeMoq.IMock; +// let debugLauncher: DebugLauncher; +// let debugService: TypeMoq.IMock; +// let settings: TypeMoq.IMock; +// let debugEnvHelper: TypeMoq.IMock; +// let interpreterService: TypeMoq.IMock; +// let environmentActivationService: TypeMoq.IMock; +// let getWorkspaceFolderStub: sinon.SinonStub; +// let getWorkspaceFoldersStub: sinon.SinonStub; +// let pathExistsStub: sinon.SinonStub; +// let readFileStub: sinon.SinonStub; +// const envVars = { FOO: 'BAR' }; + +// setup(async () => { +// environmentActivationService = TypeMoq.Mock.ofType(); +// environmentActivationService +// .setup((e) => e.getActivatedEnvironmentVariables(TypeMoq.It.isAny())) +// .returns(() => Promise.resolve(envVars)); +// interpreterService = TypeMoq.Mock.ofType(); +// serviceContainer = TypeMoq.Mock.ofType(undefined, TypeMoq.MockBehavior.Strict); +// const configService = TypeMoq.Mock.ofType(undefined, TypeMoq.MockBehavior.Strict); +// serviceContainer +// .setup((c) => c.get(TypeMoq.It.isValue(IConfigurationService))) +// .returns(() => configService.object); + +// debugService = TypeMoq.Mock.ofType(undefined, TypeMoq.MockBehavior.Strict); +// serviceContainer.setup((c) => c.get(TypeMoq.It.isValue(IDebugService))).returns(() => debugService.object); +// getWorkspaceFolderStub = sinon.stub(workspaceApis, 'getWorkspaceFolder'); +// 
getWorkspaceFoldersStub = sinon.stub(workspaceApis, 'getWorkspaceFolders'); +// pathExistsStub = sinon.stub(fs, 'pathExists'); +// readFileStub = sinon.stub(fs, 'readFile'); + +// const appShell = TypeMoq.Mock.ofType(undefined, TypeMoq.MockBehavior.Strict); +// appShell.setup((a) => a.showErrorMessage(TypeMoq.It.isAny())).returns(() => Promise.resolve(undefined)); +// serviceContainer.setup((c) => c.get(TypeMoq.It.isValue(IApplicationShell))).returns(() => appShell.object); + +// settings = TypeMoq.Mock.ofType(undefined, TypeMoq.MockBehavior.Strict); +// configService.setup((c) => c.getSettings(TypeMoq.It.isAny())).returns(() => settings.object); + +// unitTestSettings = TypeMoq.Mock.ofType(); +// settings.setup((p) => p.testing).returns(() => unitTestSettings.object); + +// debugEnvHelper = TypeMoq.Mock.ofType(undefined, TypeMoq.MockBehavior.Strict); +// serviceContainer +// .setup((c) => c.get(TypeMoq.It.isValue(IDebugEnvironmentVariablesService))) +// .returns(() => debugEnvHelper.object); + +// debugLauncher = new DebugLauncher(serviceContainer.object, getNewResolver(configService.object)); +// }); + +// teardown(() => { +// sinon.restore(); +// }); + +// function getNewResolver(configService: IConfigurationService) { +// const validator = TypeMoq.Mock.ofType( +// undefined, +// TypeMoq.MockBehavior.Strict, +// ); +// validator +// .setup((v) => v.validatePythonPath(TypeMoq.It.isAny(), TypeMoq.It.isAny(), TypeMoq.It.isAny())) +// .returns(() => Promise.resolve(true)); +// return new LaunchConfigurationResolver( +// validator.object, +// configService, +// debugEnvHelper.object, +// interpreterService.object, +// environmentActivationService.object, +// ); +// } +// function setupDebugManager( +// workspaceFolder: WorkspaceFolder, +// expected: DebugConfiguration, +// testProvider: TestProvider, +// ) { +// interpreterService +// .setup((i) => i.getActiveInterpreter(TypeMoq.It.isAny())) +// .returns(() => Promise.resolve(({ path: 'python' } as unknown) as PythonEnvironment)); +// settings.setup((p) => p.envFile).returns(() => __filename); +// const args = expected.args; +// const debugArgs = testProvider === 'unittest' ? 
args.filter((item: string) => item !== '--debug') : args; +// expected.args = debugArgs; + +// debugEnvHelper +// .setup((x) => x.getEnvironmentVariables(TypeMoq.It.isAny(), TypeMoq.It.isAny())) +// .returns(() => Promise.resolve(expected.env)); + +// const deferred = createDeferred(); + +// debugService +// .setup((d) => +// d.startDebugging(TypeMoq.It.isValue(workspaceFolder), TypeMoq.It.isValue(expected), undefined), +// ) +// .returns((_wspc: WorkspaceFolder, _expectedParam: DebugConfiguration) => { +// deferred.resolve(); +// return Promise.resolve(undefined as any); +// }); + +// // create a fake debug session that the debug service will return on terminate +// const fakeDebugSession = TypeMoq.Mock.ofType(); +// fakeDebugSession.setup((ds) => ds.id).returns(() => 'id-val'); +// const debugSessionInstance = fakeDebugSession.object; + +// debugService +// .setup((d) => d.activeDebugSession) +// .returns(() => debugSessionInstance) +// .verifiable(TypeMoq.Times.once()); + +// debugService +// .setup((d) => d.onDidTerminateDebugSession(TypeMoq.It.isAny())) +// .returns((callback) => { +// deferred.promise.then(() => { +// callback(debugSessionInstance); +// }); +// return undefined as any; +// }) +// .verifiable(TypeMoq.Times.once()); +// } +// function createWorkspaceFolder(folderPath: string): WorkspaceFolder { +// return { +// index: 0, +// name: path.basename(folderPath), +// uri: Uri.file(folderPath), +// }; +// } +// function getTestLauncherScript(testProvider: TestProvider, pythonTestAdapterRewriteExperiment?: boolean) { +// if (!pythonTestAdapterRewriteExperiment) { +// switch (testProvider) { +// case 'unittest': { +// return path.join(EXTENSION_ROOT_DIR, 'python_files', 'unittestadapter', 'execution.py'); +// } +// case 'pytest': { +// return path.join(EXTENSION_ROOT_DIR, 'python_files', 'vscode_pytest', 'run_pytest_script.py'); +// } +// default: { +// throw new Error(`Unknown test provider '${testProvider}'`); +// } +// } +// } +// } + +// function getDefaultDebugConfig(): DebugConfiguration { +// return { +// name: 'Debug Unit Test', +// type: PythonDebuggerTypeName, +// request: 'launch', +// console: 'internalConsole', +// env: {}, +// envFile: __filename, +// stopOnEntry: false, +// showReturnValue: true, +// redirectOutput: true, +// debugStdLib: false, +// subProcess: true, +// purpose: [], +// }; +// } +// function setupSuccess( +// options: LaunchOptions, +// testProvider: TestProvider, +// expected?: DebugConfiguration, +// debugConfigs?: string | DebugConfiguration[], +// ) { +// const testLaunchScript = getTestLauncherScript(testProvider, false); + +// const workspaceFolders = [createWorkspaceFolder(options.cwd), createWorkspaceFolder('five/six/seven')]; +// getWorkspaceFoldersStub.returns(workspaceFolders); +// getWorkspaceFolderStub.returns(workspaceFolders[0]); + +// if (!debugConfigs) { +// pathExistsStub.resolves(false); +// } else { +// pathExistsStub.resolves(true); + +// if (typeof debugConfigs !== 'string') { +// debugConfigs = JSON.stringify({ +// version: '0.1.0', +// configurations: debugConfigs, +// }); +// } +// readFileStub.resolves(debugConfigs as string); +// } + +// if (!expected) { +// expected = getDefaultDebugConfig(); +// } +// expected.rules = [{ path: path.join(EXTENSION_ROOT_DIR, 'python_files'), include: false }]; +// expected.program = testLaunchScript; +// expected.args = options.args; + +// if (!expected.cwd) { +// expected.cwd = workspaceFolders[0].uri.fsPath; +// } +// const pluginPath = path.join(EXTENSION_ROOT_DIR, 'python_files'); 
+// const pythonPath = `${pluginPath}${path.delimiter}${expected.cwd}`; +// expected.env.PYTHONPATH = pythonPath; +// expected.env.TEST_RUN_PIPE = 'pytestPort'; +// expected.env.RUN_TEST_IDS_PIPE = 'runTestIdsPort'; + +// // added by LaunchConfigurationResolver: +// if (!expected.python) { +// expected.python = 'python'; +// } +// if (!expected.clientOS) { +// expected.clientOS = isOs(OSType.Windows) ? 'windows' : 'unix'; +// } +// if (!expected.debugAdapterPython) { +// expected.debugAdapterPython = 'python'; +// } +// if (!expected.debugLauncherPython) { +// expected.debugLauncherPython = 'python'; +// } +// expected.workspaceFolder = workspaceFolders[0].uri.fsPath; +// expected.debugOptions = []; +// if (expected.stopOnEntry) { +// expected.debugOptions.push(DebugOptions.StopOnEntry); +// } +// if (expected.showReturnValue) { +// expected.debugOptions.push(DebugOptions.ShowReturnValue); +// } +// if (expected.redirectOutput) { +// expected.debugOptions.push(DebugOptions.RedirectOutput); +// } +// if (expected.subProcess) { +// expected.debugOptions.push(DebugOptions.SubProcess); +// } +// if (isOs(OSType.Windows)) { +// expected.debugOptions.push(DebugOptions.FixFilePathCase); +// } + +// setupDebugManager(workspaceFolders[0], expected, testProvider); +// } + +// const testProviders: TestProvider[] = ['pytest', 'unittest']; + +// testProviders.forEach((testProvider) => { +// const testTitleSuffix = `(Test Framework '${testProvider}')`; + +// test(`Must launch debugger ${testTitleSuffix}`, async () => { +// const options = { +// cwd: 'one/two/three', +// args: ['/one/two/three/testfile.py'], +// testProvider, +// runTestIdsPort: 'runTestIdsPort', +// pytestPort: 'pytestPort', +// }; +// setupSuccess(options, testProvider); + +// await debugLauncher.launchDebugger(options); + +// try { +// debugService.verifyAll(); +// } catch (ex) { +// console.log(ex); +// } +// }); +// test(`Must launch debugger with arguments ${testTitleSuffix}`, async () => { +// const options = { +// cwd: 'one/two/three', +// args: ['/one/two/three/testfile.py', '--debug', '1'], +// testProvider, +// runTestIdsPort: 'runTestIdsPort', +// pytestPort: 'pytestPort', +// }; +// setupSuccess(options, testProvider); + +// await debugLauncher.launchDebugger(options); + +// debugService.verifyAll(); +// }); +// test(`Must not launch debugger if cancelled ${testTitleSuffix}`, async () => { +// debugService +// .setup((d) => d.startDebugging(TypeMoq.It.isAny(), TypeMoq.It.isAny(), TypeMoq.It.isAny())) +// .returns(() => { +// return Promise.resolve(undefined as any); +// }) +// .verifiable(TypeMoq.Times.never()); + +// const cancellationToken = new CancellationTokenSource(); +// cancellationToken.cancel(); +// const token = cancellationToken.token; +// const options: LaunchOptions = { +// cwd: '', +// args: [], +// token, +// testProvider, +// runTestIdsPort: 'runTestIdsPort', +// pytestPort: 'pytestPort', +// }; + +// await expect(debugLauncher.launchDebugger(options)).to.be.eventually.equal(undefined, 'not undefined'); + +// debugService.verifyAll(); +// }); +// test(`Must throw an exception if there are no workspaces ${testTitleSuffix}`, async () => { +// getWorkspaceFoldersStub.returns(undefined); +// debugService +// .setup((d) => d.startDebugging(TypeMoq.It.isAny(), TypeMoq.It.isAny())) +// .returns(() => { +// console.log('Debugging should not start'); +// return Promise.resolve(undefined as any); +// }) +// .verifiable(TypeMoq.Times.never()); + +// const options: LaunchOptions = { +// cwd: '', +// args: [], +// 
testProvider, +// runTestIdsPort: 'runTestIdsPort', +// pytestPort: 'pytestPort', +// }; + +// await expect(debugLauncher.launchDebugger(options)).to.eventually.rejectedWith('Please open a workspace'); + +// debugService.verifyAll(); +// }); +// }); + +// test('Tries launch.json first', async () => { +// const options: LaunchOptions = { +// cwd: 'one/two/three', +// args: ['/one/two/three/testfile.py'], +// testProvider: 'unittest', +// runTestIdsPort: 'runTestIdsPort', +// pytestPort: 'pytestPort', +// }; +// const expected = getDefaultDebugConfig(); +// expected.name = 'spam'; +// setupSuccess(options, 'unittest', expected, [{ name: 'spam', type: PythonDebuggerTypeName, request: 'test' }]); + +// await debugLauncher.launchDebugger(options); + +// debugService.verifyAll(); +// }); + +// test('Use cwd value in settings if exist', async () => { +// unitTestSettings.setup((p) => p.cwd).returns(() => 'path/to/settings/cwd'); +// const options: LaunchOptions = { +// cwd: 'one/two/three', +// args: ['/one/two/three/testfile.py'], +// testProvider: 'unittest', +// runTestIdsPort: 'runTestIdsPort', +// pytestPort: 'pytestPort', +// }; +// const expected = getDefaultDebugConfig(); +// expected.cwd = 'path/to/settings/cwd'; +// const pluginPath = path.join(EXTENSION_ROOT_DIR, 'python_files'); +// const pythonPath = `${pluginPath}${path.delimiter}${expected.cwd}`; +// expected.env.PYTHONPATH = pythonPath; + +// setupSuccess(options, 'unittest', expected); +// await debugLauncher.launchDebugger(options); + +// debugService.verifyAll(); +// }); + +// test('Full debug config', async () => { +// const options: LaunchOptions = { +// cwd: 'one/two/three', +// args: ['/one/two/three/testfile.py'], +// testProvider: 'unittest', +// runTestIdsPort: 'runTestIdsPort', +// pytestPort: 'pytestPort', +// }; +// const expected = { +// name: 'my tests', +// type: PythonDebuggerTypeName, +// request: 'launch', +// python: 'some/dir/bin/py3', +// debugAdapterPython: 'some/dir/bin/py3', +// debugLauncherPython: 'some/dir/bin/py3', +// stopOnEntry: true, +// showReturnValue: true, +// console: 'integratedTerminal', +// cwd: 'some/dir', +// env: { +// PYTHONPATH: 'one/two/three', +// SPAM: 'EGGS', +// TEST_RUN_PIPE: 'pytestPort', +// RUN_TEST_IDS_PIPE: 'runTestIdsPort', +// }, +// envFile: 'some/dir/.env', +// redirectOutput: false, +// debugStdLib: true, +// // added by LaunchConfigurationResolver: +// internalConsoleOptions: 'neverOpen', +// subProcess: true, +// purpose: [], +// }; +// setupSuccess(options, 'unittest', expected, [ +// { +// name: 'my tests', +// type: PythonDebuggerTypeName, +// request: 'test', +// pythonPath: expected.python, +// stopOnEntry: expected.stopOnEntry, +// showReturnValue: expected.showReturnValue, +// console: expected.console, +// cwd: expected.cwd, +// env: expected.env, +// envFile: expected.envFile, +// redirectOutput: expected.redirectOutput, +// debugStdLib: expected.debugStdLib, +// }, +// ]); + +// await debugLauncher.launchDebugger(options); + +// debugService.verifyAll(); +// }); + +// test('Uses first entry', async () => { +// const options: LaunchOptions = { +// cwd: 'one/two/three', +// args: ['/one/two/three/testfile.py'], +// testProvider: 'unittest', +// runTestIdsPort: 'runTestIdsPort', +// pytestPort: 'pytestPort', +// }; +// const expected = getDefaultDebugConfig(); +// expected.name = 'spam1'; +// setupSuccess(options, 'unittest', expected, [ +// { name: 'spam1', type: PythonDebuggerTypeName, request: 'test' }, +// { name: 'spam2', type: PythonDebuggerTypeName, 
request: 'test' }, +// { name: 'spam3', type: PythonDebuggerTypeName, request: 'test' }, +// ]); + +// await debugLauncher.launchDebugger(options); + +// debugService.verifyAll(); +// }); + +// test('Handles bad JSON', async () => { +// const options: LaunchOptions = { +// cwd: 'one/two/three', +// args: ['/one/two/three/testfile.py'], +// testProvider: 'unittest', +// runTestIdsPort: 'runTestIdsPort', +// pytestPort: 'pytestPort', +// }; +// const expected = getDefaultDebugConfig(); +// setupSuccess(options, 'unittest', expected, ']'); + +// await debugLauncher.launchDebugger(options); + +// debugService.verifyAll(); +// }); + +// const malformedFiles = [ +// '// test 1', +// '// test 2 \n\ +// { \n\ +// "name": "spam", \n\ +// "type": "debugpy", \n\ +// "request": "test" \n\ +// } \n\ +// ', +// '// test 3 \n\ +// [ \n\ +// { \n\ +// "name": "spam", \n\ +// "type": "debugpy", \n\ +// "request": "test" \n\ +// } \n\ +// ] \n\ +// ', +// '// test 4 \n\ +// { \n\ +// "configurations": [ \n\ +// { \n\ +// "name": "spam", \n\ +// "type": "debugpy", \n\ +// "request": "test" \n\ +// } \n\ +// ] \n\ +// } \n\ +// ', +// ]; +// for (const text of malformedFiles) { +// const testID = text.split('\n')[0].substring(3).trim(); +// test(`Handles malformed launch.json - ${testID}`, async () => { +// const options: LaunchOptions = { +// cwd: 'one/two/three', +// args: ['/one/two/three/testfile.py'], +// testProvider: 'unittest', +// runTestIdsPort: 'runTestIdsPort', +// pytestPort: 'pytestPort', +// }; +// const expected = getDefaultDebugConfig(); +// setupSuccess(options, 'unittest', expected, text); + +// await debugLauncher.launchDebugger(options); + +// debugService.verifyAll(); +// }); +// } + +// test('Handles bad debug config items', async () => { +// const options: LaunchOptions = { +// cwd: 'one/two/three', +// args: ['/one/two/three/testfile.py'], +// testProvider: 'unittest', +// runTestIdsPort: 'runTestIdsPort', +// pytestPort: 'pytestPort', +// }; +// const expected = getDefaultDebugConfig(); + +// setupSuccess(options, 'unittest', expected, [ +// {} as DebugConfiguration, +// { name: 'spam1' } as DebugConfiguration, +// { name: 'spam2', type: PythonDebuggerTypeName } as DebugConfiguration, +// { name: 'spam3', request: 'test' } as DebugConfiguration, +// { type: PythonDebuggerTypeName } as DebugConfiguration, +// { type: PythonDebuggerTypeName, request: 'test' } as DebugConfiguration, +// { request: 'test' } as DebugConfiguration, +// ]); + +// await debugLauncher.launchDebugger(options); + +// debugService.verifyAll(); +// }); + +// test('Handles non-python debug configs', async () => { +// const options: LaunchOptions = { +// cwd: 'one/two/three', +// args: ['/one/two/three/testfile.py'], +// testProvider: 'unittest', +// runTestIdsPort: 'runTestIdsPort', +// pytestPort: 'pytestPort', +// }; +// const expected = getDefaultDebugConfig(); +// setupSuccess(options, 'unittest', expected, [{ name: 'foo', type: 'other', request: 'bar' }]); + +// await debugLauncher.launchDebugger(options); + +// debugService.verifyAll(); +// }); + +// test('Handles bogus python debug configs', async () => { +// const options: LaunchOptions = { +// cwd: 'one/two/three', +// args: ['/one/two/three/testfile.py'], +// testProvider: 'unittest', +// runTestIdsPort: 'runTestIdsPort', +// pytestPort: 'pytestPort', +// }; +// const expected = getDefaultDebugConfig(); +// setupSuccess(options, 'unittest', expected, [{ name: 'spam', type: PythonDebuggerTypeName, request: 'bogus' }]); + +// await 
debugLauncher.launchDebugger(options); + +// debugService.verifyAll(); +// }); + +// test('Handles non-test debug config', async () => { +// const options: LaunchOptions = { +// cwd: 'one/two/three', +// args: ['/one/two/three/testfile.py'], +// testProvider: 'unittest', +// runTestIdsPort: 'runTestIdsPort', +// pytestPort: 'pytestPort', +// }; +// const expected = getDefaultDebugConfig(); +// setupSuccess(options, 'unittest', expected, [ +// { name: 'spam', type: PythonDebuggerTypeName, request: 'launch' }, +// { name: 'spam', type: PythonDebuggerTypeName, request: 'attach' }, +// ]); + +// await debugLauncher.launchDebugger(options); + +// debugService.verifyAll(); +// }); + +// test('Handles mixed debug config', async () => { +// const options: LaunchOptions = { +// cwd: 'one/two/three', +// args: ['/one/two/three/testfile.py'], +// testProvider: 'unittest', +// runTestIdsPort: 'runTestIdsPort', +// pytestPort: 'pytestPort', +// }; +// const expected = getDefaultDebugConfig(); +// expected.name = 'spam2'; +// setupSuccess(options, 'unittest', expected, [ +// { name: 'foo1', type: 'other', request: 'bar' }, +// { name: 'foo2', type: 'other', request: 'bar' }, +// { name: 'spam1', type: PythonDebuggerTypeName, request: 'launch' }, +// { name: 'spam2', type: PythonDebuggerTypeName, request: 'test' }, +// { name: 'spam3', type: PythonDebuggerTypeName, request: 'attach' }, +// { name: 'xyz', type: 'another', request: 'abc' }, +// ]); + +// await debugLauncher.launchDebugger(options); + +// debugService.verifyAll(); +// }); + +// test('Handles comments', async () => { +// const options: LaunchOptions = { +// cwd: 'one/two/three', +// args: ['/one/two/three/testfile.py'], +// testProvider: 'unittest', +// runTestIdsPort: 'runTestIdsPort', +// pytestPort: 'pytestPort', +// }; +// const expected = getDefaultDebugConfig(); +// expected.name = 'spam'; +// expected.stopOnEntry = true; +// setupSuccess( +// options, +// 'unittest', +// expected, +// ' \n\ +// { \n\ +// "version": "0.1.0", \n\ +// "configurations": [ \n\ +// // my thing \n\ +// { \n\ +// // "test" debug config \n\ +// "name": "spam", /* non-empty */ \n\ +// "type": "debugpy", /* must be "python" */ \n\ +// "request": "test", /* must be "test" */ \n\ +// // extra stuff here: \n\ +// "stopOnEntry": true \n\ +// } \n\ +// ] \n\ +// } \n\ +// ', +// ); + +// await debugLauncher.launchDebugger(options); + +// debugService.verifyAll(); +// }); +// test('Ensure trailing commands in JSON are handled', async () => { +// const workspaceFolder = { name: 'abc', index: 0, uri: Uri.file(__filename) }; +// const filename = path.join(workspaceFolder.uri.fsPath, '.vscode', 'launch.json'); +// const jsonc = '{"version":"1234", "configurations":[1,2,],}'; +// pathExistsStub.resolves(true); +// readFileStub.withArgs(filename).resolves(jsonc); + +// const configs = await debugLauncher.readAllDebugConfigs(workspaceFolder); + +// expect(configs).to.be.deep.equal([1, 2]); +// }); +// test('Ensure empty configuration is returned when launch.json cannot be parsed', async () => { +// const workspaceFolder = { name: 'abc', index: 0, uri: Uri.file(__filename) }; +// const filename = path.join(workspaceFolder.uri.fsPath, '.vscode', 'launch.json'); +// const jsonc = '{"version":"1234"'; + +// pathExistsStub.resolves(true); +// readFileStub.withArgs(filename).resolves(jsonc); + +// const configs = await debugLauncher.readAllDebugConfigs(workspaceFolder); + +// expect(configs).to.be.deep.equal([]); +// }); + +// // ===== PROJECT-BASED DEBUG SESSION TESTS ===== + +// 
suite('Project-based debug sessions', () => { +// function setupForProjectTests(options: LaunchOptions) { +// interpreterService +// .setup((i) => i.getActiveInterpreter(TypeMoq.It.isAny())) +// .returns(() => Promise.resolve(({ path: 'python' } as unknown) as PythonEnvironment)); +// settings.setup((p) => p.envFile).returns(() => __filename); + +// debugEnvHelper +// .setup((x) => x.getEnvironmentVariables(TypeMoq.It.isAny(), TypeMoq.It.isAny())) +// .returns(() => Promise.resolve({})); + +// const workspaceFolders = [ +// { index: 0, name: 'test', uri: Uri.file(options.cwd) }, +// ]; +// getWorkspaceFoldersStub.returns(workspaceFolders); +// getWorkspaceFolderStub.returns(workspaceFolders[0]); +// pathExistsStub.resolves(false); +// } + +// test('should use debugSessionName in config name when provided', async () => { +// const options: LaunchOptions = { +// cwd: 'one/two/three', +// args: ['/one/two/three/testfile.py'], +// testProvider: 'pytest', +// runTestIdsPort: 'runTestIdsPort', +// pytestPort: 'pytestPort', +// debugSessionName: 'myproject (Python 3.11)', +// }; + +// setupForProjectTests(options); + +// let capturedConfig: DebugConfiguration | undefined; + +// debugService +// .setup((d) => d.startDebugging(TypeMoq.It.isAny(), TypeMoq.It.isAny(), TypeMoq.It.isAny())) +// .callback((_, config) => { +// capturedConfig = config; +// }) +// .returns(() => Promise.resolve(true)); + +// debugService +// .setup((d) => d.onDidStartDebugSession(TypeMoq.It.isAny())) +// .returns(() => ({ dispose: () => {} })); + +// debugService +// .setup((d) => d.onDidTerminateDebugSession(TypeMoq.It.isAny())) +// .returns((callback) => { +// // Immediately terminate with a matching session +// setTimeout(() => { +// if (capturedConfig) { +// callback({ +// id: 'test-session-id', +// configuration: capturedConfig, +// } as DebugSession); +// } +// }, 10); +// return { dispose: () => {} }; +// }); + +// await debugLauncher.launchDebugger(options); + +// expect(capturedConfig).to.not.be.undefined; +// expect(capturedConfig!.name).to.equal('Debug Tests: myproject (Python 3.11)'); +// }); + +// test('should use pythonPath from options when provided', async () => { +// const options: LaunchOptions = { +// cwd: 'one/two/three', +// args: ['/one/two/three/testfile.py'], +// testProvider: 'pytest', +// runTestIdsPort: 'runTestIdsPort', +// pytestPort: 'pytestPort', +// pythonPath: '/custom/python/interpreter', +// }; + +// setupForProjectTests(options); + +// let capturedConfig: DebugConfiguration | undefined; + +// debugService +// .setup((d) => d.startDebugging(TypeMoq.It.isAny(), TypeMoq.It.isAny(), TypeMoq.It.isAny())) +// .callback((_, config) => { +// capturedConfig = config; +// }) +// .returns(() => Promise.resolve(true)); + +// debugService +// .setup((d) => d.onDidStartDebugSession(TypeMoq.It.isAny())) +// .returns(() => ({ dispose: () => {} })); + +// debugService +// .setup((d) => d.onDidTerminateDebugSession(TypeMoq.It.isAny())) +// .returns((callback) => { +// setTimeout(() => { +// if (capturedConfig) { +// callback({ +// id: 'test-session-id', +// configuration: capturedConfig, +// } as DebugSession); +// } +// }, 10); +// return { dispose: () => {} }; +// }); + +// await debugLauncher.launchDebugger(options); + +// expect(capturedConfig).to.not.be.undefined; +// expect(capturedConfig!.python).to.equal('/custom/python/interpreter'); +// }); + +// test('should add unique session marker to launch config', async () => { +// const options: LaunchOptions = { +// cwd: 'one/two/three', +// args: 
['/one/two/three/testfile.py'], +// testProvider: 'pytest', +// runTestIdsPort: 'runTestIdsPort', +// pytestPort: 'pytestPort', +// }; + +// setupForProjectTests(options); + +// let capturedConfig: DebugConfiguration | undefined; + +// debugService +// .setup((d) => d.startDebugging(TypeMoq.It.isAny(), TypeMoq.It.isAny(), TypeMoq.It.isAny())) +// .callback((_, config) => { +// capturedConfig = config; +// }) +// .returns(() => Promise.resolve(true)); + +// debugService +// .setup((d) => d.onDidStartDebugSession(TypeMoq.It.isAny())) +// .returns(() => ({ dispose: () => {} })); + +// debugService +// .setup((d) => d.onDidTerminateDebugSession(TypeMoq.It.isAny())) +// .returns((callback) => { +// setTimeout(() => { +// if (capturedConfig) { +// callback({ +// id: 'test-session-id', +// configuration: capturedConfig, +// } as DebugSession); +// } +// }, 10); +// return { dispose: () => {} }; +// }); + +// await debugLauncher.launchDebugger(options); + +// expect(capturedConfig).to.not.be.undefined; +// // Should have a session marker of format 'test-{timestamp}-{random}' +// const marker = (capturedConfig as any).__vscodeTestSessionMarker; +// expect(marker).to.be.a('string'); +// expect(marker).to.match(/^test-\d+-[a-z0-9]+$/); +// }); + +// test('should generate unique markers for each launch', async () => { +// const options: LaunchOptions = { +// cwd: 'one/two/three', +// args: ['/one/two/three/testfile.py'], +// testProvider: 'pytest', +// runTestIdsPort: 'runTestIdsPort', +// pytestPort: 'pytestPort', +// }; + +// setupForProjectTests(options); + +// const capturedConfigs: DebugConfiguration[] = []; + +// debugService +// .setup((d) => d.startDebugging(TypeMoq.It.isAny(), TypeMoq.It.isAny(), TypeMoq.It.isAny())) +// .callback((_, config) => { +// capturedConfigs.push(config); +// }) +// .returns(() => Promise.resolve(true)); + +// debugService +// .setup((d) => d.onDidStartDebugSession(TypeMoq.It.isAny())) +// .returns(() => ({ dispose: () => {} })); + +// debugService +// .setup((d) => d.onDidTerminateDebugSession(TypeMoq.It.isAny())) +// .returns((callback) => { +// setTimeout(() => { +// if (capturedConfigs.length > 0) { +// const lastConfig = capturedConfigs[capturedConfigs.length - 1]; +// callback({ +// id: `session-${capturedConfigs.length}`, +// configuration: lastConfig, +// } as DebugSession); +// } +// }, 10); +// return { dispose: () => {} }; +// }); + +// // Launch twice +// await debugLauncher.launchDebugger(options); +// await debugLauncher.launchDebugger(options); + +// expect(capturedConfigs).to.have.length(2); +// const marker1 = (capturedConfigs[0] as any).__vscodeTestSessionMarker; +// const marker2 = (capturedConfigs[1] as any).__vscodeTestSessionMarker; +// expect(marker1).to.not.equal(marker2); +// }); + +// test('should only resolve when matching session terminates', async () => { +// const options: LaunchOptions = { +// cwd: 'one/two/three', +// args: ['/one/two/three/testfile.py'], +// testProvider: 'pytest', +// runTestIdsPort: 'runTestIdsPort', +// pytestPort: 'pytestPort', +// }; + +// setupForProjectTests(options); + +// let capturedConfig: DebugConfiguration | undefined; +// let terminateCallback: ((session: DebugSession) => void) | undefined; +// let startCallback: ((session: DebugSession) => void) | undefined; + +// debugService +// .setup((d) => d.startDebugging(TypeMoq.It.isAny(), TypeMoq.It.isAny(), TypeMoq.It.isAny())) +// .callback((_, config) => { +// capturedConfig = config; +// }) +// .returns(() => Promise.resolve(true)); + +// debugService +// 
.setup((d) => d.onDidStartDebugSession(TypeMoq.It.isAny())) +// .callback((cb) => { +// startCallback = cb; +// }) +// .returns(() => ({ dispose: () => {} })); + +// debugService +// .setup((d) => d.onDidTerminateDebugSession(TypeMoq.It.isAny())) +// .callback((cb) => { +// terminateCallback = cb; +// }) +// .returns(() => ({ dispose: () => {} })); + +// const launchPromise = debugLauncher.launchDebugger(options); + +// // Wait for config to be captured +// await new Promise((r) => setTimeout(r, 10)); + +// // Simulate our session starting +// const ourSession = { +// id: 'our-session-id', +// configuration: capturedConfig!, +// } as DebugSession; +// startCallback?.(ourSession); + +// // Create a different session (like another project's debug) +// const otherSession = { +// id: 'other-session-id', +// configuration: { __vscodeTestSessionMarker: 'different-marker' }, +// } as DebugSession; + +// // Terminate the OTHER session first - should NOT resolve our promise +// terminateCallback?.(otherSession); + +// // Wait a bit to ensure it didn't resolve +// let resolved = false; +// const checkPromise = launchPromise.then(() => { +// resolved = true; +// }); + +// await new Promise((r) => setTimeout(r, 20)); +// expect(resolved).to.be.false; + +// // Now terminate OUR session - should resolve +// terminateCallback?.(ourSession); + +// await checkPromise; +// expect(resolved).to.be.true; +// }); +// }); +// }); diff --git a/src/test/testing/testController/common/projectTestExecution.unit.test.ts b/src/test/testing/testController/common/projectTestExecution.unit.test.ts new file mode 100644 index 000000000000..bffbe4435220 --- /dev/null +++ b/src/test/testing/testController/common/projectTestExecution.unit.test.ts @@ -0,0 +1,773 @@ +// Copyright (c) Microsoft Corporation. All rights reserved. +// Licensed under the MIT License. 
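+// Unit tests for the project-based execution helpers in
+// src/client/testing/testController/common/projectTestExecution.ts:
+// grouping test items by their owning project, resolving the most specific
+// project for a test item, recursively collecting leaf test case nodes,
+// and orchestrating per-project execution and coverage setup.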
+ +import { expect } from 'chai'; +import * as sinon from 'sinon'; +import * as typemoq from 'typemoq'; +import { + CancellationToken, + CancellationTokenSource, + TestItem, + TestItemCollection, + TestRun, + TestRunProfile, + TestRunProfileKind, + TestRunRequest, + Uri, +} from 'vscode'; +import { IPythonExecutionFactory } from '../../../../client/common/process/types'; +import { ITestDebugLauncher } from '../../../../client/testing/common/types'; +import { ProjectAdapter } from '../../../../client/testing/testController/common/projectAdapter'; +import { + executeTestsForProject, + executeTestsForProjects, + findProjectForTestItem, + getTestCaseNodesRecursive, + groupTestItemsByProject, + ProjectExecutionDependencies, + setupCoverageForProject, +} from '../../../../client/testing/testController/common/projectTestExecution'; +import { TestProjectRegistry } from '../../../../client/testing/testController/common/testProjectRegistry'; +import { ITestExecutionAdapter, ITestResultResolver } from '../../../../client/testing/testController/common/types'; +import * as telemetry from '../../../../client/telemetry'; +import { createDeferred } from '../../../../client/common/utils/async'; + +suite('Project Test Execution', () => { + let sandbox: sinon.SinonSandbox; + + setup(() => { + sandbox = sinon.createSandbox(); + }); + + teardown(() => { + sandbox.restore(); + }); + + // ===== HELPER FUNCTIONS ===== + + function createMockTestItem(id: string, uriPath: string, children?: TestItem[]): TestItem { + const childMap = new Map(); + children?.forEach((c) => childMap.set(c.id, c)); + + const mockChildren: TestItemCollection = { + size: childMap.size, + forEach: (callback: (item: TestItem, collection: TestItemCollection) => void) => { + childMap.forEach((item) => callback(item, mockChildren)); + }, + get: (itemId: string) => childMap.get(itemId), + add: () => {}, + delete: () => {}, + replace: () => {}, + [Symbol.iterator]: function* () { + for (const [key, value] of childMap) { + yield [key, value] as [string, TestItem]; + } + }, + } as TestItemCollection; + + return ({ + id, + uri: Uri.file(uriPath), + children: mockChildren, + label: id, + canResolveChildren: false, + busy: false, + tags: [], + range: undefined, + error: undefined, + parent: undefined, + } as unknown) as TestItem; + } + + function createMockTestItemWithoutUri(id: string): TestItem { + return ({ + id, + uri: undefined, + children: ({ size: 0, forEach: () => {} } as unknown) as TestItemCollection, + label: id, + } as unknown) as TestItem; + } + + function createMockProjectAdapter(config: { + projectPath: string; + projectName: string; + pythonPath?: string; + testProvider?: 'pytest' | 'unittest'; + }): ProjectAdapter & { executionAdapterStub: sinon.SinonStub } { + // Use a plain stub instead of TypeMoq for easier testing + const runTestsStub = sinon.stub().resolves(); + const executionAdapter: ITestExecutionAdapter = ({ + runTests: runTestsStub, + } as unknown) as ITestExecutionAdapter; + + const resultResolverMock: ITestResultResolver = ({ + vsIdToRunId: new Map(), + runIdToVSid: new Map(), + runIdToTestItem: new Map(), + detailedCoverageMap: new Map(), + resolveDiscovery: () => Promise.resolve(), + resolveExecution: () => {}, + } as unknown) as ITestResultResolver; + + const adapter = ({ + projectUri: Uri.file(config.projectPath), + projectName: config.projectName, + workspaceUri: Uri.file(config.projectPath), + testProvider: config.testProvider ?? 'pytest', + pythonEnvironment: config.pythonPath + ? 
+                  {
+                      execInfo: { run: { executable: config.pythonPath } },
+                  }
+                : undefined,
+            pythonProject: {
+                name: config.projectName,
+                uri: Uri.file(config.projectPath),
+            },
+            executionAdapter,
+            discoveryAdapter: {} as any,
+            resultResolver: resultResolverMock,
+            isDiscovering: false,
+            isExecuting: false,
+            // Expose the stub for testing
+            executionAdapterStub: runTestsStub,
+        } as unknown) as ProjectAdapter & { executionAdapterStub: sinon.SinonStub };
+
+        return adapter;
+    }
+
+    function createMockDependencies(): ProjectExecutionDependencies {
+        return {
+            projectRegistry: typemoq.Mock.ofType<TestProjectRegistry>().object,
+            pythonExecFactory: typemoq.Mock.ofType<IPythonExecutionFactory>().object,
+            debugLauncher: typemoq.Mock.ofType<ITestDebugLauncher>().object,
+        };
+    }
+
+    function createMockTestRun(): typemoq.IMock<TestRun> {
+        const runMock = typemoq.Mock.ofType<TestRun>();
+        runMock.setup((r) => r.started(typemoq.It.isAny()));
+        runMock.setup((r) => r.passed(typemoq.It.isAny(), typemoq.It.isAny()));
+        runMock.setup((r) => r.failed(typemoq.It.isAny(), typemoq.It.isAny(), typemoq.It.isAny()));
+        runMock.setup((r) => r.skipped(typemoq.It.isAny()));
+        runMock.setup((r) => r.end());
+        return runMock;
+    }
+
+    // ===== findProjectForTestItem Tests =====
+
+    suite('findProjectForTestItem', () => {
+        test('should return undefined when test item has no URI', () => {
+            // Mock
+            const item = createMockTestItemWithoutUri('test1');
+            const projects = [createMockProjectAdapter({ projectPath: '/workspace/proj', projectName: 'proj' })];
+
+            // Run
+            const result = findProjectForTestItem(item, projects);
+
+            // Assert
+            expect(result).to.be.undefined;
+        });
+
+        test('should return matching project when item path is within project directory', () => {
+            // Mock
+            const item = createMockTestItem('test1', '/workspace/proj/tests/test_file.py');
+            const project = createMockProjectAdapter({ projectPath: '/workspace/proj', projectName: 'proj' });
+
+            // Run
+            const result = findProjectForTestItem(item, [project]);
+
+            // Assert
+            expect(result).to.equal(project);
+        });
+
+        test('should return undefined when item path is outside all project directories', () => {
+            // Mock
+            const item = createMockTestItem('test1', '/other/path/test.py');
+            const project = createMockProjectAdapter({ projectPath: '/workspace/proj', projectName: 'proj' });
+
+            // Run
+            const result = findProjectForTestItem(item, [project]);
+
+            // Assert
+            expect(result).to.be.undefined;
+        });
+
+        test('should return most specific (deepest) project when nested projects exist', () => {
+            // Mock - parent and child project with overlapping paths
+            const item = createMockTestItem('test1', '/workspace/parent/child/tests/test.py');
+            const parentProject = createMockProjectAdapter({ projectPath: '/workspace/parent', projectName: 'parent' });
+            const childProject = createMockProjectAdapter({
+                projectPath: '/workspace/parent/child',
+                projectName: 'child',
+            });
+
+            // Run
+            const result = findProjectForTestItem(item, [parentProject, childProject]);
+
+            // Assert - should match child (longer path) not parent
+            expect(result).to.equal(childProject);
+        });
+
+        test('should return most specific project regardless of input order', () => {
+            // Mock - same as above but different order
+            const item = createMockTestItem('test1', '/workspace/parent/child/tests/test.py');
+            const parentProject = createMockProjectAdapter({ projectPath: '/workspace/parent', projectName: 'parent' });
+            const childProject = createMockProjectAdapter({
+                projectPath: '/workspace/parent/child',
+                projectName: 'child',
+            });
+
+            // Run - pass child first, then parent
const result = findProjectForTestItem(item, [childProject, parentProject]); + + // Assert - order shouldn't affect result + expect(result).to.equal(childProject); + }); + + test('should match item at project root level', () => { + // Mock + const item = createMockTestItem('test1', '/workspace/proj/test.py'); + const project = createMockProjectAdapter({ projectPath: '/workspace/proj', projectName: 'proj' }); + + // Run + const result = findProjectForTestItem(item, [project]); + + // Assert + expect(result).to.equal(project); + }); + }); + + // ===== groupTestItemsByProject Tests ===== + + suite('groupTestItemsByProject', () => { + test('should group single test item to its matching project', () => { + // Mock + const item = createMockTestItem('test1', '/workspace/proj/test.py'); + const project = createMockProjectAdapter({ projectPath: '/workspace/proj', projectName: 'proj' }); + + // Run + const result = groupTestItemsByProject([item], [project]); + + // Assert + expect(result.size).to.equal(1); + const entry = Array.from(result.values())[0]; + expect(entry.project).to.equal(project); + expect(entry.items).to.deep.equal([item]); + }); + + test('should aggregate multiple items belonging to same project', () => { + // Mock + const item1 = createMockTestItem('test1', '/workspace/proj/tests/test1.py'); + const item2 = createMockTestItem('test2', '/workspace/proj/tests/test2.py'); + const item3 = createMockTestItem('test3', '/workspace/proj/test3.py'); + const project = createMockProjectAdapter({ projectPath: '/workspace/proj', projectName: 'proj' }); + + // Run + const result = groupTestItemsByProject([item1, item2, item3], [project]); + + // Assert - use Set for order-agnostic comparison + expect(result.size).to.equal(1); + const entry = Array.from(result.values())[0]; + expect(entry.items).to.have.length(3); + expect(new Set(entry.items)).to.deep.equal(new Set([item1, item2, item3])); + }); + + test('should separate items into groups by their owning project', () => { + // Mock + const item1 = createMockTestItem('test1', '/workspace/proj1/test.py'); + const item2 = createMockTestItem('test2', '/workspace/proj2/test.py'); + const item3 = createMockTestItem('test3', '/workspace/proj1/other_test.py'); + const proj1 = createMockProjectAdapter({ projectPath: '/workspace/proj1', projectName: 'proj1' }); + const proj2 = createMockProjectAdapter({ projectPath: '/workspace/proj2', projectName: 'proj2' }); + + // Run + const result = groupTestItemsByProject([item1, item2, item3], [proj1, proj2]); + + // Assert - use Set for order-agnostic comparison + expect(result.size).to.equal(2); + const proj1Entry = result.get(proj1.projectUri.toString()); + const proj2Entry = result.get(proj2.projectUri.toString()); + expect(proj1Entry?.items).to.have.length(2); + expect(new Set(proj1Entry?.items)).to.deep.equal(new Set([item1, item3])); + expect(proj2Entry?.items).to.deep.equal([item2]); + }); + + test('should return empty map when no test items provided', () => { + // Mock + const project = createMockProjectAdapter({ projectPath: '/workspace/proj', projectName: 'proj' }); + + // Run + const result = groupTestItemsByProject([], [project]); + + // Assert + expect(result.size).to.equal(0); + }); + + test('should exclude items that do not match any project path', () => { + // Mock + const item = createMockTestItem('test1', '/other/path/test.py'); + const project = createMockProjectAdapter({ projectPath: '/workspace/proj', projectName: 'proj' }); + + // Run + const result = groupTestItemsByProject([item], 
[project]); + + // Assert + expect(result.size).to.equal(0); + }); + + test('should assign item to most specific (deepest) project for nested paths', () => { + // Mock + const item = createMockTestItem('test1', '/workspace/parent/child/test.py'); + const parentProject = createMockProjectAdapter({ projectPath: '/workspace/parent', projectName: 'parent' }); + const childProject = createMockProjectAdapter({ + projectPath: '/workspace/parent/child', + projectName: 'child', + }); + + // Run + const result = groupTestItemsByProject([item], [parentProject, childProject]); + + // Assert + expect(result.size).to.equal(1); + const entry = result.get(childProject.projectUri.toString()); + expect(entry?.project).to.equal(childProject); + expect(entry?.items).to.deep.equal([item]); + }); + + test('should omit projects that have no matching test items', () => { + // Mock + const item = createMockTestItem('test1', '/workspace/proj1/test.py'); + const proj1 = createMockProjectAdapter({ projectPath: '/workspace/proj1', projectName: 'proj1' }); + const proj2 = createMockProjectAdapter({ projectPath: '/workspace/proj2', projectName: 'proj2' }); + + // Run + const result = groupTestItemsByProject([item], [proj1, proj2]); + + // Assert + expect(result.size).to.equal(1); + expect(result.has(proj1.projectUri.toString())).to.be.true; + expect(result.has(proj2.projectUri.toString())).to.be.false; + }); + }); + + // ===== getTestCaseNodesRecursive Tests ===== + + suite('getTestCaseNodesRecursive', () => { + test('should return single item when it is a leaf node with no children', () => { + // Mock + const item = createMockTestItem('test_func', '/test.py'); + + // Run + const result = getTestCaseNodesRecursive(item); + + // Assert + expect(result).to.deep.equal([item]); + }); + + test('should return all leaf nodes from single-level nested structure', () => { + // Mock + const leaf1 = createMockTestItem('test_method1', '/test.py'); + const leaf2 = createMockTestItem('test_method2', '/test.py'); + const classItem = createMockTestItem('TestClass', '/test.py', [leaf1, leaf2]); + + // Run + const result = getTestCaseNodesRecursive(classItem); + + // Assert - use Set for order-agnostic comparison + expect(result).to.have.length(2); + expect(new Set(result)).to.deep.equal(new Set([leaf1, leaf2])); + }); + + test('should traverse deeply nested structure to find all leaf nodes', () => { + // Mock - 3 levels deep: file → class → inner class → test + const leaf1 = createMockTestItem('test1', '/test.py'); + const leaf2 = createMockTestItem('test2', '/test.py'); + const innerClass = createMockTestItem('InnerClass', '/test.py', [leaf2]); + const outerClass = createMockTestItem('OuterClass', '/test.py', [leaf1, innerClass]); + const fileItem = createMockTestItem('test_file.py', '/test.py', [outerClass]); + + // Run + const result = getTestCaseNodesRecursive(fileItem); + + // Assert - use Set for order-agnostic comparison + expect(result).to.have.length(2); + expect(new Set(result)).to.deep.equal(new Set([leaf1, leaf2])); + }); + + test('should collect leaves from multiple sibling branches', () => { + // Mock - multiple test classes at same level + const leaf1 = createMockTestItem('test1', '/test.py'); + const leaf2 = createMockTestItem('test2', '/test.py'); + const leaf3 = createMockTestItem('test3', '/test.py'); + const class1 = createMockTestItem('Class1', '/test.py', [leaf1]); + const class2 = createMockTestItem('Class2', '/test.py', [leaf2, leaf3]); + const fileItem = createMockTestItem('test_file.py', '/test.py', [class1, 
class2]); + + // Run + const result = getTestCaseNodesRecursive(fileItem); + + // Assert - use Set for order-agnostic comparison + expect(result).to.have.length(3); + expect(new Set(result)).to.deep.equal(new Set([leaf1, leaf2, leaf3])); + }); + }); + + // ===== executeTestsForProject Tests ===== + + suite('executeTestsForProject', () => { + test('should call executionAdapter.runTests with project URI and mapped test IDs', async () => { + // Mock + const project = createMockProjectAdapter({ projectPath: '/workspace/proj', projectName: 'proj' }); + project.resultResolver.vsIdToRunId.set('test1', 'test_file.py::test1'); + const testItem = createMockTestItem('test1', '/workspace/proj/test.py'); + const runMock = createMockTestRun(); + const request = { profile: { kind: TestRunProfileKind.Run } } as TestRunRequest; + const deps = createMockDependencies(); + + // Run + await executeTestsForProject(project, [testItem], runMock.object, request, deps); + + // Assert + expect(project.executionAdapterStub.calledOnce).to.be.true; + const callArgs = project.executionAdapterStub.firstCall.args; + expect(callArgs[0].fsPath).to.equal(project.projectUri.fsPath); // uri + expect(callArgs[1]).to.deep.equal(['test_file.py::test1']); // testCaseIds + expect(callArgs[7]).to.equal(project); // project + }); + + test('should mark all leaf test items as started in the test run', async () => { + // Mock + const project = createMockProjectAdapter({ projectPath: '/workspace/proj', projectName: 'proj' }); + project.resultResolver.vsIdToRunId.set('test1', 'runId1'); + project.resultResolver.vsIdToRunId.set('test2', 'runId2'); + const item1 = createMockTestItem('test1', '/workspace/proj/test.py'); + const item2 = createMockTestItem('test2', '/workspace/proj/test.py'); + const runMock = createMockTestRun(); + const request = { profile: { kind: TestRunProfileKind.Run } } as TestRunRequest; + const deps = createMockDependencies(); + + // Run + await executeTestsForProject(project, [item1, item2], runMock.object, request, deps); + + // Assert - both items marked as started + runMock.verify((r) => r.started(item1), typemoq.Times.once()); + runMock.verify((r) => r.started(item2), typemoq.Times.once()); + }); + + test('should resolve test IDs via resultResolver.vsIdToRunId mapping', async () => { + // Mock + const project = createMockProjectAdapter({ projectPath: '/workspace/proj', projectName: 'proj' }); + project.resultResolver.vsIdToRunId.set('test1', 'path/to/test1'); + project.resultResolver.vsIdToRunId.set('test2', 'path/to/test2'); + const item1 = createMockTestItem('test1', '/workspace/proj/test.py'); + const item2 = createMockTestItem('test2', '/workspace/proj/test.py'); + const runMock = createMockTestRun(); + const request = { profile: { kind: TestRunProfileKind.Run } } as TestRunRequest; + const deps = createMockDependencies(); + + // Run + await executeTestsForProject(project, [item1, item2], runMock.object, request, deps); + + // Assert - use Set for order-agnostic comparison + const passedTestIds = project.executionAdapterStub.firstCall.args[1] as string[]; + expect(new Set(passedTestIds)).to.deep.equal(new Set(['path/to/test1', 'path/to/test2'])); + }); + + test('should skip execution when no items have vsIdToRunId mappings', async () => { + // Mock - no mappings set, so lookups return undefined + const project = createMockProjectAdapter({ projectPath: '/workspace/proj', projectName: 'proj' }); + const item = createMockTestItem('unmapped_test', '/workspace/proj/test.py'); + const runMock = createMockTestRun(); 
+ const request = { profile: { kind: TestRunProfileKind.Run } } as TestRunRequest; + const deps = createMockDependencies(); + + // Run + await executeTestsForProject(project, [item], runMock.object, request, deps); + + // Assert - execution adapter never called + expect(project.executionAdapterStub.called).to.be.false; + }); + + test('should recursively expand nested test items to find leaf nodes', async () => { + // Mock - class containing two test methods + const project = createMockProjectAdapter({ projectPath: '/workspace/proj', projectName: 'proj' }); + const leaf1 = createMockTestItem('test1', '/workspace/proj/test.py'); + const leaf2 = createMockTestItem('test2', '/workspace/proj/test.py'); + const classItem = createMockTestItem('TestClass', '/workspace/proj/test.py', [leaf1, leaf2]); + project.resultResolver.vsIdToRunId.set('test1', 'runId1'); + project.resultResolver.vsIdToRunId.set('test2', 'runId2'); + const runMock = createMockTestRun(); + const request = { profile: { kind: TestRunProfileKind.Run } } as TestRunRequest; + const deps = createMockDependencies(); + + // Run + await executeTestsForProject(project, [classItem], runMock.object, request, deps); + + // Assert - leaf nodes marked as started, not the parent class + runMock.verify((r) => r.started(leaf1), typemoq.Times.once()); + runMock.verify((r) => r.started(leaf2), typemoq.Times.once()); + const passedTestIds = project.executionAdapterStub.firstCall.args[1] as string[]; + expect(passedTestIds).to.have.length(2); + }); + }); + + // ===== executeTestsForProjects Tests ===== + + suite('executeTestsForProjects', () => { + let telemetryStub: sinon.SinonStub; + + setup(() => { + telemetryStub = sandbox.stub(telemetry, 'sendTelemetryEvent'); + }); + + test('should return immediately when empty projects array provided', async () => { + // Mock + const runMock = createMockTestRun(); + const token = new CancellationTokenSource().token; + const request = { profile: { kind: TestRunProfileKind.Run } } as TestRunRequest; + const deps = createMockDependencies(); + + // Run + await executeTestsForProjects([], [], runMock.object, request, token, deps); + + // Assert - no telemetry sent since no projects executed + expect(telemetryStub.called).to.be.false; + }); + + test('should skip execution when cancellation requested before start', async () => { + // Mock + const project = createMockProjectAdapter({ projectPath: '/workspace/proj', projectName: 'proj' }); + const item = createMockTestItem('test1', '/workspace/proj/test.py'); + const runMock = createMockTestRun(); + const tokenSource = new CancellationTokenSource(); + tokenSource.cancel(); // Pre-cancel + const request = { profile: { kind: TestRunProfileKind.Run } } as TestRunRequest; + const deps = createMockDependencies(); + + // Run + await executeTestsForProjects([project], [item], runMock.object, request, tokenSource.token, deps); + + // Assert - execution adapter never called + expect(project.executionAdapterStub.called).to.be.false; + }); + + test('should execute tests for each project when multiple projects provided', async () => { + // Mock + const proj1 = createMockProjectAdapter({ projectPath: '/workspace/proj1', projectName: 'proj1' }); + const proj2 = createMockProjectAdapter({ projectPath: '/workspace/proj2', projectName: 'proj2' }); + proj1.resultResolver.vsIdToRunId.set('test1', 'runId1'); + proj2.resultResolver.vsIdToRunId.set('test2', 'runId2'); + const item1 = createMockTestItem('test1', '/workspace/proj1/test.py'); + const item2 = createMockTestItem('test2', 
'/workspace/proj2/test.py'); + const runMock = createMockTestRun(); + const token = new CancellationTokenSource().token; + const request = { profile: { kind: TestRunProfileKind.Run } } as TestRunRequest; + const deps = createMockDependencies(); + + // Run + await executeTestsForProjects([proj1, proj2], [item1, item2], runMock.object, request, token, deps); + + // Assert - both projects had their execution adapters called + expect(proj1.executionAdapterStub.calledOnce).to.be.true; + expect(proj2.executionAdapterStub.calledOnce).to.be.true; + }); + + test('should emit telemetry event for each project execution', async () => { + // Mock + const proj1 = createMockProjectAdapter({ projectPath: '/workspace/proj1', projectName: 'proj1' }); + const proj2 = createMockProjectAdapter({ projectPath: '/workspace/proj2', projectName: 'proj2' }); + proj1.resultResolver.vsIdToRunId.set('test1', 'runId1'); + proj2.resultResolver.vsIdToRunId.set('test2', 'runId2'); + const item1 = createMockTestItem('test1', '/workspace/proj1/test.py'); + const item2 = createMockTestItem('test2', '/workspace/proj2/test.py'); + const runMock = createMockTestRun(); + const token = new CancellationTokenSource().token; + const request = { profile: { kind: TestRunProfileKind.Run } } as TestRunRequest; + const deps = createMockDependencies(); + + // Run + await executeTestsForProjects([proj1, proj2], [item1, item2], runMock.object, request, token, deps); + + // Assert - telemetry sent twice (once per project) + expect(telemetryStub.callCount).to.equal(2); + }); + + test('should stop processing remaining projects when cancellation requested mid-execution', async () => { + // Mock + const tokenSource = new CancellationTokenSource(); + const proj1 = createMockProjectAdapter({ projectPath: '/workspace/proj1', projectName: 'proj1' }); + const proj2 = createMockProjectAdapter({ projectPath: '/workspace/proj2', projectName: 'proj2' }); + // First project triggers cancellation during its execution + proj1.executionAdapterStub.callsFake(async () => { + tokenSource.cancel(); + }); + proj1.resultResolver.vsIdToRunId.set('test1', 'runId1'); + proj2.resultResolver.vsIdToRunId.set('test2', 'runId2'); + const item1 = createMockTestItem('test1', '/workspace/proj1/test.py'); + const item2 = createMockTestItem('test2', '/workspace/proj2/test.py'); + const runMock = createMockTestRun(); + const request = { profile: { kind: TestRunProfileKind.Run } } as TestRunRequest; + const deps = createMockDependencies(); + + // Run + await executeTestsForProjects( + [proj1, proj2], + [item1, item2], + runMock.object, + request, + tokenSource.token, + deps, + ); + + // Assert - first project executed, second may be skipped due to cancellation check + expect(proj1.executionAdapterStub.calledOnce).to.be.true; + }); + + test('should continue executing remaining projects when one project fails', async () => { + // Mock + const proj1 = createMockProjectAdapter({ projectPath: '/workspace/proj1', projectName: 'proj1' }); + const proj2 = createMockProjectAdapter({ projectPath: '/workspace/proj2', projectName: 'proj2' }); + proj1.executionAdapterStub.rejects(new Error('Execution failed')); + proj1.resultResolver.vsIdToRunId.set('test1', 'runId1'); + proj2.resultResolver.vsIdToRunId.set('test2', 'runId2'); + const item1 = createMockTestItem('test1', '/workspace/proj1/test.py'); + const item2 = createMockTestItem('test2', '/workspace/proj2/test.py'); + const runMock = createMockTestRun(); + const token = new CancellationTokenSource().token; + const request = { profile: { 
kind: TestRunProfileKind.Run } } as TestRunRequest; + const deps = createMockDependencies(); + + // Run - should not throw + await executeTestsForProjects([proj1, proj2], [item1, item2], runMock.object, request, token, deps); + + // Assert - second project still executed despite first failing + expect(proj2.executionAdapterStub.calledOnce).to.be.true; + }); + + test('should configure loadDetailedCoverage callback when run profile is Coverage', async () => { + // Mock + const project = createMockProjectAdapter({ projectPath: '/workspace/proj', projectName: 'proj' }); + project.resultResolver.vsIdToRunId.set('test1', 'runId1'); + const item = createMockTestItem('test1', '/workspace/proj/test.py'); + const runMock = createMockTestRun(); + const token = new CancellationTokenSource().token; + const profileMock = ({ + kind: TestRunProfileKind.Coverage, + loadDetailedCoverage: undefined, + } as unknown) as TestRunProfile; + const request = { profile: profileMock } as TestRunRequest; + const deps = createMockDependencies(); + + // Run + await executeTestsForProjects([project], [item], runMock.object, request, token, deps); + + // Assert - loadDetailedCoverage callback was configured + expect(profileMock.loadDetailedCoverage).to.not.be.undefined; + }); + + test('should include debugging=true in telemetry when run profile is Debug', async () => { + // Mock + const project = createMockProjectAdapter({ projectPath: '/workspace/proj', projectName: 'proj' }); + project.resultResolver.vsIdToRunId.set('test1', 'runId1'); + const item = createMockTestItem('test1', '/workspace/proj/test.py'); + const runMock = createMockTestRun(); + const token = new CancellationTokenSource().token; + const request = { profile: { kind: TestRunProfileKind.Debug } } as TestRunRequest; + const deps = createMockDependencies(); + + // Run + await executeTestsForProjects([project], [item], runMock.object, request, token, deps); + + // Assert - telemetry contains debugging=true + expect(telemetryStub.calledOnce).to.be.true; + const telemetryProps = telemetryStub.firstCall.args[2]; + expect(telemetryProps.debugging).to.be.true; + }); + }); + + // ===== setupCoverageForProject Tests ===== + + suite('setupCoverageForProject', () => { + test('should configure loadDetailedCoverage callback when profile kind is Coverage', () => { + // Mock + const project = createMockProjectAdapter({ projectPath: '/workspace/proj', projectName: 'proj' }); + const profileMock = ({ + kind: TestRunProfileKind.Coverage, + loadDetailedCoverage: undefined, + } as unknown) as TestRunProfile; + const request = { profile: profileMock } as TestRunRequest; + + // Run + setupCoverageForProject(request, project); + + // Assert + expect(profileMock.loadDetailedCoverage).to.be.a('function'); + }); + + test('should leave loadDetailedCoverage undefined when profile kind is Run', () => { + // Mock + const project = createMockProjectAdapter({ projectPath: '/workspace/proj', projectName: 'proj' }); + const profileMock = ({ + kind: TestRunProfileKind.Run, + loadDetailedCoverage: undefined, + } as unknown) as TestRunProfile; + const request = { profile: profileMock } as TestRunRequest; + + // Run + setupCoverageForProject(request, project); + + // Assert + expect(profileMock.loadDetailedCoverage).to.be.undefined; + }); + + test('should return coverage data from detailedCoverageMap when loadDetailedCoverage is called', async () => { + // Mock + const project = createMockProjectAdapter({ projectPath: '/workspace/proj', projectName: 'proj' }); + const mockCoverageDetails = [{ 
line: 1, executed: true }]; + project.resultResolver.detailedCoverageMap.set('/workspace/proj/file.py', mockCoverageDetails as any); + const profileMock = ({ + kind: TestRunProfileKind.Coverage, + loadDetailedCoverage: undefined, + } as unknown) as TestRunProfile; + const request = { profile: profileMock } as TestRunRequest; + + // Run - configure coverage + setupCoverageForProject(request, project); + + // Run - call the configured callback + const fileCoverage = { uri: Uri.file('/workspace/proj/file.py') }; + const result = await profileMock.loadDetailedCoverage!( + {} as TestRun, + fileCoverage as any, + {} as CancellationToken, + ); + + // Assert + expect(result).to.deep.equal(mockCoverageDetails); + }); + + test('should return empty array when file has no coverage data in map', async () => { + // Mock + const project = createMockProjectAdapter({ projectPath: '/workspace/proj', projectName: 'proj' }); + const profileMock = ({ + kind: TestRunProfileKind.Coverage, + loadDetailedCoverage: undefined, + } as unknown) as TestRunProfile; + const request = { profile: profileMock } as TestRunRequest; + + // Run - configure coverage + setupCoverageForProject(request, project); + + // Run - call callback for file not in map + const fileCoverage = { uri: Uri.file('/workspace/proj/uncovered_file.py') }; + const result = await profileMock.loadDetailedCoverage!( + {} as TestRun, + fileCoverage as any, + {} as CancellationToken, + ); + + // Assert + expect(result).to.deep.equal([]); + }); + }); +}); diff --git a/src/test/testing/testController/pytest/pytestExecutionAdapter.unit.test.ts b/src/test/testing/testController/pytest/pytestExecutionAdapter.unit.test.ts index e0401edc7b41..884916182e04 100644 --- a/src/test/testing/testController/pytest/pytestExecutionAdapter.unit.test.ts +++ b/src/test/testing/testController/pytest/pytestExecutionAdapter.unit.test.ts @@ -22,6 +22,7 @@ import { EXTENSION_ROOT_DIR } from '../../../../client/constants'; import { MockChildProcess } from '../../../mocks/mockChildProcess'; import { traceInfo } from '../../../../client/logging'; import * as extapi from '../../../../client/envExt/api.internal'; +import { ProjectAdapter } from '../../../../client/testing/testController/common/projectAdapter'; suite('pytest test execution adapter', () => { let useEnvExtensionStub: sinon.SinonStub; @@ -325,4 +326,222 @@ suite('pytest test execution adapter', () => { typeMoq.Times.once(), ); }); + + // ===== PROJECT-BASED EXECUTION TESTS ===== + + suite('project-based execution', () => { + function createMockProjectAdapter(projectPath: string, projectName: string): ProjectAdapter { + return ({ + projectUri: Uri.file(projectPath), + projectName, + workspaceUri: Uri.file(projectPath), + testProvider: 'pytest', + pythonEnvironment: { + execInfo: { run: { executable: '/custom/python/path' } }, + }, + pythonProject: { + name: projectName, + uri: Uri.file(projectPath), + }, + executionAdapter: {}, + discoveryAdapter: {}, + resultResolver: {}, + isDiscovering: false, + isExecuting: false, + } as unknown) as ProjectAdapter; + } + + test('should set PROJECT_ROOT_PATH env var when project provided', async () => { + const deferred2 = createDeferred(); + const deferred3 = createDeferred(); + execFactory = typeMoq.Mock.ofType(); + execFactory + .setup((x) => x.createActivatedEnvironment(typeMoq.It.isAny())) + .returns(() => { + deferred2.resolve(); + return Promise.resolve(execService.object); + }); + utilsWriteTestIdsFileStub.callsFake(() => { + deferred3.resolve(); + return 
Promise.resolve('testIdPipe-mockName'); + }); + const testRun = typeMoq.Mock.ofType(); + testRun.setup((t) => t.token).returns(() => ({ onCancellationRequested: () => undefined } as any)); + + const projectPath = path.join('/', 'workspace', 'myproject'); + const mockProject = createMockProjectAdapter(projectPath, 'myproject'); + + const uri = Uri.file(myTestPath); + adapter = new PytestTestExecutionAdapter(configService); + adapter.runTests( + uri, + [], + TestRunProfileKind.Run, + testRun.object, + execFactory.object, + undefined, + undefined, + mockProject, + ); + + await deferred2.promise; + await deferred3.promise; + await deferred4.promise; + mockProc.trigger('close'); + + execService.verify( + (x) => + x.execObservable( + typeMoq.It.isAny(), + typeMoq.It.is((options) => { + assert.equal(options.env?.PROJECT_ROOT_PATH, projectPath); + return true; + }), + ), + typeMoq.Times.once(), + ); + }); + + test('should pass debugSessionName in LaunchOptions for debug mode with project', async () => { + const deferred3 = createDeferred(); + utilsWriteTestIdsFileStub.callsFake(() => Promise.resolve('testIdPipe-mockName')); + + debugLauncher + .setup((dl) => dl.launchDebugger(typeMoq.It.isAny(), typeMoq.It.isAny(), typeMoq.It.isAny())) + .returns(async (_opts, callback) => { + traceInfo('stubs launch debugger'); + if (typeof callback === 'function') { + deferred3.resolve(); + callback(); + } + }); + + const testRun = typeMoq.Mock.ofType(); + testRun + .setup((t) => t.token) + .returns( + () => + ({ + onCancellationRequested: () => undefined, + } as any), + ); + + const projectPath = path.join('/', 'workspace', 'myproject'); + const mockProject = createMockProjectAdapter(projectPath, 'myproject (Python 3.11)'); + + const uri = Uri.file(myTestPath); + adapter = new PytestTestExecutionAdapter(configService); + adapter.runTests( + uri, + [], + TestRunProfileKind.Debug, + testRun.object, + execFactory.object, + debugLauncher.object, + undefined, + mockProject, + ); + + await deferred3.promise; + + debugLauncher.verify( + (x) => + x.launchDebugger( + typeMoq.It.is((launchOptions) => { + assert.equal(launchOptions.debugSessionName, 'myproject (Python 3.11)'); + assert.equal(launchOptions.pythonPath, '/custom/python/path'); + return true; + }), + typeMoq.It.isAny(), + typeMoq.It.isAny(), + ), + typeMoq.Times.once(), + ); + }); + + test('should not set PROJECT_ROOT_PATH when no project provided', async () => { + const deferred2 = createDeferred(); + const deferred3 = createDeferred(); + execFactory = typeMoq.Mock.ofType(); + execFactory + .setup((x) => x.createActivatedEnvironment(typeMoq.It.isAny())) + .returns(() => { + deferred2.resolve(); + return Promise.resolve(execService.object); + }); + utilsWriteTestIdsFileStub.callsFake(() => { + deferred3.resolve(); + return Promise.resolve('testIdPipe-mockName'); + }); + const testRun = typeMoq.Mock.ofType(); + testRun.setup((t) => t.token).returns(() => ({ onCancellationRequested: () => undefined } as any)); + + const uri = Uri.file(myTestPath); + adapter = new PytestTestExecutionAdapter(configService); + // Call without project parameter + adapter.runTests(uri, [], TestRunProfileKind.Run, testRun.object, execFactory.object); + + await deferred2.promise; + await deferred3.promise; + await deferred4.promise; + mockProc.trigger('close'); + + execService.verify( + (x) => + x.execObservable( + typeMoq.It.isAny(), + typeMoq.It.is((options) => { + assert.equal(options.env?.PROJECT_ROOT_PATH, undefined); + return true; + }), + ), + typeMoq.Times.once(), + ); + }); + 
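+        // Companion check for the debug path: when runTests() is invoked without
+        // a ProjectAdapter, LaunchOptions should carry no project-derived fields.
+        // Only debugSessionName and pythonPath are asserted below, since those are
+        // the project-specific additions under test; the rest of LaunchOptions is
+        // left to the existing debug launcher tests.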
+ test('should not set debugSessionName or pythonPath in LaunchOptions when no project', async () => { + const deferred3 = createDeferred(); + utilsWriteTestIdsFileStub.callsFake(() => Promise.resolve('testIdPipe-mockName')); + + debugLauncher + .setup((dl) => dl.launchDebugger(typeMoq.It.isAny(), typeMoq.It.isAny(), typeMoq.It.isAny())) + .returns(async (_opts, callback) => { + if (typeof callback === 'function') { + deferred3.resolve(); + callback(); + } + }); + + const testRun = typeMoq.Mock.ofType(); + testRun.setup((t) => t.token).returns(() => ({ onCancellationRequested: () => undefined } as any)); + + const uri = Uri.file(myTestPath); + adapter = new PytestTestExecutionAdapter(configService); + // Call without project parameter + adapter.runTests( + uri, + [], + TestRunProfileKind.Debug, + testRun.object, + execFactory.object, + debugLauncher.object, + ); + + await deferred3.promise; + + debugLauncher.verify( + (x) => + x.launchDebugger( + typeMoq.It.is((launchOptions) => { + assert.equal(launchOptions.debugSessionName, undefined); + assert.equal(launchOptions.pythonPath, undefined); + return true; + }), + typeMoq.It.isAny(), + typeMoq.It.isAny(), + ), + typeMoq.Times.once(), + ); + }); + }); }); From 922258732a40f498f2c9a5443a7430b60d8ec603 Mon Sep 17 00:00:00 2001 From: eleanorjboyd <26030610+eleanorjboyd@users.noreply.github.com> Date: Fri, 6 Feb 2026 13:42:57 -0800 Subject: [PATCH 2/9] move out ai artifacts --- docs/test-plan-project-based-execution.md | 511 ---------------------- plan-project-based-exec.md | 217 --------- 2 files changed, 728 deletions(-) delete mode 100644 docs/test-plan-project-based-execution.md delete mode 100644 plan-project-based-exec.md diff --git a/docs/test-plan-project-based-execution.md b/docs/test-plan-project-based-execution.md deleted file mode 100644 index 8de1f1ae66bc..000000000000 --- a/docs/test-plan-project-based-execution.md +++ /dev/null @@ -1,511 +0,0 @@ -# Test Plan: Project-Based Pytest Execution - -This document outlines the testing strategy for the project-based pytest execution feature, including scenarios, edge cases, and test implementations. - -## Table of Contents -1. [Overview](#overview) -2. [Test Architecture Summary](#test-architecture-summary) -3. [Unit Tests - New Functions](#unit-tests---new-functions) -4. [Unit Tests - Modified Functions](#unit-tests---modified-functions) -5. [Integration/Higher-Level Tests](#integrationhigher-level-tests) -6. [Edge Cases & Error Scenarios](#edge-cases--error-scenarios) -7. 
[Implementation Recommendations](#implementation-recommendations) - ---- - -## Overview - -The project-based execution feature introduces: -- **`projectTestExecution.ts`** - New file with execution orchestration functions -- **`pytestExecutionAdapter.ts`** - Modified to accept `ProjectAdapter` parameter -- **`debugLauncher.ts`** - New debug session isolation with unique markers -- **`controller.ts`** - Integration point calling `executeTestsForProjects()` - ---- - -## Test Architecture Summary - -### Existing Patterns to Reuse - -| Pattern | Location | Description | -|---------|----------|-------------| -| TypeMoq mocking | `pytestExecutionAdapter.unit.test.ts` | Mock services, exec factory, debug launcher | -| Sinon stubs for utilities | `workspaceTestAdapter.unit.test.ts` | Stub `util.*` functions | -| Deferred promises | `testCancellationRunAdapters.unit.test.ts` | Test async flows and cancellation | -| TestItem mocking | `testExecutionHandler.unit.test.ts` | Create mock test items with children | -| ProjectAdapter creation | `testProjectRegistry.unit.test.ts` | Mock Python projects and environments | -| Debug service mocking | `debugLauncher.unit.test.ts` | Mock `IDebugService`, session handling | - -### Testing Tools Used -- **Mocha** - Test framework (suite/test) -- **TypeMoq** - Interface mocking -- **Sinon** - Stubs, spies, fakes -- **Chai** - Assertions (expect/assert) - ---- - -## Unit Tests - New Functions - -### File: `projectTestExecution.unit.test.ts` (NEW) - -#### 1. `groupTestItemsByProject()` - -**Function Signature:** -```typescript -groupTestItemsByProject( - testItems: TestItem[], - projects: ProjectAdapter[] -): Map -``` - -**Test Cases:** - -| Test Name | Scenario | Expected Behavior | -|-----------|----------|-------------------| -| `should group single item to single project` | 1 test item, 1 project | Map has 1 entry with 1 item | -| `should group multiple items to single project` | 3 items same project | Map has 1 entry with 3 items | -| `should group items across multiple projects` | 3 items, 2 projects | Map has 2 entries, items split correctly | -| `should return empty map when no test items` | 0 items, 2 projects | Empty map | -| `should handle items with no matching project` | Item outside all project paths | Item not included, logged as verbose | -| `should match to most specific project (longest path)` | Nested projects `/a` and `/a/b` | Item in `/a/b/test.py` → project `/a/b` | -| `should handle Windows paths` | `C:\workspace\project` paths | Correct grouping | - -**Mock Setup:** -```typescript -// Create mock test items with URIs -function createMockTestItem(id: string, uri: Uri): TestItem { - return { - id, - uri, - children: { size: 0, forEach: () => {} } - } as unknown as TestItem; -} - -// Create mock ProjectAdapter -function createMockProject(projectPath: string): ProjectAdapter { - return { - projectUri: Uri.file(projectPath), - projectName: path.basename(projectPath), - // ... other required properties - } as unknown as ProjectAdapter; -} -``` - ---- - -#### 2. 
`findProjectForTestItem()` - -**Test Cases:** - -| Test Name | Scenario | Expected | -|-----------|----------|----------| -| `should return undefined for item with no URI` | `item.uri = undefined` | `undefined` | -| `should return project when item path starts with project path` | `/proj/tests/test.py` → `/proj` | Returns project | -| `should return undefined when no project matches` | `/other/test.py` vs `/proj` | `undefined` | -| `should return most specific project for nested paths` | `/ws/a/b/test.py` with projects `/ws/a` and `/ws/a/b` | `/ws/a/b` project | -| `should handle exact path match` | Item at `/proj/test.py`, project at `/proj` | Returns project | - ---- - -#### 3. `getTestCaseNodesRecursive()` - -**Test Cases:** - -| Test Name | Scenario | Expected | -|-----------|----------|----------| -| `should return single item when no children` | Leaf test case | `[item]` | -| `should return all leaf nodes from nested structure` | File → Class → Methods | All method nodes | -| `should handle deeply nested structure` | 4 levels deep | All leaf nodes | -| `should return empty array for item with empty children` | Item with `children.size = 0` | `[item]` | - -**Mock Setup:** -```typescript -function createNestedTestItem( - id: string, - childIds: string[] -): TestItem { - const children = new Map(); - childIds.forEach(cid => { - children.set(cid, createMockTestItem(cid, Uri.file('/test.py'))); - }); - return { - id, - uri: Uri.file('/test.py'), - children: { - size: children.size, - forEach: (cb) => children.forEach(cb) - } - } as unknown as TestItem; -} -``` - ---- - -#### 4. `executeTestsForProject()` - -**Test Cases:** - -| Test Name | Scenario | Expected | -|-----------|----------|----------| -| `should call executionAdapter.runTests with correct parameters` | Normal execution | Adapter called with projectUri, testIds, project | -| `should mark all test items as started` | 3 test items | `runInstance.started()` called 3 times | -| `should collect testIds from resultResolver.vsIdToRunId` | Test items with mapped IDs | Correct IDs passed to adapter | -| `should handle empty testIds gracefully` | No mapped IDs found | Returns early, logs verbose | -| `should pass project to execution adapter` | Project-based mode | `project` parameter is the ProjectAdapter | - ---- - -#### 5. `executeTestsForProjects()` - -**Test Cases:** - -| Test Name | Scenario | Expected | -|-----------|----------|----------| -| `should execute tests for multiple projects in parallel` | 3 projects, 9 tests | All 3 executionAdapter.runTests called | -| `should skip execution if cancellation requested before start` | Token cancelled | No adapters called | -| `should skip project if cancellation requested mid-execution` | Cancel after 1st project | 2nd project skipped | -| `should handle empty projects array` | 0 projects | Returns early, logs error | -| `should setup coverage when profile kind is Coverage` | Coverage profile | `loadDetailedCoverage` set on profile | -| `should send telemetry for each project execution` | 2 projects | 2 telemetry events | -| `should continue other projects if one fails` | 1 project throws | Other projects still execute | -| `should not log cancellation as error` | Cancelled during execution | No error logged | - ---- - -#### 6. 
`setupCoverageForProject()` - -**Test Cases:** - -| Test Name | Scenario | Expected | -|-----------|----------|----------| -| `should set loadDetailedCoverage on profile` | Coverage profile kind | Function assigned | -| `should do nothing for non-coverage profile` | Run profile kind | No changes to profile | -| `should return details from project.resultResolver.detailedCoverageMap` | Coverage data exists | Returns coverage details | -| `should return empty array when no coverage data` | No data for file | Returns `[]` | - ---- - -## Unit Tests - Modified Functions - -### File: `pytestExecutionAdapter.unit.test.ts` (EXTEND) - -**New Test Cases:** - -| Test Name | Scenario | Expected | -|-----------|----------|----------| -| `should set PROJECT_ROOT_PATH env var when project provided` | Project-based execution | `PROJECT_ROOT_PATH` set to project.projectUri.fsPath | -| `should use project's Python environment when available` | Project with pythonEnv | `execService` created with project's env | -| `should pass debugSessionName in LaunchOptions for debug` | Debug mode with project | `debugSessionName` = project.projectName | -| `should fall back to execFactory when no project environment` | No project.pythonEnvironment | Uses execFactory.createActivatedEnvironment | - -**Mock Setup Addition:** -```typescript -const mockProject: ProjectAdapter = { - projectUri: Uri.file('/workspace/myproject'), - projectName: 'myproject (Python 3.11)', - pythonEnvironment: { - execInfo: { run: { executable: '/usr/bin/python3' } } - }, - // ... other fields -} as unknown as ProjectAdapter; - -// Test with project -adapter.runTests(uri, testIds, kind, testRun, execFactory, debugLauncher, undefined, mockProject); -``` - ---- - -### File: `debugLauncher.unit.test.ts` (EXTEND) - -**New Test Cases for Session Isolation:** - -| Test Name | Scenario | Expected | -|-----------|----------|----------| -| `should add unique session marker to launch config` | Any debug launch | `config[TEST_SESSION_MARKER_KEY]` is unique | -| `should only terminate matching session on callback` | Multiple sessions | Only session with matching marker terminates | -| `should use debugSessionName in config name when provided` | `options.debugSessionName` set | `config.name` includes session name | -| `should use pythonPath when provided` | `options.pythonPath` set | `config.python` = pythonPath | -| `should handle parallel debug sessions independently` | 2 concurrent launches | Each completes independently | -| `should dispose listener when session terminates` | Session ends | `onDidTerminateDebugSession` listener disposed | -| `should resolve deferred on matching session termination` | Correct session ends | Promise resolves | -| `should not resolve deferred on non-matching session termination` | Different session ends | Promise still pending | - -**Mock Setup for Parallel Sessions:** -```typescript -test('should handle parallel debug sessions independently', async () => { - const sessions: DebugSession[] = []; - let terminateCallback: (session: DebugSession) => void; - - debugService - .setup(d => d.startDebugging(typemoq.It.isAny(), typemoq.It.isAny(), undefined)) - .callback((_, config) => { - const mockSession = { - id: `session-${sessions.length}`, - configuration: config - }; - sessions.push(mockSession); - }) - .returns(() => Promise.resolve(true)); - - debugService - .setup(d => d.onDidTerminateDebugSession(typemoq.It.isAny())) - .callback((cb) => { terminateCallback = cb; }) - .returns(() => ({ dispose: () => {} })); - - // Launch 
two sessions in parallel - const launch1 = debugLauncher.launchDebugger(options1); - const launch2 = debugLauncher.launchDebugger(options2); - - // Terminate first session - terminateCallback(sessions[0]); - - // Verify only first resolved - await launch1; // Should resolve - // launch2 should still be pending -}); -``` - ---- - -## Integration/Higher-Level Tests - -### File: `projectBasedExecution.integration.test.ts` (NEW) - -These tests verify the complete flow from controller through to execution adapters. - -#### Test Suite: Multi-Project Workspace Execution - -| Test Name | Scenario | Verifications | -|-----------|----------|---------------| -| `should discover and execute tests across 3 projects` | Multi-project workspace | Each project's adapter called with correct tests | -| `should use correct Python environment per project` | Projects with different Pythons | Environment matches project config | -| `should handle mixed test selection across projects` | 2 tests from proj1, 1 from proj2 | Correct grouping and execution | -| `should isolate results per project` | Results from multiple projects | ResultResolver receives per-project data | - -#### Test Suite: Debug Mode Multi-Project - -| Test Name | Scenario | Verifications | -|-----------|----------|---------------| -| `should launch separate debug session per project` | 2 projects in debug mode | 2 debug sessions started | -| `should name debug sessions with project names` | Debug with named projects | Session names include project names | -| `should allow stopping one session without affecting others` | Stop project A | Project B continues | -| `should handle debug session errors per project` | One project fails to debug | Other projects still debug | - -#### Test Suite: Cancellation Flow - -| Test Name | Scenario | Verifications | -|-----------|----------|---------------| -| `should cancel all projects when token cancelled` | Cancel mid-run | All projects stop gracefully | -| `should not start pending projects after cancellation` | Cancel after 1 project | Remaining projects not started | -| `should propagate cancellation to debug sessions` | Cancel during debug | Debug sessions terminate | -| `should cleanup named pipes on cancellation` | Cancel during execution | Server disposed, pipes cleaned | - ---- - -### File: `controller.unit.test.ts` (EXTEND) - -**New Test Cases:** - -| Test Name | Scenario | Expected | -|-----------|----------|----------| -| `should call executeTestsForProjects when projects registered` | Project-based mode | `executeTestsForProjects()` called | -| `should fall back to legacy execution when no projects` | Legacy mode | `workspaceTestAdapter.executeTests()` called | -| `should pass correct dependencies to executeTestsForProjects` | Valid deps | pythonExecFactory, debugLauncher, registry passed | - ---- - -## Edge Cases & Error Scenarios - -### Edge Case Matrix - -| Category | Edge Case | Test Location | Expected Behavior | -|----------|-----------|---------------|-------------------| -| **Empty Input** | No test items selected | `executeTestsForProjects` | Returns early, no errors | -| **Empty Input** | No projects in registry | `executeTestsForProjects` | Logs error, returns early | -| **Empty Input** | Test items with no URIs | `findProjectForTestItem` | Returns undefined, item skipped | -| **Path Matching** | Nested projects (parent/child) | `groupTestItemsByProject` | Uses most specific match | -| **Path Matching** | Sibling projects | `groupTestItemsByProject` | Correct assignment | -| **Path 
Matching** | Windows vs Unix paths | `findProjectForTestItem` | Handles both | -| **Cancellation** | Cancelled before start | `executeTestsForProjects` | Immediate return | -| **Cancellation** | Cancelled mid-project | `executeTestsForProject` | Stops gracefully | -| **Cancellation** | Cancelled during debug | `debugLauncher` | Session terminated | -| **Debug Sessions** | Multiple simultaneous | `debugLauncher` | Independent isolation | -| **Debug Sessions** | One fails to start | `executeTestsForProjects` | Others continue | -| **Debug Sessions** | Session terminated externally | `debugLauncher` | Deferred resolves | -| **Environment** | Project missing Python env | `pytestExecutionAdapter` | Falls back to workspace env | -| **Environment** | Invalid Python path | `pytestExecutionAdapter` | Error reported | -| **Results** | Mixed pass/fail across projects | `executeTestsForProjects` | All results processed | -| **Results** | One project times out | `executeTestsForProjects` | Others complete | - -### Error Scenarios - -| Error Type | Test | Expected Outcome | -|------------|------|------------------| -| Adapter throws exception | `executeTestsForProject` catches | Error logged, other projects continue | -| Debug launcher rejects | `executeTestsForProjects` | Error logged, not cancellation error | -| Named pipe fails | `pytestExecutionAdapter` | Test run fails gracefully | -| Result resolver not found | `executeTestsForProject` | Test IDs empty, returns early | - ---- - -## Implementation Recommendations - -### 1. New Test File Structure - -``` -src/test/testing/testController/ -├── common/ -│ ├── projectTestExecution.unit.test.ts <-- NEW -│ ├── testProjectRegistry.unit.test.ts (existing, extend if needed) -│ └── projectUtils.unit.test.ts (existing) -├── pytest/ -│ └── pytestExecutionAdapter.unit.test.ts (extend) -├── debugLauncher.unit.test.ts (extend in common/) -└── controller.unit.test.ts (extend) -``` - -### 2. Shared Test Utilities - -Create a helper file for project-based test utilities: - -```typescript -// src/test/testing/testController/common/projectTestHelpers.ts - -import { TestItem, Uri } from 'vscode'; -import { ProjectAdapter } from '../../../../client/testing/testController/common/projectAdapter'; - -export function createMockTestItem(id: string, uriPath: string, children?: TestItem[]): TestItem { - const childMap = new Map(); - children?.forEach(c => childMap.set(c.id, c)); - - return { - id, - uri: Uri.file(uriPath), - children: { - size: childMap.size, - forEach: (cb: (item: TestItem) => void) => childMap.forEach(cb) - } - } as unknown as TestItem; -} - -export function createMockProjectAdapter(config: { - projectPath: string; - projectName: string; - pythonPath?: string; - testProvider?: 'pytest' | 'unittest'; -}): ProjectAdapter { - return { - projectUri: Uri.file(config.projectPath), - projectName: config.projectName, - testProvider: config.testProvider ?? 'pytest', - pythonEnvironment: config.pythonPath ? { - execInfo: { run: { executable: config.pythonPath } } - } : undefined, - executionAdapter: { - runTests: sinon.stub().resolves() - }, - resultResolver: { - vsIdToRunId: new Map(), - detailedCoverageMap: new Map() - } - } as unknown as ProjectAdapter; -} - -export function createMockDependencies(): ProjectExecutionDependencies { - return { - projectRegistry: typemoq.Mock.ofType().object, - pythonExecFactory: typemoq.Mock.ofType().object, - debugLauncher: typemoq.Mock.ofType().object - }; -} -``` - -### 3. Test Priority Order - -1. 
**HIGH PRIORITY** - Core logic tests: - - `groupTestItemsByProject()` - All cases - - `findProjectForTestItem()` - All cases - - `executeTestsForProject()` - Basic flow - - Debug session isolation tests - -2. **MEDIUM PRIORITY** - Integration tests: - - Multi-project execution flow - - Cancellation propagation - - Error handling - -3. **LOWER PRIORITY** - Edge cases: - - Windows path handling - - Coverage setup - - Telemetry verification - -### 4. Mocking Strategy - -| Component | Mock Type | Reason | -|-----------|-----------|--------| -| `TestItem` | Custom object | Simple interface | -| `ProjectAdapter` | Custom object | Many optional fields | -| `TestRun` | TypeMoq | Verify method calls | -| `IPythonExecutionFactory` | TypeMoq | Interface with promises | -| `ITestDebugLauncher` | TypeMoq | Interface with callbacks | -| `IDebugService` | TypeMoq | Complex async behavior | -| Utility functions (`util.*`) | Sinon stub | Replace implementation | - -### 5. Async Testing Patterns - -```typescript -// Pattern for testing cancellation -test('should stop on cancellation', async () => { - const token = new CancellationTokenSource(); - const deferredExecution = createDeferred(); - - mockAdapter.runTests.callsFake(async () => { - token.cancel(); // Cancel during execution - await deferredExecution.promise; - }); - - // Should complete without hanging - await executeTestsForProjects(projects, items, runInstance, request, token.token, deps); - - // Verify correct behavior - expect(log).to.include('cancelled'); -}); - -// Pattern for parallel execution verification -test('should execute projects in parallel', async () => { - const executionOrder: string[] = []; - const deferreds = projects.map(() => createDeferred()); - - projects.forEach((p, i) => { - p.executionAdapter.runTests.callsFake(async () => { - executionOrder.push(`start-${i}`); - await deferreds[i].promise; - executionOrder.push(`end-${i}`); - }); - }); - - const executePromise = executeTestsForProjects(...); - - // All should have started before any completed - await new Promise(r => setTimeout(r, 10)); - expect(executionOrder).to.deep.equal(['start-0', 'start-1', 'start-2']); - - // Resolve all - deferreds.forEach(d => d.resolve()); - await executePromise; -}); -``` - ---- - -## Summary - -| Test Category | Estimated Test Count | Effort | -|---------------|---------------------|--------| -| `projectTestExecution.unit.test.ts` (new) | ~25 tests | Medium | -| `pytestExecutionAdapter.unit.test.ts` (extend) | ~5 tests | Low | -| `debugLauncher.unit.test.ts` (extend) | ~8 tests | Medium | -| `controller.unit.test.ts` (extend) | ~3 tests | Low | -| Integration tests (optional) | ~10 tests | High | -| **Total** | **~50 tests** | - | - -The primary focus should be on the new `projectTestExecution.unit.test.ts` file, as it contains all the new orchestration logic. The debug launcher session isolation tests are also critical since they fix a real bug. diff --git a/plan-project-based-exec.md b/plan-project-based-exec.md deleted file mode 100644 index 3a3d2a9040f5..000000000000 --- a/plan-project-based-exec.md +++ /dev/null @@ -1,217 +0,0 @@ -# Plan: Project-Based Pytest Execution - -## Overview - -This plan describes the implementation of **project-based test execution for pytest**, enabling multi-project workspace support where each Python project within a workspace can execute tests using its own Python environment. This builds on top of the project-based discovery work from PR #25760. 
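-
-For orientation, here is a minimal sketch of the central dispatch idea (selecting the owning project by longest matching root path). The helper name, import paths, and exact signatures are illustrative, not the shipped API:
-
-```typescript
-import { TestItem } from 'vscode';
-// Import path is illustrative; the real type lives in the testController tree.
-import { ProjectAdapter } from './src/client/testing/testController/common/projectAdapter';
-
-// Hedged sketch: pick the owning project for a test item by choosing the
-// project whose root URI is the longest prefix of the item's URI.
-function pickProject(item: TestItem, projects: ProjectAdapter[]): ProjectAdapter | undefined {
-    if (!item.uri) {
-        return undefined;
-    }
-    let best: ProjectAdapter | undefined;
-    for (const project of projects) {
-        const root = project.projectUri.fsPath;
-        // Real matching should be path-segment aware ('/ws/proj' must not
-        // claim '/ws/proj2'); this sketch ignores that for brevity.
-        if (item.uri.fsPath.startsWith(root)) {
-            if (!best || root.length > best.projectUri.fsPath.length) {
-                best = project; // prefer the most specific (deepest) root
-            }
-        }
-    }
-    return best;
-}
-```
-
-In the implementation this role is played by `findProjectForTestItem()` and `groupTestItemsByProject()` in `projectTestExecution.ts`.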
- -## Problem to Solve - -In a multi-project workspace (e.g., a monorepo with multiple Python services), users currently cannot: -- Run tests with the correct Python interpreter for each project -- Have separate test trees per project in the Test Explorer -- Properly handle nested projects (parent/child) - -## Architecture - -### Key Components to Add - -| Component | File | Purpose | -| ----------------------- | -------------------------------------------------------------------------------------------- | --------------------------------------------------------------------------- | -| **TestProjectRegistry** | [testProjectRegistry.ts](../src/client/testing/testController/common/testProjectRegistry.ts) | Registry that discovers and manages Python projects per workspace | -| **ProjectAdapter** | [projectAdapter.ts](../src/client/testing/testController/common/projectAdapter.ts) | Interface representing a single Python project with its test infrastructure | -| **projectUtils** | [projectUtils.ts](../src/client/testing/testController/common/projectUtils.ts) | Utility functions for project ID generation and adapter creation | - -### How It Works - -``` -┌─────────────────────────────────────────────────────────────────┐ -│ VS Code Workspace │ -│ ┌─────────────────────────────────────────────────────────────┐│ -│ │ TestController ││ -│ │ ┌───────────────────────────────────────────────────────┐ ││ -│ │ │ TestProjectRegistry │ ││ -│ │ │ ┌─────────────┐ ┌─────────────┐ ┌─────────────┐ │ ││ -│ │ │ │ ProjectA │ │ ProjectB │ │ ProjectC │ │ ││ -│ │ │ │ (Py 3.11) │ │ (Py 3.12) │ │ (Py 3.10) │ │ ││ -│ │ │ │ ┌─────────┐ │ │ ┌─────────┐ │ │ ┌─────────┐ │ │ ││ -│ │ │ │ │Discovery│ │ │ │Discovery│ │ │ │Discovery│ │ │ ││ -│ │ │ │ │Adapter │ │ │ │Adapter │ │ │ │Adapter │ │ │ ││ -│ │ │ │ ├─────────┤ │ │ ├─────────┤ │ │ ├─────────┤ │ │ ││ -│ │ │ │ │Execution│ │ │ │Execution│ │ │ │Execution│ │ │ ││ -│ │ │ │ │Adapter │ │ │ │Adapter │ │ │ │Adapter │ │ │ ││ -│ │ │ │ └─────────┘ │ │ └─────────┘ │ │ └─────────┘ │ │ ││ -│ │ │ └─────────────┘ └─────────────┘ └─────────────┘ │ ││ -│ │ └───────────────────────────────────────────────────────┘ ││ -│ └─────────────────────────────────────────────────────────────┘│ -└─────────────────────────────────────────────────────────────────┘ -``` - -### Execution Flow - -1. **User runs tests** → `TestRunRequest` with selected `TestItem`s arrives -2. **Controller** checks if project-based testing is enabled -3. **Group tests by project** → Tests are sorted by which `ProjectAdapter` they belong to (via URI matching) -4. **Execute per project** → Each project's `executionAdapter.runTests()` is called with: - - The project's Python environment - - `PROJECT_ROOT_PATH` environment variable set to project root -5. 
**Results collected** → Each project's `resultResolver` maps results back to test items - -### Required Changes by File - -#### Controller ([controller.ts](../src/client/testing/testController/controller.ts)) -- Add `TestProjectRegistry` integration -- New methods: `discoverForProject()`, `executeTestsForProjects()`, `groupTestItemsByProject()` -- Debug mode should handle multi-project scenarios by launching multiple debug sessions - -#### Pytest Execution Adapter ([pytestExecutionAdapter.ts](../src/client/testing/testController/pytest/pytestExecutionAdapter.ts)) -- Add `project?: ProjectAdapter` parameter to `runTests()` -- Set `PROJECT_ROOT_PATH` environment variable when project is provided -- Use project's Python environment instead of workspace environment -- Debug launches should use `pythonPath` from project when available - -#### Debug Launcher ([debugLauncher.ts](../src/client/testing/common/debugLauncher.ts)) -- Add optional `pythonPath` to `LaunchOptions` for project-specific interpreter -- Add optional `debugSessionName` to `LaunchOptions` for session identification -- Debug sessions should use explicit Python path when provided -- Use unique session markers to track individual debug sessions (avoids `activeDebugSession` race conditions) -- Properly dispose event handlers when debugging completes - -#### Python Side ([vscode_pytest/__init__.py](../python_files/vscode_pytest/__init__.py)) -- `get_test_root_path()` should return `PROJECT_ROOT_PATH` env var if set (otherwise cwd) -- Session node should use project root for test tree structure - -## Feature Behavior - -### Single Project Workspace -No change from existing behavior—tests run using the workspace's interpreter. - -### Multi-Project Workspace -- Each project has its own root node in Test Explorer -- Running tests uses the correct interpreter for each project -- Results are scoped to the correct project - -### Nested Projects -``` -workspace/ -└── parent-project/ - ├── tests/ - └── child-project/ - └── tests/ -``` -- Parent project discovery ignores child project via `--ignore` flags -- Execution receives specific test IDs, so no cross-contamination - -### Debug Mode -- **Single project**: Debug should proceed normally with project interpreter -- **Multiple projects**: Multiple debug sessions should be launched in parallel—one per project, each using its own interpreter -- **Session naming**: Each debug session includes the project name (e.g., "Debug Tests: alice (Python 3.11)") -- **Session isolation**: Each debug session is tracked independently using unique markers, so stopping one session doesn't affect others - -### Cancellation Handling - -Cancellation is handled at multiple levels to ensure proper cleanup across all parallel project executions: - -``` -┌─────────────────────────────────────────────────────────────────┐ -│ User Clicks "Stop" │ -│ │ │ -│ ▼ │ -│ CancellationToken fires │ -│ │ │ -│ ┌─────────────────┼─────────────────┐ │ -│ ▼ ▼ ▼ │ -│ ┌───────────┐ ┌───────────┐ ┌───────────┐ │ -│ │ Project A │ │ Project B │ │ Project C │ │ -│ │ Execution │ │ Execution │ │ Execution │ │ -│ └─────┬─────┘ └─────┬─────┘ └─────┬─────┘ │ -│ │ │ │ │ -│ ▼ ▼ ▼ │ -│ Kill subprocess Kill subprocess Kill subprocess │ -│ Close pipes Close pipes Close pipes │ -│ Resolve deferred Resolve deferred Resolve deferred │ -└─────────────────────────────────────────────────────────────────┘ -``` - -#### Cancellation Levels - -1. 
**Project execution level** ([projectTestExecution.ts](src/client/testing/testController/common/projectTestExecution.ts)) - - Early exit if cancelled before starting - - Checks cancellation before starting each project's execution - - Projects not yet started are skipped gracefully - -2. **Execution adapter level** ([pytestExecutionAdapter.ts](src/client/testing/testController/pytest/pytestExecutionAdapter.ts)) - - `runInstance.token.onCancellationRequested` kills the subprocess - - Named pipe server is closed via the callback - - Deferred promises resolve to unblock waiting code - -3. **Debug launcher level** ([debugLauncher.ts](src/client/testing/common/debugLauncher.ts)) - - Token cancellation resolves the deferred and invokes cleanup callback - - Session termination events are filtered to only react to the correct session - - Event handlers are disposed when debugging completes - -#### Multi-Session Debug Independence - -When debugging multiple projects simultaneously, each `launchDebugger()` call must track its own debug session independently. The implementation uses a unique marker system: - -```typescript -// Each debug session gets a unique marker in its configuration -const sessionMarker = `test-${Date.now()}-${random}`; -launchArgs[TEST_SESSION_MARKER_KEY] = sessionMarker; - -// When sessions start/terminate, we match by marker (not activeDebugSession) -onDidStartDebugSession((session) => { - if (session.configuration[TEST_SESSION_MARKER_KEY] === sessionMarker) { - ourSession = session; // Found our specific session - } -}); -``` - -This avoids race conditions where the global `activeDebugSession` could be overwritten by another concurrent session start. - -### Legacy Fallback -When Python Environments API is unavailable, the system falls back to single-workspace adapter mode. 
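
To make the session-isolation contract above concrete, here is a minimal sketch of the termination side of the marker pattern. It is illustrative only: `waitForTestSessionEnd` is a hypothetical helper, and the marker key literal mirrors the `__vscodeTestSessionMarker` key exercised in the unit tests rather than a public API.

```typescript
import * as vscode from 'vscode';

// Hypothetical constant for illustration; the real key lives in debugLauncher.ts.
const TEST_SESSION_MARKER_KEY = '__vscodeTestSessionMarker';

// Resolves only when the session carrying `sessionMarker` terminates;
// terminations of other concurrent test sessions are ignored.
function waitForTestSessionEnd(sessionMarker: string): Promise<void> {
    return new Promise((resolve) => {
        const endSub = vscode.debug.onDidTerminateDebugSession((session) => {
            if (session.configuration[TEST_SESSION_MARKER_KEY] !== sessionMarker) {
                return; // another project's debug session; keep waiting
            }
            endSub.dispose(); // clean up the handler once our session ends
            resolve();
        });
    });
}
```

Because each concurrent launch owns its own marker and its own listener, stopping one project's session resolves only that project's wait.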
- -## Files to Change - -| Category | Files | -| ----------------------- | ------------------------------------------------------------------------------------------------------------ | -| **Core Implementation** | `controller.ts`, `testProjectRegistry.ts`, `projectAdapter.ts`, `projectUtils.ts`, `projectTestExecution.ts` | -| **Adapters** | `pytestExecutionAdapter.ts`, `pytestDiscoveryAdapter.ts`, `resultResolver.ts` | -| **Types** | `types.ts` (common), `types.ts` (testController) | -| **Debug** | `debugLauncher.ts` | -| **Python** | `vscode_pytest/__init__.py` | -| **Tests** | `controller.unit.test.ts`, `testProjectRegistry.unit.test.ts`, `projectUtils.unit.test.ts` | - -## Testing - -### Unit Tests to Add -- `testProjectRegistry.unit.test.ts` - Registry lifecycle, project discovery, nested projects -- `controller.unit.test.ts` - Controller integration, debug scenarios, test grouping -- `projectUtils.unit.test.ts` - Utility functions - -### Test Scenarios to Cover -| Scenario | Coverage | -| ----------------------------- | --------------------------------------------- | -| Single project workspace | Unit tests + legacy flows | -| Multi-project workspace | New controller unit tests | -| Nested projects | Discovery tests + ignore behavior | -| Debug mode (single project) | Existing debug tests | -| Debug mode (multi-project) | Session isolation, independent cancellation | -| Legacy fallback | Existing controller tests | -| Test cancellation | Cancellation at all levels (see above) | - -## Out of Scope -- **Unittest support**: Project-based unittest execution will be handled in a separate PR -- **End-to-end tests**: Manual testing will be required for full validation -- **Multi-project coverage aggregation**: Deferred to future work - -## Expected User Experience - -### Debugging Across Multiple Projects -When debugging tests spanning multiple projects: -- Multiple debug sessions should be launched simultaneously—one per project -- Each debug session should use the project's configured Python interpreter -- All projects' tests should run in debug mode in parallel -- Users should be able to switch between debug sessions in VS Code's debug panel -- **Stopping one debug session should NOT affect other running sessions** -- Each debug session is named with its project (e.g., "Debug Tests: alice (Python 3.11)") From ecc92c4221863f1bdbea256cf72553e62e552e8f Mon Sep 17 00:00:00 2001 From: eleanorjboyd <26030610+eleanorjboyd@users.noreply.github.com> Date: Fri, 6 Feb 2026 14:45:28 -0800 Subject: [PATCH 3/9] updates --- src/client/testing/common/debugLauncher.ts | 22 +++-- src/client/testing/common/types.ts | 14 ++- .../common/projectTestExecution.ts | 96 +++++++++++++++++-- .../pytest/pytestExecutionAdapter.ts | 6 +- 4 files changed, 113 insertions(+), 25 deletions(-) diff --git a/src/client/testing/common/debugLauncher.ts b/src/client/testing/common/debugLauncher.ts index 51e46f04196e..24892d6afe3d 100644 --- a/src/client/testing/common/debugLauncher.ts +++ b/src/client/testing/common/debugLauncher.ts @@ -17,6 +17,7 @@ import { getWorkspaceFolder, getWorkspaceFolders } from '../../common/vscodeApis import { showErrorMessage } from '../../common/vscodeApis/windowApis'; import { createDeferred } from '../../common/utils/async'; import { addPathToPythonpath } from './helpers'; +import * as envExtApi from '../../envExt/api.internal'; /** * Key used to mark debug configurations with a unique session identifier. 
@@ -186,8 +187,8 @@ export class DebugLauncher implements ITestDebugLauncher { } // Use project name in debug session name if provided - if (options.debugSessionName) { - debugConfig.name = `Debug Tests: ${options.debugSessionName}`; + if (options.project) { + debugConfig.name = `Debug Tests: ${options.project.name}`; } if (!debugConfig.rules) { @@ -339,11 +340,20 @@ export class DebugLauncher implements ITestDebugLauncher { // run via F5 style debugging. launchArgs.purpose = []; - // For project-based execution, use the explicit Python path if provided. + // For project-based execution, get the Python path from the project's environment. // This ensures debug sessions use the correct interpreter for each project. - if (options.pythonPath) { - launchArgs.python = options.pythonPath; - traceVerbose(`[test-by-project] Debug session using explicit Python path: ${options.pythonPath}`); + if (options.project && envExtApi.useEnvExtension()) { + try { + const pythonEnv = await envExtApi.getEnvironment(options.project.uri); + if (pythonEnv?.execInfo?.run?.executable) { + launchArgs.python = pythonEnv.execInfo.run.executable; + traceVerbose( + `[test-by-project] Debug session using Python path from project: ${launchArgs.python}`, + ); + } + } catch (error) { + traceVerbose(`[test-by-project] Could not get environment for project, using default: ${error}`); + } } return launchArgs; diff --git a/src/client/testing/common/types.ts b/src/client/testing/common/types.ts index 49034d9f5f6f..4f999305c3ee 100644 --- a/src/client/testing/common/types.ts +++ b/src/client/testing/common/types.ts @@ -2,6 +2,7 @@ import { CancellationToken, DebugSessionOptions, OutputChannel, Uri } from 'vsco import { Product } from '../../common/types'; import { TestSettingsPropertyNames } from '../configuration/types'; import { TestProvider } from '../types'; +import { PythonProject } from '../../envExt/types'; export type UnitTestProduct = Product.pytest | Product.unittest; @@ -27,15 +28,12 @@ export type LaunchOptions = { pytestUUID?: string; runTestIdsPort?: string; /** - * Optional explicit Python path for project-based execution. - * When provided, debug sessions should use this interpreter instead of the workspace default. + * Optional Python project for project-based execution. + * When provided, the debug launcher will: + * - Use the project's associated Python environment + * - Name the debug session after the project */ - pythonPath?: string; - /** - * Optional name for the debug session (e.g., project name). - * Used to identify debug sessions in the VS Code debug panel. - */ - debugSessionName?: string; + project?: PythonProject; }; export enum TestFilter { diff --git a/src/client/testing/testController/common/projectTestExecution.ts b/src/client/testing/testController/common/projectTestExecution.ts index 12652f6de23c..a11402ed3279 100644 --- a/src/client/testing/testController/common/projectTestExecution.ts +++ b/src/client/testing/testController/common/projectTestExecution.ts @@ -10,6 +10,7 @@ import { ITestDebugLauncher } from '../../common/types'; import { ProjectAdapter } from './projectAdapter'; import { TestProjectRegistry } from './testProjectRegistry'; import { getProjectId } from './projectUtils'; +import { getEnvExtApi, useEnvExtension } from '../../../envExt/api.internal'; /** * Dependencies required for project-based test execution. 
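
Taken together, these hunks move interpreter selection out of the callers: an adapter now hands the launcher the whole `PythonProject`, and the launcher derives both the interpreter and the session name from it. A hedged caller-side sketch under those assumptions (the helper name and import paths are illustrative, not part of this change):

```typescript
import { LaunchOptions } from '../../common/types';
import { PythonProject } from '../../../envExt/types';

// Illustrative helper: callers no longer compute pythonPath or
// debugSessionName themselves; they simply forward the project.
function buildDebugLaunchOptions(cwd: string, args: string[], project?: PythonProject): LaunchOptions {
    return {
        cwd,
        args,
        testProvider: 'pytest',
        // When set, DebugLauncher resolves the interpreter via the env
        // extension API and names the session "Debug Tests: <project.name>".
        project,
    };
}
```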
@@ -50,7 +51,7 @@ export async function executeTestsForProjects(
     }
 
     // Group test items by project
-    const testsByProject = groupTestItemsByProject(testItems, projects);
+    const testsByProject = await groupTestItemsByProject(testItems, projects);
 
     const isDebugMode = request.profile?.kind === TestRunProfileKind.Debug;
     traceInfo(`[test-by-project] Executing tests across ${testsByProject.size} project(s), debug=${isDebugMode}`);
@@ -99,13 +100,33 @@
 }
 
 /**
- * Groups test items by their owning project based on file path matching.
- * Each test item's URI is matched against project root paths.
+ * Lookup context for project resolution during a single test run.
+ * Maps file paths to their resolved ProjectAdapter to avoid
+ * repeated API calls and linear searches.
+ * Created fresh per run and discarded after grouping completes.
  */
-export function groupTestItemsByProject(
+interface ProjectLookupContext {
+    /** Maps file URI fsPath → resolved ProjectAdapter (or undefined if no match) */
+    uriToAdapter: Map<string, ProjectAdapter | undefined>;
+    /** Maps project URI fsPath → ProjectAdapter for O(1) adapter lookup */
+    projectPathToAdapter: Map<string, ProjectAdapter>;
+}
+
+/**
+ * Groups test items by their owning project using the Python Environment API.
+ * Each test item's URI is matched to a project via the API's getPythonProject method.
+ * Falls back to path-based matching when the extension API is not available.
+ *
+ * Uses a per-run cache to avoid redundant API calls for test items sharing the same file.
+ *
+ * Time complexity: O(n + p) amortized, where n = test items, p = projects
+ * - Building adapter lookup map: O(p)
+ * - Each test item: O(1) amortized (cached after first lookup per unique file)
+ */
+export async function groupTestItemsByProject(
     testItems: TestItem[],
     projects: ProjectAdapter[],
-): Map<string, { project: ProjectAdapter; items: TestItem[] }> {
+): Promise<Map<string, { project: ProjectAdapter; items: TestItem[] }>> {
     const result = new Map();
 
     // Initialize entries for all projects
@@ -113,9 +134,15 @@
         result.set(getProjectId(project.projectUri), { project, items: [] });
     }
 
+    // Build lookup context for this run - O(p) setup, enables O(1) lookups
+    const lookupContext: ProjectLookupContext = {
+        uriToAdapter: new Map(),
+        projectPathToAdapter: new Map(projects.map((p) => [p.projectUri.fsPath, p])),
+    };
+
     // Assign each test item to its project
     for (const item of testItems) {
-        const project = findProjectForTestItem(item, projects);
+        const project = await findProjectForTestItem(item, projects, lookupContext);
         if (project) {
             const entry = result.get(getProjectId(project.projectUri));
             if (entry) {
@@ -139,9 +166,64 @@
 
 /**
  * Finds the project that owns a test item based on the test item's URI.
+ * Uses the Python Environment extension API when available, falling back
+ * to path-based matching (longest matching path prefix).
+ *
+ * Results are stored in the lookup context to avoid redundant API calls for items in the same file.
+ * Time complexity: O(1) amortized with context, O(p) worst case on context miss.
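+ *
+ * Illustrative usage (hypothetical caller, not part of this change):
+ *   const ctx: ProjectLookupContext = { uriToAdapter: new Map(), projectPathToAdapter: new Map() };
+ *   const owner = await findProjectForTestItem(item, projects, ctx);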
+ */
+export async function findProjectForTestItem(
+    item: TestItem,
+    projects: ProjectAdapter[],
+    lookupContext?: ProjectLookupContext,
+): Promise<ProjectAdapter | undefined> {
+    if (!item.uri) return undefined;
+
+    const uriPath = item.uri.fsPath;
+
+    // Check lookup context first - O(1)
+    if (lookupContext?.uriToAdapter.has(uriPath)) {
+        return lookupContext.uriToAdapter.get(uriPath);
+    }
+
+    let result: ProjectAdapter | undefined;
+
+    // Try using the Python Environment extension API first
+    if (useEnvExtension()) {
+        try {
+            const envExtApi = await getEnvExtApi();
+            const pythonProject = envExtApi.getPythonProject(item.uri);
+            if (pythonProject) {
+                // Use lookup context for O(1) adapter lookup instead of O(p) linear search
+                result = lookupContext?.projectPathToAdapter.get(pythonProject.uri.fsPath);
+                if (!result) {
+                    // Fallback to linear search if lookup context not available
+                    result = projects.find((p) => p.projectUri.fsPath === pythonProject.uri.fsPath);
+                }
+            }
+        } catch (error) {
+            traceVerbose(`[test-by-project] Failed to use env extension API, falling back to path matching: ${error}`);
+        }
+    }
+
+    // Fallback: path-based matching (most specific/longest path wins)
+    if (!result) {
+        result = findProjectByPath(item, projects);
+    }
+
+    // Store result for future lookups of same file within this run - O(1)
+    if (lookupContext) {
+        lookupContext.uriToAdapter.set(uriPath, result);
+    }
+
+    return result;
+}
+
+/**
+ * Finds the project that owns a test item using path-based matching.
  * Returns the most specific (longest path) matching project.
  */
-export function findProjectForTestItem(item: TestItem, projects: ProjectAdapter[]): ProjectAdapter | undefined {
+function findProjectByPath(item: TestItem, projects: ProjectAdapter[]): ProjectAdapter | undefined {
     if (!item.uri) return undefined;
 
     const itemPath = item.uri.fsPath;
diff --git a/src/client/testing/testController/pytest/pytestExecutionAdapter.ts b/src/client/testing/testController/pytest/pytestExecutionAdapter.ts
index 99257238ee26..34e4c5933c72 100644
--- a/src/client/testing/testController/pytest/pytestExecutionAdapter.ts
+++ b/src/client/testing/testController/pytest/pytestExecutionAdapter.ts
@@ -176,10 +176,8 @@ export class PytestTestExecutionAdapter implements ITestExecutionAdapter {
             testProvider: PYTEST_PROVIDER,
             runTestIdsPort: testIdsFileName,
             pytestPort: resultNamedPipeName,
-            // Pass explicit Python path for project-based debugging
-            pythonPath: project?.pythonEnvironment.execInfo?.run?.executable,
-            // Pass project name for debug session identification
-            debugSessionName: project?.projectName,
+            // Pass project for project-based debugging (Python path and session name derived from this)
+            project: project?.pythonProject,
         };
         const sessionOptions: DebugSessionOptions = {
             testRun: runInstance,

From 2cd8bc7b5510c19fbbcfbe040ce009e217c04f82 Mon Sep 17 00:00:00 2001
From: eleanorjboyd <26030610+eleanorjboyd@users.noreply.github.com>
Date: Fri, 6 Feb 2026 14:46:20 -0800
Subject: [PATCH 4/9] test mocking

---
 .../pytestExecutionAdapter.unit.test.ts       |  46 ++----
 src/test/testing/testController/testMocks.ts  | 152 ++++++++++++++++++
 2 files changed, 169 insertions(+), 29 deletions(-)
 create mode 100644 src/test/testing/testController/testMocks.ts

diff --git a/src/test/testing/testController/pytest/pytestExecutionAdapter.unit.test.ts b/src/test/testing/testController/pytest/pytestExecutionAdapter.unit.test.ts
index 884916182e04..40c701b22641 100644
--- a/src/test/testing/testController/pytest/pytestExecutionAdapter.unit.test.ts
+++ 
b/src/test/testing/testController/pytest/pytestExecutionAdapter.unit.test.ts @@ -22,7 +22,7 @@ import { EXTENSION_ROOT_DIR } from '../../../../client/constants'; import { MockChildProcess } from '../../../mocks/mockChildProcess'; import { traceInfo } from '../../../../client/logging'; import * as extapi from '../../../../client/envExt/api.internal'; -import { ProjectAdapter } from '../../../../client/testing/testController/common/projectAdapter'; +import { createMockProjectAdapter } from '../testMocks'; suite('pytest test execution adapter', () => { let useEnvExtensionStub: sinon.SinonStub; @@ -330,27 +330,6 @@ suite('pytest test execution adapter', () => { // ===== PROJECT-BASED EXECUTION TESTS ===== suite('project-based execution', () => { - function createMockProjectAdapter(projectPath: string, projectName: string): ProjectAdapter { - return ({ - projectUri: Uri.file(projectPath), - projectName, - workspaceUri: Uri.file(projectPath), - testProvider: 'pytest', - pythonEnvironment: { - execInfo: { run: { executable: '/custom/python/path' } }, - }, - pythonProject: { - name: projectName, - uri: Uri.file(projectPath), - }, - executionAdapter: {}, - discoveryAdapter: {}, - resultResolver: {}, - isDiscovering: false, - isExecuting: false, - } as unknown) as ProjectAdapter; - } - test('should set PROJECT_ROOT_PATH env var when project provided', async () => { const deferred2 = createDeferred(); const deferred3 = createDeferred(); @@ -369,7 +348,11 @@ suite('pytest test execution adapter', () => { testRun.setup((t) => t.token).returns(() => ({ onCancellationRequested: () => undefined } as any)); const projectPath = path.join('/', 'workspace', 'myproject'); - const mockProject = createMockProjectAdapter(projectPath, 'myproject'); + const mockProject = createMockProjectAdapter({ + projectPath, + projectName: 'myproject', + pythonPath: '/custom/python/path', + }); const uri = Uri.file(myTestPath); adapter = new PytestTestExecutionAdapter(configService); @@ -427,7 +410,11 @@ suite('pytest test execution adapter', () => { ); const projectPath = path.join('/', 'workspace', 'myproject'); - const mockProject = createMockProjectAdapter(projectPath, 'myproject (Python 3.11)'); + const mockProject = createMockProjectAdapter({ + projectPath, + projectName: 'myproject (Python 3.11)', + pythonPath: '/custom/python/path', + }); const uri = Uri.file(myTestPath); adapter = new PytestTestExecutionAdapter(configService); @@ -448,8 +435,10 @@ suite('pytest test execution adapter', () => { (x) => x.launchDebugger( typeMoq.It.is((launchOptions) => { - assert.equal(launchOptions.debugSessionName, 'myproject (Python 3.11)'); - assert.equal(launchOptions.pythonPath, '/custom/python/path'); + // Project should be passed for project-based debugging + assert.ok(launchOptions.project, 'project should be defined'); + assert.equal(launchOptions.project?.name, 'myproject (Python 3.11)'); + assert.equal(launchOptions.project?.uri.fsPath, projectPath); return true; }), typeMoq.It.isAny(), @@ -499,7 +488,7 @@ suite('pytest test execution adapter', () => { ); }); - test('should not set debugSessionName or pythonPath in LaunchOptions when no project', async () => { + test('should not set project in LaunchOptions when no project provided', async () => { const deferred3 = createDeferred(); utilsWriteTestIdsFileStub.callsFake(() => Promise.resolve('testIdPipe-mockName')); @@ -533,8 +522,7 @@ suite('pytest test execution adapter', () => { (x) => x.launchDebugger( typeMoq.It.is((launchOptions) => { - 
assert.equal(launchOptions.debugSessionName, undefined); - assert.equal(launchOptions.pythonPath, undefined); + assert.equal(launchOptions.project, undefined); return true; }), typeMoq.It.isAny(), diff --git a/src/test/testing/testController/testMocks.ts b/src/test/testing/testController/testMocks.ts new file mode 100644 index 000000000000..eb37d492f1d9 --- /dev/null +++ b/src/test/testing/testController/testMocks.ts @@ -0,0 +1,152 @@ +// Copyright (c) Microsoft Corporation. All rights reserved. +// Licensed under the MIT License. + +/** + * Centralized mock utilities for testing testController components. + * Re-use these helpers across multiple test files for consistency. + */ + +import * as sinon from 'sinon'; +import * as typemoq from 'typemoq'; +import { TestItem, TestItemCollection, TestRun, Uri } from 'vscode'; +import { IPythonExecutionFactory } from '../../../client/common/process/types'; +import { ITestDebugLauncher } from '../../../client/testing/common/types'; +import { ProjectAdapter } from '../../../client/testing/testController/common/projectAdapter'; +import { ProjectExecutionDependencies } from '../../../client/testing/testController/common/projectTestExecution'; +import { TestProjectRegistry } from '../../../client/testing/testController/common/testProjectRegistry'; +import { ITestExecutionAdapter, ITestResultResolver } from '../../../client/testing/testController/common/types'; + +/** + * Creates a mock TestItem with configurable properties. + * @param id - The unique ID of the test item + * @param uriPath - The file path for the test item's URI + * @param children - Optional array of child test items + */ +export function createMockTestItem(id: string, uriPath: string, children?: TestItem[]): TestItem { + const childMap = new Map(); + children?.forEach((c) => childMap.set(c.id, c)); + + const mockChildren: TestItemCollection = { + size: childMap.size, + forEach: (callback: (item: TestItem, collection: TestItemCollection) => void) => { + childMap.forEach((item) => callback(item, mockChildren)); + }, + get: (itemId: string) => childMap.get(itemId), + add: () => {}, + delete: () => {}, + replace: () => {}, + [Symbol.iterator]: function* () { + for (const [key, value] of childMap) { + yield [key, value] as [string, TestItem]; + } + }, + } as TestItemCollection; + + return ({ + id, + uri: Uri.file(uriPath), + children: mockChildren, + label: id, + canResolveChildren: false, + busy: false, + tags: [], + range: undefined, + error: undefined, + parent: undefined, + } as unknown) as TestItem; +} + +/** + * Creates a mock TestItem without a URI. + * Useful for testing edge cases where test items have no associated file. + * @param id - The unique ID of the test item + */ +export function createMockTestItemWithoutUri(id: string): TestItem { + return ({ + id, + uri: undefined, + children: ({ size: 0, forEach: () => {} } as unknown) as TestItemCollection, + label: id, + } as unknown) as TestItem; +} + +export interface MockProjectAdapterConfig { + projectPath: string; + projectName: string; + pythonPath?: string; + testProvider?: 'pytest' | 'unittest'; +} + +export type MockProjectAdapter = ProjectAdapter & { executionAdapterStub: sinon.SinonStub }; + +/** + * Creates a mock ProjectAdapter for testing project-based test execution. 
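+ *
+ * Example (mirrors the grouping unit tests; illustrative):
+ *   const project = createMockProjectAdapter({ projectPath: '/workspace/proj', projectName: 'proj' });
+ *   // ...exercise code under test, then verify:
+ *   sinon.assert.calledOnce(project.executionAdapterStub);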
+ * @param config - Configuration object with project details
+ * @returns A mock ProjectAdapter with an exposed executionAdapterStub for verification
+ */
+export function createMockProjectAdapter(config: MockProjectAdapterConfig): MockProjectAdapter {
+    const runTestsStub = sinon.stub().resolves();
+    const executionAdapter: ITestExecutionAdapter = ({
+        runTests: runTestsStub,
+    } as unknown) as ITestExecutionAdapter;
+
+    const resultResolverMock: ITestResultResolver = ({
+        vsIdToRunId: new Map(),
+        runIdToVSid: new Map(),
+        runIdToTestItem: new Map(),
+        detailedCoverageMap: new Map(),
+        resolveDiscovery: () => Promise.resolve(),
+        resolveExecution: () => {},
+    } as unknown) as ITestResultResolver;
+
+    const adapter = ({
+        projectUri: Uri.file(config.projectPath),
+        projectName: config.projectName,
+        workspaceUri: Uri.file(config.projectPath),
+        testProvider: config.testProvider ?? 'pytest',
+        pythonEnvironment: config.pythonPath
+            ? {
+                  execInfo: { run: { executable: config.pythonPath } },
+              }
+            : undefined,
+        pythonProject: {
+            name: config.projectName,
+            uri: Uri.file(config.projectPath),
+        },
+        executionAdapter,
+        discoveryAdapter: {} as any,
+        resultResolver: resultResolverMock,
+        isDiscovering: false,
+        isExecuting: false,
+        // Expose the stub for testing
+        executionAdapterStub: runTestsStub,
+    } as unknown) as MockProjectAdapter;
+
+    return adapter;
+}
+
+/**
+ * Creates mock dependencies for project test execution.
+ * @returns An object containing mocked ProjectExecutionDependencies
+ */
+export function createMockDependencies(): ProjectExecutionDependencies {
+    return {
+        projectRegistry: typemoq.Mock.ofType<TestProjectRegistry>().object,
+        pythonExecFactory: typemoq.Mock.ofType<IPythonExecutionFactory>().object,
+        debugLauncher: typemoq.Mock.ofType<ITestDebugLauncher>().object,
+    };
+}
+
+/**
+ * Creates a mock TestRun with common setup methods.
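+ *
+ * Usage sketch (hypothetical adapter call; illustrative only):
+ *   const runMock = createMockTestRun();
+ *   await adapter.runTests(uri, testIds, profileKind, runMock.object);
+ *   runMock.verify((r) => r.end(), typemoq.Times.once());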
+ * @returns A TypeMoq mock of TestRun + */ +export function createMockTestRun(): typemoq.IMock { + const runMock = typemoq.Mock.ofType(); + runMock.setup((r) => r.started(typemoq.It.isAny())); + runMock.setup((r) => r.passed(typemoq.It.isAny(), typemoq.It.isAny())); + runMock.setup((r) => r.failed(typemoq.It.isAny(), typemoq.It.isAny(), typemoq.It.isAny())); + runMock.setup((r) => r.skipped(typemoq.It.isAny())); + runMock.setup((r) => r.end()); + return runMock; +} From 00a92640074a6dbde00795ff58a34e1af2324961 Mon Sep 17 00:00:00 2001 From: eleanorjboyd <26030610+eleanorjboyd@users.noreply.github.com> Date: Fri, 6 Feb 2026 14:52:07 -0800 Subject: [PATCH 5/9] linting fix --- .../common/projectTestExecution.unit.test.ts | 29 +++++++++---------- 1 file changed, 14 insertions(+), 15 deletions(-) diff --git a/src/test/testing/testController/common/projectTestExecution.unit.test.ts b/src/test/testing/testController/common/projectTestExecution.unit.test.ts index bffbe4435220..fa79e1531e40 100644 --- a/src/test/testing/testController/common/projectTestExecution.unit.test.ts +++ b/src/test/testing/testController/common/projectTestExecution.unit.test.ts @@ -30,7 +30,6 @@ import { import { TestProjectRegistry } from '../../../../client/testing/testController/common/testProjectRegistry'; import { ITestExecutionAdapter, ITestResultResolver } from '../../../../client/testing/testController/common/types'; import * as telemetry from '../../../../client/telemetry'; -import { createDeferred } from '../../../../client/common/utils/async'; suite('Project Test Execution', () => { let sandbox: sinon.SinonSandbox; @@ -240,13 +239,13 @@ suite('Project Test Execution', () => { // ===== groupTestItemsByProject Tests ===== suite('groupTestItemsByProject', () => { - test('should group single test item to its matching project', () => { + test('should group single test item to its matching project', async () => { // Mock const item = createMockTestItem('test1', '/workspace/proj/test.py'); const project = createMockProjectAdapter({ projectPath: '/workspace/proj', projectName: 'proj' }); // Run - const result = groupTestItemsByProject([item], [project]); + const result = await groupTestItemsByProject([item], [project]); // Assert expect(result.size).to.equal(1); @@ -255,7 +254,7 @@ suite('Project Test Execution', () => { expect(entry.items).to.deep.equal([item]); }); - test('should aggregate multiple items belonging to same project', () => { + test('should aggregate multiple items belonging to same project', async () => { // Mock const item1 = createMockTestItem('test1', '/workspace/proj/tests/test1.py'); const item2 = createMockTestItem('test2', '/workspace/proj/tests/test2.py'); @@ -263,7 +262,7 @@ suite('Project Test Execution', () => { const project = createMockProjectAdapter({ projectPath: '/workspace/proj', projectName: 'proj' }); // Run - const result = groupTestItemsByProject([item1, item2, item3], [project]); + const result = await groupTestItemsByProject([item1, item2, item3], [project]); // Assert - use Set for order-agnostic comparison expect(result.size).to.equal(1); @@ -272,7 +271,7 @@ suite('Project Test Execution', () => { expect(new Set(entry.items)).to.deep.equal(new Set([item1, item2, item3])); }); - test('should separate items into groups by their owning project', () => { + test('should separate items into groups by their owning project', async () => { // Mock const item1 = createMockTestItem('test1', '/workspace/proj1/test.py'); const item2 = createMockTestItem('test2', '/workspace/proj2/test.py'); 
@@ -281,7 +280,7 @@ suite('Project Test Execution', () => { const proj2 = createMockProjectAdapter({ projectPath: '/workspace/proj2', projectName: 'proj2' }); // Run - const result = groupTestItemsByProject([item1, item2, item3], [proj1, proj2]); + const result = await groupTestItemsByProject([item1, item2, item3], [proj1, proj2]); // Assert - use Set for order-agnostic comparison expect(result.size).to.equal(2); @@ -292,30 +291,30 @@ suite('Project Test Execution', () => { expect(proj2Entry?.items).to.deep.equal([item2]); }); - test('should return empty map when no test items provided', () => { + test('should return empty map when no test items provided', async () => { // Mock const project = createMockProjectAdapter({ projectPath: '/workspace/proj', projectName: 'proj' }); // Run - const result = groupTestItemsByProject([], [project]); + const result = await groupTestItemsByProject([], [project]); // Assert expect(result.size).to.equal(0); }); - test('should exclude items that do not match any project path', () => { + test('should exclude items that do not match any project path', async () => { // Mock const item = createMockTestItem('test1', '/other/path/test.py'); const project = createMockProjectAdapter({ projectPath: '/workspace/proj', projectName: 'proj' }); // Run - const result = groupTestItemsByProject([item], [project]); + const result = await groupTestItemsByProject([item], [project]); // Assert expect(result.size).to.equal(0); }); - test('should assign item to most specific (deepest) project for nested paths', () => { + test('should assign item to most specific (deepest) project for nested paths', async () => { // Mock const item = createMockTestItem('test1', '/workspace/parent/child/test.py'); const parentProject = createMockProjectAdapter({ projectPath: '/workspace/parent', projectName: 'parent' }); @@ -325,7 +324,7 @@ suite('Project Test Execution', () => { }); // Run - const result = groupTestItemsByProject([item], [parentProject, childProject]); + const result = await groupTestItemsByProject([item], [parentProject, childProject]); // Assert expect(result.size).to.equal(1); @@ -334,14 +333,14 @@ suite('Project Test Execution', () => { expect(entry?.items).to.deep.equal([item]); }); - test('should omit projects that have no matching test items', () => { + test('should omit projects that have no matching test items', async () => { // Mock const item = createMockTestItem('test1', '/workspace/proj1/test.py'); const proj1 = createMockProjectAdapter({ projectPath: '/workspace/proj1', projectName: 'proj1' }); const proj2 = createMockProjectAdapter({ projectPath: '/workspace/proj2', projectName: 'proj2' }); // Run - const result = groupTestItemsByProject([item], [proj1, proj2]); + const result = await groupTestItemsByProject([item], [proj1, proj2]); // Assert expect(result.size).to.equal(1); From 51f14be74e365095a0dac9f18a7ec2b300cb2403 Mon Sep 17 00:00:00 2001 From: eleanorjboyd <26030610+eleanorjboyd@users.noreply.github.com> Date: Fri, 6 Feb 2026 15:09:28 -0800 Subject: [PATCH 6/9] fix tests --- .../testing/common/debugLauncher.unit.test.ts | 1910 ++++++++--------- .../common/projectTestExecution.unit.test.ts | 190 +- 2 files changed, 993 insertions(+), 1107 deletions(-) diff --git a/src/test/testing/common/debugLauncher.unit.test.ts b/src/test/testing/common/debugLauncher.unit.test.ts index f433fec0086b..86e862103bf6 100644 --- a/src/test/testing/common/debugLauncher.unit.test.ts +++ b/src/test/testing/common/debugLauncher.unit.test.ts @@ -1,977 +1,933 @@ -// // Copyright (c) 
Microsoft Corporation. All rights reserved. -// // Licensed under the MIT License. - -// 'use strict'; - -// import { expect, use } from 'chai'; -// import * as chaiAsPromised from 'chai-as-promised'; -// import * as path from 'path'; -// import * as sinon from 'sinon'; -// import * as TypeMoq from 'typemoq'; -// import * as fs from '../../../client/common/platform/fs-paths'; -// import * as workspaceApis from '../../../client/common/vscodeApis/workspaceApis'; -// import { CancellationTokenSource, DebugConfiguration, DebugSession, Uri, WorkspaceFolder } from 'vscode'; -// import { IInvalidPythonPathInDebuggerService } from '../../../client/application/diagnostics/types'; -// import { IApplicationShell, IDebugService } from '../../../client/common/application/types'; -// import { EXTENSION_ROOT_DIR } from '../../../client/common/constants'; -// import '../../../client/common/extensions'; -// import { IConfigurationService, IPythonSettings } from '../../../client/common/types'; -// import { PythonDebuggerTypeName } from '../../../client/debugger/constants'; -// import { IDebugEnvironmentVariablesService } from '../../../client/debugger/extension/configuration/resolvers/helper'; -// import { LaunchConfigurationResolver } from '../../../client/debugger/extension/configuration/resolvers/launch'; -// import { DebugOptions } from '../../../client/debugger/types'; -// import { IInterpreterService } from '../../../client/interpreter/contracts'; -// import { IServiceContainer } from '../../../client/ioc/types'; -// import { PythonEnvironment } from '../../../client/pythonEnvironments/info'; -// import { DebugLauncher } from '../../../client/testing/common/debugLauncher'; -// import { LaunchOptions } from '../../../client/testing/common/types'; -// import { ITestingSettings } from '../../../client/testing/configuration/types'; -// import { TestProvider } from '../../../client/testing/types'; -// import { isOs, OSType } from '../../common'; -// import { IEnvironmentActivationService } from '../../../client/interpreter/activation/types'; -// import { createDeferred } from '../../../client/common/utils/async'; - -// use(chaiAsPromised.default); - -// suite('Unit Tests - Debug Launcher', () => { -// let serviceContainer: TypeMoq.IMock; -// let unitTestSettings: TypeMoq.IMock; -// let debugLauncher: DebugLauncher; -// let debugService: TypeMoq.IMock; -// let settings: TypeMoq.IMock; -// let debugEnvHelper: TypeMoq.IMock; -// let interpreterService: TypeMoq.IMock; -// let environmentActivationService: TypeMoq.IMock; -// let getWorkspaceFolderStub: sinon.SinonStub; -// let getWorkspaceFoldersStub: sinon.SinonStub; -// let pathExistsStub: sinon.SinonStub; -// let readFileStub: sinon.SinonStub; -// const envVars = { FOO: 'BAR' }; - -// setup(async () => { -// environmentActivationService = TypeMoq.Mock.ofType(); -// environmentActivationService -// .setup((e) => e.getActivatedEnvironmentVariables(TypeMoq.It.isAny())) -// .returns(() => Promise.resolve(envVars)); -// interpreterService = TypeMoq.Mock.ofType(); -// serviceContainer = TypeMoq.Mock.ofType(undefined, TypeMoq.MockBehavior.Strict); -// const configService = TypeMoq.Mock.ofType(undefined, TypeMoq.MockBehavior.Strict); -// serviceContainer -// .setup((c) => c.get(TypeMoq.It.isValue(IConfigurationService))) -// .returns(() => configService.object); - -// debugService = TypeMoq.Mock.ofType(undefined, TypeMoq.MockBehavior.Strict); -// serviceContainer.setup((c) => c.get(TypeMoq.It.isValue(IDebugService))).returns(() => debugService.object); -// 
getWorkspaceFolderStub = sinon.stub(workspaceApis, 'getWorkspaceFolder'); -// getWorkspaceFoldersStub = sinon.stub(workspaceApis, 'getWorkspaceFolders'); -// pathExistsStub = sinon.stub(fs, 'pathExists'); -// readFileStub = sinon.stub(fs, 'readFile'); - -// const appShell = TypeMoq.Mock.ofType(undefined, TypeMoq.MockBehavior.Strict); -// appShell.setup((a) => a.showErrorMessage(TypeMoq.It.isAny())).returns(() => Promise.resolve(undefined)); -// serviceContainer.setup((c) => c.get(TypeMoq.It.isValue(IApplicationShell))).returns(() => appShell.object); - -// settings = TypeMoq.Mock.ofType(undefined, TypeMoq.MockBehavior.Strict); -// configService.setup((c) => c.getSettings(TypeMoq.It.isAny())).returns(() => settings.object); - -// unitTestSettings = TypeMoq.Mock.ofType(); -// settings.setup((p) => p.testing).returns(() => unitTestSettings.object); - -// debugEnvHelper = TypeMoq.Mock.ofType(undefined, TypeMoq.MockBehavior.Strict); -// serviceContainer -// .setup((c) => c.get(TypeMoq.It.isValue(IDebugEnvironmentVariablesService))) -// .returns(() => debugEnvHelper.object); - -// debugLauncher = new DebugLauncher(serviceContainer.object, getNewResolver(configService.object)); -// }); - -// teardown(() => { -// sinon.restore(); -// }); - -// function getNewResolver(configService: IConfigurationService) { -// const validator = TypeMoq.Mock.ofType( -// undefined, -// TypeMoq.MockBehavior.Strict, -// ); -// validator -// .setup((v) => v.validatePythonPath(TypeMoq.It.isAny(), TypeMoq.It.isAny(), TypeMoq.It.isAny())) -// .returns(() => Promise.resolve(true)); -// return new LaunchConfigurationResolver( -// validator.object, -// configService, -// debugEnvHelper.object, -// interpreterService.object, -// environmentActivationService.object, -// ); -// } -// function setupDebugManager( -// workspaceFolder: WorkspaceFolder, -// expected: DebugConfiguration, -// testProvider: TestProvider, -// ) { -// interpreterService -// .setup((i) => i.getActiveInterpreter(TypeMoq.It.isAny())) -// .returns(() => Promise.resolve(({ path: 'python' } as unknown) as PythonEnvironment)); -// settings.setup((p) => p.envFile).returns(() => __filename); -// const args = expected.args; -// const debugArgs = testProvider === 'unittest' ? 
args.filter((item: string) => item !== '--debug') : args; -// expected.args = debugArgs; - -// debugEnvHelper -// .setup((x) => x.getEnvironmentVariables(TypeMoq.It.isAny(), TypeMoq.It.isAny())) -// .returns(() => Promise.resolve(expected.env)); - -// const deferred = createDeferred(); - -// debugService -// .setup((d) => -// d.startDebugging(TypeMoq.It.isValue(workspaceFolder), TypeMoq.It.isValue(expected), undefined), -// ) -// .returns((_wspc: WorkspaceFolder, _expectedParam: DebugConfiguration) => { -// deferred.resolve(); -// return Promise.resolve(undefined as any); -// }); - -// // create a fake debug session that the debug service will return on terminate -// const fakeDebugSession = TypeMoq.Mock.ofType(); -// fakeDebugSession.setup((ds) => ds.id).returns(() => 'id-val'); -// const debugSessionInstance = fakeDebugSession.object; - -// debugService -// .setup((d) => d.activeDebugSession) -// .returns(() => debugSessionInstance) -// .verifiable(TypeMoq.Times.once()); - -// debugService -// .setup((d) => d.onDidTerminateDebugSession(TypeMoq.It.isAny())) -// .returns((callback) => { -// deferred.promise.then(() => { -// callback(debugSessionInstance); -// }); -// return undefined as any; -// }) -// .verifiable(TypeMoq.Times.once()); -// } -// function createWorkspaceFolder(folderPath: string): WorkspaceFolder { -// return { -// index: 0, -// name: path.basename(folderPath), -// uri: Uri.file(folderPath), -// }; -// } -// function getTestLauncherScript(testProvider: TestProvider, pythonTestAdapterRewriteExperiment?: boolean) { -// if (!pythonTestAdapterRewriteExperiment) { -// switch (testProvider) { -// case 'unittest': { -// return path.join(EXTENSION_ROOT_DIR, 'python_files', 'unittestadapter', 'execution.py'); -// } -// case 'pytest': { -// return path.join(EXTENSION_ROOT_DIR, 'python_files', 'vscode_pytest', 'run_pytest_script.py'); -// } -// default: { -// throw new Error(`Unknown test provider '${testProvider}'`); -// } -// } -// } -// } - -// function getDefaultDebugConfig(): DebugConfiguration { -// return { -// name: 'Debug Unit Test', -// type: PythonDebuggerTypeName, -// request: 'launch', -// console: 'internalConsole', -// env: {}, -// envFile: __filename, -// stopOnEntry: false, -// showReturnValue: true, -// redirectOutput: true, -// debugStdLib: false, -// subProcess: true, -// purpose: [], -// }; -// } -// function setupSuccess( -// options: LaunchOptions, -// testProvider: TestProvider, -// expected?: DebugConfiguration, -// debugConfigs?: string | DebugConfiguration[], -// ) { -// const testLaunchScript = getTestLauncherScript(testProvider, false); - -// const workspaceFolders = [createWorkspaceFolder(options.cwd), createWorkspaceFolder('five/six/seven')]; -// getWorkspaceFoldersStub.returns(workspaceFolders); -// getWorkspaceFolderStub.returns(workspaceFolders[0]); - -// if (!debugConfigs) { -// pathExistsStub.resolves(false); -// } else { -// pathExistsStub.resolves(true); - -// if (typeof debugConfigs !== 'string') { -// debugConfigs = JSON.stringify({ -// version: '0.1.0', -// configurations: debugConfigs, -// }); -// } -// readFileStub.resolves(debugConfigs as string); -// } - -// if (!expected) { -// expected = getDefaultDebugConfig(); -// } -// expected.rules = [{ path: path.join(EXTENSION_ROOT_DIR, 'python_files'), include: false }]; -// expected.program = testLaunchScript; -// expected.args = options.args; - -// if (!expected.cwd) { -// expected.cwd = workspaceFolders[0].uri.fsPath; -// } -// const pluginPath = path.join(EXTENSION_ROOT_DIR, 'python_files'); 
-// const pythonPath = `${pluginPath}${path.delimiter}${expected.cwd}`; -// expected.env.PYTHONPATH = pythonPath; -// expected.env.TEST_RUN_PIPE = 'pytestPort'; -// expected.env.RUN_TEST_IDS_PIPE = 'runTestIdsPort'; - -// // added by LaunchConfigurationResolver: -// if (!expected.python) { -// expected.python = 'python'; -// } -// if (!expected.clientOS) { -// expected.clientOS = isOs(OSType.Windows) ? 'windows' : 'unix'; -// } -// if (!expected.debugAdapterPython) { -// expected.debugAdapterPython = 'python'; -// } -// if (!expected.debugLauncherPython) { -// expected.debugLauncherPython = 'python'; -// } -// expected.workspaceFolder = workspaceFolders[0].uri.fsPath; -// expected.debugOptions = []; -// if (expected.stopOnEntry) { -// expected.debugOptions.push(DebugOptions.StopOnEntry); -// } -// if (expected.showReturnValue) { -// expected.debugOptions.push(DebugOptions.ShowReturnValue); -// } -// if (expected.redirectOutput) { -// expected.debugOptions.push(DebugOptions.RedirectOutput); -// } -// if (expected.subProcess) { -// expected.debugOptions.push(DebugOptions.SubProcess); -// } -// if (isOs(OSType.Windows)) { -// expected.debugOptions.push(DebugOptions.FixFilePathCase); -// } - -// setupDebugManager(workspaceFolders[0], expected, testProvider); -// } - -// const testProviders: TestProvider[] = ['pytest', 'unittest']; - -// testProviders.forEach((testProvider) => { -// const testTitleSuffix = `(Test Framework '${testProvider}')`; - -// test(`Must launch debugger ${testTitleSuffix}`, async () => { -// const options = { -// cwd: 'one/two/three', -// args: ['/one/two/three/testfile.py'], -// testProvider, -// runTestIdsPort: 'runTestIdsPort', -// pytestPort: 'pytestPort', -// }; -// setupSuccess(options, testProvider); - -// await debugLauncher.launchDebugger(options); - -// try { -// debugService.verifyAll(); -// } catch (ex) { -// console.log(ex); -// } -// }); -// test(`Must launch debugger with arguments ${testTitleSuffix}`, async () => { -// const options = { -// cwd: 'one/two/three', -// args: ['/one/two/three/testfile.py', '--debug', '1'], -// testProvider, -// runTestIdsPort: 'runTestIdsPort', -// pytestPort: 'pytestPort', -// }; -// setupSuccess(options, testProvider); - -// await debugLauncher.launchDebugger(options); - -// debugService.verifyAll(); -// }); -// test(`Must not launch debugger if cancelled ${testTitleSuffix}`, async () => { -// debugService -// .setup((d) => d.startDebugging(TypeMoq.It.isAny(), TypeMoq.It.isAny(), TypeMoq.It.isAny())) -// .returns(() => { -// return Promise.resolve(undefined as any); -// }) -// .verifiable(TypeMoq.Times.never()); - -// const cancellationToken = new CancellationTokenSource(); -// cancellationToken.cancel(); -// const token = cancellationToken.token; -// const options: LaunchOptions = { -// cwd: '', -// args: [], -// token, -// testProvider, -// runTestIdsPort: 'runTestIdsPort', -// pytestPort: 'pytestPort', -// }; - -// await expect(debugLauncher.launchDebugger(options)).to.be.eventually.equal(undefined, 'not undefined'); - -// debugService.verifyAll(); -// }); -// test(`Must throw an exception if there are no workspaces ${testTitleSuffix}`, async () => { -// getWorkspaceFoldersStub.returns(undefined); -// debugService -// .setup((d) => d.startDebugging(TypeMoq.It.isAny(), TypeMoq.It.isAny())) -// .returns(() => { -// console.log('Debugging should not start'); -// return Promise.resolve(undefined as any); -// }) -// .verifiable(TypeMoq.Times.never()); - -// const options: LaunchOptions = { -// cwd: '', -// args: [], -// 
testProvider, -// runTestIdsPort: 'runTestIdsPort', -// pytestPort: 'pytestPort', -// }; - -// await expect(debugLauncher.launchDebugger(options)).to.eventually.rejectedWith('Please open a workspace'); - -// debugService.verifyAll(); -// }); -// }); - -// test('Tries launch.json first', async () => { -// const options: LaunchOptions = { -// cwd: 'one/two/three', -// args: ['/one/two/three/testfile.py'], -// testProvider: 'unittest', -// runTestIdsPort: 'runTestIdsPort', -// pytestPort: 'pytestPort', -// }; -// const expected = getDefaultDebugConfig(); -// expected.name = 'spam'; -// setupSuccess(options, 'unittest', expected, [{ name: 'spam', type: PythonDebuggerTypeName, request: 'test' }]); - -// await debugLauncher.launchDebugger(options); - -// debugService.verifyAll(); -// }); - -// test('Use cwd value in settings if exist', async () => { -// unitTestSettings.setup((p) => p.cwd).returns(() => 'path/to/settings/cwd'); -// const options: LaunchOptions = { -// cwd: 'one/two/three', -// args: ['/one/two/three/testfile.py'], -// testProvider: 'unittest', -// runTestIdsPort: 'runTestIdsPort', -// pytestPort: 'pytestPort', -// }; -// const expected = getDefaultDebugConfig(); -// expected.cwd = 'path/to/settings/cwd'; -// const pluginPath = path.join(EXTENSION_ROOT_DIR, 'python_files'); -// const pythonPath = `${pluginPath}${path.delimiter}${expected.cwd}`; -// expected.env.PYTHONPATH = pythonPath; - -// setupSuccess(options, 'unittest', expected); -// await debugLauncher.launchDebugger(options); - -// debugService.verifyAll(); -// }); - -// test('Full debug config', async () => { -// const options: LaunchOptions = { -// cwd: 'one/two/three', -// args: ['/one/two/three/testfile.py'], -// testProvider: 'unittest', -// runTestIdsPort: 'runTestIdsPort', -// pytestPort: 'pytestPort', -// }; -// const expected = { -// name: 'my tests', -// type: PythonDebuggerTypeName, -// request: 'launch', -// python: 'some/dir/bin/py3', -// debugAdapterPython: 'some/dir/bin/py3', -// debugLauncherPython: 'some/dir/bin/py3', -// stopOnEntry: true, -// showReturnValue: true, -// console: 'integratedTerminal', -// cwd: 'some/dir', -// env: { -// PYTHONPATH: 'one/two/three', -// SPAM: 'EGGS', -// TEST_RUN_PIPE: 'pytestPort', -// RUN_TEST_IDS_PIPE: 'runTestIdsPort', -// }, -// envFile: 'some/dir/.env', -// redirectOutput: false, -// debugStdLib: true, -// // added by LaunchConfigurationResolver: -// internalConsoleOptions: 'neverOpen', -// subProcess: true, -// purpose: [], -// }; -// setupSuccess(options, 'unittest', expected, [ -// { -// name: 'my tests', -// type: PythonDebuggerTypeName, -// request: 'test', -// pythonPath: expected.python, -// stopOnEntry: expected.stopOnEntry, -// showReturnValue: expected.showReturnValue, -// console: expected.console, -// cwd: expected.cwd, -// env: expected.env, -// envFile: expected.envFile, -// redirectOutput: expected.redirectOutput, -// debugStdLib: expected.debugStdLib, -// }, -// ]); - -// await debugLauncher.launchDebugger(options); - -// debugService.verifyAll(); -// }); - -// test('Uses first entry', async () => { -// const options: LaunchOptions = { -// cwd: 'one/two/three', -// args: ['/one/two/three/testfile.py'], -// testProvider: 'unittest', -// runTestIdsPort: 'runTestIdsPort', -// pytestPort: 'pytestPort', -// }; -// const expected = getDefaultDebugConfig(); -// expected.name = 'spam1'; -// setupSuccess(options, 'unittest', expected, [ -// { name: 'spam1', type: PythonDebuggerTypeName, request: 'test' }, -// { name: 'spam2', type: PythonDebuggerTypeName, 
request: 'test' }, -// { name: 'spam3', type: PythonDebuggerTypeName, request: 'test' }, -// ]); - -// await debugLauncher.launchDebugger(options); - -// debugService.verifyAll(); -// }); - -// test('Handles bad JSON', async () => { -// const options: LaunchOptions = { -// cwd: 'one/two/three', -// args: ['/one/two/three/testfile.py'], -// testProvider: 'unittest', -// runTestIdsPort: 'runTestIdsPort', -// pytestPort: 'pytestPort', -// }; -// const expected = getDefaultDebugConfig(); -// setupSuccess(options, 'unittest', expected, ']'); - -// await debugLauncher.launchDebugger(options); - -// debugService.verifyAll(); -// }); - -// const malformedFiles = [ -// '// test 1', -// '// test 2 \n\ -// { \n\ -// "name": "spam", \n\ -// "type": "debugpy", \n\ -// "request": "test" \n\ -// } \n\ -// ', -// '// test 3 \n\ -// [ \n\ -// { \n\ -// "name": "spam", \n\ -// "type": "debugpy", \n\ -// "request": "test" \n\ -// } \n\ -// ] \n\ -// ', -// '// test 4 \n\ -// { \n\ -// "configurations": [ \n\ -// { \n\ -// "name": "spam", \n\ -// "type": "debugpy", \n\ -// "request": "test" \n\ -// } \n\ -// ] \n\ -// } \n\ -// ', -// ]; -// for (const text of malformedFiles) { -// const testID = text.split('\n')[0].substring(3).trim(); -// test(`Handles malformed launch.json - ${testID}`, async () => { -// const options: LaunchOptions = { -// cwd: 'one/two/three', -// args: ['/one/two/three/testfile.py'], -// testProvider: 'unittest', -// runTestIdsPort: 'runTestIdsPort', -// pytestPort: 'pytestPort', -// }; -// const expected = getDefaultDebugConfig(); -// setupSuccess(options, 'unittest', expected, text); - -// await debugLauncher.launchDebugger(options); - -// debugService.verifyAll(); -// }); -// } - -// test('Handles bad debug config items', async () => { -// const options: LaunchOptions = { -// cwd: 'one/two/three', -// args: ['/one/two/three/testfile.py'], -// testProvider: 'unittest', -// runTestIdsPort: 'runTestIdsPort', -// pytestPort: 'pytestPort', -// }; -// const expected = getDefaultDebugConfig(); - -// setupSuccess(options, 'unittest', expected, [ -// {} as DebugConfiguration, -// { name: 'spam1' } as DebugConfiguration, -// { name: 'spam2', type: PythonDebuggerTypeName } as DebugConfiguration, -// { name: 'spam3', request: 'test' } as DebugConfiguration, -// { type: PythonDebuggerTypeName } as DebugConfiguration, -// { type: PythonDebuggerTypeName, request: 'test' } as DebugConfiguration, -// { request: 'test' } as DebugConfiguration, -// ]); - -// await debugLauncher.launchDebugger(options); - -// debugService.verifyAll(); -// }); - -// test('Handles non-python debug configs', async () => { -// const options: LaunchOptions = { -// cwd: 'one/two/three', -// args: ['/one/two/three/testfile.py'], -// testProvider: 'unittest', -// runTestIdsPort: 'runTestIdsPort', -// pytestPort: 'pytestPort', -// }; -// const expected = getDefaultDebugConfig(); -// setupSuccess(options, 'unittest', expected, [{ name: 'foo', type: 'other', request: 'bar' }]); - -// await debugLauncher.launchDebugger(options); - -// debugService.verifyAll(); -// }); - -// test('Handles bogus python debug configs', async () => { -// const options: LaunchOptions = { -// cwd: 'one/two/three', -// args: ['/one/two/three/testfile.py'], -// testProvider: 'unittest', -// runTestIdsPort: 'runTestIdsPort', -// pytestPort: 'pytestPort', -// }; -// const expected = getDefaultDebugConfig(); -// setupSuccess(options, 'unittest', expected, [{ name: 'spam', type: PythonDebuggerTypeName, request: 'bogus' }]); - -// await 
debugLauncher.launchDebugger(options); - -// debugService.verifyAll(); -// }); - -// test('Handles non-test debug config', async () => { -// const options: LaunchOptions = { -// cwd: 'one/two/three', -// args: ['/one/two/three/testfile.py'], -// testProvider: 'unittest', -// runTestIdsPort: 'runTestIdsPort', -// pytestPort: 'pytestPort', -// }; -// const expected = getDefaultDebugConfig(); -// setupSuccess(options, 'unittest', expected, [ -// { name: 'spam', type: PythonDebuggerTypeName, request: 'launch' }, -// { name: 'spam', type: PythonDebuggerTypeName, request: 'attach' }, -// ]); - -// await debugLauncher.launchDebugger(options); - -// debugService.verifyAll(); -// }); - -// test('Handles mixed debug config', async () => { -// const options: LaunchOptions = { -// cwd: 'one/two/three', -// args: ['/one/two/three/testfile.py'], -// testProvider: 'unittest', -// runTestIdsPort: 'runTestIdsPort', -// pytestPort: 'pytestPort', -// }; -// const expected = getDefaultDebugConfig(); -// expected.name = 'spam2'; -// setupSuccess(options, 'unittest', expected, [ -// { name: 'foo1', type: 'other', request: 'bar' }, -// { name: 'foo2', type: 'other', request: 'bar' }, -// { name: 'spam1', type: PythonDebuggerTypeName, request: 'launch' }, -// { name: 'spam2', type: PythonDebuggerTypeName, request: 'test' }, -// { name: 'spam3', type: PythonDebuggerTypeName, request: 'attach' }, -// { name: 'xyz', type: 'another', request: 'abc' }, -// ]); - -// await debugLauncher.launchDebugger(options); - -// debugService.verifyAll(); -// }); - -// test('Handles comments', async () => { -// const options: LaunchOptions = { -// cwd: 'one/two/three', -// args: ['/one/two/three/testfile.py'], -// testProvider: 'unittest', -// runTestIdsPort: 'runTestIdsPort', -// pytestPort: 'pytestPort', -// }; -// const expected = getDefaultDebugConfig(); -// expected.name = 'spam'; -// expected.stopOnEntry = true; -// setupSuccess( -// options, -// 'unittest', -// expected, -// ' \n\ -// { \n\ -// "version": "0.1.0", \n\ -// "configurations": [ \n\ -// // my thing \n\ -// { \n\ -// // "test" debug config \n\ -// "name": "spam", /* non-empty */ \n\ -// "type": "debugpy", /* must be "python" */ \n\ -// "request": "test", /* must be "test" */ \n\ -// // extra stuff here: \n\ -// "stopOnEntry": true \n\ -// } \n\ -// ] \n\ -// } \n\ -// ', -// ); - -// await debugLauncher.launchDebugger(options); - -// debugService.verifyAll(); -// }); -// test('Ensure trailing commands in JSON are handled', async () => { -// const workspaceFolder = { name: 'abc', index: 0, uri: Uri.file(__filename) }; -// const filename = path.join(workspaceFolder.uri.fsPath, '.vscode', 'launch.json'); -// const jsonc = '{"version":"1234", "configurations":[1,2,],}'; -// pathExistsStub.resolves(true); -// readFileStub.withArgs(filename).resolves(jsonc); - -// const configs = await debugLauncher.readAllDebugConfigs(workspaceFolder); - -// expect(configs).to.be.deep.equal([1, 2]); -// }); -// test('Ensure empty configuration is returned when launch.json cannot be parsed', async () => { -// const workspaceFolder = { name: 'abc', index: 0, uri: Uri.file(__filename) }; -// const filename = path.join(workspaceFolder.uri.fsPath, '.vscode', 'launch.json'); -// const jsonc = '{"version":"1234"'; - -// pathExistsStub.resolves(true); -// readFileStub.withArgs(filename).resolves(jsonc); - -// const configs = await debugLauncher.readAllDebugConfigs(workspaceFolder); - -// expect(configs).to.be.deep.equal([]); -// }); - -// // ===== PROJECT-BASED DEBUG SESSION TESTS ===== - -// 
suite('Project-based debug sessions', () => { -// function setupForProjectTests(options: LaunchOptions) { -// interpreterService -// .setup((i) => i.getActiveInterpreter(TypeMoq.It.isAny())) -// .returns(() => Promise.resolve(({ path: 'python' } as unknown) as PythonEnvironment)); -// settings.setup((p) => p.envFile).returns(() => __filename); - -// debugEnvHelper -// .setup((x) => x.getEnvironmentVariables(TypeMoq.It.isAny(), TypeMoq.It.isAny())) -// .returns(() => Promise.resolve({})); - -// const workspaceFolders = [ -// { index: 0, name: 'test', uri: Uri.file(options.cwd) }, -// ]; -// getWorkspaceFoldersStub.returns(workspaceFolders); -// getWorkspaceFolderStub.returns(workspaceFolders[0]); -// pathExistsStub.resolves(false); -// } - -// test('should use debugSessionName in config name when provided', async () => { -// const options: LaunchOptions = { -// cwd: 'one/two/three', -// args: ['/one/two/three/testfile.py'], -// testProvider: 'pytest', -// runTestIdsPort: 'runTestIdsPort', -// pytestPort: 'pytestPort', -// debugSessionName: 'myproject (Python 3.11)', -// }; - -// setupForProjectTests(options); - -// let capturedConfig: DebugConfiguration | undefined; - -// debugService -// .setup((d) => d.startDebugging(TypeMoq.It.isAny(), TypeMoq.It.isAny(), TypeMoq.It.isAny())) -// .callback((_, config) => { -// capturedConfig = config; -// }) -// .returns(() => Promise.resolve(true)); - -// debugService -// .setup((d) => d.onDidStartDebugSession(TypeMoq.It.isAny())) -// .returns(() => ({ dispose: () => {} })); - -// debugService -// .setup((d) => d.onDidTerminateDebugSession(TypeMoq.It.isAny())) -// .returns((callback) => { -// // Immediately terminate with a matching session -// setTimeout(() => { -// if (capturedConfig) { -// callback({ -// id: 'test-session-id', -// configuration: capturedConfig, -// } as DebugSession); -// } -// }, 10); -// return { dispose: () => {} }; -// }); - -// await debugLauncher.launchDebugger(options); - -// expect(capturedConfig).to.not.be.undefined; -// expect(capturedConfig!.name).to.equal('Debug Tests: myproject (Python 3.11)'); -// }); - -// test('should use pythonPath from options when provided', async () => { -// const options: LaunchOptions = { -// cwd: 'one/two/three', -// args: ['/one/two/three/testfile.py'], -// testProvider: 'pytest', -// runTestIdsPort: 'runTestIdsPort', -// pytestPort: 'pytestPort', -// pythonPath: '/custom/python/interpreter', -// }; - -// setupForProjectTests(options); - -// let capturedConfig: DebugConfiguration | undefined; - -// debugService -// .setup((d) => d.startDebugging(TypeMoq.It.isAny(), TypeMoq.It.isAny(), TypeMoq.It.isAny())) -// .callback((_, config) => { -// capturedConfig = config; -// }) -// .returns(() => Promise.resolve(true)); - -// debugService -// .setup((d) => d.onDidStartDebugSession(TypeMoq.It.isAny())) -// .returns(() => ({ dispose: () => {} })); - -// debugService -// .setup((d) => d.onDidTerminateDebugSession(TypeMoq.It.isAny())) -// .returns((callback) => { -// setTimeout(() => { -// if (capturedConfig) { -// callback({ -// id: 'test-session-id', -// configuration: capturedConfig, -// } as DebugSession); -// } -// }, 10); -// return { dispose: () => {} }; -// }); - -// await debugLauncher.launchDebugger(options); - -// expect(capturedConfig).to.not.be.undefined; -// expect(capturedConfig!.python).to.equal('/custom/python/interpreter'); -// }); - -// test('should add unique session marker to launch config', async () => { -// const options: LaunchOptions = { -// cwd: 'one/two/three', -// args: 
['/one/two/three/testfile.py'], -// testProvider: 'pytest', -// runTestIdsPort: 'runTestIdsPort', -// pytestPort: 'pytestPort', -// }; - -// setupForProjectTests(options); - -// let capturedConfig: DebugConfiguration | undefined; - -// debugService -// .setup((d) => d.startDebugging(TypeMoq.It.isAny(), TypeMoq.It.isAny(), TypeMoq.It.isAny())) -// .callback((_, config) => { -// capturedConfig = config; -// }) -// .returns(() => Promise.resolve(true)); - -// debugService -// .setup((d) => d.onDidStartDebugSession(TypeMoq.It.isAny())) -// .returns(() => ({ dispose: () => {} })); - -// debugService -// .setup((d) => d.onDidTerminateDebugSession(TypeMoq.It.isAny())) -// .returns((callback) => { -// setTimeout(() => { -// if (capturedConfig) { -// callback({ -// id: 'test-session-id', -// configuration: capturedConfig, -// } as DebugSession); -// } -// }, 10); -// return { dispose: () => {} }; -// }); - -// await debugLauncher.launchDebugger(options); - -// expect(capturedConfig).to.not.be.undefined; -// // Should have a session marker of format 'test-{timestamp}-{random}' -// const marker = (capturedConfig as any).__vscodeTestSessionMarker; -// expect(marker).to.be.a('string'); -// expect(marker).to.match(/^test-\d+-[a-z0-9]+$/); -// }); - -// test('should generate unique markers for each launch', async () => { -// const options: LaunchOptions = { -// cwd: 'one/two/three', -// args: ['/one/two/three/testfile.py'], -// testProvider: 'pytest', -// runTestIdsPort: 'runTestIdsPort', -// pytestPort: 'pytestPort', -// }; - -// setupForProjectTests(options); - -// const capturedConfigs: DebugConfiguration[] = []; - -// debugService -// .setup((d) => d.startDebugging(TypeMoq.It.isAny(), TypeMoq.It.isAny(), TypeMoq.It.isAny())) -// .callback((_, config) => { -// capturedConfigs.push(config); -// }) -// .returns(() => Promise.resolve(true)); - -// debugService -// .setup((d) => d.onDidStartDebugSession(TypeMoq.It.isAny())) -// .returns(() => ({ dispose: () => {} })); - -// debugService -// .setup((d) => d.onDidTerminateDebugSession(TypeMoq.It.isAny())) -// .returns((callback) => { -// setTimeout(() => { -// if (capturedConfigs.length > 0) { -// const lastConfig = capturedConfigs[capturedConfigs.length - 1]; -// callback({ -// id: `session-${capturedConfigs.length}`, -// configuration: lastConfig, -// } as DebugSession); -// } -// }, 10); -// return { dispose: () => {} }; -// }); - -// // Launch twice -// await debugLauncher.launchDebugger(options); -// await debugLauncher.launchDebugger(options); - -// expect(capturedConfigs).to.have.length(2); -// const marker1 = (capturedConfigs[0] as any).__vscodeTestSessionMarker; -// const marker2 = (capturedConfigs[1] as any).__vscodeTestSessionMarker; -// expect(marker1).to.not.equal(marker2); -// }); - -// test('should only resolve when matching session terminates', async () => { -// const options: LaunchOptions = { -// cwd: 'one/two/three', -// args: ['/one/two/three/testfile.py'], -// testProvider: 'pytest', -// runTestIdsPort: 'runTestIdsPort', -// pytestPort: 'pytestPort', -// }; - -// setupForProjectTests(options); - -// let capturedConfig: DebugConfiguration | undefined; -// let terminateCallback: ((session: DebugSession) => void) | undefined; -// let startCallback: ((session: DebugSession) => void) | undefined; - -// debugService -// .setup((d) => d.startDebugging(TypeMoq.It.isAny(), TypeMoq.It.isAny(), TypeMoq.It.isAny())) -// .callback((_, config) => { -// capturedConfig = config; -// }) -// .returns(() => Promise.resolve(true)); - -// debugService -// 
.setup((d) => d.onDidStartDebugSession(TypeMoq.It.isAny())) -// .callback((cb) => { -// startCallback = cb; -// }) -// .returns(() => ({ dispose: () => {} })); - -// debugService -// .setup((d) => d.onDidTerminateDebugSession(TypeMoq.It.isAny())) -// .callback((cb) => { -// terminateCallback = cb; -// }) -// .returns(() => ({ dispose: () => {} })); - -// const launchPromise = debugLauncher.launchDebugger(options); - -// // Wait for config to be captured -// await new Promise((r) => setTimeout(r, 10)); - -// // Simulate our session starting -// const ourSession = { -// id: 'our-session-id', -// configuration: capturedConfig!, -// } as DebugSession; -// startCallback?.(ourSession); - -// // Create a different session (like another project's debug) -// const otherSession = { -// id: 'other-session-id', -// configuration: { __vscodeTestSessionMarker: 'different-marker' }, -// } as DebugSession; - -// // Terminate the OTHER session first - should NOT resolve our promise -// terminateCallback?.(otherSession); - -// // Wait a bit to ensure it didn't resolve -// let resolved = false; -// const checkPromise = launchPromise.then(() => { -// resolved = true; -// }); - -// await new Promise((r) => setTimeout(r, 20)); -// expect(resolved).to.be.false; - -// // Now terminate OUR session - should resolve -// terminateCallback?.(ourSession); - -// await checkPromise; -// expect(resolved).to.be.true; -// }); -// }); -// }); +// Copyright (c) Microsoft Corporation. All rights reserved. +// Licensed under the MIT License. + +'use strict'; + +import { expect, use } from 'chai'; +import * as chaiAsPromised from 'chai-as-promised'; +import * as path from 'path'; +import * as sinon from 'sinon'; +import * as TypeMoq from 'typemoq'; +import * as fs from '../../../client/common/platform/fs-paths'; +import * as workspaceApis from '../../../client/common/vscodeApis/workspaceApis'; +import { CancellationTokenSource, DebugConfiguration, DebugSession, Uri, WorkspaceFolder } from 'vscode'; +import { IInvalidPythonPathInDebuggerService } from '../../../client/application/diagnostics/types'; +import { IApplicationShell, IDebugService } from '../../../client/common/application/types'; +import { EXTENSION_ROOT_DIR } from '../../../client/common/constants'; +import '../../../client/common/extensions'; +import { IConfigurationService, IPythonSettings } from '../../../client/common/types'; +import { PythonDebuggerTypeName } from '../../../client/debugger/constants'; +import { IDebugEnvironmentVariablesService } from '../../../client/debugger/extension/configuration/resolvers/helper'; +import { LaunchConfigurationResolver } from '../../../client/debugger/extension/configuration/resolvers/launch'; +import { DebugOptions } from '../../../client/debugger/types'; +import { IInterpreterService } from '../../../client/interpreter/contracts'; +import { IServiceContainer } from '../../../client/ioc/types'; +import { PythonEnvironment } from '../../../client/pythonEnvironments/info'; +import { DebugLauncher } from '../../../client/testing/common/debugLauncher'; +import { LaunchOptions } from '../../../client/testing/common/types'; +import { ITestingSettings } from '../../../client/testing/configuration/types'; +import { TestProvider } from '../../../client/testing/types'; +import { isOs, OSType } from '../../common'; +import { IEnvironmentActivationService } from '../../../client/interpreter/activation/types'; +import { createDeferred } from '../../../client/common/utils/async'; +import * as envExtApi from 
'../../../client/envExt/api.internal'; + +use(chaiAsPromised.default); + +suite('Unit Tests - Debug Launcher', () => { + let serviceContainer: TypeMoq.IMock; + let unitTestSettings: TypeMoq.IMock; + let debugLauncher: DebugLauncher; + let debugService: TypeMoq.IMock; + let settings: TypeMoq.IMock; + let debugEnvHelper: TypeMoq.IMock; + let interpreterService: TypeMoq.IMock; + let environmentActivationService: TypeMoq.IMock; + let getWorkspaceFolderStub: sinon.SinonStub; + let getWorkspaceFoldersStub: sinon.SinonStub; + let pathExistsStub: sinon.SinonStub; + let readFileStub: sinon.SinonStub; + const envVars = { FOO: 'BAR' }; + + setup(async () => { + environmentActivationService = TypeMoq.Mock.ofType(); + environmentActivationService + .setup((e) => e.getActivatedEnvironmentVariables(TypeMoq.It.isAny())) + .returns(() => Promise.resolve(envVars)); + interpreterService = TypeMoq.Mock.ofType(); + serviceContainer = TypeMoq.Mock.ofType(undefined, TypeMoq.MockBehavior.Strict); + const configService = TypeMoq.Mock.ofType(undefined, TypeMoq.MockBehavior.Strict); + serviceContainer + .setup((c) => c.get(TypeMoq.It.isValue(IConfigurationService))) + .returns(() => configService.object); + + debugService = TypeMoq.Mock.ofType(undefined, TypeMoq.MockBehavior.Strict); + serviceContainer.setup((c) => c.get(TypeMoq.It.isValue(IDebugService))).returns(() => debugService.object); + getWorkspaceFolderStub = sinon.stub(workspaceApis, 'getWorkspaceFolder'); + getWorkspaceFoldersStub = sinon.stub(workspaceApis, 'getWorkspaceFolders'); + pathExistsStub = sinon.stub(fs, 'pathExists'); + readFileStub = sinon.stub(fs, 'readFile'); + + const appShell = TypeMoq.Mock.ofType(undefined, TypeMoq.MockBehavior.Strict); + appShell.setup((a) => a.showErrorMessage(TypeMoq.It.isAny())).returns(() => Promise.resolve(undefined)); + serviceContainer.setup((c) => c.get(TypeMoq.It.isValue(IApplicationShell))).returns(() => appShell.object); + + settings = TypeMoq.Mock.ofType(undefined, TypeMoq.MockBehavior.Strict); + configService.setup((c) => c.getSettings(TypeMoq.It.isAny())).returns(() => settings.object); + + unitTestSettings = TypeMoq.Mock.ofType(); + settings.setup((p) => p.testing).returns(() => unitTestSettings.object); + + debugEnvHelper = TypeMoq.Mock.ofType(undefined, TypeMoq.MockBehavior.Strict); + serviceContainer + .setup((c) => c.get(TypeMoq.It.isValue(IDebugEnvironmentVariablesService))) + .returns(() => debugEnvHelper.object); + + debugLauncher = new DebugLauncher(serviceContainer.object, getNewResolver(configService.object)); + }); + + teardown(() => { + sinon.restore(); + }); + + function getNewResolver(configService: IConfigurationService) { + const validator = TypeMoq.Mock.ofType( + undefined, + TypeMoq.MockBehavior.Strict, + ); + validator + .setup((v) => v.validatePythonPath(TypeMoq.It.isAny(), TypeMoq.It.isAny(), TypeMoq.It.isAny())) + .returns(() => Promise.resolve(true)); + return new LaunchConfigurationResolver( + validator.object, + configService, + debugEnvHelper.object, + interpreterService.object, + environmentActivationService.object, + ); + } + function setupDebugManager( + _workspaceFolder: WorkspaceFolder, + expected: DebugConfiguration, + testProvider: TestProvider, + ) { + interpreterService + .setup((i) => i.getActiveInterpreter(TypeMoq.It.isAny())) + .returns(() => Promise.resolve(({ path: 'python' } as unknown) as PythonEnvironment)); + settings.setup((p) => p.envFile).returns(() => __filename); + const args = expected.args; + const debugArgs = testProvider === 'unittest' ? 
args.filter((item: string) => item !== '--debug') : args; + expected.args = debugArgs; + + debugEnvHelper + .setup((x) => x.getEnvironmentVariables(TypeMoq.It.isAny(), TypeMoq.It.isAny())) + .returns(() => Promise.resolve(expected.env)); + + const deferred = createDeferred(); + let capturedConfig: DebugConfiguration | undefined; + + // Use TypeMoq.It.isAny() because the implementation adds a session marker to the config + debugService + .setup((d) => d.startDebugging(TypeMoq.It.isAny(), TypeMoq.It.isAny(), TypeMoq.It.isAny())) + .callback((_wspc: WorkspaceFolder, config: DebugConfiguration) => { + capturedConfig = config; + deferred.resolve(); + }) + .returns(() => Promise.resolve(true)); + + // Setup onDidStartDebugSession - the new implementation uses this to capture the session + debugService + .setup((d) => d.onDidStartDebugSession(TypeMoq.It.isAny())) + .returns((callback) => { + deferred.promise.then(() => { + if (capturedConfig) { + callback(({ + id: 'test-session-id', + configuration: capturedConfig, + } as unknown) as DebugSession); + } + }); + return { dispose: () => {} }; + }); + + // Setup onDidTerminateDebugSession - fires after the session starts + debugService + .setup((d) => d.onDidTerminateDebugSession(TypeMoq.It.isAny())) + .returns((callback) => { + deferred.promise.then(() => { + setTimeout(() => { + if (capturedConfig) { + callback(({ + id: 'test-session-id', + configuration: capturedConfig, + } as unknown) as DebugSession); + } + }, 10); + }); + return { dispose: () => {} }; + }); + } + function createWorkspaceFolder(folderPath: string): WorkspaceFolder { + return { + index: 0, + name: path.basename(folderPath), + uri: Uri.file(folderPath), + }; + } + function getTestLauncherScript(testProvider: TestProvider, pythonTestAdapterRewriteExperiment?: boolean) { + if (!pythonTestAdapterRewriteExperiment) { + switch (testProvider) { + case 'unittest': { + return path.join(EXTENSION_ROOT_DIR, 'python_files', 'unittestadapter', 'execution.py'); + } + case 'pytest': { + return path.join(EXTENSION_ROOT_DIR, 'python_files', 'vscode_pytest', 'run_pytest_script.py'); + } + default: { + throw new Error(`Unknown test provider '${testProvider}'`); + } + } + } + } + + function getDefaultDebugConfig(): DebugConfiguration { + return { + name: 'Debug Unit Test', + type: PythonDebuggerTypeName, + request: 'launch', + console: 'internalConsole', + env: {}, + envFile: __filename, + stopOnEntry: false, + showReturnValue: true, + redirectOutput: true, + debugStdLib: false, + subProcess: true, + purpose: [], + }; + } + function setupSuccess( + options: LaunchOptions, + testProvider: TestProvider, + expected?: DebugConfiguration, + debugConfigs?: string | DebugConfiguration[], + ) { + const testLaunchScript = getTestLauncherScript(testProvider, false); + + const workspaceFolders = [createWorkspaceFolder(options.cwd), createWorkspaceFolder('five/six/seven')]; + getWorkspaceFoldersStub.returns(workspaceFolders); + getWorkspaceFolderStub.returns(workspaceFolders[0]); + + if (!debugConfigs) { + pathExistsStub.resolves(false); + } else { + pathExistsStub.resolves(true); + + if (typeof debugConfigs !== 'string') { + debugConfigs = JSON.stringify({ + version: '0.1.0', + configurations: debugConfigs, + }); + } + readFileStub.resolves(debugConfigs as string); + } + + if (!expected) { + expected = getDefaultDebugConfig(); + } + expected.rules = [{ path: path.join(EXTENSION_ROOT_DIR, 'python_files'), include: false }]; + expected.program = testLaunchScript; + expected.args = options.args; + + if 
(!expected.cwd) { + expected.cwd = workspaceFolders[0].uri.fsPath; + } + const pluginPath = path.join(EXTENSION_ROOT_DIR, 'python_files'); + const pythonPath = `${pluginPath}${path.delimiter}${expected.cwd}`; + expected.env.PYTHONPATH = pythonPath; + expected.env.TEST_RUN_PIPE = 'pytestPort'; + expected.env.RUN_TEST_IDS_PIPE = 'runTestIdsPort'; + + // added by LaunchConfigurationResolver: + if (!expected.python) { + expected.python = 'python'; + } + if (!expected.clientOS) { + expected.clientOS = isOs(OSType.Windows) ? 'windows' : 'unix'; + } + if (!expected.debugAdapterPython) { + expected.debugAdapterPython = 'python'; + } + if (!expected.debugLauncherPython) { + expected.debugLauncherPython = 'python'; + } + expected.workspaceFolder = workspaceFolders[0].uri.fsPath; + expected.debugOptions = []; + if (expected.stopOnEntry) { + expected.debugOptions.push(DebugOptions.StopOnEntry); + } + if (expected.showReturnValue) { + expected.debugOptions.push(DebugOptions.ShowReturnValue); + } + if (expected.redirectOutput) { + expected.debugOptions.push(DebugOptions.RedirectOutput); + } + if (expected.subProcess) { + expected.debugOptions.push(DebugOptions.SubProcess); + } + if (isOs(OSType.Windows)) { + expected.debugOptions.push(DebugOptions.FixFilePathCase); + } + + setupDebugManager(workspaceFolders[0], expected, testProvider); + } + + const testProviders: TestProvider[] = ['pytest', 'unittest']; + + testProviders.forEach((testProvider) => { + const testTitleSuffix = `(Test Framework '${testProvider}')`; + + test(`Must launch debugger ${testTitleSuffix}`, async () => { + const options = { + cwd: 'one/two/three', + args: ['/one/two/three/testfile.py'], + testProvider, + runTestIdsPort: 'runTestIdsPort', + pytestPort: 'pytestPort', + }; + setupSuccess(options, testProvider); + + await debugLauncher.launchDebugger(options); + + try { + debugService.verifyAll(); + } catch (ex) { + console.log(ex); + } + }); + test(`Must launch debugger with arguments ${testTitleSuffix}`, async () => { + const options = { + cwd: 'one/two/three', + args: ['/one/two/three/testfile.py', '--debug', '1'], + testProvider, + runTestIdsPort: 'runTestIdsPort', + pytestPort: 'pytestPort', + }; + setupSuccess(options, testProvider); + + await debugLauncher.launchDebugger(options); + + debugService.verifyAll(); + }); + test(`Must not launch debugger if cancelled ${testTitleSuffix}`, async () => { + debugService + .setup((d) => d.startDebugging(TypeMoq.It.isAny(), TypeMoq.It.isAny(), TypeMoq.It.isAny())) + .returns(() => { + return Promise.resolve(undefined as any); + }) + .verifiable(TypeMoq.Times.never()); + + const cancellationToken = new CancellationTokenSource(); + cancellationToken.cancel(); + const token = cancellationToken.token; + const options: LaunchOptions = { + cwd: '', + args: [], + token, + testProvider, + runTestIdsPort: 'runTestIdsPort', + pytestPort: 'pytestPort', + }; + + await expect(debugLauncher.launchDebugger(options)).to.be.eventually.equal(undefined, 'not undefined'); + + debugService.verifyAll(); + }); + test(`Must throw an exception if there are no workspaces ${testTitleSuffix}`, async () => { + getWorkspaceFoldersStub.returns(undefined); + debugService + .setup((d) => d.startDebugging(TypeMoq.It.isAny(), TypeMoq.It.isAny())) + .returns(() => { + console.log('Debugging should not start'); + return Promise.resolve(undefined as any); + }) + .verifiable(TypeMoq.Times.never()); + + const options: LaunchOptions = { + cwd: '', + args: [], + testProvider, + runTestIdsPort: 'runTestIdsPort', + pytestPort: 
'pytestPort', + }; + + await expect(debugLauncher.launchDebugger(options)).to.eventually.rejectedWith('Please open a workspace'); + + debugService.verifyAll(); + }); + }); + + test('Tries launch.json first', async () => { + const options: LaunchOptions = { + cwd: 'one/two/three', + args: ['/one/two/three/testfile.py'], + testProvider: 'unittest', + runTestIdsPort: 'runTestIdsPort', + pytestPort: 'pytestPort', + }; + const expected = getDefaultDebugConfig(); + expected.name = 'spam'; + setupSuccess(options, 'unittest', expected, [{ name: 'spam', type: PythonDebuggerTypeName, request: 'test' }]); + + await debugLauncher.launchDebugger(options); + + debugService.verifyAll(); + }); + + test('Use cwd value in settings if exist', async () => { + unitTestSettings.setup((p) => p.cwd).returns(() => 'path/to/settings/cwd'); + const options: LaunchOptions = { + cwd: 'one/two/three', + args: ['/one/two/three/testfile.py'], + testProvider: 'unittest', + runTestIdsPort: 'runTestIdsPort', + pytestPort: 'pytestPort', + }; + const expected = getDefaultDebugConfig(); + expected.cwd = 'path/to/settings/cwd'; + const pluginPath = path.join(EXTENSION_ROOT_DIR, 'python_files'); + const pythonPath = `${pluginPath}${path.delimiter}${expected.cwd}`; + expected.env.PYTHONPATH = pythonPath; + + setupSuccess(options, 'unittest', expected); + await debugLauncher.launchDebugger(options); + + debugService.verifyAll(); + }); + + test('Full debug config', async () => { + const options: LaunchOptions = { + cwd: 'one/two/three', + args: ['/one/two/three/testfile.py'], + testProvider: 'unittest', + runTestIdsPort: 'runTestIdsPort', + pytestPort: 'pytestPort', + }; + const expected = { + name: 'my tests', + type: PythonDebuggerTypeName, + request: 'launch', + python: 'some/dir/bin/py3', + debugAdapterPython: 'some/dir/bin/py3', + debugLauncherPython: 'some/dir/bin/py3', + stopOnEntry: true, + showReturnValue: true, + console: 'integratedTerminal', + cwd: 'some/dir', + env: { + PYTHONPATH: 'one/two/three', + SPAM: 'EGGS', + TEST_RUN_PIPE: 'pytestPort', + RUN_TEST_IDS_PIPE: 'runTestIdsPort', + }, + envFile: 'some/dir/.env', + redirectOutput: false, + debugStdLib: true, + // added by LaunchConfigurationResolver: + internalConsoleOptions: 'neverOpen', + subProcess: true, + purpose: [], + }; + setupSuccess(options, 'unittest', expected, [ + { + name: 'my tests', + type: PythonDebuggerTypeName, + request: 'test', + pythonPath: expected.python, + stopOnEntry: expected.stopOnEntry, + showReturnValue: expected.showReturnValue, + console: expected.console, + cwd: expected.cwd, + env: expected.env, + envFile: expected.envFile, + redirectOutput: expected.redirectOutput, + debugStdLib: expected.debugStdLib, + }, + ]); + + await debugLauncher.launchDebugger(options); + + debugService.verifyAll(); + }); + + test('Uses first entry', async () => { + const options: LaunchOptions = { + cwd: 'one/two/three', + args: ['/one/two/three/testfile.py'], + testProvider: 'unittest', + runTestIdsPort: 'runTestIdsPort', + pytestPort: 'pytestPort', + }; + const expected = getDefaultDebugConfig(); + expected.name = 'spam1'; + setupSuccess(options, 'unittest', expected, [ + { name: 'spam1', type: PythonDebuggerTypeName, request: 'test' }, + { name: 'spam2', type: PythonDebuggerTypeName, request: 'test' }, + { name: 'spam3', type: PythonDebuggerTypeName, request: 'test' }, + ]); + + await debugLauncher.launchDebugger(options); + + debugService.verifyAll(); + }); + + test('Handles bad JSON', async () => { + const options: LaunchOptions = { + cwd: 
'one/two/three', + args: ['/one/two/three/testfile.py'], + testProvider: 'unittest', + runTestIdsPort: 'runTestIdsPort', + pytestPort: 'pytestPort', + }; + const expected = getDefaultDebugConfig(); + setupSuccess(options, 'unittest', expected, ']'); + + await debugLauncher.launchDebugger(options); + + debugService.verifyAll(); + }); + + const malformedFiles = [ + '// test 1', + '// test 2 \n\ + { \n\ + "name": "spam", \n\ + "type": "debugpy", \n\ + "request": "test" \n\ + } \n\ + ', + '// test 3 \n\ + [ \n\ + { \n\ + "name": "spam", \n\ + "type": "debugpy", \n\ + "request": "test" \n\ + } \n\ + ] \n\ + ', + '// test 4 \n\ + { \n\ + "configurations": [ \n\ + { \n\ + "name": "spam", \n\ + "type": "debugpy", \n\ + "request": "test" \n\ + } \n\ + ] \n\ + } \n\ + ', + ]; + for (const text of malformedFiles) { + const testID = text.split('\n')[0].substring(3).trim(); + test(`Handles malformed launch.json - ${testID}`, async () => { + const options: LaunchOptions = { + cwd: 'one/two/three', + args: ['/one/two/three/testfile.py'], + testProvider: 'unittest', + runTestIdsPort: 'runTestIdsPort', + pytestPort: 'pytestPort', + }; + const expected = getDefaultDebugConfig(); + setupSuccess(options, 'unittest', expected, text); + + await debugLauncher.launchDebugger(options); + + debugService.verifyAll(); + }); + } + + test('Handles bad debug config items', async () => { + const options: LaunchOptions = { + cwd: 'one/two/three', + args: ['/one/two/three/testfile.py'], + testProvider: 'unittest', + runTestIdsPort: 'runTestIdsPort', + pytestPort: 'pytestPort', + }; + const expected = getDefaultDebugConfig(); + + setupSuccess(options, 'unittest', expected, [ + {} as DebugConfiguration, + { name: 'spam1' } as DebugConfiguration, + { name: 'spam2', type: PythonDebuggerTypeName } as DebugConfiguration, + { name: 'spam3', request: 'test' } as DebugConfiguration, + { type: PythonDebuggerTypeName } as DebugConfiguration, + { type: PythonDebuggerTypeName, request: 'test' } as DebugConfiguration, + { request: 'test' } as DebugConfiguration, + ]); + + await debugLauncher.launchDebugger(options); + + debugService.verifyAll(); + }); + + test('Handles non-python debug configs', async () => { + const options: LaunchOptions = { + cwd: 'one/two/three', + args: ['/one/two/three/testfile.py'], + testProvider: 'unittest', + runTestIdsPort: 'runTestIdsPort', + pytestPort: 'pytestPort', + }; + const expected = getDefaultDebugConfig(); + setupSuccess(options, 'unittest', expected, [{ name: 'foo', type: 'other', request: 'bar' }]); + + await debugLauncher.launchDebugger(options); + + debugService.verifyAll(); + }); + + test('Handles bogus python debug configs', async () => { + const options: LaunchOptions = { + cwd: 'one/two/three', + args: ['/one/two/three/testfile.py'], + testProvider: 'unittest', + runTestIdsPort: 'runTestIdsPort', + pytestPort: 'pytestPort', + }; + const expected = getDefaultDebugConfig(); + setupSuccess(options, 'unittest', expected, [{ name: 'spam', type: PythonDebuggerTypeName, request: 'bogus' }]); + + await debugLauncher.launchDebugger(options); + + debugService.verifyAll(); + }); + + test('Handles non-test debug config', async () => { + const options: LaunchOptions = { + cwd: 'one/two/three', + args: ['/one/two/three/testfile.py'], + testProvider: 'unittest', + runTestIdsPort: 'runTestIdsPort', + pytestPort: 'pytestPort', + }; + const expected = getDefaultDebugConfig(); + setupSuccess(options, 'unittest', expected, [ + { name: 'spam', type: PythonDebuggerTypeName, request: 'launch' }, + { name: 'spam', 
type: PythonDebuggerTypeName, request: 'attach' },
+        ]);
+
+        await debugLauncher.launchDebugger(options);
+
+        debugService.verifyAll();
+    });
+
+    test('Handles mixed debug config', async () => {
+        const options: LaunchOptions = {
+            cwd: 'one/two/three',
+            args: ['/one/two/three/testfile.py'],
+            testProvider: 'unittest',
+            runTestIdsPort: 'runTestIdsPort',
+            pytestPort: 'pytestPort',
+        };
+        const expected = getDefaultDebugConfig();
+        expected.name = 'spam2';
+        setupSuccess(options, 'unittest', expected, [
+            { name: 'foo1', type: 'other', request: 'bar' },
+            { name: 'foo2', type: 'other', request: 'bar' },
+            { name: 'spam1', type: PythonDebuggerTypeName, request: 'launch' },
+            { name: 'spam2', type: PythonDebuggerTypeName, request: 'test' },
+            { name: 'spam3', type: PythonDebuggerTypeName, request: 'attach' },
+            { name: 'xyz', type: 'another', request: 'abc' },
+        ]);
+
+        await debugLauncher.launchDebugger(options);
+
+        debugService.verifyAll();
+    });
+
+    test('Handles comments', async () => {
+        const options: LaunchOptions = {
+            cwd: 'one/two/three',
+            args: ['/one/two/three/testfile.py'],
+            testProvider: 'unittest',
+            runTestIdsPort: 'runTestIdsPort',
+            pytestPort: 'pytestPort',
+        };
+        const expected = getDefaultDebugConfig();
+        expected.name = 'spam';
+        expected.stopOnEntry = true;
+        setupSuccess(
+            options,
+            'unittest',
+            expected,
+            ' \n\
+            { \n\
+                "version": "0.1.0", \n\
+                "configurations": [ \n\
+                    // my thing \n\
+                    { \n\
+                        // "test" debug config \n\
+                        "name": "spam", /* non-empty */ \n\
+                        "type": "debugpy", /* must be "debugpy" */ \n\
+                        "request": "test", /* must be "test" */ \n\
+                        // extra stuff here: \n\
+                        "stopOnEntry": true \n\
+                    } \n\
+                ] \n\
+            } \n\
+            ',
+        );
+
+        await debugLauncher.launchDebugger(options);
+
+        debugService.verifyAll();
+    });
+    test('Ensure trailing commas in JSON are handled', async () => {
+        const workspaceFolder = { name: 'abc', index: 0, uri: Uri.file(__filename) };
+        const filename = path.join(workspaceFolder.uri.fsPath, '.vscode', 'launch.json');
+        const jsonc = '{"version":"1234", "configurations":[1,2,],}';
+        pathExistsStub.resolves(true);
+        readFileStub.withArgs(filename).resolves(jsonc);
+
+        const configs = await debugLauncher.readAllDebugConfigs(workspaceFolder);
+
+        expect(configs).to.be.deep.equal([1, 2]);
+    });
+    test('Ensure empty configuration is returned when launch.json cannot be parsed', async () => {
+        const workspaceFolder = { name: 'abc', index: 0, uri: Uri.file(__filename) };
+        const filename = path.join(workspaceFolder.uri.fsPath, '.vscode', 'launch.json');
+        const jsonc = '{"version":"1234"';
+
+        pathExistsStub.resolves(true);
+        readFileStub.withArgs(filename).resolves(jsonc);
+
+        const configs = await debugLauncher.readAllDebugConfigs(workspaceFolder);
+
+        expect(configs).to.be.deep.equal([]);
+    });
+
+    // ===== PROJECT-BASED DEBUG SESSION TESTS =====
+
+    suite('Project-based debug sessions', () => {
+        function setupForProjectTests(options: LaunchOptions) {
+            interpreterService
+                .setup((i) => i.getActiveInterpreter(TypeMoq.It.isAny()))
+                .returns(() => Promise.resolve(({ path: 'python' } as unknown) as PythonEnvironment));
+            settings.setup((p) => p.envFile).returns(() => __filename);
+
+            debugEnvHelper
+                .setup((x) => x.getEnvironmentVariables(TypeMoq.It.isAny(), TypeMoq.It.isAny()))
+                .returns(() => Promise.resolve({}));
+
+            const workspaceFolders = [{ index: 0, name: 'test', uri: Uri.file(options.cwd) }];
+            getWorkspaceFoldersStub.returns(workspaceFolders);
+            getWorkspaceFolderStub.returns(workspaceFolders[0]);
+            pathExistsStub.resolves(false);
+
+            // Stub useEnvExtension to avoid null reference errors in tests
+            sinon.stub(envExtApi, 'useEnvExtension').returns(false);
+        }
+
+        /**
+         * Helper to set up debug service mocks with proper session lifecycle simulation.
+         * The implementation uses onDidStartDebugSession to capture the session via marker,
+         * then onDidTerminateDebugSession to resolve when that session ends.
+         */
+        function setupDebugServiceWithSessionLifecycle(): {
+            capturedConfigs: DebugConfiguration[];
+        } {
+            const capturedConfigs: DebugConfiguration[] = [];
+            let startCallback: ((session: DebugSession) => void) | undefined;
+            let terminateCallback: ((session: DebugSession) => void) | undefined;
+
+            debugService
+                .setup((d) => d.startDebugging(TypeMoq.It.isAny(), TypeMoq.It.isAny(), TypeMoq.It.isAny()))
+                .callback((_, config) => {
+                    capturedConfigs.push(config);
+                    // Simulate the full session lifecycle after startDebugging resolves
+                    setTimeout(() => {
+                        const session = ({
+                            id: `session-${capturedConfigs.length}`,
+                            configuration: config,
+                        } as unknown) as DebugSession;
+                        // Fire start first (so ourSession is captured)
+                        startCallback?.(session);
+                        // Then fire terminate (so the promise resolves)
+                        setTimeout(() => terminateCallback?.(session), 5);
+                    }, 5);
+                })
+                .returns(() => Promise.resolve(true));
+
+            debugService
+                .setup((d) => d.onDidStartDebugSession(TypeMoq.It.isAny()))
+                .callback((cb) => {
+                    startCallback = cb;
+                })
+                .returns(() => ({ dispose: () => {} }));
+
+            debugService
+                .setup((d) => d.onDidTerminateDebugSession(TypeMoq.It.isAny()))
+                .callback((cb) => {
+                    terminateCallback = cb;
+                })
+                .returns(() => ({ dispose: () => {} }));
+
+            return { capturedConfigs };
+        }
+
+        test('should use project name in config name when provided', async () => {
+            const options: LaunchOptions = {
+                cwd: 'one/two/three',
+                args: ['/one/two/three/testfile.py'],
+                testProvider: 'pytest',
+                runTestIdsPort: 'runTestIdsPort',
+                pytestPort: 'pytestPort',
+                project: { name: 'myproject (Python 3.11)', uri: Uri.file('one/two/three') },
+            };
+
+            setupForProjectTests(options);
+            const { capturedConfigs } = setupDebugServiceWithSessionLifecycle();
+
+            await debugLauncher.launchDebugger(options);
+
+            expect(capturedConfigs).to.have.length(1);
+            expect(capturedConfigs[0].name).to.equal('Debug Tests: myproject (Python 3.11)');
+        });
+
+        test('should use default python when no project provided', async () => {
+            const options: LaunchOptions = {
+                cwd: 'one/two/three',
+                args: ['/one/two/three/testfile.py'],
+                testProvider: 'pytest',
+                runTestIdsPort: 'runTestIdsPort',
+                pytestPort: 'pytestPort',
+            };
+
+            setupForProjectTests(options);
+            const { capturedConfigs } = setupDebugServiceWithSessionLifecycle();
+
+            await debugLauncher.launchDebugger(options);
+
+            expect(capturedConfigs).to.have.length(1);
+            // Should use the default 'python' from interpreterService mock
+            expect(capturedConfigs[0].python).to.equal('python');
+        });
+
+        test('should add unique session marker to launch config', async () => {
+            const options: LaunchOptions = {
+                cwd: 'one/two/three',
+                args: ['/one/two/three/testfile.py'],
+                testProvider: 'pytest',
+                runTestIdsPort: 'runTestIdsPort',
+                pytestPort: 'pytestPort',
+            };
+
+            setupForProjectTests(options);
+            const { capturedConfigs } = setupDebugServiceWithSessionLifecycle();
+
+            await debugLauncher.launchDebugger(options);
+
+            expect(capturedConfigs).to.have.length(1);
+            // Should have a session marker of format 'test-{timestamp}-{random}'
+            const marker = (capturedConfigs[0] as any).__vscodeTestSessionMarker;
+            expect(marker).to.be.a('string');
+            expect(marker).to.match(/^test-\d+-[a-z0-9]+$/);
+        });
+
+        test('should generate unique markers for each launch', async () => {
+            const options: LaunchOptions = {
+                cwd: 'one/two/three',
+                args: ['/one/two/three/testfile.py'],
+                testProvider: 'pytest',
+                runTestIdsPort: 'runTestIdsPort',
+                pytestPort: 'pytestPort',
+            };
+
+            setupForProjectTests(options);
+            const { capturedConfigs } = setupDebugServiceWithSessionLifecycle();
+
+            // Launch twice
+            await debugLauncher.launchDebugger(options);
+            await debugLauncher.launchDebugger(options);
+
+            expect(capturedConfigs).to.have.length(2);
+            const marker1 = (capturedConfigs[0] as any).__vscodeTestSessionMarker;
+            const marker2 = (capturedConfigs[1] as any).__vscodeTestSessionMarker;
+            expect(marker1).to.not.equal(marker2);
+        });
+
+        test('should only resolve when matching session terminates', async () => {
+            const options: LaunchOptions = {
+                cwd: 'one/two/three',
+                args: ['/one/two/three/testfile.py'],
+                testProvider: 'pytest',
+                runTestIdsPort: 'runTestIdsPort',
+                pytestPort: 'pytestPort',
+            };
+
+            setupForProjectTests(options);
+
+            let capturedConfig: DebugConfiguration | undefined;
+            let terminateCallback: ((session: DebugSession) => void) | undefined;
+            let startCallback: ((session: DebugSession) => void) | undefined;
+
+            debugService
+                .setup((d) => d.startDebugging(TypeMoq.It.isAny(), TypeMoq.It.isAny(), TypeMoq.It.isAny()))
+                .callback((_, config) => {
+                    capturedConfig = config;
+                })
+                .returns(() => Promise.resolve(true));
+
+            debugService
+                .setup((d) => d.onDidStartDebugSession(TypeMoq.It.isAny()))
+                .callback((cb) => {
+                    startCallback = cb;
+                })
+                .returns(() => ({ dispose: () => {} }));
+
+            debugService
+                .setup((d) => d.onDidTerminateDebugSession(TypeMoq.It.isAny()))
+                .callback((cb) => {
+                    terminateCallback = cb;
+                })
+                .returns(() => ({ dispose: () => {} }));
+
+            const launchPromise = debugLauncher.launchDebugger(options);
+
+            // Wait for config to be captured
+            await new Promise((r) => setTimeout(r, 10));
+
+            // Simulate our session starting
+            const ourSession = ({
+                id: 'our-session-id',
+                configuration: capturedConfig!,
+            } as unknown) as DebugSession;
+            startCallback?.(ourSession);
+
+            // Create a different session (like another project's debug)
+            const otherSession = ({
+                id: 'other-session-id',
+                configuration: { __vscodeTestSessionMarker: 'different-marker' },
+            } as unknown) as DebugSession;
+
+            // Terminate the OTHER session first - should NOT resolve our promise
+            terminateCallback?.(otherSession);
+
+            // Wait a bit to ensure it didn't resolve
+            let resolved = false;
+            const checkPromise = launchPromise.then(() => {
+                resolved = true;
+            });
+
+            await new Promise((r) => setTimeout(r, 20));
+            expect(resolved).to.be.false;
+
+            // Now terminate OUR session - should resolve
+            terminateCallback?.(ourSession);
+
+            await checkPromise;
+            expect(resolved).to.be.true;
+        });
+    });
+});
diff --git a/src/test/testing/testController/common/projectTestExecution.unit.test.ts b/src/test/testing/testController/common/projectTestExecution.unit.test.ts
index fa79e1531e40..9d5e40916127 100644
--- a/src/test/testing/testController/common/projectTestExecution.unit.test.ts
+++ b/src/test/testing/testController/common/projectTestExecution.unit.test.ts
@@ -7,191 +7,84 @@ import * as typemoq from 'typemoq';
 import {
     CancellationToken,
     CancellationTokenSource,
-    TestItem,
-    TestItemCollection,
     TestRun,
     TestRunProfile,
     TestRunProfileKind,
     TestRunRequest,
     Uri,
 } from 'vscode';
-import { IPythonExecutionFactory } from '../../../../client/common/process/types'; -import { ITestDebugLauncher } from '../../../../client/testing/common/types'; -import { ProjectAdapter } from '../../../../client/testing/testController/common/projectAdapter'; +import { + createMockDependencies, + createMockProjectAdapter, + createMockTestItem, + createMockTestItemWithoutUri, + createMockTestRun, +} from '../testMocks'; import { executeTestsForProject, executeTestsForProjects, findProjectForTestItem, getTestCaseNodesRecursive, groupTestItemsByProject, - ProjectExecutionDependencies, setupCoverageForProject, } from '../../../../client/testing/testController/common/projectTestExecution'; -import { TestProjectRegistry } from '../../../../client/testing/testController/common/testProjectRegistry'; -import { ITestExecutionAdapter, ITestResultResolver } from '../../../../client/testing/testController/common/types'; import * as telemetry from '../../../../client/telemetry'; +import * as envExtApi from '../../../../client/envExt/api.internal'; suite('Project Test Execution', () => { let sandbox: sinon.SinonSandbox; + let useEnvExtensionStub: sinon.SinonStub; setup(() => { sandbox = sinon.createSandbox(); + // Default to disabled env extension for path-based fallback tests + useEnvExtensionStub = sandbox.stub(envExtApi, 'useEnvExtension').returns(false); }); teardown(() => { sandbox.restore(); }); - // ===== HELPER FUNCTIONS ===== - - function createMockTestItem(id: string, uriPath: string, children?: TestItem[]): TestItem { - const childMap = new Map(); - children?.forEach((c) => childMap.set(c.id, c)); - - const mockChildren: TestItemCollection = { - size: childMap.size, - forEach: (callback: (item: TestItem, collection: TestItemCollection) => void) => { - childMap.forEach((item) => callback(item, mockChildren)); - }, - get: (itemId: string) => childMap.get(itemId), - add: () => {}, - delete: () => {}, - replace: () => {}, - [Symbol.iterator]: function* () { - for (const [key, value] of childMap) { - yield [key, value] as [string, TestItem]; - } - }, - } as TestItemCollection; - - return ({ - id, - uri: Uri.file(uriPath), - children: mockChildren, - label: id, - canResolveChildren: false, - busy: false, - tags: [], - range: undefined, - error: undefined, - parent: undefined, - } as unknown) as TestItem; - } - - function createMockTestItemWithoutUri(id: string): TestItem { - return ({ - id, - uri: undefined, - children: ({ size: 0, forEach: () => {} } as unknown) as TestItemCollection, - label: id, - } as unknown) as TestItem; - } - - function createMockProjectAdapter(config: { - projectPath: string; - projectName: string; - pythonPath?: string; - testProvider?: 'pytest' | 'unittest'; - }): ProjectAdapter & { executionAdapterStub: sinon.SinonStub } { - // Use a plain stub instead of TypeMoq for easier testing - const runTestsStub = sinon.stub().resolves(); - const executionAdapter: ITestExecutionAdapter = ({ - runTests: runTestsStub, - } as unknown) as ITestExecutionAdapter; - - const resultResolverMock: ITestResultResolver = ({ - vsIdToRunId: new Map(), - runIdToVSid: new Map(), - runIdToTestItem: new Map(), - detailedCoverageMap: new Map(), - resolveDiscovery: () => Promise.resolve(), - resolveExecution: () => {}, - } as unknown) as ITestResultResolver; - - const adapter = ({ - projectUri: Uri.file(config.projectPath), - projectName: config.projectName, - workspaceUri: Uri.file(config.projectPath), - testProvider: config.testProvider ?? 'pytest', - pythonEnvironment: config.pythonPath - ? 
{ - execInfo: { run: { executable: config.pythonPath } }, - } - : undefined, - pythonProject: { - name: config.projectName, - uri: Uri.file(config.projectPath), - }, - executionAdapter, - discoveryAdapter: {} as any, - resultResolver: resultResolverMock, - isDiscovering: false, - isExecuting: false, - // Expose the stub for testing - executionAdapterStub: runTestsStub, - } as unknown) as ProjectAdapter & { executionAdapterStub: sinon.SinonStub }; - - return adapter; - } - - function createMockDependencies(): ProjectExecutionDependencies { - return { - projectRegistry: typemoq.Mock.ofType().object, - pythonExecFactory: typemoq.Mock.ofType().object, - debugLauncher: typemoq.Mock.ofType().object, - }; - } - - function createMockTestRun(): typemoq.IMock { - const runMock = typemoq.Mock.ofType(); - runMock.setup((r) => r.started(typemoq.It.isAny())); - runMock.setup((r) => r.passed(typemoq.It.isAny(), typemoq.It.isAny())); - runMock.setup((r) => r.failed(typemoq.It.isAny(), typemoq.It.isAny(), typemoq.It.isAny())); - runMock.setup((r) => r.skipped(typemoq.It.isAny())); - runMock.setup((r) => r.end()); - return runMock; - } - // ===== findProjectForTestItem Tests ===== suite('findProjectForTestItem', () => { - test('should return undefined when test item has no URI', () => { + test('should return undefined when test item has no URI', async () => { // Mock const item = createMockTestItemWithoutUri('test1'); const projects = [createMockProjectAdapter({ projectPath: '/workspace/proj', projectName: 'proj' })]; // Run - const result = findProjectForTestItem(item, projects); + const result = await findProjectForTestItem(item, projects); // Assert expect(result).to.be.undefined; }); - test('should return matching project when item path is within project directory', () => { + test('should return matching project when item path is within project directory', async () => { // Mock const item = createMockTestItem('test1', '/workspace/proj/tests/test_file.py'); const project = createMockProjectAdapter({ projectPath: '/workspace/proj', projectName: 'proj' }); // Run - const result = findProjectForTestItem(item, [project]); + const result = await findProjectForTestItem(item, [project]); // Assert expect(result).to.equal(project); }); - test('should return undefined when item path is outside all project directories', () => { + test('should return undefined when item path is outside all project directories', async () => { // Mock const item = createMockTestItem('test1', '/other/path/test.py'); const project = createMockProjectAdapter({ projectPath: '/workspace/proj', projectName: 'proj' }); // Run - const result = findProjectForTestItem(item, [project]); + const result = await findProjectForTestItem(item, [project]); // Assert expect(result).to.be.undefined; }); - test('should return most specific (deepest) project when nested projects exist', () => { + test('should return most specific (deepest) project when nested projects exist', async () => { // Mock - parent and child project with overlapping paths const item = createMockTestItem('test1', '/workspace/parent/child/tests/test.py'); const parentProject = createMockProjectAdapter({ projectPath: '/workspace/parent', projectName: 'parent' }); @@ -201,13 +94,13 @@ suite('Project Test Execution', () => { }); // Run - const result = findProjectForTestItem(item, [parentProject, childProject]); + const result = await findProjectForTestItem(item, [parentProject, childProject]); // Assert - should match child (longer path) not parent 
expect(result).to.equal(childProject); }); - test('should return most specific project regardless of input order', () => { + test('should return most specific project regardless of input order', async () => { // Mock - same as above but different order const item = createMockTestItem('test1', '/workspace/parent/child/tests/test.py'); const parentProject = createMockProjectAdapter({ projectPath: '/workspace/parent', projectName: 'parent' }); @@ -217,23 +110,60 @@ suite('Project Test Execution', () => { }); // Run - pass child first, then parent - const result = findProjectForTestItem(item, [childProject, parentProject]); + const result = await findProjectForTestItem(item, [childProject, parentProject]); // Assert - order shouldn't affect result expect(result).to.equal(childProject); }); - test('should match item at project root level', () => { + test('should match item at project root level', async () => { // Mock const item = createMockTestItem('test1', '/workspace/proj/test.py'); const project = createMockProjectAdapter({ projectPath: '/workspace/proj', projectName: 'proj' }); // Run - const result = findProjectForTestItem(item, [project]); + const result = await findProjectForTestItem(item, [project]); // Assert expect(result).to.equal(project); }); + + test('should use env extension API when available', async () => { + // Enable env extension + useEnvExtensionStub.returns(true); + + // Mock the env extension API + const item = createMockTestItem('test1', '/workspace/proj/tests/test_file.py'); + const project = createMockProjectAdapter({ projectPath: '/workspace/proj', projectName: 'proj' }); + + const mockEnvApi = { + getPythonProject: sandbox.stub().returns({ uri: project.projectUri }), + }; + sandbox.stub(envExtApi, 'getEnvExtApi').resolves(mockEnvApi as any); + + // Run + const result = await findProjectForTestItem(item, [project]); + + // Assert + expect(result).to.equal(project); + expect(mockEnvApi.getPythonProject.calledOnceWith(item.uri)).to.be.true; + }); + + test('should fall back to path matching when env extension API is unavailable', async () => { + // Env extension enabled but throws + useEnvExtensionStub.returns(true); + sandbox.stub(envExtApi, 'getEnvExtApi').rejects(new Error('API unavailable')); + + // Mock + const item = createMockTestItem('test1', '/workspace/proj/tests/test_file.py'); + const project = createMockProjectAdapter({ projectPath: '/workspace/proj', projectName: 'proj' }); + + // Run + const result = await findProjectForTestItem(item, [project]); + + // Assert - should still work via fallback + expect(result).to.equal(project); + }); }); // ===== groupTestItemsByProject Tests ===== From a0c557d2f30e3b32791a60d52b82bc7669b984a1 Mon Sep 17 00:00:00 2001 From: eleanorjboyd <26030610+eleanorjboyd@users.noreply.github.com> Date: Fri, 6 Feb 2026 15:58:31 -0800 Subject: [PATCH 7/9] addressed comments --- src/client/testing/common/debugLauncher.ts | 29 ++----------- src/client/testing/common/types.ts | 7 +--- .../common/projectTestExecution.ts | 42 +++++++------------ 3 files changed, 20 insertions(+), 58 deletions(-) diff --git a/src/client/testing/common/debugLauncher.ts b/src/client/testing/common/debugLauncher.ts index 24892d6afe3d..ec319d9915eb 100644 --- a/src/client/testing/common/debugLauncher.ts +++ b/src/client/testing/common/debugLauncher.ts @@ -41,24 +41,7 @@ export class DebugLauncher implements ITestDebugLauncher { /** * Launches a debug session for test execution. 
- * - * **Cancellation handling:** - * Cancellation can occur from multiple sources, all properly handled: - * 1. **Pre-check**: If already cancelled before starting, returns immediately - * 2. **Token cancellation**: If the parent CancellationToken fires during debugging, - * the deferred resolves and the callback is invoked to clean up resources - * 3. **Session termination**: When the user stops debugging (via UI or completes), - * the onDidTerminateDebugSession event fires and we resolve - * - * **Multi-session support:** - * When debugging tests from multiple projects simultaneously, each launchDebugger() - * call needs to track its own debug session independently. We use a unique marker - * in the launch configuration to identify which session belongs to which call, - * avoiding race conditions with the global `activeDebugSession` property. - * - * @param options Launch configuration including test provider, args, and optional project info - * @param callback Called when the debug session ends (for cleanup like closing named pipes) - * @param sessionOptions VS Code debug session options (e.g., testRun association) + * Handles cancellation, multi-session support via unique markers, and cleanup. */ public async launchDebugger( options: LaunchOptions, @@ -105,12 +88,7 @@ export class DebugLauncher implements ITestDebugLauncher { ); const debugManager = this.serviceContainer.get(IDebugService); - // Generate a unique marker for this debug session. - // When multiple debug sessions start in parallel (e.g., debugging tests from - // multiple projects), we can't rely on debugManager.activeDebugSession because - // it's a global that could be overwritten by another concurrent session start. - // Instead, we embed a unique marker in our launch configuration and match it - // when the session starts to identify which session is ours. + // Unique marker to identify this session among concurrent debug sessions const sessionMarker = `test-${Date.now()}-${Math.random().toString(36).substring(7)}`; launchArgs[TEST_SESSION_MARKER_KEY] = sessionMarker; @@ -341,7 +319,8 @@ export class DebugLauncher implements ITestDebugLauncher { launchArgs.purpose = []; // For project-based execution, get the Python path from the project's environment. - // This ensures debug sessions use the correct interpreter for each project. + // Fallback: if env API unavailable or fails, LaunchConfigurationResolver already set + // launchArgs.python from the active interpreter, so debugging still works. if (options.project && envExtApi.useEnvExtension()) { try { const pythonEnv = await envExtApi.getEnvironment(options.project.uri); diff --git a/src/client/testing/common/types.ts b/src/client/testing/common/types.ts index 4f999305c3ee..e2fa2d6d2e5a 100644 --- a/src/client/testing/common/types.ts +++ b/src/client/testing/common/types.ts @@ -27,12 +27,7 @@ export type LaunchOptions = { pytestPort?: string; pytestUUID?: string; runTestIdsPort?: string; - /** - * Optional Python project for project-based execution. - * When provided, the debug launcher will: - * - Use the project's associated Python environment - * - Name the debug session after the project - */ + /** Optional Python project for project-based execution. 
*/ project?: PythonProject; }; diff --git a/src/client/testing/testController/common/projectTestExecution.ts b/src/client/testing/testController/common/projectTestExecution.ts index a11402ed3279..970c216f6d50 100644 --- a/src/client/testing/testController/common/projectTestExecution.ts +++ b/src/client/testing/testController/common/projectTestExecution.ts @@ -2,7 +2,7 @@ // Licensed under the MIT License. import { CancellationToken, FileCoverageDetail, TestItem, TestRun, TestRunProfileKind, TestRunRequest } from 'vscode'; -import { traceError, traceInfo, traceVerbose } from '../../../logging'; +import { traceError, traceInfo, traceVerbose, traceWarn } from '../../../logging'; import { sendTelemetryEvent } from '../../../telemetry'; import { EventName } from '../../../telemetry/constants'; import { IPythonExecutionFactory } from '../../../common/process/types'; @@ -71,7 +71,7 @@ export async function executeTestsForProjects( traceInfo(`[test-by-project] Executing ${items.length} test item(s) for project: ${project.projectName}`); sendTelemetryEvent(EventName.UNITTEST_RUN, undefined, { - tool: 'pytest', + tool: project.testProvider, debugging: isDebugMode, }); @@ -100,10 +100,7 @@ export async function executeTestsForProjects( } /** - * Lookup context for project resolution during a single test run. - * Maps file paths to their resolved ProjectAdapter to avoid - * repeated API calls and linear searches. - * Created fresh per run and discarded after grouping completes. + * Lookup context for avoiding redundant API calls within a single test run. */ interface ProjectLookupContext { /** Maps file URI fsPath → resolved ProjectAdapter (or undefined if no match) */ @@ -113,15 +110,8 @@ interface ProjectLookupContext { } /** - * Groups test items by their owning project using the Python Environment API. - * Each test item's URI is matched to a project via the API's getPythonProject method. - * Falls back to path-based matching when the extension API is not available. - * - * Uses a per-run cache to avoid redundant API calls for test items sharing the same file. - * - * Time complexity: O(n + p) amortized, where n = test items, p = projects - * - Building adapter lookup map: O(p) - * - Each test item: O(1) amortized (cached after first lookup per unique file) + * Groups test items by their owning project. Uses env API when available, else falls back to path matching. + * Time complexity: O(n + p) amortized via per-run caching. */ export async function groupTestItemsByProject( testItems: TestItem[], @@ -134,7 +124,8 @@ export async function groupTestItemsByProject( result.set(getProjectId(project.projectUri), { project, items: [] }); } - // Build lookup context for this run - O(p) setup, enables O(1) lookups + // Build lookup context for this run - O(p) one-time setup, enables O(1) lookups per item. + // When tests are from a single project, most lookups hit the cache after the first item. const lookupContext: ProjectLookupContext = { uriToAdapter: new Map(), projectPathToAdapter: new Map(projects.map((p) => [p.projectUri.fsPath, p])), @@ -150,7 +141,7 @@ export async function groupTestItemsByProject( } } else { // If no project matches, log it - traceVerbose(`[test-by-project] Could not match test item ${item.id} to a project`); + traceWarn(`[test-by-project] Could not match test item ${item.id} to a project`); } } @@ -165,12 +156,7 @@ export async function groupTestItemsByProject( } /** - * Finds the project that owns a test item based on the test item's URI. 
- * Uses the Python Environment extension API when available, falling back - * to path-based matching (longest matching path prefix). - * - * Results are stored in the lookup context to avoid redundant API calls for items in the same file. - * Time complexity: O(1) amortized with context, O(p) worst case on context miss. + * Finds the project that owns a test item. Uses env API when available, else path-based matching. */ export async function findProjectForTestItem( item: TestItem, @@ -188,7 +174,9 @@ export async function findProjectForTestItem( let result: ProjectAdapter | undefined; - // Try using the Python Environment extension API first + // Try using the Python Environment extension API first. + // Legacy path: when useEnvExtension() is false, this block is skipped and we go + // directly to findProjectByPath() below (path-based matching). if (useEnvExtension()) { try { const envExtApi = await getEnvExtApi(); @@ -206,7 +194,8 @@ export async function findProjectForTestItem( } } - // Fallback: path-based matching (most specific/longest path wins) + // Fallback: path-based matching when env API unavailable or didn't find a match. + // O(p) time complexity where p = number of projects. if (!result) { result = findProjectByPath(item, projects); } @@ -220,8 +209,7 @@ export async function findProjectForTestItem( } /** - * Finds the project that owns a test item using path-based matching. - * Returns the most specific (longest path) matching project. + * Fallback: finds project using path-based matching. O(p) time complexity. */ function findProjectByPath(item: TestItem, projects: ProjectAdapter[]): ProjectAdapter | undefined { if (!item.uri) return undefined; From d4767df8b1cc2af28e515c5d14c7a7162549203f Mon Sep 17 00:00:00 2001 From: eleanorjboyd <26030610+eleanorjboyd@users.noreply.github.com> Date: Fri, 6 Feb 2026 16:54:11 -0800 Subject: [PATCH 8/9] error fix --- .../testController/common/projectTestExecution.unit.test.ts | 6 ++++-- 1 file changed, 4 insertions(+), 2 deletions(-) diff --git a/src/test/testing/testController/common/projectTestExecution.unit.test.ts b/src/test/testing/testController/common/projectTestExecution.unit.test.ts index 9d5e40916127..cbd0a972cdd4 100644 --- a/src/test/testing/testController/common/projectTestExecution.unit.test.ts +++ b/src/test/testing/testController/common/projectTestExecution.unit.test.ts @@ -653,7 +653,9 @@ suite('Project Test Execution', () => { // Mock const project = createMockProjectAdapter({ projectPath: '/workspace/proj', projectName: 'proj' }); const mockCoverageDetails = [{ line: 1, executed: true }]; - project.resultResolver.detailedCoverageMap.set('/workspace/proj/file.py', mockCoverageDetails as any); + // Use Uri.fsPath as the key to match the implementation's lookup + const fileUri = Uri.file('/workspace/proj/file.py'); + project.resultResolver.detailedCoverageMap.set(fileUri.fsPath, mockCoverageDetails as any); const profileMock = ({ kind: TestRunProfileKind.Coverage, loadDetailedCoverage: undefined, @@ -664,7 +666,7 @@ suite('Project Test Execution', () => { setupCoverageForProject(request, project); // Run - call the configured callback - const fileCoverage = { uri: Uri.file('/workspace/proj/file.py') }; + const fileCoverage = { uri: fileUri }; const result = await profileMock.loadDetailedCoverage!( {} as TestRun, fileCoverage as any, From 84d5f98effe5ac31abf0b6dc28d01c46aa9e7589 Mon Sep 17 00:00:00 2001 From: eleanorjboyd <26030610+eleanorjboyd@users.noreply.github.com> Date: Fri, 6 Feb 2026 16:58:34 -0800 Subject: [PATCH 
9/9] start discovery better if using envs ext

---
 .../testing/testController/controller.ts      | 24 ++++++++++++-------
 1 file changed, 16 insertions(+), 8 deletions(-)

diff --git a/src/client/testing/testController/controller.ts b/src/client/testing/testController/controller.ts
index 9a694e3bbf55..ea04ecbd1438 100644
--- a/src/client/testing/testController/controller.ts
+++ b/src/client/testing/testController/controller.ts
@@ -612,11 +612,15 @@ export class PythonTestController implements ITestController, IExtensionSingleAc
         await Promise.all(
             workspaces.map(async (workspace) => {
-                if (!(await this.interpreterService.getActiveInterpreter(workspace.uri))) {
-                    this.commandManager
-                        .executeCommand(constants.Commands.TriggerEnvironmentSelection, workspace.uri)
-                        .then(noop, noop);
-                    return;
+                // In project-based mode, each project has its own environment,
+                // so we don't require a global active interpreter
+                if (!useEnvExtension()) {
+                    if (!(await this.interpreterService.getActiveInterpreter(workspace.uri))) {
+                        this.commandManager
+                            .executeCommand(constants.Commands.TriggerEnvironmentSelection, workspace.uri)
+                            .then(noop, noop);
+                        return;
+                    }
                 }
                 await this.discoverTestsInWorkspace(workspace.uri);
             }),
@@ -699,9 +703,13 @@
         const workspaces: readonly WorkspaceFolder[] = this.workspaceService.workspaceFolders || [];
         await Promise.all(
             workspaces.map(async (workspace) => {
-                if (!(await this.interpreterService.getActiveInterpreter(workspace.uri))) {
-                    traceError('Cannot trigger test discovery as a valid interpreter is not selected');
-                    return;
+                // In project-based mode, each project has its own environment,
+                // so we don't require a global active interpreter
+                if (!useEnvExtension()) {
+                    if (!(await this.interpreterService.getActiveInterpreter(workspace.uri))) {
+                        traceError('Cannot trigger test discovery as a valid interpreter is not selected');
+                        return;
+                    }
                 }
                 await this.refreshTestDataInternal(workspace.uri);
             }),
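
The session-marker mechanism that the debugLauncher changes and the new unit tests both exercise reduces to a small pattern. The sketch below is illustrative only, assuming plain `vscode.debug` event wiring; the helper name `waitForSessionEnd` is invented for the example and does not appear in the patch:

```typescript
import { DebugConfiguration, DebugSession, debug } from 'vscode';

const TEST_SESSION_MARKER_KEY = '__vscodeTestSessionMarker';

// Each launch gets its own marker, so concurrent project debug runs can tell
// their sessions apart without relying on the global `debug.activeDebugSession`,
// which another concurrent launch may overwrite.
function waitForSessionEnd(config: DebugConfiguration): Promise<void> {
    const marker = `test-${Date.now()}-${Math.random().toString(36).substring(7)}`;
    config[TEST_SESSION_MARKER_KEY] = marker;

    return new Promise<void>((resolve) => {
        let ourSession: DebugSession | undefined;

        const startListener = debug.onDidStartDebugSession((session) => {
            // Claim only the session carrying our marker.
            if (session.configuration[TEST_SESSION_MARKER_KEY] === marker) {
                ourSession = session;
            }
        });

        const endListener = debug.onDidTerminateDebugSession((session) => {
            // Ignore terminations of other projects' sessions.
            if (ourSession && session.id === ourSession.id) {
                startListener.dispose();
                endListener.dispose();
                resolve();
            }
        });
    });
}
```

This is exactly the lifecycle the 'should only resolve when matching session terminates' test simulates: a foreign session terminating first must not resolve the promise.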
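
The grouping strategy described in the projectTestExecution.ts comments (per-run cache plus longest-path fallback) can likewise be sketched in isolation. Assumed here: a simplified `Project` shape with only `projectUri`; the real `ProjectAdapter` carries more state and consults the env extension API first:

```typescript
import * as path from 'path';
import { TestItem, Uri } from 'vscode';

// Simplified project shape for illustration only.
interface Project {
    projectUri: Uri;
}

function groupByProject(items: TestItem[], projects: Project[]): Map<Project, TestItem[]> {
    // Per-run cache: file path -> owning project (or undefined when no match).
    // Items in the same file hit the cache after the first lookup, which is what
    // gives the O(n + p) amortized behavior the patch comments describe.
    const cache = new Map<string, Project | undefined>();
    const groups = new Map<Project, TestItem[]>();
    for (const p of projects) {
        groups.set(p, []);
    }

    for (const item of items) {
        if (!item.uri) {
            continue;
        }
        const key = item.uri.fsPath;
        if (!cache.has(key)) {
            // Path-based fallback: the most specific (longest) project path wins,
            // so nested projects shadow their parents.
            let best: Project | undefined;
            for (const p of projects) {
                const root = p.projectUri.fsPath;
                if (key === root || key.startsWith(root + path.sep)) {
                    if (!best || root.length > best.projectUri.fsPath.length) {
                        best = p;
                    }
                }
            }
            cache.set(key, best);
        }
        const owner = cache.get(key);
        if (owner) {
            groups.get(owner)!.push(item);
        }
    }
    return groups;
}
```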
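
The PATCH 8/9 test fix hinges on keying the coverage map by `Uri.fsPath` so that the lookup inside `loadDetailedCoverage` matches the key the result resolver wrote. A minimal sketch of that wiring, assuming VS Code's test coverage API (1.88+) and an invented `wireCoverage` helper:

```typescript
import { FileCoverage, FileCoverageDetail, TestRun, TestRunProfile } from 'vscode';

// Store and look up detailed coverage with Uri.fsPath keys on both sides,
// so the key written by the result resolver matches the key read here.
function wireCoverage(
    profile: TestRunProfile,
    detailedCoverageMap: Map<string, FileCoverageDetail[]>,
): void {
    profile.loadDetailedCoverage = (_run: TestRun, fileCoverage: FileCoverage) =>
        Promise.resolve(detailedCoverageMap.get(fileCoverage.uri.fsPath) ?? []);
}
```

Keying by `Uri.fsPath` rather than a raw string avoids platform-specific mismatches (drive-letter casing, separators) between the path stored at result-resolution time and the `FileCoverage.uri` handed back by VS Code.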