From ea6c53a728ddc95d5c8ce7ad9e0d90f62524a143 Mon Sep 17 00:00:00 2001 From: David Cramer Date: Mon, 4 May 2026 10:25:55 -0700 Subject: [PATCH 01/13] [codex] Paginate replay segment downloads (#910) MIME-Version: 1.0 Content-Type: text/plain; charset=UTF-8 Content-Transfer-Encoding: 8bit Paginate replay recording segment downloads so long replays are fully available to replay view and future replay inspection commands. The API caps recording segment pages at 100 segments, so the helper now follows Link cursors and stops when the replay metadata count is satisfied. **Replay API Compatibility** Remove `ota_updates` from the replay list field set because the live replay index endpoint rejects it as invalid. The replay schemas still tolerate `ota_updates` when a payload includes it. **Local Smoke Testing** Add `bun run cli` as a lightweight runner for live CLI checks without regenerating docs and SDK first. Validated with focused replay and explore tests, typecheck, lint, and a live read-only smoke test on a 471-segment replay that fetched five segment pages: 100, 100, 100, 100, 71. Refs GH-907 --------- Co-authored-by: OpenAI Codex Co-authored-by: Miguel Betegón --- DEVELOPMENT.md | 4 +- README.md | 4 +- docs/src/content/docs/contributing.md | 2 +- package.json | 1 + src/commands/replay/view.ts | 3 +- src/lib/api/replays.ts | 72 +++++++++++++++++++-- src/types/replay.ts | 1 - test/commands/replay/view.test.ts | 6 ++ test/lib/api/replays.test.ts | 93 +++++++++++++++++++++++++-- 9 files changed, 170 insertions(+), 16 deletions(-) diff --git a/DEVELOPMENT.md b/DEVELOPMENT.md index 1dfa1a5b1..1d016f247 100644 --- a/DEVELOPMENT.md +++ b/DEVELOPMENT.md @@ -26,7 +26,7 @@ Get the client ID from your Sentry OAuth application settings. ## Running Locally ```bash -bun run --env-file=.env.local src/bin.ts auth login +bun run --env-file=.env.local cli auth login ``` ## Testing the Device Flow @@ -34,7 +34,7 @@ bun run --env-file=.env.local src/bin.ts auth login 1. 
Run the CLI login command: ```bash -bun run --env-file=.env.local src/bin.ts auth login +bun run --env-file=.env.local cli auth login ``` 2. You'll see output like: diff --git a/README.md b/README.md index 80cb3e7c0..4bb25d2f5 100644 --- a/README.md +++ b/README.md @@ -133,10 +133,10 @@ bun install ```bash # Run CLI in development mode -bun run dev --help +bun run cli --help # With environment variables -bun run --env-file=.env.local src/bin.ts --help +bun run --env-file=.env.local cli --help ``` ### Scripts diff --git a/docs/src/content/docs/contributing.md b/docs/src/content/docs/contributing.md index 042ac62d5..eff1bcec6 100644 --- a/docs/src/content/docs/contributing.md +++ b/docs/src/content/docs/contributing.md @@ -25,7 +25,7 @@ cd cli bun install # Run CLI in development mode -bun run --env-file=.env.local src/bin.ts --help +bun run --env-file=.env.local cli --help # Run tests bun test diff --git a/package.json b/package.json index e7872c8b1..acbc28168 100644 --- a/package.json +++ b/package.json @@ -72,6 +72,7 @@ "@sentry/core@10.50.0": "patches/@sentry%2Fcore@10.50.0.patch" }, "scripts": { + "cli": "bun run src/bin.ts", "dev": "bun run generate:schema && bun run generate:docs && bun run generate:sdk && bun run src/bin.ts", "build": "bun run generate:schema && bun run generate:docs && bun run generate:sdk && bun run script/build.ts --single", "build:all": "bun run generate:schema && bun run generate:docs && bun run generate:sdk && bun run script/build.ts", diff --git a/src/commands/replay/view.ts b/src/commands/replay/view.ts index d2f37ae27..95f368590 100644 --- a/src/commands/replay/view.ts +++ b/src/commands/replay/view.ts @@ -232,7 +232,8 @@ async function fetchReplayActivity( const segments = await getReplayRecordingSegments( org, String(replay.project_id), - replay.id + replay.id, + { expectedSegments: replay.count_segments } ); return extractReplayActivityEvents(segments, MAX_ACTIVITY_EVENTS); } catch (error) { diff --git a/src/lib/api/replays.ts 
b/src/lib/api/replays.ts index fbf5dfcdb..a1a722b29 100644 --- a/src/lib/api/replays.ts +++ b/src/lib/api/replays.ts @@ -24,6 +24,7 @@ import { API_MAX_PER_PAGE, apiRequestToRegion, autoPaginate, + MAX_PAGINATION_PAGES, type PaginatedResponse, parseLinkHeader, } from "./infrastructure.js"; @@ -104,6 +105,29 @@ type FetchReplayPageOptions = { cursor?: string; }; +type FetchReplayRecordingSegmentsPageOptions = { + regionUrl: string; + orgSlug: string; + projectSlugOrId: string; + replayId: string; + cursor?: string; +}; + +/** Options for {@link getReplayRecordingSegments}. */ +export type GetReplayRecordingSegmentsOptions = { + /** + * Soft stop hint: total segment count from replay metadata. + * + * Pagination stops as soon as this many segments have been fetched, even if + * the API advertises another cursor. Because metadata can be slightly stale, + * the result is NOT trimmed to this value — callers may receive more segments + * than expected if the final page overshoots. + * + * Omit (or pass null/undefined) to fetch all pages up to MAX_PAGINATION_PAGES. + */ + expectedSegments?: number | null; +}; + /** * Coerce numeric project_id to string for consistent downstream handling. * @@ -214,22 +238,62 @@ export async function getReplay( * Uses the project-scoped replay endpoint because recording segments are * partitioned by project. `download=true` matches the frontend contract and * returns the parsed segment payload directly. + * + * Uses a manual pagination loop rather than {@link autoPaginate} because + * `autoPaginate` trims results to `limit`, but `expectedSegments` is a soft + * hint — trimming could silently drop real segments if metadata is stale. 
*/ export async function getReplayRecordingSegments( orgSlug: string, projectSlugOrId: string, - replayId: string + replayId: string, + options: GetReplayRecordingSegmentsOptions = {} ): Promise { const regionUrl = await resolveOrgRegion(orgSlug); - const { data } = await apiRequestToRegion( + const expectedSegments = options.expectedSegments ?? Number.POSITIVE_INFINITY; + const segments: ReplayRecordingSegments = []; + let cursor: string | undefined; + + for (let page = 0; page < MAX_PAGINATION_PAGES; page += 1) { + const { data, nextCursor } = await fetchReplayRecordingSegmentsPage({ + regionUrl, + orgSlug, + projectSlugOrId, + replayId, + cursor, + }); + + segments.push(...data); + + if (segments.length >= expectedSegments || !nextCursor) { + return segments; + } + + cursor = nextCursor; + } + + return segments; +} + +async function fetchReplayRecordingSegmentsPage( + options: FetchReplayRecordingSegmentsPageOptions +): Promise> { + const { cursor, orgSlug, projectSlugOrId, regionUrl, replayId } = options; + const { data, headers } = await apiRequestToRegion( regionUrl, `/projects/${orgSlug}/${projectSlugOrId}/replays/${replayId}/recording-segments/`, { - params: { download: true }, + params: { + cursor, + download: true, + per_page: API_MAX_PER_PAGE, + }, schema: ReplayRecordingSegmentsSchema, } ); - return data; + + const { nextCursor } = parseLinkHeader(headers.get("link") ?? 
null); + return { data, nextCursor }; } /** diff --git a/src/types/replay.ts b/src/types/replay.ts index 82a26984e..99e617405 100644 --- a/src/types/replay.ts +++ b/src/types/replay.ts @@ -121,7 +121,6 @@ export const REPLAY_LIST_FIELDS = [ "info_ids", "is_archived", "os", - "ota_updates", "platform", "project_id", "releases", diff --git a/test/commands/replay/view.test.ts b/test/commands/replay/view.test.ts index f94bc63ea..153aea4d9 100644 --- a/test/commands/replay/view.test.ts +++ b/test/commands/replay/view.test.ts @@ -208,6 +208,12 @@ describe("viewCommand.func", () => { expect(parsed.relatedIssues[0]?.shortId).toBe("CLI-123"); expect(parsed.relatedTraces[0]?.spanCount).toBe(8); expect(parsed.trace_ids[0]).toBe("aaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaa"); + expect(getReplayRecordingSegmentsSpy).toHaveBeenCalledWith( + "test-org", + "42", + REPLAY_ID, + { expectedSegments: 5 } + ); expect(listIssuesPaginatedSpy).toHaveBeenCalledWith( "test-org", "", diff --git a/test/lib/api/replays.test.ts b/test/lib/api/replays.test.ts index 0b2f0f18f..7b8e9f0d8 100644 --- a/test/lib/api/replays.test.ts +++ b/test/lib/api/replays.test.ts @@ -3,6 +3,7 @@ */ import { afterEach, beforeEach, describe, expect, test } from "bun:test"; +import { MAX_PAGINATION_PAGES } from "../../../src/lib/api/infrastructure.js"; import { getReplay, getReplayRecordingSegments, @@ -29,6 +30,24 @@ function replayRow(id = REPLAY_ID) { }; } +function recordingSegmentsResponse( + body: unknown, + nextCursor?: string +): Response { + const headers: Record = { + "Content-Type": "application/json", + }; + + if (nextCursor) { + headers.Link = `; rel="next"; results="true"; cursor="${nextCursor}"`; + } + + return new Response(JSON.stringify(body), { + status: 200, + headers, + }); +} + describe("listReplays", () => { let originalFetch: typeof globalThis.fetch; @@ -73,7 +92,6 @@ describe("listReplays", () => { expect(url.searchParams.get("statsPeriod")).toBe("24h"); 
expect(url.searchParams.get("per_page")).toBe("25"); expect(url.searchParams.getAll("field")).toContain("id"); - expect(url.searchParams.getAll("field")).toContain("ota_updates"); expect(url.searchParams.getAll("field")).toContain("user"); expect(result.data).toHaveLength(1); expect(result.nextCursor).toBe("0:25:0"); @@ -244,10 +262,7 @@ describe("getReplayRecordingSegments", () => { globalThis.fetch = mockFetch(async (input, init) => { const req = new Request(input!, init); capturedUrl = req.url; - return new Response(JSON.stringify([[{ timestamp: 1 }]]), { - status: 200, - headers: { "Content-Type": "application/json" }, - }); + return recordingSegmentsResponse([[{ timestamp: 1 }]]); }); const segments = await getReplayRecordingSegments( @@ -261,8 +276,76 @@ describe("getReplayRecordingSegments", () => { `/api/0/projects/test-org/42/replays/${REPLAY_ID}/recording-segments/` ); expect(url.searchParams.get("download")).toBe("true"); + expect(url.searchParams.get("per_page")).toBe("100"); expect(segments).toEqual([[{ timestamp: 1 }]]); }); + + test("auto-paginates recording segments using the link cursor", async () => { + const capturedUrls: string[] = []; + let callIndex = 0; + + globalThis.fetch = mockFetch(async (input, init) => { + const req = new Request(input!, init); + capturedUrls.push(req.url); + + const body = + callIndex === 0 + ? Array.from({ length: 100 }, (_, index) => [{ segment: index }]) + : [[{ segment: 100 }]]; + const nextCursor = callIndex === 0 ? 
"0:100:0" : undefined; + callIndex += 1; + + return recordingSegmentsResponse(body, nextCursor); + }); + + const segments = await getReplayRecordingSegments( + "test-org", + "42", + REPLAY_ID, + { expectedSegments: 101 } + ); + + expect(segments).toHaveLength(101); + expect(capturedUrls).toHaveLength(2); + + const firstUrl = new URL(capturedUrls[0]!); + expect(firstUrl.searchParams.get("per_page")).toBe("100"); + expect(firstUrl.searchParams.get("cursor")).toBeNull(); + + const secondUrl = new URL(capturedUrls[1]!); + expect(secondUrl.searchParams.get("cursor")).toBe("0:100:0"); + expect(secondUrl.searchParams.get("per_page")).toBe("100"); + }); + + test("stops recording segment pagination at the safety cap", async () => { + const capturedUrls: string[] = []; + let callIndex = 0; + + globalThis.fetch = mockFetch(async (input, init) => { + const req = new Request(input!, init); + capturedUrls.push(req.url); + + const nextCursor = `0:${(callIndex + 1) * 100}:0`; + const body = [[{ segment: callIndex }]]; + callIndex += 1; + + return recordingSegmentsResponse(body, nextCursor); + }); + + const segments = await getReplayRecordingSegments( + "test-org", + "42", + REPLAY_ID + ); + + expect(segments).toHaveLength(MAX_PAGINATION_PAGES); + expect(capturedUrls).toHaveLength(MAX_PAGINATION_PAGES); + + const finalUrl = new URL(capturedUrls.at(-1)!); + expect(finalUrl.searchParams.get("cursor")).toBe( + `0:${(MAX_PAGINATION_PAGES - 1) * 100}:0` + ); + }); }); describe("listReplayIdsForIssue", () => { From a3ae0f33c74d29b6e34d02f781e099ad245841a9 Mon Sep 17 00:00:00 2001 From: David Cramer Date: Mon, 4 May 2026 11:29:46 -0700 Subject: [PATCH 02/13] feat(replay): Add agent-readable replay timelines Add path-aware replay discovery, normalized event listing, and deterministic replay summaries so agents can inspect session behavior without relying on raw replay frames. Generate skill and docs output from command metadata. 
Add focused coverage for path matching, event normalization, summary signals, and JSONL output. Refs GH-907 Co-Authored-By: OpenAI Codex --- docs/src/content/docs/contributing.md | 4 +- docs/src/fragments/commands/replay.md | 35 + plugins/sentry-cli/skills/sentry-cli/SKILL.md | 2 + .../skills/sentry-cli/references/replay.md | 96 ++- script/generate-docs-sections.ts | 17 +- src/commands/replay/event/index.ts | 24 + src/commands/replay/event/list.ts | 427 +++++++++++ src/commands/replay/index.ts | 7 + src/commands/replay/list.ts | 196 ++++- src/commands/replay/shared.ts | 110 +++ src/commands/replay/summarize.ts | 263 +++++++ src/commands/replay/target.ts | 118 +++ src/commands/replay/view.ts | 113 +-- src/lib/command.ts | 1 + src/lib/formatters/output.ts | 15 +- src/lib/replay-events.ts | 701 ++++++++++++++++++ src/lib/replay-search.ts | 78 ++ src/lib/replay-summary.ts | 661 +++++++++++++++++ src/types/index.ts | 16 + src/types/replay.ts | 204 +++++ test/commands/replay/event-list.test.ts | 176 +++++ test/commands/replay/list.test.ts | 100 +++ test/commands/replay/summarize.test.ts | 129 ++++ test/lib/replay-events.test.ts | 149 ++++ test/lib/replay-summary.test.ts | 92 +++ 25 files changed, 3602 insertions(+), 132 deletions(-) create mode 100644 src/commands/replay/event/index.ts create mode 100644 src/commands/replay/event/list.ts create mode 100644 src/commands/replay/shared.ts create mode 100644 src/commands/replay/summarize.ts create mode 100644 src/commands/replay/target.ts create mode 100644 src/lib/replay-events.ts create mode 100644 src/lib/replay-summary.ts create mode 100644 test/commands/replay/event-list.test.ts create mode 100644 test/commands/replay/summarize.test.ts create mode 100644 test/lib/replay-events.test.ts create mode 100644 test/lib/replay-summary.test.ts diff --git a/docs/src/content/docs/contributing.md b/docs/src/content/docs/contributing.md index eff1bcec6..05920341d 100644 --- a/docs/src/content/docs/contributing.md +++ 
b/docs/src/content/docs/contributing.md @@ -53,14 +53,14 @@ cli/ │ ├── commands/ # CLI commands │ │ ├── auth/ # login, logout, refresh, status, token, whoami │ │ ├── cli/ # defaults, feedback, fix, setup, upgrade -│ │ ├── dashboard/ # list, view, create, add, edit, delete +│ │ ├── dashboard/ # list, view, create, widget add, widget edit, widget delete │ │ ├── event/ # view, list │ │ ├── issue/ # list, events, explain, plan, view, resolve, unresolve, archive, merge │ │ ├── log/ # list, view │ │ ├── org/ # list, view │ │ ├── project/ # create, delete, list, view │ │ ├── release/ # list, view, create, finalize, delete, deploy, deploys, set-commits, propose-version -│ │ ├── replay/ # list, view +│ │ ├── replay/ # event list, list, summarize, view │ │ ├── repo/ # list │ │ ├── sourcemap/ # inject, upload │ │ ├── span/ # list, view diff --git a/docs/src/fragments/commands/replay.md b/docs/src/fragments/commands/replay.md index b57df2515..e8f26adb8 100644 --- a/docs/src/fragments/commands/replay.md +++ b/docs/src/fragments/commands/replay.md @@ -13,6 +13,12 @@ sentry replay list my-org/ --query "environment:production" # Change the time window and sort sentry replay list my-org/frontend --period 24h --sort errors +# Find recent sessions that actually visited a route path +sentry replay list my-org/frontend --path /signup --json + +# Find recent sessions with indexed friction signals +sentry replay list my-org/frontend --path /signup --friction --json + # Paginate through results sentry replay list my-org/frontend -c next sentry replay list my-org/frontend -c prev @@ -36,3 +42,32 @@ sentry replay view my-org/frontend/346789a703f6454384f1de473b8b9fcc # Open a replay in the browser sentry replay view my-org/346789a703f6454384f1de473b8b9fcc --web ``` + +### Summarize behavior + +```bash +# Summarize route flow, event counts, timings, and friction signals +sentry replay summarize my-org/346789a703f6454384f1de473b8b9fcc --json + +# Focus the summary on a particular route path 
+sentry replay summarize my-org/346789a703f6454384f1de473b8b9fcc \ + --path /signup --json +``` + +### Inspect replay events + +```bash +# List normalized replay events for agent-readable inspection +sentry replay events my-org/346789a703f6454384f1de473b8b9fcc --json + +# Focus on user actions and failures on a page +sentry replay events my-org/346789a703f6454384f1de473b8b9fcc \ + --path /signup --kind click,network,console,error --json + +# Pull an evidence window around a timestamp +sentry replay events my-org/346789a703f6454384f1de473b8b9fcc \ + --around 01:23 --json + +# Emit newline-delimited JSON for large timelines +sentry replay events my-org/346789a703f6454384f1de473b8b9fcc --json --jsonl +``` diff --git a/plugins/sentry-cli/skills/sentry-cli/SKILL.md b/plugins/sentry-cli/skills/sentry-cli/SKILL.md index d89f44bef..dca697e39 100644 --- a/plugins/sentry-cli/skills/sentry-cli/SKILL.md +++ b/plugins/sentry-cli/skills/sentry-cli/SKILL.md @@ -367,7 +367,9 @@ Manage Sentry dashboards Search and inspect Session Replays +- `sentry replay event list ` — List normalized events from a Session Replay - `sentry replay list ` — List recent Session Replays +- `sentry replay summarize ` — Summarize Session Replay behavior - `sentry replay view ` — View a Session Replay → Full flags and examples: `references/replay.md` diff --git a/plugins/sentry-cli/skills/sentry-cli/references/replay.md b/plugins/sentry-cli/skills/sentry-cli/references/replay.md index 32154321d..9a618e4e0 100644 --- a/plugins/sentry-cli/skills/sentry-cli/references/replay.md +++ b/plugins/sentry-cli/skills/sentry-cli/references/replay.md @@ -11,6 +11,49 @@ requires: Search and inspect Session Replays +### `sentry replay event list ` + +List normalized events from a Session Replay + +**Flags:** +- `-k, --kind ... 
- Event kind filter (navigation, click, tap, input, focus, blur, scroll, viewport, mutation, dom-snapshot, breadcrumb, network, console, error, span, web-vital, memory, video, mobile, unknown)` +- `-u, --url - Filter events by current or target URL substring` +- `--path - Filter events by parsed URL pathname` +- `-q, --contains - Filter events by text in labels, messages, URLs, selectors, or data` +- `--selector - Filter events by selector substring` +- `--from - Start offset (seconds, 90s, 01:23, or 1:02:03)` +- `--to - End offset (seconds, 90s, 01:23, or 1:02:03)` +- `--around - Center an evidence window around this offset` +- `--before - Window before --around (default: 10s)` +- `--after - Window after --around (default: 30s)` +- `-n, --limit - Number of events (1-1000) - (default: "200")` +- `--raw - Include raw source frame payloads in JSON output` +- `--jsonl - Emit one JSON object per event (requires --json)` +- `-f, --fresh - Bypass cache, re-detect projects, and fetch fresh data` + +**JSON Fields** (use `--json --fields` to select specific fields): + +| Field | Type | Description | +|-------|------|-------------| +| `replayId` | string | Replay ID | +| `segmentIndex` | number | Zero-based recording segment index | +| `frameIndex` | number | Zero-based frame index within segment | +| `offsetMs` | number \| null | Milliseconds from replay start to the event | +| `timestamp` | string \| null | Event timestamp as ISO 8601 when available | +| `kind` | string | Normalized event kind | +| `category` | string | Broad event category | +| `label` | string \| null | Short event label | +| `message` | string \| null | Message or summary | +| `url` | string \| null | Current or target URL | +| `urlPath` | string \| null | Parsed URL pathname when available | +| `urlQuery` | string \| null | Parsed URL query string when available | +| `selector` | string \| null | CSS selector or target selector when available | +| `nodeId` | unknown \| null | rrweb node ID when 
available | +| `rawType` | string \| null | Source frame type | +| `rawSource` | string \| null | Source frame subtype | +| `data` | unknown | Kind-specific normalized fields | +| `raw` | unknown | Raw source frame, only present when requested | + ### `sentry replay list ` List recent Session Replays @@ -18,8 +61,14 @@ List recent Session Replays **Flags:** - `-n, --limit - Number of replays (1-1000) - (default: "25")` - `-q, --query - Search query (Sentry replay search syntax)` +- `-u, --url - Filter by visited URL text using replay search` +- `--path - Filter by actual visited URL pathname` +- `--entry-path - Filter by first visited URL pathname` +- `--exit-path - Filter by last visited URL pathname` +- `--friction - Only show replays with indexed friction signals (errors, warnings, rage clicks, or dead clicks)` +- `--problem-only - Only show replays with errors, warnings, rage clicks, or dead clicks` - `-e, --environment ... - Filter by environment (repeatable, comma-separated)` -- `-s, --sort - Sort by: date, oldest, duration, errors, activity, or a raw replay sort field - (default: "date")` +- `-s, --sort - Sort by: date, oldest, duration, errors, warnings, rage, dead, activity, or a raw replay sort field - (default: "date")` - `-t, --period - Time range: "7d", "2026-04-01..2026-05-01", ">=2026-04-01" - (default: "7d")` - `-f, --fresh - Bypass cache, re-detect projects, and fetch fresh data` - `-c, --cursor - Navigate pages: "next", "prev", "first" (or raw cursor string)` @@ -72,6 +121,12 @@ sentry replay list my-org/ --query "environment:production" # Change the time window and sort sentry replay list my-org/frontend --period 24h --sort errors +# Find recent sessions that actually visited a route path +sentry replay list my-org/frontend --path /signup --json + +# Find recent sessions with indexed friction signals +sentry replay list my-org/frontend --path /signup --friction --json + # Paginate through results sentry replay list my-org/frontend -c next sentry 
replay list my-org/frontend -c prev @@ -80,6 +135,45 @@ sentry replay list my-org/frontend -c prev sentry replay list my-org/frontend --json ``` +### `sentry replay summarize ` + +Summarize Session Replay behavior + +**Flags:** +- `--path - Focus summary on events from this URL pathname` +- `--limit-signals - Maximum friction signals to include (0-50) - (default: "10")` +- `--limit-events - Maximum notable events to include (0-50) - (default: "12")` +- `-f, --fresh - Bypass cache, re-detect projects, and fetch fresh data` + +**JSON Fields** (use `--json --fields` to select specific fields): + +| Field | Type | Description | +|-------|------|-------------| +| `replayId` | string | Replay ID | +| `org` | string | Organization slug | +| `project` | string \| null | Project slug | +| `startedAt` | string \| null | Replay start time | +| `durationSeconds` | number \| null | Replay duration in seconds | +| `entryUrl` | string \| null | First replay URL | +| `exitUrl` | string \| null | Last replay URL | +| `focusPath` | string \| null | Optional route path used to focus the summary | +| `counts` | object | Normalized event counts | +| `timings` | object | Key timing observations | +| `routes` | array | Route timeline | +| `signals` | array | Detected non-error and error friction signals | +| `notableEvents` | array | Representative events useful for agent narrative | + +**Examples:** + +```bash +# Summarize route flow, event counts, timings, and friction signals +sentry replay summarize my-org/346789a703f6454384f1de473b8b9fcc --json + +# Focus the summary on a particular route path +sentry replay summarize my-org/346789a703f6454384f1de473b8b9fcc \ + --path /signup --json +``` + ### `sentry replay view ` View a Session Replay diff --git a/script/generate-docs-sections.ts b/script/generate-docs-sections.ts index ebeebed08..bd7994878 100644 --- a/script/generate-docs-sections.ts +++ b/script/generate-docs-sections.ts @@ -127,13 +127,20 @@ function isStandaloneCommand(route: 
RouteInfo): boolean { /** * Get subcommand names for a route group (e.g., "list, view, create"). - * Extracts the last path segment from each command's path. + * Preserves nested subcommands as "parent child" so route groups do not + * collapse multiple commands to the same final segment. */ function getSubcommandNames(route: RouteInfo): string[] { - return route.commands.map((cmd) => { - const parts = cmd.path.split(" "); - return parts.at(-1) ?? route.name; - }); + const prefix = `sentry ${route.name} `; + return [ + ...new Set( + route.commands.map((cmd) => + cmd.path.startsWith(prefix) + ? cmd.path.slice(prefix.length) + : (cmd.path.split(" ").at(-1) ?? route.name) + ) + ), + ]; } /** diff --git a/src/commands/replay/event/index.ts b/src/commands/replay/event/index.ts new file mode 100644 index 000000000..05dbfd8e9 --- /dev/null +++ b/src/commands/replay/event/index.ts @@ -0,0 +1,24 @@ +/** + * sentry replay event + * + * Inspect normalized events from Session Replay recordings. + */ + +import { buildRouteMap } from "../../../lib/route-map.js"; +import { listCommand } from "./list.js"; + +export const eventRoute = buildRouteMap({ + routes: { + list: listCommand, + }, + defaultCommand: "list", + docs: { + brief: "Inspect normalized replay events", + fullDescription: + "Inspect normalized events extracted from Session Replay recordings.\n\n" + + "Commands:\n" + + " list List normalized replay events\n\n" + + "Alias: `sentry replay events` → `sentry replay event list`", + hideRoute: {}, + }, +}); diff --git a/src/commands/replay/event/list.ts b/src/commands/replay/event/list.ts new file mode 100644 index 000000000..8e9496e4a --- /dev/null +++ b/src/commands/replay/event/list.ts @@ -0,0 +1,427 @@ +/** + * sentry replay event list + * + * List normalized events extracted from a Session Replay recording. 
+ */ + +import type { SentryContext } from "../../../context.js"; +import { validateLimit } from "../../../lib/arg-parsing.js"; +import { buildCommand } from "../../../lib/command.js"; +import { ValidationError } from "../../../lib/errors.js"; +import { + escapeMarkdownCell, + formatTable, +} from "../../../lib/formatters/index.js"; +import { filterFields } from "../../../lib/formatters/json.js"; +import { CommandOutput } from "../../../lib/formatters/output.js"; +import type { Column } from "../../../lib/formatters/table.js"; +import { formatDurationCompactMs } from "../../../lib/formatters/time-utils.js"; +import { validateHexId } from "../../../lib/hex-id.js"; +import { + applyFreshFlag, + FRESH_ALIASES, + FRESH_FLAG, + LIST_MAX_LIMIT, + LIST_MIN_LIMIT, +} from "../../../lib/list-command.js"; +import { withProgress } from "../../../lib/polling.js"; +import { + extractNormalizedReplayEvents, + filterNormalizedReplayEvents, + parseReplayOffset, +} from "../../../lib/replay-events.js"; +import { resolveOrgOptionalProjectFromArg } from "../../../lib/resolve-target.js"; +import { + REPLAY_EVENT_KINDS, + type ReplayEvent, + type ReplayEventKind, + ReplayEventSchema, +} from "../../../types/index.js"; +import { + fetchReplayDetailsForCommand, + fetchReplaySegmentsForCommand, + validateReplayProjectScope, +} from "../shared.js"; +import { parseReplayTargetArgs } from "../target.js"; + +type EventListFlags = { + readonly after?: number; + readonly around?: number; + readonly before?: number; + readonly contains?: string; + readonly fields?: string[]; + readonly fresh: boolean; + readonly from?: number; + readonly json: boolean; + readonly jsonl: boolean; + readonly kind?: readonly string[]; + readonly limit: number; + readonly path?: string; + readonly raw: boolean; + readonly selector?: string; + readonly to?: number; + readonly url?: string; +}; + +type EventListResult = { + events: ReplayEvent[]; + total: number; + truncated: boolean; + replayId: string; + org: 
string; + project?: string; +}; + +type ReplayEventOutput = EventListResult | ReplayEvent; + +const COMMAND_NAME = "replay event list"; +const USAGE_HINT = + "sentry replay event list [//] | "; +const DEFAULT_LIMIT = 200; +const DEFAULT_BEFORE_MS = 10_000; +const DEFAULT_AFTER_MS = 30_000; + +const REPLAY_EVENT_KIND_SET = new Set(REPLAY_EVENT_KINDS); + +function parseLimit(value: string): number { + return validateLimit(value, LIST_MIN_LIMIT, LIST_MAX_LIMIT); +} + +function parseOffsetFlag(value: string): number { + return parseReplayOffset(value); +} + +function parseEventKinds( + values: readonly string[] | undefined +): ReplayEventKind[] { + const kinds = values + ? [...values] + .flatMap((value) => value.split(",")) + .map((value) => value.trim()) + .filter(Boolean) + : []; + + for (const kind of kinds) { + if (!REPLAY_EVENT_KIND_SET.has(kind)) { + throw new ValidationError( + `Invalid replay event kind "${kind}". Must be one of: ${REPLAY_EVENT_KINDS.join(", ")}`, + "kind" + ); + } + } + + return kinds as ReplayEventKind[]; +} + +function resolveWindow(flags: EventListFlags): { + fromMs?: number; + toMs?: number; +} { + if ( + flags.around !== undefined && + (flags.from !== undefined || flags.to !== undefined) + ) { + throw new ValidationError( + "--around cannot be combined with --from or --to", + "around" + ); + } + + if (flags.around === undefined) { + return { fromMs: flags.from, toMs: flags.to }; + } + + const before = flags.before ?? DEFAULT_BEFORE_MS; + const after = flags.after ?? DEFAULT_AFTER_MS; + return { + fromMs: Math.max(0, flags.around - before), + toMs: flags.around + after, + }; +} + +function eventLabel(event: ReplayEvent): string { + return event.label ?? event.message ?? event.selector ?? "—"; +} + +function formatOffset(event: ReplayEvent): string { + return event.offsetMs === null + ? 
"—" + : formatDurationCompactMs(event.offsetMs); +} + +const EVENT_COLUMNS: Column[] = [ + { + header: "OFFSET", + value: formatOffset, + minWidth: 8, + shrinkable: false, + }, + { + header: "KIND", + value: (event) => event.kind, + minWidth: 10, + }, + { + header: "LABEL", + value: (event) => escapeMarkdownCell(eventLabel(event)), + minWidth: 18, + truncate: true, + }, + { + header: "URL", + value: (event) => escapeMarkdownCell(event.url ?? "—"), + minWidth: 20, + truncate: true, + }, + { + header: "POINTER", + value: (event) => `${event.segmentIndex}:${event.frameIndex}`, + minWidth: 9, + shrinkable: false, + }, +]; + +function formatEventListHuman(result: EventListResult): string { + if (result.events.length === 0) { + return "No replay events matched the filters."; + } + + const scope = result.project + ? `${result.org}/${result.project}` + : `${result.org}`; + return ( + `Replay events for ${scope}/${result.replayId.slice(0, 8)}:\n\n` + + formatTable(result.events, EVENT_COLUMNS, { truncate: true }) + ); +} + +function isEventListResult(data: ReplayEventOutput): data is EventListResult { + return "events" in data; +} + +function jsonTransformEventOutput( + data: ReplayEventOutput, + fields?: string[] +): unknown { + if (!isEventListResult(data)) { + return fields && fields.length > 0 ? filterFields(data, fields) : data; + } + + const items = + fields && fields.length > 0 + ? 
data.events.map((event) => filterFields(event, fields)) + : data.events; + return { + data: items, + total: data.total, + truncated: data.truncated, + replayId: data.replayId, + org: data.org, + project: data.project, + }; +} + +function validateJsonlMode(flags: EventListFlags): void { + if (flags.jsonl && !flags.json) { + throw new ValidationError("--jsonl requires --json", "jsonl"); + } +} + +export const listCommand = buildCommand({ + docs: { + brief: "List normalized events from a Session Replay", + fullDescription: + "List normalized events extracted from Session Replay recording segments.\n\n" + + "Replay ID formats:\n" + + " - auto-detect org from config or DSN\n" + + " / - explicit organization\n" + + " // - explicit org/project context\n" + + " - parse org and replay ID from a Sentry URL\n\n" + + "Examples:\n" + + " sentry replay events sentry/346789a703f6454384f1de473b8b9fcc --json\n" + + " sentry replay event list sentry/cli/346789a703f6454384f1de473b8b9fcc --kind click,network,error\n" + + " sentry replay events sentry/346789a703f6454384f1de473b8b9fcc --path /signup --json\n" + + " sentry replay events sentry/346789a703f6454384f1de473b8b9fcc --around 01:23 --json\n" + + " sentry replay events sentry/346789a703f6454384f1de473b8b9fcc --json --jsonl", + }, + output: { + human: formatEventListHuman, + jsonTransform: jsonTransformEventOutput, + schema: ReplayEventSchema, + }, + parameters: { + positional: { + kind: "array", + parameter: { + placeholder: "replay-id-or-url", + brief: "[/] or ", + parse: String, + }, + }, + flags: { + kind: { + kind: "parsed", + parse: String, + brief: `Event kind filter (${REPLAY_EVENT_KINDS.join(", ")})`, + variadic: true, + optional: true, + }, + url: { + kind: "parsed", + parse: String, + brief: "Filter events by current or target URL substring", + optional: true, + }, + path: { + kind: "parsed", + parse: String, + brief: "Filter events by parsed URL pathname", + optional: true, + }, + contains: { + kind: "parsed", + parse: 
String, + brief: + "Filter events by text in labels, messages, URLs, selectors, or data", + optional: true, + }, + selector: { + kind: "parsed", + parse: String, + brief: "Filter events by selector substring", + optional: true, + }, + from: { + kind: "parsed", + parse: parseOffsetFlag, + brief: "Start offset (seconds, 90s, 01:23, or 1:02:03)", + optional: true, + }, + to: { + kind: "parsed", + parse: parseOffsetFlag, + brief: "End offset (seconds, 90s, 01:23, or 1:02:03)", + optional: true, + }, + around: { + kind: "parsed", + parse: parseOffsetFlag, + brief: "Center an evidence window around this offset", + optional: true, + }, + before: { + kind: "parsed", + parse: parseOffsetFlag, + brief: "Window before --around (default: 10s)", + optional: true, + }, + after: { + kind: "parsed", + parse: parseOffsetFlag, + brief: "Window after --around (default: 30s)", + optional: true, + }, + limit: { + kind: "parsed", + parse: parseLimit, + brief: `Number of events (${LIST_MIN_LIMIT}-${LIST_MAX_LIMIT})`, + default: String(DEFAULT_LIMIT), + }, + raw: { + kind: "boolean", + brief: "Include raw source frame payloads in JSON output", + default: false, + }, + jsonl: { + kind: "boolean", + brief: "Emit one JSON object per event (requires --json)", + default: false, + }, + fresh: FRESH_FLAG, + }, + aliases: { + ...FRESH_ALIASES, + k: "kind", + n: "limit", + q: "contains", + u: "url", + }, + }, + async *func(this: SentryContext, flags: EventListFlags, ...args: string[]) { + validateJsonlMode(flags); + applyFreshFlag(flags); + + const parsedArgs = parseReplayTargetArgs(args, USAGE_HINT); + const replayId = validateHexId(parsedArgs.replayId, "replay ID"); + const resolved = await resolveOrgOptionalProjectFromArg( + parsedArgs.targetArg, + this.cwd, + COMMAND_NAME + ); + + const replay = await withProgress( + { message: "Fetching replay metadata...", json: flags.json }, + () => + fetchReplayDetailsForCommand( + resolved.org, + replayId, + "sentry replay event list" + ) + ); + + 
validateReplayProjectScope({ + replay, + projectId: resolved.projectData?.id, + replayId, + org: resolved.org, + project: resolved.project, + command: "sentry replay event list", + }); + + const segments = await fetchReplaySegmentsForCommand({ + org: resolved.org, + replay, + replayId, + project: resolved.project, + json: flags.json, + }); + + const kinds = parseEventKinds(flags.kind); + const window = resolveWindow(flags); + const allEvents = extractNormalizedReplayEvents(replay, segments, { + includeRaw: flags.raw, + }); + const filtered = filterNormalizedReplayEvents(allEvents, { + kinds, + url: flags.url, + path: flags.path, + contains: flags.contains, + selector: flags.selector, + ...window, + }); + const events = filtered.slice(0, flags.limit); + const truncated = filtered.length > events.length; + + if (flags.jsonl) { + for (const event of events) { + yield new CommandOutput(event); + } + return; + } + + yield new CommandOutput({ + events, + total: filtered.length, + truncated, + replayId, + org: resolved.org, + project: resolved.project, + }); + + const countText = `Showing ${events.length} of ${filtered.length} replay event${filtered.length === 1 ? "" : "s"}.`; + const truncationHint = truncated + ? 
` Increase --limit or narrow filters to inspect the remaining ${filtered.length - events.length}.` + : ""; + return { hint: `${countText}${truncationHint}` }; + }, +}); diff --git a/src/commands/replay/index.ts b/src/commands/replay/index.ts index a5c658703..2953a640b 100644 --- a/src/commands/replay/index.ts +++ b/src/commands/replay/index.ts @@ -5,21 +5,28 @@ */ import { buildRouteMap } from "../../lib/route-map.js"; +import { eventRoute } from "./event/index.js"; import { listCommand } from "./list.js"; +import { summarizeCommand } from "./summarize.js"; import { viewCommand } from "./view.js"; export const replayRoute = buildRouteMap({ routes: { + event: eventRoute, list: listCommand, + summarize: summarizeCommand, view: viewCommand, }, + aliases: { events: "event" }, defaultCommand: "view", docs: { brief: "Search and inspect Session Replays", fullDescription: "Search and inspect Session Replays from your Sentry organization.\n\n" + "Commands:\n" + + " event Inspect normalized events from a replay (alias: events)\n" + " list List recent replays in an org or project\n" + + " summarize Summarize replay behavior and friction signals\n" + " view View details of a specific replay\n\n" + "Alias: `sentry replays` → `sentry replay list`", hideRoute: {}, diff --git a/src/commands/replay/list.ts b/src/commands/replay/list.ts index ebc10a7fa..1286d9aec 100644 --- a/src/commands/replay/list.ts +++ b/src/commands/replay/list.ts @@ -42,6 +42,7 @@ import { withProgress } from "../../lib/polling.js"; import { getReplayUserLabel, parseReplayEnvironmentFilter, + replayMatchesPath, } from "../../lib/replay-search.js"; import { resolveOrgOptionalProjectFromArg } from "../../lib/resolve-target.js"; import { sanitizeQuery } from "../../lib/search-query.js"; @@ -60,6 +61,11 @@ import { type ListFlags = { readonly environment?: readonly string[]; readonly limit: number; + readonly "problem-only": boolean; + readonly friction: boolean; + readonly "entry-path"?: string; + readonly 
"exit-path"?: string; + readonly path?: string; readonly query?: string; readonly sort: ReplaySortValue; readonly period: TimeRange; @@ -67,6 +73,7 @@ type ListFlags = { readonly cursor?: string; readonly fresh: boolean; readonly fields?: string[]; + readonly url?: string; }; type ReplayListResult = { @@ -78,20 +85,46 @@ type ReplayListResult = { project?: string; }; -type ReplaySortKey = "date" | "oldest" | "duration" | "errors" | "activity"; +type ReplaySortKey = + | "activity" + | "date" + | "dead" + | "duration" + | "errors" + | "oldest" + | "rage" + | "warnings"; + +type ReplayListHintFlags = Pick< + ListFlags, + | "entry-path" + | "environment" + | "exit-path" + | "friction" + | "path" + | "problem-only" + | "query" + | "sort" + | "period" + | "url" +>; const SORT_MAP: Record = { + activity: "-activity", date: "-started_at", - oldest: "started_at", + dead: "-count_dead_clicks", duration: "-duration", errors: "-count_errors", - activity: "-activity", + oldest: "started_at", + rage: "-count_rage_clicks", + warnings: "-count_warnings", }; const DEFAULT_PERIOD = LIST_PERIOD_FLAG.default; const DEFAULT_SORT: ReplaySortValue = SORT_MAP.date; const PAGINATION_KEY = "replay-list"; const COMMAND_NAME = "replay list"; +const SIMPLE_SEARCH_VALUE_RE = /^[^\s:"]+$/; function parseLimit(value: string): number { return validateLimit(value, LIST_MIN_LIMIT, LIST_MAX_LIMIT); @@ -125,6 +158,73 @@ function replayUserLabel(replay: ReplayListItem): string { return getReplayUserLabel(replay) ?? "—"; } +function quoteSearchValue(value: string): string { + return SIMPLE_SEARCH_VALUE_RE.test(value) ? 
value : JSON.stringify(value); +} + +function wildcardSearchValue(value: string): string { + const trimmed = value.trim(); + if (trimmed.includes("*")) { + return trimmed; + } + return `*${trimmed}*`; +} + +function buildReplaySearchQuery(filters: { + query?: string; + url?: string; + path?: string; + entryPath?: string; + exitPath?: string; +}): string | undefined { + const { entryPath, exitPath, path, query, url } = filters; + const parts = [ + query, + url ? `url:${quoteSearchValue(wildcardSearchValue(url))}` : undefined, + path ? `url:${quoteSearchValue(wildcardSearchValue(path))}` : undefined, + entryPath + ? `url:${quoteSearchValue(wildcardSearchValue(entryPath))}` + : undefined, + exitPath + ? `url:${quoteSearchValue(wildcardSearchValue(exitPath))}` + : undefined, + ].filter((part): part is string => Boolean(part)); + return parts.length > 0 ? parts.join(" ") : undefined; +} + +function hasProblemSignals(replay: ReplayListItem): boolean { + return ( + (replay.count_errors ?? 0) > 0 || + (replay.count_warnings ?? 0) > 0 || + (replay.count_rage_clicks ?? 0) > 0 || + (replay.count_dead_clicks ?? 0) > 0 || + replay.error_ids.length > 0 || + replay.warning_ids.length > 0 + ); +} + +function replayMatchesRouteFilters( + replay: ReplayListItem, + flags: ListFlags +): boolean { + if (flags.path && !replayMatchesPath(replay, flags.path)) { + return false; + } + if ( + flags["entry-path"] && + !replayMatchesPath(replay, flags["entry-path"], "entry") + ) { + return false; + } + if ( + flags["exit-path"] && + !replayMatchesPath(replay, flags["exit-path"], "exit") + ) { + return false; + } + return true; +} + const REPLAY_COLUMNS: Column[] = [ { header: "ID", @@ -174,13 +274,28 @@ function formatScope(org: string, project?: string): string { return project ? 
`${org}/${project}` : `${org}/`; } -function appendReplayFlags( - base: string, - flags: Pick -): string { +function appendReplayFlags(base: string, flags: ReplayListHintFlags): string { const parts: string[] = []; appendQueryHint(parts, flags.query); appendSortHint(parts, flags.sort, DEFAULT_SORT); + if (flags.url) { + parts.push(`--url "${flags.url}"`); + } + if (flags.path) { + parts.push(`--path "${flags.path}"`); + } + if (flags["entry-path"]) { + parts.push(`--entry-path "${flags["entry-path"]}"`); + } + if (flags["exit-path"]) { + parts.push(`--exit-path "${flags["exit-path"]}"`); + } + if (flags.friction) { + parts.push("--friction"); + } + if (flags["problem-only"]) { + parts.push("--problem-only"); + } if (flags.environment && flags.environment.length > 0) { for (const environment of flags.environment) { parts.push(`-e "${environment}"`); @@ -193,7 +308,7 @@ function appendReplayFlags( function nextPageHint( org: string, project: string | undefined, - flags: Pick + flags: ReplayListHintFlags ): string { return appendReplayFlags( `sentry replay list ${formatScope(org, project)} -c next`, @@ -204,7 +319,7 @@ function nextPageHint( function prevPageHint( org: string, project: string | undefined, - flags: Pick + flags: ReplayListHintFlags ): string { return appendReplayFlags( `sentry replay list ${formatScope(org, project)} -c prev`, @@ -262,6 +377,7 @@ export const listCommand = buildListCommand("replay", { " sentry replay list sentry/\n" + " sentry replay list sentry/cli --limit 50\n" + " sentry replay list sentry/cli --sort duration\n" + + " sentry replay list sentry/cli --path /signup --friction\n" + ' sentry replay list sentry/cli -q "user.email:foo@example.com"\n' + " sentry replay list sentry/cli -e production -e canary\n" + " sentry replay list sentry/cli --period 24h\n\n" + @@ -297,6 +413,42 @@ export const listCommand = buildListCommand("replay", { brief: "Search query (Sentry replay search syntax)", optional: true, }, + url: { + kind: "parsed", + 
parse: String, + brief: "Filter by visited URL text using replay search", + optional: true, + }, + path: { + kind: "parsed", + parse: String, + brief: "Filter by actual visited URL pathname", + optional: true, + }, + "entry-path": { + kind: "parsed", + parse: String, + brief: "Filter by first visited URL pathname", + optional: true, + }, + "exit-path": { + kind: "parsed", + parse: String, + brief: "Filter by last visited URL pathname", + optional: true, + }, + friction: { + kind: "boolean", + brief: + "Only show replays with indexed friction signals (errors, warnings, rage clicks, or dead clicks)", + default: false, + }, + "problem-only": { + kind: "boolean", + brief: + "Only show replays with errors, warnings, rage clicks, or dead clicks", + default: false, + }, environment: { kind: "parsed", parse: String, @@ -308,7 +460,7 @@ export const listCommand = buildListCommand("replay", { kind: "parsed", parse: parseSort, brief: - "Sort by: date, oldest, duration, errors, activity, or a raw replay sort field", + "Sort by: date, oldest, duration, errors, warnings, rage, dead, activity, or a raw replay sort field", default: "date", }, period: LIST_PERIOD_FLAG, @@ -319,13 +471,20 @@ export const listCommand = buildListCommand("replay", { n: "limit", q: "query", s: "sort", + u: "url", }, }, async *func(this: SentryContext, flags: ListFlags, target?: string) { const { cwd } = this; const timeRange = flags.period; const environment = parseReplayEnvironmentFilter(flags.environment); - const { query } = flags; + const query = buildReplaySearchQuery({ + query: flags.query, + url: flags.url, + path: flags.path, + entryPath: flags["entry-path"], + exitPath: flags["exit-path"], + }); const resolved = await resolveOrgOptionalProjectFromArg( target, @@ -338,6 +497,11 @@ export const listCommand = buildListCommand("replay", { formatScope(resolved.org, resolved.project), { env: environment?.join(","), + entryPath: flags["entry-path"], + exitPath: flags["exit-path"], + friction: 
flags.friction ? "1" : undefined, + path: flags.path, + problem: flags["problem-only"] ? "1" : undefined, sort: flags.sort, q: query, period: serializeTimeRange(timeRange), @@ -349,7 +513,7 @@ export const listCommand = buildListCommand("replay", { contextKey ); - const { data: replays, nextCursor } = await withProgress( + const { data: fetchedReplays, nextCursor } = await withProgress( { message: `Fetching replays (up to ${flags.limit})...`, json: flags.json, @@ -366,6 +530,14 @@ export const listCommand = buildListCommand("replay", { ...timeRangeToApiParams(timeRange), }) ); + const replays = fetchedReplays.filter((replay) => { + if (!replayMatchesRouteFilters(replay, flags)) { + return false; + } + return flags["problem-only"] || flags.friction + ? hasProblemSignals(replay) + : true; + }); advancePaginationState(PAGINATION_KEY, contextKey, direction, nextCursor); const hasPrev = hasPreviousPage(PAGINATION_KEY, contextKey); diff --git a/src/commands/replay/shared.ts b/src/commands/replay/shared.ts new file mode 100644 index 000000000..af285dda3 --- /dev/null +++ b/src/commands/replay/shared.ts @@ -0,0 +1,110 @@ +/** + * Shared helpers for replay commands. 
+ */ + +import { getReplay, getReplayRecordingSegments } from "../../lib/api-client.js"; +import { ApiError, ResolutionError } from "../../lib/errors.js"; +import { withProgress } from "../../lib/polling.js"; +import type { + ReplayDetails, + ReplayRecordingSegments, +} from "../../types/index.js"; + +type ReplayProjectScopeValidation = { + replay: ReplayDetails; + projectId?: string; + replayId: string; + org: string; + project?: string; + command: string; +}; + +type ReplaySegmentsOptions = { + org: string; + replay: ReplayDetails; + replayId: string; + project?: string; + json: boolean; +}; + +export async function fetchReplayDetailsForCommand( + org: string, + replayId: string, + command: string +): Promise { + try { + return await getReplay(org, replayId); + } catch (error) { + if (error instanceof ApiError && error.status === 404) { + throw new ResolutionError( + `Replay '${replayId}'`, + "not found", + `${command} ${org}/${replayId}`, + [ + "Check that you are querying the right organization", + "The replay may be past your retention window", + ] + ); + } + throw error; + } +} + +export function validateReplayProjectScope({ + replay, + projectId, + replayId, + org, + project, + command, +}: ReplayProjectScopeValidation): void { + if (project === undefined || projectId === undefined) { + return; + } + + const replayProjectId = replay.project_id; + if (replayProjectId === null || replayProjectId === undefined) { + return; + } + + if (String(projectId) !== String(replayProjectId)) { + throw new ResolutionError( + `Replay '${replayId}'`, + `is not in project '${project}'`, + `${command} ${org}/${project}/${replayId}`, + [`Open the org-scoped replay instead: ${command} ${org}/${replayId}`] + ); + } +} + +export async function fetchReplaySegmentsForCommand({ + org, + replay, + replayId, + project, + json, +}: ReplaySegmentsOptions): Promise { + const projectSlugOrId = + replay.project_id !== null && replay.project_id !== undefined + ? 
String(replay.project_id) + : project; + + if ( + !projectSlugOrId || + replay.is_archived || + (replay.count_segments ?? 0) <= 0 + ) { + return []; + } + + return await withProgress( + { + message: `Fetching replay recording segments (${replay.count_segments})...`, + json, + }, + () => + getReplayRecordingSegments(org, projectSlugOrId, replayId, { + expectedSegments: replay.count_segments, + }) + ); +} diff --git a/src/commands/replay/summarize.ts b/src/commands/replay/summarize.ts new file mode 100644 index 000000000..5ed8a894b --- /dev/null +++ b/src/commands/replay/summarize.ts @@ -0,0 +1,263 @@ +/** + * sentry replay summarize + * + * Summarize Session Replay behavior and deterministic friction signals. + */ + +import type { SentryContext } from "../../context.js"; +import { validateLimit } from "../../lib/arg-parsing.js"; +import { buildCommand } from "../../lib/command.js"; +import { escapeMarkdownCell, formatTable } from "../../lib/formatters/index.js"; +import { filterFields } from "../../lib/formatters/json.js"; +import { CommandOutput } from "../../lib/formatters/output.js"; +import type { Column } from "../../lib/formatters/table.js"; +import { formatDurationCompactMs } from "../../lib/formatters/time-utils.js"; +import { validateHexId } from "../../lib/hex-id.js"; +import { + applyFreshFlag, + FRESH_ALIASES, + FRESH_FLAG, +} from "../../lib/list-command.js"; +import { withProgress } from "../../lib/polling.js"; +import { extractNormalizedReplayEvents } from "../../lib/replay-events.js"; +import { summarizeReplay } from "../../lib/replay-summary.js"; +import { resolveOrgOptionalProjectFromArg } from "../../lib/resolve-target.js"; +import { + type ReplayFrictionSignal, + type ReplayRouteSummary, + type ReplaySummaryOutput, + ReplaySummaryOutputSchema, +} from "../../types/index.js"; +import { + fetchReplayDetailsForCommand, + fetchReplaySegmentsForCommand, + validateReplayProjectScope, +} from "./shared.js"; +import { parseReplayTargetArgs } from 
"./target.js"; + +type SummaryFlags = { + readonly fields?: string[]; + readonly fresh: boolean; + readonly json: boolean; + readonly "limit-events": number; + readonly "limit-signals": number; + readonly path?: string; +}; + +const COMMAND_NAME = "replay summarize"; +const USAGE_HINT = + "sentry replay summarize [//] | "; +const DEFAULT_SIGNAL_LIMIT = 10; +const DEFAULT_EVENT_LIMIT = 12; + +function parseSignalLimit(value: string): number { + return validateLimit(value, 0, 50); +} + +function parseEventLimit(value: string): number { + return validateLimit(value, 0, 50); +} + +function formatOffset(offsetMs: number | null | undefined): string { + return offsetMs === null || offsetMs === undefined + ? "-" + : formatDurationCompactMs(offsetMs); +} + +const SIGNAL_COLUMNS: Column[] = [ + { + header: "OFFSET", + value: (signal) => formatOffset(signal.offsetMs), + minWidth: 8, + shrinkable: false, + }, + { + header: "SEVERITY", + value: (signal) => signal.severity, + minWidth: 8, + }, + { + header: "SIGNAL", + value: (signal) => signal.kind, + minWidth: 14, + }, + { + header: "MESSAGE", + value: (signal) => escapeMarkdownCell(signal.message), + minWidth: 28, + truncate: true, + }, +]; + +const ROUTE_COLUMNS: Column[] = [ + { + header: "FIRST", + value: (route) => formatOffset(route.firstOffsetMs), + minWidth: 8, + shrinkable: false, + }, + { + header: "LAST", + value: (route) => formatOffset(route.lastOffsetMs), + minWidth: 8, + shrinkable: false, + }, + { + header: "EVENTS", + value: (route) => String(route.eventCount), + align: "right", + minWidth: 6, + }, + { + header: "PATH", + value: (route) => escapeMarkdownCell(route.path), + minWidth: 24, + truncate: true, + }, +]; + +function jsonTransformSummary( + summary: ReplaySummaryOutput, + fields?: string[] +): unknown { + return fields && fields.length > 0 ? 
filterFields(summary, fields) : summary; +} + +function formatSummaryHuman(summary: ReplaySummaryOutput): string { + const lines = [ + `Replay summary for ${summary.org}/${summary.replayId.slice(0, 8)}`, + "", + `Entry: ${summary.entryUrl ?? "-"}`, + `Exit: ${summary.exitUrl ?? "-"}`, + `Duration: ${summary.durationSeconds ?? "-"}s`, + `Events: ${summary.counts.total} total, ${summary.counts.clicks} clicks, ${summary.counts.inputs} inputs, ${summary.counts.network} network, ${summary.counts.errors} errors`, + ]; + + if (summary.focusPath) { + lines.push(`Focus path: ${summary.focusPath}`); + } + + if (summary.signals.length > 0) { + lines.push( + "", + "Signals:", + "", + formatTable(summary.signals, SIGNAL_COLUMNS) + ); + } else { + lines.push("", "Signals: none detected"); + } + + if (summary.routes.length > 0) { + lines.push("", "Routes:", "", formatTable(summary.routes, ROUTE_COLUMNS)); + } + + return lines.join("\n"); +} + +export const summarizeCommand = buildCommand({ + docs: { + brief: "Summarize Session Replay behavior", + fullDescription: + "Summarize a Session Replay into route flow, event counts, timing facts, and deterministic friction signals.\n\n" + + "This command does not use AI. 
It returns factual evidence that an agent can use for analysis.\n\n" + + "Examples:\n" + + " sentry replay summarize sentry/346789a703f6454384f1de473b8b9fcc --json\n" + + " sentry replay summarize sentry/346789a703f6454384f1de473b8b9fcc --path /signup --json\n" + + " sentry replay summarize sentry/cli/346789a703f6454384f1de473b8b9fcc --limit-signals 20 --json", + }, + output: { + human: formatSummaryHuman, + jsonTransform: jsonTransformSummary, + schema: ReplaySummaryOutputSchema, + }, + parameters: { + positional: { + kind: "array", + parameter: { + placeholder: "replay-id-or-url", + brief: "[/] or ", + parse: String, + }, + }, + flags: { + path: { + kind: "parsed", + parse: String, + brief: "Focus summary on events from this URL pathname", + optional: true, + }, + "limit-signals": { + kind: "parsed", + parse: parseSignalLimit, + brief: "Maximum friction signals to include (0-50)", + default: String(DEFAULT_SIGNAL_LIMIT), + }, + "limit-events": { + kind: "parsed", + parse: parseEventLimit, + brief: "Maximum notable events to include (0-50)", + default: String(DEFAULT_EVENT_LIMIT), + }, + fresh: FRESH_FLAG, + }, + aliases: { + ...FRESH_ALIASES, + }, + }, + async *func(this: SentryContext, flags: SummaryFlags, ...args: string[]) { + applyFreshFlag(flags); + + const parsedArgs = parseReplayTargetArgs(args, USAGE_HINT); + const replayId = validateHexId(parsedArgs.replayId, "replay ID"); + const resolved = await resolveOrgOptionalProjectFromArg( + parsedArgs.targetArg, + this.cwd, + COMMAND_NAME + ); + + const replay = await withProgress( + { message: "Fetching replay metadata...", json: flags.json }, + () => + fetchReplayDetailsForCommand( + resolved.org, + replayId, + "sentry replay summarize" + ) + ); + + validateReplayProjectScope({ + replay, + projectId: resolved.projectData?.id, + replayId, + org: resolved.org, + project: resolved.project, + command: "sentry replay summarize", + }); + + const segments = await fetchReplaySegmentsForCommand({ + org: resolved.org, + 
replay, + replayId, + project: resolved.project, + json: flags.json, + }); + + const events = extractNormalizedReplayEvents(replay, segments); + const summary = summarizeReplay(replay, events, { + org: resolved.org, + project: resolved.project, + focusPath: flags.path, + maxSignals: flags["limit-signals"], + maxNotableEvents: flags["limit-events"], + }); + + yield new CommandOutput(summary); + return { + hint: + summary.signals.length > 0 + ? `Detected ${summary.signals.length} friction signal${summary.signals.length === 1 ? "" : "s"}. Cite replay ID and offset when reporting findings.` + : "No deterministic friction signals detected. Use route flow and notable events for behavior context.", + }; + }, +}); diff --git a/src/commands/replay/target.ts b/src/commands/replay/target.ts new file mode 100644 index 000000000..32f73f8f6 --- /dev/null +++ b/src/commands/replay/target.ts @@ -0,0 +1,118 @@ +/** + * Shared replay target parsing helpers. + * + * Keeps `replay view` and replay subcommands aligned on accepted target forms: + * bare replay IDs, `/`, `//`, + * ` `, and Sentry replay URLs. + */ + +import { + detectSwappedViewArgs, + parseSlashSeparatedArg, +} from "../../lib/arg-parsing.js"; +import { ContextError, ValidationError } from "../../lib/errors.js"; +import { tryNormalizeHexId } from "../../lib/hex-id.js"; +import { + applySentryUrlContext, + parseSentryUrl, +} from "../../lib/sentry-url-parser.js"; + +export type ParsedReplayTargetArgs = { + replayId: string; + targetArg: string | undefined; + warning?: string; +}; + +export const REPLAY_TARGET_USAGE = + "sentry replay [//] | "; + +/** + * Parse a single positional argument as a replay target. + * + * The single-slash case (`org/id`) needs special handling because 32-char hex + * replay IDs look valid to the generic slash parser's ID extraction. 
+ */ +function parseSingleReplayTargetArg( + arg: string, + usageHint: string +): ParsedReplayTargetArgs { + const trimmed = arg.trim(); + if (!trimmed) { + throw new ContextError("Replay ID", usageHint, []); + } + + const slashIdx = trimmed.indexOf("/"); + if (slashIdx !== -1 && trimmed.indexOf("/", slashIdx + 1) === -1) { + const org = trimmed.slice(0, slashIdx); + const replaySegment = trimmed.slice(slashIdx + 1); + const normalizedReplayId = + replaySegment && tryNormalizeHexId(replaySegment); + if (!normalizedReplayId) { + throw new ContextError("Replay ID", usageHint, []); + } + return { replayId: normalizedReplayId, targetArg: `${org}/` }; + } + + const { id: replayId, targetArg } = parseSlashSeparatedArg( + trimmed, + "Replay ID", + usageHint + ); + return { replayId, targetArg }; +} + +/** + * Parse replay command positional arguments. + */ +export function parseReplayTargetArgs( + args: string[], + usageHint = REPLAY_TARGET_USAGE +): ParsedReplayTargetArgs { + if (args.length === 0) { + throw new ContextError("Replay ID", usageHint, []); + } + if (args.length > 2) { + throw new ValidationError( + `Too many positional arguments (got ${args.length}, expected at most 2).\n\nUsage: ${usageHint}`, + "positional" + ); + } + + const first = args[0]; + if (!first) { + throw new ContextError("Replay ID", usageHint, []); + } + + const urlParsed = parseSentryUrl(first); + if (urlParsed) { + applySentryUrlContext(urlParsed.baseUrl); + if (urlParsed.replayId && urlParsed.org) { + return { replayId: urlParsed.replayId, targetArg: `${urlParsed.org}/` }; + } + throw new ContextError("Replay ID", usageHint, [ + "Pass a replay URL: https://sentry.io/organizations/{org}/explore/replays/{replayId}/", + ]); + } + + if (args.length === 1) { + return parseSingleReplayTargetArg(first, usageHint); + } + + const second = args[1]; + if (!second) { + throw new ContextError("Replay ID", usageHint, []); + } + + const warning = + args.length === 2 ? 
detectSwappedViewArgs(first, second) : null; + if (warning) { + const normalizedReplayId = tryNormalizeHexId(first) ?? first; + return { + replayId: normalizedReplayId, + targetArg: second, + warning, + }; + } + + return { replayId: second, targetArg: first }; +} diff --git a/src/commands/replay/view.ts b/src/commands/replay/view.ts index 95f368590..b0291a5e2 100644 --- a/src/commands/replay/view.ts +++ b/src/commands/replay/view.ts @@ -12,18 +12,9 @@ import { getTraceMeta, listIssuesPaginated, } from "../../lib/api-client.js"; -import { - detectSwappedViewArgs, - parseSlashSeparatedArg, -} from "../../lib/arg-parsing.js"; import { openInBrowser } from "../../lib/browser.js"; import { buildCommand } from "../../lib/command.js"; -import { - ApiError, - ContextError, - ResolutionError, - ValidationError, -} from "../../lib/errors.js"; +import { ApiError, ResolutionError } from "../../lib/errors.js"; import { filterFields } from "../../lib/formatters/json.js"; import { CommandOutput } from "../../lib/formatters/output.js"; import { @@ -32,7 +23,7 @@ import { type ReplayViewData, replayHint, } from "../../lib/formatters/replay.js"; -import { tryNormalizeHexId, validateHexId } from "../../lib/hex-id.js"; +import { validateHexId } from "../../lib/hex-id.js"; import { applyFreshFlag, FRESH_ALIASES, @@ -40,10 +31,6 @@ import { } from "../../lib/list-command.js"; import { logger } from "../../lib/logger.js"; import { resolveOrgOptionalProjectFromArg } from "../../lib/resolve-target.js"; -import { - applySentryUrlContext, - parseSentryUrl, -} from "../../lib/sentry-url-parser.js"; import { buildReplayUrl } from "../../lib/sentry-urls.js"; import type { ReplayActivityEvent, @@ -52,6 +39,7 @@ import type { ReplayRelatedTrace, } from "../../types/index.js"; import { ReplayViewOutputSchema } from "../../types/index.js"; +import { parseReplayTargetArgs } from "./target.js"; type ViewFlags = { readonly json: boolean; @@ -60,12 +48,6 @@ type ViewFlags = { readonly fields?: 
string[]; }; -type ParsedPositionalArgs = { - replayId: string; - targetArg: string | undefined; - warning?: string; -}; - const USAGE_HINT = "sentry replay view [//] | "; const MAX_ACTIVITY_EVENTS = 6; @@ -74,44 +56,6 @@ const MAX_RELATED_TRACES = 2; const log = logger.withTag("replay.view"); -/** - * Parse a single positional argument as a replay target. - * - * Handles bare replay IDs, `/`, `//`, - * and Sentry replay URLs. The single-slash case (`org/id`) needs special - * handling because 32-char hex replay IDs look valid to the generic - * `parseSlashSeparatedArg` which would misinterpret the org as a project. - */ -function parseSingleArg(arg: string): ParsedPositionalArgs { - const trimmed = arg.trim(); - if (!trimmed) { - throw new ContextError("Replay ID", USAGE_HINT, []); - } - - // Handle / shorthand — must check before parseSlashSeparatedArg - // because replay IDs are 32-char hex strings that look valid to the generic - // slash parser's ID extraction, but with only one slash the "project" segment - // would be wrongly treated as the ID. - const slashIdx = trimmed.indexOf("/"); - if (slashIdx !== -1 && trimmed.indexOf("/", slashIdx + 1) === -1) { - const org = trimmed.slice(0, slashIdx); - const replaySegment = trimmed.slice(slashIdx + 1); - const normalizedReplayId = - replaySegment && tryNormalizeHexId(replaySegment); - if (!normalizedReplayId) { - throw new ContextError("Replay ID", USAGE_HINT, []); - } - return { replayId: normalizedReplayId, targetArg: `${org}/` }; - } - - const { id: replayId, targetArg } = parseSlashSeparatedArg( - trimmed, - "Replay ID", - USAGE_HINT - ); - return { replayId, targetArg }; -} - /** * Parse replay view positional arguments. 
* @@ -122,55 +66,8 @@ function parseSingleArg(arg: string): ParsedPositionalArgs { * - ` ` * - `` */ -export function parsePositionalArgs(args: string[]): ParsedPositionalArgs { - if (args.length === 0) { - throw new ContextError("Replay ID", USAGE_HINT, []); - } - if (args.length > 2) { - throw new ValidationError( - `Too many positional arguments (got ${args.length}, expected at most 2).\n\nUsage: ${USAGE_HINT}`, - "positional" - ); - } - - const first = args[0]; - if (!first) { - throw new ContextError("Replay ID", USAGE_HINT, []); - } - - const urlParsed = parseSentryUrl(first); - if (urlParsed) { - applySentryUrlContext(urlParsed.baseUrl); - if (urlParsed.replayId && urlParsed.org) { - return { replayId: urlParsed.replayId, targetArg: `${urlParsed.org}/` }; - } - throw new ContextError("Replay ID", USAGE_HINT, [ - "Pass a replay URL: https://sentry.io/organizations/{org}/explore/replays/{replayId}/", - ]); - } - - if (args.length === 1) { - return parseSingleArg(first); - } - - const second = args[1]; - if (!second) { - throw new ContextError("Replay ID", USAGE_HINT, []); - } - - const warning = - args.length === 2 ? detectSwappedViewArgs(first, second) : null; - if (warning) { - const normalizedReplayId = tryNormalizeHexId(first) ?? first; - return { - replayId: normalizedReplayId, - targetArg: second, - warning, - }; - } - - return { replayId: second, targetArg: first }; -} +export const parsePositionalArgs = (args: string[]) => + parseReplayTargetArgs(args, USAGE_HINT); type ReplayProjectScope = { org: string; diff --git a/src/lib/command.ts b/src/lib/command.ts index 97bfbc784..afac9c9f8 100644 --- a/src/lib/command.ts +++ b/src/lib/command.ts @@ -556,6 +556,7 @@ export function buildCommand< renderCommandOutput(stdout, value.data, outputConfig, renderer, { json: Boolean(flags.json), fields: flags.fields as string[] | undefined, + jsonCompact: Boolean(flags.jsonl), clearPrefix: pendingClear ? 
"\x1b[H\x1b[J" : undefined, }); pendingClear = false; diff --git a/src/lib/formatters/output.ts b/src/lib/formatters/output.ts index 8702810de..2b4595090 100644 --- a/src/lib/formatters/output.ts +++ b/src/lib/formatters/output.ts @@ -212,6 +212,8 @@ type RenderContext = { json: boolean; /** Pre-parsed `--fields` value */ fields?: string[]; + /** Emit compact JSON instead of pretty JSON, useful for JSONL streams. */ + jsonCompact?: boolean; /** ANSI prefix to prepend to the output (e.g., clear-screen escape) */ clearPrefix?: string; }; @@ -257,12 +259,17 @@ function applyJsonExclude( * is handed off directly without serialization. Otherwise it is * JSON-stringified and written as a single line. */ -function emitJsonObject(stdout: Writer, obj: unknown): void { +function emitJsonObject( + stdout: Writer, + obj: unknown, + options: { compact?: boolean } = {} +): void { if (stdout.captureObject) { stdout.captureObject(obj); return; } - stdout.write(`${formatJson(obj)}\n`); + const json = options.compact ? JSON.stringify(obj) : formatJson(obj); + stdout.write(`${json}\n`); } /** @@ -297,7 +304,7 @@ export function renderCommandOutput( if (transformed === undefined) { return; } - emitJsonObject(stdout, transformed); + emitJsonObject(stdout, transformed, { compact: ctx.jsonCompact }); return; } @@ -306,7 +313,7 @@ export function renderCommandOutput( ctx.fields && ctx.fields.length > 0 ? filterFields(excluded, ctx.fields) : excluded; - emitJsonObject(stdout, final); + emitJsonObject(stdout, final, { compact: ctx.jsonCompact }); return; } diff --git a/src/lib/replay-events.ts b/src/lib/replay-events.ts new file mode 100644 index 000000000..c65ea12c6 --- /dev/null +++ b/src/lib/replay-events.ts @@ -0,0 +1,701 @@ +/** + * Normalized Session Replay event extraction. + * + * Converts rrweb frames and Sentry custom replay frames into stable, + * agent-readable rows. 
The normalized shape intentionally preserves evidence + * pointers (segment/frame/offset) while avoiding raw payloads unless callers + * explicitly request them. + */ + +import type { + ReplayDetails, + ReplayEvent, + ReplayEventKind, + ReplayRecordingSegments, +} from "../types/index.js"; +import { ValidationError } from "./errors.js"; +import { getReplayUrlParts, replayUrlPathMatches } from "./replay-search.js"; +import { parseRelativeParts, UNIT_SECONDS } from "./time-range.js"; + +type RecordValue = Record; + +type EventContext = { + replayStartMs: number | null; + replayId: string; + includeRaw: boolean; + currentUrl?: string; +}; + +type FrameLocation = { + ctx: EventContext; + frame: RecordValue; + segmentIndex: number; + frameIndex: number; +}; + +export type ReplayEventFilters = { + kinds?: readonly ReplayEventKind[]; + url?: string; + path?: string; + contains?: string; + selector?: string; + fromMs?: number; + toMs?: number; +}; + +const RRWEB_EVENT_TYPES: Record = { + 0: "DomContentLoaded", + 1: "Load", + 2: "FullSnapshot", + 3: "IncrementalSnapshot", + 4: "Meta", + 5: "Custom", + 6: "Plugin", +}; + +const RRWEB_INCREMENTAL_SOURCES: Record = { + 0: "Mutation", + 1: "MouseMove", + 2: "MouseInteraction", + 3: "Scroll", + 4: "ViewportResize", + 5: "Input", + 6: "TouchMove", + 7: "MediaInteraction", + 8: "StyleSheetRule", + 9: "CanvasMutation", + 10: "Font", + 11: "Log", + 12: "Drag", + 13: "StyleDeclaration", + 14: "Selection", +}; + +const RRWEB_MOUSE_INTERACTIONS: Record = { + 0: "MouseUp", + 1: "MouseDown", + 2: "Click", + 3: "ContextMenu", + 4: "DblClick", + 5: "Focus", + 6: "Blur", + 7: "TouchStart", + 8: "TouchMove", + 9: "TouchEnd", +}; + +const CLICK_LIKE_CUSTOM_TAGS = new Set(["click", "deadClick", "rageClick"]); +const MASKED_INPUT_RE = /^\*+$/; +const SECONDS_OFFSET_RE = /^\d+(\.\d+)?$/; + +function isRecord(value: unknown): value is RecordValue { + return typeof value === "object" && value !== null; +} + +function firstString(...values: 
unknown[]): string | undefined { + return values.find( + (value): value is string => typeof value === "string" && value.length > 0 + ); +} + +function firstNumber(...values: unknown[]): number | undefined { + return values.find( + (value): value is number => + typeof value === "number" && Number.isFinite(value) + ); +} + +function timestampToMillis(value: unknown): number | null { + if (typeof value === "string") { + const parsed = Date.parse(value); + return Number.isNaN(parsed) ? null : parsed; + } + + if (typeof value !== "number" || !Number.isFinite(value)) { + return null; + } + + // rrweb timestamps are epoch milliseconds. Some Sentry payloads use epoch + // seconds, so normalize realistic second values as a fallback. + if (value > 1_000_000_000 && value < 10_000_000_000) { + return Math.round(value * 1000); + } + + return Math.round(value); +} + +function eventTimeFields( + frame: RecordValue, + replayStartMs: number | null +): Pick { + const timestampMs = timestampToMillis(frame.timestamp); + return { + offsetMs: + timestampMs !== null && replayStartMs !== null + ? Math.max(0, timestampMs - replayStartMs) + : null, + timestamp: + timestampMs !== null ? new Date(timestampMs).toISOString() : null, + }; +} + +function buildBaseEvent(params: { + ctx: EventContext; + frame: RecordValue; + segmentIndex: number; + frameIndex: number; + kind: ReplayEventKind; + category: string; + label?: string; + message?: string; + url?: string; + selector?: string; + nodeId?: string | number; + rawType?: string; + rawSource?: string; + data?: RecordValue; +}): ReplayEvent { + const { ctx, frame, segmentIndex, frameIndex, ...event } = params; + const url = event.url ?? ctx.currentUrl ?? null; + const urlParts = getReplayUrlParts(url); + return { + replayId: ctx.replayId, + segmentIndex, + frameIndex, + ...eventTimeFields(frame, ctx.replayStartMs), + kind: event.kind, + category: event.category, + label: event.label ?? null, + message: event.message ?? 
null, + url, + urlPath: urlParts?.path ?? null, + urlQuery: urlParts?.query ?? null, + selector: event.selector ?? null, + nodeId: event.nodeId ?? null, + rawType: event.rawType ?? null, + rawSource: event.rawSource ?? null, + ...(event.data ? { data: event.data } : {}), + ...(ctx.includeRaw ? { raw: frame } : {}), + }; +} + +function summarizeMutationData(data: RecordValue): RecordValue { + return { + adds: Array.isArray(data.adds) ? data.adds.length : undefined, + removes: Array.isArray(data.removes) ? data.removes.length : undefined, + texts: Array.isArray(data.texts) ? data.texts.length : undefined, + attributes: Array.isArray(data.attributes) + ? data.attributes.length + : undefined, + }; +} + +function normalizeMouseInteraction( + location: FrameLocation, + data: RecordValue +): ReplayEvent | null { + const interactionType = firstNumber(data.type); + const interactionName = + interactionType !== undefined + ? (RRWEB_MOUSE_INTERACTIONS[interactionType] ?? String(interactionType)) + : undefined; + + let kind: ReplayEventKind | null = null; + if (interactionName === "Click" || interactionName === "DblClick") { + kind = "click"; + } else if ( + interactionName === "TouchStart" || + interactionName === "TouchEnd" + ) { + kind = "tap"; + } else if (interactionName === "Focus") { + kind = "focus"; + } else if (interactionName === "Blur") { + kind = "blur"; + } + + if (!kind) { + return null; + } + + const selector = firstString(data.selector); + const nodeId = firstNumber(data.id); + return buildBaseEvent({ + ...location, + kind, + category: "interaction", + label: selector ?? interactionName, + selector, + nodeId, + rawType: "IncrementalSnapshot", + rawSource: interactionName ?? 
"MouseInteraction", + data: { + x: firstNumber(data.x), + y: firstNumber(data.y), + interaction: interactionName, + }, + }); +} + +function normalizeIncrementalFrame( + ctx: EventContext, + frame: RecordValue, + segmentIndex: number, + frameIndex: number +): ReplayEvent | null { + const location = { ctx, frame, segmentIndex, frameIndex }; + const data = isRecord(frame.data) ? frame.data : {}; + const source = firstNumber(data.source); + const sourceName = + source !== undefined + ? (RRWEB_INCREMENTAL_SOURCES[source] ?? String(source)) + : undefined; + + switch (sourceName) { + case "Mutation": + return buildBaseEvent({ + ...location, + kind: "mutation", + category: "dom", + label: "mutation", + rawType: "IncrementalSnapshot", + rawSource: sourceName, + data: summarizeMutationData(data), + }); + case "MouseInteraction": + return normalizeMouseInteraction(location, data); + case "Scroll": + return buildBaseEvent({ + ...location, + kind: "scroll", + category: "interaction", + nodeId: firstNumber(data.id), + rawType: "IncrementalSnapshot", + rawSource: sourceName, + data: { x: firstNumber(data.x), y: firstNumber(data.y) }, + }); + case "ViewportResize": + return buildBaseEvent({ + ...location, + kind: "viewport", + category: "viewport", + label: "resize", + rawType: "IncrementalSnapshot", + rawSource: sourceName, + data: { + width: firstNumber(data.width), + height: firstNumber(data.height), + }, + }); + case "Input": + return buildBaseEvent({ + ...location, + kind: "input", + category: "input", + nodeId: firstNumber(data.id), + rawType: "IncrementalSnapshot", + rawSource: sourceName, + data: { + textLength: + typeof data.text === "string" ? data.text.length : undefined, + isChecked: + typeof data.isChecked === "boolean" ? data.isChecked : undefined, + masked: + typeof data.text === "string" && MASKED_INPUT_RE.test(data.text), + }, + }); + case "Log": { + const level = firstString(data.level); + const message = Array.isArray(data.payload) + ? 
data.payload.map(String).join(" ") + : firstString(data.payload, data.message); + return buildBaseEvent({ + ...location, + kind: level === "error" ? "error" : "console", + category: "console", + label: level ?? "console", + message, + rawType: "IncrementalSnapshot", + rawSource: sourceName, + data: { level }, + }); + } + default: + return null; + } +} + +function breadcrumbKind(payload: RecordValue): ReplayEventKind { + const category = firstString(payload.category)?.toLowerCase() ?? ""; + const type = firstString(payload.type)?.toLowerCase() ?? ""; + const level = firstString(payload.level)?.toLowerCase() ?? ""; + + if ( + category.includes("fetch") || + category.includes("xhr") || + category.includes("http") || + type === "http" + ) { + return "network"; + } + if (category.includes("console")) { + return level === "error" ? "error" : "console"; + } + if (category.includes("exception") || category.includes("error")) { + return "error"; + } + if (category.includes("navigation")) { + return "navigation"; + } + return "breadcrumb"; +} + +function normalizeBreadcrumbCustomFrame( + location: FrameLocation, + payload: RecordValue +): ReplayEvent { + const { ctx } = location; + const nestedData = isRecord(payload.data) ? payload.data : {}; + const kind = breadcrumbKind(payload); + const url = firstString(payload.url, nestedData.url, nestedData.to); + if (kind === "navigation" && url) { + ctx.currentUrl = url; + } + + return buildBaseEvent({ + ...location, + kind, + category: kind === "breadcrumb" ? "breadcrumb" : kind, + label: firstString(payload.category, payload.type) ?? 
kind, + message: firstString(payload.message), + url, + rawType: "Custom", + rawSource: "breadcrumb", + data: { + level: firstString(payload.level), + statusCode: firstNumber(nestedData.status_code, nestedData.status), + method: firstString(nestedData.method), + }, + }); +} + +function normalizeClickCustomFrame( + location: FrameLocation, + tag: string, + payload: RecordValue +): ReplayEvent { + const selector = firstString(payload.selector); + const label = firstString(payload.label) ?? tag; + return buildBaseEvent({ + ...location, + kind: "click", + category: "interaction", + label, + selector, + rawType: "Custom", + rawSource: tag, + data: { + interaction: tag, + isDeadClick: tag === "deadClick", + isRageClick: tag === "rageClick", + }, + }); +} + +function normalizePerformanceSpanCustomFrame( + location: FrameLocation, + payload: RecordValue +): ReplayEvent { + const nestedData = isRecord(payload.data) ? payload.data : {}; + const op = firstString(payload.op); + const description = firstString(payload.description); + return buildBaseEvent({ + ...location, + kind: "span", + category: "performance", + label: op ?? "performanceSpan", + message: description, + rawType: "Custom", + rawSource: "performanceSpan", + data: { + op, + description, + durationMs: firstNumber(nestedData.duration, payload.duration), + }, + }); +} + +function normalizeCustomFrame( + ctx: EventContext, + frame: RecordValue, + segmentIndex: number, + frameIndex: number +): ReplayEvent | null { + const location = { ctx, frame, segmentIndex, frameIndex }; + const data = isRecord(frame.data) ? frame.data : {}; + const tag = firstString(data.tag); + const payload = isRecord(data.payload) ? 
data.payload : {}; + + if (!tag) { + const href = firstString(data.href); + if (!href) { + return null; + } + ctx.currentUrl = href; + return buildBaseEvent({ + ...location, + kind: "navigation", + category: "navigation", + label: "page.view", + url: href, + rawType: "Custom", + rawSource: "href", + }); + } + + if (tag === "breadcrumb") { + return normalizeBreadcrumbCustomFrame(location, payload); + } + + if (CLICK_LIKE_CUSTOM_TAGS.has(tag)) { + return normalizeClickCustomFrame(location, tag, payload); + } + + if (tag === "performanceSpan") { + return normalizePerformanceSpanCustomFrame(location, payload); + } + + const kindByTag: Record = { + memory: "memory", + mobile: "mobile", + navigation: "navigation", + video: "video", + webVital: "web-vital", + }; + const kind = kindByTag[tag] ?? "unknown"; + return buildBaseEvent({ + ...location, + kind, + category: kind === "unknown" ? "custom" : kind, + label: tag, + message: firstString(payload.message, payload.description), + rawType: "Custom", + rawSource: tag, + data: payload, + }); +} + +function normalizeFrame( + ctx: EventContext, + frame: unknown, + segmentIndex: number, + frameIndex: number +): ReplayEvent | null { + if (!isRecord(frame)) { + return null; + } + + const type = firstNumber(frame.type); + const typeName = + type !== undefined ? (RRWEB_EVENT_TYPES[type] ?? String(type)) : undefined; + + if (typeName === "FullSnapshot") { + return buildBaseEvent({ + ctx, + frame, + segmentIndex, + frameIndex, + kind: "dom-snapshot", + category: "dom", + label: "full-snapshot", + rawType: typeName, + }); + } + + if (typeName === "Meta") { + const data = isRecord(frame.data) ? 
frame.data : {}; + const href = firstString(data.href); + if (!href) { + return null; + } + ctx.currentUrl = href; + return buildBaseEvent({ + ctx, + frame, + segmentIndex, + frameIndex, + kind: "navigation", + category: "navigation", + label: "page.view", + url: href, + rawType: typeName, + }); + } + + if (typeName === "IncrementalSnapshot") { + return normalizeIncrementalFrame(ctx, frame, segmentIndex, frameIndex); + } + + if (typeName === "Custom" || isRecord(frame.data)) { + return normalizeCustomFrame(ctx, frame, segmentIndex, frameIndex); + } + + return null; +} + +export function extractNormalizedReplayEvents( + replay: ReplayDetails, + segments: ReplayRecordingSegments, + options: { includeRaw?: boolean } = {} +): ReplayEvent[] { + const replayStartMs = timestampToMillis(replay.started_at); + const ctx: EventContext = { + replayId: replay.id, + replayStartMs, + includeRaw: options.includeRaw ?? false, + }; + + const events: ReplayEvent[] = []; + for (const [segmentIndex, segment] of segments.entries()) { + for (const [frameIndex, frame] of segment.entries()) { + const normalized = normalizeFrame(ctx, frame, segmentIndex, frameIndex); + if (normalized) { + events.push(normalized); + } + } + } + + return events.sort((a, b) => { + if (a.offsetMs === null && b.offsetMs === null) { + return a.segmentIndex - b.segmentIndex || a.frameIndex - b.frameIndex; + } + if (a.offsetMs === null) { + return 1; + } + if (b.offsetMs === null) { + return -1; + } + return a.offsetMs - b.offsetMs; + }); +} + +function textMatches(event: ReplayEvent, needle: string): boolean { + const normalizedNeedle = needle.toLowerCase(); + const haystack = [ + event.kind, + event.category, + event.label, + event.message, + event.url, + event.selector, + event.rawType, + event.rawSource, + event.data ? 
JSON.stringify(event.data) : undefined, + ] + .filter((value): value is string => typeof value === "string") + .join("\n") + .toLowerCase(); + return haystack.includes(normalizedNeedle); +} + +function eventMatchesTextFilters( + event: ReplayEvent, + filters: ReplayEventFilters, + contains: string | undefined +): boolean { + if (filters.url && !(event.url ?? "").includes(filters.url)) { + return false; + } + if (filters.path && !replayUrlPathMatches(event.url, filters.path)) { + return false; + } + if (filters.selector && !(event.selector ?? "").includes(filters.selector)) { + return false; + } + if (contains && !textMatches(event, contains)) { + return false; + } + return true; +} + +function eventMatchesOffsetWindow( + event: ReplayEvent, + filters: ReplayEventFilters +): boolean { + if ( + filters.fromMs !== undefined && + (event.offsetMs === null || event.offsetMs < filters.fromMs) + ) { + return false; + } + if ( + filters.toMs !== undefined && + (event.offsetMs === null || event.offsetMs > filters.toMs) + ) { + return false; + } + return true; +} + +export function filterNormalizedReplayEvents( + events: ReplayEvent[], + filters: ReplayEventFilters +): ReplayEvent[] { + const kindSet = + filters.kinds && filters.kinds.length > 0 + ? new Set(filters.kinds) + : undefined; + const contains = filters.contains?.toLowerCase(); + + return events.filter((event) => { + if (kindSet && !kindSet.has(event.kind)) { + return false; + } + return ( + eventMatchesTextFilters(event, filters, contains) && + eventMatchesOffsetWindow(event, filters) + ); + }); +} + +/** Parse replay offsets such as `01:23`, `1:02:03`, `90s`, `2m`, or `83000ms`. 
*/ +export function parseReplayOffset(value: string): number { + const trimmed = value.trim(); + if (!trimmed) { + throw new ValidationError("Offset cannot be empty", "offset"); + } + + if (trimmed.endsWith("ms")) { + const ms = Number(trimmed.slice(0, -2)); + if (Number.isFinite(ms) && ms >= 0) { + return Math.round(ms); + } + } + + const relative = parseRelativeParts(trimmed); + if (relative) { + return relative.value * (UNIT_SECONDS[relative.unit] ?? 0) * 1000; + } + + if (SECONDS_OFFSET_RE.test(trimmed)) { + return Math.round(Number(trimmed) * 1000); + } + + const parts = trimmed.split(":").map(Number); + if ( + parts.length < 2 || + parts.length > 3 || + parts.some((part) => !Number.isFinite(part) || part < 0) + ) { + throw new ValidationError( + `Invalid replay offset '${value}'. Use seconds, 90s, 01:23, or 1:02:03.`, + "offset" + ); + } + + const [hours, minutes, seconds] = + parts.length === 3 ? parts : [0, parts[0], parts[1]]; + return Math.round( + ((hours ?? 0) * 3600 + (minutes ?? 0) * 60 + (seconds ?? 0)) * 1000 + ); +} diff --git a/src/lib/replay-search.ts b/src/lib/replay-search.ts index 53b01305c..ddcc82c8b 100644 --- a/src/lib/replay-search.ts +++ b/src/lib/replay-search.ts @@ -11,10 +11,14 @@ import type { SentryEvent, } from "../types/index.js"; import { tryNormalizeHexId } from "./hex-id.js"; +import { logger } from "./logger.js"; type ReplayLike = ReplayListItem | ReplayDetails; type ReplayFieldResolver = (replay: ReplayLike) => unknown; +const REPLAY_URL_PARSE_BASE = "https://replay.local"; +const log = logger.withTag("replay-search"); + /** Maps user-facing field aliases to canonical replay API field names. */ const REPLAY_FIELD_ALIASES = { count_screens: "count_urls", @@ -78,6 +82,80 @@ export function getReplayUserLabel(replay: ReplayLike): string | undefined { ); } +export type ReplayUrlParts = { + path: string; + query: string; +}; + +/** Parse a replay URL or relative URL into stable path/query parts. 
*/ +export function getReplayUrlParts( + value: string | null | undefined +): ReplayUrlParts | undefined { + if (!value) { + return; + } + + try { + const parsed = new URL(value, REPLAY_URL_PARSE_BASE); + return { path: parsed.pathname, query: parsed.search }; + } catch (error) { + log.debug("Failed to parse replay URL", { value, error }); + return; + } +} + +function normalizePathFilter(path: string): string { + const trimmed = path.trim(); + if (!trimmed) { + return "/"; + } + + const withSlash = trimmed.startsWith("/") ? trimmed : `/${trimmed}`; + return withSlash.length > 1 && withSlash.endsWith("/") + ? withSlash.slice(0, -1) + : withSlash; +} + +/** Match a route path exactly or by child path, avoiding raw query matches. */ +export function replayUrlPathMatches( + url: string | null | undefined, + path: string +): boolean { + const parts = getReplayUrlParts(url); + if (!parts) { + return false; + } + + const normalizedFilter = normalizePathFilter(path); + const normalizedPath = normalizePathFilter(parts.path); + return ( + normalizedPath === normalizedFilter || + normalizedPath.startsWith(`${normalizedFilter}/`) + ); +} + +export type ReplayPathMatchMode = "any" | "entry" | "exit"; + +/** Match replay URL arrays by route path in any, first, or last position. */ +export function replayMatchesPath( + replay: Pick, + path: string, + mode: ReplayPathMatchMode = "any" +): boolean { + const urls = replay.urls ?? 
[]; + if (urls.length === 0) { + return false; + } + + if (mode === "entry") { + return replayUrlPathMatches(urls[0], path); + } + if (mode === "exit") { + return replayUrlPathMatches(urls.at(-1), path); + } + return urls.some((url) => replayUrlPathMatches(url, path)); +} + const REPLAY_FIELD_RESOLVERS: Record = { activity: (replay) => replay.activity, browser: (replay) => replay.browser?.name, diff --git a/src/lib/replay-summary.ts b/src/lib/replay-summary.ts new file mode 100644 index 000000000..1f13d5d0a --- /dev/null +++ b/src/lib/replay-summary.ts @@ -0,0 +1,661 @@ +/** + * Deterministic Session Replay behavior summaries. + * + * The summary intentionally stays factual: counts, routes, timings, and + * heuristic friction signals with nearby evidence. This gives agents useful + * material for analysis without pretending the CLI performed subjective RCA. + */ + +import type { + ReplayDetails, + ReplayEvent, + ReplayEventCounts, + ReplayFrictionSignal, + ReplayRouteSummary, + ReplaySummaryOutput, + ReplayTimingSummary, +} from "../types/index.js"; +import { replayUrlPathMatches } from "./replay-search.js"; + +type SummaryOptions = { + org: string; + project?: string; + focusPath?: string; + maxSignals?: number; + maxNotableEvents?: number; +}; + +type ClickPoint = { + event: ReplayEvent; + x: number; + y: number; +}; + +const DEFAULT_MAX_SIGNALS = 10; +const DEFAULT_MAX_NOTABLE_EVENTS = 12; +const REPEATED_CLICK_WINDOW_MS = 3000; +const REPEATED_CLICK_DISTANCE_PX = 32; +const LONG_WAIT_AFTER_CLICK_MS = 10_000; +const QUICK_BOUNCE_SECONDS = 10; +const SLOW_NAVIGATION_MS = 3000; +const SLOW_RESOURCE_MS = 3000; +const ROUTE_CHURN_WINDOW_MS = 15_000; +const ROUTE_CHURN_COUNT = 3; + +const INPUT_KINDS = new Set(["input", "focus", "blur"]); +const NOTABLE_EVENT_KINDS = new Set([ + "navigation", + "click", + "tap", + "input", + "network", + "console", + "error", +]); + +function numberFromData(event: ReplayEvent, key: string): number | undefined { + const value = 
event.data?.[key]; + return typeof value === "number" && Number.isFinite(value) + ? value + : undefined; +} + +function stringFromData(event: ReplayEvent, key: string): string | undefined { + const value = event.data?.[key]; + return typeof value === "string" && value.length > 0 ? value : undefined; +} + +function eventDuration(event: ReplayEvent): number | undefined { + return numberFromData(event, "durationMs"); +} + +function replayDurationMs(replay: ReplayDetails): number | null { + return typeof replay.duration === "number" && Number.isFinite(replay.duration) + ? Math.round(replay.duration * 1000) + : null; +} + +function routeKey(event: ReplayEvent): string | undefined { + return event.urlPath ?? undefined; +} + +function countEvents(events: ReplayEvent[]): ReplayEventCounts { + return { + total: events.length, + navigations: events.filter((event) => event.kind === "navigation").length, + clicks: events.filter( + (event) => event.kind === "click" || event.kind === "tap" + ).length, + inputs: events.filter((event) => INPUT_KINDS.has(event.kind)).length, + network: events.filter((event) => event.kind === "network").length, + console: events.filter((event) => event.kind === "console").length, + errors: events.filter((event) => event.kind === "error").length, + spans: events.filter((event) => event.kind === "span").length, + }; +} + +function buildRouteSummaries(events: ReplayEvent[]): ReplayRouteSummary[] { + const routes = new Map(); + + for (const event of events) { + const path = routeKey(event); + if (!path) { + continue; + } + + const existing = routes.get(path); + if (!existing) { + routes.set(path, { + path, + url: event.url ?? 
null, + firstOffsetMs: event.offsetMs, + lastOffsetMs: event.offsetMs, + eventCount: 1, + }); + continue; + } + + existing.eventCount += 1; + if (event.offsetMs !== null) { + existing.lastOffsetMs = event.offsetMs; + if ( + existing.firstOffsetMs === null || + event.offsetMs < existing.firstOffsetMs + ) { + existing.firstOffsetMs = event.offsetMs; + } + } + } + + return [...routes.values()].sort((a, b) => { + if (a.firstOffsetMs === null && b.firstOffsetMs === null) { + return 0; + } + if (a.firstOffsetMs === null) { + return 1; + } + if (b.firstOffsetMs === null) { + return -1; + } + return a.firstOffsetMs - b.firstOffsetMs; + }); +} + +function firstOffsetForSpan( + events: ReplayEvent[], + op: string, + description: string +): number | null { + const event = events.find( + (item) => + item.kind === "span" && + stringFromData(item, "op") === op && + item.message === description && + item.offsetMs !== null + ); + return event?.offsetMs ?? null; +} + +function timingSummary(events: ReplayEvent[]): ReplayTimingSummary { + const navigationSpan = events.find( + (event) => + event.kind === "span" && + stringFromData(event, "op") === "navigation.navigate" && + eventDuration(event) !== undefined + ); + + return { + firstPaintMs: firstOffsetForSpan(events, "paint", "first-paint"), + firstContentfulPaintMs: firstOffsetForSpan( + events, + "paint", + "first-contentful-paint" + ), + largestContentfulPaintMs: firstOffsetForSpan( + events, + "web-vital", + "largest-contentful-paint" + ), + navigationDurationMs: navigationSpan + ? (eventDuration(navigationSpan) ?? 
null) + : null, + }; +} + +function eventsAround( + events: ReplayEvent[], + offsetMs: number | null, + limit = 6 +): ReplayEvent[] { + if (offsetMs === null) { + return []; + } + + return events + .filter( + (event) => + event.offsetMs !== null && + Math.abs(event.offsetMs - offsetMs) <= LONG_WAIT_AFTER_CLICK_MS && + (NOTABLE_EVENT_KINDS.has(event.kind) || event.kind === "span") + ) + .slice(0, limit); +} + +function pushSignal( + signals: ReplayFrictionSignal[], + signal: ReplayFrictionSignal, + maxSignals: number +): void { + if (signals.length >= maxSignals) { + return; + } + signals.push(signal); +} + +function signalFromEvent(params: { + events: ReplayEvent[]; + event: ReplayEvent; + kind: ReplayFrictionSignal["kind"]; + severity: ReplayFrictionSignal["severity"]; + message: string; +}): ReplayFrictionSignal { + const { events, event, kind, message, severity } = params; + return { + kind, + severity, + offsetMs: event.offsetMs, + url: event.url ?? null, + urlPath: event.urlPath ?? null, + message, + evidence: eventsAround(events, event.offsetMs), + }; +} + +function indexedSignalContext(events: ReplayEvent[]) { + const offsetMs = events[0]?.offsetMs ?? null; + return { + offsetMs, + url: events[0]?.url ?? null, + urlPath: events[0]?.urlPath ?? null, + evidence: offsetMs === null ? [] : eventsAround(events, offsetMs), + }; +} + +function detectIndexedErrorSignal( + replay: ReplayDetails, + events: ReplayEvent[], + signals: ReplayFrictionSignal[], + maxSignals: number +): void { + if ((replay.count_errors ?? 0) <= 0 && replay.error_ids.length === 0) { + return; + } + + const errorCount = + replay.count_errors && replay.count_errors > 0 + ? 
replay.count_errors + : replay.error_ids.length; + pushSignal( + signals, + { + kind: "indexed_error", + severity: "high", + ...indexedSignalContext(events), + message: `Replay is linked to ${errorCount} error event(s).`, + }, + maxSignals + ); +} + +function detectIndexedWarningSignal( + replay: ReplayDetails, + events: ReplayEvent[], + signals: ReplayFrictionSignal[], + maxSignals: number +): void { + if ((replay.count_warnings ?? 0) <= 0 && replay.warning_ids.length === 0) { + return; + } + + const warningCount = + replay.count_warnings && replay.count_warnings > 0 + ? replay.count_warnings + : replay.warning_ids.length; + pushSignal( + signals, + { + kind: "indexed_warning", + severity: "medium", + ...indexedSignalContext(events), + message: `Replay is linked to ${warningCount} warning event(s).`, + }, + maxSignals + ); +} + +function detectIndexedSignals( + replay: ReplayDetails, + events: ReplayEvent[], + signals: ReplayFrictionSignal[], + maxSignals: number +): void { + detectIndexedErrorSignal(replay, events, signals, maxSignals); + detectIndexedWarningSignal(replay, events, signals, maxSignals); +} + +function clickPoints(events: ReplayEvent[]): ClickPoint[] { + return events + .filter((event) => event.kind === "click" || event.kind === "tap") + .map((event) => { + const x = numberFromData(event, "x"); + const y = numberFromData(event, "y"); + return x === undefined || y === undefined ? 
undefined : { event, x, y }; + }) + .filter((point): point is ClickPoint => point !== undefined); +} + +function detectExplicitClickSignals( + events: ReplayEvent[], + signals: ReplayFrictionSignal[], + maxSignals: number +): void { + for (const event of events) { + if (event.kind !== "click" && event.kind !== "tap") { + continue; + } + + if (event.data?.isRageClick === true) { + pushSignal( + signals, + signalFromEvent({ + events, + event, + kind: "rage_click", + severity: "high", + message: "Replay includes a rage click signal.", + }), + maxSignals + ); + } + if (event.data?.isDeadClick === true) { + pushSignal( + signals, + signalFromEvent({ + events, + event, + kind: "dead_click", + severity: "medium", + message: "Replay includes a dead click signal.", + }), + maxSignals + ); + } + } +} + +function detectRepeatedClickSignal( + events: ReplayEvent[], + signals: ReplayFrictionSignal[], + maxSignals: number +): void { + const points = clickPoints(events); + for (let i = 1; i < points.length; i += 1) { + const previous = points[i - 1]; + const current = points[i]; + if (!(previous && current)) { + continue; + } + if (previous.event.offsetMs === null || current.event.offsetMs === null) { + continue; + } + + const deltaMs = current.event.offsetMs - previous.event.offsetMs; + const distance = Math.hypot(current.x - previous.x, current.y - previous.y); + if ( + deltaMs <= REPEATED_CLICK_WINDOW_MS && + distance <= REPEATED_CLICK_DISTANCE_PX + ) { + pushSignal( + signals, + signalFromEvent({ + events, + event: current.event, + kind: "repeated_click", + severity: "medium", + message: + "User clicked the same area repeatedly within a few seconds.", + }), + maxSignals + ); + break; + } + } +} + +function detectLongWaitAfterClickSignal( + events: ReplayEvent[], + replay: ReplayDetails, + signals: ReplayFrictionSignal[], + maxSignals: number +): void { + const points = clickPoints(events); + const durationMs = replayDurationMs(replay); + for (const point of points) { + const 
offsetMs = point.event.offsetMs; + if (offsetMs === null || durationMs === null) { + continue; + } + + const next = events.find( + (event) => + event.offsetMs !== null && + event.offsetMs > offsetMs && + NOTABLE_EVENT_KINDS.has(event.kind) + ); + const nextOffset = next?.offsetMs ?? durationMs; + if (nextOffset - offsetMs >= LONG_WAIT_AFTER_CLICK_MS) { + pushSignal( + signals, + signalFromEvent({ + events, + event: point.event, + kind: "long_wait_after_click", + severity: "low", + message: + "User clicked and then had a long wait or no further notable activity.", + }), + maxSignals + ); + break; + } + } +} + +function detectClickSignals( + events: ReplayEvent[], + replay: ReplayDetails, + signals: ReplayFrictionSignal[], + maxSignals: number +): void { + detectExplicitClickSignals(events, signals, maxSignals); + detectRepeatedClickSignal(events, signals, maxSignals); + detectLongWaitAfterClickSignal(events, replay, signals, maxSignals); +} + +function detectNetworkAndConsoleSignals( + events: ReplayEvent[], + signals: ReplayFrictionSignal[], + maxSignals: number +): void { + for (const event of events) { + const statusCode = numberFromData(event, "statusCode"); + if ( + event.kind === "network" && + statusCode !== undefined && + statusCode >= 400 + ) { + pushSignal( + signals, + signalFromEvent({ + events, + event, + kind: "network_error", + severity: statusCode >= 500 ? "high" : "medium", + message: `Network breadcrumb reported HTTP ${statusCode}.`, + }), + maxSignals + ); + } + if ( + event.kind === "console" && + stringFromData(event, "level")?.toLowerCase() === "error" + ) { + pushSignal( + signals, + signalFromEvent({ + events, + event, + kind: "console_error", + severity: "medium", + message: "Console emitted an error during the replay.", + }), + maxSignals + ); + } + if (event.kind === "error") { + pushSignal( + signals, + signalFromEvent({ + events, + event, + kind: "error_event", + severity: "high", + message: event.message ?? 
"Replay contains an error event.", + }), + maxSignals + ); + } + } +} + +function detectPerformanceSignals( + events: ReplayEvent[], + signals: ReplayFrictionSignal[], + maxSignals: number +): void { + for (const event of events) { + if (event.kind !== "span") { + continue; + } + + const durationMs = eventDuration(event); + if (durationMs === undefined) { + continue; + } + + const op = stringFromData(event, "op") ?? event.label ?? ""; + if (op === "navigation.navigate" && durationMs >= SLOW_NAVIGATION_MS) { + pushSignal( + signals, + signalFromEvent({ + events, + event, + kind: "slow_navigation", + severity: "medium", + message: `Navigation took ${Math.round(durationMs)}ms.`, + }), + maxSignals + ); + } else if (op.startsWith("resource.") && durationMs >= SLOW_RESOURCE_MS) { + pushSignal( + signals, + signalFromEvent({ + events, + event, + kind: "slow_resource", + severity: "low", + message: `Resource load took ${Math.round(durationMs)}ms.`, + }), + maxSignals + ); + } + } +} + +function detectSessionShapeSignals( + replay: ReplayDetails, + events: ReplayEvent[], + signals: ReplayFrictionSignal[], + maxSignals: number +): void { + const counts = countEvents(events); + if ( + typeof replay.duration === "number" && + replay.duration <= QUICK_BOUNCE_SECONDS && + counts.clicks === 0 && + counts.inputs === 0 + ) { + pushSignal( + signals, + { + kind: "quick_bounce", + severity: "low", + offsetMs: events[0]?.offsetMs ?? null, + url: events[0]?.url ?? null, + urlPath: events[0]?.urlPath ?? 
null, + message: "Replay ended quickly without clicks or inputs.", + evidence: events.slice(0, 5), + }, + maxSignals + ); + } + + const navigations = events.filter( + (event) => event.kind === "navigation" && event.offsetMs !== null + ); + for (const start of navigations) { + if (start.offsetMs === null) { + continue; + } + const startOffsetMs = start.offsetMs; + const nearby = navigations.filter( + (event) => + event.offsetMs !== null && + event.offsetMs >= startOffsetMs && + event.offsetMs - startOffsetMs <= ROUTE_CHURN_WINDOW_MS + ); + if (nearby.length >= ROUTE_CHURN_COUNT) { + pushSignal( + signals, + { + kind: "route_churn", + severity: "low", + offsetMs: start.offsetMs, + url: start.url ?? null, + urlPath: start.urlPath ?? null, + message: `${nearby.length} route changes occurred within ${ROUTE_CHURN_WINDOW_MS / 1000}s.`, + evidence: nearby.slice(0, 6), + }, + maxSignals + ); + break; + } + } +} + +function detectFrictionSignals( + replay: ReplayDetails, + events: ReplayEvent[], + maxSignals: number +): ReplayFrictionSignal[] { + const signals: ReplayFrictionSignal[] = []; + detectIndexedSignals(replay, events, signals, maxSignals); + detectClickSignals(events, replay, signals, maxSignals); + detectNetworkAndConsoleSignals(events, signals, maxSignals); + detectPerformanceSignals(events, signals, maxSignals); + detectSessionShapeSignals(replay, events, signals, maxSignals); + return signals.slice(0, maxSignals); +} + +function notableEvents( + events: ReplayEvent[], + maxNotableEvents: number +): ReplayEvent[] { + return events + .filter((event) => NOTABLE_EVENT_KINDS.has(event.kind)) + .slice(0, maxNotableEvents); +} + +function focusEvents(events: ReplayEvent[], focusPath?: string): ReplayEvent[] { + if (!focusPath) { + return events; + } + return events.filter((event) => replayUrlPathMatches(event.url, focusPath)); +} + +export function summarizeReplay( + replay: ReplayDetails, + events: ReplayEvent[], + options: SummaryOptions +): ReplaySummaryOutput { + 
const focusedEvents = focusEvents(events, options.focusPath); + const maxSignals = options.maxSignals ?? DEFAULT_MAX_SIGNALS; + const maxNotableEvents = + options.maxNotableEvents ?? DEFAULT_MAX_NOTABLE_EVENTS; + + return { + replayId: replay.id, + org: options.org, + project: options.project ?? null, + startedAt: replay.started_at ?? null, + durationSeconds: replay.duration ?? null, + entryUrl: replay.urls[0] ?? null, + exitUrl: replay.urls.at(-1) ?? null, + focusPath: options.focusPath ?? null, + counts: countEvents(focusedEvents), + timings: timingSummary(focusedEvents), + routes: buildRouteSummaries(focusedEvents), + signals: detectFrictionSignals(replay, focusedEvents, maxSignals), + notableEvents: notableEvents(focusedEvents, maxNotableEvents), + }; +} diff --git a/src/types/index.ts b/src/types/index.ts index ed8af9394..264b2ad38 100644 --- a/src/types/index.ts +++ b/src/types/index.ts @@ -52,6 +52,11 @@ export type { ReplayDetails, ReplayDetailsResponse, ReplayDevice, + ReplayEvent, + ReplayEventCounts, + ReplayEventKind, + ReplayFrictionSignal, + ReplayFrictionSignalKind, ReplayGeo, ReplayIdsByResource, ReplayListItem, @@ -61,10 +66,15 @@ export type { ReplayRecordingSegments, ReplayRelatedIssue, ReplayRelatedTrace, + ReplayRouteSummary, ReplaySdk, + ReplaySummaryOutput, + ReplayTimingSummary, ReplayUser, } from "./replay.js"; export { + REPLAY_EVENT_KINDS, + REPLAY_FRICTION_SIGNAL_KINDS, REPLAY_LIST_FIELDS, ReplayActivityEventSchema, ReplayBrowserSchema, @@ -72,6 +82,9 @@ export { ReplayDetailsResponseSchema, ReplayDetailsSchema, ReplayDeviceSchema, + ReplayEventCountsSchema, + ReplayEventSchema, + ReplayFrictionSignalSchema, ReplayGeoSchema, ReplayIdsByResourceSchema, ReplayListItemOutputSchema, @@ -82,7 +95,10 @@ export { ReplayRecordingSegmentsSchema, ReplayRelatedIssueSchema, ReplayRelatedTraceSchema, + ReplayRouteSummarySchema, ReplaySdkSchema, + ReplaySummaryOutputSchema, + ReplayTimingSummarySchema, ReplayUserSchema, ReplayViewOutputSchema, } from 
"./replay.js"; diff --git a/src/types/replay.ts b/src/types/replay.ts index 99e617405..894858acc 100644 --- a/src/types/replay.ts +++ b/src/types/replay.ts @@ -323,6 +323,201 @@ export const ReplayActivityEventSchema = z }) .describe("Summarized replay activity event"); +export const REPLAY_EVENT_KINDS = [ + "navigation", + "click", + "tap", + "input", + "focus", + "blur", + "scroll", + "viewport", + "mutation", + "dom-snapshot", + "breadcrumb", + "network", + "console", + "error", + "span", + "web-vital", + "memory", + "video", + "mobile", + "unknown", +] as const; + +/** Normalized replay event extracted from rrweb or Sentry custom frames. */ +export const ReplayEventSchema = z + .object({ + replayId: z.string().describe("Replay ID"), + segmentIndex: z.number().describe("Zero-based recording segment index"), + frameIndex: z.number().describe("Zero-based frame index within segment"), + offsetMs: z + .number() + .nullable() + .describe("Milliseconds from replay start to the event"), + timestamp: z + .string() + .nullable() + .describe("Event timestamp as ISO 8601 when available"), + kind: z.enum(REPLAY_EVENT_KINDS).describe("Normalized event kind"), + category: z.string().describe("Broad event category"), + label: z.string().nullable().optional().describe("Short event label"), + message: z.string().nullable().optional().describe("Message or summary"), + url: z.string().nullable().optional().describe("Current or target URL"), + urlPath: z + .string() + .nullable() + .optional() + .describe("Parsed URL pathname when available"), + urlQuery: z + .string() + .nullable() + .optional() + .describe("Parsed URL query string when available"), + selector: z + .string() + .nullable() + .optional() + .describe("CSS selector or target selector when available"), + nodeId: z + .union([z.string(), z.number()]) + .nullable() + .optional() + .describe("rrweb node ID when available"), + rawType: z.string().nullable().optional().describe("Source frame type"), + rawSource: z + 
.string() + .nullable() + .optional() + .describe("Source frame subtype"), + data: z + .record(z.unknown()) + .optional() + .describe("Kind-specific normalized fields"), + raw: z + .unknown() + .optional() + .describe("Raw source frame, only present when requested"), + }) + .describe("Normalized replay event"); + +export const REPLAY_FRICTION_SIGNAL_KINDS = [ + "indexed_error", + "indexed_warning", + "rage_click", + "dead_click", + "repeated_click", + "long_wait_after_click", + "quick_bounce", + "slow_navigation", + "slow_resource", + "network_error", + "console_error", + "error_event", + "route_churn", +] as const; + +export const ReplayRouteSummarySchema = z + .object({ + path: z.string().describe("Route pathname"), + url: z.string().nullable().describe("Representative URL for the route"), + firstOffsetMs: z + .number() + .nullable() + .describe("First observed offset for this route"), + lastOffsetMs: z + .number() + .nullable() + .describe("Last observed offset for this route"), + eventCount: z.number().describe("Number of normalized events on the route"), + }) + .describe("Replay route summary"); + +export const ReplayEventCountsSchema = z + .object({ + total: z.number().describe("Total normalized event count"), + navigations: z.number().describe("Navigation event count"), + clicks: z.number().describe("Click/tap event count"), + inputs: z.number().describe("Input/focus/blur event count"), + network: z.number().describe("Network event count"), + console: z.number().describe("Console event count"), + errors: z.number().describe("Error event count"), + spans: z.number().describe("Performance span event count"), + }) + .describe("Replay event counts"); + +export const ReplayTimingSummarySchema = z + .object({ + firstPaintMs: z.number().nullable().describe("First paint offset"), + firstContentfulPaintMs: z + .number() + .nullable() + .describe("First contentful paint offset"), + largestContentfulPaintMs: z + .number() + .nullable() + .describe("Largest contentful 
paint offset"), + navigationDurationMs: z + .number() + .nullable() + .describe("Navigation span duration when available"), + }) + .describe("Replay timing summary"); + +export const ReplayFrictionSignalSchema = z + .object({ + kind: z + .enum(REPLAY_FRICTION_SIGNAL_KINDS) + .describe("Detected friction signal kind"), + severity: z.enum(["low", "medium", "high"]).describe("Heuristic severity"), + offsetMs: z + .number() + .nullable() + .describe("Primary signal offset when available"), + url: z.string().nullable().optional().describe("URL at the signal"), + urlPath: z + .string() + .nullable() + .optional() + .describe("Route path at the signal"), + message: z.string().describe("Human-readable signal summary"), + evidence: z + .array(ReplayEventSchema) + .describe("Nearby normalized events supporting the signal"), + }) + .describe("Replay friction signal"); + +export const ReplaySummaryOutputSchema = z + .object({ + replayId: z.string().describe("Replay ID"), + org: z.string().describe("Organization slug"), + project: z.string().nullable().optional().describe("Project slug"), + startedAt: z.string().nullable().optional().describe("Replay start time"), + durationSeconds: z + .number() + .nullable() + .optional() + .describe("Replay duration in seconds"), + entryUrl: z.string().nullable().describe("First replay URL"), + exitUrl: z.string().nullable().describe("Last replay URL"), + focusPath: z + .string() + .nullable() + .optional() + .describe("Optional route path used to focus the summary"), + counts: ReplayEventCountsSchema.describe("Normalized event counts"), + timings: ReplayTimingSummarySchema.describe("Key timing observations"), + routes: z.array(ReplayRouteSummarySchema).describe("Route timeline"), + signals: z + .array(ReplayFrictionSignalSchema) + .describe("Detected non-error and error friction signals"), + notableEvents: z + .array(ReplayEventSchema) + .describe("Representative events useful for agent narrative"), + }) + .describe("Replay behavior 
summary"); + /** Related issue metadata extracted from replay-linked event IDs. */ export const ReplayRelatedIssueSchema = z .object({ @@ -387,5 +582,14 @@ export type ReplayListResponse = z.infer<typeof ReplayListResponseSchema>; export type ReplayDetailsResponse = z.infer<typeof ReplayDetailsResponseSchema>; export type ReplayIdsByResource = z.infer<typeof ReplayIdsByResourceSchema>; export type ReplayActivityEvent = z.infer<typeof ReplayActivityEventSchema>; +export type ReplayEventKind = (typeof REPLAY_EVENT_KINDS)[number]; +export type ReplayEvent = z.infer<typeof ReplayEventSchema>; +export type ReplayFrictionSignalKind = + (typeof REPLAY_FRICTION_SIGNAL_KINDS)[number]; +export type ReplayRouteSummary = z.infer<typeof ReplayRouteSummarySchema>; +export type ReplayEventCounts = z.infer<typeof ReplayEventCountsSchema>; +export type ReplayTimingSummary = z.infer<typeof ReplayTimingSummarySchema>; +export type ReplayFrictionSignal = z.infer<typeof ReplayFrictionSignalSchema>; +export type ReplaySummaryOutput = z.infer<typeof ReplaySummaryOutputSchema>; export type ReplayRelatedIssue = z.infer<typeof ReplayRelatedIssueSchema>; export type ReplayRelatedTrace = z.infer<typeof ReplayRelatedTraceSchema>; diff --git a/test/commands/replay/event-list.test.ts b/test/commands/replay/event-list.test.ts new file mode 100644 index 000000000..0c1264df1 --- /dev/null +++ b/test/commands/replay/event-list.test.ts @@ -0,0 +1,176 @@ +/** + * Replay Event List Command Tests + */ + +import { + afterEach, + beforeEach, + describe, + expect, + mock, + spyOn, + test, +} from "bun:test"; +import { listCommand } from "../../../src/commands/replay/event/list.js"; +// biome-ignore lint/performance/noNamespaceImport: needed for spyOn mocking +import * as apiClient from "../../../src/lib/api-client.js"; +// biome-ignore lint/performance/noNamespaceImport: needed for spyOn mocking +import * as resolveTarget from "../../../src/lib/resolve-target.js"; +import type { + ReplayDetails, + ReplayRecordingSegments, +} from "../../../src/types/index.js"; + +const REPLAY_ID = "346789a703f6454384f1de473b8b9fcc"; + +function sampleReplay(overrides: Partial<ReplayDetails> = {}): ReplayDetails { + return { + id: REPLAY_ID, + count_errors: 0, + count_segments: 1, + duration: 60, + error_ids: [], + info_ids: [], + project_id: "42", + started_at: "2025-01-01T00:00:00.000Z", + tags: {}, + trace_ids: [], + urls: 
["/signup"], + user: null, + warning_ids: [], + ...overrides, + }; +} + +function sampleSegments(): ReplayRecordingSegments { + return [ + [ + { + type: 4, + timestamp: Date.parse("2025-01-01T00:00:01.000Z"), + data: { href: "/signup" }, + }, + { + type: 5, + timestamp: Date.parse("2025-01-01T00:00:02.000Z"), + data: { + tag: "deadClick", + payload: { selector: "button[type=submit]", label: "Sign up" }, + }, + }, + { + type: 5, + timestamp: Date.parse("2025-01-01T00:00:03.000Z"), + data: { + tag: "breadcrumb", + payload: { + category: "fetch", + message: "POST /api/signup", + data: { status_code: 500, url: "/api/signup" }, + }, + }, + }, + ], + ]; +} + +function createMockContext() { + const stdoutWrite = mock(() => true); + return { + context: { + stdout: { write: stdoutWrite }, + stderr: { write: mock(() => true) }, + cwd: "/tmp", + }, + stdoutWrite, + }; +} + +describe("replay event list", () => { + let getReplaySpy: ReturnType<typeof spyOn>; + let getReplayRecordingSegmentsSpy: ReturnType<typeof spyOn>; + let resolveTargetSpy: ReturnType<typeof spyOn>; + + beforeEach(() => { + getReplaySpy = spyOn(apiClient, "getReplay").mockResolvedValue( + sampleReplay() + ); + getReplayRecordingSegmentsSpy = spyOn( + apiClient, + "getReplayRecordingSegments" + ).mockResolvedValue(sampleSegments()); + resolveTargetSpy = spyOn( + resolveTarget, + "resolveOrgOptionalProjectFromArg" + ).mockResolvedValue({ + org: "test-org", + project: "cli", + projectData: { id: "42", slug: "cli", name: "CLI" }, + }); + }); + + afterEach(() => { + getReplaySpy.mockRestore(); + getReplayRecordingSegmentsSpy.mockRestore(); + resolveTargetSpy.mockRestore(); + }); + + test("renders filtered JSON event envelope", async () => { + const { context, stdoutWrite } = createMockContext(); + const func = await listCommand.loader(); + await func.call( + context, + { + fresh: false, + json: true, + jsonl: false, + kind: ["click,network"], + limit: 10, + raw: false, + url: "/signup", + }, + `test-org/cli/${REPLAY_ID}` + ); + + 
expect(getReplayRecordingSegmentsSpy).toHaveBeenCalledWith( + "test-org", + "42", + REPLAY_ID, + { expectedSegments: 1 } + ); + + const output = stdoutWrite.mock.calls.map((call) => call[0]).join(""); + const parsed = JSON.parse(output); + expect(parsed.data).toHaveLength(2); + expect(parsed.data[0].kind).toBe("click"); + expect(parsed.data[0].selector).toBe("button[type=submit]"); + expect(parsed.data[1].kind).toBe("network"); + expect(parsed.total).toBe(2); + expect(parsed.truncated).toBe(false); + }); + + test("emits JSONL when requested", async () => { + const { context, stdoutWrite } = createMockContext(); + const func = await listCommand.loader(); + await func.call( + context, + { + fresh: false, + json: true, + jsonl: true, + limit: 2, + raw: false, + }, + `test-org/${REPLAY_ID}` + ); + + const lines = stdoutWrite.mock.calls + .map((call) => call[0]) + .join("") + .trim() + .split("\n"); + expect(lines).toHaveLength(2); + expect(JSON.parse(lines[0]!).kind).toBe("navigation"); + expect(JSON.parse(lines[1]!).kind).toBe("click"); + }); +}); diff --git a/test/commands/replay/list.test.ts b/test/commands/replay/list.test.ts index 887d98598..eda700e5c 100644 --- a/test/commands/replay/list.test.ts +++ b/test/commands/replay/list.test.ts @@ -166,6 +166,106 @@ describe("listCommand.func", () => { }); }); + test("combines URL sugar with query and filters problem-only rows", async () => { + resolveTargetSpy.mockResolvedValue({ org: "test-org", project: "cli" }); + listReplaysSpy.mockResolvedValue({ + data: [ + sampleReplays[0]!, + { + ...sampleReplays[0]!, + id: "aaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaa", + count_errors: 0, + count_dead_clicks: 0, + count_rage_clicks: 0, + count_warnings: 0, + error_ids: [], + warning_ids: [], + }, + ], + nextCursor: undefined, + }); + + const { context, stdoutWrite } = createMockContext(); + const func = await listCommand.loader(); + await func.call( + context, + { + "problem-only": true, + limit: 25, + json: true, + period: 
parsePeriod("7d"), + query: "environment:production", + sort: "-started_at", + url: "/signup", + }, + "test-org/cli" + ); + + expect(listReplaysSpy).toHaveBeenCalledWith("test-org", { + environment: undefined, + fields: [...REPLAY_LIST_FIELDS], + limit: 25, + projectSlugs: ["cli"], + query: "environment:production url:*/signup*", + sort: "-started_at", + cursor: undefined, + statsPeriod: "7d", + }); + + const output = stdoutWrite.mock.calls.map((call) => call[0]).join(""); + const parsed = JSON.parse(output); + expect(parsed.data).toHaveLength(1); + expect(parsed.data[0].id).toBe(sampleReplays[0]?.id); + }); + + test("filters --path by actual replay URL pathname", async () => { + resolveTargetSpy.mockResolvedValue({ org: "test-org", project: "cli" }); + listReplaysSpy.mockResolvedValue({ + data: [ + { + ...sampleReplays[0]!, + urls: ["https://example.com/signup/direct"], + }, + { + ...sampleReplays[0]!, + id: "aaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaa", + urls: ["https://example.com/replays/?query=/signup"], + }, + ], + nextCursor: undefined, + }); + + const { context, stdoutWrite } = createMockContext(); + const func = await listCommand.loader(); + await func.call( + context, + { + limit: 25, + json: true, + path: "/signup", + period: parsePeriod("7d"), + sort: "-started_at", + }, + "test-org/cli" + ); + + expect(listReplaysSpy).toHaveBeenCalledWith("test-org", { + environment: undefined, + fields: [...REPLAY_LIST_FIELDS], + limit: 25, + projectSlugs: ["cli"], + query: "url:*/signup*", + sort: "-started_at", + cursor: undefined, + statsPeriod: "7d", + }); + + const output = stdoutWrite.mock.calls.map((call) => call[0]).join(""); + const parsed = JSON.parse(output); + expect(parsed.data).toHaveLength(1); + expect(parsed.data[0].urls[0]).toBe("https://example.com/signup/direct"); + }); + test("renders human output with a replay hint", async () => { resolveTargetSpy.mockResolvedValue({ org: "test-org", project: "cli" }); listReplaysSpy.mockResolvedValue({ data: sampleReplays 
}); diff --git a/test/commands/replay/summarize.test.ts b/test/commands/replay/summarize.test.ts new file mode 100644 index 000000000..3b0d00dd5 --- /dev/null +++ b/test/commands/replay/summarize.test.ts @@ -0,0 +1,129 @@ +/** + * Replay Summarize Command Tests + */ + +import { + afterEach, + beforeEach, + describe, + expect, + mock, + spyOn, + test, +} from "bun:test"; +import { summarizeCommand } from "../../../src/commands/replay/summarize.js"; +// biome-ignore lint/performance/noNamespaceImport: needed for spyOn mocking +import * as apiClient from "../../../src/lib/api-client.js"; +// biome-ignore lint/performance/noNamespaceImport: needed for spyOn mocking +import * as resolveTarget from "../../../src/lib/resolve-target.js"; +import type { + ReplayDetails, + ReplayRecordingSegments, +} from "../../../src/types/index.js"; + +const REPLAY_ID = "346789a703f6454384f1de473b8b9fcc"; + +function sampleReplay(overrides: Partial<ReplayDetails> = {}): ReplayDetails { + return { + id: REPLAY_ID, + count_errors: 0, + count_segments: 1, + duration: 12, + error_ids: [], + info_ids: [], + project_id: "42", + started_at: "2025-01-01T00:00:00.000Z", + tags: {}, + trace_ids: [], + urls: ["https://example.com/signup"], + user: null, + warning_ids: [], + ...overrides, + }; +} + +function sampleSegments(): ReplayRecordingSegments { + return [ + [ + { + type: 4, + timestamp: Date.parse("2025-01-01T00:00:01.000Z"), + data: { href: "https://example.com/signup" }, + }, + { + type: 5, + timestamp: Date.parse("2025-01-01T00:00:02.000Z"), + data: { + tag: "deadClick", + payload: { selector: "button[type=submit]", label: "Sign up" }, + }, + }, + ], + ]; +} + +function createMockContext() { + const stdoutWrite = mock(() => true); + return { + context: { + stdout: { write: stdoutWrite }, + stderr: { write: mock(() => true) }, + cwd: "/tmp", + }, + stdoutWrite, + }; +} + +describe("replay summarize", () => { + let getReplaySpy: ReturnType<typeof spyOn>; + let getReplayRecordingSegmentsSpy: ReturnType<typeof spyOn>; + let
resolveTargetSpy: ReturnType<typeof spyOn>; + + beforeEach(() => { + getReplaySpy = spyOn(apiClient, "getReplay").mockResolvedValue( + sampleReplay() + ); + getReplayRecordingSegmentsSpy = spyOn( + apiClient, + "getReplayRecordingSegments" + ).mockResolvedValue(sampleSegments()); + resolveTargetSpy = spyOn( + resolveTarget, + "resolveOrgOptionalProjectFromArg" + ).mockResolvedValue({ + org: "test-org", + project: "web", + projectData: { id: "42", slug: "web", name: "Web" }, + }); + }); + + afterEach(() => { + getReplaySpy.mockRestore(); + getReplayRecordingSegmentsSpy.mockRestore(); + resolveTargetSpy.mockRestore(); + }); + + test("renders a JSON replay behavior summary", async () => { + const { context, stdoutWrite } = createMockContext(); + const func = await summarizeCommand.loader(); + await func.call( + context, + { + fresh: false, + json: true, + "limit-events": 5, + "limit-signals": 5, + path: "/signup", + }, + `test-org/web/${REPLAY_ID}` + ); + + const output = stdoutWrite.mock.calls.map((call) => call[0]).join(""); + const parsed = JSON.parse(output); + expect(parsed.replayId).toBe(REPLAY_ID); + expect(parsed.focusPath).toBe("/signup"); + expect(parsed.counts.clicks).toBe(1); + expect(parsed.routes[0].path).toBe("/signup"); + expect(parsed.signals[0].kind).toBe("dead_click"); + }); +}); diff --git a/test/lib/replay-events.test.ts b/test/lib/replay-events.test.ts new file mode 100644 index 000000000..6b97c10bf --- /dev/null +++ b/test/lib/replay-events.test.ts @@ -0,0 +1,149 @@ +import { describe, expect, test } from "bun:test"; +import { + extractNormalizedReplayEvents, + filterNormalizedReplayEvents, + parseReplayOffset, +} from "../../src/lib/replay-events.js"; +import type { + ReplayDetails, + ReplayRecordingSegments, +} from "../../src/types/index.js"; + +const REPLAY_ID = "346789a703f6454384f1de473b8b9fcc"; + +function replay(): ReplayDetails { + return { + id: REPLAY_ID, + count_errors: 0, + count_segments: 1, + duration: 60, + error_ids: [], + info_ids: [], + 
project_id: "42", + started_at: "2025-01-01T00:00:00.000Z", + tags: {}, + trace_ids: [], + urls: ["/signup"], + user: null, + warning_ids: [], + }; +} + +describe("extractNormalizedReplayEvents", () => { + test("normalizes navigation, clicks, breadcrumbs, and input events", () => { + const segments: ReplayRecordingSegments = [ + [ + { + type: 4, + timestamp: Date.parse("2025-01-01T00:00:01.000Z"), + data: { href: "/signup" }, + }, + { + type: 5, + timestamp: Date.parse("2025-01-01T00:00:02.000Z"), + data: { + tag: "rageClick", + payload: { selector: "button[type=submit]", label: "Sign up" }, + }, + }, + { + type: 5, + timestamp: Date.parse("2025-01-01T00:00:03.000Z"), + data: { + tag: "breadcrumb", + payload: { + category: "fetch", + message: "POST /api/signup", + data: { status_code: 500, url: "/api/signup" }, + }, + }, + }, + { + type: 3, + timestamp: Date.parse("2025-01-01T00:00:04.000Z"), + data: { source: 5, id: 12, text: "********" }, + }, + ], + ]; + + const events = extractNormalizedReplayEvents(replay(), segments); + + expect(events.map((event) => event.kind)).toEqual([ + "navigation", + "click", + "network", + "input", + ]); + expect(events[0]?.offsetMs).toBe(1000); + expect(events[0]?.urlPath).toBe("/signup"); + expect(events[1]?.selector).toBe("button[type=submit]"); + expect(events[1]?.data?.isRageClick).toBe(true); + expect(events[2]?.url).toBe("/api/signup"); + expect(events[3]?.data?.masked).toBe(true); + expect(events[3]?.data?.textLength).toBe(8); + }); + + test("filters by kind, url, and offset window", () => { + const segments: ReplayRecordingSegments = [ + [ + { + type: 4, + timestamp: Date.parse("2025-01-01T00:00:01.000Z"), + data: { href: "/signup" }, + }, + { + type: 5, + timestamp: Date.parse("2025-01-01T00:00:20.000Z"), + data: { + tag: "breadcrumb", + payload: { category: "console", level: "error", message: "boom" }, + }, + }, + ], + ]; + + const events = extractNormalizedReplayEvents(replay(), segments); + const filtered = 
filterNormalizedReplayEvents(events, { + kinds: ["error"], + url: "/signup", + fromMs: 10_000, + toMs: 30_000, + }); + + expect(filtered).toHaveLength(1); + expect(filtered[0]?.message).toBe("boom"); + }); + + test("filters by parsed path without matching query text", () => { + const segments: ReplayRecordingSegments = [ + [ + { + type: 4, + timestamp: Date.parse("2025-01-01T00:00:01.000Z"), + data: { href: "https://example.com/replays/?query=/signup" }, + }, + { + type: 4, + timestamp: Date.parse("2025-01-01T00:00:02.000Z"), + data: { href: "https://example.com/signup/direct" }, + }, + ], + ]; + + const events = extractNormalizedReplayEvents(replay(), segments); + const filtered = filterNormalizedReplayEvents(events, { path: "/signup" }); + + expect(filtered).toHaveLength(1); + expect(filtered[0]?.urlPath).toBe("/signup/direct"); + }); +}); + +describe("parseReplayOffset", () => { + test("parses common replay offset formats", () => { + expect(parseReplayOffset("90")).toBe(90_000); + expect(parseReplayOffset("90s")).toBe(90_000); + expect(parseReplayOffset("01:30")).toBe(90_000); + expect(parseReplayOffset("1:01:30")).toBe(3_690_000); + expect(parseReplayOffset("83000ms")).toBe(83_000); + }); +}); diff --git a/test/lib/replay-summary.test.ts b/test/lib/replay-summary.test.ts new file mode 100644 index 000000000..4c3e79021 --- /dev/null +++ b/test/lib/replay-summary.test.ts @@ -0,0 +1,92 @@ +import { describe, expect, test } from "bun:test"; +import { extractNormalizedReplayEvents } from "../../src/lib/replay-events.js"; +import { summarizeReplay } from "../../src/lib/replay-summary.js"; +import type { + ReplayDetails, + ReplayRecordingSegments, +} from "../../src/types/index.js"; + +const REPLAY_ID = "346789a703f6454384f1de473b8b9fcc"; + +function replay(): ReplayDetails { + return { + id: REPLAY_ID, + count_errors: 0, + count_segments: 1, + duration: 20, + error_ids: [], + info_ids: [], + project_id: "42", + started_at: "2025-01-01T00:00:00.000Z", + tags: {}, + 
trace_ids: [], + urls: ["https://example.com/signup", "https://example.com/signup/step-2"], + user: null, + warning_ids: [], + }; +} + +describe("summarizeReplay", () => { + test("summarizes routes, timings, and friction signals", () => { + const segments: ReplayRecordingSegments = [ + [ + { + type: 4, + timestamp: Date.parse("2025-01-01T00:00:01.000Z"), + data: { href: "https://example.com/signup" }, + }, + { + type: 5, + timestamp: Date.parse("2025-01-01T00:00:02.000Z"), + data: { + tag: "performanceSpan", + payload: { + op: "navigation.navigate", + description: "https://example.com/signup", + data: { duration: 3500 }, + }, + }, + }, + { + type: 5, + timestamp: Date.parse("2025-01-01T00:00:03.000Z"), + data: { + tag: "breadcrumb", + payload: { + category: "fetch", + message: "POST /api/signup", + data: { status_code: 500, url: "/api/signup" }, + }, + }, + }, + { + type: 3, + timestamp: Date.parse("2025-01-01T00:00:04.000Z"), + data: { source: 2, type: 2, x: 100, y: 100 }, + }, + { + type: 3, + timestamp: Date.parse("2025-01-01T00:00:05.000Z"), + data: { source: 2, type: 2, x: 105, y: 103 }, + }, + ], + ]; + + const events = extractNormalizedReplayEvents(replay(), segments); + const summary = summarizeReplay(replay(), events, { + org: "test-org", + project: "web", + }); + + expect(summary.routes.map((route) => route.path)).toContain("/signup"); + expect(summary.counts.clicks).toBe(2); + expect(summary.timings.navigationDurationMs).toBe(3500); + expect(summary.signals.map((signal) => signal.kind)).toEqual( + expect.arrayContaining([ + "slow_navigation", + "network_error", + "repeated_click", + ]) + ); + }); +}); From 075bc415bf1149bbf86c778785bf56e7a293b539 Mon Sep 17 00:00:00 2001 From: David Cramer Date: Mon, 4 May 2026 11:44:38 -0700 Subject: [PATCH 03/13] fix(replay): Clarify friction filtering Render missing replay summary durations without a seconds suffix and make --problem-only distinct from --friction by limiting it to indexed errors and warnings. 
Co-Authored-By: OpenAI Codex --- .../skills/sentry-cli/references/replay.md | 2 +- src/commands/replay/list.ts | 22 ++++++---- src/commands/replay/summarize.ts | 6 ++- test/commands/replay/list.test.ts | 43 ++++++++++++++++++- test/commands/replay/summarize.test.ts | 21 +++++++++ 5 files changed, 82 insertions(+), 12 deletions(-) diff --git a/plugins/sentry-cli/skills/sentry-cli/references/replay.md b/plugins/sentry-cli/skills/sentry-cli/references/replay.md index 9a618e4e0..5f84a0246 100644 --- a/plugins/sentry-cli/skills/sentry-cli/references/replay.md +++ b/plugins/sentry-cli/skills/sentry-cli/references/replay.md @@ -66,7 +66,7 @@ List recent Session Replays - `--entry-path - Filter by first visited URL pathname` - `--exit-path - Filter by last visited URL pathname` - `--friction - Only show replays with indexed friction signals (errors, warnings, rage clicks, or dead clicks)` -- `--problem-only - Only show replays with errors, warnings, rage clicks, or dead clicks` +- `--problem-only - Only show replays with indexed errors or warnings` - `-e, --environment ... - Filter by environment (repeatable, comma-separated)` - `-s, --sort - Sort by: date, oldest, duration, errors, warnings, rage, dead, activity, or a raw replay sort field - (default: "date")` - `-t, --period - Time range: "7d", "2026-04-01..2026-05-01", ">=2026-04-01" - (default: "7d")` diff --git a/src/commands/replay/list.ts b/src/commands/replay/list.ts index 1286d9aec..3f5b87168 100644 --- a/src/commands/replay/list.ts +++ b/src/commands/replay/list.ts @@ -192,17 +192,23 @@ function buildReplaySearchQuery(filters: { return parts.length > 0 ? parts.join(" ") : undefined; } -function hasProblemSignals(replay: ReplayListItem): boolean { +function hasErrorOrWarningSignals(replay: ReplayListItem): boolean { return ( (replay.count_errors ?? 0) > 0 || (replay.count_warnings ?? 0) > 0 || - (replay.count_rage_clicks ?? 0) > 0 || - (replay.count_dead_clicks ?? 
0) > 0 || replay.error_ids.length > 0 || replay.warning_ids.length > 0 ); } +function hasFrictionSignals(replay: ReplayListItem): boolean { + return ( + hasErrorOrWarningSignals(replay) || + (replay.count_rage_clicks ?? 0) > 0 || + (replay.count_dead_clicks ?? 0) > 0 + ); +} + function replayMatchesRouteFilters( replay: ReplayListItem, flags: ListFlags @@ -445,8 +451,7 @@ export const listCommand = buildListCommand("replay", { }, "problem-only": { kind: "boolean", - brief: - "Only show replays with errors, warnings, rage clicks, or dead clicks", + brief: "Only show replays with indexed errors or warnings", default: false, }, environment: { @@ -534,9 +539,10 @@ export const listCommand = buildListCommand("replay", { if (!replayMatchesRouteFilters(replay, flags)) { return false; } - return flags["problem-only"] || flags.friction - ? hasProblemSignals(replay) - : true; + if (flags["problem-only"]) { + return hasErrorOrWarningSignals(replay); + } + return flags.friction ? hasFrictionSignals(replay) : true; }); advancePaginationState(PAGINATION_KEY, contextKey, direction, nextCursor); diff --git a/src/commands/replay/summarize.ts b/src/commands/replay/summarize.ts index 5ed8a894b..51a762a5e 100644 --- a/src/commands/replay/summarize.ts +++ b/src/commands/replay/summarize.ts @@ -64,6 +64,10 @@ function formatOffset(offsetMs: number | null | undefined): string { : formatDurationCompactMs(offsetMs); } +function formatDurationSeconds(seconds: number | null | undefined): string { + return seconds === null || seconds === undefined ? "-" : `${seconds}s`; +} + const SIGNAL_COLUMNS: Column[] = [ { header: "OFFSET", @@ -129,7 +133,7 @@ function formatSummaryHuman(summary: ReplaySummaryOutput): string { "", `Entry: ${summary.entryUrl ?? "-"}`, `Exit: ${summary.exitUrl ?? "-"}`, - `Duration: ${summary.durationSeconds ?? 
"-"}s`, + `Duration: ${formatDurationSeconds(summary.durationSeconds)}`, `Events: ${summary.counts.total} total, ${summary.counts.clicks} clicks, ${summary.counts.inputs} inputs, ${summary.counts.network} network, ${summary.counts.errors} errors`, ]; diff --git a/test/commands/replay/list.test.ts b/test/commands/replay/list.test.ts index eda700e5c..8310c6de5 100644 --- a/test/commands/replay/list.test.ts +++ b/test/commands/replay/list.test.ts @@ -166,7 +166,7 @@ describe("listCommand.func", () => { }); }); - test("combines URL sugar with query and filters problem-only rows", async () => { + test("combines URL sugar with query and filters friction rows", async () => { resolveTargetSpy.mockResolvedValue({ org: "test-org", project: "cli" }); listReplaysSpy.mockResolvedValue({ data: [ @@ -190,7 +190,7 @@ describe("listCommand.func", () => { await func.call( context, { - "problem-only": true, + friction: true, limit: 25, json: true, period: parsePeriod("7d"), @@ -218,6 +218,45 @@ describe("listCommand.func", () => { expect(parsed.data[0].id).toBe(sampleReplays[0]?.id); }); + test("problem-only filters to errors and warnings, not click friction", async () => { + resolveTargetSpy.mockResolvedValue({ org: "test-org", project: "cli" }); + listReplaysSpy.mockResolvedValue({ + data: [ + sampleReplays[0]!, + { + ...sampleReplays[0]!, + id: "aaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaa", + count_errors: 0, + count_dead_clicks: 3, + count_rage_clicks: 0, + count_warnings: 0, + error_ids: [], + warning_ids: [], + }, + ], + nextCursor: undefined, + }); + + const { context, stdoutWrite } = createMockContext(); + const func = await listCommand.loader(); + await func.call( + context, + { + "problem-only": true, + limit: 25, + json: true, + period: parsePeriod("7d"), + sort: "-started_at", + }, + "test-org/cli" + ); + + const output = stdoutWrite.mock.calls.map((call) => call[0]).join(""); + const parsed = JSON.parse(output); + expect(parsed.data).toHaveLength(1); + 
expect(parsed.data[0].id).toBe(sampleReplays[0]?.id); + }); + test("filters --path by actual replay URL pathname", async () => { resolveTargetSpy.mockResolvedValue({ org: "test-org", project: "cli" }); listReplaysSpy.mockResolvedValue({ diff --git a/test/commands/replay/summarize.test.ts b/test/commands/replay/summarize.test.ts index 3b0d00dd5..56a11c07e 100644 --- a/test/commands/replay/summarize.test.ts +++ b/test/commands/replay/summarize.test.ts @@ -126,4 +126,25 @@ describe("replay summarize", () => { expect(parsed.routes[0].path).toBe("/signup"); expect(parsed.signals[0].kind).toBe("dead_click"); }); + + test("renders missing human duration without seconds suffix", async () => { + getReplaySpy.mockResolvedValue(sampleReplay({ duration: null })); + + const { context, stdoutWrite } = createMockContext(); + const func = await summarizeCommand.loader(); + await func.call( + context, + { + fresh: false, + json: false, + "limit-events": 5, + "limit-signals": 5, + }, + `test-org/web/${REPLAY_ID}` + ); + + const output = stdoutWrite.mock.calls.map((call) => call[0]).join(""); + expect(output).toContain("Duration: -"); + expect(output).not.toContain("Duration: -s"); + }); }); From 239e2ea89593eff209d928d5b02faf2600561d6e Mon Sep 17 00:00:00 2001 From: David Cramer Date: Mon, 4 May 2026 11:58:24 -0700 Subject: [PATCH 04/13] fix(replay): Normalize log levels case-insensitively Classify replay Log frames with mixed-case error levels as errors so --kind error and summary signal detection agree. 
Co-Authored-By: OpenAI Codex --- src/lib/replay-events.ts | 3 ++- test/lib/replay-events.test.ts | 28 ++++++++++++++++++++++++++++ 2 files changed, 30 insertions(+), 1 deletion(-) diff --git a/src/lib/replay-events.ts b/src/lib/replay-events.ts index c65ea12c6..7a3df1520 100644 --- a/src/lib/replay-events.ts +++ b/src/lib/replay-events.ts @@ -306,12 +306,13 @@ function normalizeIncrementalFrame( }); case "Log": { const level = firstString(data.level); + const normalizedLevel = level?.toLowerCase(); const message = Array.isArray(data.payload) ? data.payload.map(String).join(" ") : firstString(data.payload, data.message); return buildBaseEvent({ ...location, - kind: level === "error" ? "error" : "console", + kind: normalizedLevel === "error" ? "error" : "console", category: "console", label: level ?? "console", message, diff --git a/test/lib/replay-events.test.ts b/test/lib/replay-events.test.ts index 6b97c10bf..84aa73772 100644 --- a/test/lib/replay-events.test.ts +++ b/test/lib/replay-events.test.ts @@ -114,6 +114,34 @@ describe("extractNormalizedReplayEvents", () => { expect(filtered[0]?.message).toBe("boom"); }); + test("normalizes mixed-case log levels as errors", () => { + const segments: ReplayRecordingSegments = [ + [ + { + type: 4, + timestamp: Date.parse("2025-01-01T00:00:01.000Z"), + data: { href: "/signup" }, + }, + { + type: 3, + timestamp: Date.parse("2025-01-01T00:00:02.000Z"), + data: { + source: 11, + level: "Error", + payload: ["boom"], + }, + }, + ], + ]; + + const events = extractNormalizedReplayEvents(replay(), segments); + const errors = filterNormalizedReplayEvents(events, { kinds: ["error"] }); + + expect(errors).toHaveLength(1); + expect(errors[0]?.kind).toBe("error"); + expect(errors[0]?.message).toBe("boom"); + }); + test("filters by parsed path without matching query text", () => { const segments: ReplayRecordingSegments = [ [ From fe3b35a740555d466a138fe3825b6796f3a0c16b Mon Sep 17 00:00:00 2001 From: David Cramer Date: Mon, 4 May 2026 
12:10:15 -0700 Subject: [PATCH 05/13] fix(replay): Tighten route path filtering Match root route filters against child paths and avoid adding multiple generic URL search clauses for positional replay route filters. Co-Authored-By: OpenAI Codex --- src/commands/replay/list.ts | 11 +++++------ src/lib/replay-search.ts | 3 +++ test/commands/replay/list.test.ts | 31 +++++++++++++++++++++++++++++++ test/lib/replay-search.test.ts | 17 +++++++++++++++++ 4 files changed, 56 insertions(+), 6 deletions(-) diff --git a/src/commands/replay/list.ts b/src/commands/replay/list.ts index 3f5b87168..2a203094f 100644 --- a/src/commands/replay/list.ts +++ b/src/commands/replay/list.ts @@ -178,15 +178,14 @@ function buildReplaySearchQuery(filters: { exitPath?: string; }): string | undefined { const { entryPath, exitPath, path, query, url } = filters; + // Replay search only has a generic visited-URL field. Use one broad server + // prefilter, then apply entry/exit position checks against the URL list below. + const routePathPrefilter = path ?? entryPath ?? exitPath; const parts = [ query, url ? `url:${quoteSearchValue(wildcardSearchValue(url))}` : undefined, - path ? `url:${quoteSearchValue(wildcardSearchValue(path))}` : undefined, - entryPath - ? `url:${quoteSearchValue(wildcardSearchValue(entryPath))}` - : undefined, - exitPath - ? `url:${quoteSearchValue(wildcardSearchValue(exitPath))}` + routePathPrefilter + ? `url:${quoteSearchValue(wildcardSearchValue(routePathPrefilter))}` : undefined, ].filter((part): part is string => Boolean(part)); return parts.length > 0 ? 
parts.join(" ") : undefined; diff --git a/src/lib/replay-search.ts b/src/lib/replay-search.ts index ddcc82c8b..3947b96f0 100644 --- a/src/lib/replay-search.ts +++ b/src/lib/replay-search.ts @@ -128,6 +128,9 @@ export function replayUrlPathMatches( const normalizedFilter = normalizePathFilter(path); const normalizedPath = normalizePathFilter(parts.path); + if (normalizedFilter === "/") { + return normalizedPath.startsWith("/"); + } return ( normalizedPath === normalizedFilter || normalizedPath.startsWith(`${normalizedFilter}/`) diff --git a/test/commands/replay/list.test.ts b/test/commands/replay/list.test.ts index 8310c6de5..52f3307fe 100644 --- a/test/commands/replay/list.test.ts +++ b/test/commands/replay/list.test.ts @@ -305,6 +305,37 @@ describe("listCommand.func", () => { expect(parsed.data[0].urls[0]).toBe("https://example.com/signup/direct"); }); + test("uses one server URL prefilter for positional path filters", async () => { + resolveTargetSpy.mockResolvedValue({ org: "test-org", project: "cli" }); + listReplaysSpy.mockResolvedValue({ data: sampleReplays }); + + const { context } = createMockContext(); + const func = await listCommand.loader(); + await func.call( + context, + { + "entry-path": "/home", + limit: 25, + json: true, + path: "/signup", + period: parsePeriod("7d"), + sort: "-started_at", + }, + "test-org/cli" + ); + + expect(listReplaysSpy).toHaveBeenCalledWith("test-org", { + environment: undefined, + fields: [...REPLAY_LIST_FIELDS], + limit: 25, + projectSlugs: ["cli"], + query: "url:*/signup*", + sort: "-started_at", + cursor: undefined, + statsPeriod: "7d", + }); + }); + test("renders human output with a replay hint", async () => { resolveTargetSpy.mockResolvedValue({ org: "test-org", project: "cli" }); listReplaysSpy.mockResolvedValue({ data: sampleReplays }); diff --git a/test/lib/replay-search.test.ts b/test/lib/replay-search.test.ts index 94818471a..2c32418cb 100644 --- a/test/lib/replay-search.test.ts +++ 
b/test/lib/replay-search.test.ts @@ -2,6 +2,7 @@ import { describe, expect, test } from "bun:test"; import { getReplayRequestFields, isSupportedReplayField, + replayUrlPathMatches, } from "../../src/lib/replay-search.js"; describe("getReplayRequestFields", () => { @@ -38,3 +39,19 @@ describe("isSupportedReplayField", () => { expect(isSupportedReplayField("replay_type")).toBe(false); }); }); + +describe("replayUrlPathMatches", () => { + test("matches root filter against child paths", () => { + expect(replayUrlPathMatches("https://example.com/signup", "/")).toBe(true); + expect(replayUrlPathMatches("https://example.com/", "/")).toBe(true); + }); + + test("matches child paths without matching siblings", () => { + expect( + replayUrlPathMatches("https://example.com/signup/team", "/signup") + ).toBe(true); + expect( + replayUrlPathMatches("https://example.com/signup-flow", "/signup") + ).toBe(false); + }); +}); From b675f5abb65729fa95ba4a949b677691fd060901 Mon Sep 17 00:00:00 2001 From: David Cramer Date: Mon, 4 May 2026 12:37:05 -0700 Subject: [PATCH 06/13] feat(replay): Summarize route visits chronologically Represent replay routes as chronological visits instead of unique path aggregates. Add bounded route timing fields, next-path context, per-visit event counts, and explicit user-interaction metadata so agents can reason about repeated navigation paths. Split replay event counts for clicks, taps, inputs, focuses, blurs, and scrolls. This avoids overloading input counts and keeps the summary JSON useful for generalized replay analysis. 
Refs GH-907 Co-Authored-By: OpenAI Codex --- src/commands/replay/summarize.ts | 60 ++++++++- src/lib/replay-summary.ts | 202 +++++++++++++++++++++++-------- src/types/replay.ts | 67 +++++++--- test/lib/replay-summary.test.ts | 79 +++++++++++- 4 files changed, 335 insertions(+), 73 deletions(-) diff --git a/src/commands/replay/summarize.ts b/src/commands/replay/summarize.ts index 51a762a5e..1c03f1a0a 100644 --- a/src/commands/replay/summarize.ts +++ b/src/commands/replay/summarize.ts @@ -95,14 +95,14 @@ const SIGNAL_COLUMNS: Column[] = [ const ROUTE_COLUMNS: Column[] = [ { - header: "FIRST", - value: (route) => formatOffset(route.firstOffsetMs), + header: "ENTER", + value: (route) => formatOffset(route.enteredAtOffsetMs), minWidth: 8, shrinkable: false, }, { - header: "LAST", - value: (route) => formatOffset(route.lastOffsetMs), + header: "DURATION", + value: (route) => formatOffset(route.durationMs), minWidth: 8, shrinkable: false, }, @@ -112,14 +112,64 @@ const ROUTE_COLUMNS: Column[] = [ align: "right", minWidth: 6, }, + { + header: "INTERACTIONS", + value: (route) => formatRouteInteractions(route), + minWidth: 12, + truncate: true, + }, { header: "PATH", value: (route) => escapeMarkdownCell(route.path), minWidth: 24, truncate: true, }, + { + header: "NEXT", + value: (route) => escapeMarkdownCell(route.nextPath ?? "-"), + minWidth: 16, + truncate: true, + }, ]; +function formatCount(count: number, singular: string, plural = `${singular}s`) { + return `${count} ${count === 1 ? singular : plural}`; +} + +function formatNonZeroCount( + count: number, + singular: string, + plural = `${singular}s` +): string | undefined { + return count > 0 ? 
formatCount(count, singular, plural) : undefined; +} + +function formatRouteInteractions(route: ReplayRouteSummary): string { + const parts = [ + formatNonZeroCount(route.counts.clicks, "click"), + formatNonZeroCount(route.counts.taps, "tap"), + formatNonZeroCount(route.counts.inputs, "input"), + formatNonZeroCount(route.counts.scrolls, "scroll"), + formatNonZeroCount(route.counts.focuses, "focus", "focuses"), + formatNonZeroCount(route.counts.blurs, "blur"), + ].filter((part): part is string => Boolean(part)); + return parts.length > 0 ? parts.join(", ") : "-"; +} + +function formatEventCounts(summary: ReplaySummaryOutput): string { + return [ + formatCount(summary.counts.total, "event"), + formatCount(summary.counts.clicks, "click"), + formatCount(summary.counts.taps, "tap"), + formatCount(summary.counts.inputs, "input"), + formatCount(summary.counts.scrolls, "scroll"), + formatCount(summary.counts.focuses, "focus", "focuses"), + formatCount(summary.counts.blurs, "blur"), + formatCount(summary.counts.network, "network event"), + formatCount(summary.counts.errors, "error"), + ].join(", "); +} + function jsonTransformSummary( summary: ReplaySummaryOutput, fields?: string[] @@ -134,7 +184,7 @@ function formatSummaryHuman(summary: ReplaySummaryOutput): string { `Entry: ${summary.entryUrl ?? "-"}`, `Exit: ${summary.exitUrl ?? 
"-"}`, `Duration: ${formatDurationSeconds(summary.durationSeconds)}`, - `Events: ${summary.counts.total} total, ${summary.counts.clicks} clicks, ${summary.counts.inputs} inputs, ${summary.counts.network} network, ${summary.counts.errors} errors`, + `Events: ${formatEventCounts(summary)}`, ]; if (summary.focusPath) { diff --git a/src/lib/replay-summary.ts b/src/lib/replay-summary.ts index 1f13d5d0a..5fa7660e9 100644 --- a/src/lib/replay-summary.ts +++ b/src/lib/replay-summary.ts @@ -42,7 +42,6 @@ const SLOW_RESOURCE_MS = 3000; const ROUTE_CHURN_WINDOW_MS = 15_000; const ROUTE_CHURN_COUNT = 3; -const INPUT_KINDS = new Set(["input", "focus", "blur"]); const NOTABLE_EVENT_KINDS = new Set([ "navigation", "click", @@ -79,66 +78,155 @@ function routeKey(event: ReplayEvent): string | undefined { return event.urlPath ?? undefined; } -function countEvents(events: ReplayEvent[]): ReplayEventCounts { +function emptyEventCounts(): ReplayEventCounts { return { - total: events.length, - navigations: events.filter((event) => event.kind === "navigation").length, - clicks: events.filter( - (event) => event.kind === "click" || event.kind === "tap" - ).length, - inputs: events.filter((event) => INPUT_KINDS.has(event.kind)).length, - network: events.filter((event) => event.kind === "network").length, - console: events.filter((event) => event.kind === "console").length, - errors: events.filter((event) => event.kind === "error").length, - spans: events.filter((event) => event.kind === "span").length, + total: 0, + navigations: 0, + clicks: 0, + taps: 0, + inputs: 0, + focuses: 0, + blurs: 0, + scrolls: 0, + network: 0, + console: 0, + errors: 0, + spans: 0, }; } -function buildRouteSummaries(events: ReplayEvent[]): ReplayRouteSummary[] { - const routes = new Map(); +function countEvents(events: ReplayEvent[]): ReplayEventCounts { + const counts = emptyEventCounts(); + counts.total = events.length; for (const event of events) { - const path = routeKey(event); - if (!path) { - continue; + 
switch (event.kind) { + case "navigation": + counts.navigations += 1; + break; + case "click": + counts.clicks += 1; + break; + case "tap": + counts.taps += 1; + break; + case "input": + counts.inputs += 1; + break; + case "focus": + counts.focuses += 1; + break; + case "blur": + counts.blurs += 1; + break; + case "scroll": + counts.scrolls += 1; + break; + case "network": + counts.network += 1; + break; + case "console": + counts.console += 1; + break; + case "error": + counts.errors += 1; + break; + case "span": + counts.spans += 1; + break; + default: + break; } + } + + return counts; +} + +function hadUserInteraction(counts: ReplayEventCounts): boolean { + return ( + counts.clicks > 0 || + counts.taps > 0 || + counts.inputs > 0 || + counts.scrolls > 0 + ); +} - const existing = routes.get(path); - if (!existing) { - routes.set(path, { - path, +type RouteVisitDraft = { + path: string; + url: string | null; + enteredAtOffsetMs: number | null; + events: ReplayEvent[]; +}; + +function durationBetween( + startMs: number | null, + endMs: number | null +): number | null { + if (startMs === null || endMs === null || endMs < startMs) { + return null; + } + return endMs - startMs; +} + +function finalizeRouteVisit( + visit: RouteVisitDraft, + leftAtOffsetMs: number | null, + nextPath: string | null +): ReplayRouteSummary { + const eventOffsets = visit.events + .map((event) => event.offsetMs) + .filter((offset): offset is number => offset !== null); + const counts = countEvents(visit.events); + return { + path: visit.path, + url: visit.url, + enteredAtOffsetMs: visit.enteredAtOffsetMs, + leftAtOffsetMs, + durationMs: durationBetween(visit.enteredAtOffsetMs, leftAtOffsetMs), + nextPath, + firstOffsetMs: eventOffsets.length > 0 ? Math.min(...eventOffsets) : null, + lastOffsetMs: eventOffsets.length > 0 ? 
Math.max(...eventOffsets) : null, + eventCount: visit.events.length, + counts, + hadUserInteraction: hadUserInteraction(counts), + }; +} + +function buildRouteSummaries( + events: ReplayEvent[], + replayDuration: number | null +): ReplayRouteSummary[] { + const routes: ReplayRouteSummary[] = []; + let current: RouteVisitDraft | undefined; + + for (const event of events) { + const navigationPath = + event.kind === "navigation" ? routeKey(event) : undefined; + if (navigationPath && (!current || current.path !== navigationPath)) { + if (current) { + routes.push( + finalizeRouteVisit(current, event.offsetMs, navigationPath) + ); + } + current = { + path: navigationPath, url: event.url ?? null, - firstOffsetMs: event.offsetMs, - lastOffsetMs: event.offsetMs, - eventCount: 1, - }); + enteredAtOffsetMs: event.offsetMs, + events: [event], + }; continue; } - existing.eventCount += 1; - if (event.offsetMs !== null) { - existing.lastOffsetMs = event.offsetMs; - if ( - existing.firstOffsetMs === null || - event.offsetMs < existing.firstOffsetMs - ) { - existing.firstOffsetMs = event.offsetMs; - } + if (current) { + current.events.push(event); } } - return [...routes.values()].sort((a, b) => { - if (a.firstOffsetMs === null && b.firstOffsetMs === null) { - return 0; - } - if (a.firstOffsetMs === null) { - return 1; - } - if (b.firstOffsetMs === null) { - return -1; - } - return a.firstOffsetMs - b.firstOffsetMs; - }); + if (current) { + routes.push(finalizeRouteVisit(current, replayDuration, null)); + } + + return routes; } function firstOffsetForSpan( @@ -553,7 +641,9 @@ function detectSessionShapeSignals( typeof replay.duration === "number" && replay.duration <= QUICK_BOUNCE_SECONDS && counts.clicks === 0 && - counts.inputs === 0 + counts.taps === 0 && + counts.inputs === 0 && + counts.scrolls === 0 ) { pushSignal( signals, @@ -563,7 +653,7 @@ function detectSessionShapeSignals( offsetMs: events[0]?.offsetMs ?? null, url: events[0]?.url ?? null, urlPath: events[0]?.urlPath ?? 
null, - message: "Replay ended quickly without clicks or inputs.", + message: "Replay ended quickly without user interactions.", evidence: events.slice(0, 5), }, maxSignals @@ -633,15 +723,29 @@ function focusEvents(events: ReplayEvent[], focusPath?: string): ReplayEvent[] { return events.filter((event) => replayUrlPathMatches(event.url, focusPath)); } +function routeMatchesFocus( + route: ReplayRouteSummary, + focusPath?: string +): boolean { + if (!focusPath) { + return true; + } + return replayUrlPathMatches(route.url ?? route.path, focusPath); +} + export function summarizeReplay( replay: ReplayDetails, events: ReplayEvent[], options: SummaryOptions ): ReplaySummaryOutput { const focusedEvents = focusEvents(events, options.focusPath); + const replayDuration = replayDurationMs(replay); const maxSignals = options.maxSignals ?? DEFAULT_MAX_SIGNALS; const maxNotableEvents = options.maxNotableEvents ?? DEFAULT_MAX_NOTABLE_EVENTS; + const routes = buildRouteSummaries(events, replayDuration).filter((route) => + routeMatchesFocus(route, options.focusPath) + ); return { replayId: replay.id, @@ -654,7 +758,7 @@ export function summarizeReplay( focusPath: options.focusPath ?? 
null, counts: countEvents(focusedEvents), timings: timingSummary(focusedEvents), - routes: buildRouteSummaries(focusedEvents), + routes, signals: detectFrictionSignals(replay, focusedEvents, maxSignals), notableEvents: notableEvents(focusedEvents, maxNotableEvents), }; diff --git a/src/types/replay.ts b/src/types/replay.ts index 894858acc..ec617536a 100644 --- a/src/types/replay.ts +++ b/src/types/replay.ts @@ -418,28 +418,16 @@ export const REPLAY_FRICTION_SIGNAL_KINDS = [ "route_churn", ] as const; -export const ReplayRouteSummarySchema = z - .object({ - path: z.string().describe("Route pathname"), - url: z.string().nullable().describe("Representative URL for the route"), - firstOffsetMs: z - .number() - .nullable() - .describe("First observed offset for this route"), - lastOffsetMs: z - .number() - .nullable() - .describe("Last observed offset for this route"), - eventCount: z.number().describe("Number of normalized events on the route"), - }) - .describe("Replay route summary"); - export const ReplayEventCountsSchema = z .object({ total: z.number().describe("Total normalized event count"), navigations: z.number().describe("Navigation event count"), - clicks: z.number().describe("Click/tap event count"), - inputs: z.number().describe("Input/focus/blur event count"), + clicks: z.number().describe("Click event count"), + taps: z.number().describe("Tap event count"), + inputs: z.number().describe("Input event count"), + focuses: z.number().describe("Focus event count"), + blurs: z.number().describe("Blur event count"), + scrolls: z.number().describe("Scroll event count"), network: z.number().describe("Network event count"), console: z.number().describe("Console event count"), errors: z.number().describe("Error event count"), @@ -447,6 +435,49 @@ export const ReplayEventCountsSchema = z }) .describe("Replay event counts"); +export const ReplayRouteSummarySchema = z + .object({ + path: z.string().describe("Route pathname"), + url: z + .string() + .nullable() + 
.describe("Representative URL for this route visit"), + enteredAtOffsetMs: z + .number() + .nullable() + .describe("Offset where this route visit started"), + leftAtOffsetMs: z + .number() + .nullable() + .describe("Offset where this route visit ended"), + durationMs: z + .number() + .nullable() + .describe("Duration of this route visit when bounded"), + nextPath: z + .string() + .nullable() + .describe("Next route pathname after this visit"), + firstOffsetMs: z + .number() + .nullable() + .describe("First observed event offset for this route visit"), + lastOffsetMs: z + .number() + .nullable() + .describe("Last observed event offset for this route visit"), + eventCount: z + .number() + .describe("Number of normalized events in this route visit"), + counts: ReplayEventCountsSchema.describe( + "Normalized event counts within this route visit" + ), + hadUserInteraction: z + .boolean() + .describe("Whether this route visit had click, tap, input, or scroll"), + }) + .describe("Replay route visit summary"); + export const ReplayTimingSummarySchema = z .object({ firstPaintMs: z.number().nullable().describe("First paint offset"), diff --git a/test/lib/replay-summary.test.ts b/test/lib/replay-summary.test.ts index 4c3e79021..dc6d283e0 100644 --- a/test/lib/replay-summary.test.ts +++ b/test/lib/replay-summary.test.ts @@ -78,7 +78,8 @@ describe("summarizeReplay", () => { project: "web", }); - expect(summary.routes.map((route) => route.path)).toContain("/signup"); + expect(summary.routes.map((route) => route.path)).toEqual(["/signup"]); + expect(summary.routes[0]?.counts.network).toBe(1); expect(summary.counts.clicks).toBe(2); expect(summary.timings.navigationDurationMs).toBe(3500); expect(summary.signals.map((signal) => signal.kind)).toEqual( @@ -89,4 +90,80 @@ describe("summarizeReplay", () => { ]) ); }); + + test("summarizes repeated route visits as route windows", () => { + const segments: ReplayRecordingSegments = [ + [ + { + type: 4, + timestamp: 
Date.parse("2025-01-01T00:00:01.000Z"), + data: { href: "https://example.com/signup" }, + }, + { + type: 3, + timestamp: Date.parse("2025-01-01T00:00:02.000Z"), + data: { source: 2, type: 2, x: 100, y: 100 }, + }, + { + type: 4, + timestamp: Date.parse("2025-01-01T00:00:03.000Z"), + data: { href: "https://example.com/dashboard" }, + }, + { + type: 3, + timestamp: Date.parse("2025-01-01T00:00:04.000Z"), + data: { source: 5, id: 12, text: "hello" }, + }, + { + type: 4, + timestamp: Date.parse("2025-01-01T00:00:05.000Z"), + data: { href: "https://example.com/signup" }, + }, + { + type: 3, + timestamp: Date.parse("2025-01-01T00:00:06.000Z"), + data: { source: 3, id: 12, x: 0, y: 500 }, + }, + ], + ]; + + const events = extractNormalizedReplayEvents(replay(), segments); + const summary = summarizeReplay(replay(), events, { + org: "test-org", + project: "web", + }); + + expect(summary.routes.map((route) => route.path)).toEqual([ + "/signup", + "/dashboard", + "/signup", + ]); + expect(summary.routes[0]).toMatchObject({ + enteredAtOffsetMs: 1000, + leftAtOffsetMs: 3000, + durationMs: 2000, + nextPath: "/dashboard", + eventCount: 2, + hadUserInteraction: true, + }); + expect(summary.routes[0]?.counts.clicks).toBe(1); + expect(summary.routes[1]?.counts.inputs).toBe(1); + expect(summary.routes[2]?.counts.scrolls).toBe(1); + expect(summary.routes[2]?.leftAtOffsetMs).toBe(20_000); + expect(summary.counts.inputs).toBe(1); + expect(summary.counts.focuses).toBe(0); + expect(summary.counts.scrolls).toBe(1); + + const focusedSummary = summarizeReplay(replay(), events, { + org: "test-org", + project: "web", + focusPath: "/signup", + }); + expect(focusedSummary.routes.map((route) => route.path)).toEqual([ + "/signup", + "/signup", + ]); + expect(focusedSummary.counts.inputs).toBe(0); + expect(focusedSummary.counts.scrolls).toBe(1); + }); }); From 55182be5cff9ef47fdeabc99fcd7e9a26fbca219 Mon Sep 17 00:00:00 2001 From: David Cramer Date: Mon, 4 May 2026 12:44:12 -0700 Subject: [PATCH 
07/13] feat(replay): Surface recording parser context Expose replay platform, SDK, replay type, and recording parser stats from replay summarize. This keeps the summary output honest for non-web or sparsely parsed recordings without adding a new command surface. Include the generated replay skill reference updates from the summary schema change. Refs GH-907 Co-Authored-By: OpenAI Codex --- .../skills/sentry-cli/references/replay.md | 5 ++++ src/commands/replay/summarize.ts | 28 +++++++++++++++++++ src/lib/replay-summary.ts | 22 +++++++++++++++ src/types/replay.ts | 28 +++++++++++++++++++ test/commands/replay/summarize.test.ts | 6 ++++ test/lib/replay-summary.test.ts | 7 +++++ 6 files changed, 96 insertions(+) diff --git a/plugins/sentry-cli/skills/sentry-cli/references/replay.md b/plugins/sentry-cli/skills/sentry-cli/references/replay.md index 5f84a0246..902a8650b 100644 --- a/plugins/sentry-cli/skills/sentry-cli/references/replay.md +++ b/plugins/sentry-cli/skills/sentry-cli/references/replay.md @@ -152,12 +152,17 @@ Summarize Session Replay behavior | `replayId` | string | Replay ID | | `org` | string | Organization slug | | `project` | string \| null | Project slug | +| `platform` | string \| null | Replay platform | +| `sdkName` | string \| null | Replay SDK name | +| `sdkVersion` | string \| null | Replay SDK version | +| `replayType` | string \| null | Replay type | | `startedAt` | string \| null | Replay start time | | `durationSeconds` | number \| null | Replay duration in seconds | | `entryUrl` | string \| null | First replay URL | | `exitUrl` | string \| null | Last replay URL | | `focusPath` | string \| null | Optional route path used to focus the summary | | `counts` | object | Normalized event counts | +| `recording` | object | Downloaded recording and parser stats | | `timings` | object | Key timing observations | | `routes` | array | Route timeline | | `signals` | array | Detected non-error and error friction signals | diff --git 
a/src/commands/replay/summarize.ts b/src/commands/replay/summarize.ts index 1c03f1a0a..a33374209 100644 --- a/src/commands/replay/summarize.ts +++ b/src/commands/replay/summarize.ts @@ -156,6 +156,23 @@ function formatRouteInteractions(route: ReplayRouteSummary): string { return parts.length > 0 ? parts.join(", ") : "-"; } +function formatRecordingStats(summary: ReplaySummaryOutput): string { + return [ + summary.recording.segmentCount !== null + ? formatCount(summary.recording.segmentCount, "segment") + : undefined, + summary.recording.frameCount !== null + ? formatCount(summary.recording.frameCount, "raw frame") + : undefined, + formatCount(summary.recording.normalizedEventCount, "normalized event"), + summary.recording.focusedEventCount !== null + ? formatCount(summary.recording.focusedEventCount, "focused event") + : undefined, + ] + .filter((part): part is string => Boolean(part)) + .join(", "); +} + function formatEventCounts(summary: ReplaySummaryOutput): string { return [ formatCount(summary.counts.total, "event"), @@ -181,9 +198,13 @@ function formatSummaryHuman(summary: ReplaySummaryOutput): string { const lines = [ `Replay summary for ${summary.org}/${summary.replayId.slice(0, 8)}`, "", + `Platform: ${summary.platform ?? "-"}`, + `SDK: ${[summary.sdkName, summary.sdkVersion].filter(Boolean).join(" ") || "-"}`, + `Replay type: ${summary.replayType ?? "-"}`, `Entry: ${summary.entryUrl ?? "-"}`, `Exit: ${summary.exitUrl ?? "-"}`, `Duration: ${formatDurationSeconds(summary.durationSeconds)}`, + `Recording: ${formatRecordingStats(summary)}`, `Events: ${formatEventCounts(summary)}`, ]; @@ -215,6 +236,7 @@ export const summarizeCommand = buildCommand({ fullDescription: "Summarize a Session Replay into route flow, event counts, timing facts, and deterministic friction signals.\n\n" + "This command does not use AI. It returns factual evidence that an agent can use for analysis.\n\n" + + "Recording parsing is best-effort. 
Summary metadata includes platform, SDK, replay type, and raw recording counts so agents can tell when a replay fetched successfully but produced sparse normalized events.\n\n" + "Examples:\n" + " sentry replay summarize sentry/346789a703f6454384f1de473b8b9fcc --json\n" + " sentry replay summarize sentry/346789a703f6454384f1de473b8b9fcc --path /signup --json\n" + @@ -298,12 +320,18 @@ export const summarizeCommand = buildCommand({ }); const events = extractNormalizedReplayEvents(replay, segments); + const recordingFrameCount = segments.reduce( + (count, segment) => count + segment.length, + 0 + ); const summary = summarizeReplay(replay, events, { org: resolved.org, project: resolved.project, focusPath: flags.path, maxSignals: flags["limit-signals"], maxNotableEvents: flags["limit-events"], + recordingFrameCount, + recordingSegmentCount: segments.length, }); yield new CommandOutput(summary); diff --git a/src/lib/replay-summary.ts b/src/lib/replay-summary.ts index 5fa7660e9..be60caf29 100644 --- a/src/lib/replay-summary.ts +++ b/src/lib/replay-summary.ts @@ -23,6 +23,8 @@ type SummaryOptions = { focusPath?: string; maxSignals?: number; maxNotableEvents?: number; + recordingFrameCount?: number | null; + recordingSegmentCount?: number | null; }; type ClickPoint = { @@ -733,6 +735,16 @@ function routeMatchesFocus( return replayUrlPathMatches(route.url ?? route.path, focusPath); } +function recordingSegmentCount( + replay: ReplayDetails, + options: SummaryOptions +): number | null { + if (options.recordingSegmentCount !== undefined) { + return options.recordingSegmentCount; + } + return replay.count_segments ?? null; +} + export function summarizeReplay( replay: ReplayDetails, events: ReplayEvent[], @@ -751,12 +763,22 @@ export function summarizeReplay( replayId: replay.id, org: options.org, project: options.project ?? null, + platform: replay.platform ?? null, + sdkName: replay.sdk?.name ?? null, + sdkVersion: replay.sdk?.version ?? 
null, + replayType: replay.replay_type ?? null, startedAt: replay.started_at ?? null, durationSeconds: replay.duration ?? null, entryUrl: replay.urls[0] ?? null, exitUrl: replay.urls.at(-1) ?? null, focusPath: options.focusPath ?? null, counts: countEvents(focusedEvents), + recording: { + segmentCount: recordingSegmentCount(replay, options), + frameCount: options.recordingFrameCount ?? null, + normalizedEventCount: events.length, + focusedEventCount: options.focusPath ? focusedEvents.length : null, + }, timings: timingSummary(focusedEvents), routes, signals: detectFrictionSignals(replay, focusedEvents, maxSignals), diff --git a/src/types/replay.ts b/src/types/replay.ts index ec617536a..f1b458251 100644 --- a/src/types/replay.ts +++ b/src/types/replay.ts @@ -496,6 +496,26 @@ export const ReplayTimingSummarySchema = z }) .describe("Replay timing summary"); +export const ReplayRecordingStatsSchema = z + .object({ + segmentCount: z + .number() + .nullable() + .describe("Downloaded recording segment count when available"), + frameCount: z + .number() + .nullable() + .describe("Downloaded raw recording frame count when available"), + normalizedEventCount: z + .number() + .describe("Normalized event count extracted from the recording"), + focusedEventCount: z + .number() + .nullable() + .describe("Normalized event count after the optional focus path"), + }) + .describe("Replay recording parser stats"); + export const ReplayFrictionSignalSchema = z .object({ kind: z @@ -524,6 +544,10 @@ export const ReplaySummaryOutputSchema = z replayId: z.string().describe("Replay ID"), org: z.string().describe("Organization slug"), project: z.string().nullable().optional().describe("Project slug"), + platform: z.string().nullable().optional().describe("Replay platform"), + sdkName: z.string().nullable().optional().describe("Replay SDK name"), + sdkVersion: z.string().nullable().optional().describe("Replay SDK version"), + replayType: z.string().nullable().optional().describe("Replay 
type"), startedAt: z.string().nullable().optional().describe("Replay start time"), durationSeconds: z .number() @@ -538,6 +562,9 @@ export const ReplaySummaryOutputSchema = z .optional() .describe("Optional route path used to focus the summary"), counts: ReplayEventCountsSchema.describe("Normalized event counts"), + recording: ReplayRecordingStatsSchema.describe( + "Downloaded recording and parser stats" + ), timings: ReplayTimingSummarySchema.describe("Key timing observations"), routes: z.array(ReplayRouteSummarySchema).describe("Route timeline"), signals: z @@ -619,6 +646,7 @@ export type ReplayFrictionSignalKind = (typeof REPLAY_FRICTION_SIGNAL_KINDS)[number]; export type ReplayRouteSummary = z.infer; export type ReplayEventCounts = z.infer; +export type ReplayRecordingStats = z.infer; export type ReplayTimingSummary = z.infer; export type ReplayFrictionSignal = z.infer; export type ReplaySummaryOutput = z.infer; diff --git a/test/commands/replay/summarize.test.ts b/test/commands/replay/summarize.test.ts index 56a11c07e..ca52c69db 100644 --- a/test/commands/replay/summarize.test.ts +++ b/test/commands/replay/summarize.test.ts @@ -123,6 +123,12 @@ describe("replay summarize", () => { expect(parsed.replayId).toBe(REPLAY_ID); expect(parsed.focusPath).toBe("/signup"); expect(parsed.counts.clicks).toBe(1); + expect(parsed.recording).toMatchObject({ + segmentCount: 1, + frameCount: 2, + normalizedEventCount: 2, + focusedEventCount: 2, + }); expect(parsed.routes[0].path).toBe("/signup"); expect(parsed.signals[0].kind).toBe("dead_click"); }); diff --git a/test/lib/replay-summary.test.ts b/test/lib/replay-summary.test.ts index dc6d283e0..e8aee6166 100644 --- a/test/lib/replay-summary.test.ts +++ b/test/lib/replay-summary.test.ts @@ -80,6 +80,12 @@ describe("summarizeReplay", () => { expect(summary.routes.map((route) => route.path)).toEqual(["/signup"]); expect(summary.routes[0]?.counts.network).toBe(1); + expect(summary.recording).toEqual({ + segmentCount: 1, + 
frameCount: null, + normalizedEventCount: 5, + focusedEventCount: null, + }); expect(summary.counts.clicks).toBe(2); expect(summary.timings.navigationDurationMs).toBe(3500); expect(summary.signals.map((signal) => signal.kind)).toEqual( @@ -163,6 +169,7 @@ describe("summarizeReplay", () => { "/signup", "/signup", ]); + expect(focusedSummary.recording.focusedEventCount).toBe(4); expect(focusedSummary.counts.inputs).toBe(0); expect(focusedSummary.counts.scrolls).toBe(1); }); From 8185730dd924e0b19220e11f1b482537c3e3148c Mon Sep 17 00:00:00 2001 From: David Cramer Date: Mon, 4 May 2026 12:46:51 -0700 Subject: [PATCH 08/13] fix(replay): Fill client-filtered list pages Fetch additional replay pages when client-side filters are active so --limit applies to the filtered result set instead of only the first server page. Keep the loop bounded by the shared pagination limit and preserve the final server cursor for navigation. Refs GH-907 Co-Authored-By: OpenAI Codex --- src/commands/replay/list.ts | 103 +++++++++++++++++++++++++----- test/commands/replay/list.test.ts | 72 ++++++++++++++++++++- 2 files changed, 155 insertions(+), 20 deletions(-) diff --git a/src/commands/replay/list.ts b/src/commands/replay/list.ts index 2a203094f..e404c6d07 100644 --- a/src/commands/replay/list.ts +++ b/src/commands/replay/list.ts @@ -5,7 +5,9 @@ */ import type { SentryContext } from "../../context.js"; +import { MAX_PAGINATION_PAGES } from "../../lib/api/infrastructure.js"; import { + API_MAX_PER_PAGE, isReplaySortValue, listReplays, type ReplaySortValue, @@ -230,6 +232,83 @@ function replayMatchesRouteFilters( return true; } +function hasClientSideFilters(flags: ListFlags): boolean { + return Boolean( + flags.path || + flags["entry-path"] || + flags["exit-path"] || + flags.friction || + flags["problem-only"] + ); +} + +function replayMatchesClientFilters( + replay: ReplayListItem, + flags: ListFlags +): boolean { + if (!replayMatchesRouteFilters(replay, flags)) { + return false; + } + if 
(flags["problem-only"]) { + return hasErrorOrWarningSignals(replay); + } + return flags.friction ? hasFrictionSignals(replay) : true; +} + +type FetchReplayListOptions = { + cursor?: string; + environment?: string[]; + flags: ListFlags; + org: string; + project?: string; + query?: string; + timeRange: TimeRange; +}; + +async function fetchReplayListForCommand( + options: FetchReplayListOptions +): Promise<{ replays: ReplayListItem[]; nextCursor?: string }> { + const { environment, flags, org, project, query, timeRange } = options; + const shouldFillClientFilteredLimit = hasClientSideFilters(flags); + const requestedServerLimit = shouldFillClientFilteredLimit + ? API_MAX_PER_PAGE + : flags.limit; + const replays: ReplayListItem[] = []; + let pageCursor = options.cursor; + let nextCursor: string | undefined; + + for (let page = 0; page < MAX_PAGINATION_PAGES; page += 1) { + const pageResult = await listReplays(org, { + environment, + fields: [...REPLAY_LIST_FIELDS], + limit: requestedServerLimit, + query, + projectSlugs: project ? 
[project] : undefined, + sort: flags.sort, + cursor: pageCursor, + ...timeRangeToApiParams(timeRange), + }); + + nextCursor = pageResult.nextCursor; + replays.push( + ...pageResult.data.filter((replay) => + replayMatchesClientFilters(replay, flags) + ) + ); + + if ( + !shouldFillClientFilteredLimit || + replays.length >= flags.limit || + !nextCursor + ) { + break; + } + pageCursor = nextCursor; + } + + return { replays: replays.slice(0, flags.limit), nextCursor }; +} + const REPLAY_COLUMNS: Column[] = [ { header: "ID", @@ -517,32 +596,22 @@ export const listCommand = buildListCommand("replay", { contextKey ); - const { data: fetchedReplays, nextCursor } = await withProgress( + const { replays, nextCursor } = await withProgress( { message: `Fetching replays (up to ${flags.limit})...`, json: flags.json, }, () => - listReplays(resolved.org, { + fetchReplayListForCommand({ + cursor, environment, - fields: [...REPLAY_LIST_FIELDS], - limit: flags.limit, + flags, + org: resolved.org, + project: resolved.project, query, - projectSlugs: resolved.project ? [resolved.project] : undefined, - sort: flags.sort, - cursor, - ...timeRangeToApiParams(timeRange), + timeRange, }) ); - const replays = fetchedReplays.filter((replay) => { - if (!replayMatchesRouteFilters(replay, flags)) { - return false; - } - if (flags["problem-only"]) { - return hasErrorOrWarningSignals(replay); - } - return flags.friction ? 
hasFrictionSignals(replay) : true; - }); advancePaginationState(PAGINATION_KEY, contextKey, direction, nextCursor); const hasPrev = hasPreviousPage(PAGINATION_KEY, contextKey); diff --git a/test/commands/replay/list.test.ts b/test/commands/replay/list.test.ts index 52f3307fe..ff4c4d48a 100644 --- a/test/commands/replay/list.test.ts +++ b/test/commands/replay/list.test.ts @@ -204,7 +204,7 @@ describe("listCommand.func", () => { expect(listReplaysSpy).toHaveBeenCalledWith("test-org", { environment: undefined, fields: [...REPLAY_LIST_FIELDS], - limit: 25, + limit: apiClient.API_MAX_PER_PAGE, projectSlugs: ["cli"], query: "environment:production url:*/signup*", sort: "-started_at", @@ -291,7 +291,7 @@ describe("listCommand.func", () => { expect(listReplaysSpy).toHaveBeenCalledWith("test-org", { environment: undefined, fields: [...REPLAY_LIST_FIELDS], - limit: 25, + limit: apiClient.API_MAX_PER_PAGE, projectSlugs: ["cli"], query: "url:*/signup*", sort: "-started_at", @@ -305,6 +305,72 @@ describe("listCommand.func", () => { expect(parsed.data[0].urls[0]).toBe("https://example.com/signup/direct"); }); + test("fills the requested limit across client-filtered replay pages", async () => { + resolveTargetSpy.mockResolvedValue({ org: "test-org", project: "cli" }); + listReplaysSpy + .mockResolvedValueOnce({ + data: [ + { + ...sampleReplays[0]!, + urls: ["https://example.com/replays/?query=/signup"], + }, + ], + nextCursor: "0:100:0", + }) + .mockResolvedValueOnce({ + data: [ + { + ...sampleReplays[0]!, + id: "aaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaa", + urls: ["https://example.com/signup/direct"], + }, + ], + nextCursor: "0:200:0", + }); + + const { context, stdoutWrite } = createMockContext(); + const func = await listCommand.loader(); + await func.call( + context, + { + limit: 1, + json: true, + path: "/signup", + period: parsePeriod("7d"), + sort: "-started_at", + }, + "test-org/cli" + ); + + expect(listReplaysSpy).toHaveBeenCalledTimes(2); + 
expect(listReplaysSpy).toHaveBeenNthCalledWith(1, "test-org", { + environment: undefined, + fields: [...REPLAY_LIST_FIELDS], + limit: apiClient.API_MAX_PER_PAGE, + projectSlugs: ["cli"], + query: "url:*/signup*", + sort: "-started_at", + cursor: undefined, + statsPeriod: "7d", + }); + expect(listReplaysSpy).toHaveBeenNthCalledWith(2, "test-org", { + environment: undefined, + fields: [...REPLAY_LIST_FIELDS], + limit: apiClient.API_MAX_PER_PAGE, + projectSlugs: ["cli"], + query: "url:*/signup*", + sort: "-started_at", + cursor: "0:100:0", + statsPeriod: "7d", + }); + + const output = stdoutWrite.mock.calls.map((call) => call[0]).join(""); + const parsed = JSON.parse(output); + expect(parsed.data).toHaveLength(1); + expect(parsed.data[0].id).toBe("aaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaa"); + expect(parsed.nextCursor).toBe("0:200:0"); + }); + test("uses one server URL prefilter for positional path filters", async () => { resolveTargetSpy.mockResolvedValue({ org: "test-org", project: "cli" }); listReplaysSpy.mockResolvedValue({ data: sampleReplays }); @@ -327,7 +393,7 @@ describe("listCommand.func", () => { expect(listReplaysSpy).toHaveBeenCalledWith("test-org", { environment: undefined, fields: [...REPLAY_LIST_FIELDS], - limit: 25, + limit: apiClient.API_MAX_PER_PAGE, projectSlugs: ["cli"], query: "url:*/signup*", sort: "-started_at", From 8c8f33864b0dcf70a583dba3632111b51a6a7048 Mon Sep 17 00:00:00 2001 From: David Cramer Date: Mon, 4 May 2026 12:53:51 -0700 Subject: [PATCH 09/13] fix(replay): Avoid duplicate URL filters When --url is provided with route path flags, use the explicit URL value as the single server-side URL prefilter and leave path/entry/exit semantics to client-side filtering. This avoids accidental AND narrowing from duplicate url: search tokens. 
Refs GH-907 Co-Authored-By: OpenAI Codex --- src/commands/replay/list.ts | 6 ++--- test/commands/replay/list.test.ts | 42 +++++++++++++++++++++++++++++++ 2 files changed, 45 insertions(+), 3 deletions(-) diff --git a/src/commands/replay/list.ts b/src/commands/replay/list.ts index e404c6d07..07e1040d1 100644 --- a/src/commands/replay/list.ts +++ b/src/commands/replay/list.ts @@ -180,9 +180,9 @@ function buildReplaySearchQuery(filters: { exitPath?: string; }): string | undefined { const { entryPath, exitPath, path, query, url } = filters; - // Replay search only has a generic visited-URL field. Use one broad server - // prefilter, then apply entry/exit position checks against the URL list below. - const routePathPrefilter = path ?? entryPath ?? exitPath; + // Replay search only has a generic visited-URL field. Use at most one broad + // server prefilter, then apply path and position checks against the URL list. + const routePathPrefilter = url ? undefined : (path ?? entryPath ?? exitPath); const parts = [ query, url ? 
`url:${quoteSearchValue(wildcardSearchValue(url))}` : undefined, diff --git a/test/commands/replay/list.test.ts b/test/commands/replay/list.test.ts index ff4c4d48a..35f19ab74 100644 --- a/test/commands/replay/list.test.ts +++ b/test/commands/replay/list.test.ts @@ -402,6 +402,48 @@ describe("listCommand.func", () => { }); }); + test("does not duplicate server URL filters when --url and --path combine", async () => { + resolveTargetSpy.mockResolvedValue({ org: "test-org", project: "cli" }); + listReplaysSpy.mockResolvedValue({ + data: [ + { + ...sampleReplays[0]!, + urls: ["https://example.com/signup/direct"], + }, + ], + }); + + const { context, stdoutWrite } = createMockContext(); + const func = await listCommand.loader(); + await func.call( + context, + { + limit: 25, + json: true, + path: "/signup", + period: parsePeriod("7d"), + sort: "-started_at", + url: "example.com", + }, + "test-org/cli" + ); + + expect(listReplaysSpy).toHaveBeenCalledWith("test-org", { + environment: undefined, + fields: [...REPLAY_LIST_FIELDS], + limit: apiClient.API_MAX_PER_PAGE, + projectSlugs: ["cli"], + query: "url:*example.com*", + sort: "-started_at", + cursor: undefined, + statsPeriod: "7d", + }); + + const output = stdoutWrite.mock.calls.map((call) => call[0]).join(""); + const parsed = JSON.parse(output); + expect(parsed.data).toHaveLength(1); + }); + test("renders human output with a replay hint", async () => { resolveTargetSpy.mockResolvedValue({ org: "test-org", project: "cli" }); listReplaysSpy.mockResolvedValue({ data: sampleReplays }); From 97de06c1c5f53cfbd0fefa2c250fd2a1abad0138 Mon Sep 17 00:00:00 2001 From: David Cramer Date: Mon, 4 May 2026 12:59:45 -0700 Subject: [PATCH 10/13] fix(replay): Preserve filtered replay pagination Store a mid-page cursor when client-side replay filters fill a result page before the underlying API page is exhausted. This keeps subsequent -c next calls from skipping matching replays on path or friction-filtered searches. 
Refs GH-907 Co-Authored-By: OpenAI Codex --- src/commands/replay/list.ts | 125 ++++++++++++++++++++++++++---- test/commands/replay/list.test.ts | 94 ++++++++++++++++++++++ 2 files changed, 205 insertions(+), 14 deletions(-) diff --git a/src/commands/replay/list.ts b/src/commands/replay/list.ts index 07e1040d1..856129d31 100644 --- a/src/commands/replay/list.ts +++ b/src/commands/replay/list.ts @@ -128,6 +128,37 @@ const PAGINATION_KEY = "replay-list"; const COMMAND_NAME = "replay list"; const SIMPLE_SEARCH_VALUE_RE = /^[^\s:"]+$/; +function encodeReplayCursor( + serverCursor: string | undefined, + afterReplayId?: string +): string | undefined { + if (afterReplayId) { + return `${serverCursor ?? ""}|${afterReplayId}`; + } + return serverCursor; +} + +function decodeReplayCursor(cursor: string | undefined): { + serverCursor: string | undefined; + afterReplayId: string | undefined; +} { + if (!cursor) { + return { serverCursor: undefined, afterReplayId: undefined }; + } + + const pipeIndex = cursor.lastIndexOf("|"); + if (pipeIndex === -1) { + return { serverCursor: cursor, afterReplayId: undefined }; + } + + const serverCursor = cursor.slice(0, pipeIndex); + const afterReplayId = cursor.slice(pipeIndex + 1); + return { + serverCursor: serverCursor || undefined, + afterReplayId: afterReplayId || undefined, + }; +} + function parseLimit(value: string): number { return validateLimit(value, LIST_MIN_LIMIT, LIST_MAX_LIMIT); } @@ -265,6 +296,57 @@ type FetchReplayListOptions = { timeRange: TimeRange; }; +type FilteredPageResult = { + filled: boolean; + cursorToStore: string | undefined; +}; + +function replayStartIndex( + replays: ReplayListItem[], + afterReplayId: string | undefined +): number { + if (!afterReplayId) { + return 0; + } + + const afterIndex = replays.findIndex((replay) => replay.id === afterReplayId); + return afterIndex === -1 ? 
0 : afterIndex + 1; +} + +function processFilteredReplayPage( + pageReplays: ReplayListItem[], + results: ReplayListItem[], + flags: ListFlags, + options: { + serverCursor: string | undefined; + afterReplayId: string | undefined; + nextCursor: string | undefined; + } +): FilteredPageResult { + const startIndex = replayStartIndex(pageReplays, options.afterReplayId); + + for (let index = startIndex; index < pageReplays.length; index += 1) { + const replay = pageReplays[index] as ReplayListItem; + if (!replayMatchesClientFilters(replay, flags)) { + continue; + } + + results.push(replay); + if (results.length >= flags.limit) { + let cursorToStore = encodeReplayCursor(options.serverCursor, replay.id); + if ( + cursorToStore === + encodeReplayCursor(options.serverCursor, pageReplays.at(-1)?.id) + ) { + cursorToStore = options.nextCursor; + } + return { filled: true, cursorToStore }; + } + } + + return { filled: false, cursorToStore: undefined }; +} + async function fetchReplayListForCommand( options: FetchReplayListOptions ): Promise<{ replays: ReplayListItem[]; nextCursor?: string }> { @@ -274,8 +356,10 @@ async function fetchReplayListForCommand( ? 
API_MAX_PER_PAGE : flags.limit; const replays: ReplayListItem[] = []; - let pageCursor = options.cursor; - let nextCursor: string | undefined; + const decodedCursor = decodeReplayCursor(options.cursor); + let pageCursor = decodedCursor.serverCursor; + let afterReplayId = decodedCursor.afterReplayId; + let cursorToStore: string | undefined; for (let page = 0; page < MAX_PAGINATION_PAGES; page += 1) { const pageResult = await listReplays(org, { @@ -289,24 +373,37 @@ async function fetchReplayListForCommand( ...timeRangeToApiParams(timeRange), }); - nextCursor = pageResult.nextCursor; - replays.push( - ...pageResult.data.filter((replay) => - replayMatchesClientFilters(replay, flags) - ) + if (!shouldFillClientFilteredLimit) { + return { + replays: pageResult.data.slice(0, flags.limit), + nextCursor: pageResult.nextCursor, + }; + } + + const processed = processFilteredReplayPage( + pageResult.data, + replays, + flags, + { + serverCursor: pageCursor, + afterReplayId, + nextCursor: pageResult.nextCursor, + } ); + afterReplayId = undefined; + + if (processed.filled) { + return { replays, nextCursor: processed.cursorToStore }; + } - if ( - !shouldFillClientFilteredLimit || - replays.length >= flags.limit || - !nextCursor - ) { + cursorToStore = pageResult.nextCursor; + if (!pageResult.nextCursor) { break; } - pageCursor = nextCursor; + pageCursor = pageResult.nextCursor; } - return { replays: replays.slice(0, flags.limit), nextCursor }; + return { replays, nextCursor: cursorToStore }; } const REPLAY_COLUMNS: Column[] = [ diff --git a/test/commands/replay/list.test.ts b/test/commands/replay/list.test.ts index 35f19ab74..321273d9e 100644 --- a/test/commands/replay/list.test.ts +++ b/test/commands/replay/list.test.ts @@ -371,6 +371,100 @@ describe("listCommand.func", () => { expect(parsed.nextCursor).toBe("0:200:0"); }); + test("stores a mid-page cursor when client filters fill before page end", async () => { + resolveTargetSpy.mockResolvedValue({ org: "test-org", project: 
"cli" }); + listReplaysSpy.mockResolvedValueOnce({ + data: [ + { + ...sampleReplays[0]!, + id: "11111111111111111111111111111111", + urls: ["https://example.com/signup/one"], + }, + { + ...sampleReplays[0]!, + id: "22222222222222222222222222222222", + urls: ["https://example.com/signup/two"], + }, + ], + nextCursor: "0:100:0", + }); + + const { context, stdoutWrite } = createMockContext(); + const func = await listCommand.loader(); + await func.call( + context, + { + limit: 1, + json: true, + path: "/signup", + period: parsePeriod("7d"), + sort: "-started_at", + }, + "test-org/cli" + ); + + const output = stdoutWrite.mock.calls.map((call) => call[0]).join(""); + const parsed = JSON.parse(output); + expect(parsed.data).toHaveLength(1); + expect(parsed.data[0].id).toBe("11111111111111111111111111111111"); + expect(parsed.nextCursor).toBe("|11111111111111111111111111111111"); + }); + + test("resumes from a mid-page cursor for client-filtered replays", async () => { + resolveTargetSpy.mockResolvedValue({ org: "test-org", project: "cli" }); + resolveCursorSpy.mockReturnValueOnce({ + cursor: "|11111111111111111111111111111111", + direction: "next" as const, + }); + listReplaysSpy.mockResolvedValueOnce({ + data: [ + { + ...sampleReplays[0]!, + id: "11111111111111111111111111111111", + urls: ["https://example.com/signup/one"], + }, + { + ...sampleReplays[0]!, + id: "22222222222222222222222222222222", + urls: ["https://example.com/signup/two"], + }, + ], + nextCursor: "0:100:0", + }); + + const { context, stdoutWrite } = createMockContext(); + const func = await listCommand.loader(); + await func.call( + context, + { + cursor: "next", + limit: 1, + json: true, + path: "/signup", + period: parsePeriod("7d"), + sort: "-started_at", + }, + "test-org/cli" + ); + + expect(listReplaysSpy).toHaveBeenCalledWith("test-org", { + environment: undefined, + fields: [...REPLAY_LIST_FIELDS], + limit: apiClient.API_MAX_PER_PAGE, + projectSlugs: ["cli"], + query: "url:*/signup*", + sort: 
"-started_at", + cursor: undefined, + statsPeriod: "7d", + }); + + const output = stdoutWrite.mock.calls.map((call) => call[0]).join(""); + const parsed = JSON.parse(output); + expect(parsed.data).toHaveLength(1); + expect(parsed.data[0].id).toBe("22222222222222222222222222222222"); + expect(parsed.nextCursor).toBe("0:100:0"); + }); + test("uses one server URL prefilter for positional path filters", async () => { resolveTargetSpy.mockResolvedValue({ org: "test-org", project: "cli" }); listReplaysSpy.mockResolvedValue({ data: sampleReplays }); From bcea1b628036125cb273c259a76c3e71072f6f59 Mon Sep 17 00:00:00 2001 From: David Cramer Date: Mon, 4 May 2026 13:07:01 -0700 Subject: [PATCH 11/13] fix(replay): Validate event window flags Reject --before and --after unless --around is also present. This prevents replay event list from silently ignoring window options and fails before fetching replay data. Refs GH-907 Co-Authored-By: OpenAI Codex --- src/commands/replay/event/list.ts | 10 ++++++++-- test/commands/replay/event-list.test.ts | 23 +++++++++++++++++++++++ 2 files changed, 31 insertions(+), 2 deletions(-) diff --git a/src/commands/replay/event/list.ts b/src/commands/replay/event/list.ts index 8e9496e4a..bc1bf5976 100644 --- a/src/commands/replay/event/list.ts +++ b/src/commands/replay/event/list.ts @@ -128,6 +128,12 @@ function resolveWindow(flags: EventListFlags): { } if (flags.around === undefined) { + if (flags.before !== undefined || flags.after !== undefined) { + throw new ValidationError( + "--before and --after require --around", + flags.before !== undefined ? 
"before" : "after" + ); + } return { fromMs: flags.from, toMs: flags.to }; } @@ -350,6 +356,8 @@ export const listCommand = buildCommand({ async *func(this: SentryContext, flags: EventListFlags, ...args: string[]) { validateJsonlMode(flags); applyFreshFlag(flags); + const kinds = parseEventKinds(flags.kind); + const window = resolveWindow(flags); const parsedArgs = parseReplayTargetArgs(args, USAGE_HINT); const replayId = validateHexId(parsedArgs.replayId, "replay ID"); @@ -386,8 +394,6 @@ export const listCommand = buildCommand({ json: flags.json, }); - const kinds = parseEventKinds(flags.kind); - const window = resolveWindow(flags); const allEvents = extractNormalizedReplayEvents(replay, segments, { includeRaw: flags.raw, }); diff --git a/test/commands/replay/event-list.test.ts b/test/commands/replay/event-list.test.ts index 0c1264df1..07ed0ce48 100644 --- a/test/commands/replay/event-list.test.ts +++ b/test/commands/replay/event-list.test.ts @@ -173,4 +173,27 @@ describe("replay event list", () => { expect(JSON.parse(lines[0]!).kind).toBe("navigation"); expect(JSON.parse(lines[1]!).kind).toBe("click"); }); + + test("rejects before or after windows without around", async () => { + const { context } = createMockContext(); + const func = await listCommand.loader(); + + await expect( + func.call( + context, + { + before: 5000, + fresh: false, + json: true, + jsonl: false, + limit: 10, + raw: false, + }, + `test-org/cli/${REPLAY_ID}` + ) + ).rejects.toThrow("--before and --after require --around"); + + expect(getReplaySpy).not.toHaveBeenCalled(); + expect(getReplayRecordingSegmentsSpy).not.toHaveBeenCalled(); + }); }); From 8b9a386c1e0d9cbf2df6a4684264b8ae979f7e33 Mon Sep 17 00:00:00 2001 From: David Cramer Date: Mon, 4 May 2026 13:34:43 -0700 Subject: [PATCH 12/13] fix(replay): Cap replay list page size Fetch multiple API pages when an unfiltered replay list asks for more than the API page size. 
Reuse mid-page cursor bookmarks so pagination can resume without duplicating rows. Refs GH-907 Co-Authored-By: OpenAI Codex --- src/commands/replay/list.ts | 38 +++++----- test/commands/replay/list.test.ts | 117 ++++++++++++++++++++++++++++++ 2 files changed, 135 insertions(+), 20 deletions(-) diff --git a/src/commands/replay/list.ts b/src/commands/replay/list.ts index 856129d31..0f3039827 100644 --- a/src/commands/replay/list.ts +++ b/src/commands/replay/list.ts @@ -296,7 +296,7 @@ type FetchReplayListOptions = { timeRange: TimeRange; }; -type FilteredPageResult = { +type ReplayPageResult = { filled: boolean; cursorToStore: string | undefined; }; @@ -313,7 +313,7 @@ function replayStartIndex( return afterIndex === -1 ? 0 : afterIndex + 1; } -function processFilteredReplayPage( +function processReplayPage( pageReplays: ReplayListItem[], results: ReplayListItem[], flags: ListFlags, @@ -322,11 +322,10 @@ function processFilteredReplayPage( afterReplayId: string | undefined; nextCursor: string | undefined; } -): FilteredPageResult { +): ReplayPageResult { const startIndex = replayStartIndex(pageReplays, options.afterReplayId); - for (let index = startIndex; index < pageReplays.length; index += 1) { - const replay = pageReplays[index] as ReplayListItem; + for (const replay of pageReplays.slice(startIndex)) { if (!replayMatchesClientFilters(replay, flags)) { continue; } @@ -351,12 +350,16 @@ async function fetchReplayListForCommand( options: FetchReplayListOptions ): Promise<{ replays: ReplayListItem[]; nextCursor?: string }> { const { environment, flags, org, project, query, timeRange } = options; - const shouldFillClientFilteredLimit = hasClientSideFilters(flags); - const requestedServerLimit = shouldFillClientFilteredLimit + const decodedCursor = decodeReplayCursor(options.cursor); + const hasClientFilters = hasClientSideFilters(flags); + const needsFullPageScan = + hasClientFilters || Boolean(decodedCursor.afterReplayId); + const shouldFetchMultiplePages = + 
needsFullPageScan || flags.limit > API_MAX_PER_PAGE; + const requestedServerLimit = needsFullPageScan ? API_MAX_PER_PAGE - : flags.limit; + : Math.min(flags.limit, API_MAX_PER_PAGE); const replays: ReplayListItem[] = []; - const decodedCursor = decodeReplayCursor(options.cursor); let pageCursor = decodedCursor.serverCursor; let afterReplayId = decodedCursor.afterReplayId; let cursorToStore: string | undefined; @@ -373,23 +376,18 @@ async function fetchReplayListForCommand( ...timeRangeToApiParams(timeRange), }); - if (!shouldFillClientFilteredLimit) { + if (!shouldFetchMultiplePages) { return { replays: pageResult.data.slice(0, flags.limit), nextCursor: pageResult.nextCursor, }; } - const processed = processFilteredReplayPage( - pageResult.data, - replays, - flags, - { - serverCursor: pageCursor, - afterReplayId, - nextCursor: pageResult.nextCursor, - } - ); + const processed = processReplayPage(pageResult.data, replays, flags, { + serverCursor: pageCursor, + afterReplayId, + nextCursor: pageResult.nextCursor, + }); afterReplayId = undefined; if (processed.filled) { diff --git a/test/commands/replay/list.test.ts b/test/commands/replay/list.test.ts index 321273d9e..f0ce69ff9 100644 --- a/test/commands/replay/list.test.ts +++ b/test/commands/replay/list.test.ts @@ -63,6 +63,19 @@ describe("listCommand.func", () => { }, ]; + function replayWithIndex(index: number): ReplayListItem { + return { + ...sampleReplays[0]!, + id: index.toString(16).padStart(32, "0"), + }; + } + + function replayPage(start: number, count: number): ReplayListItem[] { + return Array.from({ length: count }, (_, offset) => + replayWithIndex(start + offset) + ); + } + function createMockContext() { const stdoutWrite = mock(() => true); return { @@ -465,6 +478,110 @@ describe("listCommand.func", () => { expect(parsed.nextCursor).toBe("0:100:0"); }); + test("caps unfiltered replay API page size when filling large limits", async () => { + resolveTargetSpy.mockResolvedValue({ org: "test-org", project: 
"cli" }); + listReplaysSpy + .mockResolvedValueOnce({ + data: replayPage(0, apiClient.API_MAX_PER_PAGE), + nextCursor: "0:100:0", + }) + .mockResolvedValueOnce({ + data: replayPage(100, apiClient.API_MAX_PER_PAGE), + nextCursor: "0:200:0", + }) + .mockResolvedValueOnce({ + data: replayPage(200, apiClient.API_MAX_PER_PAGE), + nextCursor: "0:300:0", + }); + + const { context, stdoutWrite } = createMockContext(); + const func = await listCommand.loader(); + await func.call( + context, + { + limit: 250, + json: true, + period: parsePeriod("7d"), + sort: "-started_at", + }, + "test-org/cli" + ); + + expect(listReplaysSpy).toHaveBeenCalledTimes(3); + for (const [callIndex, cursor] of [ + undefined, + "0:100:0", + "0:200:0", + ].entries()) { + expect(listReplaysSpy).toHaveBeenNthCalledWith( + callIndex + 1, + "test-org", + { + environment: undefined, + fields: [...REPLAY_LIST_FIELDS], + limit: apiClient.API_MAX_PER_PAGE, + projectSlugs: ["cli"], + query: undefined, + sort: "-started_at", + cursor, + statsPeriod: "7d", + } + ); + } + + const output = stdoutWrite.mock.calls.map((call) => call[0]).join(""); + const parsed = JSON.parse(output); + const lastReplay = replayWithIndex(249); + expect(parsed.data).toHaveLength(250); + expect(parsed.data[249].id).toBe(lastReplay.id); + expect(parsed.nextCursor).toBe(`0:200:0|${lastReplay.id}`); + }); + + test("resumes from a mid-page cursor for unfiltered replays", async () => { + resolveTargetSpy.mockResolvedValue({ org: "test-org", project: "cli" }); + resolveCursorSpy.mockReturnValueOnce({ + cursor: `0:200:0|${replayWithIndex(249).id}`, + direction: "next" as const, + }); + listReplaysSpy.mockResolvedValueOnce({ + data: replayPage(200, apiClient.API_MAX_PER_PAGE), + nextCursor: "0:300:0", + }); + + const { context, stdoutWrite } = createMockContext(); + const func = await listCommand.loader(); + await func.call( + context, + { + cursor: "next", + limit: 25, + json: true, + period: parsePeriod("7d"), + sort: "-started_at", + }, + 
"test-org/cli" + ); + + expect(listReplaysSpy).toHaveBeenCalledWith("test-org", { + environment: undefined, + fields: [...REPLAY_LIST_FIELDS], + limit: apiClient.API_MAX_PER_PAGE, + projectSlugs: ["cli"], + query: undefined, + sort: "-started_at", + cursor: "0:200:0", + statsPeriod: "7d", + }); + + const output = stdoutWrite.mock.calls.map((call) => call[0]).join(""); + const parsed = JSON.parse(output); + const lastReplay = replayWithIndex(274); + expect(parsed.data).toHaveLength(25); + expect(parsed.data[0].id).toBe(replayWithIndex(250).id); + expect(parsed.data[24].id).toBe(lastReplay.id); + expect(parsed.nextCursor).toBe(`0:200:0|${lastReplay.id}`); + }); + test("uses one server URL prefilter for positional path filters", async () => { resolveTargetSpy.mockResolvedValue({ org: "test-org", project: "cli" }); listReplaysSpy.mockResolvedValue({ data: sampleReplays }); From 17b8e28433c72e2de654309f0a03485a71296450 Mon Sep 17 00:00:00 2001 From: David Cramer Date: Wed, 6 May 2026 13:28:30 -0700 Subject: [PATCH 13/13] feat(replay): Simplify replay event MVP Reduce replay list and event commands to the reviewed MVP surface. Use shared search, pagination, and JSON output hooks instead of bespoke filters and JSONL flags. 
Co-Authored-By: OpenAI Codex --- docs/src/fragments/commands/replay.md | 15 +- plugins/sentry-cli/skills/sentry-cli/SKILL.md | 2 +- .../skills/sentry-cli/references/replay.md | 31 +- script/generate-docs-sections.ts | 8 +- src/commands/replay/event/list.ts | 218 +++------ src/commands/replay/index.ts | 4 +- src/commands/replay/list.ts | 372 +-------------- src/lib/command.ts | 2 +- src/lib/formatters/output.ts | 8 + test/commands/replay/event-list.test.ts | 93 ++-- test/commands/replay/list.test.ts | 448 +----------------- test/lib/command.test.ts | 24 + 12 files changed, 210 insertions(+), 1015 deletions(-) diff --git a/docs/src/fragments/commands/replay.md b/docs/src/fragments/commands/replay.md index e8f26adb8..00aa977be 100644 --- a/docs/src/fragments/commands/replay.md +++ b/docs/src/fragments/commands/replay.md @@ -8,16 +8,14 @@ sentry replay list my-org/frontend # Search across all projects in an org -sentry replay list my-org/ --query "environment:production" +sentry replay list my-org/ --search "environment:production" # Change the time window and sort sentry replay list my-org/frontend --period 24h --sort errors -# Find recent sessions that actually visited a route path -sentry replay list my-org/frontend --path /signup --json - -# Find recent sessions with indexed friction signals -sentry replay list my-org/frontend --path /signup --friction --json +# Find recent sessions with replay search syntax +sentry replay list my-org/frontend \ + --search "url:*signup* count_errors:>0" --json # Paginate through results sentry replay list my-org/frontend -c next @@ -62,12 +60,9 @@ sentry replay events my-org/346789a703f6454384f1de473b8b9fcc --json # Focus on user actions and failures on a page sentry replay events my-org/346789a703f6454384f1de473b8b9fcc \ - --path /signup --kind click,network,console,error --json + /signup --kind click,network,console,error --json # Pull an evidence window around a timestamp sentry replay events 
my-org/346789a703f6454384f1de473b8b9fcc \ --around 01:23 --json - -# Emit newline-delimited JSON for large timelines -sentry replay events my-org/346789a703f6454384f1de473b8b9fcc --json --jsonl ``` diff --git a/plugins/sentry-cli/skills/sentry-cli/SKILL.md b/plugins/sentry-cli/skills/sentry-cli/SKILL.md index dca697e39..6b1377b37 100644 --- a/plugins/sentry-cli/skills/sentry-cli/SKILL.md +++ b/plugins/sentry-cli/skills/sentry-cli/SKILL.md @@ -367,7 +367,7 @@ Manage Sentry dashboards Search and inspect Session Replays -- `sentry replay event list ` — List normalized events from a Session Replay +- `sentry replay event list ` — List normalized events from a Session Replay - `sentry replay list ` — List recent Session Replays - `sentry replay summarize ` — Summarize Session Replay behavior - `sentry replay view ` — View a Session Replay diff --git a/plugins/sentry-cli/skills/sentry-cli/references/replay.md b/plugins/sentry-cli/skills/sentry-cli/references/replay.md index 902a8650b..0705e83b7 100644 --- a/plugins/sentry-cli/skills/sentry-cli/references/replay.md +++ b/plugins/sentry-cli/skills/sentry-cli/references/replay.md @@ -11,24 +11,17 @@ requires: Search and inspect Session Replays -### `sentry replay event list ` +### `sentry replay event list ` List normalized events from a Session Replay **Flags:** - `-k, --kind ... 
- Event kind filter (navigation, click, tap, input, focus, blur, scroll, viewport, mutation, dom-snapshot, breadcrumb, network, console, error, span, web-vital, memory, video, mobile, unknown)` -- `-u, --url - Filter events by current or target URL substring` - `--path - Filter events by parsed URL pathname` -- `-q, --contains - Filter events by text in labels, messages, URLs, selectors, or data` -- `--selector - Filter events by selector substring` -- `--from - Start offset (seconds, 90s, 01:23, or 1:02:03)` -- `--to - End offset (seconds, 90s, 01:23, or 1:02:03)` -- `--around - Center an evidence window around this offset` -- `--before - Window before --around (default: 10s)` -- `--after - Window after --around (default: 30s)` +- `-q, --search - Filter events by text in labels, messages, URLs, selectors, or data` +- `--around - Show an evidence window around this replay offset` - `-n, --limit - Number of events (1-1000) - (default: "200")` - `--raw - Include raw source frame payloads in JSON output` -- `--jsonl - Emit one JSON object per event (requires --json)` - `-f, --fresh - Bypass cache, re-detect projects, and fetch fresh data` **JSON Fields** (use `--json --fields` to select specific fields): @@ -60,13 +53,7 @@ List recent Session Replays **Flags:** - `-n, --limit - Number of replays (1-1000) - (default: "25")` -- `-q, --query - Search query (Sentry replay search syntax)` -- `-u, --url - Filter by visited URL text using replay search` -- `--path - Filter by actual visited URL pathname` -- `--entry-path - Filter by first visited URL pathname` -- `--exit-path - Filter by last visited URL pathname` -- `--friction - Only show replays with indexed friction signals (errors, warnings, rage clicks, or dead clicks)` -- `--problem-only - Only show replays with indexed errors or warnings` +- `-q, --search - Search query (Sentry replay search syntax)` - `-e, --environment ... 
- Filter by environment (repeatable, comma-separated)` - `-s, --sort - Sort by: date, oldest, duration, errors, warnings, rage, dead, activity, or a raw replay sort field - (default: "date")` - `-t, --period - Time range: "7d", "2026-04-01..2026-05-01", ">=2026-04-01" - (default: "7d")` @@ -116,16 +103,14 @@ List recent Session Replays sentry replay list my-org/frontend # Search across all projects in an org -sentry replay list my-org/ --query "environment:production" +sentry replay list my-org/ --search "environment:production" # Change the time window and sort sentry replay list my-org/frontend --period 24h --sort errors -# Find recent sessions that actually visited a route path -sentry replay list my-org/frontend --path /signup --json - -# Find recent sessions with indexed friction signals -sentry replay list my-org/frontend --path /signup --friction --json +# Find recent sessions with replay search syntax +sentry replay list my-org/frontend \ + --search "url:*signup* count_errors:>0" --json # Paginate through results sentry replay list my-org/frontend -c next diff --git a/script/generate-docs-sections.ts b/script/generate-docs-sections.ts index bd7994878..16bd6a22b 100644 --- a/script/generate-docs-sections.ts +++ b/script/generate-docs-sections.ts @@ -132,15 +132,15 @@ function isStandaloneCommand(route: RouteInfo): boolean { */ function getSubcommandNames(route: RouteInfo): string[] { const prefix = `sentry ${route.name} `; - return [ - ...new Set( + return Array.from( + new Set( route.commands.map((cmd) => cmd.path.startsWith(prefix) ? cmd.path.slice(prefix.length) : (cmd.path.split(" ").at(-1) ?? 
route.name) ) - ), - ]; + ) + ); } /** diff --git a/src/commands/replay/event/list.ts b/src/commands/replay/event/list.ts index bc1bf5976..a57de6c30 100644 --- a/src/commands/replay/event/list.ts +++ b/src/commands/replay/event/list.ts @@ -13,7 +13,11 @@ import { formatTable, } from "../../../lib/formatters/index.js"; import { filterFields } from "../../../lib/formatters/json.js"; -import { CommandOutput } from "../../../lib/formatters/output.js"; +import { + CommandOutput, + formatFooter, + type HumanRenderer, +} from "../../../lib/formatters/output.js"; import type { Column } from "../../../lib/formatters/table.js"; import { formatDurationCompactMs } from "../../../lib/formatters/time-utils.js"; import { validateHexId } from "../../../lib/hex-id.js"; @@ -45,38 +49,20 @@ import { import { parseReplayTargetArgs } from "../target.js"; type EventListFlags = { - readonly after?: number; readonly around?: number; - readonly before?: number; - readonly contains?: string; readonly fields?: string[]; readonly fresh: boolean; - readonly from?: number; readonly json: boolean; - readonly jsonl: boolean; readonly kind?: readonly string[]; readonly limit: number; readonly path?: string; readonly raw: boolean; - readonly selector?: string; - readonly to?: number; - readonly url?: string; -}; - -type EventListResult = { - events: ReplayEvent[]; - total: number; - truncated: boolean; - replayId: string; - org: string; - project?: string; + readonly search?: string; }; -type ReplayEventOutput = EventListResult | ReplayEvent; - const COMMAND_NAME = "replay event list"; const USAGE_HINT = - "sentry replay event list [//] | "; + "sentry replay event list [//] [path] | [path]"; const DEFAULT_LIMIT = 200; const DEFAULT_BEFORE_MS = 10_000; const DEFAULT_AFTER_MS = 30_000; @@ -117,32 +103,32 @@ function resolveWindow(flags: EventListFlags): { fromMs?: number; toMs?: number; } { - if ( - flags.around !== undefined && - (flags.from !== undefined || flags.to !== undefined) - ) { - throw new 
ValidationError( - "--around cannot be combined with --from or --to", - "around" - ); + if (flags.around === undefined) { + return {}; } - if (flags.around === undefined) { - if (flags.before !== undefined || flags.after !== undefined) { + return { + fromMs: Math.max(0, flags.around - DEFAULT_BEFORE_MS), + toMs: flags.around + DEFAULT_AFTER_MS, + }; +} + +function splitTargetAndPathArgs( + args: string[], + flagPath: string | undefined +): { targetArgs: string[]; path?: string } { + const lastArg = args.at(-1); + if (args.length > 1 && lastArg?.startsWith("/")) { + if (flagPath) { throw new ValidationError( - "--before and --after require --around", - flags.before !== undefined ? "before" : "after" + "Path provided both positionally and with --path", + "path" ); } - return { fromMs: flags.from, toMs: flags.to }; + return { targetArgs: args.slice(0, -1), path: lastArg }; } - const before = flags.before ?? DEFAULT_BEFORE_MS; - const after = flags.after ?? DEFAULT_AFTER_MS; - return { - fromMs: Math.max(0, flags.around - before), - toMs: flags.around + after, - }; + return { targetArgs: args, path: flagPath }; } function eventLabel(event: ReplayEvent): string { @@ -187,50 +173,33 @@ const EVENT_COLUMNS: Column[] = [ }, ]; -function formatEventListHuman(result: EventListResult): string { - if (result.events.length === 0) { - return "No replay events matched the filters."; - } - - const scope = result.project - ? `${result.org}/${result.project}` - : `${result.org}`; - return ( - `Replay events for ${scope}/${result.replayId.slice(0, 8)}:\n\n` + - formatTable(result.events, EVENT_COLUMNS, { truncate: true }) - ); -} +function createEventListHumanRenderer(): HumanRenderer { + const events: ReplayEvent[] = []; + return { + render(event) { + events.push(event); + return ""; + }, + finalize(hint) { + if (events.length === 0) { + return `No replay events matched the filters.${hint ? 
formatFooter(hint) : "\n"}`; + } -function isEventListResult(data: ReplayEventOutput): data is EventListResult { - return "events" in data; + const replayId = events[0]?.replayId; + const title = replayId + ? `Replay events for ${replayId.slice(0, 8)}:` + : "Replay events:"; + const output = `${title}\n\n${formatTable(events, EVENT_COLUMNS, { truncate: true })}`; + return hint ? `${output}${formatFooter(hint)}` : `${output}\n`; + }, + }; } -function jsonTransformEventOutput( - data: ReplayEventOutput, +function jsonTransformReplayEvent( + event: ReplayEvent, fields?: string[] ): unknown { - if (!isEventListResult(data)) { - return fields && fields.length > 0 ? filterFields(data, fields) : data; - } - - const items = - fields && fields.length > 0 - ? data.events.map((event) => filterFields(event, fields)) - : data.events; - return { - data: items, - total: data.total, - truncated: data.truncated, - replayId: data.replayId, - org: data.org, - project: data.project, - }; -} - -function validateJsonlMode(flags: EventListFlags): void { - if (flags.jsonl && !flags.json) { - throw new ValidationError("--jsonl requires --json", "jsonl"); - } + return fields && fields.length > 0 ? 
filterFields(event, fields) : event; } export const listCommand = buildCommand({ @@ -243,24 +212,25 @@ export const listCommand = buildCommand({ " / - explicit organization\n" + " // - explicit org/project context\n" + " - parse org and replay ID from a Sentry URL\n\n" + + "Add a trailing /path argument to focus the timeline on one route.\n\n" + "Examples:\n" + " sentry replay events sentry/346789a703f6454384f1de473b8b9fcc --json\n" + - " sentry replay event list sentry/cli/346789a703f6454384f1de473b8b9fcc --kind click,network,error\n" + - " sentry replay events sentry/346789a703f6454384f1de473b8b9fcc --path /signup --json\n" + - " sentry replay events sentry/346789a703f6454384f1de473b8b9fcc --around 01:23 --json\n" + - " sentry replay events sentry/346789a703f6454384f1de473b8b9fcc --json --jsonl", + " sentry replay events sentry/cli/346789a703f6454384f1de473b8b9fcc --kind click,network,error --json\n" + + ' sentry replay events sentry/346789a703f6454384f1de473b8b9fcc /signup -q "button[type=submit]" --json\n' + + " sentry replay events sentry/346789a703f6454384f1de473b8b9fcc --around 01:23 --json", }, output: { - human: formatEventListHuman, - jsonTransform: jsonTransformEventOutput, + human: createEventListHumanRenderer, + jsonTransform: jsonTransformReplayEvent, + jsonLines: true, schema: ReplayEventSchema, }, parameters: { positional: { kind: "array", parameter: { - placeholder: "replay-id-or-url", - brief: "[/] or ", + placeholder: "replay-target", + brief: "[/] [path] or [path]", parse: String, }, }, @@ -272,59 +242,23 @@ export const listCommand = buildCommand({ variadic: true, optional: true, }, - url: { - kind: "parsed", - parse: String, - brief: "Filter events by current or target URL substring", - optional: true, - }, path: { kind: "parsed", parse: String, brief: "Filter events by parsed URL pathname", optional: true, }, - contains: { + search: { kind: "parsed", parse: String, brief: "Filter events by text in labels, messages, URLs, selectors, or data", 
optional: true, }, - selector: { - kind: "parsed", - parse: String, - brief: "Filter events by selector substring", - optional: true, - }, - from: { - kind: "parsed", - parse: parseOffsetFlag, - brief: "Start offset (seconds, 90s, 01:23, or 1:02:03)", - optional: true, - }, - to: { - kind: "parsed", - parse: parseOffsetFlag, - brief: "End offset (seconds, 90s, 01:23, or 1:02:03)", - optional: true, - }, around: { kind: "parsed", parse: parseOffsetFlag, - brief: "Center an evidence window around this offset", - optional: true, - }, - before: { - kind: "parsed", - parse: parseOffsetFlag, - brief: "Window before --around (default: 10s)", - optional: true, - }, - after: { - kind: "parsed", - parse: parseOffsetFlag, - brief: "Window after --around (default: 30s)", + brief: "Show an evidence window around this replay offset", optional: true, }, limit: { @@ -338,28 +272,22 @@ export const listCommand = buildCommand({ brief: "Include raw source frame payloads in JSON output", default: false, }, - jsonl: { - kind: "boolean", - brief: "Emit one JSON object per event (requires --json)", - default: false, - }, fresh: FRESH_FLAG, }, aliases: { ...FRESH_ALIASES, k: "kind", n: "limit", - q: "contains", - u: "url", + q: "search", }, }, async *func(this: SentryContext, flags: EventListFlags, ...args: string[]) { - validateJsonlMode(flags); applyFreshFlag(flags); const kinds = parseEventKinds(flags.kind); const window = resolveWindow(flags); + const { path, targetArgs } = splitTargetAndPathArgs(args, flags.path); - const parsedArgs = parseReplayTargetArgs(args, USAGE_HINT); + const parsedArgs = parseReplayTargetArgs(targetArgs, USAGE_HINT); const replayId = validateHexId(parsedArgs.replayId, "replay ID"); const resolved = await resolveOrgOptionalProjectFromArg( parsedArgs.targetArg, @@ -399,31 +327,17 @@ export const listCommand = buildCommand({ }); const filtered = filterNormalizedReplayEvents(allEvents, { kinds, - url: flags.url, - path: flags.path, - contains: flags.contains, - 
selector: flags.selector, + path, + contains: flags.search, ...window, }); const events = filtered.slice(0, flags.limit); const truncated = filtered.length > events.length; - if (flags.jsonl) { - for (const event of events) { - yield new CommandOutput(event); - } - return; + for (const event of events) { + yield new CommandOutput(event); } - yield new CommandOutput({ - events, - total: filtered.length, - truncated, - replayId, - org: resolved.org, - project: resolved.project, - }); - const countText = `Showing ${events.length} of ${filtered.length} replay event${filtered.length === 1 ? "" : "s"}.`; const truncationHint = truncated ? ` Increase --limit or narrow filters to inspect the remaining ${filtered.length - events.length}.` diff --git a/src/commands/replay/index.ts b/src/commands/replay/index.ts index 2953a640b..9748c92d9 100644 --- a/src/commands/replay/index.ts +++ b/src/commands/replay/index.ts @@ -24,8 +24,8 @@ export const replayRoute = buildRouteMap({ fullDescription: "Search and inspect Session Replays from your Sentry organization.\n\n" + "Commands:\n" + - " event Inspect normalized events from a replay (alias: events)\n" + - " list List recent replays in an org or project\n" + + " list Search replay sessions in an org or project\n" + + " event Expand one replay into a normalized event timeline (alias: events)\n" + " summarize Summarize replay behavior and friction signals\n" + " view View details of a specific replay\n\n" + "Alias: `sentry replays` → `sentry replay list`", diff --git a/src/commands/replay/list.ts b/src/commands/replay/list.ts index 0f3039827..f0642f1c4 100644 --- a/src/commands/replay/list.ts +++ b/src/commands/replay/list.ts @@ -5,9 +5,7 @@ */ import type { SentryContext } from "../../context.js"; -import { MAX_PAGINATION_PAGES } from "../../lib/api/infrastructure.js"; import { - API_MAX_PER_PAGE, isReplaySortValue, listReplays, type ReplaySortValue, @@ -44,7 +42,6 @@ import { withProgress } from "../../lib/polling.js"; import { 
getReplayUserLabel, parseReplayEnvironmentFilter, - replayMatchesPath, } from "../../lib/replay-search.js"; import { resolveOrgOptionalProjectFromArg } from "../../lib/resolve-target.js"; import { sanitizeQuery } from "../../lib/search-query.js"; @@ -63,19 +60,13 @@ import { type ListFlags = { readonly environment?: readonly string[]; readonly limit: number; - readonly "problem-only": boolean; - readonly friction: boolean; - readonly "entry-path"?: string; - readonly "exit-path"?: string; - readonly path?: string; - readonly query?: string; + readonly search?: string; readonly sort: ReplaySortValue; readonly period: TimeRange; readonly json: boolean; readonly cursor?: string; readonly fresh: boolean; readonly fields?: string[]; - readonly url?: string; }; type ReplayListResult = { @@ -97,20 +88,6 @@ type ReplaySortKey = | "rage" | "warnings"; -type ReplayListHintFlags = Pick< - ListFlags, - | "entry-path" - | "environment" - | "exit-path" - | "friction" - | "path" - | "problem-only" - | "query" - | "sort" - | "period" - | "url" ->; - const SORT_MAP: Record = { activity: "-activity", date: "-started_at", @@ -126,38 +103,6 @@ const DEFAULT_PERIOD = LIST_PERIOD_FLAG.default; const DEFAULT_SORT: ReplaySortValue = SORT_MAP.date; const PAGINATION_KEY = "replay-list"; const COMMAND_NAME = "replay list"; -const SIMPLE_SEARCH_VALUE_RE = /^[^\s:"]+$/; - -function encodeReplayCursor( - serverCursor: string | undefined, - afterReplayId?: string -): string | undefined { - if (afterReplayId) { - return `${serverCursor ?? 
""}|${afterReplayId}`; - } - return serverCursor; -} - -function decodeReplayCursor(cursor: string | undefined): { - serverCursor: string | undefined; - afterReplayId: string | undefined; -} { - if (!cursor) { - return { serverCursor: undefined, afterReplayId: undefined }; - } - - const pipeIndex = cursor.lastIndexOf("|"); - if (pipeIndex === -1) { - return { serverCursor: cursor, afterReplayId: undefined }; - } - - const serverCursor = cursor.slice(0, pipeIndex); - const afterReplayId = cursor.slice(pipeIndex + 1); - return { - serverCursor: serverCursor || undefined, - afterReplayId: afterReplayId || undefined, - }; -} function parseLimit(value: string): number { return validateLimit(value, LIST_MIN_LIMIT, LIST_MAX_LIMIT); @@ -191,219 +136,6 @@ function replayUserLabel(replay: ReplayListItem): string { return getReplayUserLabel(replay) ?? "—"; } -function quoteSearchValue(value: string): string { - return SIMPLE_SEARCH_VALUE_RE.test(value) ? value : JSON.stringify(value); -} - -function wildcardSearchValue(value: string): string { - const trimmed = value.trim(); - if (trimmed.includes("*")) { - return trimmed; - } - return `*${trimmed}*`; -} - -function buildReplaySearchQuery(filters: { - query?: string; - url?: string; - path?: string; - entryPath?: string; - exitPath?: string; -}): string | undefined { - const { entryPath, exitPath, path, query, url } = filters; - // Replay search only has a generic visited-URL field. Use at most one broad - // server prefilter, then apply path and position checks against the URL list. - const routePathPrefilter = url ? undefined : (path ?? entryPath ?? exitPath); - const parts = [ - query, - url ? `url:${quoteSearchValue(wildcardSearchValue(url))}` : undefined, - routePathPrefilter - ? `url:${quoteSearchValue(wildcardSearchValue(routePathPrefilter))}` - : undefined, - ].filter((part): part is string => Boolean(part)); - return parts.length > 0 ? 
parts.join(" ") : undefined; -} - -function hasErrorOrWarningSignals(replay: ReplayListItem): boolean { - return ( - (replay.count_errors ?? 0) > 0 || - (replay.count_warnings ?? 0) > 0 || - replay.error_ids.length > 0 || - replay.warning_ids.length > 0 - ); -} - -function hasFrictionSignals(replay: ReplayListItem): boolean { - return ( - hasErrorOrWarningSignals(replay) || - (replay.count_rage_clicks ?? 0) > 0 || - (replay.count_dead_clicks ?? 0) > 0 - ); -} - -function replayMatchesRouteFilters( - replay: ReplayListItem, - flags: ListFlags -): boolean { - if (flags.path && !replayMatchesPath(replay, flags.path)) { - return false; - } - if ( - flags["entry-path"] && - !replayMatchesPath(replay, flags["entry-path"], "entry") - ) { - return false; - } - if ( - flags["exit-path"] && - !replayMatchesPath(replay, flags["exit-path"], "exit") - ) { - return false; - } - return true; -} - -function hasClientSideFilters(flags: ListFlags): boolean { - return Boolean( - flags.path || - flags["entry-path"] || - flags["exit-path"] || - flags.friction || - flags["problem-only"] - ); -} - -function replayMatchesClientFilters( - replay: ReplayListItem, - flags: ListFlags -): boolean { - if (!replayMatchesRouteFilters(replay, flags)) { - return false; - } - if (flags["problem-only"]) { - return hasErrorOrWarningSignals(replay); - } - return flags.friction ? hasFrictionSignals(replay) : true; -} - -type FetchReplayListOptions = { - cursor?: string; - environment?: string[]; - flags: ListFlags; - org: string; - project?: string; - query?: string; - timeRange: TimeRange; -}; - -type ReplayPageResult = { - filled: boolean; - cursorToStore: string | undefined; -}; - -function replayStartIndex( - replays: ReplayListItem[], - afterReplayId: string | undefined -): number { - if (!afterReplayId) { - return 0; - } - - const afterIndex = replays.findIndex((replay) => replay.id === afterReplayId); - return afterIndex === -1 ? 
0 : afterIndex + 1; -} - -function processReplayPage( - pageReplays: ReplayListItem[], - results: ReplayListItem[], - flags: ListFlags, - options: { - serverCursor: string | undefined; - afterReplayId: string | undefined; - nextCursor: string | undefined; - } -): ReplayPageResult { - const startIndex = replayStartIndex(pageReplays, options.afterReplayId); - - for (const replay of pageReplays.slice(startIndex)) { - if (!replayMatchesClientFilters(replay, flags)) { - continue; - } - - results.push(replay); - if (results.length >= flags.limit) { - let cursorToStore = encodeReplayCursor(options.serverCursor, replay.id); - if ( - cursorToStore === - encodeReplayCursor(options.serverCursor, pageReplays.at(-1)?.id) - ) { - cursorToStore = options.nextCursor; - } - return { filled: true, cursorToStore }; - } - } - - return { filled: false, cursorToStore: undefined }; -} - -async function fetchReplayListForCommand( - options: FetchReplayListOptions -): Promise<{ replays: ReplayListItem[]; nextCursor?: string }> { - const { environment, flags, org, project, query, timeRange } = options; - const decodedCursor = decodeReplayCursor(options.cursor); - const hasClientFilters = hasClientSideFilters(flags); - const needsFullPageScan = - hasClientFilters || Boolean(decodedCursor.afterReplayId); - const shouldFetchMultiplePages = - needsFullPageScan || flags.limit > API_MAX_PER_PAGE; - const requestedServerLimit = needsFullPageScan - ? API_MAX_PER_PAGE - : Math.min(flags.limit, API_MAX_PER_PAGE); - const replays: ReplayListItem[] = []; - let pageCursor = decodedCursor.serverCursor; - let afterReplayId = decodedCursor.afterReplayId; - let cursorToStore: string | undefined; - - for (let page = 0; page < MAX_PAGINATION_PAGES; page += 1) { - const pageResult = await listReplays(org, { - environment, - fields: [...REPLAY_LIST_FIELDS], - limit: requestedServerLimit, - query, - projectSlugs: project ? 
[project] : undefined, - sort: flags.sort, - cursor: pageCursor, - ...timeRangeToApiParams(timeRange), - }); - - if (!shouldFetchMultiplePages) { - return { - replays: pageResult.data.slice(0, flags.limit), - nextCursor: pageResult.nextCursor, - }; - } - - const processed = processReplayPage(pageResult.data, replays, flags, { - serverCursor: pageCursor, - afterReplayId, - nextCursor: pageResult.nextCursor, - }); - afterReplayId = undefined; - - if (processed.filled) { - return { replays, nextCursor: processed.cursorToStore }; - } - - cursorToStore = pageResult.nextCursor; - if (!pageResult.nextCursor) { - break; - } - pageCursor = pageResult.nextCursor; - } - - return { replays, nextCursor: cursorToStore }; -} - const REPLAY_COLUMNS: Column[] = [ { header: "ID", @@ -453,28 +185,13 @@ function formatScope(org: string, project?: string): string { return project ? `${org}/${project}` : `${org}/`; } -function appendReplayFlags(base: string, flags: ReplayListHintFlags): string { +function appendReplayFlags( + base: string, + flags: Pick +): string { const parts: string[] = []; - appendQueryHint(parts, flags.query); + appendQueryHint(parts, flags.search); appendSortHint(parts, flags.sort, DEFAULT_SORT); - if (flags.url) { - parts.push(`--url "${flags.url}"`); - } - if (flags.path) { - parts.push(`--path "${flags.path}"`); - } - if (flags["entry-path"]) { - parts.push(`--entry-path "${flags["entry-path"]}"`); - } - if (flags["exit-path"]) { - parts.push(`--exit-path "${flags["exit-path"]}"`); - } - if (flags.friction) { - parts.push("--friction"); - } - if (flags["problem-only"]) { - parts.push("--problem-only"); - } if (flags.environment && flags.environment.length > 0) { for (const environment of flags.environment) { parts.push(`-e "${environment}"`); @@ -487,7 +204,7 @@ function appendReplayFlags(base: string, flags: ReplayListHintFlags): string { function nextPageHint( org: string, project: string | undefined, - flags: ReplayListHintFlags + flags: Pick ): string { 
return appendReplayFlags( `sentry replay list ${formatScope(org, project)} -c next`, @@ -498,7 +215,7 @@ function nextPageHint( function prevPageHint( org: string, project: string | undefined, - flags: ReplayListHintFlags + flags: Pick ): string { return appendReplayFlags( `sentry replay list ${formatScope(org, project)} -c prev`, @@ -556,7 +273,6 @@ export const listCommand = buildListCommand("replay", { " sentry replay list sentry/\n" + " sentry replay list sentry/cli --limit 50\n" + " sentry replay list sentry/cli --sort duration\n" + - " sentry replay list sentry/cli --path /signup --friction\n" + ' sentry replay list sentry/cli -q "user.email:foo@example.com"\n' + " sentry replay list sentry/cli -e production -e canary\n" + " sentry replay list sentry/cli --period 24h\n\n" + @@ -586,47 +302,12 @@ export const listCommand = buildListCommand("replay", { brief: `Number of replays (${LIST_MIN_LIMIT}-${LIST_MAX_LIMIT})`, default: String(LIST_DEFAULT_LIMIT), }, - query: { + search: { kind: "parsed", parse: sanitizeQuery, brief: "Search query (Sentry replay search syntax)", optional: true, }, - url: { - kind: "parsed", - parse: String, - brief: "Filter by visited URL text using replay search", - optional: true, - }, - path: { - kind: "parsed", - parse: String, - brief: "Filter by actual visited URL pathname", - optional: true, - }, - "entry-path": { - kind: "parsed", - parse: String, - brief: "Filter by first visited URL pathname", - optional: true, - }, - "exit-path": { - kind: "parsed", - parse: String, - brief: "Filter by last visited URL pathname", - optional: true, - }, - friction: { - kind: "boolean", - brief: - "Only show replays with indexed friction signals (errors, warnings, rage clicks, or dead clicks)", - default: false, - }, - "problem-only": { - kind: "boolean", - brief: "Only show replays with indexed errors or warnings", - default: false, - }, environment: { kind: "parsed", parse: String, @@ -647,22 +328,15 @@ export const listCommand = 
buildListCommand("replay", { ...PERIOD_ALIASES, e: "environment", n: "limit", - q: "query", + q: "search", s: "sort", - u: "url", }, }, async *func(this: SentryContext, flags: ListFlags, target?: string) { const { cwd } = this; const timeRange = flags.period; const environment = parseReplayEnvironmentFilter(flags.environment); - const query = buildReplaySearchQuery({ - query: flags.query, - url: flags.url, - path: flags.path, - entryPath: flags["entry-path"], - exitPath: flags["exit-path"], - }); + const { search } = flags; const resolved = await resolveOrgOptionalProjectFromArg( target, @@ -675,13 +349,8 @@ export const listCommand = buildListCommand("replay", { formatScope(resolved.org, resolved.project), { env: environment?.join(","), - entryPath: flags["entry-path"], - exitPath: flags["exit-path"], - friction: flags.friction ? "1" : undefined, - path: flags.path, - problem: flags["problem-only"] ? "1" : undefined, sort: flags.sort, - q: query, + q: search, period: serializeTimeRange(timeRange), } ); @@ -691,20 +360,21 @@ export const listCommand = buildListCommand("replay", { contextKey ); - const { replays, nextCursor } = await withProgress( + const { data: replays, nextCursor } = await withProgress( { message: `Fetching replays (up to ${flags.limit})...`, json: flags.json, }, () => - fetchReplayListForCommand({ - cursor, + listReplays(resolved.org, { environment, - flags, - org: resolved.org, - project: resolved.project, - query, - timeRange, + fields: [...REPLAY_LIST_FIELDS], + limit: flags.limit, + query: search, + projectSlugs: resolved.project ? 
[resolved.project] : undefined, + sort: flags.sort, + cursor, + ...timeRangeToApiParams(timeRange), }) ); diff --git a/src/lib/command.ts b/src/lib/command.ts index afac9c9f8..eb46353ea 100644 --- a/src/lib/command.ts +++ b/src/lib/command.ts @@ -556,7 +556,7 @@ export function buildCommand< renderCommandOutput(stdout, value.data, outputConfig, renderer, { json: Boolean(flags.json), fields: flags.fields as string[] | undefined, - jsonCompact: Boolean(flags.jsonl), + jsonCompact: outputConfig.jsonLines, clearPrefix: pendingClear ? "\x1b[H\x1b[J" : undefined, }); pendingClear = false; diff --git a/src/lib/formatters/output.ts b/src/lib/formatters/output.ts index 2b4595090..205f71ac5 100644 --- a/src/lib/formatters/output.ts +++ b/src/lib/formatters/output.ts @@ -145,6 +145,14 @@ export type OutputConfig = { * - `generate-skill.ts`: SKILL.md field tables for AI agents */ schema?: ZodType; + /** + * Emit compact one-line JSON for each yielded value. + * + * Use this for commands that intentionally yield a stream of records in + * `--json` mode so the output is newline-delimited JSON without a separate + * command-specific flag. 
+ */ + jsonLines?: boolean; }; /** diff --git a/test/commands/replay/event-list.test.ts b/test/commands/replay/event-list.test.ts index 07ed0ce48..9f48931b3 100644 --- a/test/commands/replay/event-list.test.ts +++ b/test/commands/replay/event-list.test.ts @@ -115,7 +115,7 @@ describe("replay event list", () => { resolveTargetSpy.mockRestore(); }); - test("renders filtered JSON event envelope", async () => { + test("streams filtered JSON events with --json", async () => { const { context, stdoutWrite } = createMockContext(); const func = await listCommand.loader(); await func.call( @@ -123,11 +123,9 @@ describe("replay event list", () => { { fresh: false, json: true, - jsonl: false, kind: ["click,network"], limit: 10, raw: false, - url: "/signup", }, `test-org/cli/${REPLAY_ID}` ); @@ -139,17 +137,20 @@ describe("replay event list", () => { { expectedSegments: 1 } ); - const output = stdoutWrite.mock.calls.map((call) => call[0]).join(""); - const parsed = JSON.parse(output); - expect(parsed.data).toHaveLength(2); - expect(parsed.data[0].kind).toBe("click"); - expect(parsed.data[0].selector).toBe("button[type=submit]"); - expect(parsed.data[1].kind).toBe("network"); - expect(parsed.total).toBe(2); - expect(parsed.truncated).toBe(false); + const lines = stdoutWrite.mock.calls + .map((call) => call[0]) + .join("") + .trim() + .split("\n"); + expect(lines).toHaveLength(2); + const first = JSON.parse(lines[0]!); + const second = JSON.parse(lines[1]!); + expect(first.kind).toBe("click"); + expect(first.selector).toBe("button[type=submit]"); + expect(second.kind).toBe("network"); }); - test("emits JSONL when requested", async () => { + test("uses -q search text for normalized event fields", async () => { const { context, stdoutWrite } = createMockContext(); const func = await listCommand.loader(); await func.call( @@ -157,9 +158,9 @@ describe("replay event list", () => { { fresh: false, json: true, - jsonl: true, - limit: 2, + limit: 10, raw: false, + search: 
"button[type=submit]", }, `test-org/${REPLAY_ID}` ); @@ -169,31 +170,53 @@ describe("replay event list", () => { .join("") .trim() .split("\n"); - expect(lines).toHaveLength(2); - expect(JSON.parse(lines[0]!).kind).toBe("navigation"); - expect(JSON.parse(lines[1]!).kind).toBe("click"); + expect(lines).toHaveLength(1); + expect(JSON.parse(lines[0]!).kind).toBe("click"); }); - test("rejects before or after windows without around", async () => { - const { context } = createMockContext(); + test("accepts a trailing positional path filter", async () => { + const { context, stdoutWrite } = createMockContext(); const func = await listCommand.loader(); + await func.call( + context, + { + fresh: false, + json: true, + kind: ["click,network"], + limit: 10, + raw: false, + }, + `test-org/${REPLAY_ID}`, + "/signup" + ); - await expect( - func.call( - context, - { - before: 5000, - fresh: false, - json: true, - jsonl: false, - limit: 10, - raw: false, - }, - `test-org/cli/${REPLAY_ID}` - ) - ).rejects.toThrow("--before and --after require --around"); + const lines = stdoutWrite.mock.calls + .map((call) => call[0]) + .join("") + .trim() + .split("\n"); + expect(lines).toHaveLength(1); + expect(JSON.parse(lines[0]!).kind).toBe("click"); + }); + + test("renders a human table from streamed events", async () => { + const { context, stdoutWrite } = createMockContext(); + const func = await listCommand.loader(); + await func.call( + context, + { + fresh: false, + json: false, + kind: ["click"], + limit: 10, + raw: false, + }, + `test-org/${REPLAY_ID}` + ); - expect(getReplaySpy).not.toHaveBeenCalled(); - expect(getReplayRecordingSegmentsSpy).not.toHaveBeenCalled(); + const output = stdoutWrite.mock.calls.map((call) => call[0]).join(""); + expect(output).toContain("Replay events for 346789a7:"); + expect(output).toContain("Sign up"); + expect(output).toContain("Showing 1 of 1 replay event."); }); }); diff --git a/test/commands/replay/list.test.ts b/test/commands/replay/list.test.ts 
index f0ce69ff9..72a6a96ad 100644 --- a/test/commands/replay/list.test.ts +++ b/test/commands/replay/list.test.ts @@ -30,6 +30,7 @@ describe("parseSort", () => { expect(parseSort("date")).toBe("-started_at"); expect(parseSort("duration")).toBe("-duration"); expect(parseSort("errors")).toBe("-count_errors"); + expect(parseSort("rage")).toBe("-count_rage_clicks"); expect(parseSort("-count_rage_clicks")).toBe("-count_rage_clicks"); }); @@ -63,19 +64,6 @@ describe("listCommand.func", () => { }, ]; - function replayWithIndex(index: number): ReplayListItem { - return { - ...sampleReplays[0]!, - id: index.toString(16).padStart(32, "0"), - }; - } - - function replayPage(start: number, count: number): ReplayListItem[] { - return Array.from({ length: count }, (_, offset) => - replayWithIndex(start + offset) - ); - } - function createMockContext() { const stdoutWrite = mock(() => true); return { @@ -179,22 +167,10 @@ describe("listCommand.func", () => { }); }); - test("combines URL sugar with query and filters friction rows", async () => { + test("passes one search query through Sentry search syntax", async () => { resolveTargetSpy.mockResolvedValue({ org: "test-org", project: "cli" }); listReplaysSpy.mockResolvedValue({ - data: [ - sampleReplays[0]!, - { - ...sampleReplays[0]!, - id: "aaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaa", - count_errors: 0, - count_dead_clicks: 0, - count_rage_clicks: 0, - count_warnings: 0, - error_ids: [], - warning_ids: [], - }, - ], + data: sampleReplays, nextCursor: undefined, }); @@ -203,13 +179,11 @@ describe("listCommand.func", () => { await func.call( context, { - friction: true, limit: 25, json: true, period: parsePeriod("7d"), - query: "environment:production", + search: "environment:production url:*signup* count_errors:>0", sort: "-started_at", - url: "/signup", }, "test-org/cli" ); @@ -217,9 +191,9 @@ describe("listCommand.func", () => { expect(listReplaysSpy).toHaveBeenCalledWith("test-org", { environment: undefined, fields: 
[...REPLAY_LIST_FIELDS], - limit: apiClient.API_MAX_PER_PAGE, + limit: 25, projectSlugs: ["cli"], - query: "environment:production url:*/signup*", + query: "environment:production url:*signup* count_errors:>0", sort: "-started_at", cursor: undefined, statsPeriod: "7d", @@ -231,428 +205,30 @@ describe("listCommand.func", () => { expect(parsed.data[0].id).toBe(sampleReplays[0]?.id); }); - test("problem-only filters to errors and warnings, not click friction", async () => { - resolveTargetSpy.mockResolvedValue({ org: "test-org", project: "cli" }); - listReplaysSpy.mockResolvedValue({ - data: [ - sampleReplays[0]!, - { - ...sampleReplays[0]!, - id: "aaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaa", - count_errors: 0, - count_dead_clicks: 3, - count_rage_clicks: 0, - count_warnings: 0, - error_ids: [], - warning_ids: [], - }, - ], - nextCursor: undefined, - }); - - const { context, stdoutWrite } = createMockContext(); - const func = await listCommand.loader(); - await func.call( - context, - { - "problem-only": true, - limit: 25, - json: true, - period: parsePeriod("7d"), - sort: "-started_at", - }, - "test-org/cli" - ); - - const output = stdoutWrite.mock.calls.map((call) => call[0]).join(""); - const parsed = JSON.parse(output); - expect(parsed.data).toHaveLength(1); - expect(parsed.data[0].id).toBe(sampleReplays[0]?.id); - }); - - test("filters --path by actual replay URL pathname", async () => { + test("passes large limits to the API layer for auto-pagination", async () => { resolveTargetSpy.mockResolvedValue({ org: "test-org", project: "cli" }); listReplaysSpy.mockResolvedValue({ - data: [ - { - ...sampleReplays[0]!, - urls: ["https://example.com/signup/direct"], - }, - { - ...sampleReplays[0]!, - id: "aaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaa", - urls: ["https://example.com/replays/?query=/signup"], - }, - ], + data: sampleReplays, nextCursor: undefined, }); - const { context, stdoutWrite } = createMockContext(); - const func = await listCommand.loader(); - await func.call( - context, - 
{ - limit: 25, - json: true, - path: "/signup", - period: parsePeriod("7d"), - sort: "-started_at", - }, - "test-org/cli" - ); - - expect(listReplaysSpy).toHaveBeenCalledWith("test-org", { - environment: undefined, - fields: [...REPLAY_LIST_FIELDS], - limit: apiClient.API_MAX_PER_PAGE, - projectSlugs: ["cli"], - query: "url:*/signup*", - sort: "-started_at", - cursor: undefined, - statsPeriod: "7d", - }); - - const output = stdoutWrite.mock.calls.map((call) => call[0]).join(""); - const parsed = JSON.parse(output); - expect(parsed.data).toHaveLength(1); - expect(parsed.data[0].urls[0]).toBe("https://example.com/signup/direct"); - }); - - test("fills the requested limit across client-filtered replay pages", async () => { - resolveTargetSpy.mockResolvedValue({ org: "test-org", project: "cli" }); - listReplaysSpy - .mockResolvedValueOnce({ - data: [ - { - ...sampleReplays[0]!, - urls: ["https://example.com/replays/?query=/signup"], - }, - ], - nextCursor: "0:100:0", - }) - .mockResolvedValueOnce({ - data: [ - { - ...sampleReplays[0]!, - id: "aaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaa", - urls: ["https://example.com/signup/direct"], - }, - ], - nextCursor: "0:200:0", - }); - - const { context, stdoutWrite } = createMockContext(); - const func = await listCommand.loader(); - await func.call( - context, - { - limit: 1, - json: true, - path: "/signup", - period: parsePeriod("7d"), - sort: "-started_at", - }, - "test-org/cli" - ); - - expect(listReplaysSpy).toHaveBeenCalledTimes(2); - expect(listReplaysSpy).toHaveBeenNthCalledWith(1, "test-org", { - environment: undefined, - fields: [...REPLAY_LIST_FIELDS], - limit: apiClient.API_MAX_PER_PAGE, - projectSlugs: ["cli"], - query: "url:*/signup*", - sort: "-started_at", - cursor: undefined, - statsPeriod: "7d", - }); - expect(listReplaysSpy).toHaveBeenNthCalledWith(2, "test-org", { - environment: undefined, - fields: [...REPLAY_LIST_FIELDS], - limit: apiClient.API_MAX_PER_PAGE, - projectSlugs: ["cli"], - query: "url:*/signup*", - sort: 
"-started_at", - cursor: "0:100:0", - statsPeriod: "7d", - }); - - const output = stdoutWrite.mock.calls.map((call) => call[0]).join(""); - const parsed = JSON.parse(output); - expect(parsed.data).toHaveLength(1); - expect(parsed.data[0].id).toBe("aaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaa"); - expect(parsed.nextCursor).toBe("0:200:0"); - }); - - test("stores a mid-page cursor when client filters fill before page end", async () => { - resolveTargetSpy.mockResolvedValue({ org: "test-org", project: "cli" }); - listReplaysSpy.mockResolvedValueOnce({ - data: [ - { - ...sampleReplays[0]!, - id: "11111111111111111111111111111111", - urls: ["https://example.com/signup/one"], - }, - { - ...sampleReplays[0]!, - id: "22222222222222222222222222222222", - urls: ["https://example.com/signup/two"], - }, - ], - nextCursor: "0:100:0", - }); - - const { context, stdoutWrite } = createMockContext(); - const func = await listCommand.loader(); - await func.call( - context, - { - limit: 1, - json: true, - path: "/signup", - period: parsePeriod("7d"), - sort: "-started_at", - }, - "test-org/cli" - ); - - const output = stdoutWrite.mock.calls.map((call) => call[0]).join(""); - const parsed = JSON.parse(output); - expect(parsed.data).toHaveLength(1); - expect(parsed.data[0].id).toBe("11111111111111111111111111111111"); - expect(parsed.nextCursor).toBe("|11111111111111111111111111111111"); - }); - - test("resumes from a mid-page cursor for client-filtered replays", async () => { - resolveTargetSpy.mockResolvedValue({ org: "test-org", project: "cli" }); - resolveCursorSpy.mockReturnValueOnce({ - cursor: "|11111111111111111111111111111111", - direction: "next" as const, - }); - listReplaysSpy.mockResolvedValueOnce({ - data: [ - { - ...sampleReplays[0]!, - id: "11111111111111111111111111111111", - urls: ["https://example.com/signup/one"], - }, - { - ...sampleReplays[0]!, - id: "22222222222222222222222222222222", - urls: ["https://example.com/signup/two"], - }, - ], - nextCursor: "0:100:0", - }); - - 
const { context, stdoutWrite } = createMockContext(); - const func = await listCommand.loader(); - await func.call( - context, - { - cursor: "next", - limit: 1, - json: true, - path: "/signup", - period: parsePeriod("7d"), - sort: "-started_at", - }, - "test-org/cli" - ); - - expect(listReplaysSpy).toHaveBeenCalledWith("test-org", { - environment: undefined, - fields: [...REPLAY_LIST_FIELDS], - limit: apiClient.API_MAX_PER_PAGE, - projectSlugs: ["cli"], - query: "url:*/signup*", - sort: "-started_at", - cursor: undefined, - statsPeriod: "7d", - }); - - const output = stdoutWrite.mock.calls.map((call) => call[0]).join(""); - const parsed = JSON.parse(output); - expect(parsed.data).toHaveLength(1); - expect(parsed.data[0].id).toBe("22222222222222222222222222222222"); - expect(parsed.nextCursor).toBe("0:100:0"); - }); - - test("caps unfiltered replay API page size when filling large limits", async () => { - resolveTargetSpy.mockResolvedValue({ org: "test-org", project: "cli" }); - listReplaysSpy - .mockResolvedValueOnce({ - data: replayPage(0, apiClient.API_MAX_PER_PAGE), - nextCursor: "0:100:0", - }) - .mockResolvedValueOnce({ - data: replayPage(100, apiClient.API_MAX_PER_PAGE), - nextCursor: "0:200:0", - }) - .mockResolvedValueOnce({ - data: replayPage(200, apiClient.API_MAX_PER_PAGE), - nextCursor: "0:300:0", - }); - - const { context, stdoutWrite } = createMockContext(); - const func = await listCommand.loader(); - await func.call( - context, - { - limit: 250, - json: true, - period: parsePeriod("7d"), - sort: "-started_at", - }, - "test-org/cli" - ); - - expect(listReplaysSpy).toHaveBeenCalledTimes(3); - for (const [callIndex, cursor] of [ - undefined, - "0:100:0", - "0:200:0", - ].entries()) { - expect(listReplaysSpy).toHaveBeenNthCalledWith( - callIndex + 1, - "test-org", - { - environment: undefined, - fields: [...REPLAY_LIST_FIELDS], - limit: apiClient.API_MAX_PER_PAGE, - projectSlugs: ["cli"], - query: undefined, - sort: "-started_at", - cursor, - 
statsPeriod: "7d", - } - ); - } - - const output = stdoutWrite.mock.calls.map((call) => call[0]).join(""); - const parsed = JSON.parse(output); - const lastReplay = replayWithIndex(249); - expect(parsed.data).toHaveLength(250); - expect(parsed.data[249].id).toBe(lastReplay.id); - expect(parsed.nextCursor).toBe(`0:200:0|${lastReplay.id}`); - }); - - test("resumes from a mid-page cursor for unfiltered replays", async () => { - resolveTargetSpy.mockResolvedValue({ org: "test-org", project: "cli" }); - resolveCursorSpy.mockReturnValueOnce({ - cursor: `0:200:0|${replayWithIndex(249).id}`, - direction: "next" as const, - }); - listReplaysSpy.mockResolvedValueOnce({ - data: replayPage(200, apiClient.API_MAX_PER_PAGE), - nextCursor: "0:300:0", - }); - - const { context, stdoutWrite } = createMockContext(); - const func = await listCommand.loader(); - await func.call( - context, - { - cursor: "next", - limit: 25, - json: true, - period: parsePeriod("7d"), - sort: "-started_at", - }, - "test-org/cli" - ); - - expect(listReplaysSpy).toHaveBeenCalledWith("test-org", { - environment: undefined, - fields: [...REPLAY_LIST_FIELDS], - limit: apiClient.API_MAX_PER_PAGE, - projectSlugs: ["cli"], - query: undefined, - sort: "-started_at", - cursor: "0:200:0", - statsPeriod: "7d", - }); - - const output = stdoutWrite.mock.calls.map((call) => call[0]).join(""); - const parsed = JSON.parse(output); - const lastReplay = replayWithIndex(274); - expect(parsed.data).toHaveLength(25); - expect(parsed.data[0].id).toBe(replayWithIndex(250).id); - expect(parsed.data[24].id).toBe(lastReplay.id); - expect(parsed.nextCursor).toBe(`0:200:0|${lastReplay.id}`); - }); - - test("uses one server URL prefilter for positional path filters", async () => { - resolveTargetSpy.mockResolvedValue({ org: "test-org", project: "cli" }); - listReplaysSpy.mockResolvedValue({ data: sampleReplays }); - const { context } = createMockContext(); const func = await listCommand.loader(); await func.call( context, { - 
"entry-path": "/home", - limit: 25, + limit: 250, json: true, - path: "/signup", period: parsePeriod("7d"), sort: "-started_at", }, "test-org/cli" ); - expect(listReplaysSpy).toHaveBeenCalledWith("test-org", { - environment: undefined, - fields: [...REPLAY_LIST_FIELDS], - limit: apiClient.API_MAX_PER_PAGE, - projectSlugs: ["cli"], - query: "url:*/signup*", - sort: "-started_at", - cursor: undefined, - statsPeriod: "7d", - }); - }); - - test("does not duplicate server URL filters when --url and --path combine", async () => { - resolveTargetSpy.mockResolvedValue({ org: "test-org", project: "cli" }); - listReplaysSpy.mockResolvedValue({ - data: [ - { - ...sampleReplays[0]!, - urls: ["https://example.com/signup/direct"], - }, - ], - }); - - const { context, stdoutWrite } = createMockContext(); - const func = await listCommand.loader(); - await func.call( - context, - { - limit: 25, - json: true, - path: "/signup", - period: parsePeriod("7d"), - sort: "-started_at", - url: "example.com", - }, - "test-org/cli" + expect(listReplaysSpy).toHaveBeenCalledWith( + "test-org", + expect.objectContaining({ limit: 250 }) ); - - expect(listReplaysSpy).toHaveBeenCalledWith("test-org", { - environment: undefined, - fields: [...REPLAY_LIST_FIELDS], - limit: apiClient.API_MAX_PER_PAGE, - projectSlugs: ["cli"], - query: "url:*example.com*", - sort: "-started_at", - cursor: undefined, - statsPeriod: "7d", - }); - - const output = stdoutWrite.mock.calls.map((call) => call[0]).join(""); - const parsed = JSON.parse(output); - expect(parsed.data).toHaveLength(1); }); test("renders human output with a replay hint", async () => { diff --git a/test/lib/command.test.ts b/test/lib/command.test.ts index 0873e3a01..dfb00ae99 100644 --- a/test/lib/command.test.ts +++ b/test/lib/command.test.ts @@ -872,6 +872,30 @@ describe("buildCommand output config", () => { expect(receivedFlags!.fields).toBeUndefined(); }); + test("jsonLines emits compact JSON records without a command-specific flag", async () => 
{ + const command = buildCommand<{ json: boolean }, [], TestContext>({ + auth: false, + docs: { brief: "Test" }, + output: { human: () => "unused", jsonLines: true }, + parameters: {}, + async *func(this: TestContext, _flags: { json: boolean }) { + yield new CommandOutput({ id: 1 }); + yield new CommandOutput({ id: 2 }); + }, + }); + + const routeMap = buildRouteMap({ + routes: { test: command }, + docs: { brief: "Test app" }, + }); + const app = buildApplication(routeMap, { name: "test" }); + const ctx = createTestContext(); + + await run(app, ["test", "--json"], ctx as TestContext); + + expect(ctx.output.join("")).toBe('{"id":1}\n{"id":2}\n'); + }); + test("does not inject --json/--fields without output: 'json'", async () => { let funcCalled = false;