diff --git a/.github/workflows/ci.yml b/.github/workflows/ci.yml index 7923bdd..5a07dbc 100644 --- a/.github/workflows/ci.yml +++ b/.github/workflows/ci.yml @@ -3,6 +3,7 @@ name: CI on: push: branches: [main] + tags: ["v*"] pull_request: branches: [main] @@ -14,10 +15,10 @@ jobs: python-version: ["3.12", "3.13"] steps: - - uses: actions/checkout@v4 + - uses: actions/checkout@v6 - name: Install uv - uses: astral-sh/setup-uv@v4 + uses: astral-sh/setup-uv@v7 - name: Set up Python ${{ matrix.python-version }} run: uv python install ${{ matrix.python-version }} @@ -36,16 +37,17 @@ jobs: publish: needs: test runs-on: ubuntu-latest - if: github.event_name == 'push' && startsWith(github.ref, 'refs/tags/v') + if: startsWith(github.ref, 'refs/tags/v') + environment: pypi permissions: id-token: write steps: - - uses: actions/checkout@v4 + - uses: actions/checkout@v6 - name: Install uv - uses: astral-sh/setup-uv@v4 + uses: astral-sh/setup-uv@v7 - name: Build package run: uv build diff --git a/README.md b/README.md index 2cf8d54..0758c5b 100644 --- a/README.md +++ b/README.md @@ -1,20 +1,27 @@ -# ytstudio +# YT Studio CLI [![CI](https://github.com/jdwit/ytstudio/actions/workflows/ci.yml/badge.svg)](https://github.com/jdwit/ytstudio/actions/workflows/ci.yml) +[![PyPI](https://img.shields.io/pypi/v/ytstudio-cli)](https://pypi.org/project/ytstudio-cli/) +[![Python](https://img.shields.io/pypi/pyversions/ytstudio-cli)](https://pypi.org/project/ytstudio-cli/) [![License: MIT](https://img.shields.io/badge/License-MIT-yellow.svg)](https://opensource.org/licenses/MIT) -Manage and analyze your YouTube channel from the terminal. Ideal for agent workflows and automation. +Manage and analyze your YouTube channel from the terminal. Ideal for automation and AI workflows. + +![demo](demo.gif) ## Motivation -I built this tool to bulk update video titles on my channel, something YouTube Studio doesn't support. It uses the YouTube Data API for search-and-replace operations, plus analytics and other channel management features. Simple and scriptable for automating common tasks. +I built this because I needed to bulk update video titles for a YouTube channel I manage with 300+ videos. YouTube +Studio does not support bulk search-replace operations, which made it a tedious manual process. This tool uses the +YouTube Data API to perform bulk operations on video metadata. It also provides analytics and +comment moderation features, all accessible from the command line. ## Installation I recommend the excellent [uv](https://uv.io/) tool for installation: ```bash -uv tool install ytstudio +uv tool install ytstudio-cli ``` ## Setup @@ -40,3 +47,8 @@ ytstudio login ``` Credentials stored in `~/.config/ytstudio/`. + +## Disclaimer + +This project is not affiliated with or endorsed by Google. YouTube and YouTube Studio are trademarks of Google. +All channel data is accessed exclusively through the official [YouTube Data API](https://developers.google.com/youtube/v3) and [YouTube Analytics API](https://developers.google.com/youtube/analytics). 
\ No newline at end of file diff --git a/demo.gif b/demo.gif new file mode 100644 index 0000000..312fa9c Binary files /dev/null and b/demo.gif differ diff --git a/src/ytstudio/auth.py b/src/ytstudio/api.py similarity index 88% rename from src/ytstudio/auth.py rename to src/ytstudio/api.py index 0162c55..3a4510c 100644 --- a/src/ytstudio/auth.py +++ b/src/ytstudio/api.py @@ -1,4 +1,5 @@ import typer +from google.auth.exceptions import RefreshError from google.auth.transport.requests import Request from google.oauth2.credentials import Credentials from google_auth_oauthlib.flow import InstalledAppFlow @@ -25,11 +26,11 @@ def handle_api_error(error: HttpError) -> None: "Quota resets at midnight Pacific Time (PT).\n" "See: https://developers.google.com/youtube/v3/guides/quota_and_compliance_audits" ) - raise SystemExit(1) + raise SystemExit(1) from None if reason == "forbidden": console.print("[red]Access denied. You may not have permission for this action.[/red]") - raise SystemExit(1) + raise SystemExit(1) from None # Re-raise for other errors raise error @@ -45,6 +46,11 @@ def api(request): return request.execute() except HttpError as e: handle_api_error(e) + except RefreshError: + console.print( + "[red]Session expired or revoked.[/red] Run [bold]ytstudio login[/bold] to re-authenticate." + ) + raise SystemExit(1) from None # YouTube API scopes @@ -58,7 +64,7 @@ def api(request): def authenticate() -> None: if not CLIENT_SECRETS_FILE.exists(): console.print("[red]No client secrets found. Run 'ytstudio init' first.[/red]") - raise SystemExit(1) + raise SystemExit(1) from None console.print("[bold]Authenticating with YouTube...[/bold]\n") @@ -112,7 +118,13 @@ def get_credentials() -> Credentials | None: ) if credentials.expired and credentials.refresh_token: - credentials.refresh(Request()) + try: + credentials.refresh(Request()) + except RefreshError: + console.print( + "[red]Session expired or revoked.[/red] Run [bold]ytstudio login[/bold] to re-authenticate." 
+ ) + raise SystemExit(1) from None # Save refreshed credentials creds_data["token"] = credentials.token save_credentials(creds_data) diff --git a/src/ytstudio/commands/analytics.py b/src/ytstudio/commands/analytics.py index c307d56..2fad1fd 100644 --- a/src/ytstudio/commands/analytics.py +++ b/src/ytstudio/commands/analytics.py @@ -1,29 +1,29 @@ +import csv +import io import json from dataclasses import asdict, dataclass from datetime import datetime, timedelta import typer -from ytstudio.auth import api, get_authenticated_service -from ytstudio.demo import DEMO_ANALYTICS, is_demo_mode -from ytstudio.ui import console, create_kv_table, create_table, dim, format_number +from ytstudio.api import api +from ytstudio.registry import ( + DIMENSION_GROUPS, + DIMENSIONS, + METRIC_GROUPS, + METRICS, + DimensionName, + MetricName, + find_closest_dimension, + validate_dimensions, + validate_metrics, +) +from ytstudio.services import get_analytics_service, get_data_service +from ytstudio.ui import console, create_kv_table, create_table, dim, format_number, set_raw_output app = typer.Typer(help="Analytics commands") -@dataclass -class ChannelAnalytics: - views: int - watch_time_hours: int - avg_view_duration: str - subscribers_gained: int - subscribers_lost: int - likes: int - comments: int - impressions: int | None = None - ctr: float | None = None - - @dataclass class VideoAnalytics: views: int @@ -34,44 +34,6 @@ class VideoAnalytics: comments: int -# Available metrics: https://developers.google.com/youtube/analytics/metrics - -CHANNEL_METRICS = ( - "views", - "estimatedMinutesWatched", - "averageViewDuration", - "subscribersGained", - "subscribersLost", - "likes", - "comments", -) - -VIDEO_METRICS = ( - "views", - "estimatedMinutesWatched", - "averageViewDuration", - "averageViewPercentage", - "likes", - "comments", -) - -TOP_VIDEO_METRICS = ( - "views", - "estimatedMinutesWatched", - "likes", -) - - -def get_services(): - if is_demo_mode(): - return None, None - - data_service = get_authenticated_service("youtube", "v3") - analytics_service = get_authenticated_service("youtubeAnalytics", "v2") - - return data_service, analytics_service - - def get_channel_id(service) -> str: response = api(service.channels().list(part="id", mine=True)) if not response.get("items"): @@ -80,52 +42,41 @@ def get_channel_id(service) -> str: return response["items"][0]["id"] -def fetch_channel_analytics(data_service, analytics_service, days: int) -> ChannelAnalytics | None: - if is_demo_mode(): - return ChannelAnalytics( - views=DEMO_ANALYTICS["views"], - watch_time_hours=DEMO_ANALYTICS["watch_time_hours"], - avg_view_duration=DEMO_ANALYTICS["avg_view_duration"], - subscribers_gained=DEMO_ANALYTICS["subscribers_gained"], - subscribers_lost=DEMO_ANALYTICS["subscribers_lost"], - likes=DEMO_ANALYTICS["likes"], - comments=DEMO_ANALYTICS["comments"], - impressions=DEMO_ANALYTICS.get("impressions"), - ctr=DEMO_ANALYTICS.get("ctr"), - ) - - if not analytics_service: - return None - +def fetch_query( + data_service, + analytics_service, + *, + metric_names: list[str], + dimension_names: list[str], + start_date: str, + end_date: str, + days: int, + filters_str: str | None = None, + sort: str | None = None, + max_results: int | None = None, + currency: str | None = None, +) -> dict: channel_id = get_channel_id(data_service) - end_date = datetime.now().strftime("%Y-%m-%d") - start_date = (datetime.now() - timedelta(days=days)).strftime("%Y-%m-%d") - response = api( - analytics_service.reports().query( - 
ids=f"channel=={channel_id}", - startDate=start_date, - endDate=end_date, - metrics=",".join(CHANNEL_METRICS), - ) - ) + query_params = { + "ids": f"channel=={channel_id}", + "startDate": start_date, + "endDate": end_date, + "metrics": ",".join(metric_names), + } - if not response.get("rows"): - return None + if dimension_names: + query_params["dimensions"] = ",".join(dimension_names) + if filters_str: + query_params["filters"] = filters_str + if sort: + query_params["sort"] = sort + if max_results: + query_params["maxResults"] = max_results + if currency: + query_params["currency"] = currency - row = response["rows"][0] - metrics = {h["name"]: row[i] for i, h in enumerate(response["columnHeaders"])} - avg_duration_secs = int(metrics.get("averageViewDuration", 0)) - - return ChannelAnalytics( - views=int(metrics.get("views", 0)), - watch_time_hours=int(metrics.get("estimatedMinutesWatched", 0) / 60), - avg_view_duration=f"{avg_duration_secs // 60}:{avg_duration_secs % 60:02d}", - subscribers_gained=int(metrics.get("subscribersGained", 0)), - subscribers_lost=int(metrics.get("subscribersLost", 0)), - likes=int(metrics.get("likes", 0)), - comments=int(metrics.get("comments", 0)), - ) + return api(analytics_service.reports().query(**query_params)) @app.command() @@ -134,28 +85,62 @@ def overview( output: str = typer.Option("table", "--output", "-o", help="Output format: table, json"), ): """Get channel overview analytics""" - data_service, analytics_service = get_services() - analytics = fetch_channel_analytics(data_service, analytics_service, days) + metric_names = [ + MetricName.VIEWS, + MetricName.ESTIMATED_MINUTES_WATCHED, + MetricName.AVERAGE_VIEW_DURATION, + MetricName.SUBSCRIBERS_GAINED, + MetricName.SUBSCRIBERS_LOST, + MetricName.LIKES, + MetricName.COMMENTS, + ] + + end_date = datetime.now().strftime("%Y-%m-%d") + start_date = (datetime.now() - timedelta(days=days)).strftime("%Y-%m-%d") + + data_service = get_data_service() + analytics_service = get_analytics_service() + response = fetch_query( + data_service, + analytics_service, + metric_names=metric_names, + dimension_names=[], + start_date=start_date, + end_date=end_date, + days=days, + ) + + headers = [h["name"] for h in response.get("columnHeaders", [])] + rows = response.get("rows", []) + + if not rows: + console.print("[yellow]No analytics data available[/yellow]") + return + + metrics = dict(zip(headers, rows[0], strict=False)) if output == "json": - print(json.dumps({"analytics": asdict(analytics), "days": days}, indent=2)) + print(json.dumps({"analytics": metrics, "days": days}, indent=2)) return + views = int(metrics.get("views", 0)) + watch_hours = int(metrics.get("estimatedMinutesWatched", 0)) // 60 + avg_secs = int(metrics.get("averageViewDuration", 0)) + subs_gained = int(metrics.get("subscribersGained", 0)) + subs_lost = int(metrics.get("subscribersLost", 0)) + likes = int(metrics.get("likes", 0)) + comments = int(metrics.get("comments", 0)) + console.print(f"\n[bold]Channel Analytics[/bold] {dim(f'(last {days} days)')}\n") table = create_kv_table() - table.add_row(dim("views"), format_number(analytics.views)) - table.add_row(dim("watch time"), f"{analytics.watch_time_hours} hours") - table.add_row(dim("avg duration"), analytics.avg_view_duration) - table.add_row(dim("subscribers gained"), f"[green]+{analytics.subscribers_gained}[/green]") - table.add_row(dim("subscribers lost"), f"[red]-{analytics.subscribers_lost}[/red]") - table.add_row(dim("likes"), format_number(analytics.likes)) - table.add_row(dim("comments"), 
format_number(analytics.comments)) - - if analytics.impressions: - table.add_row(dim("impressions"), format_number(analytics.impressions)) - if analytics.ctr: - table.add_row(dim("CTR"), f"{analytics.ctr}%") + table.add_row(dim("views"), format_number(views)) + table.add_row(dim("watch time"), f"{watch_hours} hours") + table.add_row(dim("avg duration"), f"{avg_secs // 60}:{avg_secs % 60:02d}") + table.add_row(dim("subscribers gained"), f"[green]+{subs_gained}[/green]") + table.add_row(dim("subscribers lost"), f"[red]-{subs_lost}[/red]") + table.add_row(dim("likes"), format_number(likes)) + table.add_row(dim("comments"), format_number(comments)) console.print(table) @@ -172,7 +157,16 @@ def fetch_video_analytics( ids=f"channel=={channel_id}", startDate=start_date, endDate=end_date, - metrics=",".join(VIDEO_METRICS), + metrics=",".join( + ( + MetricName.VIEWS, + MetricName.ESTIMATED_MINUTES_WATCHED, + MetricName.AVERAGE_VIEW_DURATION, + MetricName.AVERAGE_VIEW_PERCENTAGE, + MetricName.LIKES, + MetricName.COMMENTS, + ) + ), filters=f"video=={video_id}", ) ) @@ -200,7 +194,8 @@ def video( output: str = typer.Option("table", "--output", "-o", help="Output format: table, json"), ): """Get analytics for a specific video""" - data_service, analytics_service = get_services() + data_service = get_data_service() + analytics_service = get_analytics_service() video_response = api(data_service.videos().list(part="snippet,statistics", id=video_id)) @@ -238,96 +233,338 @@ def video( console.print(table) -@app.command() -def traffic( - video_id: str = typer.Argument(..., help="Video ID"), - days: int = typer.Option(28, "--days", "-d", help="Number of days to analyze"), - output: str = typer.Option("table", "--output", "-o", help="Output format: table, json"), -): - """Get traffic source data for a video""" - data_service, analytics_service = get_services() - channel_id = get_channel_id(data_service) - end_date = datetime.now().strftime("%Y-%m-%d") - start_date = (datetime.now() - timedelta(days=days)).strftime("%Y-%m-%d") +# --- Raw query engine --- - response = api( - analytics_service.reports().query( - ids=f"channel=={channel_id}", - startDate=start_date, - endDate=end_date, - metrics="views", - dimensions="insightTrafficSourceType", - filters=f"video=={video_id}", - sort="-views", - ) - ) + +def _parse_comma_list(value: str) -> list[str]: + """Split a comma-separated string, stripping whitespace.""" + return [v.strip() for v in value.split(",") if v.strip()] + + +def _format_query_response(response: dict, output: str) -> None: + headers = [h["name"] for h in response.get("columnHeaders", [])] + rows = response.get("rows", []) if output == "json": - print(json.dumps(response, indent=2)) + records = [dict(zip(headers, row, strict=False)) for row in rows] + print(json.dumps(records, indent=2)) return - if response.get("rows"): - console.print(f"\n[bold]Traffic Sources[/bold] {dim(f'(last {days} days)')}\n") - table = create_table() - table.add_column("Source", style="dim") - table.add_column("Views", justify="right") + if output == "csv": + buf = io.StringIO() + writer = csv.writer(buf) + writer.writerow(headers) + writer.writerows(rows) + print(buf.getvalue(), end="") + return - for row in response["rows"]: - table.add_row(row[0], format_number(int(row[1]))) + # table output + if not rows: + console.print("[yellow]No data returned[/yellow]") + return - console.print(table) - else: - console.print("[yellow]No traffic data available[/yellow]") + table = create_table() + for header in headers: + is_numeric 
= header in METRICS + table.add_column( + header, + justify="right" if is_numeric else "left", + style="yellow" if header in DIMENSIONS else None, + ) + + for row in rows: + table.add_row(*[_format_cell(headers[i], v) for i, v in enumerate(row)]) + + console.print(table) + + +def _format_cell(header: str, value) -> str: + if isinstance(value, int): + return format_number(value) + if isinstance(value, float): + if "rate" in header.lower() or "percentage" in header.lower() or "ctr" in header.lower(): + return f"{value:.2f}%" + if "cpm" in header.lower(): + return f"${value:.2f}" + if value == int(value): + return format_number(int(value)) + return f"{value:.1f}" + return str(value) @app.command() -def top( - days: int = typer.Option(28, "--days", "-d", help="Number of days to analyze"), - limit: int = typer.Option(10, "--limit", "-n", help="Number of videos"), - output: str = typer.Option("table", "--output", "-o", help="Output format: table, json"), +def query( + metrics_str: str = typer.Option( + ..., "--metrics", "-m", help="Comma-separated metrics (e.g. views,likes,shares)" + ), + dimensions_str: str = typer.Option( + None, "--dimensions", "-d", help="Comma-separated dimensions (e.g. day,country)" + ), + filter_list: list[str] = typer.Option( + None, + "--filter", + "-f", + help="Filter in key==value format (repeatable, e.g. -f video==ID -f country==NL)", + ), + start: str = typer.Option( + None, "--start", "-s", help="Start date (YYYY-MM-DD). Defaults to --days ago" + ), + end: str = typer.Option(None, "--end", "-e", help="End date (YYYY-MM-DD). Defaults to today"), + days: int = typer.Option(28, "--days", help="Number of days (used if --start not set)"), + sort: str = typer.Option(None, "--sort", help="Sort field (prefix with - for descending)"), + limit: int = typer.Option(None, "--limit", "-n", help="Maximum number of rows"), + currency: str = typer.Option(None, "--currency", help="Currency code for revenue (e.g. EUR)"), + output: str = typer.Option("table", "--output", "-o", help="Output format: table, json, csv"), + raw: bool = typer.Option(False, "--raw", help="Show raw numbers instead of human-readable"), ): - """Show top performing videos""" - data_service, analytics_service = get_services() - channel_id = get_channel_id(data_service) - end_date = datetime.now().strftime("%Y-%m-%d") - start_date = (datetime.now() - timedelta(days=days)).strftime("%Y-%m-%d") + """Run a custom analytics query with any metrics and dimensions. - response = api( - analytics_service.reports().query( - ids=f"channel=={channel_id}", - startDate=start_date, - endDate=end_date, - metrics=",".join(TOP_VIDEO_METRICS), - dimensions="video", - sort="-views", - maxResults=limit, - ) + Direct access to the YouTube Analytics API reports.query endpoint. + Supports all available metrics and dimensions. 
+ + Examples: + + ytstudio analytics query -m views,likes --dimensions day --days 7 + + ytstudio analytics query -m views,shares -d country --sort -views -n 10 + + ytstudio analytics query -m views,estimatedMinutesWatched -d video \\ + --sort -views -n 5 -o json + + ytstudio analytics query -m videoThumbnailImpressions,videoThumbnailImpressionsClickRate \\ + -d video --sort -videoThumbnailImpressions -n 10 + + ytstudio analytics query -m views -d insightTrafficSourceType \\ + -f video==dMH0bHeiRNg --sort -views + """ + set_raw_output(raw) + + # Parse and validate + metric_names = _parse_comma_list(metrics_str) + if not metric_names: + console.print("[red]At least one metric is required[/red]") + raise typer.Exit(1) + + errors = validate_metrics(metric_names) + if errors: + for err in errors: + console.print(f"[red]{err}[/red]") + console.print("\nRun [bold]ytstudio analytics metrics[/bold] to see available metrics.") + raise typer.Exit(1) + + dimension_names = _parse_comma_list(dimensions_str) if dimensions_str else [] + if dimension_names: + errors = validate_dimensions(dimension_names) + if errors: + for err in errors: + console.print(f"[red]{err}[/red]") + console.print( + "\nRun [bold]ytstudio analytics dimensions[/bold] to see available dimensions." + ) + raise typer.Exit(1) + + # Build filters string + filters_str = None + if filter_list: + for f in filter_list: + if "==" not in f: + console.print(f"[red]Invalid filter format: '{f}'. Use key==value[/red]") + raise typer.Exit(1) + filters_str = ";".join(filter_list) + + # Build dates + end_date = end or datetime.now().strftime("%Y-%m-%d") + start_date = start or (datetime.now() - timedelta(days=days)).strftime("%Y-%m-%d") + + # The video dimension requires sort + maxResults per YouTube API docs + if DimensionName.VIDEO in dimension_names and (not sort or not limit): + missing = [x for x, v in [("--sort", sort), ("--limit", limit)] if not v] + console.print(f"[red]The 'video' dimension requires {' and '.join(missing)}[/red]") + console.print(f"Example: -d video --sort -{metric_names[0]} -n 10") + raise typer.Exit(1) + + # Execute query + data_service = get_data_service() + analytics_service = get_analytics_service() + response = fetch_query( + data_service, + analytics_service, + metric_names=metric_names, + dimension_names=dimension_names, + start_date=start_date, + end_date=end_date, + days=days, + filters_str=filters_str, + sort=sort, + max_results=limit, + currency=currency, ) + _format_query_response(response, output) + + +# --- Discovery commands --- + + +@app.command("metrics") +def list_metrics( + group: str = typer.Option(None, "--group", "-g", help="Filter by group"), + output: str = typer.Option("table", "--output", "-o", help="Output format: table, json"), +): + """List available analytics metrics. + + Examples: + + ytstudio analytics metrics + + ytstudio analytics metrics --group engagement + """ + filtered = METRICS.values() + if group: + if group not in METRIC_GROUPS: + console.print( + f"[red]Unknown group '{group}'. 
Available: {', '.join(METRIC_GROUPS)}[/red]" + ) + raise typer.Exit(1) + filtered = [m for m in filtered if m.group == group] + if output == "json": - print(json.dumps(response, indent=2)) + print( + json.dumps( + [ + { + "name": m.name, + "description": m.description, + "group": m.group, + "core": m.core, + "monetary": m.monetary, + } + for m in filtered + ], + indent=2, + ) + ) return - if response.get("rows"): - video_ids = [row[0] for row in response["rows"]] - videos_response = api(data_service.videos().list(part="snippet", id=",".join(video_ids))) + title = "Available Metrics" + if group: + title += f" ({group})" + console.print(f"\n[bold]{title}[/bold]\n") + + table = create_table() + table.add_column("Metric", style="bold") + table.add_column("Description") + table.add_column("Group", style="dim") + table.add_column("", justify="right") # tags + + for m in filtered: + tags = [] + if m.core: + tags.append("[cyan]core[/cyan]") + if m.monetary: + tags.append("[yellow]$[/yellow]") + table.add_row(m.name, m.description, m.group, " ".join(tags)) - title_map = {v["id"]: v["snippet"]["title"] for v in videos_response.get("items", [])} + console.print(table) - console.print(f"\n[bold]Top {limit} Videos[/bold] {dim(f'(last {days} days)')}\n") - table = create_table() - table.add_column("Title", max_width=40) - table.add_column("Views", justify="right") - table.add_column("Watch time", justify="right", style="dim") + if not group: + console.print(f"\n{dim(f'Groups: {", ".join(METRIC_GROUPS)}')}") + console.print(dim("Filter with --group ")) + console.print() - for row in response["rows"]: - video_id, views, watch_time, _likes = row[0], row[1], row[2], row[3] - title = title_map.get(video_id, video_id) - table.add_row( - title[:40], - format_number(int(views)), - f"{int(watch_time / 60)}h", +@app.command("dimensions") +def list_dimensions( + group: str = typer.Option(None, "--group", "-g", help="Filter by group"), + name: str = typer.Argument(None, help="Show details for a specific dimension"), + output: str = typer.Option("table", "--output", "-o", help="Output format: table, json"), +): + """List available analytics dimensions. + + Examples: + + ytstudio analytics dimensions + + ytstudio analytics dimensions --group geographic + + ytstudio analytics dimensions country + """ + if name: + if name not in DIMENSIONS: + console.print(f"[red]Unknown dimension '{name}'[/red]") + suggestion = find_closest_dimension(name) + if suggestion: + console.print(f"Did you mean [bold]{suggestion}[/bold]?") + raise typer.Exit(1) + + d = DIMENSIONS[name] + if output == "json": + print( + json.dumps( + { + "name": d.name, + "description": d.description, + "group": d.group, + "filter_only": d.filter_only, + }, + indent=2, + ) ) + return + + console.print(f"\n[bold]{d.name}[/bold]") + console.print(f" {d.description}") + console.print(f" group: {dim(d.group)}") + if d.filter_only: + console.print(" [yellow]filter only[/yellow] (cannot be used as a dimension)") + console.print() + return - console.print(table) + # List dimensions + filtered = DIMENSIONS.values() + if group: + if group not in DIMENSION_GROUPS: + console.print( + f"[red]Unknown group '{group}'. 
Available: {', '.join(DIMENSION_GROUPS)}[/red]" + ) + raise typer.Exit(1) + filtered = [d for d in filtered if d.group == group] + + if output == "json": + print( + json.dumps( + [ + { + "name": d.name, + "description": d.description, + "group": d.group, + "filter_only": d.filter_only, + } + for d in filtered + ], + indent=2, + ) + ) + return + + title = "Available Dimensions" + if group: + title += f" ({group})" + console.print(f"\n[bold]{title}[/bold]\n") + + table = create_table() + table.add_column("Dimension", style="bold") + table.add_column("Description") + table.add_column("Group", style="dim") + table.add_column("", justify="right") + + for d in filtered: + tag = "[yellow]filter[/yellow]" if d.filter_only else "" + table.add_row(d.name, d.description, d.group, tag) + + console.print(table) + + if not group: + console.print(f"\n{dim(f'Groups: {", ".join(DIMENSION_GROUPS)}')}") + console.print(dim("Filter with --group ")) + console.print() diff --git a/src/ytstudio/commands/auth.py b/src/ytstudio/commands/auth.py deleted file mode 100644 index 4a09d7e..0000000 --- a/src/ytstudio/commands/auth.py +++ /dev/null @@ -1,26 +0,0 @@ -import typer -from rich.console import Console - -from ytstudio.auth import authenticate, get_status -from ytstudio.auth import logout as do_logout - -app = typer.Typer(help="Authentication commands") -console = Console() - - -@app.command() -def login(): - """Authenticate with YouTube via OAuth""" - authenticate() - - -@app.command() -def logout(): - """Remove stored credentials""" - do_logout() - - -@app.command() -def status(): - """Show current authentication status""" - get_status() diff --git a/src/ytstudio/commands/comments.py b/src/ytstudio/commands/comments.py index 56154d9..a29e7b5 100644 --- a/src/ytstudio/commands/comments.py +++ b/src/ytstudio/commands/comments.py @@ -5,9 +5,9 @@ import typer from googleapiclient.errors import HttpError -from ytstudio.auth import api, get_authenticated_service, handle_api_error -from ytstudio.demo import DEMO_COMMENTS, is_demo_mode -from ytstudio.ui import console, time_ago, truncate +from ytstudio.api import api, handle_api_error +from ytstudio.services import get_data_service +from ytstudio.ui import console, create_table, time_ago, truncate app = typer.Typer(help="Comment commands") @@ -37,10 +37,6 @@ class Comment: video_id: str = "" -def get_service(): - return get_authenticated_service() - - def get_channel_id(service) -> str: response = api(service.channels().list(part="id", mine=True)) if not response.get("items"): @@ -56,21 +52,6 @@ def fetch_comments( order: SortOrder = SortOrder.relevance, moderation_status: ModerationStatus = ModerationStatus.published, ) -> list[Comment]: - if is_demo_mode(): - return [ - Comment( - id=c.get("id", f"comment_{i}"), - author=c["author"], - text=c["text"], - likes=c["likes"], - published_at=c["published"].isoformat() - if hasattr(c["published"], "isoformat") - else c["published"], - video_id=c.get("video_id", "demo_video"), - ) - for i, c in enumerate(DEMO_COMMENTS[:limit]) - ] - try: # Build query parameters based on filters params = { @@ -80,13 +61,12 @@ def fetch_comments( } if video_id: - # Video-specific query (moderationStatus not supported) params["videoId"] = video_id else: - # Channel-wide query (supports moderation filtering) channel_id = get_channel_id(data_service) params["allThreadsRelatedToChannelId"] = channel_id - params["moderationStatus"] = moderation_status.to_api_value() + if moderation_status != ModerationStatus.published: + 
params["moderationStatus"] = moderation_status.to_api_value() response = api(data_service.commentThreads().list(**params)) except HttpError as e: @@ -118,11 +98,15 @@ def list_comments( ModerationStatus.published, "--status", help="Moderation status: published, held, spam" ), limit: int = typer.Option(20, "--limit", "-n", help="Number of comments"), - sort: SortOrder = typer.Option(SortOrder.relevance, "--sort", "-s", help="Sort order"), + sort: SortOrder = typer.Option(SortOrder.time, "--sort", "-s", help="Sort order"), output: str = typer.Option("table", "--output", "-o", help="Output format: table, json"), ): """List comments across channel or for a specific video""" - service = get_service() + if sort == SortOrder.relevance and not video_id: + console.print("[red]--sort relevance requires --video (YouTube API limitation)[/red]") + raise typer.Exit(1) + + service = get_data_service() comments = fetch_comments(service, video_id, limit, sort, status) if output == "json": @@ -134,12 +118,59 @@ def list_comments( scope = f"video {video_id}" if video_id else "channel" console.print(f"\n[bold]{label} Comments ({len(comments)})[/bold] — {scope}\n") + table = create_table() + table.add_column("ID", style="yellow") + if not video_id: + table.add_column("Video", style="cyan") + table.add_column("Author") + table.add_column("Posted") + table.add_column("Comment") + for c in comments: - text = truncate(c.text, 150) + date = f"{c.published_at[:16].replace('T', ' ')} ({time_ago(c.published_at)})" + row = [c.id] + if not video_id: + row.append(c.video_id) + row += [c.author, date, truncate(c.text, 80)] + table.add_row(*row) + + console.print(table) + + +def _set_moderation_status(comment_ids: list[str], status: str, ban_author: bool = False) -> int: + service = get_data_service() + success = 0 + batch_size = 50 + for i in range(0, len(comment_ids), batch_size): + batch = comment_ids[i : i + batch_size] + try: + params = { + "id": ",".join(batch), + "moderationStatus": status, + } + if ban_author and status == "rejected": + params["banAuthor"] = True + api(service.comments().setModerationStatus(**params)) + success += len(batch) + except HttpError as e: + handle_api_error(e) + return success + + +@app.command() +def publish( + comment_ids: list[str] = typer.Argument(help="Comment IDs to publish"), +): + """Publish held comments (approve for public display)""" + count = _set_moderation_status(comment_ids, "published") + console.print(f"{count} comment(s) published") - like_str = f" [dim]({c.likes} likes)[/dim]" if c.likes else "" - video_str = f" [dim cyan]on {c.video_id}[/dim cyan]" if c.video_id and not video_id else "" - console.print( - f"[bold]{c.author}[/bold]{like_str}{video_str} [dim]{time_ago(c.published_at)}[/dim]" - ) - console.print(f" {text}\n") + +@app.command() +def reject( + comment_ids: list[str] = typer.Argument(help="Comment IDs to reject"), + ban: bool = typer.Option(False, "--ban", help="Also ban the comment author"), +): + """Reject comments (hide from public display)""" + count = _set_moderation_status(comment_ids, "rejected", ban_author=ban) + console.print(f"{count} comment(s) rejected") diff --git a/src/ytstudio/commands/videos.py b/src/ytstudio/commands/videos.py index 6a4fcbc..56ff50a 100644 --- a/src/ytstudio/commands/videos.py +++ b/src/ytstudio/commands/videos.py @@ -5,8 +5,8 @@ import typer from googleapiclient.errors import HttpError -from ytstudio.auth import api, get_authenticated_service, handle_api_error -from ytstudio.demo import DEMO_VIDEOS, get_demo_video, 
is_demo_mode +from ytstudio.api import api, handle_api_error +from ytstudio.services import get_data_service from ytstudio.ui import ( console, create_kv_table, @@ -57,12 +57,6 @@ def format_duration(iso_duration: str) -> str: return f"{minutes}:{seconds:02d}" -def get_service(): - if is_demo_mode(): - return None - return get_authenticated_service() - - def get_channel_uploads_playlist(service) -> str: response = api(service.channels().list(part="contentDetails", mine=True)) if not response.get("items"): @@ -72,26 +66,6 @@ def get_channel_uploads_playlist(service) -> str: def fetch_video(data_service, video_id: str) -> Video | None: - if is_demo_mode(): - demo = get_demo_video(video_id) - if not demo: - return None - return Video( - id=demo["id"], - title=demo["title"], - description=demo.get("description", ""), - published_at=demo["published"].strftime("%Y-%m-%dT%H:%M:%SZ"), - views=demo["views"], - likes=demo["likes"], - comments=demo["comments"], - duration=demo["duration"], - privacy=demo["privacy"], - tags=demo.get("tags", []), - default_language=demo.get("defaultLanguage"), - default_audio_language=demo.get("defaultAudioLanguage"), - localizations=demo.get("localizations", {}), - ) - response = api( data_service.videos().list( part="snippet,statistics,contentDetails,status,localizations", @@ -127,27 +101,6 @@ def fetch_video(data_service, video_id: str) -> Video | None: def fetch_videos( data_service, limit: int = 50, page_token: str | None = None ) -> dict[str, list[Video] | str | int | None]: - if is_demo_mode(): - videos = [ - Video( - id=v["id"], - title=v["title"], - description=v.get("description", ""), - published_at=v["published"].strftime("%Y-%m-%dT%H:%M:%SZ"), - views=v["views"], - likes=v["likes"], - comments=v["comments"], - privacy=v["privacy"], - tags=v.get("tags", []), - duration=v["duration"], - localizations=v.get("localizations", {}), - default_language=v.get("defaultLanguage"), - default_audio_language=v.get("defaultAudioLanguage"), - ) - for v in DEMO_VIDEOS[:limit] - ] - return {"videos": videos, "next_page_token": None, "total_results": len(DEMO_VIDEOS)} - uploads_playlist_id = get_channel_uploads_playlist(data_service) all_videos = [] @@ -243,7 +196,7 @@ def list_videos( ), ): """List your YouTube videos""" - service = get_service() + service = get_data_service() result = fetch_videos(service, limit, page_token) videos: list[Video] = result["videos"] @@ -312,7 +265,7 @@ def show( output: str = typer.Option("table", "--output", "-o", help="Output format: table, json"), ): """Show details for a specific video""" - service = get_service() + service = get_data_service() video = fetch_video(service, video_id) if not video: @@ -365,7 +318,7 @@ def update( ) raise typer.Exit(1) - service = get_service() + service = get_data_service() response = api(service.videos().list(part="snippet", id=video_id)) if not response.get("items"): @@ -413,7 +366,7 @@ def search_replace( execute: bool = typer.Option(False, "--execute", help="Apply changes (default is dry-run)"), ): """Bulk update videos using search and replace""" - service = get_service() + service = get_data_service() uploads_playlist_id = get_channel_uploads_playlist(service) changes = [] diff --git a/src/ytstudio/demo.py b/src/ytstudio/demo.py index 1cff940..2537f0c 100644 --- a/src/ytstudio/demo.py +++ b/src/ytstudio/demo.py @@ -1,356 +1,188 @@ -"""Demo mode with mock data for screencasts""" - +import functools +import json import os +import time from datetime import UTC, datetime, timedelta +from pathlib import 
Path DEMO_MODE = os.environ.get("YTSTUDIO_DEMO", "").lower() in ("1", "true", "yes") -# Demo data based on Fireship (https://youtube.com/@Fireship) -# Used with appreciation for educational purposes - -DEMO_CHANNEL = { - "id": "UCsBjURrPoezykLs9EqgamOA", - "title": "Fireship", - "subscribers": 4060000, - "videos": 850, - "views": 654000000, -} - -DEMO_VIDEOS = [ - { - "id": "zQnBQ4tB3ZA", - "title": "TypeScript in 100 Seconds", - "published": datetime(2020, 11, 25), - "views": 3200000, - "likes": 98000, - "comments": 2400, - "duration": "PT2M1S", - "privacy": "public", - "tags": ["typescript", "javascript", "100SecondsOfCode", "programming"], - "description": "Learn the basics of TypeScript in 100 seconds...", - "defaultLanguage": "en", - "defaultAudioLanguage": "en", - "localizations": { - "en": { - "title": "TypeScript in 100 Seconds", - "description": "Learn the basics of TypeScript in 100 seconds...", +_DATA_DIR = Path(__file__).parent / "demo_data" + + +@functools.cache +def _load(name: str) -> dict: + return json.loads((_DATA_DIR / name).read_text()) + + +class DemoRequest: + def __init__(self, response: dict, delay: float = 0): + self._response = response + self._delay = delay + + def execute(self) -> dict: + if self._delay: + time.sleep(self._delay) + return self._response + + +class _DemoChannels: + def list(self, **kwargs): + return DemoRequest(_load("channel.json")) + + +class _DemoVideos: + def list(self, **kwargs): + id_param = kwargs.get("id", "") + requested_ids = [i.strip() for i in id_param.split(",") if i.strip()] + + if not requested_ids: + return DemoRequest(_load("videos.json")) + + matched = [v for v in _load("videos.json")["items"] if v["id"] in requested_ids] + return DemoRequest({"items": matched}) + + def update(self, **kwargs): + body = kwargs.get("body", {}) + return DemoRequest(body, delay=0.3) + + +class _DemoPlaylistItems: + def list(self, **kwargs): + max_results = kwargs.get("maxResults", 50) + data = _load("playlist_items.json") + items = data["items"][:max_results] + return DemoRequest( + { + "items": items, + "pageInfo": {"totalResults": len(data["items"])}, } - }, - }, - { - "id": "lHhRhPV--G0", - "title": "Flutter in 100 Seconds", - "published": datetime(2020, 4, 14), - "views": 2100000, - "likes": 72000, - "comments": 1800, - "duration": "PT2M8S", - "privacy": "public", - "tags": ["flutter", "dart", "mobile", "100SecondsOfCode"], - "description": "Build apps on iOS, Android, the web, and desktop with Flutter...", - }, - { - "id": "rf60MejMz3E", - "title": "Recursion in 100 Seconds", - "published": datetime(2019, 12, 30), - "views": 1800000, - "likes": 65000, - "comments": 1500, - "duration": "PT1M48S", - "privacy": "public", - "tags": ["recursion", "algorithms", "100SecondsOfCode", "compsci"], - "description": "Learn how recursion works in 100 seconds...", - }, - { - "id": "Ata9cSC2WpM", - "title": "React in 100 Seconds", - "published": datetime(2021, 5, 12), - "views": 4500000, - "likes": 125000, - "comments": 3200, - "duration": "PT2M15S", - "privacy": "public", - "tags": ["react", "javascript", "frontend", "100SecondsOfCode"], - "description": "Learn the basics of React in 100 seconds...", - }, - { - "id": "w7ejDZ8SWv8", - "title": "God-Tier Developer Roadmap", - "published": datetime(2022, 8, 15), - "views": 6800000, - "likes": 215000, - "comments": 8500, - "duration": "PT11M42S", - "privacy": "public", - "tags": ["roadmap", "developer", "career", "programming"], - "description": "The mass extinction satisfies both business and our lizard 
brain...", - }, - { - "id": "dQw4w9WgXcQ", - "title": "Python in 100 Seconds", - "published": datetime(2021, 3, 8), - "views": 5200000, - "likes": 142000, - "comments": 4100, - "duration": "PT2M12S", - "privacy": "public", - "tags": ["python", "programming", "100SecondsOfCode", "backend"], - "description": "Learn Python in 100 seconds...", - }, - { - "id": "7C2z4GqqS5E", - "title": "Rust in 100 Seconds", - "published": datetime(2021, 7, 22), - "views": 2800000, - "likes": 89000, - "comments": 2200, - "duration": "PT2M24S", - "privacy": "public", - "tags": ["rust", "systems", "100SecondsOfCode", "programming"], - "description": "Rust is a blazingly fast systems programming language...", - }, - { - "id": "8C3z4GqqS5F", - "title": "I built the same app 10 times", - "published": datetime(2023, 1, 18), - "views": 3900000, - "likes": 156000, - "comments": 5600, - "duration": "PT14M33S", - "privacy": "public", - "tags": ["frameworks", "comparison", "webdev", "javascript"], - "description": "Which JavaScript framework is best?...", - }, - { - "id": "9D4z5HrrT6G", - "title": "Docker in 100 Seconds", - "published": datetime(2020, 8, 3), - "views": 2400000, - "likes": 78000, - "comments": 1900, - "duration": "PT2M6S", - "privacy": "public", - "tags": ["docker", "devops", "100SecondsOfCode", "containers"], - "description": "Containerize your applications with Docker...", - }, - { - "id": "0E5z6IssU7H", - "title": "Kubernetes in 100 Seconds", - "published": datetime(2021, 2, 15), - "views": 1900000, - "likes": 62000, - "comments": 1400, - "duration": "PT2M18S", - "privacy": "public", - "tags": ["kubernetes", "k8s", "devops", "100SecondsOfCode"], - "description": "Orchestrate containers at scale with K8s...", - }, - { - "id": "1F6z7JttV8I", - "title": "GraphQL in 100 Seconds", - "published": datetime(2020, 6, 20), - "views": 1600000, - "likes": 54000, - "comments": 1200, - "duration": "PT2M4S", - "privacy": "public", - "tags": ["graphql", "api", "100SecondsOfCode", "backend"], - "description": "A query language for your API...", - }, - { - "id": "2G7z8KuuW9J", - "title": "10 CSS tricks you didn't know", - "published": datetime(2022, 11, 5), - "views": 1100000, - "likes": 45000, - "comments": 890, - "duration": "PT8M22S", - "privacy": "public", - "tags": ["css", "frontend", "webdev", "tricks"], - "description": "Level up your CSS game with these tips...", - }, - { - "id": "3H8z9LvvX0K", - "title": "SQL Explained in 100 Seconds", - "published": datetime(2021, 9, 12), - "views": 2100000, - "likes": 71000, - "comments": 1600, - "duration": "PT2M9S", - "privacy": "public", - "tags": ["sql", "database", "100SecondsOfCode", "backend"], - "description": "The most important language for data...", - }, - { - "id": "4I9z0MwwY1L", - "title": "Linux in 100 Seconds", - "published": datetime(2020, 10, 28), - "views": 2600000, - "likes": 82000, - "comments": 2100, - "duration": "PT2M15S", - "privacy": "public", - "tags": ["linux", "os", "100SecondsOfCode", "devops"], - "description": "The operating system that runs the world...", - }, - { - "id": "5J0z1NxxZ2M", - "title": "AI is getting satisfying", - "published": datetime(2024, 2, 1), - "views": 890000, - "likes": 38000, - "comments": 1200, - "duration": "PT5M44S", - "privacy": "public", - "tags": ["ai", "code", "news", "tech"], - "description": "This week in AI and code...", - }, - { - "id": "6K1z2OyyA3N", - "title": "Next.js in 100 Seconds", - "published": datetime(2021, 11, 3), - "views": 1800000, - "likes": 58000, - "comments": 1300, - "duration": "PT2M11S", - 
"privacy": "public", - "tags": ["nextjs", "react", "100SecondsOfCode", "fullstack"], - "description": "The React framework for production...", - }, - { - "id": "7L2z3PzzB4O", - "title": "Git in 100 Seconds", - "published": datetime(2020, 5, 18), - "views": 3100000, - "likes": 95000, - "comments": 2300, - "duration": "PT2M3S", - "privacy": "public", - "tags": ["git", "vcs", "100SecondsOfCode", "devtools"], - "description": "Version control for the modern developer...", - }, - { - "id": "8M3z4QaaC5P", - "title": "MongoDB in 100 Seconds", - "published": datetime(2021, 4, 7), - "views": 1500000, - "likes": 49000, - "comments": 1100, - "duration": "PT2M7S", - "privacy": "public", - "tags": ["mongodb", "nosql", "100SecondsOfCode", "database"], - "description": "A document database built for modern apps...", - }, - { - "id": "9N4z5RbbD6Q", - "title": "AWS in 100 Seconds", - "published": datetime(2022, 3, 14), - "views": 1400000, - "likes": 46000, - "comments": 980, - "duration": "PT2M19S", - "privacy": "public", - "tags": ["aws", "cloud", "100SecondsOfCode", "devops"], - "description": "The cloud platform that powers the internet...", - }, - { - "id": "0O5z6SccE7R", - "title": "Svelte in 100 Seconds", - "published": datetime(2021, 6, 25), - "views": 1700000, - "likes": 56000, - "comments": 1250, - "duration": "PT2M5S", - "privacy": "public", - "tags": ["svelte", "frontend", "100SecondsOfCode", "javascript"], - "description": "A radical new approach to building UIs...", - }, -] - -DEMO_ANALYTICS = { - "views": 2500000, - "watch_time_hours": 185000, - "subscribers_gained": 45000, - "subscribers_lost": 3200, - "likes": 89000, - "comments": 6200, - "shares": 12000, - "avg_view_duration": "3:45", - "ctr": 12.5, - "impressions": 20000000, -} - -DEMO_COMMENTS = [ - { - "id": "Ugw1abc123", - "author": "CodeNewbie", - "text": "This is the best explanation I've ever seen! 🔥", - "likes": 1542, - "published": datetime.now(UTC) - timedelta(hours=2), - "video_id": "zQnBQ4tB3ZA", - }, - { - "id": "Ugw2def456", - "author": "DevSenior", - "text": "100 seconds well spent. 
Subscribed!", - "likes": 856, - "published": datetime.now(UTC) - timedelta(hours=5), - "video_id": "Ata9cSC2WpM", - }, - { - "id": "Ugw3ghi789", - "author": "TechEnthusiast", - "text": "Fireship videos are like coffee for developers ☕", - "likes": 2341, - "published": datetime.now(UTC) - timedelta(hours=8), - "video_id": "w7ejDZ8SWv8", - }, - { - "id": "Ugw4jkl012", - "author": "JuniorDev2024", - "text": "Finally understand this after watching 10 other tutorials", - "likes": 634, - "published": datetime.now(UTC) - timedelta(days=1), - "video_id": "zQnBQ4tB3ZA", - }, - { - "id": "Ugw5mno345", - "author": "FullStackFan", - "text": "The production quality is insane for these short videos", - "likes": 421, - "published": datetime.now(UTC) - timedelta(days=1), - "video_id": "dQw4w9WgXcQ", - }, -] - -DEMO_SEO = { - "score": 92, - "title_length": 28, - "description_length": 850, - "tags_count": 15, - "has_thumbnail": True, - "has_end_screen": True, - "has_cards": True, - "issues": [ - "Consider a longer title for better discoverability", - ], - "passed": [ - "Strong keyword in title", - "Description well-optimized", - "Good tag coverage", - "Custom thumbnail", - "End screen configured", - "Cards added", - ], -} - - -def get_demo_video(video_id: str) -> dict | None: - for video in DEMO_VIDEOS: - if video["id"] == video_id: - return video - # Return first video as fallback for any ID in demo mode - return DEMO_VIDEOS[0] if DEMO_VIDEOS else None + ) + + +class _DemoCommentThreads: + def list(self, **kwargs): + max_results = kwargs.get("maxResults", 100) + video_id = kwargs.get("videoId") + + items = _load("comments.json")["items"] + if video_id: + items = [ + c + for c in items + if c["snippet"]["topLevelComment"]["snippet"].get("videoId") == video_id + ] + + return DemoRequest({"items": items[:max_results]}) + + +class _DemoComments: + def setModerationStatus(self, **kwargs): + return DemoRequest({}) + + +class DemoDataService: + def channels(self): + return _DemoChannels() + + def videos(self): + return _DemoVideos() + + def playlistItems(self): + return _DemoPlaylistItems() + + def comments(self): + return _DemoComments() + + def commentThreads(self): + return _DemoCommentThreads() + + +class _DemoReports: + def query(self, **params): + metrics = [m.strip() for m in params.get("metrics", "").split(",") if m.strip()] + dimensions = [d.strip() for d in params.get("dimensions", "").split(",") if d.strip()] + filters = params.get("filters", "") + sort = params.get("sort", "") + max_results = params.get("maxResults") + + analytics = _load("analytics_metrics.json") + video_metrics = {k: v for k, v in analytics.items() if k != "countries"} + countries = analytics.get("countries", ["US", "IN", "GB", "DE", "BR"]) + + headers = _make_column_headers(dimensions, metrics) + + def _metric_vals(base: dict) -> list: + return [base.get(m, 0) for m in metrics] + + rows = [] + + # Single video filter + filter_video_id = None + if filters: + for part in filters.split(";"): + if part.startswith("video=="): + filter_video_id = part.split("==", 1)[1] + + if not dimensions: + totals = {m: 0 for m in metrics} + sources = video_metrics + if filter_video_id and filter_video_id in video_metrics: + sources = {filter_video_id: video_metrics[filter_video_id]} + for base in sources.values(): + for m in metrics: + totals[m] += base.get(m, 0) + rows.append([totals[m] for m in metrics]) + + elif dimensions == ["day"]: + today = datetime.now(UTC).date() + n_days = 7 + for i in range(n_days, 0, -1): + date_str = (today - 
timedelta(days=i)).strftime("%Y-%m-%d") + vid = list(video_metrics.values())[i % len(video_metrics)] + rows.append([date_str, *_metric_vals(vid)]) + + elif dimensions == ["country"]: + for j, country in enumerate(countries): + vid = list(video_metrics.values())[j % len(video_metrics)] + rows.append([country, *_metric_vals(vid)]) + + elif "video" in dimensions: + for vid_id, base in video_metrics.items(): + rows.append([vid_id, *_metric_vals(base)]) + + else: + totals = {m: 0 for m in metrics} + for base in video_metrics.values(): + for m in metrics: + totals[m] += base.get(m, 0) + dim_vals = ["unknown"] * len(dimensions) + rows.append([*dim_vals, *[totals[m] for m in metrics]]) + + # Apply sort + if sort and rows and dimensions: + sort_desc = sort.startswith("-") + sort_field = sort.lstrip("-") + all_names = dimensions + metrics + if sort_field in all_names: + idx = all_names.index(sort_field) + rows.sort(key=lambda r: r[idx], reverse=sort_desc) + + if max_results and len(rows) > max_results: + rows = rows[:max_results] + + return DemoRequest({"columnHeaders": headers, "rows": rows}) + + +def _make_column_headers(dimensions: list[str], metrics: list[str]) -> list[dict]: + return [{"name": d, "columnType": "DIMENSION", "dataType": "STRING"} for d in dimensions] + [ + {"name": m, "columnType": "METRIC", "dataType": "INTEGER"} for m in metrics + ] + + +class DemoAnalyticsService: + def reports(self): + return _DemoReports() def is_demo_mode() -> bool: diff --git a/src/ytstudio/demo_data/analytics_metrics.json b/src/ytstudio/demo_data/analytics_metrics.json new file mode 100644 index 0000000..cac1909 --- /dev/null +++ b/src/ytstudio/demo_data/analytics_metrics.json @@ -0,0 +1,103 @@ +{ + "zQnBQ4tB3ZA": { + "views": 106666, "likes": 3266, "comments": 80, "shares": 1633, + "estimatedMinutesWatched": 213333, "averageViewDuration": 135, + "averageViewPercentage": 62.5, "subscribersGained": 490, "subscribersLost": 49 + }, + "lHhRhPV--G0": { + "views": 70000, "likes": 2400, "comments": 60, "shares": 1200, + "estimatedMinutesWatched": 140000, "averageViewDuration": 135, + "averageViewPercentage": 62.5, "subscribersGained": 360, "subscribersLost": 36 + }, + "rf60MejMz3E": { + "views": 60000, "likes": 2166, "comments": 50, "shares": 1083, + "estimatedMinutesWatched": 120000, "averageViewDuration": 135, + "averageViewPercentage": 62.5, "subscribersGained": 325, "subscribersLost": 32 + }, + "Ata9cSC2WpM": { + "views": 150000, "likes": 4166, "comments": 106, "shares": 2083, + "estimatedMinutesWatched": 300000, "averageViewDuration": 135, + "averageViewPercentage": 62.5, "subscribersGained": 625, "subscribersLost": 62 + }, + "w7ejDZ8SWv8": { + "views": 226666, "likes": 7166, "comments": 283, "shares": 3583, + "estimatedMinutesWatched": 453333, "averageViewDuration": 135, + "averageViewPercentage": 62.5, "subscribersGained": 1075, "subscribersLost": 107 + }, + "dQw4w9WgXcQ": { + "views": 173000, "likes": 5540, "comments": 155, "shares": 2770, + "estimatedMinutesWatched": 346000, "averageViewDuration": 128, + "averageViewPercentage": 59.3, "subscribersGained": 780, "subscribersLost": 78 + }, + "7C2z4GqqS5E": { + "views": 93000, "likes": 2976, "comments": 74, "shares": 1488, + "estimatedMinutesWatched": 195300, "averageViewDuration": 142, + "averageViewPercentage": 65.7, "subscribersGained": 418, "subscribersLost": 41 + }, + "8C3z4GqqS5F": { + "views": 130000, "likes": 5200, "comments": 210, "shares": 2340, + "estimatedMinutesWatched": 390000, "averageViewDuration": 180, + "averageViewPercentage": 55.2, 
"subscribersGained": 585, "subscribersLost": 58 + }, + "9D4z5HrrT6G": { + "views": 80000, "likes": 2640, "comments": 62, "shares": 1280, + "estimatedMinutesWatched": 160000, "averageViewDuration": 131, + "averageViewPercentage": 61.8, "subscribersGained": 352, "subscribersLost": 35 + }, + "0E5z6IssU7H": { + "views": 63000, "likes": 2016, "comments": 48, "shares": 945, + "estimatedMinutesWatched": 132300, "averageViewDuration": 138, + "averageViewPercentage": 63.9, "subscribersGained": 283, "subscribersLost": 28 + }, + "1F6z7JttV8I": { + "views": 53000, "likes": 1749, "comments": 42, "shares": 795, + "estimatedMinutesWatched": 106000, "averageViewDuration": 126, + "averageViewPercentage": 58.1, "subscribersGained": 238, "subscribersLost": 23 + }, + "2G7z8KuuW9J": { + "views": 36000, "likes": 1440, "comments": 68, "shares": 720, + "estimatedMinutesWatched": 108000, "averageViewDuration": 195, + "averageViewPercentage": 52.8, "subscribersGained": 162, "subscribersLost": 16 + }, + "3H8z9LvvX0K": { + "views": 70000, "likes": 2310, "comments": 56, "shares": 1050, + "estimatedMinutesWatched": 140000, "averageViewDuration": 132, + "averageViewPercentage": 60.9, "subscribersGained": 315, "subscribersLost": 31 + }, + "4I9z0MwwY1L": { + "views": 86000, "likes": 2838, "comments": 69, "shares": 1376, + "estimatedMinutesWatched": 180600, "averageViewDuration": 140, + "averageViewPercentage": 64.4, "subscribersGained": 387, "subscribersLost": 38 + }, + "5J0z1NxxZ2M": { + "views": 29000, "likes": 1218, "comments": 95, "shares": 580, + "estimatedMinutesWatched": 72500, "averageViewDuration": 168, + "averageViewPercentage": 56.4, "subscribersGained": 145, "subscribersLost": 14 + }, + "6K1z2OyyA3N": { + "views": 60000, "likes": 1920, "comments": 46, "shares": 900, + "estimatedMinutesWatched": 126000, "averageViewDuration": 134, + "averageViewPercentage": 61.2, "subscribersGained": 270, "subscribersLost": 27 + }, + "7L2z3PzzB4O": { + "views": 103000, "likes": 3296, "comments": 82, "shares": 1545, + "estimatedMinutesWatched": 206000, "averageViewDuration": 130, + "averageViewPercentage": 60.4, "subscribersGained": 463, "subscribersLost": 46 + }, + "8M3z4QaaC5P": { + "views": 50000, "likes": 1650, "comments": 40, "shares": 750, + "estimatedMinutesWatched": 105000, "averageViewDuration": 133, + "averageViewPercentage": 61.5, "subscribersGained": 225, "subscribersLost": 22 + }, + "9N4z5RbbD6Q": { + "views": 46000, "likes": 1518, "comments": 36, "shares": 690, + "estimatedMinutesWatched": 100280, "averageViewDuration": 144, + "averageViewPercentage": 66.2, "subscribersGained": 207, "subscribersLost": 20 + }, + "0O5z6SccE7R": { + "views": 56000, "likes": 1848, "comments": 44, "shares": 840, + "estimatedMinutesWatched": 117600, "averageViewDuration": 131, + "averageViewPercentage": 60.5, "subscribersGained": 252, "subscribersLost": 25 + }, + "countries": ["US", "IN", "GB", "DE", "BR"] +} diff --git a/src/ytstudio/demo_data/channel.json b/src/ytstudio/demo_data/channel.json new file mode 100644 index 0000000..509f879 --- /dev/null +++ b/src/ytstudio/demo_data/channel.json @@ -0,0 +1,21 @@ +{ + "items": [ + { + "id": "UCsBjURrPoezykLs9EqgamOA", + "snippet": { + "title": "Fireship", + "customUrl": "@Fireship" + }, + "statistics": { + "subscriberCount": "4060000", + "viewCount": "654000000", + "videoCount": "850" + }, + "contentDetails": { + "relatedPlaylists": { + "uploads": "UUsBjURrPoezykLs9EqgamOA" + } + } + } + ] +} diff --git a/src/ytstudio/demo_data/comments.json b/src/ytstudio/demo_data/comments.json new file 
mode 100644 index 0000000..bb667c3 --- /dev/null +++ b/src/ytstudio/demo_data/comments.json @@ -0,0 +1,284 @@ +{ + "items": [ + { + "id": "Ugw1abc123", + "snippet": { + "topLevelComment": { + "snippet": { + "authorDisplayName": "CodeNewbie", + "textOriginal": "This is the best explanation I've ever seen!", + "likeCount": 1542, + "publishedAt": "2026-01-15T10:00:00Z", + "videoId": "zQnBQ4tB3ZA" + } + } + } + }, + { + "id": "Ugw2def456", + "snippet": { + "topLevelComment": { + "snippet": { + "authorDisplayName": "DevSenior", + "textOriginal": "100 seconds well spent. Subscribed!", + "likeCount": 856, + "publishedAt": "2026-01-15T07:00:00Z", + "videoId": "Ata9cSC2WpM" + } + } + } + }, + { + "id": "Ugw3ghi789", + "snippet": { + "topLevelComment": { + "snippet": { + "authorDisplayName": "EarnBigNow", + "textOriginal": "I make $5,000/week from home! Visit my profile for the FREE method", + "likeCount": 0, + "publishedAt": "2026-01-15T04:00:00Z", + "videoId": "zQnBQ4tB3ZA" + } + } + } + }, + { + "id": "Ugw4jkl012", + "snippet": { + "topLevelComment": { + "snippet": { + "authorDisplayName": "JuniorDev2024", + "textOriginal": "Finally understand this after watching 10 other tutorials", + "likeCount": 634, + "publishedAt": "2026-01-14T12:00:00Z", + "videoId": "zQnBQ4tB3ZA" + } + } + } + }, + { + "id": "Ugw5mno345", + "snippet": { + "topLevelComment": { + "snippet": { + "authorDisplayName": "CryptoKing99", + "textOriginal": "CHECK OUT MY CHANNEL for daily crypto signals!! 100% guaranteed gains!!!", + "likeCount": 0, + "publishedAt": "2026-01-14T12:00:00Z", + "videoId": "dQw4w9WgXcQ" + } + } + } + }, + { + "id": "Ugw6pqr678", + "snippet": { + "topLevelComment": { + "snippet": { + "authorDisplayName": "RustEvangelist", + "textOriginal": "The borrow checker is your friend, not your enemy. Great intro!", + "likeCount": 1203, + "publishedAt": "2026-01-14T09:30:00Z", + "videoId": "7C2z4GqqS5E" + } + } + } + }, + { + "id": "Ugw7stu901", + "snippet": { + "topLevelComment": { + "snippet": { + "authorDisplayName": "CloudArchitect", + "textOriginal": "I wish someone showed me Docker this clearly 5 years ago", + "likeCount": 742, + "publishedAt": "2026-01-13T22:15:00Z", + "videoId": "9D4z5HrrT6G" + } + } + } + }, + { + "id": "Ugw8vwx234", + "snippet": { + "topLevelComment": { + "snippet": { + "authorDisplayName": "DataEngineer42", + "textOriginal": "SQL is still king in 2026. This aged perfectly.", + "likeCount": 489, + "publishedAt": "2026-01-13T18:45:00Z", + "videoId": "3H8z9LvvX0K" + } + } + } + }, + { + "id": "Ugw9yza567", + "snippet": { + "topLevelComment": { + "snippet": { + "authorDisplayName": "LinuxGuru", + "textOriginal": "btw I use Arch. But seriously, great summary.", + "likeCount": 1876, + "publishedAt": "2026-01-13T15:00:00Z", + "videoId": "4I9z0MwwY1L" + } + } + } + }, + { + "id": "Ugw10bcd890", + "snippet": { + "topLevelComment": { + "snippet": { + "authorDisplayName": "FrontendWizard", + "textOriginal": "The CSS container queries trick blew my mind", + "likeCount": 367, + "publishedAt": "2026-01-13T11:20:00Z", + "videoId": "2G7z8KuuW9J" + } + } + } + }, + { + "id": "Ugw11efg123", + "snippet": { + "topLevelComment": { + "snippet": { + "authorDisplayName": "StartupFounder", + "textOriginal": "Showed this to my entire team. 
Next.js adoption starts Monday.", + "likeCount": 523, + "publishedAt": "2026-01-12T20:30:00Z", + "videoId": "6K1z2OyyA3N" + } + } + } + }, + { + "id": "Ugw12hij456", + "snippet": { + "topLevelComment": { + "snippet": { + "authorDisplayName": "GitMaster", + "textOriginal": "git rebase --interactive is the real superpower nobody talks about", + "likeCount": 934, + "publishedAt": "2026-01-12T16:00:00Z", + "videoId": "7L2z3PzzB4O" + } + } + } + }, + { + "id": "Ugw13klm789", + "snippet": { + "topLevelComment": { + "snippet": { + "authorDisplayName": "BackendDev", + "textOriginal": "MongoDB is great until you need joins. Then you cry.", + "likeCount": 2105, + "publishedAt": "2026-01-12T12:45:00Z", + "videoId": "8M3z4QaaC5P" + } + } + } + }, + { + "id": "Ugw14nop012", + "snippet": { + "topLevelComment": { + "snippet": { + "authorDisplayName": "DevOpsNinja", + "textOriginal": "K8s in 100 seconds but it takes 100 days to master", + "likeCount": 1458, + "publishedAt": "2026-01-11T21:00:00Z", + "videoId": "0E5z6IssU7H" + } + } + } + }, + { + "id": "Ugw15qrs345", + "snippet": { + "topLevelComment": { + "snippet": { + "authorDisplayName": "AIResearcher", + "textOriginal": "The pace of AI progress is genuinely hard to keep up with", + "likeCount": 312, + "publishedAt": "2026-01-11T14:30:00Z", + "videoId": "5J0z1NxxZ2M" + } + } + } + }, + { + "id": "Ugw16tuv678", + "snippet": { + "topLevelComment": { + "snippet": { + "authorDisplayName": "WebDevDaily", + "textOriginal": "Svelte compiles away the framework. Still blows my mind every time.", + "likeCount": 678, + "publishedAt": "2026-01-11T08:15:00Z", + "videoId": "0O5z6SccE7R" + } + } + } + }, + { + "id": "Ugw17wxy901", + "snippet": { + "topLevelComment": { + "snippet": { + "authorDisplayName": "Pythonista", + "textOriginal": "Python: where indentation is not a style choice, it's the law", + "likeCount": 1789, + "publishedAt": "2026-01-10T19:00:00Z", + "videoId": "dQw4w9WgXcQ" + } + } + } + }, + { + "id": "Ugw18zab234", + "snippet": { + "topLevelComment": { + "snippet": { + "authorDisplayName": "FrameworkHopper", + "textOriginal": "Built the same app 10 times and still can't decide which framework to use", + "likeCount": 2567, + "publishedAt": "2026-01-10T13:45:00Z", + "videoId": "8C3z4GqqS5F" + } + } + } + }, + { + "id": "Ugw19cde567", + "snippet": { + "topLevelComment": { + "snippet": { + "authorDisplayName": "AWSCertified", + "textOriginal": "My AWS bill after following this tutorial: $0.03. 
Worth it.", + "likeCount": 445, + "publishedAt": "2026-01-10T07:30:00Z", + "videoId": "9N4z5RbbD6Q" + } + } + } + }, + { + "id": "Ugw20fgh890", + "snippet": { + "topLevelComment": { + "snippet": { + "authorDisplayName": "FlutterDev", + "textOriginal": "Hot reload alone makes Flutter worth learning", + "likeCount": 398, + "publishedAt": "2026-01-09T16:00:00Z", + "videoId": "lHhRhPV--G0" + } + } + } + } + ] +} diff --git a/src/ytstudio/demo_data/playlist_items.json b/src/ytstudio/demo_data/playlist_items.json new file mode 100644 index 0000000..3f468c5 --- /dev/null +++ b/src/ytstudio/demo_data/playlist_items.json @@ -0,0 +1,26 @@ +{ + "items": [ + {"snippet": {"title": "TypeScript in 100 Seconds", "publishedAt": "2020-11-25T00:00:00Z"}, "contentDetails": {"videoId": "zQnBQ4tB3ZA"}}, + {"snippet": {"title": "Flutter in 100 Seconds", "publishedAt": "2020-04-14T00:00:00Z"}, "contentDetails": {"videoId": "lHhRhPV--G0"}}, + {"snippet": {"title": "Recursion in 100 Seconds", "publishedAt": "2019-12-30T00:00:00Z"}, "contentDetails": {"videoId": "rf60MejMz3E"}}, + {"snippet": {"title": "React in 100 Seconds", "publishedAt": "2021-05-12T00:00:00Z"}, "contentDetails": {"videoId": "Ata9cSC2WpM"}}, + {"snippet": {"title": "God-Tier Developer Roadmap", "publishedAt": "2022-08-15T00:00:00Z"}, "contentDetails": {"videoId": "w7ejDZ8SWv8"}}, + {"snippet": {"title": "Python in 100 Seconds", "publishedAt": "2021-03-08T00:00:00Z"}, "contentDetails": {"videoId": "dQw4w9WgXcQ"}}, + {"snippet": {"title": "Rust in 100 Seconds", "publishedAt": "2021-07-22T00:00:00Z"}, "contentDetails": {"videoId": "7C2z4GqqS5E"}}, + {"snippet": {"title": "I built the same app 10 times", "publishedAt": "2023-01-18T00:00:00Z"}, "contentDetails": {"videoId": "8C3z4GqqS5F"}}, + {"snippet": {"title": "Docker in 100 Seconds", "publishedAt": "2020-08-03T00:00:00Z"}, "contentDetails": {"videoId": "9D4z5HrrT6G"}}, + {"snippet": {"title": "Kubernetes in 100 Seconds", "publishedAt": "2021-02-15T00:00:00Z"}, "contentDetails": {"videoId": "0E5z6IssU7H"}}, + {"snippet": {"title": "GraphQL in 100 Seconds", "publishedAt": "2020-06-20T00:00:00Z"}, "contentDetails": {"videoId": "1F6z7JttV8I"}}, + {"snippet": {"title": "10 CSS tricks you didn't know", "publishedAt": "2022-11-05T00:00:00Z"}, "contentDetails": {"videoId": "2G7z8KuuW9J"}}, + {"snippet": {"title": "SQL Explained in 100 Seconds", "publishedAt": "2021-09-12T00:00:00Z"}, "contentDetails": {"videoId": "3H8z9LvvX0K"}}, + {"snippet": {"title": "Linux in 100 Seconds", "publishedAt": "2020-10-28T00:00:00Z"}, "contentDetails": {"videoId": "4I9z0MwwY1L"}}, + {"snippet": {"title": "AI is getting satisfying", "publishedAt": "2024-02-01T00:00:00Z"}, "contentDetails": {"videoId": "5J0z1NxxZ2M"}}, + {"snippet": {"title": "Next.js in 100 Seconds", "publishedAt": "2021-11-03T00:00:00Z"}, "contentDetails": {"videoId": "6K1z2OyyA3N"}}, + {"snippet": {"title": "Git in 100 Seconds", "publishedAt": "2020-05-18T00:00:00Z"}, "contentDetails": {"videoId": "7L2z3PzzB4O"}}, + {"snippet": {"title": "MongoDB in 100 Seconds", "publishedAt": "2021-04-07T00:00:00Z"}, "contentDetails": {"videoId": "8M3z4QaaC5P"}}, + {"snippet": {"title": "AWS in 100 Seconds", "publishedAt": "2022-03-14T00:00:00Z"}, "contentDetails": {"videoId": "9N4z5RbbD6Q"}}, + {"snippet": {"title": "Svelte in 100 Seconds", "publishedAt": "2021-06-25T00:00:00Z"}, "contentDetails": {"videoId": "0O5z6SccE7R"}} + ], + "pageInfo": {"totalResults": 20}, + "nextPageToken": null +} diff --git a/src/ytstudio/demo_data/videos.json 
b/src/ytstudio/demo_data/videos.json new file mode 100644 index 0000000..6c63910 --- /dev/null +++ b/src/ytstudio/demo_data/videos.json @@ -0,0 +1,451 @@ +{ + "items": [ + { + "id": "zQnBQ4tB3ZA", + "snippet": { + "title": "TypeScript in 100 Seconds", + "description": "Learn the basics of TypeScript in 100 seconds...", + "publishedAt": "2020-11-25T00:00:00Z", + "tags": ["typescript", "javascript", "100SecondsOfCode", "programming"], + "categoryId": "28", + "defaultLanguage": "en", + "defaultAudioLanguage": "en" + }, + "statistics": { + "viewCount": "3200000", + "likeCount": "98000", + "commentCount": "2400" + }, + "contentDetails": { + "duration": "PT2M1S" + }, + "status": { + "privacyStatus": "public" + }, + "localizations": { + "en": { + "title": "TypeScript in 100 Seconds", + "description": "Learn the basics of TypeScript in 100 seconds..." + } + } + }, + { + "id": "lHhRhPV--G0", + "snippet": { + "title": "Flutter in 100 Seconds", + "description": "Build apps on iOS, Android, the web, and desktop with Flutter...", + "publishedAt": "2020-04-14T00:00:00Z", + "tags": ["flutter", "dart", "mobile", "100SecondsOfCode"], + "categoryId": "28" + }, + "statistics": { + "viewCount": "2100000", + "likeCount": "72000", + "commentCount": "1800" + }, + "contentDetails": { + "duration": "PT2M8S" + }, + "status": { + "privacyStatus": "public" + }, + "localizations": {} + }, + { + "id": "rf60MejMz3E", + "snippet": { + "title": "Recursion in 100 Seconds", + "description": "Learn how recursion works in 100 seconds...", + "publishedAt": "2019-12-30T00:00:00Z", + "tags": ["recursion", "algorithms", "100SecondsOfCode", "compsci"], + "categoryId": "28" + }, + "statistics": { + "viewCount": "1800000", + "likeCount": "65000", + "commentCount": "1500" + }, + "contentDetails": { + "duration": "PT1M48S" + }, + "status": { + "privacyStatus": "public" + }, + "localizations": {} + }, + { + "id": "Ata9cSC2WpM", + "snippet": { + "title": "React in 100 Seconds", + "description": "Learn the basics of React in 100 seconds...", + "publishedAt": "2021-05-12T00:00:00Z", + "tags": ["react", "javascript", "frontend", "100SecondsOfCode"], + "categoryId": "28" + }, + "statistics": { + "viewCount": "4500000", + "likeCount": "125000", + "commentCount": "3200" + }, + "contentDetails": { + "duration": "PT2M15S" + }, + "status": { + "privacyStatus": "public" + }, + "localizations": {} + }, + { + "id": "w7ejDZ8SWv8", + "snippet": { + "title": "God-Tier Developer Roadmap", + "description": "The mass extinction satisfies both business and our lizard brain...", + "publishedAt": "2022-08-15T00:00:00Z", + "tags": ["roadmap", "developer", "career", "programming"], + "categoryId": "28" + }, + "statistics": { + "viewCount": "6800000", + "likeCount": "215000", + "commentCount": "8500" + }, + "contentDetails": { + "duration": "PT11M42S" + }, + "status": { + "privacyStatus": "public" + }, + "localizations": {} + }, + { + "id": "dQw4w9WgXcQ", + "snippet": { + "title": "Python in 100 Seconds", + "description": "Learn Python in 100 seconds...", + "publishedAt": "2021-03-08T00:00:00Z", + "tags": ["python", "programming", "100SecondsOfCode", "backend"], + "categoryId": "28" + }, + "statistics": { + "viewCount": "5200000", + "likeCount": "142000", + "commentCount": "4100" + }, + "contentDetails": { + "duration": "PT2M12S" + }, + "status": { + "privacyStatus": "public" + }, + "localizations": {} + }, + { + "id": "7C2z4GqqS5E", + "snippet": { + "title": "Rust in 100 Seconds", + "description": "Rust is a blazingly fast systems programming language...", + 
"publishedAt": "2021-07-22T00:00:00Z", + "tags": ["rust", "systems", "100SecondsOfCode", "programming"], + "categoryId": "28" + }, + "statistics": { + "viewCount": "2800000", + "likeCount": "89000", + "commentCount": "2200" + }, + "contentDetails": { + "duration": "PT2M24S" + }, + "status": { + "privacyStatus": "public" + }, + "localizations": {} + }, + { + "id": "8C3z4GqqS5F", + "snippet": { + "title": "I built the same app 10 times", + "description": "Which JavaScript framework is best?...", + "publishedAt": "2023-01-18T00:00:00Z", + "tags": ["frameworks", "comparison", "webdev", "javascript"], + "categoryId": "28" + }, + "statistics": { + "viewCount": "3900000", + "likeCount": "156000", + "commentCount": "5600" + }, + "contentDetails": { + "duration": "PT14M33S" + }, + "status": { + "privacyStatus": "public" + }, + "localizations": {} + }, + { + "id": "9D4z5HrrT6G", + "snippet": { + "title": "Docker in 100 Seconds", + "description": "Containerize your applications with Docker...", + "publishedAt": "2020-08-03T00:00:00Z", + "tags": ["docker", "devops", "100SecondsOfCode", "containers"], + "categoryId": "28" + }, + "statistics": { + "viewCount": "2400000", + "likeCount": "78000", + "commentCount": "1900" + }, + "contentDetails": { + "duration": "PT2M6S" + }, + "status": { + "privacyStatus": "public" + }, + "localizations": {} + }, + { + "id": "0E5z6IssU7H", + "snippet": { + "title": "Kubernetes in 100 Seconds", + "description": "Orchestrate containers at scale with K8s...", + "publishedAt": "2021-02-15T00:00:00Z", + "tags": ["kubernetes", "k8s", "devops", "100SecondsOfCode"], + "categoryId": "28" + }, + "statistics": { + "viewCount": "1900000", + "likeCount": "62000", + "commentCount": "1400" + }, + "contentDetails": { + "duration": "PT2M18S" + }, + "status": { + "privacyStatus": "public" + }, + "localizations": {} + }, + { + "id": "1F6z7JttV8I", + "snippet": { + "title": "GraphQL in 100 Seconds", + "description": "A query language for your API...", + "publishedAt": "2020-06-20T00:00:00Z", + "tags": ["graphql", "api", "100SecondsOfCode", "backend"], + "categoryId": "28" + }, + "statistics": { + "viewCount": "1600000", + "likeCount": "54000", + "commentCount": "1200" + }, + "contentDetails": { + "duration": "PT2M4S" + }, + "status": { + "privacyStatus": "public" + }, + "localizations": {} + }, + { + "id": "2G7z8KuuW9J", + "snippet": { + "title": "10 CSS tricks you didn't know", + "description": "Level up your CSS game with these tips...", + "publishedAt": "2022-11-05T00:00:00Z", + "tags": ["css", "frontend", "webdev", "tricks"], + "categoryId": "28" + }, + "statistics": { + "viewCount": "1100000", + "likeCount": "45000", + "commentCount": "890" + }, + "contentDetails": { + "duration": "PT8M22S" + }, + "status": { + "privacyStatus": "public" + }, + "localizations": {} + }, + { + "id": "3H8z9LvvX0K", + "snippet": { + "title": "SQL Explained in 100 Seconds", + "description": "The most important language for data...", + "publishedAt": "2021-09-12T00:00:00Z", + "tags": ["sql", "database", "100SecondsOfCode", "backend"], + "categoryId": "28" + }, + "statistics": { + "viewCount": "2100000", + "likeCount": "71000", + "commentCount": "1600" + }, + "contentDetails": { + "duration": "PT2M9S" + }, + "status": { + "privacyStatus": "public" + }, + "localizations": {} + }, + { + "id": "4I9z0MwwY1L", + "snippet": { + "title": "Linux in 100 Seconds", + "description": "The operating system that runs the world...", + "publishedAt": "2020-10-28T00:00:00Z", + "tags": ["linux", "os", "100SecondsOfCode", 
"devops"], + "categoryId": "28" + }, + "statistics": { + "viewCount": "2600000", + "likeCount": "82000", + "commentCount": "2100" + }, + "contentDetails": { + "duration": "PT2M15S" + }, + "status": { + "privacyStatus": "public" + }, + "localizations": {} + }, + { + "id": "5J0z1NxxZ2M", + "snippet": { + "title": "AI is getting satisfying", + "description": "This week in AI and code...", + "publishedAt": "2024-02-01T00:00:00Z", + "tags": ["ai", "code", "news", "tech"], + "categoryId": "28" + }, + "statistics": { + "viewCount": "890000", + "likeCount": "38000", + "commentCount": "1200" + }, + "contentDetails": { + "duration": "PT5M44S" + }, + "status": { + "privacyStatus": "public" + }, + "localizations": {} + }, + { + "id": "6K1z2OyyA3N", + "snippet": { + "title": "Next.js in 100 Seconds", + "description": "The React framework for production...", + "publishedAt": "2021-11-03T00:00:00Z", + "tags": ["nextjs", "react", "100SecondsOfCode", "fullstack"], + "categoryId": "28" + }, + "statistics": { + "viewCount": "1800000", + "likeCount": "58000", + "commentCount": "1300" + }, + "contentDetails": { + "duration": "PT2M11S" + }, + "status": { + "privacyStatus": "public" + }, + "localizations": {} + }, + { + "id": "7L2z3PzzB4O", + "snippet": { + "title": "Git in 100 Seconds", + "description": "Version control for the modern developer...", + "publishedAt": "2020-05-18T00:00:00Z", + "tags": ["git", "vcs", "100SecondsOfCode", "devtools"], + "categoryId": "28" + }, + "statistics": { + "viewCount": "3100000", + "likeCount": "95000", + "commentCount": "2300" + }, + "contentDetails": { + "duration": "PT2M3S" + }, + "status": { + "privacyStatus": "public" + }, + "localizations": {} + }, + { + "id": "8M3z4QaaC5P", + "snippet": { + "title": "MongoDB in 100 Seconds", + "description": "A document database built for modern apps...", + "publishedAt": "2021-04-07T00:00:00Z", + "tags": ["mongodb", "nosql", "100SecondsOfCode", "database"], + "categoryId": "28" + }, + "statistics": { + "viewCount": "1500000", + "likeCount": "49000", + "commentCount": "1100" + }, + "contentDetails": { + "duration": "PT2M7S" + }, + "status": { + "privacyStatus": "public" + }, + "localizations": {} + }, + { + "id": "9N4z5RbbD6Q", + "snippet": { + "title": "AWS in 100 Seconds", + "description": "The cloud platform that powers the internet...", + "publishedAt": "2022-03-14T00:00:00Z", + "tags": ["aws", "cloud", "100SecondsOfCode", "devops"], + "categoryId": "28" + }, + "statistics": { + "viewCount": "1400000", + "likeCount": "46000", + "commentCount": "980" + }, + "contentDetails": { + "duration": "PT2M19S" + }, + "status": { + "privacyStatus": "public" + }, + "localizations": {} + }, + { + "id": "0O5z6SccE7R", + "snippet": { + "title": "Svelte in 100 Seconds", + "description": "A radical new approach to building UIs...", + "publishedAt": "2021-06-25T00:00:00Z", + "tags": ["svelte", "frontend", "100SecondsOfCode", "javascript"], + "categoryId": "28" + }, + "statistics": { + "viewCount": "1700000", + "likeCount": "56000", + "commentCount": "1250" + }, + "contentDetails": { + "duration": "PT2M5S" + }, + "status": { + "privacyStatus": "public" + }, + "localizations": {} + } + ] +} diff --git a/src/ytstudio/main.py b/src/ytstudio/main.py index 5f48a86..1978981 100644 --- a/src/ytstudio/main.py +++ b/src/ytstudio/main.py @@ -3,8 +3,8 @@ import typer from rich.console import Console -from ytstudio.auth import authenticate, get_status -from ytstudio.commands import analytics, auth, comments, videos +from ytstudio.api import authenticate, 
get_status +from ytstudio.commands import analytics, comments, videos from ytstudio.config import setup_credentials from ytstudio.version import get_current_version, is_update_available @@ -17,7 +17,6 @@ console = Console() -app.add_typer(auth.app, name="auth") app.add_typer(videos.app, name="videos") app.add_typer(analytics.app, name="analytics") app.add_typer(comments.app, name="comments") diff --git a/src/ytstudio/registry.py b/src/ytstudio/registry.py new file mode 100644 index 0000000..1e487f9 --- /dev/null +++ b/src/ytstudio/registry.py @@ -0,0 +1,440 @@ +# Reference: https://developers.google.com/youtube/analytics/metrics +# https://developers.google.com/youtube/analytics/dimensions + +from dataclasses import dataclass +from enum import StrEnum + + +class MetricName(StrEnum): + # View metrics + VIEWS = "views" + ENGAGED_VIEWS = "engagedViews" + RED_VIEWS = "redViews" + VIEWER_PERCENTAGE = "viewerPercentage" + # Reach metrics + VIDEO_THUMBNAIL_IMPRESSIONS = "videoThumbnailImpressions" + VIDEO_THUMBNAIL_IMPRESSIONS_CLICK_RATE = "videoThumbnailImpressionsClickRate" + # Watch time metrics + ESTIMATED_MINUTES_WATCHED = "estimatedMinutesWatched" + ESTIMATED_RED_MINUTES_WATCHED = "estimatedRedMinutesWatched" + AVERAGE_VIEW_DURATION = "averageViewDuration" + AVERAGE_VIEW_PERCENTAGE = "averageViewPercentage" + # Engagement metrics + LIKES = "likes" + DISLIKES = "dislikes" + COMMENTS = "comments" + SHARES = "shares" + SUBSCRIBERS_GAINED = "subscribersGained" + SUBSCRIBERS_LOST = "subscribersLost" + VIDEOS_ADDED_TO_PLAYLISTS = "videosAddedToPlaylists" + VIDEOS_REMOVED_FROM_PLAYLISTS = "videosRemovedFromPlaylists" + # Card metrics + CARD_IMPRESSIONS = "cardImpressions" + CARD_CLICKS = "cardClicks" + CARD_CLICK_RATE = "cardClickRate" + CARD_TEASER_IMPRESSIONS = "cardTeaserImpressions" + CARD_TEASER_CLICKS = "cardTeaserClicks" + CARD_TEASER_CLICK_RATE = "cardTeaserClickRate" + # Annotation metrics + ANNOTATION_IMPRESSIONS = "annotationImpressions" + ANNOTATION_CLICKS = "annotationClicks" + ANNOTATION_CLICK_THROUGH_RATE = "annotationClickThroughRate" + ANNOTATION_CLOSABLE_IMPRESSIONS = "annotationClosableImpressions" + ANNOTATION_CLOSES = "annotationCloses" + ANNOTATION_CLOSE_RATE = "annotationCloseRate" + ANNOTATION_CLICKABLE_IMPRESSIONS = "annotationClickableImpressions" + # Revenue metrics + ESTIMATED_REVENUE = "estimatedRevenue" + ESTIMATED_AD_REVENUE = "estimatedAdRevenue" + GROSS_REVENUE = "grossRevenue" + ESTIMATED_RED_PARTNER_REVENUE = "estimatedRedPartnerRevenue" + MONETIZED_PLAYBACKS = "monetizedPlaybacks" + PLAYBACK_BASED_CPM = "playbackBasedCpm" + AD_IMPRESSIONS = "adImpressions" + CPM = "cpm" + # Playlist metrics (in-playlist) + PLAYLIST_VIEWS = "playlistViews" + PLAYLIST_STARTS = "playlistStarts" + VIEWS_PER_PLAYLIST_START = "viewsPerPlaylistStart" + AVERAGE_TIME_IN_PLAYLIST = "averageTimeInPlaylist" + PLAYLIST_SAVES = "playlistSaves" + PLAYLIST_ESTIMATED_MINUTES_WATCHED = "playlistEstimatedMinutesWatched" + PLAYLIST_AVERAGE_VIEW_DURATION = "playlistAverageViewDuration" + # Unique viewers + UNIQUES = "uniques" + + +class DimensionName(StrEnum): + # Time + DAY = "day" + MONTH = "month" + # Geographic + COUNTRY = "country" + PROVINCE = "province" + CITY = "city" + CONTINENT = "continent" + SUB_CONTINENT = "subContinent" + DMA = "dma" + # Content + VIDEO = "video" + PLAYLIST = "playlist" + GROUP = "group" + CREATOR_CONTENT_TYPE = "creatorContentType" + # Traffic sources + INSIGHT_TRAFFIC_SOURCE_TYPE = "insightTrafficSourceType" + INSIGHT_TRAFFIC_SOURCE_DETAIL = 
"insightTrafficSourceDetail" + # Playback + PLAYBACK_LOCATION_TYPE = "playbackLocationType" + LIVE_OR_ON_DEMAND = "liveOrOnDemand" + # Device + DEVICE_TYPE = "deviceType" + OPERATING_SYSTEM = "operatingSystem" + # Audience + AGE_GROUP = "ageGroup" + GENDER = "gender" + SUBSCRIBED_STATUS = "subscribedStatus" + YOUTUBE_PRODUCT = "youtubeProduct" + # Sharing + SHARING_SERVICE = "sharingService" + # Ads + AD_TYPE = "adType" + + +@dataclass(frozen=True) +class Metric: + name: MetricName + description: str + group: str + core: bool = False + monetary: bool = False # requires yt-analytics-monetary.readonly scope + + +@dataclass(frozen=True) +class Dimension: + name: DimensionName + description: str + group: str + filter_only: bool = False # can only be used as filter, not as dimension + + +# --- Metrics --- + +METRICS: dict[MetricName, Metric] = { + m.name: m + for m in [ + # View metrics + Metric(MetricName.VIEWS, "Number of times videos were viewed", "views", core=True), + Metric(MetricName.ENGAGED_VIEWS, "Views past the initial seconds", "views", core=True), + Metric(MetricName.RED_VIEWS, "Views by YouTube Premium members", "views"), + Metric(MetricName.VIEWER_PERCENTAGE, "Percentage of logged-in viewers", "views", core=True), + # Reach metrics + Metric( + MetricName.VIDEO_THUMBNAIL_IMPRESSIONS, + "Times thumbnails were shown to viewers", + "reach", + ), + Metric( + MetricName.VIDEO_THUMBNAIL_IMPRESSIONS_CLICK_RATE, + "Percentage of impressions that became views (CTR)", + "reach", + ), + # Watch time metrics + Metric( + MetricName.ESTIMATED_MINUTES_WATCHED, + "Total minutes watched", + "watch_time", + core=True, + ), + Metric( + MetricName.ESTIMATED_RED_MINUTES_WATCHED, + "Minutes watched by YouTube Premium members", + "watch_time", + ), + Metric( + MetricName.AVERAGE_VIEW_DURATION, + "Average playback length in seconds", + "watch_time", + core=True, + ), + Metric( + MetricName.AVERAGE_VIEW_PERCENTAGE, + "Average percentage of video watched", + "watch_time", + ), + # Engagement metrics + Metric(MetricName.LIKES, "Number of likes", "engagement", core=True), + Metric(MetricName.DISLIKES, "Number of dislikes", "engagement", core=True), + Metric(MetricName.COMMENTS, "Number of comments", "engagement", core=True), + Metric(MetricName.SHARES, "Number of shares via the Share button", "engagement", core=True), + Metric(MetricName.SUBSCRIBERS_GAINED, "New subscribers gained", "engagement", core=True), + Metric(MetricName.SUBSCRIBERS_LOST, "Subscribers lost", "engagement", core=True), + Metric( + MetricName.VIDEOS_ADDED_TO_PLAYLISTS, + "Times videos were added to any playlist", + "engagement", + ), + Metric( + MetricName.VIDEOS_REMOVED_FROM_PLAYLISTS, + "Times videos were removed from any playlist", + "engagement", + ), + # Card metrics + Metric(MetricName.CARD_IMPRESSIONS, "Number of card impressions", "cards"), + Metric(MetricName.CARD_CLICKS, "Number of card clicks", "cards"), + Metric(MetricName.CARD_CLICK_RATE, "Card click-through rate", "cards"), + Metric(MetricName.CARD_TEASER_IMPRESSIONS, "Number of card teaser impressions", "cards"), + Metric(MetricName.CARD_TEASER_CLICKS, "Number of card teaser clicks", "cards"), + Metric(MetricName.CARD_TEASER_CLICK_RATE, "Card teaser click-through rate", "cards"), + # Annotation metrics + Metric(MetricName.ANNOTATION_IMPRESSIONS, "Total annotation impressions", "annotations"), + Metric(MetricName.ANNOTATION_CLICKS, "Number of annotation clicks", "annotations"), + Metric( + MetricName.ANNOTATION_CLICK_THROUGH_RATE, + "Annotation click-through rate", + 
"annotations", + core=True, + ), + Metric( + MetricName.ANNOTATION_CLOSABLE_IMPRESSIONS, + "Closable annotation impressions", + "annotations", + ), + Metric(MetricName.ANNOTATION_CLOSES, "Number of annotation closes", "annotations"), + Metric(MetricName.ANNOTATION_CLOSE_RATE, "Annotation close rate", "annotations", core=True), + Metric( + MetricName.ANNOTATION_CLICKABLE_IMPRESSIONS, + "Clickable annotation impressions", + "annotations", + ), + # Revenue metrics + Metric( + MetricName.ESTIMATED_REVENUE, + "Estimated total net revenue", + "revenue", + core=True, + monetary=True, + ), + Metric( + MetricName.ESTIMATED_AD_REVENUE, + "Estimated ad net revenue", + "revenue", + monetary=True, + ), + Metric( + MetricName.GROSS_REVENUE, + "Estimated gross revenue from ads", + "revenue", + monetary=True, + ), + Metric( + MetricName.ESTIMATED_RED_PARTNER_REVENUE, + "Estimated YouTube Premium revenue", + "revenue", + monetary=True, + ), + Metric( + MetricName.MONETIZED_PLAYBACKS, + "Playbacks that showed at least one ad", + "revenue", + monetary=True, + ), + Metric( + MetricName.PLAYBACK_BASED_CPM, + "Estimated gross revenue per 1000 playbacks", + "revenue", + monetary=True, + ), + Metric( + MetricName.AD_IMPRESSIONS, + "Number of verified ad impressions", + "revenue", + monetary=True, + ), + Metric( + MetricName.CPM, + "Estimated gross revenue per 1000 ad impressions", + "revenue", + monetary=True, + ), + # Playlist metrics (in-playlist) + Metric(MetricName.PLAYLIST_VIEWS, "Video views in the context of a playlist", "playlist"), + Metric( + MetricName.PLAYLIST_STARTS, + "Number of times playlist playback was initiated", + "playlist", + ), + Metric( + MetricName.VIEWS_PER_PLAYLIST_START, + "Average views per playlist start", + "playlist", + ), + Metric( + MetricName.AVERAGE_TIME_IN_PLAYLIST, + "Average time (min) viewers spent in playlist", + "playlist", + ), + Metric(MetricName.PLAYLIST_SAVES, "Net number of playlist saves", "playlist"), + Metric( + MetricName.PLAYLIST_ESTIMATED_MINUTES_WATCHED, + "Minutes watched in playlist context", + "playlist", + ), + Metric( + MetricName.PLAYLIST_AVERAGE_VIEW_DURATION, + "Average video view length in playlist context", + "playlist", + ), + # Unique viewers + Metric(MetricName.UNIQUES, "Estimated unique viewers", "audience"), + ] +} + +# --- Dimensions --- + +DIMENSIONS: dict[DimensionName, Dimension] = { + d.name: d + for d in [ + # Time + Dimension(DimensionName.DAY, "Date in YYYY-MM-DD format", "time"), + Dimension(DimensionName.MONTH, "Month in YYYY-MM format", "time"), + # Geographic + Dimension(DimensionName.COUNTRY, "Two-letter ISO 3166-1 country code", "geographic"), + Dimension( + DimensionName.PROVINCE, + "US state (ISO 3166-2, requires country==US filter)", + "geographic", + ), + Dimension(DimensionName.CITY, "Estimated city (available from 2022-01-01)", "geographic"), + Dimension( + DimensionName.CONTINENT, "UN statistical region code", "geographic", filter_only=True + ), + Dimension( + DimensionName.SUB_CONTINENT, "UN sub-region code", "geographic", filter_only=True + ), + Dimension(DimensionName.DMA, "Nielsen Designated Market Area (3-digit)", "geographic"), + # Content + Dimension(DimensionName.VIDEO, "YouTube video ID", "content"), + Dimension(DimensionName.PLAYLIST, "YouTube playlist ID", "content"), + Dimension(DimensionName.GROUP, "YouTube Analytics group ID", "content", filter_only=True), + Dimension( + DimensionName.CREATOR_CONTENT_TYPE, + "Content type: shorts, videos, or live", + "content", + ), + # Traffic sources + 
Dimension(DimensionName.INSIGHT_TRAFFIC_SOURCE_TYPE, "Traffic source category", "traffic"), + Dimension( + DimensionName.INSIGHT_TRAFFIC_SOURCE_DETAIL, + "Specific traffic source (search term, URL)", + "traffic", + ), + # Playback + Dimension( + DimensionName.PLAYBACK_LOCATION_TYPE, + "Where the video was played (watch page, embed, etc)", + "playback", + ), + Dimension( + DimensionName.LIVE_OR_ON_DEMAND, + "Whether content was live or on-demand", + "playback", + ), + # Device + Dimension( + DimensionName.DEVICE_TYPE, + "Device type (mobile, desktop, tablet, tv, etc)", + "device", + ), + Dimension(DimensionName.OPERATING_SYSTEM, "Operating system", "device"), + # Audience + Dimension(DimensionName.AGE_GROUP, "Viewer age group", "audience"), + Dimension(DimensionName.GENDER, "Viewer gender", "audience"), + Dimension(DimensionName.SUBSCRIBED_STATUS, "Whether viewer is subscribed", "audience"), + Dimension( + DimensionName.YOUTUBE_PRODUCT, + "YouTube product (main, shorts, music, etc)", + "audience", + ), + # Sharing + Dimension( + DimensionName.SHARING_SERVICE, + "Service used to share (whatsapp, twitter, etc)", + "sharing", + ), + # Ads + Dimension(DimensionName.AD_TYPE, "Type of ad that ran during playback", "ads"), + ] +} + +METRIC_GROUPS = sorted({m.group for m in METRICS.values()}) +DIMENSION_GROUPS = sorted({d.group for d in DIMENSIONS.values()}) + + +def find_closest_metric(name: str, max_distance: int = 3) -> str | None: + """Find the closest matching metric name for typo suggestions.""" + return _find_closest(name, list(METRICS.keys()), max_distance) + + +def find_closest_dimension(name: str, max_distance: int = 3) -> str | None: + """Find the closest matching dimension name for typo suggestions.""" + return _find_closest(name, list(DIMENSIONS.keys()), max_distance) + + +def _find_closest(name: str, candidates: list[str], max_distance: int) -> str | None: + """Simple Levenshtein-based closest match.""" + best = None + best_dist = max_distance + 1 + + for candidate in candidates: + dist = _levenshtein(name.lower(), candidate.lower()) + if dist < best_dist: + best_dist = dist + best = candidate + + return best if best_dist <= max_distance else None + + +def _levenshtein(s1: str, s2: str) -> int: + if len(s1) < len(s2): + return _levenshtein(s2, s1) + + if len(s2) == 0: + return len(s1) + + prev_row = range(len(s2) + 1) + for i, c1 in enumerate(s1): + curr_row = [i + 1] + for j, c2 in enumerate(s2): + insertions = prev_row[j + 1] + 1 + deletions = curr_row[j] + 1 + substitutions = prev_row[j] + (c1 != c2) + curr_row.append(min(insertions, deletions, substitutions)) + prev_row = curr_row + + return prev_row[-1] + + +def validate_metrics(names: list[str]) -> list[str]: + """Validate metric names, return list of errors.""" + errors = [] + for name in names: + if name not in METRICS: + suggestion = find_closest_metric(name) + msg = f"Unknown metric '{name}'." + if suggestion: + msg += f" Did you mean '{suggestion}'?" + errors.append(msg) + return errors + + +def validate_dimensions(names: list[str]) -> list[str]: + """Validate dimension names, return list of errors.""" + errors = [] + for name in names: + if name not in DIMENSIONS: + suggestion = find_closest_dimension(name) + msg = f"Unknown dimension '{name}'." + if suggestion: + msg += f" Did you mean '{suggestion}'?" 
+ errors.append(msg) + return errors diff --git a/src/ytstudio/services.py b/src/ytstudio/services.py new file mode 100644 index 0000000..1756fb9 --- /dev/null +++ b/src/ytstudio/services.py @@ -0,0 +1,14 @@ +from ytstudio.api import get_authenticated_service +from ytstudio.demo import DemoAnalyticsService, DemoDataService, is_demo_mode + + +def get_data_service(): + if is_demo_mode(): + return DemoDataService() + return get_authenticated_service("youtube", "v3") + + +def get_analytics_service(): + if is_demo_mode(): + return DemoAnalyticsService() + return get_authenticated_service("youtubeAnalytics", "v2") diff --git a/src/ytstudio/ui.py b/src/ytstudio/ui.py index 79f919d..75036ec 100644 --- a/src/ytstudio/ui.py +++ b/src/ytstudio/ui.py @@ -27,8 +27,16 @@ def create_kv_table() -> Table: ) +_state = {"raw": False} + + +def set_raw_output(value: bool): + _state["raw"] = value + + def format_number(n: int) -> str: - """Format large numbers (1234567 -> 1.2M)""" + if _state["raw"]: + return str(n) if n >= 1_000_000: return f"{n / 1_000_000:.1f}M" if n >= 1_000: diff --git a/tests/conftest.py b/tests/conftest.py index 3a00b13..c7e3753 100644 --- a/tests/conftest.py +++ b/tests/conftest.py @@ -139,8 +139,8 @@ def mock_service(): def mock_auth(mock_service): mock_creds = MagicMock() with ( - patch("ytstudio.auth.get_credentials", return_value=mock_creds), - patch("ytstudio.auth.build", return_value=mock_service), + patch("ytstudio.api.get_credentials", return_value=mock_creds), + patch("ytstudio.api.build", return_value=mock_service), ): yield mock_service diff --git a/tests/test_analytics.py b/tests/test_analytics.py index e83587b..f12e5c1 100644 --- a/tests/test_analytics.py +++ b/tests/test_analytics.py @@ -1,34 +1,267 @@ -from unittest.mock import patch +import json +from unittest.mock import MagicMock, patch +import typer from typer.testing import CliRunner from ytstudio.main import app -from ytstudio.ui import format_number +from ytstudio.ui import format_number, set_raw_output runner = CliRunner() +MOCK_QUERY_RESPONSE = { + "columnHeaders": [ + {"name": "day", "columnType": "DIMENSION", "dataType": "STRING"}, + {"name": "views", "columnType": "METRIC", "dataType": "INTEGER"}, + {"name": "likes", "columnType": "METRIC", "dataType": "INTEGER"}, + ], + "rows": [ + ["2026-01-01", 1500, 45], + ["2026-01-02", 2300, 78], + ["2026-01-03", 1800, 52], + ], +} + class TestFormatNumber: - def test_formats_correctly(self): + def test_human_by_default(self): assert format_number(999) == "999" assert format_number(1500) == "1.5K" assert format_number(2500000) == "2.5M" + def test_raw_mode(self): + set_raw_output(True) + assert format_number(999) == "999" + assert format_number(1500) == "1500" + assert format_number(2500000) == "2500000" + set_raw_output(False) + class TestAnalyticsCommands: def test_overview(self): - with patch("ytstudio.commands.analytics.is_demo_mode", return_value=True): + with patch("ytstudio.services.is_demo_mode", return_value=True): result = runner.invoke(app, ["analytics", "overview"]) assert result.exit_code == 0 def test_video_not_found(self, mock_auth): mock_auth.videos.return_value.list.return_value.execute.return_value = {"items": []} - with patch("ytstudio.commands.analytics.get_services") as mock_get: - mock_get.return_value = (mock_auth, mock_auth) + with ( + patch("ytstudio.commands.analytics.get_data_service", return_value=mock_auth), + patch("ytstudio.commands.analytics.get_analytics_service", return_value=mock_auth), + ): result = runner.invoke(app, ["analytics", 
"video", "nonexistent"]) assert result.exit_code == 1 def test_not_authenticated(self): - with patch("ytstudio.commands.analytics.get_authenticated_service", return_value=None): + with patch( + "ytstudio.commands.analytics.get_data_service", + side_effect=typer.Exit(1), + ): result = runner.invoke(app, ["analytics", "overview"]) assert result.exit_code == 1 + + +class TestQueryCommand: + def _mock_services(self): + data_service = MagicMock() + analytics_service = MagicMock() + + # channel id lookup + data_service.channels.return_value.list.return_value.execute.return_value = { + "items": [{"id": "UC_test"}] + } + + # query response + analytics_service.reports.return_value.query.return_value.execute.return_value = ( + MOCK_QUERY_RESPONSE + ) + + return data_service, analytics_service + + def test_query_table_output(self): + data_svc, analytics_svc = self._mock_services() + with ( + patch("ytstudio.commands.analytics.get_data_service", return_value=data_svc), + patch("ytstudio.commands.analytics.get_analytics_service", return_value=analytics_svc), + ): + result = runner.invoke( + app, + ["analytics", "query", "-m", "views,likes", "-d", "day", "--days", "3"], + ) + assert result.exit_code == 0 + assert "2026-01-01" in result.output + + def test_query_json_output(self): + data_svc, analytics_svc = self._mock_services() + with ( + patch("ytstudio.commands.analytics.get_data_service", return_value=data_svc), + patch("ytstudio.commands.analytics.get_analytics_service", return_value=analytics_svc), + ): + result = runner.invoke( + app, + ["analytics", "query", "-m", "views,likes", "-d", "day", "-o", "json"], + ) + assert result.exit_code == 0 + data = json.loads(result.output) + assert len(data) == 3 + assert data[0]["day"] == "2026-01-01" + assert data[0]["views"] == 1500 + + def test_query_csv_output(self): + data_svc, analytics_svc = self._mock_services() + with ( + patch("ytstudio.commands.analytics.get_data_service", return_value=data_svc), + patch("ytstudio.commands.analytics.get_analytics_service", return_value=analytics_svc), + ): + result = runner.invoke( + app, + ["analytics", "query", "-m", "views,likes", "-d", "day", "-o", "csv"], + ) + assert result.exit_code == 0 + lines = result.output.strip().split("\n") + assert lines[0] == "day,views,likes" + assert "2026-01-01" in lines[1] + + def test_query_with_filter(self): + data_svc, analytics_svc = self._mock_services() + with ( + patch("ytstudio.commands.analytics.get_data_service", return_value=data_svc), + patch("ytstudio.commands.analytics.get_analytics_service", return_value=analytics_svc), + ): + result = runner.invoke( + app, + [ + "analytics", + "query", + "-m", + "views", + "-f", + "video==abc123", + "-f", + "country==NL", + ], + ) + assert result.exit_code == 0 + # verify filters were passed + call_kwargs = analytics_svc.reports.return_value.query.call_args + assert "video==abc123;country==NL" in str(call_kwargs) + + def test_query_invalid_metric(self): + result = runner.invoke( + app, + ["analytics", "query", "-m", "veiws"], + ) + assert result.exit_code == 1 + assert "Unknown metric" in result.output + assert "views" in result.output # suggestion + + def test_query_invalid_dimension(self): + result = runner.invoke( + app, + ["analytics", "query", "-m", "views", "-d", "contry"], + ) + assert result.exit_code == 1 + assert "Unknown dimension" in result.output + assert "country" in result.output # suggestion + + def test_query_invalid_filter_format(self): + data_svc, analytics_svc = self._mock_services() + with ( + 
patch("ytstudio.commands.analytics.get_data_service", return_value=data_svc), + patch("ytstudio.commands.analytics.get_analytics_service", return_value=analytics_svc), + ): + result = runner.invoke( + app, + ["analytics", "query", "-m", "views", "-f", "video=abc"], + ) + assert result.exit_code == 1 + assert "Invalid filter" in result.output + + +class TestQueryDemoMode: + def test_query_demo_table(self): + with patch("ytstudio.services.is_demo_mode", return_value=True): + result = runner.invoke( + app, + ["analytics", "query", "-m", "views,likes", "-d", "day", "--days", "7"], + ) + assert result.exit_code == 0 + assert "views" in result.output + assert "likes" in result.output + + def test_query_demo_json(self): + with patch("ytstudio.services.is_demo_mode", return_value=True): + result = runner.invoke( + app, + ["analytics", "query", "-m", "views,likes", "-d", "country", "-o", "json"], + ) + assert result.exit_code == 0 + data = json.loads(result.output) + assert len(data) > 0 + assert "views" in data[0] + assert "country" in data[0] + + def test_query_demo_no_dimensions(self): + with patch("ytstudio.services.is_demo_mode", return_value=True): + result = runner.invoke( + app, + ["analytics", "query", "-m", "views,likes"], + ) + assert result.exit_code == 0 + assert "views" in result.output + + +class TestMetricsCommand: + def test_list_all(self): + result = runner.invoke(app, ["analytics", "metrics"]) + assert result.exit_code == 0 + assert "views" in result.output + assert "likes" in result.output + + def test_list_by_group(self): + result = runner.invoke(app, ["analytics", "metrics", "--group", "engagement"]) + assert result.exit_code == 0 + assert "likes" in result.output + assert "shares" in result.output + + def test_list_invalid_group(self): + result = runner.invoke(app, ["analytics", "metrics", "--group", "nonexistent"]) + assert result.exit_code == 1 + assert "Unknown group" in result.output + + def test_json_output(self): + result = runner.invoke(app, ["analytics", "metrics", "-o", "json"]) + assert result.exit_code == 0 + data = json.loads(result.output) + assert isinstance(data, list) + assert any(m["name"] == "views" for m in data) + + +class TestDimensionsCommand: + def test_list_all(self): + result = runner.invoke(app, ["analytics", "dimensions"]) + assert result.exit_code == 0 + assert "country" in result.output + assert "day" in result.output + + def test_list_by_group(self): + result = runner.invoke(app, ["analytics", "dimensions", "--group", "geographic"]) + assert result.exit_code == 0 + assert "country" in result.output + + def test_detail_view(self): + result = runner.invoke(app, ["analytics", "dimensions", "country"]) + assert result.exit_code == 0 + assert "ISO 3166-1" in result.output + + def test_filter_only_shown(self): + result = runner.invoke(app, ["analytics", "dimensions", "continent"]) + assert result.exit_code == 0 + assert "filter only" in result.output + + def test_json_output(self): + result = runner.invoke(app, ["analytics", "dimensions", "-o", "json"]) + assert result.exit_code == 0 + data = json.loads(result.output) + assert isinstance(data, list) + assert any(d["name"] == "country" for d in data) diff --git a/tests/test_auth.py b/tests/test_api.py similarity index 74% rename from tests/test_auth.py rename to tests/test_api.py index 2f10a96..0c59984 100644 --- a/tests/test_auth.py +++ b/tests/test_api.py @@ -5,7 +5,7 @@ from googleapiclient.errors import HttpError from typer.testing import CliRunner -from ytstudio.auth import api, 
get_authenticated_service, handle_api_error +from ytstudio.api import api, get_authenticated_service, handle_api_error from ytstudio.main import app runner = CliRunner() @@ -43,23 +43,18 @@ def test_returns_result(self): class TestGetAuthenticatedService: def test_exits_when_no_credentials(self): - with patch("ytstudio.auth.get_credentials", return_value=None), pytest.raises(Exit): + with patch("ytstudio.api.get_credentials", return_value=None), pytest.raises(Exit): get_authenticated_service() class TestCommands: def test_login_requires_client_secrets(self): - with patch("ytstudio.auth.CLIENT_SECRETS_FILE") as mock_file: + with patch("ytstudio.api.CLIENT_SECRETS_FILE") as mock_file: mock_file.exists.return_value = False result = runner.invoke(app, ["login"]) assert result.exit_code == 1 def test_status_not_authenticated(self): - with patch("ytstudio.auth.load_credentials", return_value=None): + with patch("ytstudio.api.load_credentials", return_value=None): result = runner.invoke(app, ["status"]) assert "Not authenticated" in result.stdout - - def test_logout(self): - with patch("ytstudio.auth.clear_credentials"): - result = runner.invoke(app, ["auth", "logout"]) - assert "Logged out" in result.stdout diff --git a/tests/test_registry.py b/tests/test_registry.py new file mode 100644 index 0000000..a375703 --- /dev/null +++ b/tests/test_registry.py @@ -0,0 +1,110 @@ +from ytstudio.registry import ( + DIMENSION_GROUPS, + DIMENSIONS, + METRIC_GROUPS, + METRICS, + DimensionName, + MetricName, + find_closest_dimension, + find_closest_metric, + validate_dimensions, + validate_metrics, +) + + +class TestMetricsRegistry: + def test_has_core_metrics(self): + core = [m for m in METRICS.values() if m.core] + assert len(core) >= 10 + + def test_views_is_core(self): + assert "views" in METRICS + assert METRICS["views"].core is True + + def test_revenue_metrics_are_monetary(self): + revenue = [m for m in METRICS.values() if m.group == "revenue"] + assert all(m.monetary for m in revenue) + + def test_groups_are_consistent(self): + for m in METRICS.values(): + assert m.group in METRIC_GROUPS + + def test_no_duplicate_names(self): + names = [m.name for m in METRICS.values()] + assert len(names) == len(set(names)) + + +class TestDimensionsRegistry: + def test_has_common_dimensions(self): + for name in ["day", "month", "country", "video", "deviceType"]: + assert name in DIMENSIONS + + def test_filter_only_dimensions(self): + assert DIMENSIONS["continent"].filter_only is True + assert DIMENSIONS["day"].filter_only is False + + def test_groups_are_consistent(self): + for d in DIMENSIONS.values(): + assert d.group in DIMENSION_GROUPS + + def test_no_duplicate_names(self): + names = [d.name for d in DIMENSIONS.values()] + assert len(names) == len(set(names)) + + +class TestValidation: + def test_valid_metrics(self): + errors = validate_metrics(["views", "likes", "comments"]) + assert errors == [] + + def test_invalid_metric(self): + errors = validate_metrics(["views", "veiws"]) + assert len(errors) == 1 + assert "veiws" in errors[0] + + def test_valid_dimensions(self): + errors = validate_dimensions(["day", "country"]) + assert errors == [] + + def test_invalid_dimension(self): + errors = validate_dimensions(["cuntry"]) + assert len(errors) == 1 + assert "cuntry" in errors[0] + + +class TestEnums: + def test_metric_string_equality(self): + assert MetricName.VIEWS == "views" + assert MetricName.ESTIMATED_MINUTES_WATCHED == "estimatedMinutesWatched" + + def test_dimension_string_equality(self): + assert 
DimensionName.DAY == "day" + assert DimensionName.INSIGHT_TRAFFIC_SOURCE_TYPE == "insightTrafficSourceType" + + def test_isinstance_str(self): + assert isinstance(MetricName.VIEWS, str) + assert isinstance(DimensionName.VIDEO, str) + + def test_string_lookup_in_dicts(self): + assert "views" in METRICS + assert MetricName.VIEWS in METRICS + assert "day" in DIMENSIONS + assert DimensionName.DAY in DIMENSIONS + + +class TestFuzzyMatching: + def test_close_metric(self): + assert find_closest_metric("veiws") == "views" + assert find_closest_metric("liks") == "likes" + assert find_closest_metric("commets") == "comments" + + def test_no_match(self): + assert find_closest_metric("zzzzzzzzz") is None + + def test_close_dimension(self): + assert find_closest_dimension("contry") == "country" + assert find_closest_dimension("vidoe") == "video" + + def test_case_insensitive(self): + assert find_closest_metric("Views") == "views" + assert find_closest_metric("LIKES") == "likes" diff --git a/tests/test_videos.py b/tests/test_videos.py index 8734735..e848b24 100644 --- a/tests/test_videos.py +++ b/tests/test_videos.py @@ -1,5 +1,6 @@ from unittest.mock import patch +import typer from typer.testing import CliRunner from ytstudio.main import app @@ -34,9 +35,9 @@ def test_update_no_changes(self, mock_auth): assert result.exit_code == 1 def test_not_authenticated(self): - with ( - patch("ytstudio.auth.get_authenticated_service", return_value=None), - patch("ytstudio.commands.videos.is_demo_mode", return_value=False), + with patch( + "ytstudio.commands.videos.get_data_service", + side_effect=typer.Exit(1), ): result = runner.invoke(app, ["videos", "list"]) assert result.exit_code == 1