Skip to content
Merged
Show file tree
Hide file tree
Changes from all commits
Commits
File filter

Filter by extension

Filter by extension


Conversations
Failed to load comments.
Loading
Jump to
Jump to file
Failed to load files.
Loading
Diff view
Diff view
2 changes: 1 addition & 1 deletion pyproject.toml
Original file line number Diff line number Diff line change
Expand Up @@ -4,7 +4,7 @@ build-backend = "setuptools.build_meta"

[project]
name = "sentienceapi"
version = "0.90.3"
version = "0.90.5"
description = "Python SDK for Sentience AI Agent Browser Automation"
readme = "README.md"
requires-python = ">=3.11"
Expand Down
5 changes: 3 additions & 2 deletions sentience/__init__.py
Original file line number Diff line number Diff line change
Expand Up @@ -11,7 +11,7 @@
from .browser import SentienceBrowser

# Tracing (v0.12.0+)
from .cloud_tracing import CloudTraceSink
from .cloud_tracing import CloudTraceSink, SentienceLogger
from .conversational_agent import ConversationalAgent
from .expect import expect

Expand Down Expand Up @@ -64,7 +64,7 @@
)
from .wait import wait_for

__version__ = "0.90.3"
__version__ = "0.90.5"

__all__ = [
# Core SDK
Expand Down Expand Up @@ -123,6 +123,7 @@
"TraceSink",
"JsonlTraceSink",
"CloudTraceSink",
"SentienceLogger",
"TraceEvent",
"create_tracer",
"SENTIENCE_API_URL",
Expand Down
90 changes: 88 additions & 2 deletions sentience/cloud_tracing.py
Original file line number Diff line number Diff line change
Expand Up @@ -10,13 +10,29 @@
import threading
from collections.abc import Callable
from pathlib import Path
from typing import Any
from typing import Any, Protocol

import requests

from sentience.tracing import TraceSink


class SentienceLogger(Protocol):
    """
    Structural (duck-typed) interface for an optional logger.

    Any object exposing ``info``, ``warning`` and ``error`` methods that each
    accept a single message string satisfies this protocol — e.g. a stdlib
    ``logging.Logger`` or a custom adapter. Used by ``CloudTraceSink`` to
    report file sizes and upload problems without mandating a logging backend.
    """

    def info(self, message: str) -> None:
        """Record an informational message."""
        ...

    def warning(self, message: str) -> None:
        """Record a warning message."""
        ...

    def error(self, message: str) -> None:
        """Record an error message."""
        ...


class CloudTraceSink(TraceSink):
"""
Enterprise Cloud Sink: "Local Write, Batch Upload" pattern.
Expand Down Expand Up @@ -51,17 +67,30 @@ class CloudTraceSink(TraceSink):
>>> tracer.close(blocking=False) # Returns immediately
"""

def __init__(self, upload_url: str, run_id: str):
def __init__(
self,
upload_url: str,
run_id: str,
api_key: str | None = None,
api_url: str | None = None,
logger: SentienceLogger | None = None,
):
"""
Initialize cloud trace sink.

Args:
upload_url: Pre-signed PUT URL from Sentience API
(e.g., "https://sentience.nyc3.digitaloceanspaces.com/...")
run_id: Unique identifier for this agent run (used for persistent cache)
api_key: Sentience API key for calling /v1/traces/complete
api_url: Sentience API base URL (default: https://api.sentienceapi.com)
logger: Optional logger instance for logging file sizes and errors
"""
self.upload_url = upload_url
self.run_id = run_id
self.api_key = api_key
self.api_url = api_url or "https://api.sentienceapi.com"
self.logger = logger

# Use persistent cache directory instead of temp file
# This ensures traces survive process crashes
Expand All @@ -74,6 +103,10 @@ def __init__(self, upload_url: str, run_id: str):
self._closed = False
self._upload_successful = False

# File size tracking (NEW)
self.trace_file_size_bytes = 0
self.screenshot_total_size_bytes = 0

def emit(self, event: dict[str, Any]) -> None:
"""
Write event to local persistent file (Fast, non-blocking).
Expand Down Expand Up @@ -140,6 +173,18 @@ def _do_upload(self, on_progress: Callable[[int, int], None] | None = None) -> N
compressed_data = gzip.compress(trace_data)
compressed_size = len(compressed_data)

# Measure trace file size (NEW)
self.trace_file_size_bytes = compressed_size

# Log file sizes if logger is provided (NEW)
if self.logger:
self.logger.info(
f"Trace file size: {self.trace_file_size_bytes / 1024 / 1024:.2f} MB"
)
self.logger.info(
f"Screenshot total: {self.screenshot_total_size_bytes / 1024 / 1024:.2f} MB"
)

# Report progress: start
if on_progress:
on_progress(0, compressed_size)
Expand All @@ -165,6 +210,9 @@ def _do_upload(self, on_progress: Callable[[int, int], None] | None = None) -> N
if on_progress:
on_progress(compressed_size, compressed_size)

# Call /v1/traces/complete to report file sizes (NEW)
self._complete_trace()

# Delete file only on successful upload
if os.path.exists(self._path):
try:
Expand All @@ -183,6 +231,44 @@ def _do_upload(self, on_progress: Callable[[int, int], None] | None = None) -> N
print(f" Local trace preserved at: {self._path}")
# Don't raise - preserve trace locally even if upload fails

def _complete_trace(self) -> None:
    """
    Notify the gateway (POST /v1/traces/complete) of the measured file sizes.

    Best-effort by design: any HTTP error or exception is reported through
    ``self.logger`` (when one is configured) and otherwise ignored, so this
    call can never turn a successful upload into a failure. Skipped entirely
    when no API key is available.
    """
    if not self.api_key:
        # Nothing to authenticate with — silently skip the completion call.
        return

    # Payload mirrors the gateway's expected shape: run identifier plus
    # the byte counts accumulated during upload.
    payload = {
        "run_id": self.run_id,
        "stats": {
            "trace_file_size_bytes": self.trace_file_size_bytes,
            "screenshot_total_size_bytes": self.screenshot_total_size_bytes,
        },
    }

    try:
        resp = requests.post(
            f"{self.api_url}/v1/traces/complete",
            headers={"Authorization": f"Bearer {self.api_key}"},
            json=payload,
            timeout=10,
        )
        if resp.status_code == 200:
            if self.logger:
                self.logger.info("Trace completion reported to gateway")
        elif self.logger:
            self.logger.warning(
                f"Failed to report trace completion: HTTP {resp.status_code}"
            )
    except Exception as exc:
        # Advisory call only — log and move on rather than propagating.
        if self.logger:
            self.logger.warning(f"Error reporting trace completion: {exc}")

def __enter__(self):
    """Enter the runtime context; yields this sink for use in a ``with`` block."""
    return self
Expand Down
6 changes: 3 additions & 3 deletions sentience/extension/background.js
Original file line number Diff line number Diff line change
Expand Up @@ -144,13 +144,13 @@ async function handleScreenshotCapture(_tabId, options = {}) {
async function handleSnapshotProcessing(rawData, options = {}) {
const MAX_ELEMENTS = 10000; // Safety limit to prevent hangs
const startTime = performance.now();

try {
// Safety check: limit element count to prevent hangs
if (!Array.isArray(rawData)) {
throw new Error('rawData must be an array');
}

if (rawData.length > MAX_ELEMENTS) {
console.warn(`[Sentience Background] ⚠️ Large dataset: ${rawData.length} elements. Limiting to ${MAX_ELEMENTS} to prevent hangs.`);
rawData = rawData.slice(0, MAX_ELEMENTS);
Expand Down Expand Up @@ -186,7 +186,7 @@ async function handleSnapshotProcessing(rawData, options = {}) {
// Add timeout protection (18 seconds - less than content.js timeout)
analyzedElements = await Promise.race([
wasmPromise,
new Promise((_, reject) =>
new Promise((_, reject) =>
setTimeout(() => reject(new Error('WASM processing timeout (>18s)')), 18000)
)
]);
Expand Down
2 changes: 1 addition & 1 deletion sentience/extension/content.js
Original file line number Diff line number Diff line change
Expand Up @@ -92,7 +92,7 @@ function handleSnapshotRequest(data) {
if (responded) return; // Already responded via timeout
responded = true;
clearTimeout(timeoutId);

const duration = performance.now() - startTime;

// Handle Chrome extension errors (e.g., background script crashed)
Expand Down
Loading