fix(tracing): avoid import-time tracing exporter/provider initialization #2490
In `agents.tracing.processors`, the shared exporter and processor are no longer built at import time; `default_exporter()` and `default_processor()` now create them on first use:

```diff
@@ -354,16 +354,22 @@ def _export_batches(self, force: bool = False):
             self._exporter.export(items_to_export)


-# Create a shared global instance:
-_global_exporter = BackendSpanExporter()
-_global_processor = BatchTraceProcessor(_global_exporter)
+# Lazily initialized defaults. Creating these at import time is unsafe in fork-based runtimes.
+_global_exporter: BackendSpanExporter | None = None
+_global_processor: BatchTraceProcessor | None = None


 def default_exporter() -> BackendSpanExporter:
     """The default exporter, which exports traces and spans to the backend in batches."""
-    return _global_exporter
+    global _global_exporter
+    if _global_exporter is None:
+        _global_exporter = BackendSpanExporter()
+    return _global_exporter


 def default_processor() -> BatchTraceProcessor:
     """The default processor, which exports traces and spans to the backend in batches."""
-    return _global_processor
+    global _global_processor
+    if _global_processor is None:
+        _global_processor = BatchTraceProcessor(default_exporter())
+    return _global_processor
```
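The new comment above points at fork-based runtimes. As a rough, hypothetical illustration (a toy class, not the library's code, and POSIX-only because it uses `os.fork`): an exporter built at import time owns a background flush thread, and `fork()` copies the module-level object but not its thread, so a prefork worker inherits a queue that nothing ever drains.

```python
import os
import threading
import time


class ToyBatchExporter:
    """Stand-in for an exporter that starts a background flush thread on construction."""

    def __init__(self) -> None:
        self.queue: list[str] = []
        self._worker = threading.Thread(target=self._flush_forever, daemon=True)
        self._worker.start()

    def _flush_forever(self) -> None:
        while True:
            self.queue.clear()  # pretend to ship spans somewhere
            time.sleep(0.05)


# Import-time construction: the worker thread exists only in the importing process.
EXPORTER = ToyBatchExporter()

if __name__ == "__main__":
    pid = os.fork()  # what gunicorn-style prefork workers do after importing the app
    if pid == 0:
        # The child inherits EXPORTER, but fork copies only the calling thread,
        # so the flush thread is gone and this item is never drained.
        EXPORTER.queue.append("span from worker")
        print("worker thread alive in child:", EXPORTER._worker.is_alive())  # False
        os._exit(0)
    os.waitpid(pid, 0)
```

Deferring construction to `default_exporter()` / `default_processor()` means each process builds its own instances the first time it actually traces, after any fork has already happened.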
In `agents.tracing.setup`, `get_trace_provider()` now performs the default setup lazily instead of raising when no provider has been set, and registers shutdown via `atexit`:

```diff
@@ -1,9 +1,10 @@
 from __future__ import annotations

+import atexit
 from typing import TYPE_CHECKING

-if TYPE_CHECKING:
-    from .provider import TraceProvider
+if TYPE_CHECKING:  # pragma: no cover
+    from .provider import TraceProvider  # pragma: no cover

 GLOBAL_TRACE_PROVIDER: TraceProvider | None = None
@@ -16,6 +17,15 @@ def set_trace_provider(provider: TraceProvider) -> None:

 def get_trace_provider() -> TraceProvider:
     """Get the global trace provider used by tracing utilities."""
+    global GLOBAL_TRACE_PROVIDER
     if GLOBAL_TRACE_PROVIDER is None:
-        raise RuntimeError("Trace provider not set")
+        # Lazily initialize defaults on first tracing API usage to avoid
+        # import-time side effects while keeping historical call behavior.
+        from .processors import default_processor
+        from .provider import DefaultTraceProvider
+
+        provider = DefaultTraceProvider()
+        GLOBAL_TRACE_PROVIDER = provider
+        provider.register_processor(default_processor())
+        atexit.register(provider.shutdown)
     return GLOBAL_TRACE_PROVIDER
```
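Taken together, the two changes give the following behavior in a fresh interpreter. This is a minimal sketch using the same module attributes the new tests poke at, assuming the `agents` package is importable and nothing else has touched tracing yet:

```python
import agents.tracing as tracing
import agents.tracing.processors as processors
from agents.tracing import setup

# Importing the package no longer builds anything.
assert setup.GLOBAL_TRACE_PROVIDER is None
assert processors._global_exporter is None

# The first call to a tracing API constructs and registers the defaults...
provider = tracing.get_trace_provider()
assert setup.GLOBAL_TRACE_PROVIDER is provider
assert processors._global_processor is not None

# ...and subsequent calls reuse the same objects.
assert tracing.get_trace_provider() is provider
assert processors.default_exporter() is processors._global_exporter
```

Note that the lazy path still registers `provider.shutdown` with `atexit`, so the provider is shut down at interpreter exit just as it would be after an explicit setup.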
The PR also adds a test module (new file, 131 lines) that runs each check in a subprocess so it can observe a fresh interpreter's import-time state:

```python
import json
import subprocess
import sys
from typing import cast


def _run_python_snippet(snippet: str) -> dict[str, bool]:
    completed = subprocess.run(
        [sys.executable, "-c", snippet],
        check=True,
        capture_output=True,
        text=True,
    )
    return cast(dict[str, bool], json.loads(completed.stdout))


def test_import_does_not_initialize_default_tracing_objects():
    result = _run_python_snippet(
        """
import json
import agents.tracing as tracing
import agents.tracing.processors as processors
from agents.tracing import setup

print(json.dumps({
    "provider_is_none": setup.GLOBAL_TRACE_PROVIDER is None,
    "exporter_is_none": processors._global_exporter is None,
    "processor_is_none": processors._global_processor is None,
}))
"""
    )

    assert result["provider_is_none"] is True
    assert result["exporter_is_none"] is True
    assert result["processor_is_none"] is True


def test_get_trace_provider_initializes_defaults_lazily():
    result = _run_python_snippet(
        """
import json
import agents.tracing as tracing
import agents.tracing.processors as processors
from agents.tracing import setup

provider_1 = tracing.get_trace_provider()
provider_2 = tracing.get_trace_provider()

print(json.dumps({
    "same_provider": provider_1 is provider_2,
    "provider_is_set": setup.GLOBAL_TRACE_PROVIDER is not None,
    "exporter_created": processors._global_exporter is not None,
    "processor_created": processors._global_processor is not None,
    "processor_uses_exporter": (
        processors._global_processor is not None
        and processors._global_exporter is not None
        and processors._global_processor._exporter is processors._global_exporter
    ),
}))
"""
    )

    assert result["same_provider"] is True
    assert result["provider_is_set"] is True
    assert result["exporter_created"] is True
    assert result["processor_created"] is True
    assert result["processor_uses_exporter"] is True


def test_default_tracing_init_is_idempotent():
    result = _run_python_snippet(
        """
import json
import agents.tracing as tracing
import agents.tracing.processors as processors
from agents.tracing import setup

tracing._ensure_default_tracing_initialized()
provider_1 = setup.GLOBAL_TRACE_PROVIDER
tracing._ensure_default_tracing_initialized()
provider_2 = setup.GLOBAL_TRACE_PROVIDER

print(json.dumps({
    "same_provider": provider_1 is provider_2,
    "exporter_created": processors._global_exporter is not None,
    "processor_created": processors._global_processor is not None,
}))
"""
    )

    assert result["same_provider"] is True
    assert result["exporter_created"] is True
    assert result["processor_created"] is True


def test_set_trace_provider_sets_global_provider() -> None:
    from agents.tracing import setup
    from agents.tracing.provider import DefaultTraceProvider

    previous_provider = setup.GLOBAL_TRACE_PROVIDER
    provider = DefaultTraceProvider()
    try:
        setup.set_trace_provider(provider)
        assert setup.GLOBAL_TRACE_PROVIDER is provider
    finally:
        setup.GLOBAL_TRACE_PROVIDER = previous_provider


def test_trace_helper_initializes_defaults_lazily() -> None:
    result = _run_python_snippet(
        """
import json
import agents.tracing as tracing
import agents.tracing.processors as processors
from agents.tracing import setup

trace_obj = tracing.trace("lazy-init-from-trace")

print(json.dumps({
    "trace_created": trace_obj is not None,
    "provider_is_set": setup.GLOBAL_TRACE_PROVIDER is not None,
    "exporter_created": processors._global_exporter is not None,
    "processor_created": processors._global_processor is not None,
}))
"""
    )

    assert result["trace_created"] is True
    assert result["provider_is_set"] is True
    assert result["exporter_created"] is True
    assert result["processor_created"] is True
```
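The tests shell out to a child interpreter because tracing state is module-level and per-process: once anything in the current process has imported and touched `agents.tracing`, the "nothing is initialized yet" condition can no longer be observed there. A rough manual equivalent of the `_run_python_snippet` helper, assuming the package is installed:

```python
import json
import subprocess
import sys

# The probe runs in a child interpreter, so the parent's imports cannot leak in.
probe = """
import json
import agents.tracing.processors as processors
from agents.tracing import setup

print(json.dumps({
    "provider_is_none": setup.GLOBAL_TRACE_PROVIDER is None,
    "exporter_is_none": processors._global_exporter is None,
}))
"""

completed = subprocess.run(
    [sys.executable, "-c", probe], capture_output=True, text=True, check=True
)
print(json.loads(completed.stdout))  # expected: both values True after a bare import
```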