Commit ecc8504

Merge pull request #156 from SentienceAPI/langchain
LangChain / LangGraph supported
2 parents 54c4324 + 52c7232 commit ecc8504

File tree

13 files changed: +1002 −43 lines changed


examples/lang-chain/README.md

Lines changed: 13 additions & 0 deletions
@@ -0,0 +1,13 @@
### LangChain / LangGraph examples (Python)

These examples show how to use Sentience as a **tool layer** inside LangChain and LangGraph.

Install:

```bash
pip install sentienceapi[langchain]
```

Examples:

- `langchain_tools_demo.py`: build a Sentience tool pack for LangChain
- `langgraph_self_correcting_graph.py`: observe → act → verify → branch (retry) template
examples/lang-chain/langchain_tools_demo.py

Lines changed: 41 additions & 0 deletions
@@ -0,0 +1,41 @@
"""
Example: Build Sentience LangChain tools (async-only).

Install:
    pip install sentienceapi[langchain]

Run:
    python examples/lang-chain/langchain_tools_demo.py

Notes:
- This example focuses on creating the tools. Hook them into your agent of choice.
"""

from __future__ import annotations

import asyncio

from sentience import AsyncSentienceBrowser
from sentience.integrations.langchain import (
    SentienceLangChainContext,
    build_sentience_langchain_tools,
)


async def main() -> None:
    browser = AsyncSentienceBrowser(headless=False)
    await browser.start()
    await browser.goto("https://example.com")

    ctx = SentienceLangChainContext(browser=browser)
    tools = build_sentience_langchain_tools(ctx)

    print("Registered tools:")
    for t in tools:
        print(f"- {t.name}")

    await browser.close()


if __name__ == "__main__":
    asyncio.run(main())
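The demo stops at building the tool pack. As a rough sketch of the "hook them into your agent of choice" step, the snippet below wires the same tools into LangGraph's prebuilt ReAct agent; the `ChatOpenAI` model and the prompt text are illustrative assumptions, not part of this commit.

```python
# Hedged sketch: plugging the Sentience tool pack into a LangGraph ReAct agent.
# Assumes langchain-openai is installed and OPENAI_API_KEY is set; the model and
# prompt are placeholders, not part of the Sentience SDK.
import asyncio

from langchain_openai import ChatOpenAI
from langgraph.prebuilt import create_react_agent

from sentience import AsyncSentienceBrowser
from sentience.integrations.langchain import (
    SentienceLangChainContext,
    build_sentience_langchain_tools,
)


async def run_agent() -> None:
    browser = AsyncSentienceBrowser(headless=False)
    await browser.start()
    try:
        ctx = SentienceLangChainContext(browser=browser)
        tools = build_sentience_langchain_tools(ctx)
        agent = create_react_agent(ChatOpenAI(model="gpt-4o-mini"), tools)
        # The tool pack is async-only, so use the async entry point.
        result = await agent.ainvoke(
            {"messages": [("user", "Open https://example.com and report the page URL.")]}
        )
        print(result["messages"][-1].content)
    finally:
        await browser.close()


if __name__ == "__main__":
    asyncio.run(run_agent())
```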
examples/lang-chain/langgraph_self_correcting_graph.py

Lines changed: 80 additions & 0 deletions
@@ -0,0 +1,80 @@
"""
LangGraph reference example: Sentience observe → act → verify → branch (self-correcting).

Install:
    pip install sentienceapi[langchain]

Run:
    python examples/lang-chain/langgraph_self_correcting_graph.py
"""

from __future__ import annotations

import asyncio
from dataclasses import dataclass

from sentience import AsyncSentienceBrowser
from sentience.integrations.langchain import SentienceLangChainContext, SentienceLangChainCore


@dataclass
class State:
    url: str | None = None
    last_action: str | None = None
    attempts: int = 0
    done: bool = False


async def main() -> None:
    from langgraph.graph import END, StateGraph

    browser = AsyncSentienceBrowser(headless=False)
    await browser.start()

    core = SentienceLangChainCore(SentienceLangChainContext(browser=browser))

    async def observe(state: State) -> State:
        s = await core.snapshot_state()
        state.url = s.url
        return state

    async def act(state: State) -> State:
        # Replace with an LLM decision node. For demo we just navigate once.
        if state.attempts == 0:
            await core.navigate("https://example.com")
            state.last_action = "navigate"
        else:
            state.last_action = "noop"
        state.attempts += 1
        return state

    async def verify(state: State) -> State:
        out = await core.verify_url_matches(r"example\.com")
        state.done = bool(out.passed)
        return state

    def branch(state: State) -> str:
        if state.done:
            return "done"
        if state.attempts >= 3:
            return "done"
        return "retry"

    g = StateGraph(State)
    g.add_node("observe", observe)
    g.add_node("act", act)
    g.add_node("verify", verify)
    g.set_entry_point("observe")
    g.add_edge("observe", "act")
    g.add_edge("act", "verify")
    g.add_conditional_edges("verify", branch, {"retry": "observe", "done": END})
    app = g.compile()

    final = await app.ainvoke(State())
    print(final)

    await browser.close()


if __name__ == "__main__":
    asyncio.run(main())
examples/langgraph/sentience_self_correcting_graph.py

Lines changed: 88 additions & 0 deletions
@@ -0,0 +1,88 @@
"""
LangGraph reference example: Sentience observe → act → verify → branch (self-correcting).

Install:
    pip install sentienceapi[langchain]

Run:
    python examples/langgraph/sentience_self_correcting_graph.py

Notes:
- This is a template demonstrating control flow; you can replace the "decide" node
  with an LLM step (LangChain) that chooses actions based on snapshot_state/read_page.
"""

from __future__ import annotations

import asyncio
from dataclasses import dataclass
from typing import Optional

from sentience import AsyncSentienceBrowser
from sentience.integrations.langchain import SentienceLangChainContext, SentienceLangChainCore


@dataclass
class State:
    url: str | None = None
    last_action: str | None = None
    attempts: int = 0
    done: bool = False


async def main() -> None:
    # Lazy import so the file can exist without langgraph installed
    from langgraph.graph import END, StateGraph

    browser = AsyncSentienceBrowser(headless=False)
    await browser.start()

    core = SentienceLangChainCore(SentienceLangChainContext(browser=browser))

    async def observe(state: State) -> State:
        s = await core.snapshot_state()
        state.url = s.url
        return state

    async def act(state: State) -> State:
        # Replace this with an LLM-driven decision. For demo purposes, we just navigate once.
        if state.attempts == 0:
            await core.navigate("https://example.com")
            state.last_action = "navigate"
        else:
            state.last_action = "noop"
        state.attempts += 1
        return state

    async def verify(state: State) -> State:
        # Guard condition: URL should contain example.com
        out = await core.verify_url_matches(r"example\.com")
        state.done = bool(out.passed)
        return state

    def should_continue(state: State) -> str:
        # Self-correcting loop: retry observe→act→verify up to 3 attempts
        if state.done:
            return "done"
        if state.attempts >= 3:
            return "done"
        return "retry"

    g = StateGraph(State)
    g.add_node("observe", observe)
    g.add_node("act", act)
    g.add_node("verify", verify)
    g.set_entry_point("observe")
    g.add_edge("observe", "act")
    g.add_edge("act", "verify")
    g.add_conditional_edges("verify", should_continue, {"retry": "observe", "done": END})
    app = g.compile()

    final = await app.ainvoke(State())
    print(final)

    await browser.close()


if __name__ == "__main__":
    asyncio.run(main())
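The docstring note above suggests replacing the demo "decide"/act node with an LLM step. A minimal sketch of that swap is below; `ChatOpenAI` and the prompt wording are assumptions, while `State` and `core` are the objects defined in the example (so this function would live inside `main()` where `core` is in scope).

```python
# Hypothetical LLM-driven replacement for the demo "act" node.
# Assumes langchain-openai is installed; `core` and `State` come from the graph example above.
from langchain_core.prompts import ChatPromptTemplate
from langchain_openai import ChatOpenAI

llm = ChatOpenAI(model="gpt-4o-mini")
prompt = ChatPromptTemplate.from_messages(
    [
        ("system", "You drive a browser. Reply with a single URL to navigate to, or NOOP."),
        ("human", "Current URL: {url}\nAttempts so far: {attempts}"),
    ]
)


async def act(state: State) -> State:
    # Ask the model what to do based on the observed state.
    reply = await (prompt | llm).ainvoke({"url": state.url, "attempts": state.attempts})
    decision = reply.content.strip()
    if decision.upper() != "NOOP":
        await core.navigate(decision)  # Sentience action from the example above
        state.last_action = f"navigate:{decision}"
    else:
        state.last_action = "noop"
    state.attempts += 1
    return state
```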

sentience/cloud_tracing.py

Lines changed: 63 additions & 23 deletions
@@ -148,40 +148,80 @@ def close(
 
         self._closed = True
 
-        # Flush and sync file to disk before closing to ensure all data is written
-        # This is critical on CI systems where file system operations may be slower
-        self._trace_file.flush()
+        if not blocking:
+            # Fire-and-forget background finalize+upload.
+            #
+            # IMPORTANT: for truly non-blocking close, we avoid synchronous work here
+            # (flush/fsync/index generation). That work happens in the background thread.
+            thread = threading.Thread(
+                target=self._close_and_upload_background,
+                args=(on_progress,),
+                daemon=True,
+            )
+            thread.start()
+            return  # Return immediately
+
+        # Blocking mode: finalize trace file and upload now.
+        if not self._finalize_trace_file_for_upload():
+            return
+        self._do_upload(on_progress)
+
+    def _finalize_trace_file_for_upload(self) -> bool:
+        """
+        Finalize the local trace file so it is ready for upload.
+
+        Returns:
+            True if there is data to upload, False if the trace is empty/missing.
+        """
+        # Flush and sync file to disk before closing to ensure all data is written.
+        # This can be slow on CI file systems; in non-blocking close we do this in background.
+        try:
+            self._trace_file.flush()
+        except Exception:
+            pass
         try:
-            # Force OS to write buffered data to disk
             os.fsync(self._trace_file.fileno())
         except (OSError, AttributeError):
-            # Some file handles don't support fsync (e.g., StringIO in tests)
-            # This is fine - flush() is usually sufficient
+            # Some file handles don't support fsync; flush is usually sufficient.
+            pass
+        try:
+            self._trace_file.close()
+        except Exception:
             pass
-        self._trace_file.close()
 
         # Ensure file exists and has content before proceeding
-        if not self._path.exists() or self._path.stat().st_size == 0:
-            # No events were emitted, nothing to upload
-            if self.logger:
-                self.logger.warning("No trace events to upload (file is empty or missing)")
-            return
+        try:
+            if not self._path.exists() or self._path.stat().st_size == 0:
+                if self.logger:
+                    self.logger.warning("No trace events to upload (file is empty or missing)")
+                return False
+        except Exception:
+            # If we can't stat, don't attempt upload
+            return False
 
         # Generate index after closing file
        self._generate_index()
+        return True
 
-        if not blocking:
-            # Fire-and-forget background upload
-            thread = threading.Thread(
-                target=self._do_upload,
-                args=(on_progress,),
-                daemon=True,
-            )
-            thread.start()
-            return  # Return immediately
+    def _close_and_upload_background(
+        self, on_progress: Callable[[int, int], None] | None = None
+    ) -> None:
+        """
+        Background worker for non-blocking close.
 
-        # Blocking mode
-        self._do_upload(on_progress)
+        Performs file finalization + index generation + upload.
+        """
+        try:
+            if not self._finalize_trace_file_for_upload():
+                return
+            self._do_upload(on_progress)
+        except Exception as e:
+            # Non-fatal: preserve trace locally
+            self._upload_successful = False
+            print(f"❌ [Sentience] Error uploading trace (background): {e}")
+            print(f"   Local trace preserved at: {self._path}")
+            if self.logger:
+                self.logger.error(f"Error uploading trace (background): {e}")
 
     def _do_upload(self, on_progress: Callable[[int, int], None] | None = None) -> None:
         """
sentience/integrations/langchain/__init__.py

Lines changed: 12 additions & 0 deletions
@@ -0,0 +1,12 @@
"""
LangChain / LangGraph integration helpers (optional).

This package is designed so the base SDK can be imported without LangChain installed.
All LangChain imports are done lazily inside tool-builder functions.
"""

from .context import SentienceLangChainContext
from .core import SentienceLangChainCore
from .tools import build_sentience_langchain_tools

__all__ = ["SentienceLangChainContext", "SentienceLangChainCore", "build_sentience_langchain_tools"]
sentience/integrations/langchain/context.py

Lines changed: 18 additions & 0 deletions
@@ -0,0 +1,18 @@
from __future__ import annotations

from dataclasses import dataclass

from sentience.browser import AsyncSentienceBrowser
from sentience.tracing import Tracer


@dataclass
class SentienceLangChainContext:
    """
    Context for LangChain/LangGraph integrations.

    We keep this small and explicit; it mirrors the PydanticAI deps object.
    """

    browser: AsyncSentienceBrowser
    tracer: Tracer | None = None
