Skip to content
Open
Show file tree
Hide file tree
Changes from all commits
Commits
File filter

Filter by extension

Filter by extension

Conversations
Failed to load comments.
Loading
Jump to
Jump to file
Failed to load files.
Loading
Diff view
Diff view
133 changes: 133 additions & 0 deletions src/providers/agent-memory/index.ts
Original file line number Diff line number Diff line change
@@ -0,0 +1,133 @@
import type {
Provider,
ProviderConfig,
IngestOptions,
IngestResult,
SearchOptions,
IndexingProgressCallback,
} from "../../types/provider"
import type { UnifiedSession } from "../../types/unified"
import { logger } from "../../utils/logger"
import { AGENT_MEMORY_PROMPTS } from "./prompts"

/**
* agent-memory provider for MemoryBench.
*
* Connects to a local agent-memory bench server (Python HTTP wrapper).
* The server manages per-container SQLite databases with semantic embeddings
* and graph-based memory relationships.
*
* Start the server: python -m agent_memory.bench_server --port 9876
*/
export class AgentMemoryProvider implements Provider {
  name = "agent-memory"
  prompts = AGENT_MEMORY_PROMPTS
  concurrency = {
    default: 10, // Local server, so moderate concurrency is reasonable
  }
  // Default endpoint; overridden by config.baseUrl in initialize() when set.
  private baseUrl: string = "http://127.0.0.1:9876"

  /**
   * Store the configured base URL and verify the bench server is reachable.
   *
   * @param config - Provider configuration; only `baseUrl` is read here.
   * @throws Error when the /health endpoint is unreachable or returns a
   *   non-2xx status, with instructions for starting the server.
   */
  async initialize(config: ProviderConfig): Promise<void> {
    if (config.baseUrl) {
      this.baseUrl = config.baseUrl
    }

    // Health check: fail fast with an actionable message if the local
    // Python server is not running.
    try {
      const res = await fetch(`${this.baseUrl}/health`)
      if (!res.ok) throw new Error(`HTTP ${res.status}`)
      const data = await res.json() as { status: string }
      logger.info(`Connected to agent-memory bench server: ${data.status}`)
    } catch (e) {
      throw new Error(
        `Cannot connect to agent-memory bench server at ${this.baseUrl}. ` +
        `Start it with: python -m agent_memory.bench_server --port 9876\n` +
        `Error: ${e}`
      )
    }
  }

  /**
   * Send sessions to the server in small batches and collect the document
   * IDs it assigns.
   *
   * @param sessions - Unified sessions to ingest.
   * @param options - Carries the container tag that scopes the memories.
   * @returns The IDs of every memory document the server reported creating.
   * @throws Error when any batch request returns a non-2xx status.
   */
  async ingest(sessions: UnifiedSession[], options: IngestOptions): Promise<IngestResult> {
    // Small batches avoid overwhelming the local server.
    const batchSize = 5
    const allDocIds: string[] = []

    for (let i = 0; i < sessions.length; i += batchSize) {
      const batch = sessions.slice(i, i + batchSize)

      const res = await fetch(`${this.baseUrl}/ingest`, {
        method: "POST",
        headers: { "Content-Type": "application/json" },
        body: JSON.stringify({
          containerTag: options.containerTag,
          sessions: batch,
        }),
      })

      if (!res.ok) {
        const text = await res.text()
        throw new Error(`Ingest failed: ${text}`)
      }

      // Mark documentIds optional and guard the spread: spreading undefined
      // from a malformed server reply would throw a TypeError.
      const data = await res.json() as { documentIds?: string[], count?: number }
      allDocIds.push(...(data.documentIds ?? []))

      // Periodic progress log every 4 batches (20 sessions).
      if (i % 20 === 0 && i > 0) {
        logger.info(`Ingested ${i}/${sessions.length} sessions (${allDocIds.length} memories)`)
      }
    }

    logger.info(`Ingested ${sessions.length} sessions → ${allDocIds.length} memories`)
    return { documentIds: allDocIds }
  }

  /**
   * No-op wait: the server indexes synchronously during /ingest, so all
   * documents are already searchable. Reports full completion immediately.
   */
  async awaitIndexing(
    result: IngestResult,
    _containerTag: string,
    onProgress?: IndexingProgressCallback
  ): Promise<void> {
    const total = result.documentIds.length
    onProgress?.({
      completedIds: result.documentIds,
      failedIds: [],
      total,
    })
  }

  /**
   * Query the server for memories relevant to `query` within the container.
   *
   * @returns The server's result list; shape is provider-specific, hence
   *   `unknown[]`. Empty array when the reply carries no results.
   * @throws Error when the request returns a non-2xx status.
   */
  async search(query: string, options: SearchOptions): Promise<unknown[]> {
    const res = await fetch(`${this.baseUrl}/search`, {
      method: "POST",
      headers: { "Content-Type": "application/json" },
      body: JSON.stringify({
        containerTag: options.containerTag,
        query,
        // ?? instead of ||: an explicit limit of 0 must not be silently
        // replaced by the default.
        limit: options.limit ?? 30,
      }),
    })

    if (!res.ok) {
      const text = await res.text()
      throw new Error(`Search failed: ${text}`)
    }

    const data = await res.json() as { results: unknown[] }
    return data.results ?? []
  }

  /**
   * Delete all memories stored under `containerTag`. Failures are logged
   * (best-effort cleanup), not thrown.
   */
  async clear(containerTag: string): Promise<void> {
    const res = await fetch(`${this.baseUrl}/clear`, {
      method: "POST",
      headers: { "Content-Type": "application/json" },
      body: JSON.stringify({ containerTag }),
    })

    if (!res.ok) {
      // Include status and body so a failed cleanup is diagnosable.
      const text = await res.text()
      logger.warn(`Clear failed for ${containerTag}: HTTP ${res.status} ${text}`)
    } else {
      logger.info(`Cleared memories for: ${containerTag}`)
    }
  }
}

export default AgentMemoryProvider
30 changes: 30 additions & 0 deletions src/providers/agent-memory/prompts.ts
Original file line number Diff line number Diff line change
@@ -0,0 +1,30 @@
import type { ProviderPrompts } from "../../types/prompts"

export const AGENT_MEMORY_PROMPTS: ProviderPrompts = {
answerPrompt: (question: string, context: unknown[], questionDate?: string) => {
const memories = (context as Array<{ memory?: string; score?: number }>)
.map((r, i) => {
const memory = r.memory || JSON.stringify(r)
const score = r.score ? ` (relevance: ${r.score.toFixed(2)})` : ""
Copy link

Choose a reason for hiding this comment

The reason will be displayed to describe this comment to others. Learn more.

Bug: The truthiness check on r.score is unsafe. It will cause a crash if the score is non-numeric and will incorrectly hide a valid score of 0.
Severity: MEDIUM

Suggested Fix

Replace the truthiness check with an explicit type check to ensure r.score is a number before calling .toFixed(2). For example: const score = typeof r.score === "number" ? ", (relevance: " + r.score.toFixed(2) + ")" : "". This handles non-numeric values gracefully and correctly formats a score of 0.

Prompt for AI Agent
Review the code at the location below. A potential bug has been identified by an AI
agent.
Verify if this is a real issue. If it is, propose a fix; if not, explain why it's not
valid.

Location: src/providers/agent-memory/prompts.ts#L8

Potential issue: The code uses a truthiness check on the `r.score` property before
calling `.toFixed(2)`. This creates two issues. First, if an external agent-memory
service returns a non-numeric value for `score` (e.g., a string, null), the call to
`.toFixed(2)` will throw a `TypeError` and crash the answer generation process. The
provider's search method returns `unknown[]`, so the type of `score` is not guaranteed.
Second, if `r.score` is `0`, the truthiness check will fail, and the score will be
incorrectly omitted from the output.

Did we get this right? 👍 / 👎 to inform future reviews.

return `${i + 1}. ${memory}${score}`
})
.join("\n")

return `You are answering questions based on memories from past conversations.

${questionDate ? `Current date context: ${questionDate}\n` : ""}
Retrieved memories:
${memories || "(no relevant memories found)"}

Question: ${question}

Instructions:
- Answer ONLY based on the retrieved memories above
- If the memories don't contain enough information, say so
- Be specific and cite details from the memories
- For temporal questions, pay attention to dates and sequence of events
- If memories contradict each other, prefer the most recent one

Answer:`
},
}
4 changes: 3 additions & 1 deletion src/providers/index.ts
Original file line number Diff line number Diff line change
Expand Up @@ -5,13 +5,15 @@ import { Mem0Provider } from "./mem0"
import { ZepProvider } from "./zep"
import { FilesystemProvider } from "./filesystem"
import { RAGProvider } from "./rag"
import { AgentMemoryProvider } from "./agent-memory"

// Registry mapping every ProviderName to its concrete Provider class.
// Keys must stay in sync with the ProviderName union; the Record type
// makes the compiler flag a missing or extra entry.
const providers: Record<ProviderName, new () => Provider> = {
supermemory: SupermemoryProvider,
mem0: Mem0Provider,
zep: ZepProvider,
filesystem: FilesystemProvider,
rag: RAGProvider,
"agent-memory": AgentMemoryProvider,
}

export function createProvider(name: ProviderName): Provider {
Expand Down Expand Up @@ -39,4 +41,4 @@ export function getProviderInfo(name: ProviderName): {
}
}

export { SupermemoryProvider, Mem0Provider, ZepProvider, FilesystemProvider, RAGProvider }
export { SupermemoryProvider, Mem0Provider, ZepProvider, FilesystemProvider, RAGProvider, AgentMemoryProvider }
2 changes: 1 addition & 1 deletion src/types/provider.ts
Original file line number Diff line number Diff line change
Expand Up @@ -47,4 +47,4 @@ export interface Provider {
clear(containerTag: string): Promise<void>
}

export type ProviderName = "supermemory" | "mem0" | "zep" | "filesystem" | "rag"
export type ProviderName = "supermemory" | "mem0" | "zep" | "filesystem" | "rag" | "agent-memory"
4 changes: 4 additions & 0 deletions src/utils/config.ts
Original file line number Diff line number Diff line change
Expand Up @@ -3,6 +3,7 @@ export interface Config {
supermemoryBaseUrl: string
mem0ApiKey: string
zepApiKey: string
agentMemoryBaseUrl: string
openaiApiKey: string
anthropicApiKey: string
googleApiKey: string
Expand All @@ -13,6 +14,7 @@ export const config: Config = {
supermemoryBaseUrl: process.env.SUPERMEMORY_BASE_URL || "https://api.supermemory.ai",
mem0ApiKey: process.env.MEM0_API_KEY || "",
zepApiKey: process.env.ZEP_API_KEY || "",
agentMemoryBaseUrl: process.env.AGENT_MEMORY_BASE_URL || "http://127.0.0.1:9876",
openaiApiKey: process.env.OPENAI_API_KEY || "",
anthropicApiKey: process.env.ANTHROPIC_API_KEY || "",
googleApiKey: process.env.GOOGLE_API_KEY || "",
Expand All @@ -30,6 +32,8 @@ export function getProviderConfig(provider: string): { apiKey: string; baseUrl?:
return { apiKey: config.openaiApiKey } // Filesystem uses OpenAI for memory extraction
case "rag":
return { apiKey: config.openaiApiKey } // RAG provider uses OpenAI for embeddings
case "agent-memory":
return { apiKey: "local", baseUrl: config.agentMemoryBaseUrl }
default:
throw new Error(`Unknown provider: ${provider}`)
}
Expand Down