Skip to content
Merged
Show file tree
Hide file tree
Changes from all commits
Commits
File filter

Filter by extension

Filter by extension

Conversations
Failed to load comments.
Loading
Jump to
Jump to file
Failed to load files.
Loading
Diff view
Diff view
16 changes: 14 additions & 2 deletions src/memos/graph_dbs/polardb.py
Original file line number Diff line number Diff line change
Expand Up @@ -2179,8 +2179,9 @@ def get_by_metadata(

# Format value
if isinstance(value, str):
# Escape single quotes in string values
escaped_str = value.replace("'", "''")
# Escape single quotes using backslash when inside $$ dollar-quoted strings
# In $$ delimiters, Cypher string literals can use \' to escape single quotes
escaped_str = value.replace("'", "\\'")
escaped_value = f"'{escaped_str}'"
elif isinstance(value, list):
# Handle list values - use double quotes for Cypher arrays
Expand Down Expand Up @@ -4153,6 +4154,17 @@ def _build_filter_conditions_cypher(
if filter:

def escape_cypher_string(value: str) -> str:
    """
    Escape a Python string for safe embedding in a single-quoted Cypher
    string literal that itself lives inside a PostgreSQL $$ dollar-quoted
    block.

    Backslashes are escaped first (\\ -> \\\\), then single quotes
    (' -> \\'). The order matters: escaping quotes first and backslashes
    second would double the backslash just added for the quote. Without
    backslash escaping at all, a value ending in a literal backslash
    would produce ``...\\'`` — the backslash swallows the closing quote
    of the Cypher literal, breaking the query (and opening an injection
    vector). The $$ delimiters shield these backslashes from PostgreSQL's
    own string parsing, so they reach the Cypher parser intact.

    Args:
        value: Raw string to embed in a Cypher single-quoted literal.

    Returns:
        The string with backslashes and single quotes backslash-escaped.
    """
    # Escape the escape character itself before escaping quotes.
    return value.replace("\\", "\\\\").replace("'", "\\'")

def build_cypher_filter_condition(condition_dict: dict) -> str:
Expand Down
Original file line number Diff line number Diff line change
Expand Up @@ -134,7 +134,7 @@ def _add_memories_parallel(
return added_ids

def _add_memories_batch(
self, memories: list[TextualMemoryItem], user_name: str | None = None, batch_size: int = 10
self, memories: list[TextualMemoryItem], user_name: str | None = None, batch_size: int = 50
) -> list[str]:
"""
Add memories using batch database operations (more efficient for large batches).
Expand Down