From 6fca80a62d98186b8dbcdb371bcb14a7c830640d Mon Sep 17 00:00:00 2001
From: Sergey Galuza
Date: Wed, 14 Jan 2026 20:22:21 +0100
Subject: [PATCH] fix(falkordb): add missing special characters to sanitize method

Add forward slash (/), backslash (\), and pipe (|) to the sanitize()
method to prevent RediSearch query syntax errors.

RediSearch uses these characters with special meaning:
- | is the OR operator
- \ is the escape character
- / causes query syntax errors in entity names

Updated docstring with:
- Reference to the RediSearch escaping documentation
- Note that this is for full-text search, not exact path matching

Fixes #1144
Related to #1118

Built with [OnSteroids](https://onsteroids.ai)

Co-Authored-By: OnSteroids
---
 graphiti_core/driver/falkordb_driver.py | 20 +++++++++++++++++---
 1 file changed, 17 insertions(+), 3 deletions(-)

diff --git a/graphiti_core/driver/falkordb_driver.py b/graphiti_core/driver/falkordb_driver.py
index de469d53a..15c285e84 100644
--- a/graphiti_core/driver/falkordb_driver.py
+++ b/graphiti_core/driver/falkordb_driver.py
@@ -288,10 +288,21 @@ def convert_datetimes_to_strings(obj):
 
     def sanitize(self, query: str) -> str:
         """
-        Replace FalkorDB special characters with whitespace.
-        Based on FalkorDB tokenization rules: ,.<>{}[]"':;!@#$%^&*()-+=~
+        Replace FalkorDB/RediSearch special characters with whitespace.
+
+        This method is designed for full-text search queries where text is split
+        into searchable words. Not suitable for exact path matching (e.g., file
+        paths like '/home/user/file.txt' will become 'home user file txt').
+
+        RediSearch (used by FalkorDB for full-text search) has reserved characters
+        that must be sanitized to prevent query syntax errors:
+        - Tokenization separators: ,.<>{}[]"':;!@#$%^&*()-+=~?
+        - Query operators: | (OR), / and \\ (path separators that break queries)
+
+        See: https://redis.io/docs/latest/develop/interact/search-and-query/advanced-concepts/escaping/
+        Related: https://github.com/getzep/graphiti/issues/1144
         """
-        # FalkorDB separator characters that break text into tokens
+        # RediSearch separator and operator characters
         separator_map = str.maketrans(
             {
                 ',': ' ',
@@ -321,6 +332,9 @@ def sanitize(self, query: str) -> str:
                 '=': ' ',
                 '~': ' ',
                 '?': ' ',
+                '/': ' ',  # Forward slash - causes query syntax errors
+                '\\': ' ',  # Backslash - escape character in RediSearch
+                '|': ' ',  # Pipe - OR operator in RediSearch
             }
         )
         sanitized = query.translate(separator_map)
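
Reviewer note: a minimal standalone sketch of the translation behavior this patch gives sanitize(), assuming only the separator set shown in the diff. The module-level helper and the _SEPARATOR_MAP name below are illustrative, not the driver's actual structure, and the real method may post-process the result (e.g. collapse whitespace) after translate().

    # Hypothetical reproduction of the patched separator set (not the driver class).
    _SEPARATORS = ',.<>{}[]"\':;!@#$%^&*()-+=~?/\\|'
    _SEPARATOR_MAP = str.maketrans({ch: ' ' for ch in _SEPARATORS})

    def sanitize(query: str) -> str:
        # Replace RediSearch separator/operator characters with whitespace so the
        # remaining words can be fed to a full-text query without syntax errors.
        return query.translate(_SEPARATOR_MAP)

    print(sanitize('/home/user/file.txt'))  # ' home user file txt' (leading space; driver may trim/collapse)
    print(sanitize('alice|bob\\carol'))     # 'alice bob carol'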