Graph Memory Error when added messages are a list of dicts
🐛 Describe the bug
OS: Windows 11
mem0ai version: 0.1.56
Neo4j and Qdrant docker-compose file:
services:
  neo4j:
    container_name: neo4j
    image: neo4j:latest
    ports:
      - 7474:7474
      - 7687:7687
    environment:
      - NEO4J_AUTH=neo4j/${NEO4J_PASSWORD}
      - NEO4J_apoc_export_file_enabled=true
      - NEO4J_apoc_import_file_enabled=true
      - NEO4J_apoc_import_file_use__neo4j__config=true
      - NEO4J_PLUGINS=["apoc", "graph-data-science"]
    volumes:
      - ./neo4j_db/data:/data
      - ./neo4j_db/logs:/logs
      - ./neo4j_db/import:/var/lib/neo4j/import
      - ./neo4j_db/plugins:/plugins
  qdrant:
    container_name: qdrant
    image: qdrant/qdrant:latest
    ports:
      - 6333:6333
      - 6334:6334
    volumes:
      - ./qdrant_storage:/qdrant/storage:z
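A minimal sketch, assuming the default ports from the compose file above, to sanity-check that both containers are reachable before mem0 touches them (the password value here is just an illustrative placeholder):
from neo4j import GraphDatabase
from qdrant_client import QdrantClient

# Illustrative placeholder; should match ${NEO4J_PASSWORD} from the compose file
NEO4J_PASSWORD = "your-neo4j-password"

driver = GraphDatabase.driver("neo4j://localhost:7687", auth=("neo4j", NEO4J_PASSWORD))
driver.verify_connectivity()  # raises if Neo4j is unreachable
driver.close()

client = QdrantClient(host="localhost", port=6333)
print(client.get_collections())  # an (initially empty) list of collections on a fresh instance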
mem0 configuration:
config_complete = {
    "vector_store": {
        "provider": "qdrant",
        "config": {
            "host": "localhost",
            "port": 6333
        }
    },
    "llm": {
        "provider": "openai",
        "config": {
            "model": "gpt-4o-mini",
            "temperature": 0.2,
            "max_tokens": 1500,
            "api_key": OPENAI_API_KEY,
            "openai_base_url": OPENAI_API_BASE
        }
    },
    "embedder": {
        "provider": "openai",
        "config": {
            "api_key": OPENAI_API_KEY,
            "model": "text-embedding-3-small",
            "openai_base_url": OPENAI_API_BASE
        }
    },
    "graph_store": {
        "provider": "neo4j",
        "config": {
            "url": "neo4j://localhost:7687",
            "username": "neo4j",
            "password": neo4j_passwd
        },
        "llm": {
            "provider": "openai",
            "config": {
                "model": "gpt-4o-mini",
                "temperature": 0.0,
                "api_key": OPENAI_API_KEY,
                "openai_base_url": OPENAI_API_BASE
            }
        }
    },
    "version": "v1.1"
}
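The dict above references OPENAI_API_KEY, OPENAI_API_BASE, and neo4j_passwd, which are defined elsewhere in the notebook; a minimal sketch of how they might be loaded (the environment-variable names are an assumption):
import os

OPENAI_API_KEY = os.environ["OPENAI_API_KEY"]
OPENAI_API_BASE = os.environ.get("OPENAI_API_BASE", "https://api.openai.com/v1")
neo4j_passwd = os.environ["NEO4J_PASSWORD"]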
mem_complete = Memory.from_config(config_dict=config_complete)
Loading the conversation from here:
from mem0 import Memory
conversation = [
    {
        "role": "assistant",
        "content": "Hi, I'm Best Buy's chatbot!\n\nThanks for being a My Best Buy TotalTM member.\n\nWhat can I help you with?",
    },
    {
        "role": "user",
        "content": 'Seeing horizontal lines on our tv. TV model: Sony - 77" Class BRAVIA XR A80K OLED 4K UHD Smart Google TV',
    },
    {
        "role": "assistant",
        "content": "Thanks for being a My Best Buy Total™ member. I can connect you to an expert immediately - just one perk of your membership!\n\nSelect the button below when you're ready to chat.",
    },
    {
        "role": "assistant",
        "content": "Good evening, thank you for choosing Best Buy, Fnu. My name is Lovely. I hope you are doing well. I'm sorry to hear that you're seeing horizontal lines on your TV.\n\nI'm absolutely committed to exploring all possible ways to assist you to fix this issue.\n\nTo ensure that we are on the right account, may I please have your email address registered with your Best Buy account?",
    },
    {"role": "user", "content": "[email protected]"},
    {
        "role": "assistant",
        "content": "Perfect! Thank you for providing all the details, surely you have made my job easier by doing this. I really appreciate it.\n\nI also want to take a moment to express our heartfelt appreciation for your trust and loyalty. Thank you for being an amazing customer of BestBuy Total.\n\nCould you please help me with the order number or product's details to check it quickly?\n\nSamsung - 49\" Odyssey OLED G9 (G95SC) DQHD 240Hz 0.03ms G-Sync Compatible Curved Smart Gaming Monitor - Silver - just to confirm this is the item, right?",
    },
    {"role": "user", "content": "Order number: 112217629"},
    {
        "role": "assistant",
        "content": "Superb! Thank you for confirmation.\n\nThank you for your patience. After exploring all possible solutions, I can help you to arrange a home repair appointment for your device. Our Geek Squad experts will visit your home to inspect and fix your device.\n\nIt's great that you have a protection plan - rest assured, we've got your back! As a valued Total member, you can avail this service at a minimal service fee. This fee, applicable to all repairs, covers the cost of diagnosing the issue and any small parts needed for the repair. It's part of our 24-month free protection plan.\n\nPlease click here to review the service fee and plan coverage details -\n\nhttps://www.bestbuy.com/site/best-buy-membership/best-buy-protection/pcmcat1608643232014.c?id=pcmcat1608643232014#jl-servicefees\n\nFnu - just to confirm shall I proceed to schedule the appointment?",
    },
    {"role": "user", "content": "Yes please"},
    {"role": "assistant", "content": "When should I schedule the appointment?"},
    {"role": "user", "content": "Schedule it for tomorrow please"}
]
mem_complete.add(messages=conversation, user_id="customer_service_bot")
Error generated:
ERROR:mem0.memory.graph_memory:Error in search tool: 'entity_type'
---------------------------------------------------------------------------
JSONDecodeError Traceback (most recent call last)
Cell In[16], line 35
1 conversation = [
2 {
3 "role": "assistant",
(...)
30 {"role": "user", "content": "Schedule it for tomorrow please"}
31 ]
33 #m.add(messages=conversation, user_id="customer_service_bot")
34 #memory_vs.add(messages=conversation, user_id="customer_service_bot")
---> 35 mem_complete.add(messages=conversation, user_id="customer_service_bot")
File c:\...\venv\Lib\site-packages\mem0\memory\main.py:125, in Memory.add(self, messages, user_id, agent_id, run_id, metadata, filters, prompt)
121 future2 = executor.submit(self._add_to_graph, messages, filters)
123 concurrent.futures.wait([future1, future2])
--> 125 vector_store_result = future1.result()
126 graph_result = future2.result()
128 if self.api_version == "v1.1":
File C:\Python311\Lib\concurrent\futures\_base.py:449, in Future.result(self, timeout)
447 raise CancelledError()
448 elif self._state == FINISHED:
--> 449 return self.__get_result()
451 self._condition.wait(timeout)
453 if self._state in [CANCELLED, CANCELLED_AND_NOTIFIED]:
File C:\Python311\Lib\concurrent\futures\_base.py:401, in Future.__get_result(self)
399 if self._exception:
400 try:
--> 401 raise self._exception
402 finally:
403 # Break a reference cycle with the exception in self._exception
404 self = None
File C:\Python311\Lib\concurrent\futures\thread.py:58, in _WorkItem.run(self)
55 return
57 try:
---> 58 result = self.fn(*self.args, **self.kwargs)
59 except BaseException as exc:
60 self.future.set_exception(exc)
File c:\...\venv\Lib\site-packages\mem0\memory\main.py:199, in Memory._add_to_vector_store(self, messages, metadata, filters)
193 new_memories_with_actions = self.llm.generate_response(
194 messages=[{"role": "user", "content": function_calling_prompt}],
195 response_format={"type": "json_object"},
196 )
198 new_memories_with_actions = remove_code_blocks(new_memories_with_actions)
--> 199 new_memories_with_actions = json.loads(new_memories_with_actions)
201 returned_memories = []
202 try:
File C:\Python311\Lib\json\__init__.py:346, in loads(s, cls, object_hook, parse_float, parse_int, parse_constant, object_pairs_hook, **kw)
341 s = s.decode(detect_encoding(s), 'surrogatepass')
343 if (cls is None and object_hook is None and
344 parse_int is None and parse_float is None and
345 parse_constant is None and object_pairs_hook is None and not kw):
--> 346 return _default_decoder.decode(s)
347 if cls is None:
348 cls = JSONDecoder
File C:\Python311\Lib\json\decoder.py:337, in JSONDecoder.decode(self, s, _w)
332 def decode(self, s, _w=WHITESPACE.match):
333 """Return the Python representation of ``s`` (a ``str`` instance
334 containing a JSON document).
335
336 """
--> 337 obj, end = self.raw_decode(s, idx=_w(s, 0).end())
338 end = _w(s, end).end()
339 if end != len(s):
File C:\Python311\Lib\json\decoder.py:353, in JSONDecoder.raw_decode(self, s, idx)
344 """Decode a JSON document from ``s`` (a ``str`` beginning with
345 a JSON document) and return a 2-tuple of the Python
346 representation and the index in ``s`` where the document ended.
(...)
350
351 """
352 try:
--> 353 obj, end = self.scan_once(s, idx)
354 except StopIteration as err:
355 raise JSONDecodeError("Expecting value", s, err.value) from None
JSONDecodeError: Unterminated string starting at: line 15 column 21 (char 362)
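The "Unterminated string" here suggests the JSON the LLM returned was cut off mid-string; one way to rule out the max_tokens=1500 cap in the llm config above is a quick retry with a larger limit (a diagnostic sketch, not a confirmed fix; the 4000 value is arbitrary):
import copy

config_more_tokens = copy.deepcopy(config_complete)
config_more_tokens["llm"]["config"]["max_tokens"] = 4000  # arbitrary larger cap

mem_retry = Memory.from_config(config_dict=config_more_tokens)
mem_retry.add(messages=conversation, user_id="customer_service_bot")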
Also tried with simpler messages from here:
messages = [
    {"role": "user", "content": "I'm planning a trip to Japan. Can you help me with an itinerary?"},
    {"role": "assistant", "content": "Sure! What are your travel dates and interests?"},
    {"role": "user", "content": "I'll be visiting in April, and I love culture, food, and nature."},
    {"role": "assistant", "content": "Great! How about cherry blossoms in Tokyo, temples in Kyoto, and street food in Osaka?"}
]
mem_complete.add(messages, user_id="sash")
Generated error:
c:\...\venv\Lib\site-packages\mem0\vector_stores\qdrant.py:143: DeprecationWarning: `search` method is deprecated and will be removed in the future. Use `query_points` instead.
hits = self.client.search(
ERROR:mem0.memory.graph_memory:Error in search tool: 'entity_type'
---------------------------------------------------------------------------
JSONDecodeError Traceback (most recent call last)
Cell In[15], line 8
1 messages = [
2 {"role": "user", "content": "I'm planning a trip to Japan. Can you help me with an itinerary?"},
3 {"role": "assistant", "content": "Sure! What are your travel dates and interests?"},
4 {"role": "user", "content": "I'll be visiting in April, and I love culture, food, and nature."},
5 {"role": "assistant", "content": "Great! How about cherry blossoms in Tokyo, temples in Kyoto, and street food in Osaka?"}
6 ]
----> 8 mem_complete.add(messages,user_id="sash")
File c:\...\venv\Lib\site-packages\mem0\memory\main.py:125, in Memory.add(self, messages, user_id, agent_id, run_id, metadata, filters, prompt)
121 future2 = executor.submit(self._add_to_graph, messages, filters)
123 concurrent.futures.wait([future1, future2])
--> 125 vector_store_result = future1.result()
126 graph_result = future2.result()
128 if self.api_version == "v1.1":
File C:\Python311\Lib\concurrent\futures\_base.py:449, in Future.result(self, timeout)
447 raise CancelledError()
448 elif self._state == FINISHED:
--> 449 return self.__get_result()
451 self._condition.wait(timeout)
453 if self._state in [CANCELLED, CANCELLED_AND_NOTIFIED]:
File C:\Python311\Lib\concurrent\futures\_base.py:401, in Future.__get_result(self)
399 if self._exception:
400 try:
--> 401 raise self._exception
402 finally:
403 # Break a reference cycle with the exception in self._exception
404 self = None
File C:\Python311\Lib\concurrent\futures\thread.py:58, in _WorkItem.run(self)
55 return
57 try:
---> 58 result = self.fn(*self.args, **self.kwargs)
59 except BaseException as exc:
60 self.future.set_exception(exc)
File c:\...\venv\Lib\site-packages\mem0\memory\main.py:199, in Memory._add_to_vector_store(self, messages, metadata, filters)
193 new_memories_with_actions = self.llm.generate_response(
194 messages=[{"role": "user", "content": function_calling_prompt}],
195 response_format={"type": "json_object"},
196 )
198 new_memories_with_actions = remove_code_blocks(new_memories_with_actions)
--> 199 new_memories_with_actions = json.loads(new_memories_with_actions)
201 returned_memories = []
202 try:
File C:\Python311\Lib\json\__init__.py:346, in loads(s, cls, object_hook, parse_float, parse_int, parse_constant, object_pairs_hook, **kw)
341 s = s.decode(detect_encoding(s), 'surrogatepass')
343 if (cls is None and object_hook is None and
344 parse_int is None and parse_float is None and
345 parse_constant is None and object_pairs_hook is None and not kw):
--> 346 return _default_decoder.decode(s)
347 if cls is None:
348 cls = JSONDecoder
File C:\Python311\Lib\json\decoder.py:337, in JSONDecoder.decode(self, s, _w)
332 def decode(self, s, _w=WHITESPACE.match):
333 """Return the Python representation of ``s`` (a ``str`` instance
334 containing a JSON document).
335
336 """
--> 337 obj, end = self.raw_decode(s, idx=_w(s, 0).end())
338 end = _w(s, end).end()
339 if end != len(s):
File C:\Python311\Lib\json\decoder.py:353, in JSONDecoder.raw_decode(self, s, idx)
344 """Decode a JSON document from ``s`` (a ``str`` beginning with
345 a JSON document) and return a 2-tuple of the Python
346 representation and the index in ``s`` where the document ended.
(...)
350
351 """
352 try:
--> 353 obj, end = self.scan_once(s, idx)
354 except StopIteration as err:
355 raise JSONDecodeError("Expecting value", s, err.value) from None
JSONDecodeError: Expecting property name enclosed in double quotes: line 19 column 23 (char 391)
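To narrow down which path produces the bad JSON, one option (a sketch, assuming nothing else in the config needs to change) is to rebuild the memory with the same config minus the graph_store block and re-run the same add:
# Same config without graph_store, so only the vector-store path runs.
config_vector_only = {k: v for k, v in config_complete.items() if k != "graph_store"}

mem_vector_only = Memory.from_config(config_dict=config_vector_only)
# If this still fails, the JSONDecodeError comes from the vector-store path
# (which matches the _add_to_vector_store frame in the traceback above);
# if it passes, the graph side is implicated.
mem_vector_only.add(messages, user_id="sash")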
I'm having this exact same issue in a similar setup. I've dumped so many hours into trying to troubleshoot it.
@Dev-Khant I can pick this up.
Sure, feel free to work on it.
@epie-godfred I tried recreating this issue, but I didn't run into the errors you're facing. Try updating the mem0 library and let me know if you still face the same issue.
Hi @parshvadaftari, you're right, the problem seems to be solved after upgrading. Thanks!