You signed in with another tab or window. Reload to refresh your session.You signed out in another tab or window. Reload to refresh your session.You switched accounts on another tab or window. Reload to refresh your session.Dismiss alert
When I used llama_index.memory.mem0, I encountered an error stating that the 'Memory' object has no attribute 'version'.
The logic worked after I deleted the condition `and self._client.version == "v1.1"` from line 147 of memory/mem0/base.py.
The versions in use are: Python 3.12, llama-index==0.12.2, llama-index-memory-mem0==0.2.0, and mem0ai==0.1.34.
# my code
# Reproduction script: build a Mem0-backed memory for a llama-index chat engine.
# NOTE(review): `index`, `text_qa_template`, `refine_template`, and `llm` are
# defined elsewhere in the user's session — this snippet is not self-contained.
import os

from llama_index.memory.mem0 import Mem0Memory

user_id = "jsduan"
agent_id = "agent_1"
run_id = "run_1"

# Mem0 context: only the user scope is active; agent/run scopes are disabled.
context = {
    "user_id": user_id,
    # "agent_id": agent_id,
    # "run_id": run_id,
}

config = {
    "vector_store": {
        # Alternative qdrant backend, kept for reference:
        # "provider": "qdrant",
        # "config": {
        #     "collection_name": "test",
        #     "host": "localhost",
        #     "port": 6333,
        #     "embedding_model_dims": 768,  # Change this according to your local model's dimensions
        # },
        "provider": "chroma",
        "config": {
            "collection_name": "chat_session",
            "path": "chroma_db",
        },
    },
    "llm": {
        "provider": "ollama",
        "config": {
            "model": "llama3.1:8b",
            "temperature": 0,
            "max_tokens": 8000,
            "ollama_base_url": "http://localhost:11434",  # Ensure this URL is correct
        },
    },
    "embedder": {
        "provider": "ollama",
        "config": {
            "model": "nomic-embed-text",
            # Alternatively, you can use "snowflake-arctic-embed:latest"
            # Fix: this key had been swallowed into the comment above by the
            # paste — without it the embedder has no base URL configured.
            "ollama_base_url": "http://localhost:11434",
        },
    },
    "version": "v1.1",
}

m = Mem0Memory.from_config(
    context=context,
    config=config,
    search_msg_limit=4,
)

agent = index.as_chat_engine(
    text_qa_template=text_qa_template,
    refine_template=refine_template,
    llm=llm,
    memory=m,
    verbose=True,
    chat_mode="best",
)

response_stream = agent.stream_chat("订单管理的收货方式有什么?")
response_stream.print_response_stream()
File ~/.conda/envs/LlamaIndexStudy/lib/python3.12/site-packages/llama_index/core/callbacks/utils.py:41, in trace_method.<locals>.decorator.<locals>.wrapper(self, *args, **kwargs)
[39](https://vscode-remote+wsl-002bubuntu-002d24-002e04.vscode-resource.vscode-cdn.net/mnt/c/Users/js/Documents/Workspace/LlamaIndex/~/.conda/envs/LlamaIndexStudy/lib/python3.12/site-packages/llama_index/core/callbacks/utils.py:39) callback_manager = cast(CallbackManager, callback_manager)
[40](https://vscode-remote+wsl-002bubuntu-002d24-002e04.vscode-resource.vscode-cdn.net/mnt/c/Users/js/Documents/Workspace/LlamaIndex/~/.conda/envs/LlamaIndexStudy/lib/python3.12/site-packages/llama_index/core/callbacks/utils.py:40) with callback_manager.as_trace(trace_id):
---> [41](https://vscode-remote+wsl-002bubuntu-002d24-002e04.vscode-resource.vscode-cdn.net/mnt/c/Users/js/Documents/Workspace/LlamaIndex/~/.conda/envs/LlamaIndexStudy/lib/python3.12/site-packages/llama_index/core/callbacks/utils.py:41) return func(self, *args, **kwargs)
...
--> [147](https://vscode-remote+wsl-002bubuntu-002d24-002e04.vscode-resource.vscode-cdn.net/mnt/c/Users/js/Documents/Workspace/LlamaIndex/~/.conda/envs/LlamaIndexStudy/lib/python3.12/site-packages/llama_index/memory/mem0/base.py:147) if isinstance(self._client, Memory) and self._client.version == "v1.1":
[148](https://vscode-remote+wsl-002bubuntu-002d24-002e04.vscode-resource.vscode-cdn.net/mnt/c/Users/js/Documents/Workspace/LlamaIndex/~/.conda/envs/LlamaIndexStudy/lib/python3.12/site-packages/llama_index/memory/mem0/base.py:148) search_results = search_results["results"]
[150](https://vscode-remote+wsl-002bubuntu-002d24-002e04.vscode-resource.vscode-cdn.net/mnt/c/Users/js/Documents/Workspace/LlamaIndex/~/.conda/envs/LlamaIndexStudy/lib/python3.12/site-packages/llama_index/memory/mem0/base.py:150) system_message = convert_memory_to_system_message(search_results)
AttributeError: 'Memory' object has no attribute 'version'
# memory>mem0>base.py
def get(self, input: Optional[str] = None, **kwargs: Any) -> List[ChatMessage]:
    """Get chat history, with memory search results prepended as a system message.

    Args:
        input: Optional query string; combined with recent messages to form
            the memory search query.
        **kwargs: Forwarded to the primary memory's ``get``.

    Returns:
        The chat history, with a memory-derived system message inserted (or
        merged into an existing leading system message).
    """
    messages = self.primary_memory.get(input=input, **kwargs)
    input = convert_messages_to_string(messages, input, limit=self.search_msg_limit)

    search_results = self.search(query=input, **self.context.get_context())

    # Bug fix: the local mem0 `Memory` client (mem0ai >= 0.1.x) no longer
    # exposes a `version` attribute, so probing `self._client.version`
    # raised AttributeError. Detect the v1.1 response shape from the
    # payload itself instead: v1.1 wraps hits in a {"results": [...]} dict,
    # while v1.0 returns the list directly.
    if (
        isinstance(self._client, Memory)
        and isinstance(search_results, dict)
        and "results" in search_results
    ):
        search_results = search_results["results"]

    system_message = convert_memory_to_system_message(search_results)

    # If a system message is already present, merge the memory results into it
    # rather than stacking a second system message.
    if len(messages) > 0 and messages[0].role == MessageRole.SYSTEM:
        assert messages[0].content is not None
        system_message = convert_memory_to_system_message(
            response=search_results, existing_system_message=messages[0]
        )
        messages.insert(0, system_message)
    return messages
The text was updated successfully, but these errors were encountered:
🐛 Describe the bug
When I used llama_index.memory.mem0, I encountered an error stating that the 'Memory' object has no attribute 'version'.
The logic worked after I deleted the condition `and self._client.version == "v1.1"` from line 147 of memory/mem0/base.py.
The versions in use are: Python 3.12, llama-index==0.12.2, llama-index-memory-mem0==0.2.0, and mem0ai==0.1.34.
The text was updated successfully, but these errors were encountered: