Commit

Adding v1.1 code snippet for personal-travel-assistant (#1785)
PranavPuranik authored Sep 3, 2024
1 parent af84548 commit 83eb800
Showing 1 changed file with 97 additions and 3 deletions: docs/examples/personal-travel-assistant.mdx

@@ -19,7 +19,99 @@ pip install openai mem0ai

Here's the complete code to create and interact with a Personalized AI Travel Assistant using Mem0:

-```python
<CodeGroup>

```python After v1.1
import os
from openai import OpenAI
from mem0 import Memory

# Set the OpenAI API key
os.environ['OPENAI_API_KEY'] = "sk-xxx"

config = {
    "llm": {
        "provider": "openai",
        "config": {
            "model": "gpt-4o",
            "temperature": 0.1,
            "max_tokens": 2000,
        }
    },
    "embedder": {
        "provider": "openai",
        "config": {
            "model": "text-embedding-3-large"
        }
    },
    "vector_store": {
        "provider": "qdrant",
        "config": {
            "collection_name": "test",
            "embedding_model_dims": 3072,
        }
    },
    "version": "v1.1",
}

class PersonalTravelAssistant:
    def __init__(self):
        self.client = OpenAI()
        self.memory = Memory.from_config(config)
        self.messages = [{"role": "system", "content": "You are a personal AI Assistant."}]

    def ask_question(self, question, user_id):
        # Fetch previous related memories
        previous_memories = self.search_memories(question, user_id=user_id)
        prompt = question
        if previous_memories:
            prompt = f"User input: {question}\n Previous memories: {previous_memories}"
        self.messages.append({"role": "user", "content": prompt})

        # Generate response using GPT-4o
        response = self.client.chat.completions.create(
            model="gpt-4o",
            messages=self.messages
        )
        answer = response.choices[0].message.content
        self.messages.append({"role": "assistant", "content": answer})

        # Store the question in memory
        self.memory.add(question, user_id=user_id)
        return answer

    def get_memories(self, user_id):
        memories = self.memory.get_all(user_id=user_id)
        return [m['memory'] for m in memories['memories']]

    def search_memories(self, query, user_id):
        memories = self.memory.search(query, user_id=user_id)
        return [m['memory'] for m in memories['memories']]

# Usage example
user_id = "traveler_123"
ai_assistant = PersonalTravelAssistant()

def main():
    while True:
        question = input("Question: ")
        if question.lower() in ['q', 'exit']:
            print("Exiting...")
            break

        answer = ai_assistant.ask_question(question, user_id=user_id)
        print(f"Answer: {answer}")
        memories = ai_assistant.get_memories(user_id=user_id)
        print("Memories:")
        for memory in memories:
            print(f"- {memory}")
        print("-----")

if __name__ == "__main__":
    main()
```

```python Before v1.1
import os
from openai import OpenAI
from mem0 import Memory
@@ -55,11 +147,11 @@ class PersonalTravelAssistant:

    def get_memories(self, user_id):
        memories = self.memory.get_all(user_id=user_id)
-       return [m['text'] for m in memories]
+       return [m['memory'] for m in memories['memories']]

    def search_memories(self, query, user_id):
        memories = self.memory.search(query, user_id=user_id)
-       return [m['text'] for m in memories]
+       return [m['memory'] for m in memories['memories']]

# Usage example
user_id = "traveler_123"
@@ -83,6 +175,8 @@ def main():
if __name__ == "__main__":
    main()
```
</CodeGroup>
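
For reference, here is a minimal sketch of how the result shapes differ between the two snippets above. Only the access patterns (`m['text']` over a plain list before v1.1, `m['memory']` under the `memories` key in the v1.1 snippet) come from the code shown here; the sample memory text and the exact payload fields are assumptions, and real mem0ai responses may carry additional fields such as ids or scores.

```python
# Illustrative only: sample payloads mirroring the access patterns used in the
# snippets above. Field sets and memory text are assumed, not taken from mem0ai.
pre_v1_1_memories = [
    {"text": "User is planning a trip to San Francisco"},
]

v1_1_memories = {
    "memories": [
        {"memory": "User is planning a trip to San Francisco"},
    ],
}

# Before v1.1: the result is a flat list, and each entry exposes 'text'.
print([m["text"] for m in pre_v1_1_memories])

# After v1.1 (as used above): entries live under 'memories' and expose 'memory'.
print([m["memory"] for m in v1_1_memories["memories"]])
```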


## Key Components

