# new_agent_tool_system.py — forked from kyegomez/swarms
"""
tool decorated func [search_api] -> agent which parses the docs of the tool func
-> injected into prompt -> agent will output json containing tool usage -> agent output will be parsed -> tool executed
-> terminal response can be returned to agent for self-healing
"""
import os
from dotenv import load_dotenv
# Import the OpenAIChat model and the Agent struct
from swarms import Agent, llama3Hosted
# Load environment variables from a local .env file (e.g. OPENAI_API_KEY)
load_dotenv()
# Define a tool
def search_api(query: str, description: str = "") -> str:
    """Search the web for the given query.

    Args:
        query (str): The search query to look up.
        description (str, optional): Extra context for the search; currently
            unused by the stub implementation. Defaults to "".

    Returns:
        str: A string describing the search results for the query.
    """
    return f"Search results for {query}"
def weather_api(
    query: str,
) -> str:
    """Get the weather for a location.

    Args:
        query (str): The location to fetch the weather for.

    Returns:
        str: A status message describing the lookup, so the agent can
            receive the tool's terminal response for self-healing.
    """
    result = f"Getting the weather for {query}"
    print(result)
    return result
def rapid_api(query: str) -> str:
    """Query the Rapid API for the given query.

    Args:
        query (str): The query to send to the Rapid API.

    Returns:
        str: A status message describing the lookup, so the agent can
            receive the tool's terminal response for self-healing.
    """
    # Fixed copy-paste bug: the original message said
    # "Getting the weather for ..." in this non-weather tool.
    result = f"Querying the rapid api for {query}"
    print(result)
    return result
# Get the API key from the environment.
# NOTE(review): api_key is never passed to llama3Hosted below — it is either
# unused or read implicitly by the library; confirm before removing.
api_key = os.environ.get("OPENAI_API_KEY")

# Initialize the language model (swarms' hosted Llama 3 wrapper).
llm = llama3Hosted(
    temperature=0.5,  # moderate sampling randomness
)

# Initialize the agent workflow.
agent = Agent(
    agent_name="Research Agent",
    llm=llm,
    max_loops=3,  # at most 3 reasoning loops per run
    dashboard=True,
    # Tool functions whose docstrings the agent parses to decide tool usage
    # (see module docstring above).
    tools=[search_api, weather_api, rapid_api],
    interactive=True,
    execute_tool=True,  # actually execute the selected tool, not just emit JSON
)

# Run the workflow on a task and show the final response.
out = agent.run("Use the weather tool in Miami")
print(out)