forked from QwenLM/Qwen-Agent
-
Notifications
You must be signed in to change notification settings - Fork 0
Commit
This commit does not belong to any branch on this repository, and may belong to a fork outside of the repository.
add fn_call example and bugfix gen_keyword
- Loading branch information
Showing
22 changed files
with
253 additions
and
113 deletions.
There are no files selected for viewing
This file contains bidirectional Unicode text that may be interpreted or compiled differently than what appears below. To review, open the file in an editor that reveals hidden Unicode characters.
Learn more about bidirectional Unicode characters
This file contains bidirectional Unicode text that may be interpreted or compiled differently than what appears below. To review, open the file in an editor that reveals hidden Unicode characters.
Learn more about bidirectional Unicode characters
This file contains bidirectional Unicode text that may be interpreted or compiled differently than what appears below. To review, open the file in an editor that reveals hidden Unicode characters.
Learn more about bidirectional Unicode characters
This file contains bidirectional Unicode text that may be interpreted or compiled differently than what appears below. To review, open the file in an editor that reveals hidden Unicode characters.
Learn more about bidirectional Unicode characters
This file contains bidirectional Unicode text that may be interpreted or compiled differently than what appears below. To review, open the file in an editor that reveals hidden Unicode characters.
Learn more about bidirectional Unicode characters
Original file line number | Diff line number | Diff line change |
---|---|---|
@@ -0,0 +1,114 @@ | ||
# Reference: https://platform.openai.com/docs/guides/function-calling | ||
import json | ||
|
||
from qwen_agent.llm import get_chat_model | ||
|
||
|
||
# Example dummy function hard coded to return the same weather | ||
# In production, this could be your backend API or an external API | ||
def get_current_weather(location, unit='fahrenheit'):
    """Get the current weather in a given location.

    Dummy backend: returns a canned JSON report for a few known cities
    (matched case-insensitively by substring) and an 'unknown' temperature
    for anything else. The `unit` argument is accepted but not used by
    this stub implementation.
    """
    # Canned reports keyed by the lowercase city substring we match on.
    _KNOWN_WEATHER = {
        'tokyo': {
            'location': 'Tokyo',
            'temperature': '10',
            'unit': 'celsius'
        },
        'san francisco': {
            'location': 'San Francisco',
            'temperature': '72',
            'unit': 'fahrenheit'
        },
        'paris': {
            'location': 'Paris',
            'temperature': '22',
            'unit': 'celsius'
        },
    }
    query = location.lower()
    for city, report in _KNOWN_WEATHER.items():
        if city in query:
            return json.dumps(report)
    return json.dumps({'location': location, 'temperature': 'unknown'})
|
||
|
||
def run_conversation():
    """Run one full function-calling round trip with the LLM.

    Flow (mirrors the OpenAI function-calling recipe):
      1. Send the user question plus the function schemas to the model.
      2. If the model responds with a `function_call`, execute the named
         local function with the model-supplied JSON arguments.
      3. Append the function result as a `role: 'function'` message and
         ask the model again so it can produce the final answer.

    Requires a reachable model service (DashScope or an OpenAI-compatible
    endpoint); results are printed, nothing is returned.
    """
    llm = get_chat_model({
        # Use the model service provided by DashScope:
        'model': 'qwen-max',
        'model_server': 'dashscope',
        # 'api_key': 'YOUR_DASHSCOPE_API_KEY',
        # It will use the `DASHSCOPE_API_KEY' environment variable if 'api_key' is not set.

        # Use your own model service compatible with OpenAI API:
        # 'model': 'Qwen/Qwen1.5-72B-Chat',
        # 'model_server': 'http://localhost:8000/v1',  # api_base
        # 'api_key': 'EMPTY',
    })

    # Step 1: send the conversation and available functions to the model
    messages = [{
        'role': 'user',
        'content': "What's the weather like in San Francisco?"
    }]
    functions = [{
        'name': 'get_current_weather',
        'description': 'Get the current weather in a given location',
        'parameters': {
            'type': 'object',
            'properties': {
                'location': {
                    'type': 'string',
                    'description':
                    'The city and state, e.g. San Francisco, CA',
                },
                'unit': {
                    'type': 'string',
                    'enum': ['celsius', 'fahrenheit']
                },
            },
            'required': ['location'],
        },
    }]

    print('# Assistant Response 1:')
    responses = llm.chat(messages=messages, functions=functions, stream=False)
    print(responses)

    messages.extend(responses)  # extend conversation with assistant's reply

    # Step 2: check if the model wanted to call a function
    last_response = messages[-1]
    if last_response.get('function_call', None):

        # Step 3: call the function
        # Note: the JSON response may not always be valid; be sure to handle errors
        available_functions = {
            'get_current_weather': get_current_weather,
        }  # only one function in this example, but you can have multiple
        function_name = last_response['function_call']['name']
        function_to_call = available_functions[function_name]
        function_args = json.loads(last_response['function_call']['arguments'])
        # Bugfix: forward 'unit' only when the model actually supplied it.
        # Passing `unit=function_args.get('unit')` would send None when the
        # optional argument is omitted, clobbering the callee's default.
        call_kwargs = {'location': function_args.get('location')}
        if function_args.get('unit') is not None:
            call_kwargs['unit'] = function_args['unit']
        function_response = function_to_call(**call_kwargs)
        print('# Function Response:')
        print(function_response)

        # Step 4: send the info for each function call and function response to the model
        messages.append({
            'role': 'function',
            'name': function_name,
            'content': function_response,
        })  # extend conversation with function response

        print('# Assistant Response 2:')
        responses = llm.chat(
            messages=messages,
            functions=functions,
            stream=False,
        )  # get a new response from the model where it can see the function response
        print(responses)
|
||
|
||
# Script entry point: run the function-calling demo when executed directly.
if __name__ == '__main__':
    run_conversation()
This file contains bidirectional Unicode text that may be interpreted or compiled differently than what appears below. To review, open the file in an editor that reveals hidden Unicode characters.
Learn more about bidirectional Unicode characters
This file contains bidirectional Unicode text that may be interpreted or compiled differently than what appears below. To review, open the file in an editor that reveals hidden Unicode characters.
Learn more about bidirectional Unicode characters
This file contains bidirectional Unicode text that may be interpreted or compiled differently than what appears below. To review, open the file in an editor that reveals hidden Unicode characters.
Learn more about bidirectional Unicode characters
This file contains bidirectional Unicode text that may be interpreted or compiled differently than what appears below. To review, open the file in an editor that reveals hidden Unicode characters.
Learn more about bidirectional Unicode characters
This file contains bidirectional Unicode text that may be interpreted or compiled differently than what appears below. To review, open the file in an editor that reveals hidden Unicode characters.
Learn more about bidirectional Unicode characters
Oops, something went wrong.