# NOTE(review): removed stray "Spaces: Sleeping" banner — non-Python residue
# from page extraction that would break the interpreter.
#!/usr/bin/python3
# -*- coding: utf-8 -*-
"""
https://platform.openai.com/docs/guides/function-calling
"""
import argparse
import json
import time

import openai
from openai import OpenAI
from openai.pagination import SyncCursorPage
from openai.types.beta.threads import ThreadMessage
from openai.types.beta.assistant import Assistant

from project_settings import environment, project_path
def get_args():
    """Parse command-line arguments.

    ``--openai_api_key`` falls back to the value stored in the project
    ``environment`` when not given on the command line.
    """
    parser = argparse.ArgumentParser()
    parser.add_argument(
        "--openai_api_key",
        default=environment.get("openai_api_key", default=None, dtype=str),
        type=str
    )
    return parser.parse_args()
def get_current_weather(location, unit="fahrenheit"):
    """Return hard-coded example weather for *location* as a JSON string.

    Mirrors the OpenAI function-calling demo: *unit* is accepted but the
    reported unit is fixed per city.
    """
    city = location.lower()
    if "tokyo" in city:
        report = {"location": location, "temperature": "10", "unit": "celsius"}
    elif "san francisco" in city:
        report = {"location": location, "temperature": "72", "unit": "fahrenheit"}
    else:
        report = {"location": location, "temperature": "22", "unit": "celsius"}
    return json.dumps(report)
# Registry mapping tool names (as advertised to the model) to local callables.
available_functions = dict(
    get_current_weather=get_current_weather,
)
def main():
    """Run the OpenAI function-calling demo.

    Asks the model about the weather in three cities, executes any tool
    calls it requests via the local ``available_functions`` registry, and
    sends the tool results back for a final natural-language answer.
    """
    args = get_args()
    client = OpenAI(
        api_key=args.openai_api_key
    )
    messages = [{"role": "user", "content": "What's the weather like in San Francisco, Tokyo, and Paris?"}]
    tools = [
        {
            "type": "function",
            "function": {
                "name": "get_current_weather",
                "description": "Get the current weather in a given location",
                "parameters": {
                    "type": "object",
                    "properties": {
                        "location": {
                            "type": "string",
                            "description": "The city and state, e.g. San Francisco, CA",
                        },
                        "unit": {"type": "string", "enum": ["celsius", "fahrenheit"]},
                    },
                    "required": ["location"],
                },
            },
        }
    ]
    # BUG FIX: use the configured `client` (which carries args.openai_api_key)
    # instead of the module-level `openai` namespace, which ignored it.
    response = client.chat.completions.create(
        model="gpt-3.5-turbo-1106",
        messages=messages,
        tools=tools,
        tool_choice="auto",  # auto is default, but we'll be explicit
    )
    print(response.choices)
    response_message = response.choices[0].message
    tool_calls = response_message.tool_calls
    print(tool_calls)
    if tool_calls:
        # The assistant's tool-call message must precede the tool results.
        messages.append(response_message)
        for tool_call in tool_calls:
            function_name = tool_call.function.name
            function_to_call = available_functions[function_name]
            # Arguments arrive as a JSON-encoded string from the model.
            function_args = json.loads(tool_call.function.arguments)
            function_response = function_to_call(
                location=function_args.get("location"),
                unit=function_args.get("unit"),
            )
            messages.append(
                {
                    "tool_call_id": tool_call.id,
                    "role": "tool",
                    "name": function_name,
                    "content": function_response,
                }
            )
        second_response = client.chat.completions.create(
            model="gpt-3.5-turbo-1106",
            messages=messages,
        )
        print("second_response: {}".format(second_response))
    return
# Script entry point.
if __name__ == "__main__":
    main()