OpenAI Function calling
Jun 22, 2023
Google Colab notebook: https://colab.research.google.com/drive/1hb6tQVBsE6jeyVZjhRvUHxyi3GgzZzne?authuser=1#forceEdit=true&sandboxMode=true
“An essential part of creativity is not being afraid to fail.” -Edwin H. Land
# OpenAI Function calling
Source: https://platform.openai.com/docs/guides/gpt/function-calling
!pip install openai
import openai
import json
import os
os.environ["OPENAI_API_KEY"] = "OPENAI_API_KEY"  # placeholder: substitute your real key from your OpenAI account; hard-coding it in a notebook is NOT the way to go!
openai.api_key = os.getenv('OPENAI_API_KEY')
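A safer pattern (a sketch, not part of the original notebook) is to prompt for the key at runtime with the standard-library getpass, so it never gets saved in the notebook source:
from getpass import getpass  # prompts without echoing the key
os.environ["OPENAI_API_KEY"] = getpass("Enter your OpenAI API key: ")
openai.api_key = os.environ["OPENAI_API_KEY"]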
# Step 0: Define function
# Example dummy function hard coded to return the same weather
# In production, this could be your backend API or an external API
def get_current_weather(location, unit="fahrenheit"):
    """Get the current weather in a given location"""
    weather_info = {
        "location": location,
        "temperature": "65",
        "unit": unit,
        "forecast": ["sunny", "windy"],
    }
    return json.dumps(weather_info)
get_current_weather("Ann Arbor", unit="fahrenheit")
# functions: A list of functions the model may generate JSON inputs for.
# name: string; Required. The name of the function to be called. Must be a-z, A-Z, 0-9, or contain underscores and dashes, with a maximum length of 64.
# description: string; Optional. A description of what the function does.
# parameters: object; Optional. The parameters the function accepts, described as a JSON Schema object. See the guide for examples, and the JSON Schema reference for documentation about the format.
functions = [
    {
        "name": "get_current_weather",
        "description": "Get the current weather in a given location",
        "parameters": {
            "type": "object",
            "properties": {
                "location": {
                    "type": "string",
                    "description": "The city and state, e.g. San Francisco, CA",
                },
                "unit": {"type": "string", "enum": ["celsius", "fahrenheit"]},
            },
            "required": ["location"],
        },
    }
]
# Step 1: send the model the user query and the functions it has access to
# functions: A list of functions the model may generate JSON inputs for.
# function_call: Controls how the model responds to function calls.
# "none" means the model does not call a function, and responds to the end-user.
# "auto" means the model can pick between generating a message for the end-user or calling a function.
# Specifying a particular function via {"name": "my_function"} forces the model to call that function.
# "none" is the default when no functions are present. "auto" is the default if functions are present.
response = openai.ChatCompletion.create(
    model="gpt-3.5-turbo-0613",
    messages=[{"role": "user", "content": "What's the weather like in Ann Arbor?"}],
    functions=functions,
    function_call="auto",
)
message = response["choices"][0]["message"]
message
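As an aside, function_call can also force a specific function instead of "auto", as described above. A quick sketch (the variable name forced_response is just illustrative):
# Force the model to call get_current_weather rather than letting it decide
forced_response = openai.ChatCompletion.create(
    model="gpt-3.5-turbo-0613",
    messages=[{"role": "user", "content": "What's the weather like in Ann Arbor?"}],
    functions=functions,
    function_call={"name": "get_current_weather"},
)
forced_response["choices"][0]["message"]["function_call"]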
# Step 2: check if the model wants to call a function
if message.get("function_call"):
    function_name = message["function_call"]["name"]
function_name
# Step 3: call the function
# Note: the arguments the model returns may not always be valid JSON
function_response = get_current_weather(
    location=json.loads(message["function_call"]["arguments"]).get("location")
)
function_response
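Because the arguments string is model-generated, it can occasionally be malformed, so a defensive parse helps. A sketch (parse_function_arguments is an illustrative helper name, not part of the API):
# Parse model-supplied arguments, falling back to an empty dict on bad JSON
def parse_function_arguments(message):
    try:
        return json.loads(message["function_call"]["arguments"])
    except (KeyError, json.JSONDecodeError):
        return {}

parse_function_arguments(message)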
# Step 4: send the model the info on the function call and function response
second_response = openai.ChatCompletion.create(
    model="gpt-3.5-turbo-0613",
    messages=[
        {"role": "user", "content": "What is the weather like in Ann Arbor?"},
        message,
        {
            "role": "function",
            "name": function_name,
            "content": function_response,
        },
    ],
)
second_response
second_response['choices'][0]['message']['content']
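Putting steps 1 through 4 together, the whole round trip can be wrapped in one helper. This is a sketch that assumes the same model and the single get_current_weather function defined above; run_conversation is an illustrative name, not an OpenAI API:
# End-to-end helper: send a query, run any requested function locally,
# then return the model's final natural-language answer
def run_conversation(user_query):
    messages = [{"role": "user", "content": user_query}]
    response = openai.ChatCompletion.create(
        model="gpt-3.5-turbo-0613",
        messages=messages,
        functions=functions,
        function_call="auto",
    )
    message = response["choices"][0]["message"]
    if message.get("function_call"):
        args = json.loads(message["function_call"]["arguments"])
        function_response = get_current_weather(
            location=args.get("location"),
            unit=args.get("unit", "fahrenheit"),
        )
        messages.append(message)
        messages.append({
            "role": "function",
            "name": message["function_call"]["name"],
            "content": function_response,
        })
        response = openai.ChatCompletion.create(
            model="gpt-3.5-turbo-0613",
            messages=messages,
        )
    return response["choices"][0]["message"]["content"]

run_conversation("What is the weather like in Ann Arbor?")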
# Convert LangChain tools to OpenAI functions
Source: https://python.langchain.com/en/latest/modules/agents/tools/tools_as_openai_functions.html
!pip install langchain
from langchain.chat_models import ChatOpenAI
from langchain.schema import HumanMessage
model = ChatOpenAI(model="gpt-3.5-turbo-0613")
from langchain.tools import MoveFileTool, format_tool_to_openai_function
??MoveFileTool
tools = [MoveFileTool()]
tools
functions = [format_tool_to_openai_function(t) for t in tools]
functions
message = model.predict_messages(
    [HumanMessage(content="move file foo to bar")],
    functions=functions,
)
message
message.additional_kwargs['function_call']
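From here you could actually execute the tool with the arguments the model produced. A sketch only: it assumes the model asked for move_file with source_path and destination_path arguments, and that the source file actually exists on disk:
# Parse the model-supplied arguments and run the matching LangChain tool
import json
tool_args = json.loads(message.additional_kwargs["function_call"]["arguments"])
tools[0].run(tool_args)  # MoveFileTool moves source_path to destination_path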