Example of function calling using OpenAI in Python
# Example of function calling using OpenAI
# by Enrico Zimuel (https://github.com/ezimuel)
#
# To run the code you need the OPENAI_API_KEY environment variable set
# For more information: https://platform.openai.com/docs/guides/function-calling?api-mode=chat
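# Requires the "openai" and "requests" packages: pip install openai requests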
from openai import OpenAI
import requests
import json
import datetime

def get_weather(location: str) -> str:
    """
    Get current temperature for a given location.

    Args:
        location (str): City and country e.g. Rome, Italy
    """
    # Get the latitude and longitude for the location
    response = requests.get(f"https://geocoding-api.open-meteo.com/v1/search?name={location}&count=1")
    data = response.json()
    lat, lon = (data['results'][0]['latitude'], data['results'][0]['longitude'])
    # Get current date and hour
    now = datetime.datetime.now()
    date = now.strftime("%Y-%m-%d")
    hour = int(now.strftime("%H"))
    # Get the hourly temperatures for today
    response = requests.get(f"https://api.open-meteo.com/v1/forecast?latitude={lat}&longitude={lon}&hourly=temperature_2m&start_date={date}&end_date={date}")
    data = response.json()
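    # Note: without a timezone parameter Open-Meteo reports hourly values in GMT,
    # so indexing with the machine's local hour is an approximation unless the
    # local clock is close to GMT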
    return f"{data['hourly']['temperature_2m'][hour]} °C"

client = OpenAI()
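
# Tool (function) definition exposed to the model. Note: with "strict": True,
# OpenAI expects every property in the parameters schema to be listed as
# required and "additionalProperties" to be set to False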
tools = [{
    "type": "function",
    "function": {
        "name": "get_weather",
        "description": "Get current temperature for a given location.",
        "parameters": {
            "type": "object",
            "properties": {
                "location": {
                    "type": "string",
                    "description": "City and country e.g. Rome, Italy"
                }
            },
            "required": ["location"],
            "additionalProperties": False
        },
        "strict": True
    }
}]

messages = [{
    "role": "user",
    "content": "What's the temperature now in Milan?"
}]

response = client.chat.completions.create(
    model="gpt-4o-mini",
    messages=messages,
    tools=tools
)
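
# Extract the tool call requested by the model and decode its JSON arguments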
tool_call = response.choices[0].message.tool_calls[0]
kwargs = json.loads(tool_call.function.arguments)

# Execute the function
content = get_weather(**kwargs)
print(f"Response from get_weather(): {content}")

# Append the assistant message that contains the tool call
messages.append(response.choices[0].message)
# Append the tool result message
messages.append({
    "role": "tool",
    "tool_call_id": tool_call.id,
    "content": content
})

# Second call: the model turns the tool result into a natural-language answer
response2 = client.chat.completions.create(
    model="gpt-4o-mini",
    messages=messages,
    tools=tools,
)

print(f"Response from LLM: {response2.choices[0].message.content}")
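
Note: the script assumes the model always responds with a tool call. If it answers in plain text instead, message.tool_calls is None. A minimal sketch of a defensive branch, using the same response object as above, could look like this:

message = response.choices[0].message
if not message.tool_calls:
    # No tool call: the model answered directly
    print(f"Response from LLM: {message.content}")
else:
    tool_call = message.tool_calls[0]
    kwargs = json.loads(tool_call.function.arguments)
    content = get_weather(**kwargs)
    # ...continue with the tool message and the second completion as above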