Skip to content

Instantly share code, notes, and snippets.

Show Gist options
  • Save siddicky/8ea8e70a04d00e80f27311fe6173ce14 to your computer and use it in GitHub Desktop.
Testing OpenAI parallel_tool_calls
Display the source blob
Display the rendered blob
Raw
{
"nbformat": 4,
"nbformat_minor": 0,
"metadata": {
"colab": {
"provenance": [],
"authorship_tag": "ABX9TyOnCt3lEf5mIL8z5A3d/0UE",
"include_colab_link": true
},
"kernelspec": {
"name": "python3",
"display_name": "Python 3"
},
"language_info": {
"name": "python"
}
},
"cells": [
{
"cell_type": "markdown",
"metadata": {
"id": "view-in-github",
"colab_type": "text"
},
"source": [
"<a href=\"https://colab.research.google.com/gist/siddicky/8ea8e70a04d00e80f27311fe6173ce14/testing-openai-parallel_tool_calls.ipynb\" target=\"_parent\"><img src=\"https://colab.research.google.com/assets/colab-badge.svg\" alt=\"Open In Colab\"/></a>"
]
},
{
"cell_type": "code",
"execution_count": 92,
"metadata": {
"id": "hsmw7wpdSaZ1"
},
"outputs": [],
"source": [
"%%capture --no-stderr\n",
"%pip install -U langgraph langchain-openai\n",
"import os\n",
"\n",
"from google.colab import userdata\n",
"from langchain_openai import ChatOpenAI\n",
"# pydantic v2 is used directly: langchain_core.pydantic_v1 is deprecated in LangChain >= 0.3.\n",
"from pydantic import BaseModel, Field\n",
"from langchain_core.prompts import ChatPromptTemplate\n",
"\n",
"# Set the key via os.environ rather than %env so the secret is never echoed into cell output.\n",
"os.environ['OPENAI_API_KEY'] = userdata.get('OPENAI_API_KEY')"
]
},
{
"cell_type": "code",
"source": [
"class Extractor(BaseModel):\n",
" \"\"\"Url extractor tool from text\"\"\"\n",
" url: str = Field(..., description='The URL to extract')\n",
"\n",
"\n",
"# Bind the extraction tool to the model; parallel_tool_calls=False limits it to one call per turn.\n",
"llm = ChatOpenAI(model='gpt-4o-mini', temperature=0)\n",
"model = llm.bind_tools(tools=[Extractor], parallel_tool_calls=False)\n",
"\n",
"extraction_prompt = ChatPromptTemplate.from_messages(\n",
" [\n",
" (\"system\", \"You are extraction expert. Retrieve relevant information\"),\n",
" (\"human\", \"{input}\"),\n",
" ]\n",
")\n",
"\n",
"# Runnable pipeline: prompt formatting feeds straight into the tool-bound model.\n",
"chain = extraction_prompt | model"
],
"metadata": {
"id": "HwfvpfsWTC8f"
},
"execution_count": 100,
"outputs": []
},
{
"cell_type": "code",
"source": [
"# Ask the chain to extract the URL from a sentence (typo 'paralell' fixed to 'parallel').\n",
"res = chain.invoke({'input': 'Here is the link for example showing parallel tool calls parameter https://github.com/langchain-ai/langchain/blob/master/libs/partners/openai/langchain_openai/chat_models/base.py'})"
],
"metadata": {
"id": "u3ahek9GUZ_q"
},
"execution_count": 101,
"outputs": []
},
{
"cell_type": "code",
"source": [
"# Guard: even with parallel_tool_calls=False the model may make zero tool calls,\n",
"# so check before indexing to avoid an IndexError on an empty list.\n",
"if res.tool_calls:\n",
"    print(res.tool_calls[0]['args']['url'])\n",
"else:\n",
"    print('No tool call was returned by the model')"
],
"metadata": {
"id": "6yQcLddgVGc5",
"colab": {
"base_uri": "https://localhost:8080/"
},
"outputId": "798d9793-ebf7-4691-9c5a-ed3559b62c42"
},
"execution_count": 102,
"outputs": [
{
"output_type": "stream",
"name": "stdout",
"text": [
"https://github.com/langchain-ai/langchain/blob/master/libs/partners/openai/langchain_openai/chat_models/base.py\n"
]
}
]
}
]
}
Sign up for free to join this conversation on GitHub. Already have an account? Sign in to comment