Mirror of https://github.com/openai/openai-cookbook
Add parallel function calling (#838)
Co-authored-by: Simón Fishman <simonpfish@gmail.com>
This commit is contained in:
parent f6b5ce2047
commit c5a2076f06
@ -10,9 +10,9 @@
"\n",
"This notebook covers how to use the Chat Completions API in combination with external functions to extend the capabilities of GPT models.\n",
"\n",
"`functions` is an optional parameter in the Chat Completion API which can be used to provide function specifications. The purpose of this is to enable models to generate function arguments which adhere to the provided specifications. Note that the API will not actually execute any function calls. It is up to developers to execute function calls using model outputs.\n",
"`tools` is an optional parameter in the Chat Completion API which can be used to provide function specifications. The purpose of this is to enable models to generate function arguments which adhere to the provided specifications. Note that the API will not actually execute any function calls. It is up to developers to execute function calls using model outputs.\n",
"\n",
"If the `functions` parameter is provided then by default the model will decide when it is appropriate to use one of the functions. The API can be forced to use a specific function by setting the `function_call` parameter to `{\"name\": \"<insert-function-name>\"}`. The API can also be forced to not use any function by setting the `function_call` parameter to `\"none\"`. If a function is used, the output will contain `\"finish_reason\": \"function_call\"` in the response, as well as a `function_call` object that has the name of the function and the generated function arguments.\n",
"If the `tools` parameter is provided then by default the model will decide when it is appropriate to use one of the functions. The API can be forced to use a specific function by setting the `tool_choice` parameter to `{\"type\": \"function\", \"function\": {\"name\": \"<insert-function-name>\"}}`. The API can also be forced to not use any function by setting the `tool_choice` parameter to `\"none\"`. If a function is used, the output will contain `\"finish_reason\": \"tool_calls\"` in the response, as well as a `tool_calls` object that has the name of the function and the generated function arguments.\n",
"\n",
"### Overview\n",
"\n",
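Editor's aside between hunks: a minimal sketch of the three `tool_choice` modes described above. The forcing shape mirrors the call that appears later in this diff; `messages` and `tools` are placeholders for the objects defined in the notebook, and none of this is part of the commit itself.

# Hypothetical request payloads illustrating the three tool_choice modes (not part of the notebook).
base = {"model": "gpt-3.5-turbo-1106", "messages": messages, "tools": tools}

auto_payload = dict(base)                      # default: the model decides whether to call a tool
none_payload = dict(base, tool_choice="none")  # never call a tool; reply with plain text
forced_payload = dict(
    base,
    tool_choice={"type": "function", "function": {"name": "get_current_weather"}},  # force this tool
)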
@ -85,16 +85,16 @@
"outputs": [],
"source": [
"@retry(wait=wait_random_exponential(multiplier=1, max=40), stop=stop_after_attempt(3))\n",
"def chat_completion_request(messages, functions=None, function_call=None, model=GPT_MODEL):\n",
"def chat_completion_request(messages, tools=None, tool_choice=None, model=GPT_MODEL):\n",
" headers = {\n",
" \"Content-Type\": \"application/json\",\n",
" \"Authorization\": \"Bearer \" + openai.api_key,\n",
" }\n",
" json_data = {\"model\": model, \"messages\": messages}\n",
" if functions is not None:\n",
" json_data.update({\"functions\": functions})\n",
" if function_call is not None:\n",
" json_data.update({\"function_call\": function_call})\n",
" if tools is not None:\n",
" json_data.update({\"tools\": tools})\n",
" if tool_choice is not None:\n",
" json_data.update({\"tool_choice\": tool_choice})\n",
" try:\n",
" response = requests.post(\n",
" \"https://api.openai.com/v1/chat/completions\",\n",
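The hunk above keeps the notebook's raw `requests.post` helper. For readers using the official `openai` Python package (v1 or later), an equivalent helper would look roughly like the sketch below; this is an editorial aside, not part of the commit.

# Sketch only: the same request via the openai v1 client instead of requests.post.
from openai import OpenAI

client = OpenAI()  # reads OPENAI_API_KEY from the environment

def chat_completion_request_sdk(messages, tools=None, tool_choice=None, model="gpt-3.5-turbo-1106"):
    # Mirror the notebook helper: only pass parameters that were actually supplied.
    kwargs = {"model": model, "messages": messages}
    if tools is not None:
        kwargs["tools"] = tools
    if tool_choice is not None:
        kwargs["tool_choice"] = tool_choice
    return client.chat.completions.create(**kwargs)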
@ -120,7 +120,7 @@
" \"system\": \"red\",\n",
" \"user\": \"green\",\n",
" \"assistant\": \"blue\",\n",
" \"function\": \"magenta\",\n",
" \"tool\": \"magenta\",\n",
" }\n",
" \n",
" for message in messages:\n",
@ -132,7 +132,7 @@
" print(colored(f\"assistant: {message['function_call']}\\n\", role_to_color[message[\"role\"]]))\n",
" elif message[\"role\"] == \"assistant\" and not message.get(\"function_call\"):\n",
" print(colored(f\"assistant: {message['content']}\\n\", role_to_color[message[\"role\"]]))\n",
" elif message[\"role\"] == \"function\":\n",
" elif message[\"role\"] == \"tool\":\n",
" print(colored(f\"function ({message['name']}): {message['content']}\\n\", role_to_color[message[\"role\"]]))\n"
]
},
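For orientation, the two hunks above only show the changed lines of `pretty_print_conversation`. Reconstructed from those hunks, and assuming the lines not shown are unchanged from the original notebook, the updated helper presumably reads roughly like this sketch:

# Reconstruction, not a verbatim copy of the notebook cell.
from termcolor import colored

def pretty_print_conversation(messages):
    role_to_color = {
        "system": "red",
        "user": "green",
        "assistant": "blue",
        "tool": "magenta",  # "function" role renamed to "tool" in this commit
    }
    for message in messages:
        if message["role"] == "system":
            print(colored(f"system: {message['content']}\n", role_to_color[message["role"]]))
        elif message["role"] == "user":
            print(colored(f"user: {message['content']}\n", role_to_color[message["role"]]))
        elif message["role"] == "assistant" and message.get("function_call"):
            print(colored(f"assistant: {message['function_call']}\n", role_to_color[message["role"]]))
        elif message["role"] == "assistant" and not message.get("function_call"):
            print(colored(f"assistant: {message['content']}\n", role_to_color[message["role"]]))
        elif message["role"] == "tool":
            print(colored(f"function ({message['name']}): {message['content']}\n", role_to_color[message["role"]]))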
@ -154,48 +154,54 @@
"metadata": {},
"outputs": [],
"source": [
"functions = [\n",
"tools = [\n",
" {\n",
" \"name\": \"get_current_weather\",\n",
" \"description\": \"Get the current weather\",\n",
" \"parameters\": {\n",
" \"type\": \"object\",\n",
" \"properties\": {\n",
" \"location\": {\n",
" \"type\": \"string\",\n",
" \"description\": \"The city and state, e.g. San Francisco, CA\",\n",
" },\n",
" \"format\": {\n",
" \"type\": \"string\",\n",
" \"enum\": [\"celsius\", \"fahrenheit\"],\n",
" \"description\": \"The temperature unit to use. Infer this from the users location.\",\n",
" \"type\": \"function\",\n",
" \"function\": {\n",
" \"name\": \"get_current_weather\",\n",
" \"description\": \"Get the current weather\",\n",
" \"parameters\": {\n",
" \"type\": \"object\",\n",
" \"properties\": {\n",
" \"location\": {\n",
" \"type\": \"string\",\n",
" \"description\": \"The city and state, e.g. San Francisco, CA\",\n",
" },\n",
" \"format\": {\n",
" \"type\": \"string\",\n",
" \"enum\": [\"celsius\", \"fahrenheit\"],\n",
" \"description\": \"The temperature unit to use. Infer this from the users location.\",\n",
" },\n",
" },\n",
" \"required\": [\"location\", \"format\"],\n",
" },\n",
" \"required\": [\"location\", \"format\"],\n",
" },\n",
" }\n",
" },\n",
" {\n",
" \"name\": \"get_n_day_weather_forecast\",\n",
" \"description\": \"Get an N-day weather forecast\",\n",
" \"parameters\": {\n",
" \"type\": \"object\",\n",
" \"properties\": {\n",
" \"location\": {\n",
" \"type\": \"string\",\n",
" \"description\": \"The city and state, e.g. San Francisco, CA\",\n",
" \"type\": \"function\",\n",
" \"function\": {\n",
" \"name\": \"get_n_day_weather_forecast\",\n",
" \"description\": \"Get an N-day weather forecast\",\n",
" \"parameters\": {\n",
" \"type\": \"object\",\n",
" \"properties\": {\n",
" \"location\": {\n",
" \"type\": \"string\",\n",
" \"description\": \"The city and state, e.g. San Francisco, CA\",\n",
" },\n",
" \"format\": {\n",
" \"type\": \"string\",\n",
" \"enum\": [\"celsius\", \"fahrenheit\"],\n",
" \"description\": \"The temperature unit to use. Infer this from the users location.\",\n",
" },\n",
" \"num_days\": {\n",
" \"type\": \"integer\",\n",
" \"description\": \"The number of days to forecast\",\n",
" }\n",
" },\n",
" \"format\": {\n",
" \"type\": \"string\",\n",
" \"enum\": [\"celsius\", \"fahrenheit\"],\n",
" \"description\": \"The temperature unit to use. Infer this from the users location.\",\n",
" },\n",
" \"num_days\": {\n",
" \"type\": \"integer\",\n",
" \"description\": \"The number of days to forecast\",\n",
" }\n",
" \"required\": [\"location\", \"format\", \"num_days\"]\n",
" },\n",
" \"required\": [\"location\", \"format\", \"num_days\"]\n",
" },\n",
" }\n",
" },\n",
"]"
]
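A note on the schemas above: they only describe the functions, and the notebook deliberately never implements them, since the API does not execute calls. Purely as an illustration of what conforming implementations could look like, here is a hypothetical pair of stubs (names match the schemas, behavior is made up):

# Hypothetical stubs matching the two schemas above; not part of the notebook or the commit.
def get_current_weather(location: str, format: str) -> str:
    # A real implementation would call a weather service here.
    return f"Current weather in {location}: 18 degrees {format} (dummy data)"

def get_n_day_weather_forecast(location: str, format: str, num_days: int) -> str:
    return f"{num_days}-day forecast for {location}: mild, reported in {format} (dummy data)"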
@ -219,7 +225,7 @@
"data": {
"text/plain": [
"{'role': 'assistant',\n",
" 'content': 'In which city and state would you like to know the current weather?'}"
" 'content': 'Sure, I can help you with that. Could you please tell me the city and state you are in or the location you want to know the weather for?'}"
]
},
"execution_count": 5,
@ -232,7 +238,7 @@
"messages.append({\"role\": \"system\", \"content\": \"Don't make assumptions about what values to plug into functions. Ask for clarification if a user request is ambiguous.\"})\n",
"messages.append({\"role\": \"user\", \"content\": \"What's the weather like today\"})\n",
"chat_response = chat_completion_request(\n",
" messages, functions=functions\n",
" messages, tools=tools\n",
")\n",
"assistant_message = chat_response.json()[\"choices\"][0][\"message\"]\n",
"messages.append(assistant_message)\n",
@ -259,8 +265,10 @@
"text/plain": [
"{'role': 'assistant',\n",
" 'content': None,\n",
" 'function_call': {'name': 'get_current_weather',\n",
" 'arguments': '{\\n \"location\": \"Glasgow, Scotland\",\\n \"format\": \"celsius\"\\n}'}}"
" 'tool_calls': [{'id': 'call_o7uyztQLeVIoRdjcDkDJY3ni',\n",
" 'type': 'function',\n",
" 'function': {'name': 'get_current_weather',\n",
" 'arguments': '{\\n \"location\": \"Glasgow, Scotland\",\\n \"format\": \"celsius\"\\n}'}}]}"
]
},
"execution_count": 6,
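Given a response message shaped like the new output above, the generated arguments arrive as a JSON string rather than as Python values. A small sketch of turning them back into a dict (here `assistant_message` is assumed to be `chat_response.json()["choices"][0]["message"]`):

import json

tool_call = assistant_message["tool_calls"][0]
if tool_call["type"] == "function":
    name = tool_call["function"]["name"]  # e.g. "get_current_weather"
    try:
        arguments = json.loads(tool_call["function"]["arguments"])  # e.g. {"location": ..., "format": ...}
    except json.JSONDecodeError:
        arguments = None  # the model is not guaranteed to emit valid JSON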
@ -271,7 +279,7 @@
"source": [
"messages.append({\"role\": \"user\", \"content\": \"I'm in Glasgow, Scotland.\"})\n",
"chat_response = chat_completion_request(\n",
" messages, functions=functions\n",
" messages, tools=tools\n",
")\n",
"assistant_message = chat_response.json()[\"choices\"][0][\"message\"]\n",
"messages.append(assistant_message)\n",
@ -297,7 +305,7 @@
"data": {
"text/plain": [
"{'role': 'assistant',\n",
" 'content': 'Sure, I can help you with that. Please provide me with the number of days you want to forecast for.'}"
" 'content': 'Sure, I can help you with that. Please let me know the value for x.'}"
]
},
"execution_count": 7,
@ -310,7 +318,7 @@
"messages.append({\"role\": \"system\", \"content\": \"Don't make assumptions about what values to plug into functions. Ask for clarification if a user request is ambiguous.\"})\n",
"messages.append({\"role\": \"user\", \"content\": \"what is the weather going to be like in Glasgow, Scotland over the next x days\"})\n",
"chat_response = chat_completion_request(\n",
" messages, functions=functions\n",
" messages, tools=tools\n",
")\n",
"assistant_message = chat_response.json()[\"choices\"][0][\"message\"]\n",
"messages.append(assistant_message)\n",
@ -338,9 +346,11 @@
"{'index': 0,\n",
" 'message': {'role': 'assistant',\n",
" 'content': None,\n",
" 'function_call': {'name': 'get_n_day_weather_forecast',\n",
" 'arguments': '{\\n \"location\": \"Glasgow, Scotland\",\\n \"format\": \"celsius\",\\n \"num_days\": 5\\n}'}},\n",
" 'finish_reason': 'function_call'}"
" 'tool_calls': [{'id': 'call_drz2YpGPWEMVySzYgsWYY249',\n",
" 'type': 'function',\n",
" 'function': {'name': 'get_n_day_weather_forecast',\n",
" 'arguments': '{\\n \"location\": \"Glasgow, Scotland\",\\n \"format\": \"celsius\",\\n \"num_days\": 5\\n}'}}]},\n",
" 'finish_reason': 'tool_calls'}"
]
},
"execution_count": 8,
@ -351,7 +361,7 @@
"source": [
"messages.append({\"role\": \"user\", \"content\": \"5 days\"})\n",
"chat_response = chat_completion_request(\n",
" messages, functions=functions\n",
" messages, tools=tools\n",
")\n",
"chat_response.json()[\"choices\"][0]\n"
]
@ -385,8 +395,10 @@
"text/plain": [
"{'role': 'assistant',\n",
" 'content': None,\n",
" 'function_call': {'name': 'get_n_day_weather_forecast',\n",
" 'arguments': '{\\n \"location\": \"Toronto, Canada\",\\n \"format\": \"celsius\",\\n \"num_days\": 1\\n}'}}"
" 'tool_calls': [{'id': 'call_jdmoJQ4lqsu4mBWcVBYtt5cU',\n",
" 'type': 'function',\n",
" 'function': {'name': 'get_n_day_weather_forecast',\n",
" 'arguments': '{\\n \"location\": \"Toronto, Canada\",\\n \"format\": \"celsius\",\\n \"num_days\": 1\\n}'}}]}"
]
},
"execution_count": 9,
@ -400,7 +412,7 @@
"messages.append({\"role\": \"system\", \"content\": \"Don't make assumptions about what values to plug into functions. Ask for clarification if a user request is ambiguous.\"})\n",
"messages.append({\"role\": \"user\", \"content\": \"Give me a weather report for Toronto, Canada.\"})\n",
"chat_response = chat_completion_request(\n",
" messages, functions=functions, function_call={\"name\": \"get_n_day_weather_forecast\"}\n",
" messages, tools=tools, tool_choice={\"type\": \"function\", \"function\": {\"name\": \"get_n_day_weather_forecast\"}}\n",
")\n",
"chat_response.json()[\"choices\"][0][\"message\"]\n"
]
@ -416,8 +428,10 @@
"text/plain": [
"{'role': 'assistant',\n",
" 'content': None,\n",
" 'function_call': {'name': 'get_current_weather',\n",
" 'arguments': '{\\n \"location\": \"Toronto, Canada\",\\n \"format\": \"celsius\"\\n}'}}"
" 'tool_calls': [{'id': 'call_RYXaDjxpUCfWmpXU7BZEYVqS',\n",
" 'type': 'function',\n",
" 'function': {'name': 'get_current_weather',\n",
" 'arguments': '{\\n \"location\": \"Toronto, Canada\",\\n \"format\": \"celsius\"\\n}'}}]}"
]
},
"execution_count": 10,
@ -431,7 +445,7 @@
"messages.append({\"role\": \"system\", \"content\": \"Don't make assumptions about what values to plug into functions. Ask for clarification if a user request is ambiguous.\"})\n",
"messages.append({\"role\": \"user\", \"content\": \"Give me a weather report for Toronto, Canada.\"})\n",
"chat_response = chat_completion_request(\n",
" messages, functions=functions\n",
" messages, tools=tools\n",
")\n",
"chat_response.json()[\"choices\"][0][\"message\"]\n"
]
@ -454,7 +468,8 @@
{
"data": {
"text/plain": [
"{'role': 'assistant', 'content': 'Sure, let me get that information for you.'}"
"{'role': 'assistant',\n",
" 'content': '{ \"location\": \"Toronto, Canada\", \"format\": \"celsius\" }'}"
]
},
"execution_count": 11,
@ -467,11 +482,56 @@
"messages.append({\"role\": \"system\", \"content\": \"Don't make assumptions about what values to plug into functions. Ask for clarification if a user request is ambiguous.\"})\n",
"messages.append({\"role\": \"user\", \"content\": \"Give me the current weather (use Celcius) for Toronto, Canada.\"})\n",
"chat_response = chat_completion_request(\n",
" messages, functions=functions, function_call=\"none\"\n",
" messages, tools=tools, tool_choice=\"none\"\n",
")\n",
"chat_response.json()[\"choices\"][0][\"message\"]\n"
]
},
{
"cell_type": "markdown",
"metadata": {},
"source": [
"### Parallel Function Calling\n",
"\n",
"Newer models like gpt-4-1106-preview or gpt-3.5-turbo-1106 can call multiple functions in one turn."
]
},
{
"cell_type": "code",
"execution_count": 12,
"metadata": {},
"outputs": [
{
"data": {
"text/plain": [
"[{'id': 'call_fLsKR5vGllhbWxvpqsDT3jBj',\n",
" 'type': 'function',\n",
" 'function': {'name': 'get_n_day_weather_forecast',\n",
" 'arguments': '{\"location\": \"San Francisco, CA\", \"format\": \"celsius\", \"num_days\": 4}'}},\n",
" {'id': 'call_CchlsGE8OE03QmeyFbg7pkDz',\n",
" 'type': 'function',\n",
" 'function': {'name': 'get_n_day_weather_forecast',\n",
" 'arguments': '{\"location\": \"Glasgow\", \"format\": \"celsius\", \"num_days\": 4}'}}]"
]
},
"execution_count": 12,
"metadata": {},
"output_type": "execute_result"
}
],
"source": [
"messages = []\n",
"messages.append({\"role\": \"system\", \"content\": \"Don't make assumptions about what values to plug into functions. Ask for clarification if a user request is ambiguous.\"})\n",
"messages.append({\"role\": \"user\", \"content\": \"what is the weather going to be like in San Francisco and Glasgow over the next 4 days\"})\n",
"chat_response = chat_completion_request(\n",
" messages, tools=tools, model='gpt-3.5-turbo-1106'\n",
")\n",
"\n",
"chat_response.json()\n",
"assistant_message = chat_response.json()[\"choices\"][0][\"message\"]['tool_calls']\n",
"assistant_message"
]
},
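Since the parallel example above returns two entries in `tool_calls`, a follow-up turn needs one `role: "tool"` message per call, each keyed by its `tool_call_id`. A sketch of that bookkeeping, assuming a hypothetical `execute(name, args)` dispatcher and assuming the assistant message itself has already been appended to `messages`:

import json

# Hypothetical continuation of the parallel example; `execute` is an assumed dispatcher, not notebook code.
tool_calls = chat_response.json()["choices"][0]["message"].get("tool_calls", [])
for tool_call in tool_calls:
    name = tool_call["function"]["name"]
    args = json.loads(tool_call["function"]["arguments"])
    result = execute(name, args)  # returns a string with the function's output
    messages.append({
        "role": "tool",
        "tool_call_id": tool_call["id"],  # ties this result back to the specific call
        "name": name,
        "content": result,
    })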
{
"attachments": {},
"cell_type": "markdown",
@ -498,7 +558,7 @@
},
{
"cell_type": "code",
"execution_count": 12,
"execution_count": 13,
"id": "30f6b60e",
"metadata": {},
"outputs": [
@ -519,7 +579,7 @@
},
{
"cell_type": "code",
"execution_count": 13,
"execution_count": 14,
"id": "abec0214",
"metadata": {},
"outputs": [],
@ -562,7 +622,7 @@
},
{
"cell_type": "code",
"execution_count": 14,
"execution_count": 15,
"id": "0c0104cd",
"metadata": {},
"outputs": [],
@ -587,30 +647,33 @@
},
{
"cell_type": "code",
"execution_count": 15,
"execution_count": 16,
"id": "0258813a",
"metadata": {},
"outputs": [],
"source": [
"functions = [\n",
"tools = [\n",
" {\n",
" \"name\": \"ask_database\",\n",
" \"description\": \"Use this function to answer user questions about music. Input should be a fully formed SQL query.\",\n",
" \"parameters\": {\n",
" \"type\": \"object\",\n",
" \"properties\": {\n",
" \"query\": {\n",
" \"type\": \"string\",\n",
" \"description\": f\"\"\"\n",
" SQL query extracting info to answer the user's question.\n",
" SQL should be written using this database schema:\n",
" {database_schema_string}\n",
" The query should be returned in plain text, not in JSON.\n",
" \"\"\",\n",
" }\n",
" \"type\": \"function\",\n",
" \"function\": {\n",
" \"name\": \"ask_database\",\n",
" \"description\": \"Use this function to answer user questions about music. Input should be a fully formed SQL query.\",\n",
" \"parameters\": {\n",
" \"type\": \"object\",\n",
" \"properties\": {\n",
" \"query\": {\n",
" \"type\": \"string\",\n",
" \"description\": f\"\"\"\n",
" SQL query extracting info to answer the user's question.\n",
" SQL should be written using this database schema:\n",
" {database_schema_string}\n",
" The query should be returned in plain text, not in JSON.\n",
" \"\"\",\n",
" }\n",
" },\n",
" \"required\": [\"query\"],\n",
" },\n",
" \"required\": [\"query\"],\n",
" },\n",
" }\n",
" }\n",
"]"
]
@ -628,7 +691,7 @@
},
{
"cell_type": "code",
"execution_count": 16,
"execution_count": 17,
"id": "65585e74",
"metadata": {},
"outputs": [],
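`execute_function_call` in the next hunk relies on `ask_database(conn, query)`, which this diff does not touch. It is presumably a thin wrapper over the SQLite connection to the Chinook database, along the lines of the sketch below (reconstructed, not quoted from the commit):

import sqlite3

# Assumed helper the next hunk calls; the actual notebook definition lies outside this diff.
def ask_database(conn: sqlite3.Connection, query: str) -> str:
    """Run a SQL query against the Chinook database and return the rows as a string."""
    try:
        results = str(conn.execute(query).fetchall())
    except Exception as e:
        results = f"query failed with error: {e}"
    return results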
@ -642,17 +705,17 @@
" return results\n",
"\n",
"def execute_function_call(message):\n",
" if message[\"function_call\"][\"name\"] == \"ask_database\":\n",
" query = json.loads(message[\"function_call\"][\"arguments\"])[\"query\"]\n",
" if message[\"tool_calls\"][0][\"function\"][\"name\"] == \"ask_database\":\n",
" query = json.loads(message[\"tool_calls\"][0][\"function\"][\"arguments\"])[\"query\"]\n",
" results = ask_database(conn, query)\n",
" else:\n",
" results = f\"Error: function {message['function_call']['name']} does not exist\"\n",
" results = f\"Error: function {message['tool_calls'][0]['function']['name']} does not exist\"\n",
" return results"
]
},
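Note that the updated `execute_function_call` above still inspects only `tool_calls[0]` and hard-codes the single function name. A hedged sketch of a more general variant using a dispatch table; the structure is assumed, not part of the commit, and it reuses `ask_database` and `conn` from the notebook:

import json

# Hypothetical generalisation: map tool names to callables and handle every tool call in the message.
AVAILABLE_FUNCTIONS = {
    "ask_database": lambda args: ask_database(conn, args["query"]),
}

def execute_tool_calls(message):
    results = []
    for tool_call in message.get("tool_calls", []):
        name = tool_call["function"]["name"]
        handler = AVAILABLE_FUNCTIONS.get(name)
        if handler is None:
            results.append((tool_call["id"], f"Error: function {name} does not exist"))
            continue
        args = json.loads(tool_call["function"]["arguments"])
        results.append((tool_call["id"], handler(args)))
    return results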
{
"cell_type": "code",
"execution_count": 17,
"execution_count": 18,
"id": "38c55083",
"metadata": {},
"outputs": [
@ -660,14 +723,14 @@
"name": "stdout",
"output_type": "stream",
"text": [
"system: Answer user questions by generating SQL queries against the Chinook Music Database.\n",
"\n",
"user: Hi, who are the top 5 artists by number of tracks?\n",
"\n",
"assistant: {'name': 'ask_database', 'arguments': '{\\n \"query\": \"SELECT ar.Name, COUNT(t.TrackId) AS NumTracks FROM Artist ar INNER JOIN Album al ON ar.ArtistId = al.ArtistId INNER JOIN Track t ON al.AlbumId = t.AlbumId GROUP BY ar.ArtistId ORDER BY NumTracks DESC LIMIT 5\"\\n}'}\n",
"\n",
"function (ask_database): [('Iron Maiden', 213), ('U2', 135), ('Led Zeppelin', 114), ('Metallica', 112), ('Lost', 92)]\n",
"\n"
"\u001b[31msystem: Answer user questions by generating SQL queries against the Chinook Music Database.\n",
"\u001b[0m\n",
"\u001b[32muser: Hi, who are the top 5 artists by number of tracks?\n",
"\u001b[0m\n",
"\u001b[34massistant: {'name': 'ask_database', 'arguments': '{\\n \"query\": \"SELECT Artist.Name, COUNT(Track.TrackId) AS TrackCount FROM Artist JOIN Album ON Artist.ArtistId = Album.ArtistId JOIN Track ON Album.AlbumId = Track.AlbumId GROUP BY Artist.Name ORDER BY TrackCount DESC LIMIT 5\"\\n}'}\n",
"\u001b[0m\n",
"\u001b[35mfunction (ask_database): [('Iron Maiden', 213), ('U2', 135), ('Led Zeppelin', 114), ('Metallica', 112), ('Lost', 92)]\n",
"\u001b[0m\n"
]
}
],
@ -675,18 +738,19 @@
"messages = []\n",
"messages.append({\"role\": \"system\", \"content\": \"Answer user questions by generating SQL queries against the Chinook Music Database.\"})\n",
"messages.append({\"role\": \"user\", \"content\": \"Hi, who are the top 5 artists by number of tracks?\"})\n",
"chat_response = chat_completion_request(messages, functions)\n",
"chat_response = chat_completion_request(messages, tools)\n",
"assistant_message = chat_response.json()[\"choices\"][0][\"message\"]\n",
"assistant_message['content'] = str(assistant_message[\"tool_calls\"][0][\"function\"])\n",
"messages.append(assistant_message)\n",
"if assistant_message.get(\"function_call\"):\n",
"if assistant_message.get(\"tool_calls\"):\n",
" results = execute_function_call(assistant_message)\n",
" messages.append({\"role\": \"function\", \"name\": assistant_message[\"function_call\"][\"name\"], \"content\": results})\n",
" messages.append({\"role\": \"tool\", \"tool_call_id\": assistant_message[\"tool_calls\"][0]['id'], \"name\": assistant_message[\"tool_calls\"][0][\"function\"][\"name\"], \"content\": results})\n",
"pretty_print_conversation(messages)"
]
},
{
"cell_type": "code",
"execution_count": 18,
"execution_count": 19,
"id": "710481dc",
"metadata": {
"scrolled": true
@ -696,31 +760,32 @@
"name": "stdout",
"output_type": "stream",
"text": [
"system: Answer user questions by generating SQL queries against the Chinook Music Database.\n",
"\n",
"user: Hi, who are the top 5 artists by number of tracks?\n",
"\n",
"assistant: {'name': 'ask_database', 'arguments': '{\\n \"query\": \"SELECT ar.Name, COUNT(t.TrackId) AS NumTracks FROM Artist ar INNER JOIN Album al ON ar.ArtistId = al.ArtistId INNER JOIN Track t ON al.AlbumId = t.AlbumId GROUP BY ar.ArtistId ORDER BY NumTracks DESC LIMIT 5\"\\n}'}\n",
"\n",
"function (ask_database): [('Iron Maiden', 213), ('U2', 135), ('Led Zeppelin', 114), ('Metallica', 112), ('Lost', 92)]\n",
"\n",
"user: What is the name of the album with the most tracks?\n",
"\n",
"assistant: {'name': 'ask_database', 'arguments': '{\\n \"query\": \"SELECT al.Title, COUNT(t.TrackId) AS NumTracks FROM Album al INNER JOIN Track t ON al.AlbumId = t.AlbumId GROUP BY al.AlbumId ORDER BY NumTracks DESC LIMIT 1\"\\n}'}\n",
"\n",
"function (ask_database): [('Greatest Hits', 57)]\n",
"\n"
"\u001b[31msystem: Answer user questions by generating SQL queries against the Chinook Music Database.\n",
"\u001b[0m\n",
"\u001b[32muser: Hi, who are the top 5 artists by number of tracks?\n",
"\u001b[0m\n",
"\u001b[34massistant: {'name': 'ask_database', 'arguments': '{\\n \"query\": \"SELECT Artist.Name, COUNT(Track.TrackId) AS TrackCount FROM Artist JOIN Album ON Artist.ArtistId = Album.ArtistId JOIN Track ON Album.AlbumId = Track.AlbumId GROUP BY Artist.Name ORDER BY TrackCount DESC LIMIT 5\"\\n}'}\n",
"\u001b[0m\n",
"\u001b[35mfunction (ask_database): [('Iron Maiden', 213), ('U2', 135), ('Led Zeppelin', 114), ('Metallica', 112), ('Lost', 92)]\n",
"\u001b[0m\n",
"\u001b[32muser: What is the name of the album with the most tracks?\n",
"\u001b[0m\n",
"\u001b[34massistant: {'name': 'ask_database', 'arguments': '{\\n \"query\": \"SELECT Album.Title, COUNT(Track.TrackId) AS TrackCount FROM Album JOIN Track ON Album.AlbumId = Track.AlbumId GROUP BY Album.Title ORDER BY TrackCount DESC LIMIT 1\"\\n}'}\n",
"\u001b[0m\n",
"\u001b[35mfunction (ask_database): [('Greatest Hits', 57)]\n",
"\u001b[0m\n"
]
}
],
"source": [
"messages.append({\"role\": \"user\", \"content\": \"What is the name of the album with the most tracks?\"})\n",
"chat_response = chat_completion_request(messages, functions)\n",
"chat_response = chat_completion_request(messages, tools)\n",
"assistant_message = chat_response.json()[\"choices\"][0][\"message\"]\n",
"assistant_message['content'] = str(assistant_message[\"tool_calls\"][0][\"function\"])\n",
"messages.append(assistant_message)\n",
"if assistant_message.get(\"function_call\"):\n",
"if assistant_message.get(\"tool_calls\"):\n",
" results = execute_function_call(assistant_message)\n",
" messages.append({\"role\": \"function\", \"content\": results, \"name\": assistant_message[\"function_call\"][\"name\"]})\n",
" messages.append({\"role\": \"tool\", \"tool_call_id\": assistant_message[\"tool_calls\"][0]['id'], \"name\": assistant_message[\"tool_calls\"][0][\"function\"][\"name\"], \"content\": results})\n",
"pretty_print_conversation(messages)"
]
},
@ -752,7 +817,7 @@
"name": "python",
"nbconvert_exporter": "python",
"pygments_lexer": "ipython3",
"version": "3.9.9"
"version": "3.9.6"
}
},
"nbformat": 4,