{
 "cells": [
  {
   "cell_type": "markdown",
   "id": "2def22ea",
   "metadata": {},
   "source": [
    "# Extraction with OpenAI Tools\n",
    "\n",
    "OpenAI's tool calling is a natural fit for extraction: a single call can pull out multiple elements of different types from the same text.\n",
    "\n",
    "OpenAI models from the 1106 releases onward (e.g. `gpt-3.5-turbo-1106`) support tools and \"parallel function calling\", which makes this straightforward."
   ]
},
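  {
   "cell_type": "markdown",
   "id": "c0ffee01",
   "metadata": {},
   "source": [
    "Before running the notebook you need OpenAI credentials. The next cell is an optional, minimal setup sketch: it assumes you want to enter the key interactively via `getpass`, but setting the `OPENAI_API_KEY` environment variable any other way works just as well."
   ]
  },
  {
   "cell_type": "code",
   "execution_count": null,
   "id": "c0ffee02",
   "metadata": {},
   "outputs": [],
   "source": [
    "import getpass\n",
    "import os\n",
    "\n",
    "# Optional setup sketch: ChatOpenAI reads OPENAI_API_KEY from the environment\n",
    "# when the key isn't passed explicitly.\n",
    "if \"OPENAI_API_KEY\" not in os.environ:\n",
    "    os.environ[\"OPENAI_API_KEY\"] = getpass.getpass(\"OpenAI API key: \")"
   ]
  },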
  {
   "cell_type": "code",
   "execution_count": 8,
   "id": "5c628496",
   "metadata": {},
   "outputs": [],
   "source": [
    "from typing import List, Optional\n",
    "\n",
    "from langchain.chains.openai_tools import create_extraction_chain_pydantic\n",
    "from langchain.chat_models import ChatOpenAI\n",
    "from langchain.pydantic_v1 import BaseModel"
   ]
  },
  {
   "cell_type": "code",
   "execution_count": 2,
   "id": "afe9657b",
   "metadata": {},
   "outputs": [],
   "source": [
    "# Make sure to use a recent model that supports tools\n",
    "model = ChatOpenAI(model=\"gpt-3.5-turbo-1106\")"
   ]
  },
  {
   "cell_type": "code",
   "execution_count": 3,
   "id": "bc0ca3b6",
   "metadata": {},
   "outputs": [],
   "source": [
    "# Pydantic is an easy way to define a schema\n",
    "class Person(BaseModel):\n",
    "    \"\"\"Information about people to extract.\"\"\"\n",
    "\n",
    "    name: str\n",
    "    age: Optional[int] = None"
   ]
  },
  {
   "cell_type": "code",
   "execution_count": 10,
   "id": "2036af68",
   "metadata": {},
   "outputs": [],
   "source": [
    "chain = create_extraction_chain_pydantic(Person, model)"
   ]
  },
  {
   "cell_type": "code",
   "execution_count": 11,
   "id": "1748ad21",
   "metadata": {},
   "outputs": [
    {
     "data": {
      "text/plain": [
       "[Person(name='jane', age=2), Person(name='bob', age=3)]"
      ]
     },
     "execution_count": 11,
     "metadata": {},
     "output_type": "execute_result"
    }
   ],
   "source": [
    "chain.invoke({\"input\": \"jane is 2 and bob is 3\"})"
   ]
  },
  {
   "cell_type": "code",
   "execution_count": 12,
   "id": "c8262ce5",
   "metadata": {},
   "outputs": [],
   "source": [
    "# Let's define another element\n",
    "class Class(BaseModel):\n",
    "    \"\"\"Information about classes to extract.\"\"\"\n",
    "\n",
    "    teacher: str\n",
    "    students: List[str]"
   ]
  },
  {
   "cell_type": "code",
   "execution_count": 13,
   "id": "4973c104",
   "metadata": {},
   "outputs": [],
   "source": [
    "chain = create_extraction_chain_pydantic([Person, Class], model)"
   ]
  },
  {
   "cell_type": "code",
   "execution_count": 14,
   "id": "e976a15e",
   "metadata": {},
   "outputs": [
    {
     "data": {
      "text/plain": [
       "[Person(name='jane', age=2),\n",
       " Person(name='bob', age=3),\n",
       " Class(teacher='Mrs Sampson', students=['jane', 'bob'])]"
      ]
     },
     "execution_count": 14,
     "metadata": {},
     "output_type": "execute_result"
    }
   ],
   "source": [
    "chain.invoke({\"input\": \"jane is 2 and bob is 3 and they are in Mrs Sampson's class\"})"
   ]
  },
  {
   "cell_type": "markdown",
   "id": "6575a7d6",
   "metadata": {},
   "source": [
    "## Under the hood\n",
    "\n",
    "`create_extraction_chain_pydantic` just builds a simple chain:"
   ]
  },
  {
   "cell_type": "markdown",
   "id": "b8ba83e5",
   "metadata": {},
   "source": [
    "```python\n",
    "from typing import Union, List, Type\n",
    "\n",
    "from langchain.output_parsers.openai_tools import PydanticToolsParser\n",
    "from langchain.utils.openai_functions import convert_pydantic_to_openai_tool\n",
    "from langchain.schema.runnable import Runnable\n",
    "from langchain.pydantic_v1 import BaseModel\n",
    "from langchain.prompts import ChatPromptTemplate\n",
    "from langchain.schema.language_model import BaseLanguageModel\n",
    "\n",
    "_EXTRACTION_TEMPLATE = \"\"\"Extract and save the relevant entities mentioned \\\n",
    "in the following passage together with their properties.\n",
    "\n",
    "If a property is not present and is not required in the function parameters, do not include it in the output.\"\"\"  # noqa: E501\n",
    "\n",
    "\n",
    "def create_extraction_chain_pydantic(\n",
    "    pydantic_schemas: Union[List[Type[BaseModel]], Type[BaseModel]],\n",
    "    llm: BaseLanguageModel,\n",
    "    system_message: str = _EXTRACTION_TEMPLATE,\n",
    ") -> Runnable:\n",
    "    if not isinstance(pydantic_schemas, list):\n",
    "        pydantic_schemas = [pydantic_schemas]\n",
    "    prompt = ChatPromptTemplate.from_messages(\n",
    "        [(\"system\", system_message), (\"user\", \"{input}\")]\n",
    "    )\n",
    "    tools = [convert_pydantic_to_openai_tool(p) for p in pydantic_schemas]\n",
    "    model = llm.bind(tools=tools)\n",
    "    chain = prompt | model | PydanticToolsParser(tools=pydantic_schemas)\n",
    "    return chain\n",
    "```"
   ]
},
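  {
   "cell_type": "markdown",
   "id": "c0ffee03",
   "metadata": {},
   "source": [
    "As a quick sanity check, the same chain can be assembled by hand from the pieces above. The cell below is a minimal sketch (not executed here): it reuses the `Person`, `Class`, and `model` objects defined earlier and inlines a short extraction prompt in place of `_EXTRACTION_TEMPLATE`."
   ]
  },
  {
   "cell_type": "code",
   "execution_count": null,
   "id": "c0ffee04",
   "metadata": {},
   "outputs": [],
   "source": [
    "from langchain.output_parsers.openai_tools import PydanticToolsParser\n",
    "from langchain.prompts import ChatPromptTemplate\n",
    "from langchain.utils.openai_functions import convert_pydantic_to_openai_tool\n",
    "\n",
    "# Minimal hand-rolled version of the chain above (sketch; not executed in this notebook).\n",
    "system_message = (\n",
    "    \"Extract and save the relevant entities mentioned in the passage \"\n",
    "    \"together with their properties.\"\n",
    ")\n",
    "prompt = ChatPromptTemplate.from_messages(\n",
    "    [(\"system\", system_message), (\"user\", \"{input}\")]\n",
    ")\n",
    "schemas = [Person, Class]\n",
    "tools = [convert_pydantic_to_openai_tool(p) for p in schemas]\n",
    "manual_chain = prompt | model.bind(tools=tools) | PydanticToolsParser(tools=schemas)\n",
    "\n",
    "# manual_chain.invoke({\"input\": \"jane is 2 and bob is 3\"})"
   ]
  },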
  {
   "cell_type": "code",
   "execution_count": null,
   "id": "2eac6b68",
   "metadata": {},
   "outputs": [],
   "source": []
  }
 ],
 "metadata": {
  "kernelspec": {
   "display_name": "Python 3 (ipykernel)",
   "language": "python",
   "name": "python3"
  },
  "language_info": {
   "codemirror_mode": {
    "name": "ipython",
    "version": 3
   },
   "file_extension": ".py",
   "mimetype": "text/x-python",
   "name": "python",
   "nbconvert_exporter": "python",
   "pygments_lexer": "ipython3",
   "version": "3.9.1"
  }
 },
 "nbformat": 4,
 "nbformat_minor": 5
}