mirror of
https://github.com/hwchase17/langchain
synced 2024-11-18 09:25:54 +00:00
9ffca3b92a
Update imports to use core for the low-hanging fruit changes. Ran following ```bash git grep -l 'langchain.schema.runnable' {docs,templates,cookbook} | xargs sed -i '' 's/langchain\.schema\.runnable/langchain_core.runnables/g' git grep -l 'langchain.schema.output_parser' {docs,templates,cookbook} | xargs sed -i '' 's/langchain\.schema\.output_parser/langchain_core.output_parsers/g' git grep -l 'langchain.schema.messages' {docs,templates,cookbook} | xargs sed -i '' 's/langchain\.schema\.messages/langchain_core.messages/g' git grep -l 'langchain.schema.chat_histry' {docs,templates,cookbook} | xargs sed -i '' 's/langchain\.schema\.chat_history/langchain_core.chat_history/g' git grep -l 'langchain.schema.prompt_template' {docs,templates,cookbook} | xargs sed -i '' 's/langchain\.schema\.prompt_template/langchain_core.prompts/g' git grep -l 'from langchain.pydantic_v1' {docs,templates,cookbook} | xargs sed -i '' 's/from langchain\.pydantic_v1/from langchain_core.pydantic_v1/g' git grep -l 'from langchain.tools.base' {docs,templates,cookbook} | xargs sed -i '' 's/from langchain\.tools\.base/from langchain_core.tools/g' git grep -l 'from langchain.chat_models.base' {docs,templates,cookbook} | xargs sed -i '' 's/from langchain\.chat_models.base/from langchain_core.language_models.chat_models/g' git grep -l 'from langchain.llms.base' {docs,templates,cookbook} | xargs sed -i '' 's/from langchain\.llms\.base\ /from langchain_core.language_models.llms\ /g' git grep -l 'from langchain.embeddings.base' {docs,templates,cookbook} | xargs sed -i '' 's/from langchain\.embeddings\.base/from langchain_core.embeddings/g' git grep -l 'from langchain.vectorstores.base' {docs,templates,cookbook} | xargs sed -i '' 's/from langchain\.vectorstores\.base/from langchain_core.vectorstores/g' git grep -l 'from langchain.agents.tools' {docs,templates,cookbook} | xargs sed -i '' 's/from langchain\.agents\.tools/from langchain_core.tools/g' git grep -l 'from langchain.schema.output' 
{docs,templates,cookbook} | xargs sed -i '' 's/from langchain\.schema\.output\ /from langchain_core.outputs\ /g' git grep -l 'from langchain.schema.embeddings' {docs,templates,cookbook} | xargs sed -i '' 's/from langchain\.schema\.embeddings/from langchain_core.embeddings/g' git grep -l 'from langchain.schema.document' {docs,templates,cookbook} | xargs sed -i '' 's/from langchain\.schema\.document/from langchain_core.documents/g' git grep -l 'from langchain.schema.agent' {docs,templates,cookbook} | xargs sed -i '' 's/from langchain\.schema\.agent/from langchain_core.agents/g' git grep -l 'from langchain.schema.prompt ' {docs,templates,cookbook} | xargs sed -i '' 's/from langchain\.schema\.prompt\ /from langchain_core.prompt_values /g' git grep -l 'from langchain.schema.language_model' {docs,templates,cookbook} | xargs sed -i '' 's/from langchain\.schema\.language_model/from langchain_core.language_models/g' ```
215 lines
5.5 KiB
Plaintext
{
 "cells": [
  {
   "cell_type": "markdown",
   "id": "2def22ea",
   "metadata": {},
   "source": [
    "# Extraction with OpenAI Tools\n",
    "\n",
    "Performing extraction has never been easier! OpenAI's tool calling ability is the perfect thing to use as it allows for extracting multiple different elements from text that are different types. \n",
    "\n",
    "Models after 1106 use tools and support \"parallel function calling\" which makes this super easy."
   ]
  },
  {
   "cell_type": "code",
   "execution_count": 8,
   "id": "5c628496",
   "metadata": {},
   "outputs": [],
   "source": [
    "from typing import List, Optional\n",
    "\n",
    "from langchain.chains.openai_tools import create_extraction_chain_pydantic\n",
    "from langchain.chat_models import ChatOpenAI\n",
    "from langchain_core.pydantic_v1 import BaseModel"
   ]
  },
  {
   "cell_type": "code",
   "execution_count": 2,
   "id": "afe9657b",
   "metadata": {},
   "outputs": [],
   "source": [
    "# Make sure to use a recent model that supports tools\n",
    "model = ChatOpenAI(model=\"gpt-3.5-turbo-1106\")"
   ]
  },
  {
   "cell_type": "code",
   "execution_count": 3,
   "id": "bc0ca3b6",
   "metadata": {},
   "outputs": [],
   "source": [
    "# Pydantic is an easy way to define a schema\n",
    "class Person(BaseModel):\n",
    "    \"\"\"Information about people to extract.\"\"\"\n",
    "\n",
    "    name: str\n",
    "    age: Optional[int] = None"
   ]
  },
  {
   "cell_type": "code",
   "execution_count": 10,
   "id": "2036af68",
   "metadata": {},
   "outputs": [],
   "source": [
    "chain = create_extraction_chain_pydantic(Person, model)"
   ]
  },
  {
   "cell_type": "code",
   "execution_count": 11,
   "id": "1748ad21",
   "metadata": {},
   "outputs": [
    {
     "data": {
      "text/plain": [
       "[Person(name='jane', age=2), Person(name='bob', age=3)]"
      ]
     },
     "execution_count": 11,
     "metadata": {},
     "output_type": "execute_result"
    }
   ],
   "source": [
    "chain.invoke({\"input\": \"jane is 2 and bob is 3\"})"
   ]
  },
  {
   "cell_type": "code",
   "execution_count": 12,
   "id": "c8262ce5",
   "metadata": {},
   "outputs": [],
   "source": [
    "# Let's define another element\n",
    "class Class(BaseModel):\n",
    "    \"\"\"Information about classes to extract.\"\"\"\n",
    "\n",
    "    teacher: str\n",
    "    students: List[str]"
   ]
  },
  {
   "cell_type": "code",
   "execution_count": 13,
   "id": "4973c104",
   "metadata": {},
   "outputs": [],
   "source": [
    "chain = create_extraction_chain_pydantic([Person, Class], model)"
   ]
  },
  {
   "cell_type": "code",
   "execution_count": 14,
   "id": "e976a15e",
   "metadata": {},
   "outputs": [
    {
     "data": {
      "text/plain": [
       "[Person(name='jane', age=2),\n",
       " Person(name='bob', age=3),\n",
       " Class(teacher='Mrs Sampson', students=['jane', 'bob'])]"
      ]
     },
     "execution_count": 14,
     "metadata": {},
     "output_type": "execute_result"
    }
   ],
   "source": [
    "chain.invoke({\"input\": \"jane is 2 and bob is 3 and they are in Mrs Sampson's class\"})"
   ]
  },
  {
   "cell_type": "markdown",
   "id": "6575a7d6",
   "metadata": {},
   "source": [
    "## Under the hood\n",
    "\n",
    "Under the hood, this is a simple chain:"
   ]
  },
  {
   "cell_type": "markdown",
   "id": "b8ba83e5",
   "metadata": {},
   "source": [
    "```python\n",
    "from typing import Union, List, Type, Optional\n",
    "\n",
    "from langchain.output_parsers.openai_tools import PydanticToolsParser\n",
    "from langchain.utils.openai_functions import convert_pydantic_to_openai_tool\n",
    "from langchain_core.runnables import Runnable\n",
    "from langchain_core.pydantic_v1 import BaseModel\n",
    "from langchain.prompts import ChatPromptTemplate\n",
    "from langchain_core.messages import SystemMessage\n",
    "from langchain_core.language_models import BaseLanguageModel\n",
    "\n",
    "_EXTRACTION_TEMPLATE = \"\"\"Extract and save the relevant entities mentioned \\\n",
    "in the following passage together with their properties.\n",
    "\n",
    "If a property is not present and is not required in the function parameters, do not include it in the output.\"\"\"  # noqa: E501\n",
    "\n",
    "\n",
    "def create_extraction_chain_pydantic(\n",
    "    pydantic_schemas: Union[List[Type[BaseModel]], Type[BaseModel]],\n",
    "    llm: BaseLanguageModel,\n",
    "    system_message: str = _EXTRACTION_TEMPLATE,\n",
    ") -> Runnable:\n",
    "    if not isinstance(pydantic_schemas, list):\n",
    "        pydantic_schemas = [pydantic_schemas]\n",
    "    prompt = ChatPromptTemplate.from_messages([\n",
    "        (\"system\", system_message),\n",
    "        (\"user\", \"{input}\")\n",
    "    ])\n",
    "    tools = [convert_pydantic_to_openai_tool(p) for p in pydantic_schemas]\n",
    "    model = llm.bind(tools=tools)\n",
    "    chain = prompt | model | PydanticToolsParser(tools=pydantic_schemas)\n",
    "    return chain\n",
    "```"
   ]
  },
  {
   "cell_type": "code",
   "execution_count": null,
   "id": "2eac6b68",
   "metadata": {},
   "outputs": [],
   "source": []
  }
 ],
 "metadata": {
  "kernelspec": {
   "display_name": "Python 3 (ipykernel)",
   "language": "python",
   "name": "python3"
  },
  "language_info": {
   "codemirror_mode": {
    "name": "ipython",
    "version": 3
   },
   "file_extension": ".py",
   "mimetype": "text/x-python",
   "name": "python",
   "nbconvert_exporter": "python",
   "pygments_lexer": "ipython3",
   "version": "3.9.1"
  }
 },
 "nbformat": 4,
 "nbformat_minor": 5
}