Mirror of https://github.com/hwchase17/langchain, synced 2024-11-08 07:10:35 +00:00
ed58eeb9c5
Moved the following modules to the new package langchain-community in a backwards compatible fashion:

```
mv langchain/langchain/adapters community/langchain_community
mv langchain/langchain/callbacks community/langchain_community/callbacks
mv langchain/langchain/chat_loaders community/langchain_community
mv langchain/langchain/chat_models community/langchain_community
mv langchain/langchain/document_loaders community/langchain_community
mv langchain/langchain/docstore community/langchain_community
mv langchain/langchain/document_transformers community/langchain_community
mv langchain/langchain/embeddings community/langchain_community
mv langchain/langchain/graphs community/langchain_community
mv langchain/langchain/llms community/langchain_community
mv langchain/langchain/memory/chat_message_histories community/langchain_community
mv langchain/langchain/retrievers community/langchain_community
mv langchain/langchain/storage community/langchain_community
mv langchain/langchain/tools community/langchain_community
mv langchain/langchain/utilities community/langchain_community
mv langchain/langchain/vectorstores community/langchain_community
mv langchain/langchain/agents/agent_toolkits community/langchain_community
mv langchain/langchain/cache.py community/langchain_community
```

Moved the following to core:

```
mv langchain/langchain/utils/json_schema.py core/langchain_core/utils
mv langchain/langchain/utils/html.py core/langchain_core/utils
mv langchain/langchain/utils/strings.py core/langchain_core/utils
cat langchain/langchain/utils/env.py >> core/langchain_core/utils/env.py
rm langchain/langchain/utils/env.py
```

See .scripts/community_split/script_integrations.sh for all changes
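In practice, "backwards compatible" means the old `langchain.*` import paths continue to resolve while the code now lives in `langchain_community`. A minimal sketch for the retriever defined in this file, assuming the compatibility shim in `langchain` re-exports the moved class (the exact deprecation behavior depends on the installed versions):

```python
# New, preferred import path after the split:
from langchain_community.retrievers import ChatGPTPluginRetriever

# Old path, assumed to keep resolving through a compatibility shim in `langchain`
# (and likely to emit a deprecation warning in newer releases):
from langchain.retrievers import ChatGPTPluginRetriever as LegacyChatGPTPluginRetriever

# Under that assumption, both names refer to the same class object.
assert ChatGPTPluginRetriever is LegacyChatGPTPluginRetriever
```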
91 lines · 3.0 KiB · Python
from __future__ import annotations

from typing import List, Optional

import aiohttp
import requests
from langchain_core.callbacks import (
    AsyncCallbackManagerForRetrieverRun,
    CallbackManagerForRetrieverRun,
)
from langchain_core.documents import Document
from langchain_core.retrievers import BaseRetriever


class ChatGPTPluginRetriever(BaseRetriever):
    """`ChatGPT plugin` retriever."""

    url: str
    """URL of the ChatGPT plugin."""
    bearer_token: str
    """Bearer token for the ChatGPT plugin."""
    top_k: int = 3
    """Number of documents to return."""
    filter: Optional[dict] = None
    """Filter to apply to the results."""
    aiosession: Optional[aiohttp.ClientSession] = None
    """Aiohttp session to use for requests."""

    class Config:
        """Configuration for this pydantic object."""

        arbitrary_types_allowed = True
        """Allow arbitrary types."""

    def _get_relevant_documents(
        self, query: str, *, run_manager: CallbackManagerForRetrieverRun
    ) -> List[Document]:
        # Query the plugin's /query endpoint synchronously and convert the hits.
        url, json, headers = self._create_request(query)
        response = requests.post(url, json=json, headers=headers)
        results = response.json()["results"][0]["results"]
        docs = []
        for d in results:
            # "text" becomes the page content; prefer the hit's "metadata"
            # field, falling back to the remaining fields.
            content = d.pop("text")
            metadata = d.pop("metadata", d)
            if metadata.get("source_id"):
                metadata["source"] = metadata.pop("source_id")
            docs.append(Document(page_content=content, metadata=metadata))
        return docs

    async def _aget_relevant_documents(
        self, query: str, *, run_manager: AsyncCallbackManagerForRetrieverRun
    ) -> List[Document]:
        url, json, headers = self._create_request(query)

        # Reuse the configured aiohttp session if provided, otherwise open a
        # one-off session for this request.
        if not self.aiosession:
            async with aiohttp.ClientSession() as session:
                async with session.post(url, headers=headers, json=json) as response:
                    res = await response.json()
        else:
            async with self.aiosession.post(
                url, headers=headers, json=json
            ) as response:
                res = await response.json()

        results = res["results"][0]["results"]
        docs = []
        for d in results:
            content = d.pop("text")
            metadata = d.pop("metadata", d)
            if metadata.get("source_id"):
                metadata["source"] = metadata.pop("source_id")
            docs.append(Document(page_content=content, metadata=metadata))
        return docs

    def _create_request(self, query: str) -> tuple[str, dict, dict]:
        # Build the POST target, JSON body, and auth headers for /query.
        url = f"{self.url}/query"
        json = {
            "queries": [
                {
                    "query": query,
                    "filter": self.filter,
                    "top_k": self.top_k,
                }
            ]
        }
        headers = {
            "Content-Type": "application/json",
            "Authorization": f"Bearer {self.bearer_token}",
        }
        return url, json, headers
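For reference, a minimal usage sketch of `ChatGPTPluginRetriever` (not part of this file). It assumes a ChatGPT retrieval plugin server is reachable at a placeholder URL with a placeholder bearer token, and that the retriever is called through the standard `BaseRetriever` runnable interface:

```python
import asyncio

from langchain_community.retrievers import ChatGPTPluginRetriever

# Placeholder plugin endpoint and token; replace with your own deployment.
retriever = ChatGPTPluginRetriever(
    url="http://localhost:8000",
    bearer_token="YOUR_BEARER_TOKEN",
    top_k=3,
)

# Synchronous path: POSTs to {url}/query via `requests`.
docs = retriever.invoke("What did the president say about tech regulation?")
for doc in docs:
    print(doc.metadata.get("source"), doc.page_content[:80])


# Asynchronous path: uses aiohttp (optionally a shared ClientSession
# passed via the `aiosession` field).
async def main() -> None:
    adocs = await retriever.ainvoke("What did the president say about tech regulation?")
    print(len(adocs))


asyncio.run(main())
```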