forked from Archives/langchain
Fix class promotion (#6187)
In LangChain, all module classes are enumerated in the `__init__.py` file of the corresponding module, but some classes were missing from those files. This PR:
- adds the missing classes to the module `__init__.py` files
- sorts each `__init__.py:__all__` list of class names
- renames `langchain.tools.sql_database.tool.QueryCheckerTool` to `QuerySQLCheckerTool`, because it conflicted with `langchain.tools.spark_sql.tool.QueryCheckerTool`
- changes `pyproject.toml`:
  - adds `pgvector` to `pyproject.toml:extended_testing`
  - adds `pandas` to `pyproject.toml:[tool.poetry.group.test.dependencies]`
- comments out `streamlit` in `callbacks/__init__.py`, because `streamlit` now requires Python >=3.7, !=3.9.7
- fixes duplicate names in `tools`
- fixes the corresponding unit tests

#### Who can review? @hwchase17 @dev2049
This commit is contained in:
parent c0c2fd0782
commit c7ca350cd3
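The `_EXPECTED` public-API tests updated in this diff all follow one pattern. A minimal sketch of that pattern, with an abbreviated expected list for illustration (the real tests enumerate every exported name):

```python
# Sketch of the public-API test pattern this PR updates: each module's
# __init__.py must export exactly the expected names, kept sorted.
# The _EXPECTED list here is abbreviated for illustration.
from langchain.tools import __all__ as public_api

_EXPECTED = [
    "AIPluginTool",
    "APIOperation",
    "ArxivQueryRun",
    # ... every remaining exported name, in alphabetical order ...
]


def test_public_api() -> None:
    """__all__ stays sorted and matches the pinned list of names."""
    assert list(public_api) == sorted(public_api)
    assert set(public_api) == set(_EXPECTED)
```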
@@ -31,6 +31,7 @@ from langchain.agents.load_tools import (
)
from langchain.agents.loading import load_agent
from langchain.agents.mrkl.base import MRKLChain, ZeroShotAgent
from langchain.agents.openai_functions_agent.base import OpenAIFunctionsAgent
from langchain.agents.react.base import ReActChain, ReActTextWorldAgent
from langchain.agents.self_ask_with_search.base import SelfAskWithSearchChain
from langchain.agents.structured_chat.base import StructuredChatAgent
@@ -47,6 +48,7 @@ __all__ = [
    "ConversationalChatAgent",
    "LLMSingleActionAgent",
    "MRKLChain",
    "OpenAIFunctionsAgent",
    "ReActChain",
    "ReActTextWorldAgent",
    "SelfAskWithSearchChain",
@@ -10,7 +10,7 @@ from langchain.tools import BaseTool
from langchain.tools.sql_database.tool import (
    InfoSQLDatabaseTool,
    ListSQLDatabaseTool,
    QueryCheckerTool,
    QuerySQLCheckerTool,
    QuerySQLDataBaseTool,
)
@@ -55,5 +55,5 @@ class SQLDatabaseToolkit(BaseToolkit):
                db=self.db, description=info_sql_database_tool_description
            ),
            ListSQLDatabaseTool(db=self.db),
            QueryCheckerTool(db=self.db, llm=self.llm),
            QuerySQLCheckerTool(db=self.db, llm=self.llm),
        ]
@@ -2,6 +2,7 @@

from langchain.callbacks.aim_callback import AimCallbackHandler
from langchain.callbacks.argilla_callback import ArgillaCallbackHandler
from langchain.callbacks.arize_callback import ArizeCallbackHandler
from langchain.callbacks.clearml_callback import ClearMLCallbackHandler
from langchain.callbacks.comet_ml_callback import CometCallbackHandler
from langchain.callbacks.file import FileCallbackHandler
@@ -15,23 +16,35 @@ from langchain.callbacks.mlflow_callback import MlflowCallbackHandler
from langchain.callbacks.openai_info import OpenAICallbackHandler
from langchain.callbacks.stdout import StdOutCallbackHandler
from langchain.callbacks.streaming_aiter import AsyncIteratorCallbackHandler
from langchain.callbacks.streaming_stdout import StreamingStdOutCallbackHandler
from langchain.callbacks.streaming_stdout_final_only import (
    FinalStreamingStdOutCallbackHandler,
)

# now streamlit requires Python >=3.7, !=3.9.7 So, it is commented out here.
# from langchain.callbacks.streamlit import StreamlitCallbackHandler
from langchain.callbacks.wandb_callback import WandbCallbackHandler
from langchain.callbacks.whylabs_callback import WhyLabsCallbackHandler

__all__ = [
    "ArgillaCallbackHandler",
    "OpenAICallbackHandler",
    "StdOutCallbackHandler",
    "FileCallbackHandler",
    "AimCallbackHandler",
    "WandbCallbackHandler",
    "MlflowCallbackHandler",
    "ArgillaCallbackHandler",
    "ArizeCallbackHandler",
    "AsyncIteratorCallbackHandler",
    "ClearMLCallbackHandler",
    "CometCallbackHandler",
    "FileCallbackHandler",
    "FinalStreamingStdOutCallbackHandler",
    "HumanApprovalCallbackHandler",
    "MlflowCallbackHandler",
    "OpenAICallbackHandler",
    "StdOutCallbackHandler",
    "StreamingStdOutCallbackHandler",
    # now streamlit requires Python >=3.7, !=3.9.7 So, it is commented out here.
    # "StreamlitCallbackHandler",
    "WandbCallbackHandler",
    "WhyLabsCallbackHandler",
    "AsyncIteratorCallbackHandler",
    "get_openai_callback",
    "tracing_enabled",
    "wandb_tracing_enabled",
    "HumanApprovalCallbackHandler",
]
@@ -22,6 +22,7 @@ from langchain.chains.llm_summarization_checker.base import LLMSummarizationChec
from langchain.chains.loading import load_chain
from langchain.chains.mapreduce import MapReduceChain
from langchain.chains.moderation import OpenAIModerationChain
from langchain.chains.natbot.base import NatBotChain
from langchain.chains.openai_functions import (
    create_extraction_chain,
    create_extraction_chain_pydantic,
@@ -34,6 +35,13 @@ from langchain.chains.qa_with_sources.base import QAWithSourcesChain
from langchain.chains.qa_with_sources.retrieval import RetrievalQAWithSourcesChain
from langchain.chains.qa_with_sources.vector_db import VectorDBQAWithSourcesChain
from langchain.chains.retrieval_qa.base import RetrievalQA, VectorDBQA
from langchain.chains.router import (
    LLMRouterChain,
    MultiPromptChain,
    MultiRetrievalQAChain,
    MultiRouteChain,
    RouterChain,
)
from langchain.chains.sequential import SequentialChain, SimpleSequentialChain
from langchain.chains.sql_database.base import (
    SQLDatabaseChain,
@@ -42,41 +50,47 @@ from langchain.chains.sql_database.base import (
from langchain.chains.transform import TransformChain

__all__ = [
    "ConversationChain",
    "LLMChain",
    "LLMBashChain",
    "LLMCheckerChain",
    "LLMSummarizationCheckerChain",
    "LLMMathChain",
    "PALChain",
    "QAWithSourcesChain",
    "SQLDatabaseChain",
    "SequentialChain",
    "SimpleSequentialChain",
    "VectorDBQA",
    "VectorDBQAWithSourcesChain",
    "APIChain",
    "LLMRequestsChain",
    "TransformChain",
    "MapReduceChain",
    "OpenAIModerationChain",
    "SQLDatabaseSequentialChain",
    "load_chain",
    "AnalyzeDocumentChain",
    "HypotheticalDocumentEmbedder",
    "ChatVectorDBChain",
    "GraphQAChain",
    "GraphCypherQAChain",
    "ConstitutionalChain",
    "ConversationChain",
    "ConversationalRetrievalChain",
    "FlareChain",
    "GraphCypherQAChain",
    "GraphQAChain",
    "HypotheticalDocumentEmbedder",
    "LLMBashChain",
    "LLMChain",
    "LLMCheckerChain",
    "LLMMathChain",
    "LLMRequestsChain",
    "LLMRouterChain",
    "LLMSummarizationCheckerChain",
    "MapReduceChain",
    "MultiPromptChain",
    "MultiRetrievalQAChain",
    "MultiRouteChain",
    "NatBotChain",
    "NebulaGraphQAChain",
    "OpenAIModerationChain",
    "OpenAPIEndpointChain",
    "PALChain",
    "QAGenerationChain",
    "QAWithSourcesChain",
    "RetrievalQA",
    "RetrievalQAWithSourcesChain",
    "ConversationalRetrievalChain",
    "OpenAPIEndpointChain",
    "FlareChain",
    "NebulaGraphQAChain",
    "RouterChain",
    "SQLDatabaseChain",
    "SQLDatabaseSequentialChain",
    "SequentialChain",
    "SimpleSequentialChain",
    "TransformChain",
    "VectorDBQA",
    "VectorDBQAWithSourcesChain",
    "create_extraction_chain",
    "create_tagging_chain",
    "create_extraction_chain_pydantic",
    "create_tagging_chain",
    "create_tagging_chain_pydantic",
    "load_chain",
]
@@ -1,5 +1,6 @@
"""Wrappers on top of docstores."""
from langchain.docstore.arbitrary_fn import DocstoreFn
from langchain.docstore.in_memory import InMemoryDocstore
from langchain.docstore.wikipedia import Wikipedia

__all__ = ["InMemoryDocstore", "Wikipedia"]
__all__ = ["DocstoreFn", "InMemoryDocstore", "Wikipedia"]
@@ -16,6 +16,12 @@ from langchain.document_loaders.bibtex import BibtexLoader
from langchain.document_loaders.bigquery import BigQueryLoader
from langchain.document_loaders.bilibili import BiliBiliLoader
from langchain.document_loaders.blackboard import BlackboardLoader
from langchain.document_loaders.blob_loaders import (
    Blob,
    BlobLoader,
    FileSystemBlobLoader,
    YoutubeAudioLoader,
)
from langchain.document_loaders.blockchain import BlockchainDocumentLoader
from langchain.document_loaders.chatgpt import ChatGPTLoader
from langchain.document_loaders.college_confidential import CollegeConfidentialLoader
@@ -150,6 +156,8 @@ __all__ = [
    "BigQueryLoader",
    "BiliBiliLoader",
    "BlackboardLoader",
    "Blob",
    "BlobLoader",
    "BlockchainDocumentLoader",
    "CSVLoader",
    "ChatGPTLoader",
@@ -163,10 +171,13 @@ __all__ = [
    "DocugamiLoader",
    "Docx2txtLoader",
    "DuckDBLoader",
    "FaunaLoader",
    "EmbaasBlobLoader",
    "EmbaasLoader",
    "EverNoteLoader",
    "FacebookChatLoader",
    "FaunaLoader",
    "FigmaFileLoader",
    "FileSystemBlobLoader",
    "GCSDirectoryLoader",
    "GCSFileLoader",
    "GitHubIssuesLoader",
@@ -194,8 +205,8 @@ __all__ = [
    "NotionDBLoader",
    "NotionDirectoryLoader",
    "ObsidianLoader",
    "OneDriveLoader",
    "OneDriveFileLoader",
    "OneDriveLoader",
    "OnlinePDFLoader",
    "OutlookMessageLoader",
    "PDFMinerLoader",
@@ -219,6 +230,7 @@ __all__ = [
    "SeleniumURLLoader",
    "SitemapLoader",
    "SlackDirectoryLoader",
    "SnowflakeLoader",
    "SpreedlyLoader",
    "StripeLoader",
    "TelegramChatApiLoader",
@@ -251,8 +263,6 @@ __all__ = [
    "WebBaseLoader",
    "WhatsAppChatLoader",
    "WikipediaLoader",
    "YoutubeAudioLoader",
    "YoutubeLoader",
    "SnowflakeLoader",
    "EmbaasLoader",
    "EmbaasBlobLoader",
]
@@ -27,6 +27,7 @@ from langchain.llms.huggingface_pipeline import HuggingFacePipeline
from langchain.llms.huggingface_text_gen_inference import HuggingFaceTextGenInference
from langchain.llms.human import HumanInputLLM
from langchain.llms.llamacpp import LlamaCpp
from langchain.llms.manifest import ManifestWrapper
from langchain.llms.modal import Modal
from langchain.llms.mosaicml import MosaicML
from langchain.llms.nlpcloud import NLPCloud
@@ -47,25 +48,34 @@ from langchain.llms.vertexai import VertexAI
from langchain.llms.writer import Writer

__all__ = [
    "Anthropic",
    "AI21",
    "AlephAlpha",
    "Anthropic",
    "Anyscale",
    "Aviary",
    "AzureOpenAI",
    "Banana",
    "Baseten",
    "Beam",
    "Bedrock",
    "CTransformers",
    "CerebriumAI",
    "Cohere",
    "CTransformers",
    "Databricks",
    "DeepInfra",
    "FakeListLLM",
    "ForefrontAI",
    "GPT4All",
    "GooglePalm",
    "GooseAI",
    "GPT4All",
    "HuggingFaceEndpoint",
    "HuggingFaceHub",
    "HuggingFacePipeline",
    "HuggingFaceTextGenInference",
    "HumanInputLLM",
    "LlamaCpp",
    "TextGen",
    "ManifestWrapper",
    "Modal",
    "MosaicML",
    "NLPCloud",
@@ -74,25 +84,17 @@ __all__ = [
    "OpenLM",
    "Petals",
    "PipelineAI",
    "HuggingFaceEndpoint",
    "HuggingFaceHub",
    "SagemakerEndpoint",
    "HuggingFacePipeline",
    "AI21",
    "AzureOpenAI",
    "Replicate",
    "SelfHostedPipeline",
    "SelfHostedHuggingFaceLLM",
    "PredictionGuard",
    "PromptLayerOpenAI",
    "PromptLayerOpenAIChat",
    "StochasticAI",
    "Writer",
    "RWKV",
    "PredictionGuard",
    "HumanInputLLM",
    "HuggingFaceTextGenInference",
    "FakeListLLM",
    "Replicate",
    "SagemakerEndpoint",
    "SelfHostedHuggingFaceLLM",
    "SelfHostedPipeline",
    "StochasticAI",
    "VertexAI",
    "Writer",
]

type_to_cls_dict: Dict[str, Type[BaseLLM]] = {
@@ -101,6 +103,7 @@ type_to_cls_dict: Dict[str, Type[BaseLLM]] = {
    "anthropic": Anthropic,
    "anyscale": Anyscale,
    "aviary": Aviary,
    "azure": AzureOpenAI,
    "bananadev": Banana,
    "baseten": Baseten,
    "beam": Beam,
@@ -109,32 +112,31 @@ type_to_cls_dict: Dict[str, Type[BaseLLM]] = {
    "ctransformers": CTransformers,
    "databricks": Databricks,
    "deepinfra": DeepInfra,
    "fake-list": FakeListLLM,
    "forefrontai": ForefrontAI,
    "google_palm": GooglePalm,
    "gooseai": GooseAI,
    "gpt4all": GPT4All,
    "huggingface_hub": HuggingFaceHub,
    "huggingface_endpoint": HuggingFaceEndpoint,
    "huggingface_hub": HuggingFaceHub,
    "huggingface_pipeline": HuggingFacePipeline,
    "huggingface_textgen_inference": HuggingFaceTextGenInference,
    "human-input": HumanInputLLM,
    "llamacpp": LlamaCpp,
    "textgen": TextGen,
    "modal": Modal,
    "mosaic": MosaicML,
    "sagemaker_endpoint": SagemakerEndpoint,
    "nlpcloud": NLPCloud,
    "human-input": HumanInputLLM,
    "openai": OpenAI,
    "openlm": OpenLM,
    "petals": Petals,
    "pipelineai": PipelineAI,
    "huggingface_pipeline": HuggingFacePipeline,
    "azure": AzureOpenAI,
    "replicate": Replicate,
    "rwkv": RWKV,
    "sagemaker_endpoint": SagemakerEndpoint,
    "self_hosted": SelfHostedPipeline,
    "self_hosted_hugging_face": SelfHostedHuggingFaceLLM,
    "stochasticai": StochasticAI,
    "writer": Writer,
    "rwkv": RWKV,
    "huggingface_textgen_inference": HuggingFaceTextGenInference,
    "fake-list": FakeListLLM,
    "vertexai": VertexAI,
    "writer": Writer,
}
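For context: the `type_to_cls_dict` being re-sorted above maps serialized `_type` strings to LLM classes, and `langchain.llms.loading.load_llm` resolves saved configs through it. A simplified sketch of that lookup (modeled on the loader, not its verbatim code):

```python
# Simplified sketch of how type_to_cls_dict is consumed when an LLM
# config is deserialized (modeled on langchain.llms.loading).
from langchain.llms import type_to_cls_dict


def load_llm_from_config(config: dict):
    """Pop the saved `_type` key, resolve it to a class, and instantiate."""
    config = dict(config)  # don't mutate the caller's dict
    llm_type = config.pop("_type")
    if llm_type not in type_to_cls_dict:
        raise ValueError(f"Loading {llm_type} LLM not supported")
    return type_to_cls_dict[llm_type](**config)
```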
@@ -3,17 +3,19 @@ from langchain.memory.buffer import (
    ConversationStringBufferMemory,
)
from langchain.memory.buffer_window import ConversationBufferWindowMemory
from langchain.memory.chat_message_histories import MomentoChatMessageHistory
from langchain.memory.chat_message_histories.cassandra import (
from langchain.memory.chat_message_histories import (
    CassandraChatMessageHistory,
    ChatMessageHistory,
    CosmosDBChatMessageHistory,
    DynamoDBChatMessageHistory,
    FileChatMessageHistory,
    MomentoChatMessageHistory,
    MongoDBChatMessageHistory,
    PostgresChatMessageHistory,
    RedisChatMessageHistory,
    SQLChatMessageHistory,
    ZepChatMessageHistory,
)
from langchain.memory.chat_message_histories.cosmos_db import CosmosDBChatMessageHistory
from langchain.memory.chat_message_histories.dynamodb import DynamoDBChatMessageHistory
from langchain.memory.chat_message_histories.file import FileChatMessageHistory
from langchain.memory.chat_message_histories.in_memory import ChatMessageHistory
from langchain.memory.chat_message_histories.mongodb import MongoDBChatMessageHistory
from langchain.memory.chat_message_histories.postgres import PostgresChatMessageHistory
from langchain.memory.chat_message_histories.redis import RedisChatMessageHistory
from langchain.memory.combined import CombinedMemory
from langchain.memory.entity import (
    ConversationEntityMemory,
@@ -22,6 +24,7 @@ from langchain.memory.entity import (
    SQLiteEntityStore,
)
from langchain.memory.kg import ConversationKGMemory
from langchain.memory.motorhead_memory import MotorheadMemory
from langchain.memory.readonly import ReadOnlySharedMemory
from langchain.memory.simple import SimpleMemory
from langchain.memory.summary import ConversationSummaryMemory
@@ -30,28 +33,31 @@ from langchain.memory.token_buffer import ConversationTokenBufferMemory
from langchain.memory.vectorstore import VectorStoreRetrieverMemory

__all__ = [
    "CombinedMemory",
    "ConversationBufferWindowMemory",
    "ConversationBufferMemory",
    "SimpleMemory",
    "ConversationSummaryBufferMemory",
    "ConversationKGMemory",
    "ConversationEntityMemory",
    "InMemoryEntityStore",
    "RedisEntityStore",
    "SQLiteEntityStore",
    "ConversationSummaryMemory",
    "ChatMessageHistory",
    "ConversationStringBufferMemory",
    "ReadOnlySharedMemory",
    "ConversationTokenBufferMemory",
    "RedisChatMessageHistory",
    "DynamoDBChatMessageHistory",
    "PostgresChatMessageHistory",
    "VectorStoreRetrieverMemory",
    "CosmosDBChatMessageHistory",
    "FileChatMessageHistory",
    "MongoDBChatMessageHistory",
    "CassandraChatMessageHistory",
    "ChatMessageHistory",
    "CombinedMemory",
    "ConversationBufferMemory",
    "ConversationBufferWindowMemory",
    "ConversationEntityMemory",
    "ConversationKGMemory",
    "ConversationStringBufferMemory",
    "ConversationSummaryBufferMemory",
    "ConversationSummaryMemory",
    "ConversationTokenBufferMemory",
    "CosmosDBChatMessageHistory",
    "DynamoDBChatMessageHistory",
    "FileChatMessageHistory",
    "InMemoryEntityStore",
    "MomentoChatMessageHistory",
    "MongoDBChatMessageHistory",
    "MotorheadMemory",
    "PostgresChatMessageHistory",
    "ReadOnlySharedMemory",
    "RedisChatMessageHistory",
    "RedisEntityStore",
    "SQLChatMessageHistory",
    "SQLiteEntityStore",
    "SimpleMemory",
    "VectorStoreRetrieverMemory",
    "ZepChatMessageHistory",
]
@@ -7,6 +7,7 @@ from langchain.memory.chat_message_histories.file import FileChatMessageHistory
from langchain.memory.chat_message_histories.firestore import (
    FirestoreChatMessageHistory,
)
from langchain.memory.chat_message_histories.in_memory import ChatMessageHistory
from langchain.memory.chat_message_histories.momento import MomentoChatMessageHistory
from langchain.memory.chat_message_histories.mongodb import MongoDBChatMessageHistory
from langchain.memory.chat_message_histories.postgres import PostgresChatMessageHistory
@@ -15,15 +16,16 @@ from langchain.memory.chat_message_histories.sql import SQLChatMessageHistory
from langchain.memory.chat_message_histories.zep import ZepChatMessageHistory

__all__ = [
    "DynamoDBChatMessageHistory",
    "RedisChatMessageHistory",
    "PostgresChatMessageHistory",
    "SQLChatMessageHistory",
    "FileChatMessageHistory",
    "CosmosDBChatMessageHistory",
    "FirestoreChatMessageHistory",
    "MongoDBChatMessageHistory",
    "ChatMessageHistory",
    "CassandraChatMessageHistory",
    "ZepChatMessageHistory",
    "CosmosDBChatMessageHistory",
    "DynamoDBChatMessageHistory",
    "FileChatMessageHistory",
    "FirestoreChatMessageHistory",
    "MomentoChatMessageHistory",
    "MongoDBChatMessageHistory",
    "PostgresChatMessageHistory",
    "RedisChatMessageHistory",
    "SQLChatMessageHistory",
    "ZepChatMessageHistory",
]
@@ -1,4 +1,7 @@
from langchain.output_parsers.boolean import BooleanOutputParser
from langchain.output_parsers.combining import CombiningOutputParser
from langchain.output_parsers.datetime import DatetimeOutputParser
from langchain.output_parsers.enum import EnumOutputParser
from langchain.output_parsers.fix import OutputFixingParser
from langchain.output_parsers.list import (
    CommaSeparatedListOutputParser,
@@ -12,16 +15,19 @@ from langchain.output_parsers.retry import RetryOutputParser, RetryWithErrorOutp
from langchain.output_parsers.structured import ResponseSchema, StructuredOutputParser

__all__ = [
    "RegexParser",
    "RegexDictParser",
    "ListOutputParser",
    "BooleanOutputParser",
    "CombiningOutputParser",
    "CommaSeparatedListOutputParser",
    "StructuredOutputParser",
    "ResponseSchema",
    "DatetimeOutputParser",
    "EnumOutputParser",
    "GuardrailsOutputParser",
    "ListOutputParser",
    "OutputFixingParser",
    "PydanticOutputParser",
    "RegexDictParser",
    "RegexParser",
    "ResponseSchema",
    "RetryOutputParser",
    "RetryWithErrorOutputParser",
    "OutputFixingParser",
    "DatetimeOutputParser",
    "StructuredOutputParser",
]
@@ -9,24 +9,36 @@ from langchain.prompts.chat import (
    MessagesPlaceholder,
    SystemMessagePromptTemplate,
)
from langchain.prompts.example_selector import (
    LengthBasedExampleSelector,
    MaxMarginalRelevanceExampleSelector,
    NGramOverlapExampleSelector,
    SemanticSimilarityExampleSelector,
)
from langchain.prompts.few_shot import FewShotPromptTemplate
from langchain.prompts.few_shot_with_templates import FewShotPromptWithTemplates
from langchain.prompts.loading import load_prompt
from langchain.prompts.pipeline import PipelinePromptTemplate
from langchain.prompts.prompt import Prompt, PromptTemplate

__all__ = [
    "BasePromptTemplate",
    "StringPromptTemplate",
    "load_prompt",
    "PromptTemplate",
    "FewShotPromptTemplate",
    "Prompt",
    "FewShotPromptWithTemplates",
    "ChatPromptTemplate",
    "MessagesPlaceholder",
    "HumanMessagePromptTemplate",
    "AIMessagePromptTemplate",
    "SystemMessagePromptTemplate",
    "ChatMessagePromptTemplate",
    "BaseChatPromptTemplate",
    "BasePromptTemplate",
    "ChatMessagePromptTemplate",
    "ChatPromptTemplate",
    "FewShotPromptTemplate",
    "FewShotPromptWithTemplates",
    "HumanMessagePromptTemplate",
    "LengthBasedExampleSelector",
    "MaxMarginalRelevanceExampleSelector",
    "MessagesPlaceholder",
    "NGramOverlapExampleSelector",
    "PipelinePromptTemplate",
    "Prompt",
    "PromptTemplate",
    "SemanticSimilarityExampleSelector",
    "StringPromptTemplate",
    "SystemMessagePromptTemplate",
    "load_prompt",
]
@@ -1,5 +1,6 @@
"""Logic for selecting examples to include in prompts."""
from langchain.prompts.example_selector.length_based import LengthBasedExampleSelector
from langchain.prompts.example_selector.ngram_overlap import NGramOverlapExampleSelector
from langchain.prompts.example_selector.semantic_similarity import (
    MaxMarginalRelevanceExampleSelector,
    SemanticSimilarityExampleSelector,
@@ -7,6 +8,7 @@ from langchain.prompts.example_selector.semantic_similarity import (

__all__ = [
    "LengthBasedExampleSelector",
    "SemanticSimilarityExampleSelector",
    "MaxMarginalRelevanceExampleSelector",
    "NGramOverlapExampleSelector",
    "SemanticSimilarityExampleSelector",
]
@@ -7,8 +7,13 @@ from langchain.retrievers.databerry import DataberryRetriever
from langchain.retrievers.docarray import DocArrayRetriever
from langchain.retrievers.elastic_search_bm25 import ElasticSearchBM25Retriever
from langchain.retrievers.knn import KNNRetriever
from langchain.retrievers.llama_index import (
    LlamaIndexGraphRetriever,
    LlamaIndexRetriever,
)
from langchain.retrievers.merger_retriever import MergerRetriever
from langchain.retrievers.metal import MetalRetriever
from langchain.retrievers.milvus import MilvusRetriever
from langchain.retrievers.pinecone_hybrid_search import PineconeHybridSearchRetriever
from langchain.retrievers.pupmed import PubMedRetriever
from langchain.retrievers.remote_retriever import RemoteLangChainRetriever
@@ -22,10 +27,10 @@ from langchain.retrievers.vespa_retriever import VespaRetriever
from langchain.retrievers.weaviate_hybrid_search import WeaviateHybridSearchRetriever
from langchain.retrievers.wikipedia import WikipediaRetriever
from langchain.retrievers.zep import ZepRetriever
from langchain.retrievers.zilliz import ZillizRetriever

__all__ = [
    "ArxivRetriever",
    "PubMedRetriever",
    "AwsKendraIndexRetriever",
    "AzureCognitiveSearchRetriever",
    "ChatGPTPluginRetriever",
@@ -33,9 +38,13 @@ __all__ = [
    "DataberryRetriever",
    "ElasticSearchBM25Retriever",
    "KNNRetriever",
    "LlamaIndexGraphRetriever",
    "LlamaIndexRetriever",
    "MergerRetriever",
    "MetalRetriever",
    "MilvusRetriever",
    "PineconeHybridSearchRetriever",
    "PubMedRetriever",
    "RemoteLangChainRetriever",
    "SVMRetriever",
    "SelfQueryRetriever",
@@ -45,5 +54,6 @@ __all__ = [
    "WeaviateHybridSearchRetriever",
    "WikipediaRetriever",
    "ZepRetriever",
    "ZillizRetriever",
    "DocArrayRetriever",
]
@@ -1,5 +1,6 @@
"""Core toolkit implementations."""

from langchain.tools.arxiv.tool import ArxivQueryRun
from langchain.tools.azure_cognitive_services import (
    AzureCogsFormRecognizerTool,
    AzureCogsImageAnalysisTool,
@@ -11,13 +12,15 @@ from langchain.tools.bing_search.tool import BingSearchResults, BingSearchRun
from langchain.tools.brave_search.tool import BraveSearch
from langchain.tools.convert_to_openai import format_tool_to_openai_function
from langchain.tools.ddg_search.tool import DuckDuckGoSearchResults, DuckDuckGoSearchRun
from langchain.tools.file_management.copy import CopyFileTool
from langchain.tools.file_management.delete import DeleteFileTool
from langchain.tools.file_management.file_search import FileSearchTool
from langchain.tools.file_management.list_dir import ListDirectoryTool
from langchain.tools.file_management.move import MoveFileTool
from langchain.tools.file_management.read import ReadFileTool
from langchain.tools.file_management.write import WriteFileTool
from langchain.tools.file_management import (
    CopyFileTool,
    DeleteFileTool,
    FileSearchTool,
    ListDirectoryTool,
    MoveFileTool,
    ReadFileTool,
    WriteFileTool,
)
from langchain.tools.gmail import (
    GmailCreateDraft,
    GmailGetMessage,
@@ -28,8 +31,12 @@ from langchain.tools.gmail import (
from langchain.tools.google_places.tool import GooglePlacesTool
from langchain.tools.google_search.tool import GoogleSearchResults, GoogleSearchRun
from langchain.tools.google_serper.tool import GoogleSerperResults, GoogleSerperRun
from langchain.tools.graphql.tool import BaseGraphQLTool
from langchain.tools.human.tool import HumanInputRun
from langchain.tools.ifttt import IFTTTWebhook
from langchain.tools.interaction.tool import StdInInquireTool
from langchain.tools.jira.tool import JiraAction
from langchain.tools.json.tool import JsonGetValueTool, JsonListKeysTool
from langchain.tools.metaphor_search import MetaphorSearchResults
from langchain.tools.openapi.utils.api_models import APIOperation
from langchain.tools.openapi.utils.openapi_utils import OpenAPISpec
@@ -50,8 +57,33 @@ from langchain.tools.powerbi.tool import (
    QueryPowerBITool,
)
from langchain.tools.pubmed.tool import PubmedQueryRun
from langchain.tools.python.tool import PythonAstREPLTool, PythonREPLTool
from langchain.tools.requests.tool import (
    BaseRequestsTool,
    RequestsDeleteTool,
    RequestsGetTool,
    RequestsPatchTool,
    RequestsPostTool,
    RequestsPutTool,
)
from langchain.tools.scenexplain.tool import SceneXplainTool
from langchain.tools.searx_search.tool import SearxSearchResults, SearxSearchRun
from langchain.tools.shell.tool import ShellTool
from langchain.tools.sleep.tool import SleepTool
from langchain.tools.spark_sql.tool import (
    BaseSparkSQLTool,
    InfoSparkSQLTool,
    ListSparkSQLTool,
    QueryCheckerTool,
    QuerySparkSQLTool,
)
from langchain.tools.sql_database.tool import (
    BaseSQLDatabaseTool,
    InfoSQLDatabaseTool,
    ListSQLDatabaseTool,
    QuerySQLCheckerTool,
    QuerySQLDataBaseTool,
)
from langchain.tools.steamship_image_generation import SteamshipImageGenerationTool
from langchain.tools.vectorstore.tool import (
    VectorStoreQATool,
@@ -65,15 +97,21 @@ from langchain.tools.zapier.tool import ZapierNLAListActions, ZapierNLARunAction
__all__ = [
    "AIPluginTool",
    "APIOperation",
    "ArxivQueryRun",
    "AzureCogsFormRecognizerTool",
    "AzureCogsImageAnalysisTool",
    "AzureCogsSpeech2TextTool",
    "AzureCogsText2SpeechTool",
    "BaseGraphQLTool",
    "BaseRequestsTool",
    "BaseSQLDatabaseTool",
    "BaseSparkSQLTool",
    "BaseTool",
    "BaseTool",
    "BaseTool",
    "BingSearchResults",
    "BingSearchRun",
    "BraveSearch",
    "ClickTool",
    "CopyFileTool",
    "CurrentWebPageTool",
@@ -84,7 +122,6 @@ __all__ = [
    "ExtractTextTool",
    "FileSearchTool",
    "GetElementsTool",
    "SteamshipImageGenerationTool",
    "GmailCreateDraft",
    "GmailGetMessage",
    "GmailGetThread",
@@ -98,18 +135,42 @@ __all__ = [
    "HumanInputRun",
    "IFTTTWebhook",
    "InfoPowerBITool",
    "InfoSQLDatabaseTool",
    "InfoSparkSQLTool",
    "JiraAction",
    "JsonGetValueTool",
    "JsonListKeysTool",
    "ListDirectoryTool",
    "ListPowerBITool",
    "ListSQLDatabaseTool",
    "ListSparkSQLTool",
    "MetaphorSearchResults",
    "MoveFileTool",
    "NavigateBackTool",
    "NavigateTool",
    "OpenAPISpec",
    "OpenWeatherMapQueryRun",
    "PubmedQueryRun",
    "PythonAstREPLTool",
    "PythonREPLTool",
    "QueryCheckerTool",
    "QueryPowerBITool",
    "QuerySQLCheckerTool",
    "QuerySQLDataBaseTool",
    "QuerySparkSQLTool",
    "ReadFileTool",
    "RequestsDeleteTool",
    "RequestsGetTool",
    "RequestsPatchTool",
    "RequestsPostTool",
    "RequestsPutTool",
    "SceneXplainTool",
    "SearxSearchResults",
    "SearxSearchRun",
    "ShellTool",
    "SleepTool",
    "StdInInquireTool",
    "SteamshipImageGenerationTool",
    "StructuredTool",
    "Tool",
    "VectorStoreQATool",
@@ -117,11 +178,9 @@ __all__ = [
    "WikipediaQueryRun",
    "WolframAlphaQueryRun",
    "WriteFileTool",
    "YouTubeSearchTool",
    "ZapierNLAListActions",
    "ZapierNLARunAction",
    "tool",
    "YouTubeSearchTool",
    "BraveSearch",
    "PubmedQueryRun",
    "format_tool_to_openai_function",
    "tool",
]
@@ -40,9 +40,9 @@ class SearxSearchRun(BaseTool):


class SearxSearchResults(BaseTool):
    """Tool that has capability to query a Searx instance and get back json."""
    """Tool that has the capability to query a Searx instance and get back json."""

    name = "Searx Search"
    name = "Searx Search Results"
    description = (
        "A meta search engine."
        "Useful for when you need to answer questions about current events."
@@ -33,7 +33,7 @@ class BaseSQLDatabaseTool(BaseModel):
class QuerySQLDataBaseTool(BaseSQLDatabaseTool, BaseTool):
    """Tool for querying a SQL database."""

    name = "query_sql_db"
    name = "sql_db_query"
    description = """
    Input to this tool is a detailed and correct SQL query, output is a result from the database.
    If the query is not correct, an error message will be returned.
@@ -59,7 +59,7 @@ class QuerySQLDataBaseTool(BaseSQLDatabaseTool, BaseTool):
class InfoSQLDatabaseTool(BaseSQLDatabaseTool, BaseTool):
    """Tool for getting metadata about a SQL database."""

    name = "schema_sql_db"
    name = "sql_db_schema"
    description = """
    Input to this tool is a comma-separated list of tables, output is the schema and sample rows for those tables.

@@ -85,7 +85,7 @@ class InfoSQLDatabaseTool(BaseSQLDatabaseTool, BaseTool):
class ListSQLDatabaseTool(BaseSQLDatabaseTool, BaseTool):
    """Tool for getting tables names."""

    name = "list_tables_sql_db"
    name = "sql_db_list_tables"
    description = "Input is an empty string, output is a comma separated list of tables in the database."

    def _run(
@@ -104,14 +104,14 @@ class ListSQLDatabaseTool(BaseSQLDatabaseTool, BaseTool):
        raise NotImplementedError("ListTablesSqlDbTool does not support async")


class QueryCheckerTool(BaseSQLDatabaseTool, BaseTool):
class QuerySQLCheckerTool(BaseSQLDatabaseTool, BaseTool):
    """Use an LLM to check if a query is correct.
    Adapted from https://www.patterns.app/blog/2023/01/18/crunchbot-sql-analyst-gpt/"""

    template: str = QUERY_CHECKER
    llm: BaseLanguageModel
    llm_chain: LLMChain = Field(init=False)
    name = "query_checker_sql_db"
    name = "sql_db_query_checker"
    description = """
    Use this tool to double check if your query is correct before executing it.
    Always use this tool before executing a query with query_sql_db!
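The rename above exists because `langchain.tools.__init__` re-exports both this module and `langchain.tools.spark_sql.tool`, and a flat namespace cannot hold two distinct `QueryCheckerTool` classes: the later import silently shadows the earlier one. A minimal illustration (each half assumes the respective pre- or post-PR version of the package):

```python
# Before this PR: both modules define QueryCheckerTool, so re-exporting
# them from langchain.tools makes the second import shadow the first.
from langchain.tools.spark_sql.tool import QueryCheckerTool
from langchain.tools.sql_database.tool import QueryCheckerTool  # shadows the Spark one

# After this PR: the SQL variant has a distinct name, so both re-exports
# can live side by side in the same __init__.py.
# from langchain.tools.spark_sql.tool import QueryCheckerTool
# from langchain.tools.sql_database.tool import QuerySQLCheckerTool
```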
@@ -4,40 +4,51 @@ from langchain.utilities.apify import ApifyWrapper
from langchain.utilities.arxiv import ArxivAPIWrapper
from langchain.utilities.awslambda import LambdaWrapper
from langchain.utilities.bash import BashProcess
from langchain.utilities.bibtex import BibtexparserWrapper
from langchain.utilities.bing_search import BingSearchAPIWrapper
from langchain.utilities.brave_search import BraveSearchWrapper
from langchain.utilities.duckduckgo_search import DuckDuckGoSearchAPIWrapper
from langchain.utilities.google_places_api import GooglePlacesAPIWrapper
from langchain.utilities.google_search import GoogleSearchAPIWrapper
from langchain.utilities.google_serper import GoogleSerperAPIWrapper
from langchain.utilities.graphql import GraphQLAPIWrapper
from langchain.utilities.jira import JiraAPIWrapper
from langchain.utilities.max_compute import MaxComputeAPIWrapper
from langchain.utilities.metaphor_search import MetaphorSearchAPIWrapper
from langchain.utilities.openweathermap import OpenWeatherMapAPIWrapper
from langchain.utilities.powerbi import PowerBIDataset
from langchain.utilities.pupmed import PubMedAPIWrapper
from langchain.utilities.python import PythonREPL
from langchain.utilities.scenexplain import SceneXplainAPIWrapper
from langchain.utilities.searx_search import SearxSearchWrapper
from langchain.utilities.serpapi import SerpAPIWrapper
from langchain.utilities.spark_sql import SparkSQL
from langchain.utilities.twilio import TwilioAPIWrapper
from langchain.utilities.wikipedia import WikipediaAPIWrapper
from langchain.utilities.wolfram_alpha import WolframAlphaAPIWrapper
from langchain.utilities.zapier import ZapierNLAWrapper

__all__ = [
    "ApifyWrapper",
    "ArxivAPIWrapper",
    "PubMedAPIWrapper",
    "BashProcess",
    "BibtexparserWrapper",
    "BingSearchAPIWrapper",
    "BraveSearchWrapper",
    "DuckDuckGoSearchAPIWrapper",
    "GooglePlacesAPIWrapper",
    "GoogleSearchAPIWrapper",
    "GoogleSerperAPIWrapper",
    "GraphQLAPIWrapper",
    "JiraAPIWrapper",
    "LambdaWrapper",
    "MaxComputeAPIWrapper",
    "MetaphorSearchAPIWrapper",
    "OpenWeatherMapAPIWrapper",
    "PowerBIDataset",
    "PubMedAPIWrapper",
    "PythonREPL",
    "SceneXplainAPIWrapper",
    "SearxSearchWrapper",
    "SerpAPIWrapper",
    "SparkSQL",
@@ -45,4 +56,5 @@ __all__ = [
    "TwilioAPIWrapper",
    "WikipediaAPIWrapper",
    "WolframAlphaAPIWrapper",
    "ZapierNLAWrapper",
]
@@ -32,38 +32,38 @@ from langchain.vectorstores.weaviate import Weaviate
from langchain.vectorstores.zilliz import Zilliz

__all__ = [
    "AnalyticDB",
    "Annoy",
    "AtlasDB",
    "AwaDB",
    "AzureSearch",
    "Redis",
    "Chroma",
    "Clickhouse",
    "ClickhouseSettings",
    "DeepLake",
    "DocArrayHnswSearch",
    "DocArrayInMemorySearch",
    "ElasticVectorSearch",
    "FAISS",
    "VectorStore",
    "Pinecone",
    "Weaviate",
    "Qdrant",
    "Hologres",
    "LanceDB",
    "MatchingEngine",
    "Milvus",
    "Zilliz",
    "SingleStoreDB",
    "Chroma",
    "OpenSearchVectorSearch",
    "AtlasDB",
    "DeepLake",
    "Annoy",
    "MongoDBAtlasVectorSearch",
    "MyScale",
    "MyScaleSettings",
    "OpenSearchVectorSearch",
    "Pinecone",
    "Qdrant",
    "Redis",
    "SKLearnVectorStore",
    "SingleStoreDB",
    "SupabaseVectorStore",
    "AnalyticDB",
    "Vectara",
    "Tair",
    "LanceDB",
    "DocArrayHnswSearch",
    "DocArrayInMemorySearch",
    "Typesense",
    "Hologres",
    "Clickhouse",
    "ClickhouseSettings",
    "Tigris",
    "MatchingEngine",
    "AwaDB",
    "Typesense",
    "Vectara",
    "VectorStore",
    "Weaviate",
    "Zilliz",
]
poetry.lock (1250 lines changed, generated): file diff suppressed because it is too large.
@@ -108,6 +108,9 @@ nebula3-python = {version = "^3.4.0", optional = true}
langchainplus-sdk = ">=0.0.9"
awadb = {version = "^0.3.3", optional = true}
azure-search-documents = {version = "11.4.0a20230509004", source = "azure-sdk-dev", optional = true}
# now streamlit requires Python >=3.7, !=3.9.7 So, it is commented out.
#streamlit = {version = "^1.18.0", optional = true}


[tool.poetry.group.docs.dependencies]
autodoc_pydantic = "^1.8.0"
@@ -137,6 +140,7 @@ freezegun = "^1.2.2"
responses = "^0.22.0"
pytest-asyncio = "^0.20.3"
lark = "^1.1.5"
pandas = "^2.0.0"
pytest-mock = "^3.10.0"
pytest-socket = "^0.6.0"
syrupy = "^4.0.2"
@@ -169,9 +173,7 @@ pinecone-client = "^2.2.1"
pinecone-text = "^0.4.2"
pymongo = "^4.3.3"
clickhouse-connect = "^0.5.14"
pgvector = "^0.1.6"
transformers = "^4.27.4"
pandas = "^2.0.0"
deeplake = "^3.2.21"
weaviate-client = "^3.15.5"
torch = "^1.0.0"
@@ -309,6 +311,7 @@ extended_testing = [
    "chardet",
    "jq",
    "pdfminer.six",
    "pgvector",
    "pypdf",
    "pymupdf",
    "pypdfium2",
@@ -325,6 +328,8 @@ extended_testing = [
    "html2text",
    "py-trello",
    "scikit-learn",
    # now streamlit requires Python >=3.7, !=3.9.7 So, it is commented out.
    # "streamlit",
    "pyspark",
    "openai"
]
@@ -11,6 +11,7 @@ _EXPECTED = [
    "ConversationalChatAgent",
    "LLMSingleActionAgent",
    "MRKLChain",
    "OpenAIFunctionsAgent",
    "ReActChain",
    "ReActTextWorldAgent",
    "SelfAskWithSearchChain",
@@ -21,7 +21,13 @@ def non_abstract_subclasses(
    return subclasses


_PARSERS_TO_SKIP = {"FakeOutputParser", "BaseOutputParser"}
# parsers defined not in the output_parsers module:
_PARSERS_TO_SKIP = {
    "FakeOutputParser",
    "BaseOutputParser",
    "FinishedOutputParser",
    "RouterOutputParser",
}
_NON_ABSTRACT_PARSERS = non_abstract_subclasses(
    BaseOutputParser, to_skip=_PARSERS_TO_SKIP
)
@@ -61,9 +61,10 @@ def test_test_group_dependencies(poetry_conf: Mapping[str, Any]) -> None:
    test_group_deps = sorted(poetry_conf["group"]["test"]["dependencies"])

    assert test_group_deps == [
        "duckdb-engine",  # Should be removed
        "duckdb-engine",
        "freezegun",
        "lark",  # Should be removed
        "lark",
        "pandas",
        "pytest",
        "pytest-asyncio",
        "pytest-cov",
@@ -18,10 +18,9 @@ def _get_tool_classes(skip_tools_without_default_names: bool) -> List[Type[BaseT
        if isinstance(tool_class, type) and issubclass(tool_class, BaseTool):
            if tool_class in _EXCLUDE:
                continue
            if (
                skip_tools_without_default_names
                and tool_class.__fields__["name"].default is None
            ):
            if skip_tools_without_default_names and tool_class.__fields__[
                "name"
            ].default in [None, ""]:
                continue
            results.append(tool_class)
    return results
@@ -4,15 +4,21 @@ from langchain.tools import __all__ as public_api
_EXPECTED = [
    "AIPluginTool",
    "APIOperation",
    "ArxivQueryRun",
    "AzureCogsFormRecognizerTool",
    "AzureCogsImageAnalysisTool",
    "AzureCogsSpeech2TextTool",
    "AzureCogsText2SpeechTool",
    "BaseGraphQLTool",
    "BaseRequestsTool",
    "BaseSQLDatabaseTool",
    "BaseSparkSQLTool",
    "BaseTool",
    "BaseTool",
    "BaseTool",
    "BingSearchResults",
    "BingSearchRun",
    "BraveSearch",
    "ClickTool",
    "CopyFileTool",
    "CurrentWebPageTool",
@@ -36,18 +42,41 @@ _EXPECTED = [
    "HumanInputRun",
    "IFTTTWebhook",
    "InfoPowerBITool",
    "InfoSQLDatabaseTool",
    "InfoSparkSQLTool",
    "JiraAction",
    "JsonGetValueTool",
    "JsonListKeysTool",
    "ListDirectoryTool",
    "ListPowerBITool",
    "ListSQLDatabaseTool",
    "ListSparkSQLTool",
    "MetaphorSearchResults",
    "MoveFileTool",
    "NavigateBackTool",
    "NavigateTool",
    "OpenAPISpec",
    "OpenWeatherMapQueryRun",
    "PubmedQueryRun",
    "PythonAstREPLTool",
    "PythonREPLTool",
    "QueryCheckerTool",
    "QueryPowerBITool",
    "QuerySQLCheckerTool",
    "QuerySQLDataBaseTool",
    "QuerySparkSQLTool",
    "ReadFileTool",
    "RequestsDeleteTool",
    "RequestsGetTool",
    "RequestsPatchTool",
    "RequestsPostTool",
    "RequestsPutTool",
    "SceneXplainTool",
    "SearxSearchResults",
    "SearxSearchRun",
    "ShellTool",
    "SleepTool",
    "StdInInquireTool",
    "SteamshipImageGenerationTool",
    "StructuredTool",
    "Tool",
@@ -56,13 +85,11 @@ _EXPECTED = [
    "WikipediaQueryRun",
    "WolframAlphaQueryRun",
    "WriteFileTool",
    "YouTubeSearchTool",
    "ZapierNLAListActions",
    "ZapierNLARunAction",
    "tool",
    "YouTubeSearchTool",
    "BraveSearch",
    "PubmedQueryRun",
    "format_tool_to_openai_function",
    "tool",
]