multiple: Remove unnecessary Ruff suppression comments (#21050)

## Summary

I ran `ruff check --extend-select RUF100 -n` to identify `# noqa` comments that weren't having any effect in Ruff, and then ran `ruff check --extend-select RUF100 -n --fix` on select files to remove the unnecessary `# noqa: F401` comments. It's possible that these were needed at some point in the past, but they're not necessary in Ruff v0.1.15 (the version LangChain uses) or in the latest release.
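
To make the pattern concrete, here's a minimal, hypothetical sketch of the kind of file this change touches (`some_package`, `SomeToolkit`, and the `__getattr__` body are illustrative, not taken from the diff). Assuming Ruff's usual handling of `__all__` re-exports, F401 never fires on the `TYPE_CHECKING` import, so RUF100 reports the `# noqa: F401` as an unused suppression and `--fix` deletes it:

```python
# Hypothetical __init__.py sketching the pattern cleaned up in this PR.
from typing import TYPE_CHECKING, Any

if TYPE_CHECKING:
    # "SomeToolkit" is re-exported via __all__, so Ruff's F401 (unused import)
    # does not fire here; RUF100 therefore flags the suppression as unused.
    from some_package.toolkit import (
        SomeToolkit,  # noqa: F401
    )

__all__ = ["SomeToolkit"]


def __getattr__(name: str) -> Any:
    # Lazily import the attribute on first access, as the langchain_community
    # packages do via their _module_lookup tables.
    if name == "SomeToolkit":
        from some_package.toolkit import SomeToolkit

        return SomeToolkit
    raise AttributeError(f"module {__name__!r} has no attribute {name!r}")
```

Running the commands above against a file like this is a quick way to reproduce the cleanup locally.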

Co-authored-by: Erick Friis <erick@langchain.dev>

@@ -7,94 +7,94 @@ from typing import TYPE_CHECKING, Any
if TYPE_CHECKING:
from langchain_community.agent_toolkits.ainetwork.toolkit import (
AINetworkToolkit, # noqa: F401
AINetworkToolkit,
)
from langchain_community.agent_toolkits.amadeus.toolkit import (
AmadeusToolkit, # noqa: F401
AmadeusToolkit,
)
from langchain_community.agent_toolkits.azure_ai_services import (
AzureAiServicesToolkit, # noqa: F401
AzureAiServicesToolkit,
)
from langchain_community.agent_toolkits.azure_cognitive_services import (
AzureCognitiveServicesToolkit, # noqa: F401
AzureCognitiveServicesToolkit,
)
from langchain_community.agent_toolkits.cassandra_database.toolkit import (
CassandraDatabaseToolkit, # noqa: F401
)
from langchain_community.agent_toolkits.cogniswitch.toolkit import (
CogniswitchToolkit, # noqa: F401
CogniswitchToolkit,
)
from langchain_community.agent_toolkits.connery import (
ConneryToolkit, # noqa: F401
ConneryToolkit,
)
from langchain_community.agent_toolkits.file_management.toolkit import (
FileManagementToolkit, # noqa: F401
FileManagementToolkit,
)
from langchain_community.agent_toolkits.gmail.toolkit import (
GmailToolkit, # noqa: F401
GmailToolkit,
)
from langchain_community.agent_toolkits.jira.toolkit import (
JiraToolkit, # noqa: F401
JiraToolkit,
)
from langchain_community.agent_toolkits.json.base import (
create_json_agent, # noqa: F401
create_json_agent,
)
from langchain_community.agent_toolkits.json.toolkit import (
JsonToolkit, # noqa: F401
JsonToolkit,
)
from langchain_community.agent_toolkits.multion.toolkit import (
MultionToolkit, # noqa: F401
MultionToolkit,
)
from langchain_community.agent_toolkits.nasa.toolkit import (
NasaToolkit, # noqa: F401
NasaToolkit,
)
from langchain_community.agent_toolkits.nla.toolkit import (
NLAToolkit, # noqa: F401
NLAToolkit,
)
from langchain_community.agent_toolkits.office365.toolkit import (
O365Toolkit, # noqa: F401
O365Toolkit,
)
from langchain_community.agent_toolkits.openapi.base import (
create_openapi_agent, # noqa: F401
create_openapi_agent,
)
from langchain_community.agent_toolkits.openapi.toolkit import (
OpenAPIToolkit, # noqa: F401
OpenAPIToolkit,
)
from langchain_community.agent_toolkits.playwright.toolkit import (
PlayWrightBrowserToolkit, # noqa: F401
PlayWrightBrowserToolkit,
)
from langchain_community.agent_toolkits.polygon.toolkit import (
PolygonToolkit, # noqa: F401
PolygonToolkit,
)
from langchain_community.agent_toolkits.powerbi.base import (
create_pbi_agent, # noqa: F401
create_pbi_agent,
)
from langchain_community.agent_toolkits.powerbi.chat_base import (
create_pbi_chat_agent, # noqa: F401
create_pbi_chat_agent,
)
from langchain_community.agent_toolkits.powerbi.toolkit import (
PowerBIToolkit, # noqa: F401
PowerBIToolkit,
)
from langchain_community.agent_toolkits.slack.toolkit import (
SlackToolkit, # noqa: F401
SlackToolkit,
)
from langchain_community.agent_toolkits.spark_sql.base import (
create_spark_sql_agent, # noqa: F401
create_spark_sql_agent,
)
from langchain_community.agent_toolkits.spark_sql.toolkit import (
SparkSQLToolkit, # noqa: F401
SparkSQLToolkit,
)
from langchain_community.agent_toolkits.sql.base import (
create_sql_agent, # noqa: F401
create_sql_agent,
)
from langchain_community.agent_toolkits.sql.toolkit import (
SQLDatabaseToolkit, # noqa: F401
SQLDatabaseToolkit,
)
from langchain_community.agent_toolkits.steam.toolkit import (
SteamToolkit, # noqa: F401
SteamToolkit,
)
from langchain_community.agent_toolkits.zapier.toolkit import (
ZapierToolkit, # noqa: F401
ZapierToolkit,
)
__all__ = [

@@ -11,75 +11,75 @@ from typing import TYPE_CHECKING, Any
if TYPE_CHECKING:
from langchain_community.callbacks.aim_callback import (
AimCallbackHandler, # noqa: F401
AimCallbackHandler,
)
from langchain_community.callbacks.argilla_callback import (
ArgillaCallbackHandler, # noqa: F401
ArgillaCallbackHandler,
)
from langchain_community.callbacks.arize_callback import (
ArizeCallbackHandler, # noqa: F401
ArizeCallbackHandler,
)
from langchain_community.callbacks.arthur_callback import (
ArthurCallbackHandler, # noqa: F401
ArthurCallbackHandler,
)
from langchain_community.callbacks.clearml_callback import (
ClearMLCallbackHandler, # noqa: F401
ClearMLCallbackHandler,
)
from langchain_community.callbacks.comet_ml_callback import (
CometCallbackHandler, # noqa: F401
CometCallbackHandler,
)
from langchain_community.callbacks.context_callback import (
ContextCallbackHandler, # noqa: F401
ContextCallbackHandler,
)
from langchain_community.callbacks.fiddler_callback import (
FiddlerCallbackHandler, # noqa: F401
FiddlerCallbackHandler,
)
from langchain_community.callbacks.flyte_callback import (
FlyteCallbackHandler, # noqa: F401
FlyteCallbackHandler,
)
from langchain_community.callbacks.human import (
HumanApprovalCallbackHandler, # noqa: F401
HumanApprovalCallbackHandler,
)
from langchain_community.callbacks.infino_callback import (
InfinoCallbackHandler, # noqa: F401
InfinoCallbackHandler,
)
from langchain_community.callbacks.labelstudio_callback import (
LabelStudioCallbackHandler, # noqa: F401
LabelStudioCallbackHandler,
)
from langchain_community.callbacks.llmonitor_callback import (
LLMonitorCallbackHandler, # noqa: F401
LLMonitorCallbackHandler,
)
from langchain_community.callbacks.manager import ( # noqa: F401
from langchain_community.callbacks.manager import (
get_openai_callback,
wandb_tracing_enabled,
)
from langchain_community.callbacks.mlflow_callback import (
MlflowCallbackHandler, # noqa: F401
MlflowCallbackHandler,
)
from langchain_community.callbacks.openai_info import (
OpenAICallbackHandler, # noqa: F401
OpenAICallbackHandler,
)
from langchain_community.callbacks.promptlayer_callback import (
PromptLayerCallbackHandler, # noqa: F401
PromptLayerCallbackHandler,
)
from langchain_community.callbacks.sagemaker_callback import (
SageMakerCallbackHandler, # noqa: F401
SageMakerCallbackHandler,
)
from langchain_community.callbacks.streamlit import ( # noqa: F401
from langchain_community.callbacks.streamlit import (
LLMThoughtLabeler,
StreamlitCallbackHandler,
)
from langchain_community.callbacks.trubrics_callback import (
TrubricsCallbackHandler, # noqa: F401
TrubricsCallbackHandler,
)
from langchain_community.callbacks.uptrain_callback import (
UpTrainCallbackHandler, # noqa: F401
UpTrainCallbackHandler,
)
from langchain_community.callbacks.wandb_callback import (
WandbCallbackHandler, # noqa: F401
WandbCallbackHandler,
)
from langchain_community.callbacks.whylabs_callback import (
WhyLabsCallbackHandler, # noqa: F401
WhyLabsCallbackHandler,
)

@@ -54,7 +54,7 @@ class ArgillaCallbackHandler(BaseCallbackHandler):
REPO_URL: str = "https://github.com/argilla-io/argilla"
ISSUES_URL: str = f"{REPO_URL}/issues"
BLOG_URL: str = "https://docs.argilla.io/en/latest/tutorials_and_integrations/integrations/use_argilla_callback_in_langchain.html" # noqa: E501
BLOG_URL: str = "https://docs.argilla.io/en/latest/tutorials_and_integrations/integrations/use_argilla_callback_in_langchain.html"
DEFAULT_API_URL: str = "http://localhost:6900"
@@ -92,7 +92,7 @@ class ArgillaCallbackHandler(BaseCallbackHandler):
# Import Argilla (not via `import_argilla` to keep hints in IDEs)
try:
import argilla as rg # noqa: F401
import argilla as rg
self.ARGILLA_VERSION = rg.__version__
except ImportError:

@@ -26,7 +26,7 @@ if TYPE_CHECKING:
def import_clearml() -> Any:
"""Import the clearml python package and raise an error if it is not installed."""
try:
import clearml # noqa: F401
import clearml
except ImportError:
raise ImportError(
"To use the clearml callback manager you need to have the `clearml` python "

@@ -22,7 +22,7 @@ LANGCHAIN_MODEL_NAME = "langchain-model"
def import_comet_ml() -> Any:
"""Import comet_ml and raise an error if it is not installed."""
try:
import comet_ml # noqa: F401
import comet_ml
except ImportError:
raise ImportError(
"To use the comet_ml callback manager you need to have the "

@@ -11,14 +11,14 @@ from langchain_core.outputs import LLMResult
def import_context() -> Any:
"""Import the `getcontext` package."""
try:
import getcontext # noqa: F401
import getcontext
from getcontext.generated.models import (
Conversation,
Message,
MessageRole,
Rating,
)
from getcontext.token import Credential # noqa: F401
from getcontext.token import Credential
except ImportError:
raise ImportError(
"To use the context callback manager you need to have the "

@@ -55,7 +55,7 @@ _dataset_dict = {
def import_fiddler() -> Any:
"""Import the fiddler python package and raise an error if it is not installed."""
try:
import fiddler # noqa: F401
import fiddler
except ImportError:
raise ImportError(
"To use fiddler callback handler you need to have `fiddler-client`"
@@ -169,7 +169,7 @@ class FiddlerCallbackHandler(BaseCallbackHandler):
)
print( # noqa: T201
f"adding model {self.model} to project {self.project}."
"This only has to be done once." # noqa: T201
"This only has to be done once."
)
try:
self.fiddler_client.add_model(
@@ -181,7 +181,7 @@ class FiddlerCallbackHandler(BaseCallbackHandler):
except Exception as e:
print( # noqa: T201
f"Error adding model {self.model}: {e}."
"Fiddler integration will not work." # noqa: T201
"Fiddler integration will not work."
)
raise e

@@ -27,8 +27,8 @@ logger = logging.getLogger(__name__)
def import_flytekit() -> Tuple[flytekit, renderer]:
"""Import flytekit and flytekitplugins-deck-standard."""
try:
import flytekit # noqa: F401
from flytekitplugins.deck import renderer # noqa: F401
import flytekit
from flytekitplugins.deck import renderer
except ImportError:
raise ImportError(
"To use the flyte callback manager you need"

@@ -25,13 +25,13 @@ def import_comet_llm_api() -> SimpleNamespace:
"""Import comet_llm api and raise an error if it is not installed."""
try:
from comet_llm import (
experiment_info, # noqa: F401
flush, # noqa: F401
experiment_info,
flush,
)
from comet_llm.chains import api as chain_api # noqa: F401
from comet_llm.chains import api as chain_api
from comet_llm.chains import (
chain, # noqa: F401
span, # noqa: F401
chain,
span,
)
except ImportError:

@@ -21,7 +21,7 @@ from langchain_community.callbacks.utils import (
def import_wandb() -> Any:
"""Import the wandb python package and raise an error if it is not installed."""
try:
import wandb # noqa: F401
import wandb
except ImportError:
raise ImportError(
"To use the wandb callback manager you need to have the `wandb` python "

@@ -28,16 +28,16 @@ def import_langkit(
The imported langkit module.
"""
try:
import langkit # noqa: F401
import langkit.regexes # noqa: F401
import langkit.textstat # noqa: F401
import langkit
import langkit.regexes
import langkit.textstat
if sentiment:
import langkit.sentiment # noqa: F401
import langkit.sentiment
if toxicity:
import langkit.toxicity # noqa: F401
import langkit.toxicity
if themes:
import langkit.themes # noqa: F401
import langkit.themes
except ImportError:
raise ImportError(
"To use the whylabs callback manager you need to have the `langkit` python "

@@ -23,30 +23,30 @@ from typing import TYPE_CHECKING, Any
if TYPE_CHECKING:
from langchain_community.chat_loaders.base import (
BaseChatLoader, # noqa: F401
BaseChatLoader,
)
from langchain_community.chat_loaders.facebook_messenger import (
FolderFacebookMessengerChatLoader, # noqa: F401
SingleFileFacebookMessengerChatLoader, # noqa: F401
FolderFacebookMessengerChatLoader,
SingleFileFacebookMessengerChatLoader,
)
from langchain_community.chat_loaders.gmail import (
GMailLoader, # noqa: F401
GMailLoader,
)
from langchain_community.chat_loaders.imessage import (
IMessageChatLoader, # noqa: F401
IMessageChatLoader,
)
from langchain_community.chat_loaders.langsmith import (
LangSmithDatasetChatLoader, # noqa: F401
LangSmithRunChatLoader, # noqa: F401
LangSmithDatasetChatLoader,
LangSmithRunChatLoader,
)
from langchain_community.chat_loaders.slack import (
SlackChatLoader, # noqa: F401
SlackChatLoader,
)
from langchain_community.chat_loaders.telegram import (
TelegramChatLoader, # noqa: F401
TelegramChatLoader,
)
from langchain_community.chat_loaders.whatsapp import (
WhatsAppChatLoader, # noqa: F401
WhatsAppChatLoader,
)
__all__ = [

@@ -20,67 +20,67 @@ from typing import TYPE_CHECKING, Any
if TYPE_CHECKING:
from langchain_community.chat_message_histories.astradb import (
AstraDBChatMessageHistory, # noqa: F401
AstraDBChatMessageHistory,
)
from langchain_community.chat_message_histories.cassandra import (
CassandraChatMessageHistory, # noqa: F401
CassandraChatMessageHistory,
)
from langchain_community.chat_message_histories.cosmos_db import (
CosmosDBChatMessageHistory, # noqa: F401
CosmosDBChatMessageHistory,
)
from langchain_community.chat_message_histories.dynamodb import (
DynamoDBChatMessageHistory, # noqa: F401
DynamoDBChatMessageHistory,
)
from langchain_community.chat_message_histories.elasticsearch import (
ElasticsearchChatMessageHistory, # noqa: F401
ElasticsearchChatMessageHistory,
)
from langchain_community.chat_message_histories.file import (
FileChatMessageHistory, # noqa: F401
FileChatMessageHistory,
)
from langchain_community.chat_message_histories.firestore import (
FirestoreChatMessageHistory, # noqa: F401
FirestoreChatMessageHistory,
)
from langchain_community.chat_message_histories.in_memory import (
ChatMessageHistory, # noqa: F401
ChatMessageHistory,
)
from langchain_community.chat_message_histories.momento import (
MomentoChatMessageHistory, # noqa: F401
MomentoChatMessageHistory,
)
from langchain_community.chat_message_histories.mongodb import (
MongoDBChatMessageHistory, # noqa: F401
MongoDBChatMessageHistory,
)
from langchain_community.chat_message_histories.neo4j import (
Neo4jChatMessageHistory, # noqa: F401
Neo4jChatMessageHistory,
)
from langchain_community.chat_message_histories.postgres import (
PostgresChatMessageHistory, # noqa: F401
PostgresChatMessageHistory,
)
from langchain_community.chat_message_histories.redis import (
RedisChatMessageHistory, # noqa: F401
RedisChatMessageHistory,
)
from langchain_community.chat_message_histories.rocksetdb import (
RocksetChatMessageHistory, # noqa: F401
RocksetChatMessageHistory,
)
from langchain_community.chat_message_histories.singlestoredb import (
SingleStoreDBChatMessageHistory, # noqa: F401
SingleStoreDBChatMessageHistory,
)
from langchain_community.chat_message_histories.sql import (
SQLChatMessageHistory, # noqa: F401
SQLChatMessageHistory,
)
from langchain_community.chat_message_histories.streamlit import (
StreamlitChatMessageHistory, # noqa: F401
StreamlitChatMessageHistory,
)
from langchain_community.chat_message_histories.tidb import (
TiDBChatMessageHistory, # noqa: F401
TiDBChatMessageHistory,
)
from langchain_community.chat_message_histories.upstash_redis import (
UpstashRedisChatMessageHistory, # noqa: F401
UpstashRedisChatMessageHistory,
)
from langchain_community.chat_message_histories.xata import (
XataChatMessageHistory, # noqa: F401
XataChatMessageHistory,
)
from langchain_community.chat_message_histories.zep import (
ZepChatMessageHistory, # noqa: F401
ZepChatMessageHistory,
)
__all__ = [

@@ -23,7 +23,7 @@ class XataChatMessageHistory(BaseChatMessageHistory):
) -> None:
"""Initialize with Xata client."""
try:
from xata.client import XataClient # noqa: F401
from xata.client import XataClient
except ImportError:
raise ImportError(
"Could not import xata python package. "

@@ -22,145 +22,145 @@ from typing import TYPE_CHECKING, Any
if TYPE_CHECKING:
from langchain_community.chat_models.anthropic import (
ChatAnthropic, # noqa: F401
ChatAnthropic,
)
from langchain_community.chat_models.anyscale import (
ChatAnyscale, # noqa: F401
ChatAnyscale,
)
from langchain_community.chat_models.azure_openai import (
AzureChatOpenAI, # noqa: F401
AzureChatOpenAI,
)
from langchain_community.chat_models.baichuan import (
ChatBaichuan, # noqa: F401
ChatBaichuan,
)
from langchain_community.chat_models.baidu_qianfan_endpoint import (
QianfanChatEndpoint, # noqa: F401
QianfanChatEndpoint,
)
from langchain_community.chat_models.bedrock import (
BedrockChat, # noqa: F401
BedrockChat,
)
from langchain_community.chat_models.cohere import (
ChatCohere, # noqa: F401
ChatCohere,
)
from langchain_community.chat_models.coze import (
ChatCoze, # noqa: F401
ChatCoze,
)
from langchain_community.chat_models.databricks import (
ChatDatabricks, # noqa: F401
ChatDatabricks,
)
from langchain_community.chat_models.deepinfra import (
ChatDeepInfra, # noqa: F401
ChatDeepInfra,
)
from langchain_community.chat_models.ernie import (
ErnieBotChat, # noqa: F401
ErnieBotChat,
)
from langchain_community.chat_models.everlyai import (
ChatEverlyAI, # noqa: F401
ChatEverlyAI,
)
from langchain_community.chat_models.fake import (
FakeListChatModel, # noqa: F401
FakeListChatModel,
)
from langchain_community.chat_models.fireworks import (
ChatFireworks, # noqa: F401
ChatFireworks,
)
from langchain_community.chat_models.friendli import (
ChatFriendli, # noqa: F401
ChatFriendli,
)
from langchain_community.chat_models.gigachat import (
GigaChat, # noqa: F401
GigaChat,
)
from langchain_community.chat_models.google_palm import (
ChatGooglePalm, # noqa: F401
ChatGooglePalm,
)
from langchain_community.chat_models.gpt_router import (
GPTRouter, # noqa: F401
GPTRouter,
)
from langchain_community.chat_models.huggingface import (
ChatHuggingFace, # noqa: F401
ChatHuggingFace,
)
from langchain_community.chat_models.human import (
HumanInputChatModel, # noqa: F401
HumanInputChatModel,
)
from langchain_community.chat_models.hunyuan import (
ChatHunyuan, # noqa: F401
ChatHunyuan,
)
from langchain_community.chat_models.javelin_ai_gateway import (
ChatJavelinAIGateway, # noqa: F401
ChatJavelinAIGateway,
)
from langchain_community.chat_models.jinachat import (
JinaChat, # noqa: F401
JinaChat,
)
from langchain_community.chat_models.kinetica import (
ChatKinetica, # noqa: F401
ChatKinetica,
)
from langchain_community.chat_models.konko import (
ChatKonko, # noqa: F401
ChatKonko,
)
from langchain_community.chat_models.litellm import (
ChatLiteLLM, # noqa: F401
ChatLiteLLM,
)
from langchain_community.chat_models.litellm_router import (
ChatLiteLLMRouter, # noqa: F401
ChatLiteLLMRouter,
)
from langchain_community.chat_models.llama_edge import (
LlamaEdgeChatService, # noqa: F401
LlamaEdgeChatService,
)
from langchain_community.chat_models.maritalk import (
ChatMaritalk, # noqa: F401
ChatMaritalk,
)
from langchain_community.chat_models.minimax import (
MiniMaxChat, # noqa: F401
MiniMaxChat,
)
from langchain_community.chat_models.mlflow import (
ChatMlflow, # noqa: F401
ChatMlflow,
)
from langchain_community.chat_models.mlflow_ai_gateway import (
ChatMLflowAIGateway, # noqa: F401
ChatMLflowAIGateway,
)
from langchain_community.chat_models.mlx import (
ChatMLX, # noqa: F401
ChatMLX,
)
from langchain_community.chat_models.ollama import (
ChatOllama, # noqa: F401
ChatOllama,
)
from langchain_community.chat_models.openai import (
ChatOpenAI, # noqa: F401
ChatOpenAI,
)
from langchain_community.chat_models.pai_eas_endpoint import (
PaiEasChatEndpoint, # noqa: F401
PaiEasChatEndpoint,
)
from langchain_community.chat_models.perplexity import (
ChatPerplexity, # noqa: F401
ChatPerplexity,
)
from langchain_community.chat_models.premai import (
ChatPremAI, # noqa: F401
ChatPremAI,
)
from langchain_community.chat_models.promptlayer_openai import (
PromptLayerChatOpenAI, # noqa: F401
PromptLayerChatOpenAI,
)
from langchain_community.chat_models.solar import (
SolarChat, # noqa: F401
SolarChat,
)
from langchain_community.chat_models.sparkllm import (
ChatSparkLLM, # noqa: F401
ChatSparkLLM,
)
from langchain_community.chat_models.tongyi import (
ChatTongyi, # noqa: F401
ChatTongyi,
)
from langchain_community.chat_models.vertexai import (
ChatVertexAI, # noqa: F401
ChatVertexAI,
)
from langchain_community.chat_models.volcengine_maas import (
VolcEngineMaasChat, # noqa: F401
VolcEngineMaasChat,
)
from langchain_community.chat_models.yandex import (
ChatYandexGPT, # noqa: F401
ChatYandexGPT,
)
from langchain_community.chat_models.yuan2 import (
ChatYuan2, # noqa: F401
ChatYuan2,
)
from langchain_community.chat_models.zhipuai import (
ChatZhipuAI, # noqa: F401
ChatZhipuAI,
)
__all__ = [

@@ -120,7 +120,7 @@ class ChatPerplexity(BaseChatModel):
values, "pplx_api_key", "PPLX_API_KEY"
)
try:
import openai # noqa: F401
import openai
except ImportError:
raise ImportError(
"Could not import openai python package. "

@@ -14,16 +14,16 @@ from typing import TYPE_CHECKING, Any
if TYPE_CHECKING:
from langchain_community.cross_encoders.base import (
BaseCrossEncoder, # noqa: F401
BaseCrossEncoder,
)
from langchain_community.cross_encoders.fake import (
FakeCrossEncoder, # noqa: F401
FakeCrossEncoder,
)
from langchain_community.cross_encoders.huggingface import (
HuggingFaceCrossEncoder, # noqa: F401
HuggingFaceCrossEncoder,
)
from langchain_community.cross_encoders.sagemaker_endpoint import (
SagemakerEndpointCrossEncoder, # noqa: F401
SagemakerEndpointCrossEncoder,
)
__all__ = [

@@ -20,13 +20,13 @@ from typing import TYPE_CHECKING, Any
if TYPE_CHECKING:
from langchain_community.docstore.arbitrary_fn import (
DocstoreFn, # noqa: F401
DocstoreFn,
)
from langchain_community.docstore.in_memory import (
InMemoryDocstore, # noqa: F401
InMemoryDocstore,
)
from langchain_community.docstore.wikipedia import (
Wikipedia, # noqa: F401
Wikipedia,
)
__all__ = ["DocstoreFn", "InMemoryDocstore", "Wikipedia"]

@@ -6,10 +6,10 @@ if TYPE_CHECKING:
JinaRerank, # noqa: F401
)
from langchain_community.document_compressors.llmlingua_filter import (
LLMLinguaCompressor, # noqa: F401
LLMLinguaCompressor,
)
from langchain_community.document_compressors.openvino_rerank import (
OpenVINOReranker, # noqa: F401
OpenVINOReranker,
)
__all__ = ["LLMLinguaCompressor", "OpenVINOReranker"]

@@ -20,496 +20,496 @@ from typing import TYPE_CHECKING, Any
if TYPE_CHECKING:
from langchain_community.document_loaders.acreom import (
AcreomLoader, # noqa: F401
AcreomLoader,
)
from langchain_community.document_loaders.airbyte import (
AirbyteCDKLoader, # noqa: F401
AirbyteGongLoader, # noqa: F401
AirbyteHubspotLoader, # noqa: F401
AirbyteSalesforceLoader, # noqa: F401
AirbyteShopifyLoader, # noqa: F401
AirbyteStripeLoader, # noqa: F401
AirbyteTypeformLoader, # noqa: F401
AirbyteZendeskSupportLoader, # noqa: F401
AirbyteCDKLoader,
AirbyteGongLoader,
AirbyteHubspotLoader,
AirbyteSalesforceLoader,
AirbyteShopifyLoader,
AirbyteStripeLoader,
AirbyteTypeformLoader,
AirbyteZendeskSupportLoader,
)
from langchain_community.document_loaders.airbyte_json import (
AirbyteJSONLoader, # noqa: F401
AirbyteJSONLoader,
)
from langchain_community.document_loaders.airtable import (
AirtableLoader, # noqa: F401
AirtableLoader,
)
from langchain_community.document_loaders.apify_dataset import (
ApifyDatasetLoader, # noqa: F401
ApifyDatasetLoader,
)
from langchain_community.document_loaders.arcgis_loader import (
ArcGISLoader, # noqa: F401
ArcGISLoader,
)
from langchain_community.document_loaders.arxiv import (
ArxivLoader, # noqa: F401
ArxivLoader,
)
from langchain_community.document_loaders.assemblyai import (
AssemblyAIAudioLoaderById, # noqa: F401
AssemblyAIAudioTranscriptLoader, # noqa: F401
AssemblyAIAudioLoaderById,
AssemblyAIAudioTranscriptLoader,
)
from langchain_community.document_loaders.astradb import (
AstraDBLoader, # noqa: F401
AstraDBLoader,
)
from langchain_community.document_loaders.async_html import (
AsyncHtmlLoader, # noqa: F401
AsyncHtmlLoader,
)
from langchain_community.document_loaders.athena import (
AthenaLoader, # noqa: F401
AthenaLoader,
)
from langchain_community.document_loaders.azlyrics import (
AZLyricsLoader, # noqa: F401
AZLyricsLoader,
)
from langchain_community.document_loaders.azure_ai_data import (
AzureAIDataLoader, # noqa: F401
AzureAIDataLoader,
)
from langchain_community.document_loaders.azure_blob_storage_container import (
AzureBlobStorageContainerLoader, # noqa: F401
AzureBlobStorageContainerLoader,
)
from langchain_community.document_loaders.azure_blob_storage_file import (
AzureBlobStorageFileLoader, # noqa: F401
AzureBlobStorageFileLoader,
)
from langchain_community.document_loaders.bibtex import (
BibtexLoader, # noqa: F401
BibtexLoader,
)
from langchain_community.document_loaders.bigquery import (
BigQueryLoader, # noqa: F401
BigQueryLoader,
)
from langchain_community.document_loaders.bilibili import (
BiliBiliLoader, # noqa: F401
BiliBiliLoader,
)
from langchain_community.document_loaders.blackboard import (
BlackboardLoader, # noqa: F401
BlackboardLoader,
)
from langchain_community.document_loaders.blob_loaders import (
Blob, # noqa: F401
BlobLoader, # noqa: F401
FileSystemBlobLoader, # noqa: F401
YoutubeAudioLoader, # noqa: F401
Blob,
BlobLoader,
FileSystemBlobLoader,
YoutubeAudioLoader,
)
from langchain_community.document_loaders.blockchain import (
BlockchainDocumentLoader, # noqa: F401
BlockchainDocumentLoader,
)
from langchain_community.document_loaders.brave_search import (
BraveSearchLoader, # noqa: F401
BraveSearchLoader,
)
from langchain_community.document_loaders.browserbase import (
BrowserbaseLoader, # noqa: F401
BrowserbaseLoader,
)
from langchain_community.document_loaders.browserless import (
BrowserlessLoader, # noqa: F401
BrowserlessLoader,
)
from langchain_community.document_loaders.cassandra import (
CassandraLoader, # noqa: F401
CassandraLoader,
)
from langchain_community.document_loaders.chatgpt import (
ChatGPTLoader, # noqa: F401
ChatGPTLoader,
)
from langchain_community.document_loaders.chm import (
UnstructuredCHMLoader, # noqa: F401
UnstructuredCHMLoader,
)
from langchain_community.document_loaders.chromium import (
AsyncChromiumLoader, # noqa: F401
AsyncChromiumLoader,
)
from langchain_community.document_loaders.college_confidential import (
CollegeConfidentialLoader, # noqa: F401
CollegeConfidentialLoader,
)
from langchain_community.document_loaders.concurrent import (
ConcurrentLoader, # noqa: F401
ConcurrentLoader,
)
from langchain_community.document_loaders.confluence import (
ConfluenceLoader, # noqa: F401
ConfluenceLoader,
)
from langchain_community.document_loaders.conllu import (
CoNLLULoader, # noqa: F401
CoNLLULoader,
)
from langchain_community.document_loaders.couchbase import (
CouchbaseLoader, # noqa: F401
CouchbaseLoader,
)
from langchain_community.document_loaders.csv_loader import (
CSVLoader, # noqa: F401
UnstructuredCSVLoader, # noqa: F401
CSVLoader,
UnstructuredCSVLoader,
)
from langchain_community.document_loaders.cube_semantic import (
CubeSemanticLoader, # noqa: F401
CubeSemanticLoader,
)
from langchain_community.document_loaders.datadog_logs import (
DatadogLogsLoader, # noqa: F401
DatadogLogsLoader,
)
from langchain_community.document_loaders.dataframe import (
DataFrameLoader, # noqa: F401
DataFrameLoader,
)
from langchain_community.document_loaders.diffbot import (
DiffbotLoader, # noqa: F401
DiffbotLoader,
)
from langchain_community.document_loaders.directory import (
DirectoryLoader, # noqa: F401
DirectoryLoader,
)
from langchain_community.document_loaders.discord import (
DiscordChatLoader, # noqa: F401
DiscordChatLoader,
)
from langchain_community.document_loaders.doc_intelligence import (
AzureAIDocumentIntelligenceLoader, # noqa: F401
AzureAIDocumentIntelligenceLoader,
)
from langchain_community.document_loaders.docugami import (
DocugamiLoader, # noqa: F401
DocugamiLoader,
)
from langchain_community.document_loaders.docusaurus import (
DocusaurusLoader, # noqa: F401
DocusaurusLoader,
)
from langchain_community.document_loaders.dropbox import (
DropboxLoader, # noqa: F401
DropboxLoader,
)
from langchain_community.document_loaders.duckdb_loader import (
DuckDBLoader, # noqa: F401
DuckDBLoader,
)
from langchain_community.document_loaders.email import (
OutlookMessageLoader, # noqa: F401
UnstructuredEmailLoader, # noqa: F401
OutlookMessageLoader,
UnstructuredEmailLoader,
)
from langchain_community.document_loaders.epub import (
UnstructuredEPubLoader, # noqa: F401
UnstructuredEPubLoader,
)
from langchain_community.document_loaders.etherscan import (
EtherscanLoader, # noqa: F401
EtherscanLoader,
)
from langchain_community.document_loaders.evernote import (
EverNoteLoader, # noqa: F401
EverNoteLoader,
)
from langchain_community.document_loaders.excel import (
UnstructuredExcelLoader, # noqa: F401
UnstructuredExcelLoader,
)
from langchain_community.document_loaders.facebook_chat import (
FacebookChatLoader, # noqa: F401
FacebookChatLoader,
)
from langchain_community.document_loaders.fauna import (
FaunaLoader, # noqa: F401
FaunaLoader,
)
from langchain_community.document_loaders.figma import (
FigmaFileLoader, # noqa: F401
FigmaFileLoader,
)
from langchain_community.document_loaders.firecrawl import (
FireCrawlLoader, # noqa: F401
FireCrawlLoader,
)
from langchain_community.document_loaders.gcs_directory import (
GCSDirectoryLoader, # noqa: F401
GCSDirectoryLoader,
)
from langchain_community.document_loaders.gcs_file import (
GCSFileLoader, # noqa: F401
GCSFileLoader,
)
from langchain_community.document_loaders.geodataframe import (
GeoDataFrameLoader, # noqa: F401
GeoDataFrameLoader,
)
from langchain_community.document_loaders.git import (
GitLoader, # noqa: F401
GitLoader,
)
from langchain_community.document_loaders.gitbook import (
GitbookLoader, # noqa: F401
GitbookLoader,
)
from langchain_community.document_loaders.github import (
GithubFileLoader, # noqa: F401
GitHubIssuesLoader, # noqa: F401
GithubFileLoader,
GitHubIssuesLoader,
)
from langchain_community.document_loaders.glue_catalog import (
GlueCatalogLoader, # noqa: F401
)
from langchain_community.document_loaders.google_speech_to_text import (
GoogleSpeechToTextLoader, # noqa: F401
GoogleSpeechToTextLoader,
)
from langchain_community.document_loaders.googledrive import (
GoogleDriveLoader, # noqa: F401
GoogleDriveLoader,
)
from langchain_community.document_loaders.gutenberg import (
GutenbergLoader, # noqa: F401
GutenbergLoader,
)
from langchain_community.document_loaders.hn import (
HNLoader, # noqa: F401
HNLoader,
)
from langchain_community.document_loaders.html import (
UnstructuredHTMLLoader, # noqa: F401
UnstructuredHTMLLoader,
)
from langchain_community.document_loaders.html_bs import (
BSHTMLLoader, # noqa: F401
BSHTMLLoader,
)
from langchain_community.document_loaders.hugging_face_dataset import (
HuggingFaceDatasetLoader, # noqa: F401
HuggingFaceDatasetLoader,
)
from langchain_community.document_loaders.hugging_face_model import (
HuggingFaceModelLoader, # noqa: F401
HuggingFaceModelLoader,
)
from langchain_community.document_loaders.ifixit import (
IFixitLoader, # noqa: F401
IFixitLoader,
)
from langchain_community.document_loaders.image import (
UnstructuredImageLoader, # noqa: F401
UnstructuredImageLoader,
)
from langchain_community.document_loaders.image_captions import (
ImageCaptionLoader, # noqa: F401
ImageCaptionLoader,
)
from langchain_community.document_loaders.imsdb import (
IMSDbLoader, # noqa: F401
IMSDbLoader,
)
from langchain_community.document_loaders.iugu import (
IuguLoader, # noqa: F401
IuguLoader,
)
from langchain_community.document_loaders.joplin import (
JoplinLoader, # noqa: F401
JoplinLoader,
)
from langchain_community.document_loaders.json_loader import (
JSONLoader, # noqa: F401
JSONLoader,
)
from langchain_community.document_loaders.lakefs import (
LakeFSLoader, # noqa: F401
LakeFSLoader,
)
from langchain_community.document_loaders.larksuite import (
LarkSuiteDocLoader, # noqa: F401
LarkSuiteDocLoader,
)
from langchain_community.document_loaders.llmsherpa import (
LLMSherpaFileLoader, # noqa: F401
LLMSherpaFileLoader,
)
from langchain_community.document_loaders.markdown import (
UnstructuredMarkdownLoader, # noqa: F401
UnstructuredMarkdownLoader,
)
from langchain_community.document_loaders.mastodon import (
MastodonTootsLoader, # noqa: F401
MastodonTootsLoader,
)
from langchain_community.document_loaders.max_compute import (
MaxComputeLoader, # noqa: F401
MaxComputeLoader,
)
from langchain_community.document_loaders.mediawikidump import (
MWDumpLoader, # noqa: F401
MWDumpLoader,
)
from langchain_community.document_loaders.merge import (
MergedDataLoader, # noqa: F401
MergedDataLoader,
)
from langchain_community.document_loaders.mhtml import (
MHTMLLoader, # noqa: F401
MHTMLLoader,
)
from langchain_community.document_loaders.modern_treasury import (
ModernTreasuryLoader, # noqa: F401
ModernTreasuryLoader,
)
from langchain_community.document_loaders.mongodb import (
MongodbLoader, # noqa: F401
MongodbLoader,
)
from langchain_community.document_loaders.news import (
NewsURLLoader, # noqa: F401
NewsURLLoader,
)
from langchain_community.document_loaders.notebook import (
NotebookLoader, # noqa: F401
NotebookLoader,
)
from langchain_community.document_loaders.notion import (
NotionDirectoryLoader, # noqa: F401
NotionDirectoryLoader,
)
from langchain_community.document_loaders.notiondb import (
NotionDBLoader, # noqa: F401
NotionDBLoader,
)
from langchain_community.document_loaders.obs_directory import (
OBSDirectoryLoader, # noqa: F401
OBSDirectoryLoader,
)
from langchain_community.document_loaders.obs_file import (
OBSFileLoader, # noqa: F401
OBSFileLoader,
)
from langchain_community.document_loaders.obsidian import (
ObsidianLoader, # noqa: F401
ObsidianLoader,
)
from langchain_community.document_loaders.odt import (
UnstructuredODTLoader, # noqa: F401
UnstructuredODTLoader,
)
from langchain_community.document_loaders.onedrive import (
OneDriveLoader, # noqa: F401
OneDriveLoader,
)
from langchain_community.document_loaders.onedrive_file import (
OneDriveFileLoader, # noqa: F401
OneDriveFileLoader,
)
from langchain_community.document_loaders.open_city_data import (
OpenCityDataLoader, # noqa: F401
OpenCityDataLoader,
)
from langchain_community.document_loaders.oracleadb_loader import (
OracleAutonomousDatabaseLoader, # noqa: F401
OracleAutonomousDatabaseLoader,
)
from langchain_community.document_loaders.org_mode import (
UnstructuredOrgModeLoader, # noqa: F401
UnstructuredOrgModeLoader,
)
from langchain_community.document_loaders.pdf import (
AmazonTextractPDFLoader, # noqa: F401
MathpixPDFLoader, # noqa: F401
OnlinePDFLoader, # noqa: F401
PagedPDFSplitter, # noqa: F401
PDFMinerLoader, # noqa: F401
PDFMinerPDFasHTMLLoader, # noqa: F401
PDFPlumberLoader, # noqa: F401
PyMuPDFLoader, # noqa: F401
PyPDFDirectoryLoader, # noqa: F401
PyPDFium2Loader, # noqa: F401
PyPDFLoader, # noqa: F401
UnstructuredPDFLoader, # noqa: F401
AmazonTextractPDFLoader,
MathpixPDFLoader,
OnlinePDFLoader,
PagedPDFSplitter,
PDFMinerLoader,
PDFMinerPDFasHTMLLoader,
PDFPlumberLoader,
PyMuPDFLoader,
PyPDFDirectoryLoader,
PyPDFium2Loader,
PyPDFLoader,
UnstructuredPDFLoader,
)
from langchain_community.document_loaders.pebblo import (
PebbloSafeLoader, # noqa: F401
PebbloSafeLoader,
)
from langchain_community.document_loaders.polars_dataframe import (
PolarsDataFrameLoader, # noqa: F401
PolarsDataFrameLoader,
)
from langchain_community.document_loaders.powerpoint import (
UnstructuredPowerPointLoader, # noqa: F401
UnstructuredPowerPointLoader,
)
from langchain_community.document_loaders.psychic import (
PsychicLoader, # noqa: F401
PsychicLoader,
)
from langchain_community.document_loaders.pubmed import (
PubMedLoader, # noqa: F401
PubMedLoader,
)
from langchain_community.document_loaders.pyspark_dataframe import (
PySparkDataFrameLoader, # noqa: F401
PySparkDataFrameLoader,
)
from langchain_community.document_loaders.python import (
PythonLoader, # noqa: F401
PythonLoader,
)
from langchain_community.document_loaders.readthedocs import (
ReadTheDocsLoader, # noqa: F401
ReadTheDocsLoader,
)
from langchain_community.document_loaders.recursive_url_loader import (
RecursiveUrlLoader, # noqa: F401
RecursiveUrlLoader,
)
from langchain_community.document_loaders.reddit import (
RedditPostsLoader, # noqa: F401
RedditPostsLoader,
)
from langchain_community.document_loaders.roam import (
RoamLoader, # noqa: F401
RoamLoader,
)
from langchain_community.document_loaders.rocksetdb import (
RocksetLoader, # noqa: F401
RocksetLoader,
)
from langchain_community.document_loaders.rss import (
RSSFeedLoader, # noqa: F401
RSSFeedLoader,
)
from langchain_community.document_loaders.rst import (
UnstructuredRSTLoader, # noqa: F401
UnstructuredRSTLoader,
)
from langchain_community.document_loaders.rtf import (
UnstructuredRTFLoader, # noqa: F401
UnstructuredRTFLoader,
)
from langchain_community.document_loaders.s3_directory import (
S3DirectoryLoader, # noqa: F401
S3DirectoryLoader,
)
from langchain_community.document_loaders.s3_file import (
S3FileLoader, # noqa: F401
S3FileLoader,
)
from langchain_community.document_loaders.sharepoint import (
SharePointLoader, # noqa: F401
SharePointLoader,
)
from langchain_community.document_loaders.sitemap import (
SitemapLoader, # noqa: F401
SitemapLoader,
)
from langchain_community.document_loaders.slack_directory import (
SlackDirectoryLoader, # noqa: F401
SlackDirectoryLoader,
)
from langchain_community.document_loaders.snowflake_loader import (
SnowflakeLoader, # noqa: F401
SnowflakeLoader,
)
from langchain_community.document_loaders.spider import (
SpiderLoader, # noqa: F401
SpiderLoader,
)
from langchain_community.document_loaders.spreedly import (
SpreedlyLoader, # noqa: F401
SpreedlyLoader,
)
from langchain_community.document_loaders.sql_database import (
SQLDatabaseLoader, # noqa: F401
SQLDatabaseLoader,
)
from langchain_community.document_loaders.srt import (
SRTLoader, # noqa: F401
SRTLoader,
)
from langchain_community.document_loaders.stripe import (
StripeLoader, # noqa: F401
StripeLoader,
)
from langchain_community.document_loaders.surrealdb import (
SurrealDBLoader, # noqa: F401
SurrealDBLoader,
)
from langchain_community.document_loaders.telegram import (
TelegramChatApiLoader, # noqa: F401
TelegramChatFileLoader, # noqa: F401
TelegramChatLoader, # noqa: F401
TelegramChatApiLoader,
TelegramChatFileLoader,
TelegramChatLoader,
)
from langchain_community.document_loaders.tencent_cos_directory import (
TencentCOSDirectoryLoader, # noqa: F401
TencentCOSDirectoryLoader,
)
from langchain_community.document_loaders.tencent_cos_file import (
TencentCOSFileLoader, # noqa: F401
TencentCOSFileLoader,
)
from langchain_community.document_loaders.tensorflow_datasets import (
TensorflowDatasetLoader, # noqa: F401
TensorflowDatasetLoader,
)
from langchain_community.document_loaders.text import (
TextLoader, # noqa: F401
TextLoader,
)
from langchain_community.document_loaders.tidb import (
TiDBLoader, # noqa: F401
TiDBLoader,
)
from langchain_community.document_loaders.tomarkdown import (
ToMarkdownLoader, # noqa: F401
ToMarkdownLoader,
)
from langchain_community.document_loaders.toml import (
TomlLoader, # noqa: F401
TomlLoader,
)
from langchain_community.document_loaders.trello import (
TrelloLoader, # noqa: F401
TrelloLoader,
)
from langchain_community.document_loaders.tsv import (
UnstructuredTSVLoader, # noqa: F401
UnstructuredTSVLoader,
)
from langchain_community.document_loaders.twitter import (
TwitterTweetLoader, # noqa: F401
TwitterTweetLoader,
)
from langchain_community.document_loaders.unstructured import (
UnstructuredAPIFileIOLoader, # noqa: F401
UnstructuredAPIFileLoader, # noqa: F401
UnstructuredFileIOLoader, # noqa: F401
UnstructuredFileLoader, # noqa: F401
UnstructuredAPIFileIOLoader,
UnstructuredAPIFileLoader,
UnstructuredFileIOLoader,
UnstructuredFileLoader,
)
from langchain_community.document_loaders.url import (
UnstructuredURLLoader, # noqa: F401
UnstructuredURLLoader,
)
from langchain_community.document_loaders.url_playwright import (
PlaywrightURLLoader, # noqa: F401
PlaywrightURLLoader,
)
from langchain_community.document_loaders.url_selenium import (
SeleniumURLLoader, # noqa: F401
SeleniumURLLoader,
)
from langchain_community.document_loaders.vsdx import (
VsdxLoader, # noqa: F401
VsdxLoader,
)
from langchain_community.document_loaders.weather import (
WeatherDataLoader, # noqa: F401
WeatherDataLoader,
)
from langchain_community.document_loaders.web_base import (
WebBaseLoader, # noqa: F401
WebBaseLoader,
)
from langchain_community.document_loaders.whatsapp_chat import (
WhatsAppChatLoader, # noqa: F401
WhatsAppChatLoader,
)
from langchain_community.document_loaders.wikipedia import (
WikipediaLoader, # noqa: F401
WikipediaLoader,
)
from langchain_community.document_loaders.word_document import (
Docx2txtLoader, # noqa: F401
UnstructuredWordDocumentLoader, # noqa: F401
Docx2txtLoader,
UnstructuredWordDocumentLoader,
)
from langchain_community.document_loaders.xml import (
UnstructuredXMLLoader, # noqa: F401
UnstructuredXMLLoader,
)
from langchain_community.document_loaders.xorbits import (
XorbitsLoader, # noqa: F401
XorbitsLoader,
)
from langchain_community.document_loaders.youtube import (
GoogleApiClient, # noqa: F401
GoogleApiYoutubeLoader, # noqa: F401
YoutubeLoader, # noqa: F401
GoogleApiClient,
GoogleApiYoutubeLoader,
YoutubeLoader,
)
from langchain_community.document_loaders.yuque import (
YuqueLoader, # noqa: F401
YuqueLoader,
)
__all__ = [

@@ -200,7 +200,7 @@ class ConfluenceLoader(BaseLoader):
if errors:
raise ValueError(f"Error(s) while validating input: {errors}")
try:
from atlassian import Confluence # noqa: F401
from atlassian import Confluence
except ImportError:
raise ImportError(
"`atlassian` package not found, please run "
@@ -613,8 +613,8 @@ class ConfluenceLoader(BaseLoader):
ocr_languages: Optional[str] = None,
) -> str:
try:
import pytesseract # noqa: F401
from pdf2image import convert_from_bytes # noqa: F401
import pytesseract
from pdf2image import convert_from_bytes
except ImportError:
raise ImportError(
"`pytesseract` or `pdf2image` package not found, "
@@ -647,8 +647,8 @@ class ConfluenceLoader(BaseLoader):
ocr_languages: Optional[str] = None,
) -> str:
try:
import pytesseract # noqa: F401
from PIL import Image # noqa: F401
import pytesseract
from PIL import Image
except ImportError:
raise ImportError(
"`pytesseract` or `Pillow` package not found, "
@@ -673,7 +673,7 @@ class ConfluenceLoader(BaseLoader):
def process_doc(self, link: str) -> str:
try:
import docx2txt # noqa: F401
import docx2txt
except ImportError:
raise ImportError(
"`docx2txt` package not found, please run `pip install docx2txt`"
@@ -697,7 +697,7 @@ class ConfluenceLoader(BaseLoader):
import os
try:
import xlrd # noqa: F401
import xlrd
except ImportError:
raise ImportError("`xlrd` package not found, please run `pip install xlrd`")
@@ -749,10 +749,10 @@ class ConfluenceLoader(BaseLoader):
ocr_languages: Optional[str] = None,
) -> str:
try:
import pytesseract # noqa: F401
from PIL import Image # noqa: F401
from reportlab.graphics import renderPM # noqa: F401
from svglib.svglib import svg2rlg # noqa: F401
import pytesseract
from PIL import Image
from reportlab.graphics import renderPM
from svglib.svglib import svg2rlg
except ImportError:
raise ImportError(
"`pytesseract`, `Pillow`, `reportlab` or `svglib` package not found, "

@@ -35,7 +35,7 @@ class FireCrawlLoader(BaseLoader):
"""
try:
from firecrawl import FirecrawlApp # noqa: F401
from firecrawl import FirecrawlApp
except ImportError:
raise ImportError(
"`firecrawl` package not found, please run `pip install firecrawl-py`"

@@ -51,7 +51,7 @@ class JSONLoader(BaseLoader):
JSON Lines format.
"""
try:
import jq # noqa:F401
import jq
self.jq = jq
except ImportError:

@@ -50,7 +50,7 @@ class NewsURLLoader(BaseLoader):
) -> None:
"""Initialize with file path."""
try:
import newspaper # noqa:F401
import newspaper
self.__version = newspaper.__version__
except ImportError:

@@ -632,7 +632,7 @@ class AmazonTextractPDFLoader(BasePDFLoader):
super().__init__(file_path, headers=headers)
try:
import textractcaller as tc # noqa: F401
import textractcaller as tc
except ImportError:
raise ImportError(
"Could not import amazon-textract-caller python package. "

@@ -20,7 +20,7 @@ class PsychicLoader(BaseLoader):
"""
try:
from psychicapi import ConnectorId, Psychic # noqa: F401
from psychicapi import ConnectorId, Psychic
except ImportError:
raise ImportError(
"`psychicapi` package not found, please run `pip install psychicapi`"

@@ -96,7 +96,7 @@ class RSSFeedLoader(BaseLoader):
def lazy_load(self) -> Iterator[Document]:
try:
import feedparser # noqa:F401
import feedparser
except ImportError:
raise ImportError(
"feedparser package not found, please install it with "

@@ -32,7 +32,7 @@ class SpiderLoader(BaseLoader):
params: Additional parameters for the Spider API.
"""
try:
from spider import Spider # noqa: F401
from spider import Spider
except ImportError:
raise ImportError(
"`spider` package not found, please run `pip install spider-client`"

@@ -20,36 +20,36 @@ from typing import TYPE_CHECKING, Any
if TYPE_CHECKING:
from langchain_community.document_transformers.beautiful_soup_transformer import (
BeautifulSoupTransformer, # noqa: F401
BeautifulSoupTransformer,
)
from langchain_community.document_transformers.doctran_text_extract import (
DoctranPropertyExtractor, # noqa: F401
DoctranPropertyExtractor,
)
from langchain_community.document_transformers.doctran_text_qa import (
DoctranQATransformer, # noqa: F401
DoctranQATransformer,
)
from langchain_community.document_transformers.doctran_text_translate import (
DoctranTextTranslator, # noqa: F401
DoctranTextTranslator,
)
from langchain_community.document_transformers.embeddings_redundant_filter import (
EmbeddingsClusteringFilter, # noqa: F401
EmbeddingsRedundantFilter, # noqa: F401
get_stateful_documents, # noqa: F401
EmbeddingsClusteringFilter,
EmbeddingsRedundantFilter,
get_stateful_documents,
)
from langchain_community.document_transformers.google_translate import (
GoogleTranslateTransformer, # noqa: F401
GoogleTranslateTransformer,
)
from langchain_community.document_transformers.html2text import (
Html2TextTransformer, # noqa: F401
Html2TextTransformer,
)
from langchain_community.document_transformers.long_context_reorder import (
LongContextReorder, # noqa: F401
LongContextReorder,
)
from langchain_community.document_transformers.nuclia_text_transform import (
NucliaTextTransformer, # noqa: F401
NucliaTextTransformer,
)
from langchain_community.document_transformers.openai_functions import (
OpenAIMetadataTagger, # noqa: F401
OpenAIMetadataTagger,
)
__all__ = [

@@ -16,201 +16,201 @@ from typing import TYPE_CHECKING, Any
if TYPE_CHECKING:
from langchain_community.embeddings.aleph_alpha import (
AlephAlphaAsymmetricSemanticEmbedding, # noqa: F401
AlephAlphaSymmetricSemanticEmbedding, # noqa: F401
AlephAlphaAsymmetricSemanticEmbedding,
AlephAlphaSymmetricSemanticEmbedding,
)
from langchain_community.embeddings.anyscale import (
AnyscaleEmbeddings, # noqa: F401
AnyscaleEmbeddings,
)
from langchain_community.embeddings.awa import (
AwaEmbeddings, # noqa: F401
AwaEmbeddings,
)
from langchain_community.embeddings.azure_openai import (
AzureOpenAIEmbeddings, # noqa: F401
AzureOpenAIEmbeddings,
)
from langchain_community.embeddings.baichuan import (
BaichuanTextEmbeddings, # noqa: F401
BaichuanTextEmbeddings,
)
from langchain_community.embeddings.baidu_qianfan_endpoint import (
QianfanEmbeddingsEndpoint, # noqa: F401
QianfanEmbeddingsEndpoint,
)
from langchain_community.embeddings.bedrock import (
BedrockEmbeddings, # noqa: F401
BedrockEmbeddings,
)
from langchain_community.embeddings.bookend import (
BookendEmbeddings, # noqa: F401
BookendEmbeddings,
)
from langchain_community.embeddings.clarifai import (
ClarifaiEmbeddings, # noqa: F401
ClarifaiEmbeddings,
)
from langchain_community.embeddings.cohere import (
CohereEmbeddings, # noqa: F401
CohereEmbeddings,
)
from langchain_community.embeddings.dashscope import (
DashScopeEmbeddings, # noqa: F401
DashScopeEmbeddings,
)
from langchain_community.embeddings.databricks import (
DatabricksEmbeddings, # noqa: F401
DatabricksEmbeddings,
)
from langchain_community.embeddings.deepinfra import (
DeepInfraEmbeddings, # noqa: F401
DeepInfraEmbeddings,
)
from langchain_community.embeddings.edenai import (
EdenAiEmbeddings, # noqa: F401
EdenAiEmbeddings,
)
from langchain_community.embeddings.elasticsearch import (
ElasticsearchEmbeddings, # noqa: F401
ElasticsearchEmbeddings,
)
from langchain_community.embeddings.embaas import (
EmbaasEmbeddings, # noqa: F401
EmbaasEmbeddings,
)
from langchain_community.embeddings.ernie import (
ErnieEmbeddings, # noqa: F401
ErnieEmbeddings,
)
from langchain_community.embeddings.fake import (
DeterministicFakeEmbedding, # noqa: F401
FakeEmbeddings, # noqa: F401
DeterministicFakeEmbedding,
FakeEmbeddings,
)
from langchain_community.embeddings.fastembed import (
FastEmbedEmbeddings, # noqa: F401
FastEmbedEmbeddings,
)
from langchain_community.embeddings.gigachat import (
GigaChatEmbeddings, # noqa: F401
GigaChatEmbeddings,
)
from langchain_community.embeddings.google_palm import (
GooglePalmEmbeddings, # noqa: F401
GooglePalmEmbeddings,
)
from langchain_community.embeddings.gpt4all import (
GPT4AllEmbeddings, # noqa: F401
GPT4AllEmbeddings,
)
from langchain_community.embeddings.gradient_ai import (
GradientEmbeddings, # noqa: F401
GradientEmbeddings,
)
from langchain_community.embeddings.huggingface import (
HuggingFaceBgeEmbeddings, # noqa: F401
HuggingFaceEmbeddings, # noqa: F401
HuggingFaceInferenceAPIEmbeddings, # noqa: F401
HuggingFaceInstructEmbeddings, # noqa: F401
HuggingFaceBgeEmbeddings,
HuggingFaceEmbeddings,
HuggingFaceInferenceAPIEmbeddings,
HuggingFaceInstructEmbeddings,
)
from langchain_community.embeddings.huggingface_hub import (
HuggingFaceHubEmbeddings, # noqa: F401
HuggingFaceHubEmbeddings,
)
from langchain_community.embeddings.infinity import (
InfinityEmbeddings, # noqa: F401
InfinityEmbeddings,
)
from langchain_community.embeddings.infinity_local import (
InfinityEmbeddingsLocal, # noqa: F401
InfinityEmbeddingsLocal,
)
from langchain_community.embeddings.itrex import (
QuantizedBgeEmbeddings, # noqa: F401
QuantizedBgeEmbeddings,
)
from langchain_community.embeddings.javelin_ai_gateway import (
JavelinAIGatewayEmbeddings, # noqa: F401
JavelinAIGatewayEmbeddings,
)
from langchain_community.embeddings.jina import (
JinaEmbeddings, # noqa: F401
JinaEmbeddings,
)
from langchain_community.embeddings.johnsnowlabs import (
JohnSnowLabsEmbeddings, # noqa: F401
JohnSnowLabsEmbeddings,
)
from langchain_community.embeddings.laser import (
LaserEmbeddings, # noqa: F401
LaserEmbeddings,
)
from langchain_community.embeddings.llamacpp import (
LlamaCppEmbeddings, # noqa: F401
LlamaCppEmbeddings,
)
from langchain_community.embeddings.llamafile import (
LlamafileEmbeddings, # noqa: F401
LlamafileEmbeddings,
)
from langchain_community.embeddings.llm_rails import (
LLMRailsEmbeddings, # noqa: F401
LLMRailsEmbeddings,
)
from langchain_community.embeddings.localai import (
LocalAIEmbeddings, # noqa: F401
LocalAIEmbeddings,
)
from langchain_community.embeddings.minimax import (
MiniMaxEmbeddings, # noqa: F401
MiniMaxEmbeddings,
)
from langchain_community.embeddings.mlflow import (
MlflowCohereEmbeddings, # noqa: F401
MlflowEmbeddings, # noqa: F401
MlflowCohereEmbeddings,
MlflowEmbeddings,
)
from langchain_community.embeddings.mlflow_gateway import (
MlflowAIGatewayEmbeddings, # noqa: F401
MlflowAIGatewayEmbeddings,
)
from langchain_community.embeddings.modelscope_hub import (
ModelScopeEmbeddings, # noqa: F401
ModelScopeEmbeddings,
)
from langchain_community.embeddings.mosaicml import (
MosaicMLInstructorEmbeddings, # noqa: F401
MosaicMLInstructorEmbeddings,
)
from langchain_community.embeddings.nemo import (
NeMoEmbeddings, # noqa: F401
NeMoEmbeddings,
)
from langchain_community.embeddings.nlpcloud import (
NLPCloudEmbeddings, # noqa: F401
NLPCloudEmbeddings,
)
from langchain_community.embeddings.oci_generative_ai import (
OCIGenAIEmbeddings, # noqa: F401
OCIGenAIEmbeddings,
)
from langchain_community.embeddings.octoai_embeddings import (
OctoAIEmbeddings, # noqa: F401
OctoAIEmbeddings,
)
from langchain_community.embeddings.ollama import (
OllamaEmbeddings, # noqa: F401
OllamaEmbeddings,
)
from langchain_community.embeddings.openai import (
OpenAIEmbeddings, # noqa: F401
OpenAIEmbeddings,
)
from langchain_community.embeddings.openvino import (
OpenVINOBgeEmbeddings, # noqa: F401
OpenVINOEmbeddings, # noqa: F401
OpenVINOBgeEmbeddings,
OpenVINOEmbeddings,
)
from langchain_community.embeddings.optimum_intel import (
QuantizedBiEncoderEmbeddings, # noqa: F401
QuantizedBiEncoderEmbeddings,
)
from langchain_community.embeddings.premai import (
PremAIEmbeddings, # noqa: F401
PremAIEmbeddings,
)
from langchain_community.embeddings.sagemaker_endpoint import (
SagemakerEndpointEmbeddings, # noqa: F401
SagemakerEndpointEmbeddings,
)
from langchain_community.embeddings.self_hosted import (
SelfHostedEmbeddings, # noqa: F401
SelfHostedEmbeddings,
)
from langchain_community.embeddings.self_hosted_hugging_face import (
SelfHostedHuggingFaceEmbeddings, # noqa: F401
SelfHostedHuggingFaceInstructEmbeddings, # noqa: F401
SelfHostedHuggingFaceEmbeddings,
SelfHostedHuggingFaceInstructEmbeddings,
)
from langchain_community.embeddings.sentence_transformer import (
SentenceTransformerEmbeddings, # noqa: F401
SentenceTransformerEmbeddings,
)
from langchain_community.embeddings.solar import (
SolarEmbeddings, # noqa: F401
SolarEmbeddings,
)
from langchain_community.embeddings.spacy_embeddings import (
SpacyEmbeddings, # noqa: F401
SpacyEmbeddings,
)
from langchain_community.embeddings.sparkllm import (
SparkLLMTextEmbeddings, # noqa: F401
SparkLLMTextEmbeddings,
)
from langchain_community.embeddings.tensorflow_hub import (
TensorflowHubEmbeddings, # noqa: F401
TensorflowHubEmbeddings,
)
from langchain_community.embeddings.vertexai import (
VertexAIEmbeddings, # noqa: F401
VertexAIEmbeddings,
)
from langchain_community.embeddings.volcengine import (
VolcanoEmbeddings, # noqa: F401
VolcanoEmbeddings,
)
from langchain_community.embeddings.voyageai import (
VoyageEmbeddings, # noqa: F401
VoyageEmbeddings,
)
from langchain_community.embeddings.xinference import (
XinferenceEmbeddings, # noqa: F401
XinferenceEmbeddings,
)
from langchain_community.embeddings.yandex import (
YandexGPTEmbeddings, # noqa: F401
YandexGPTEmbeddings,
)
__all__ = [

@@ -5,46 +5,46 @@ from typing import TYPE_CHECKING, Any
if TYPE_CHECKING:
from langchain_community.graphs.arangodb_graph import (
ArangoGraph, # noqa: F401
ArangoGraph,
)
from langchain_community.graphs.falkordb_graph import (
FalkorDBGraph, # noqa: F401
FalkorDBGraph,
)
from langchain_community.graphs.gremlin_graph import (
GremlinGraph, # noqa: F401
GremlinGraph,
)
from langchain_community.graphs.hugegraph import (
HugeGraph, # noqa: F401
HugeGraph,
)
from langchain_community.graphs.kuzu_graph import (
KuzuGraph, # noqa: F401
KuzuGraph,
)
from langchain_community.graphs.memgraph_graph import (
MemgraphGraph, # noqa: F401
MemgraphGraph,
)
from langchain_community.graphs.nebula_graph import (
NebulaGraph, # noqa: F401
NebulaGraph,
)
from langchain_community.graphs.neo4j_graph import (
Neo4jGraph, # noqa: F401
Neo4jGraph,
)
from langchain_community.graphs.neptune_graph import (
NeptuneGraph, # noqa: F401
NeptuneGraph,
)
from langchain_community.graphs.neptune_rdf_graph import (
NeptuneRdfGraph, # noqa: F401
NeptuneRdfGraph,
)
from langchain_community.graphs.networkx_graph import (
NetworkxEntityGraph, # noqa: F401
NetworkxEntityGraph,
)
from langchain_community.graphs.ontotext_graphdb_graph import (
OntotextGraphDBGraph, # noqa: F401
OntotextGraphDBGraph,
)
from langchain_community.graphs.rdf_graph import (
RdfGraph, # noqa: F401
RdfGraph,
)
from langchain_community.graphs.tigergraph_graph import (
TigerGraph, # noqa: F401
TigerGraph,
)
__all__ = [

@@ -23,112 +23,112 @@ from typing import TYPE_CHECKING, Any
if TYPE_CHECKING:
from langchain_community.retrievers.arcee import (
ArceeRetriever, # noqa: F401
ArceeRetriever,
)
from langchain_community.retrievers.arxiv import (
ArxivRetriever, # noqa: F401
ArxivRetriever,
)
from langchain_community.retrievers.azure_cognitive_search import (
AzureCognitiveSearchRetriever, # noqa: F401
AzureCognitiveSearchRetriever,
)
from langchain_community.retrievers.bedrock import (
AmazonKnowledgeBasesRetriever, # noqa: F401
AmazonKnowledgeBasesRetriever,
)
from langchain_community.retrievers.bm25 import (
BM25Retriever, # noqa: F401
BM25Retriever,
)
from langchain_community.retrievers.breebs import (
BreebsRetriever, # noqa: F401
BreebsRetriever,
)
from langchain_community.retrievers.chaindesk import (
ChaindeskRetriever, # noqa: F401
ChaindeskRetriever,
)
from langchain_community.retrievers.chatgpt_plugin_retriever import (
ChatGPTPluginRetriever, # noqa: F401
ChatGPTPluginRetriever,
)
from langchain_community.retrievers.cohere_rag_retriever import (
CohereRagRetriever, # noqa: F401
CohereRagRetriever,
)
from langchain_community.retrievers.docarray import (
DocArrayRetriever, # noqa: F401
DocArrayRetriever,
)
from langchain_community.retrievers.dria_index import (
DriaRetriever, # noqa: F401
DriaRetriever,
)
from langchain_community.retrievers.elastic_search_bm25 import (
ElasticSearchBM25Retriever, # noqa: F401
ElasticSearchBM25Retriever,
)
from langchain_community.retrievers.embedchain import (
EmbedchainRetriever, # noqa: F401
EmbedchainRetriever,
)
from langchain_community.retrievers.google_cloud_documentai_warehouse import (
GoogleDocumentAIWarehouseRetriever, # noqa: F401
GoogleDocumentAIWarehouseRetriever,
)
from langchain_community.retrievers.google_vertex_ai_search import (
GoogleCloudEnterpriseSearchRetriever, # noqa: F401
GoogleVertexAIMultiTurnSearchRetriever, # noqa: F401
GoogleVertexAISearchRetriever, # noqa: F401
GoogleCloudEnterpriseSearchRetriever,
GoogleVertexAIMultiTurnSearchRetriever,
GoogleVertexAISearchRetriever,
)
from langchain_community.retrievers.kay import (
KayAiRetriever, # noqa: F401
KayAiRetriever,
)
from langchain_community.retrievers.kendra import (
AmazonKendraRetriever, # noqa: F401
AmazonKendraRetriever,
)
from langchain_community.retrievers.knn import (
KNNRetriever, # noqa: F401
KNNRetriever,
)
from langchain_community.retrievers.llama_index import (
LlamaIndexGraphRetriever, # noqa: F401
LlamaIndexRetriever, # noqa: F401
LlamaIndexGraphRetriever,
LlamaIndexRetriever,
)
from langchain_community.retrievers.metal import (
MetalRetriever, # noqa: F401
MetalRetriever,
)
from langchain_community.retrievers.milvus import (
MilvusRetriever, # noqa: F401
MilvusRetriever,
)
from langchain_community.retrievers.outline import (
OutlineRetriever, # noqa: F401
OutlineRetriever,
)
from langchain_community.retrievers.pinecone_hybrid_search import (
PineconeHybridSearchRetriever, # noqa: F401
PineconeHybridSearchRetriever,
)
from langchain_community.retrievers.pubmed import (
PubMedRetriever, # noqa: F401
PubMedRetriever,
)
from langchain_community.retrievers.qdrant_sparse_vector_retriever import (
QdrantSparseVectorRetriever, # noqa: F401
QdrantSparseVectorRetriever,
)
from langchain_community.retrievers.remote_retriever import (
RemoteLangChainRetriever, # noqa: F401
RemoteLangChainRetriever,
)
from langchain_community.retrievers.svm import (
SVMRetriever, # noqa: F401
SVMRetriever,
)
from langchain_community.retrievers.tavily_search_api import (
TavilySearchAPIRetriever, # noqa: F401
TavilySearchAPIRetriever,
)
from langchain_community.retrievers.tfidf import (
TFIDFRetriever, # noqa: F401
TFIDFRetriever,
)
from langchain_community.retrievers.vespa_retriever import (
VespaRetriever, # noqa: F401
VespaRetriever,
)
from langchain_community.retrievers.weaviate_hybrid_search import (
WeaviateHybridSearchRetriever, # noqa: F401
WeaviateHybridSearchRetriever,
)
from langchain_community.retrievers.wikipedia import (
WikipediaRetriever, # noqa: F401
WikipediaRetriever,
)
from langchain_community.retrievers.you import (
YouRetriever, # noqa: F401
YouRetriever,
)
from langchain_community.retrievers.zep import (
ZepRetriever, # noqa: F401
ZepRetriever,
)
from langchain_community.retrievers.zilliz import (
ZillizRetriever, # noqa: F401
ZillizRetriever,
)
__all__ = [
@ -177,8 +177,8 @@ _module_lookup = {
"AmazonKnowledgeBasesRetriever": "langchain_community.retrievers.bedrock",
"ArceeRetriever": "langchain_community.retrievers.arcee",
"ArxivRetriever": "langchain_community.retrievers.arxiv",
"AzureAISearchRetriever": "langchain_community.retrievers.azure_ai_search", # noqa: E501
"AzureCognitiveSearchRetriever": "langchain_community.retrievers.azure_ai_search", # noqa: E501
"AzureAISearchRetriever": "langchain_community.retrievers.azure_ai_search",
"AzureCognitiveSearchRetriever": "langchain_community.retrievers.azure_ai_search",
"BM25Retriever": "langchain_community.retrievers.bm25",
"BreebsRetriever": "langchain_community.retrievers.breebs",
"ChaindeskRetriever": "langchain_community.retrievers.chaindesk",

@ -51,7 +51,7 @@ class GoogleDocumentAIWarehouseRetriever(BaseRetriever):
@root_validator()
def validate_environment(cls, values: Dict) -> Dict:
"""Validates the environment."""
try: # noqa: F401
try:
from google.cloud.contentwarehouse_v1 import DocumentServiceClient
except ImportError as exc:
raise ImportError(

@ -12,25 +12,25 @@ The primary goal of these storages is to support caching.
BaseStore --> <name>Store # Examples: MongoDBStore, RedisStore
""" # noqa: E501
"""
import importlib
from typing import TYPE_CHECKING, Any
if TYPE_CHECKING:
from langchain_community.storage.astradb import (
AstraDBByteStore, # noqa: F401
AstraDBStore, # noqa: F401
AstraDBByteStore,
AstraDBStore,
)
from langchain_community.storage.mongodb import (
MongoDBStore, # noqa: F401
MongoDBStore,
)
from langchain_community.storage.redis import (
RedisStore, # noqa: F401
RedisStore,
)
from langchain_community.storage.upstash_redis import (
UpstashRedisByteStore, # noqa: F401
UpstashRedisStore, # noqa: F401
UpstashRedisByteStore,
UpstashRedisStore,
)
__all__ = [

@ -22,53 +22,53 @@ from typing import TYPE_CHECKING, Any
if TYPE_CHECKING:
from langchain_core.tools import (
BaseTool, # noqa: F401
StructuredTool, # noqa: F401
Tool, # noqa: F401
tool, # noqa: F401
BaseTool,
StructuredTool,
Tool,
tool,
)
from langchain_community.tools.ainetwork.app import (
AINAppOps, # noqa: F401
AINAppOps,
)
from langchain_community.tools.ainetwork.owner import (
AINOwnerOps, # noqa: F401
AINOwnerOps,
)
from langchain_community.tools.ainetwork.rule import (
AINRuleOps, # noqa: F401
AINRuleOps,
)
from langchain_community.tools.ainetwork.transfer import (
AINTransfer, # noqa: F401
AINTransfer,
)
from langchain_community.tools.ainetwork.value import (
AINValueOps, # noqa: F401
AINValueOps,
)
from langchain_community.tools.arxiv.tool import (
ArxivQueryRun, # noqa: F401
ArxivQueryRun,
)
from langchain_community.tools.azure_ai_services import (
AzureAiServicesDocumentIntelligenceTool, # noqa: F401
AzureAiServicesImageAnalysisTool, # noqa: F401
AzureAiServicesSpeechToTextTool, # noqa: F401
AzureAiServicesTextAnalyticsForHealthTool, # noqa: F401
AzureAiServicesTextToSpeechTool, # noqa: F401
AzureAiServicesDocumentIntelligenceTool,
AzureAiServicesImageAnalysisTool,
AzureAiServicesSpeechToTextTool,
AzureAiServicesTextAnalyticsForHealthTool,
AzureAiServicesTextToSpeechTool,
)
from langchain_community.tools.azure_cognitive_services import (
AzureCogsFormRecognizerTool, # noqa: F401
AzureCogsImageAnalysisTool, # noqa: F401
AzureCogsSpeech2TextTool, # noqa: F401
AzureCogsText2SpeechTool, # noqa: F401
AzureCogsTextAnalyticsHealthTool, # noqa: F401
AzureCogsFormRecognizerTool,
AzureCogsImageAnalysisTool,
AzureCogsSpeech2TextTool,
AzureCogsText2SpeechTool,
AzureCogsTextAnalyticsHealthTool,
)
from langchain_community.tools.bearly.tool import (
BearlyInterpreterTool, # noqa: F401
BearlyInterpreterTool,
)
from langchain_community.tools.bing_search.tool import (
BingSearchResults, # noqa: F401
BingSearchRun, # noqa: F401
BingSearchResults,
BingSearchRun,
)
from langchain_community.tools.brave_search.tool import (
BraveSearch, # noqa: F401
BraveSearch,
)
from langchain_community.tools.cassandra_database.tool import (
GetSchemaCassandraDatabaseTool, # noqa: F401
@ -76,243 +76,243 @@ if TYPE_CHECKING:
QueryCassandraDatabaseTool, # noqa: F401
)
from langchain_community.tools.cogniswitch.tool import (
CogniswitchKnowledgeRequest, # noqa: F401
CogniswitchKnowledgeSourceFile, # noqa: F401
CogniswitchKnowledgeSourceURL, # noqa: F401
CogniswitchKnowledgeStatus, # noqa: F401
CogniswitchKnowledgeRequest,
CogniswitchKnowledgeSourceFile,
CogniswitchKnowledgeSourceURL,
CogniswitchKnowledgeStatus,
)
from langchain_community.tools.connery import (
ConneryAction, # noqa: F401
ConneryAction,
)
from langchain_community.tools.convert_to_openai import (
format_tool_to_openai_function, # noqa: F401
format_tool_to_openai_function,
)
from langchain_community.tools.ddg_search.tool import (
DuckDuckGoSearchResults, # noqa: F401
DuckDuckGoSearchRun, # noqa: F401
DuckDuckGoSearchResults,
DuckDuckGoSearchRun,
)
from langchain_community.tools.e2b_data_analysis.tool import (
E2BDataAnalysisTool, # noqa: F401
E2BDataAnalysisTool,
)
from langchain_community.tools.edenai import (
EdenAiExplicitImageTool, # noqa: F401
EdenAiObjectDetectionTool, # noqa: F401
EdenAiParsingIDTool, # noqa: F401
EdenAiParsingInvoiceTool, # noqa: F401
EdenAiSpeechToTextTool, # noqa: F401
EdenAiTextModerationTool, # noqa: F401
EdenAiTextToSpeechTool, # noqa: F401
EdenaiTool, # noqa: F401
EdenAiExplicitImageTool,
EdenAiObjectDetectionTool,
EdenAiParsingIDTool,
EdenAiParsingInvoiceTool,
EdenAiSpeechToTextTool,
EdenAiTextModerationTool,
EdenAiTextToSpeechTool,
EdenaiTool,
)
from langchain_community.tools.eleven_labs.text2speech import (
ElevenLabsText2SpeechTool, # noqa: F401
ElevenLabsText2SpeechTool,
)
from langchain_community.tools.file_management import (
CopyFileTool, # noqa: F401
DeleteFileTool, # noqa: F401
FileSearchTool, # noqa: F401
ListDirectoryTool, # noqa: F401
MoveFileTool, # noqa: F401
ReadFileTool, # noqa: F401
WriteFileTool, # noqa: F401
CopyFileTool,
DeleteFileTool,
FileSearchTool,
ListDirectoryTool,
MoveFileTool,
ReadFileTool,
WriteFileTool,
)
from langchain_community.tools.gmail import (
GmailCreateDraft, # noqa: F401
GmailGetMessage, # noqa: F401
GmailGetThread, # noqa: F401
GmailSearch, # noqa: F401
GmailSendMessage, # noqa: F401
GmailCreateDraft,
GmailGetMessage,
GmailGetThread,
GmailSearch,
GmailSendMessage,
)
from langchain_community.tools.google_cloud.texttospeech import (
GoogleCloudTextToSpeechTool, # noqa: F401
GoogleCloudTextToSpeechTool,
)
from langchain_community.tools.google_places.tool import (
GooglePlacesTool, # noqa: F401
GooglePlacesTool,
)
from langchain_community.tools.google_search.tool import (
GoogleSearchResults, # noqa: F401
GoogleSearchRun, # noqa: F401
GoogleSearchResults,
GoogleSearchRun,
)
from langchain_community.tools.google_serper.tool import (
GoogleSerperResults, # noqa: F401
GoogleSerperRun, # noqa: F401
GoogleSerperResults,
GoogleSerperRun,
)
from langchain_community.tools.graphql.tool import (
BaseGraphQLTool, # noqa: F401
BaseGraphQLTool,
)
from langchain_community.tools.human.tool import (
HumanInputRun, # noqa: F401
HumanInputRun,
)
from langchain_community.tools.ifttt import (
IFTTTWebhook, # noqa: F401
IFTTTWebhook,
)
from langchain_community.tools.interaction.tool import (
StdInInquireTool, # noqa: F401
StdInInquireTool,
)
from langchain_community.tools.jira.tool import (
JiraAction, # noqa: F401
JiraAction,
)
from langchain_community.tools.json.tool import (
JsonGetValueTool, # noqa: F401
JsonListKeysTool, # noqa: F401
JsonGetValueTool,
JsonListKeysTool,
)
from langchain_community.tools.merriam_webster.tool import (
MerriamWebsterQueryRun, # noqa: F401
MerriamWebsterQueryRun,
)
from langchain_community.tools.metaphor_search import (
MetaphorSearchResults, # noqa: F401
MetaphorSearchResults,
)
from langchain_community.tools.mojeek_search.tool import (
MojeekSearch, # noqa: F401
MojeekSearch,
)
from langchain_community.tools.nasa.tool import (
NasaAction, # noqa: F401
NasaAction,
)
from langchain_community.tools.office365.create_draft_message import (
O365CreateDraftMessage, # noqa: F401
O365CreateDraftMessage,
)
from langchain_community.tools.office365.events_search import (
O365SearchEvents, # noqa: F401
O365SearchEvents,
)
from langchain_community.tools.office365.messages_search import (
O365SearchEmails, # noqa: F401
O365SearchEmails,
)
from langchain_community.tools.office365.send_event import (
O365SendEvent, # noqa: F401
O365SendEvent,
)
from langchain_community.tools.office365.send_message import (
O365SendMessage, # noqa: F401
O365SendMessage,
)
from langchain_community.tools.office365.utils import (
authenticate, # noqa: F401
authenticate,
)
from langchain_community.tools.openapi.utils.api_models import (
APIOperation, # noqa: F401
APIOperation,
)
from langchain_community.tools.openapi.utils.openapi_utils import (
OpenAPISpec, # noqa: F401
OpenAPISpec,
)
from langchain_community.tools.openweathermap.tool import (
OpenWeatherMapQueryRun, # noqa: F401
OpenWeatherMapQueryRun,
)
from langchain_community.tools.playwright import (
ClickTool, # noqa: F401
CurrentWebPageTool, # noqa: F401
ExtractHyperlinksTool, # noqa: F401
ExtractTextTool, # noqa: F401
GetElementsTool, # noqa: F401
NavigateBackTool, # noqa: F401
NavigateTool, # noqa: F401
ClickTool,
CurrentWebPageTool,
ExtractHyperlinksTool,
ExtractTextTool,
GetElementsTool,
NavigateBackTool,
NavigateTool,
)
from langchain_community.tools.plugin import (
AIPluginTool, # noqa: F401
AIPluginTool,
)
from langchain_community.tools.polygon.aggregates import (
PolygonAggregates, # noqa: F401
PolygonAggregates,
)
from langchain_community.tools.polygon.financials import (
PolygonFinancials, # noqa: F401
PolygonFinancials,
)
from langchain_community.tools.polygon.last_quote import (
PolygonLastQuote, # noqa: F401
PolygonLastQuote,
)
from langchain_community.tools.polygon.ticker_news import (
PolygonTickerNews, # noqa: F401
PolygonTickerNews,
)
from langchain_community.tools.powerbi.tool import (
InfoPowerBITool, # noqa: F401
ListPowerBITool, # noqa: F401
QueryPowerBITool, # noqa: F401
InfoPowerBITool,
ListPowerBITool,
QueryPowerBITool,
)
from langchain_community.tools.pubmed.tool import (
PubmedQueryRun, # noqa: F401
PubmedQueryRun,
)
from langchain_community.tools.reddit_search.tool import (
RedditSearchRun, # noqa: F401
RedditSearchSchema, # noqa: F401
RedditSearchRun,
RedditSearchSchema,
)
from langchain_community.tools.requests.tool import (
BaseRequestsTool, # noqa: F401
RequestsDeleteTool, # noqa: F401
RequestsGetTool, # noqa: F401
RequestsPatchTool, # noqa: F401
RequestsPostTool, # noqa: F401
RequestsPutTool, # noqa: F401
BaseRequestsTool,
RequestsDeleteTool,
RequestsGetTool,
RequestsPatchTool,
RequestsPostTool,
RequestsPutTool,
)
from langchain_community.tools.scenexplain.tool import (
SceneXplainTool, # noqa: F401
SceneXplainTool,
)
from langchain_community.tools.searchapi.tool import (
SearchAPIResults, # noqa: F401
SearchAPIRun, # noqa: F401
SearchAPIResults,
SearchAPIRun,
)
from langchain_community.tools.searx_search.tool import (
SearxSearchResults, # noqa: F401
SearxSearchRun, # noqa: F401
SearxSearchResults,
SearxSearchRun,
)
from langchain_community.tools.shell.tool import (
ShellTool, # noqa: F401
ShellTool,
)
from langchain_community.tools.slack.get_channel import (
SlackGetChannel, # noqa: F401
SlackGetChannel,
)
from langchain_community.tools.slack.get_message import (
SlackGetMessage, # noqa: F401
SlackGetMessage,
)
from langchain_community.tools.slack.schedule_message import (
SlackScheduleMessage, # noqa: F401
SlackScheduleMessage,
)
from langchain_community.tools.slack.send_message import (
SlackSendMessage, # noqa: F401
SlackSendMessage,
)
from langchain_community.tools.sleep.tool import (
SleepTool, # noqa: F401
SleepTool,
)
from langchain_community.tools.spark_sql.tool import (
BaseSparkSQLTool, # noqa: F401
InfoSparkSQLTool, # noqa: F401
ListSparkSQLTool, # noqa: F401
QueryCheckerTool, # noqa: F401
QuerySparkSQLTool, # noqa: F401
BaseSparkSQLTool,
InfoSparkSQLTool,
ListSparkSQLTool,
QueryCheckerTool,
QuerySparkSQLTool,
)
from langchain_community.tools.sql_database.tool import (
BaseSQLDatabaseTool, # noqa: F401
InfoSQLDatabaseTool, # noqa: F401
ListSQLDatabaseTool, # noqa: F401
QuerySQLCheckerTool, # noqa: F401
QuerySQLDataBaseTool, # noqa: F401
BaseSQLDatabaseTool,
InfoSQLDatabaseTool,
ListSQLDatabaseTool,
QuerySQLCheckerTool,
QuerySQLDataBaseTool,
)
from langchain_community.tools.stackexchange.tool import (
StackExchangeTool, # noqa: F401
StackExchangeTool,
)
from langchain_community.tools.steam.tool import (
SteamWebAPIQueryRun, # noqa: F401
SteamWebAPIQueryRun,
)
from langchain_community.tools.steamship_image_generation import (
SteamshipImageGenerationTool, # noqa: F401
SteamshipImageGenerationTool,
)
from langchain_community.tools.vectorstore.tool import (
VectorStoreQATool, # noqa: F401
VectorStoreQAWithSourcesTool, # noqa: F401
VectorStoreQATool,
VectorStoreQAWithSourcesTool,
)
from langchain_community.tools.wikipedia.tool import (
WikipediaQueryRun, # noqa: F401
WikipediaQueryRun,
)
from langchain_community.tools.wolfram_alpha.tool import (
WolframAlphaQueryRun, # noqa: F401
WolframAlphaQueryRun,
)
from langchain_community.tools.yahoo_finance_news import (
YahooFinanceNewsTool, # noqa: F401
YahooFinanceNewsTool,
)
from langchain_community.tools.you.tool import (
YouSearchTool, # noqa: F401
YouSearchTool,
)
from langchain_community.tools.youtube.search import (
YouTubeSearchTool, # noqa: F401
YouTubeSearchTool,
)
from langchain_community.tools.zapier.tool import (
ZapierNLAListActions, # noqa: F401
ZapierNLARunAction, # noqa: F401
ZapierNLAListActions,
ZapierNLARunAction,
)
__all__ = [

@ -23,8 +23,8 @@ def import_google() -> Tuple[Request, Credentials]:
"""
# google-auth-httplib2
try:
from google.auth.transport.requests import Request # noqa: F401
from google.oauth2.credentials import Credentials # noqa: F401
from google.auth.transport.requests import Request
from google.oauth2.credentials import Credentials
except ImportError:
raise ImportError(
"You need to install google-auth-httplib2 to use this toolkit. "

@ -26,8 +26,8 @@ def lazy_import_playwright_browsers() -> Tuple[Type[AsyncBrowser], Type[SyncBrow
AsyncBrowser and SyncBrowser classes.
"""
try:
from playwright.async_api import Browser as AsyncBrowser # noqa: F401
from playwright.sync_api import Browser as SyncBrowser # noqa: F401
from playwright.async_api import Browser as AsyncBrowser
from playwright.sync_api import Browser as SyncBrowser
except ImportError:
raise ImportError(
"The 'playwright' package is required to use the playwright tools."

@ -8,161 +8,161 @@ from typing import TYPE_CHECKING, Any
if TYPE_CHECKING:
from langchain_community.utilities.alpha_vantage import (
AlphaVantageAPIWrapper, # noqa: F401
AlphaVantageAPIWrapper,
)
from langchain_community.utilities.apify import (
ApifyWrapper, # noqa: F401
ApifyWrapper,
)
from langchain_community.utilities.arcee import (
ArceeWrapper, # noqa: F401
ArceeWrapper,
)
from langchain_community.utilities.arxiv import (
ArxivAPIWrapper, # noqa: F401
ArxivAPIWrapper,
)
from langchain_community.utilities.awslambda import (
LambdaWrapper, # noqa: F401
LambdaWrapper,
)
from langchain_community.utilities.bibtex import (
BibtexparserWrapper, # noqa: F401
BibtexparserWrapper,
)
from langchain_community.utilities.bing_search import (
BingSearchAPIWrapper, # noqa: F401
BingSearchAPIWrapper,
)
from langchain_community.utilities.brave_search import (
BraveSearchWrapper, # noqa: F401
BraveSearchWrapper,
)
from langchain_community.utilities.dria_index import (
DriaAPIWrapper, # noqa: F401
DriaAPIWrapper,
)
from langchain_community.utilities.duckduckgo_search import (
DuckDuckGoSearchAPIWrapper, # noqa: F401
DuckDuckGoSearchAPIWrapper,
)
from langchain_community.utilities.golden_query import (
GoldenQueryAPIWrapper, # noqa: F401
GoldenQueryAPIWrapper,
)
from langchain_community.utilities.google_finance import (
GoogleFinanceAPIWrapper, # noqa: F401
GoogleFinanceAPIWrapper,
)
from langchain_community.utilities.google_jobs import (
GoogleJobsAPIWrapper, # noqa: F401
GoogleJobsAPIWrapper,
)
from langchain_community.utilities.google_lens import (
GoogleLensAPIWrapper, # noqa: F401
GoogleLensAPIWrapper,
)
from langchain_community.utilities.google_places_api import (
GooglePlacesAPIWrapper, # noqa: F401
GooglePlacesAPIWrapper,
)
from langchain_community.utilities.google_scholar import (
GoogleScholarAPIWrapper, # noqa: F401
GoogleScholarAPIWrapper,
)
from langchain_community.utilities.google_search import (
GoogleSearchAPIWrapper, # noqa: F401
GoogleSearchAPIWrapper,
)
from langchain_community.utilities.google_serper import (
GoogleSerperAPIWrapper, # noqa: F401
GoogleSerperAPIWrapper,
)
from langchain_community.utilities.google_trends import (
GoogleTrendsAPIWrapper, # noqa: F401
GoogleTrendsAPIWrapper,
)
from langchain_community.utilities.graphql import (
GraphQLAPIWrapper, # noqa: F401
GraphQLAPIWrapper,
)
from langchain_community.utilities.infobip import (
InfobipAPIWrapper, # noqa: F401
InfobipAPIWrapper,
)
from langchain_community.utilities.jira import (
JiraAPIWrapper, # noqa: F401
JiraAPIWrapper,
)
from langchain_community.utilities.max_compute import (
MaxComputeAPIWrapper, # noqa: F401
MaxComputeAPIWrapper,
)
from langchain_community.utilities.merriam_webster import (
MerriamWebsterAPIWrapper, # noqa: F401
MerriamWebsterAPIWrapper,
)
from langchain_community.utilities.metaphor_search import (
MetaphorSearchAPIWrapper, # noqa: F401
MetaphorSearchAPIWrapper,
)
from langchain_community.utilities.mojeek_search import (
MojeekSearchAPIWrapper, # noqa: F401
MojeekSearchAPIWrapper,
)
from langchain_community.utilities.nasa import (
NasaAPIWrapper, # noqa: F401
NasaAPIWrapper,
)
from langchain_community.utilities.nvidia_riva import (
AudioStream, # noqa: F401
NVIDIARivaASR, # noqa: F401
NVIDIARivaStream, # noqa: F401
NVIDIARivaTTS, # noqa: F401
RivaASR, # noqa: F401
RivaTTS, # noqa: F401
AudioStream,
NVIDIARivaASR,
NVIDIARivaStream,
NVIDIARivaTTS,
RivaASR,
RivaTTS,
)
from langchain_community.utilities.openweathermap import (
OpenWeatherMapAPIWrapper, # noqa: F401
OpenWeatherMapAPIWrapper,
)
from langchain_community.utilities.outline import (
OutlineAPIWrapper, # noqa: F401
OutlineAPIWrapper,
)
from langchain_community.utilities.passio_nutrition_ai import (
NutritionAIAPI, # noqa: F401
NutritionAIAPI,
)
from langchain_community.utilities.portkey import (
Portkey, # noqa: F401
Portkey,
)
from langchain_community.utilities.powerbi import (
PowerBIDataset, # noqa: F401
PowerBIDataset,
)
from langchain_community.utilities.pubmed import (
PubMedAPIWrapper, # noqa: F401
PubMedAPIWrapper,
)
from langchain_community.utilities.python import (
PythonREPL, # noqa: F401
PythonREPL,
)
from langchain_community.utilities.requests import (
Requests, # noqa: F401
RequestsWrapper, # noqa: F401
TextRequestsWrapper, # noqa: F401
Requests,
RequestsWrapper,
TextRequestsWrapper,
)
from langchain_community.utilities.scenexplain import (
SceneXplainAPIWrapper, # noqa: F401
SceneXplainAPIWrapper,
)
from langchain_community.utilities.searchapi import (
SearchApiAPIWrapper, # noqa: F401
SearchApiAPIWrapper,
)
from langchain_community.utilities.searx_search import (
SearxSearchWrapper, # noqa: F401
SearxSearchWrapper,
)
from langchain_community.utilities.serpapi import (
SerpAPIWrapper, # noqa: F401
SerpAPIWrapper,
)
from langchain_community.utilities.spark_sql import (
SparkSQL, # noqa: F401
SparkSQL,
)
from langchain_community.utilities.sql_database import (
SQLDatabase, # noqa: F401
SQLDatabase,
)
from langchain_community.utilities.stackexchange import (
StackExchangeAPIWrapper, # noqa: F401
StackExchangeAPIWrapper,
)
from langchain_community.utilities.steam import (
SteamWebAPIWrapper, # noqa: F401
SteamWebAPIWrapper,
)
from langchain_community.utilities.tensorflow_datasets import (
TensorflowDatasets, # noqa: F401
TensorflowDatasets,
)
from langchain_community.utilities.twilio import (
TwilioAPIWrapper, # noqa: F401
TwilioAPIWrapper,
)
from langchain_community.utilities.wikipedia import (
WikipediaAPIWrapper, # noqa: F401
WikipediaAPIWrapper,
)
from langchain_community.utilities.wolfram_alpha import (
WolframAlphaAPIWrapper, # noqa: F401
WolframAlphaAPIWrapper,
)
from langchain_community.utilities.you import (
YouSearchAPIWrapper, # noqa: F401
YouSearchAPIWrapper,
)
from langchain_community.utilities.zapier import (
ZapierNLAWrapper, # noqa: F401
ZapierNLAWrapper,
)
__all__ = [

@ -451,7 +451,7 @@ class CassandraDatabase:
# If a session is not provided, create one using cassio if available
# dynamically import cassio to avoid circular imports
try:
import cassio.config # noqa: F401
import cassio.config
except ImportError:
raise ValueError(
"cassio package not found, please install with" " `pip install cassio`"

@ -24,267 +24,267 @@ from typing import TYPE_CHECKING, Any
if TYPE_CHECKING:
from langchain_core.vectorstores import (
VectorStore, # noqa: F401
VectorStore,
)
from langchain_community.vectorstores.alibabacloud_opensearch import (
AlibabaCloudOpenSearch, # noqa: F401
AlibabaCloudOpenSearchSettings, # noqa: F401
AlibabaCloudOpenSearch,
AlibabaCloudOpenSearchSettings,
)
from langchain_community.vectorstores.analyticdb import (
AnalyticDB, # noqa: F401
AnalyticDB,
)
from langchain_community.vectorstores.annoy import (
Annoy, # noqa: F401
Annoy,
)
from langchain_community.vectorstores.apache_doris import (
ApacheDoris, # noqa: F401
ApacheDoris,
)
from langchain_community.vectorstores.astradb import (
AstraDB, # noqa: F401
AstraDB,
)
from langchain_community.vectorstores.atlas import (
AtlasDB, # noqa: F401
AtlasDB,
)
from langchain_community.vectorstores.awadb import (
AwaDB, # noqa: F401
AwaDB,
)
from langchain_community.vectorstores.azure_cosmos_db import (
AzureCosmosDBVectorSearch, # noqa: F401
AzureCosmosDBVectorSearch,
)
from langchain_community.vectorstores.azuresearch import (
AzureSearch, # noqa: F401
AzureSearch,
)
from langchain_community.vectorstores.bageldb import (
Bagel, # noqa: F401
Bagel,
)
from langchain_community.vectorstores.baiducloud_vector_search import (
BESVectorStore, # noqa: F401
BESVectorStore,
)
from langchain_community.vectorstores.baiduvectordb import (
BaiduVectorDB, # noqa: F401
BaiduVectorDB,
)
from langchain_community.vectorstores.bigquery_vector_search import (
BigQueryVectorSearch, # noqa: F401
BigQueryVectorSearch,
)
from langchain_community.vectorstores.cassandra import (
Cassandra, # noqa: F401
Cassandra,
)
from langchain_community.vectorstores.chroma import (
Chroma, # noqa: F401
Chroma,
)
from langchain_community.vectorstores.clarifai import (
Clarifai, # noqa: F401
Clarifai,
)
from langchain_community.vectorstores.clickhouse import (
Clickhouse, # noqa: F401
ClickhouseSettings, # noqa: F401
Clickhouse,
ClickhouseSettings,
)
from langchain_community.vectorstores.couchbase import (
CouchbaseVectorStore, # noqa: F401
CouchbaseVectorStore,
)
from langchain_community.vectorstores.dashvector import (
DashVector, # noqa: F401
DashVector,
)
from langchain_community.vectorstores.databricks_vector_search import (
DatabricksVectorSearch, # noqa: F401
DatabricksVectorSearch,
)
from langchain_community.vectorstores.deeplake import (
DeepLake, # noqa: F401
DeepLake,
)
from langchain_community.vectorstores.dingo import (
Dingo, # noqa: F401
Dingo,
)
from langchain_community.vectorstores.docarray import (
DocArrayHnswSearch, # noqa: F401
DocArrayInMemorySearch, # noqa: F401
DocArrayHnswSearch,
DocArrayInMemorySearch,
)
from langchain_community.vectorstores.documentdb import (
DocumentDBVectorSearch, # noqa: F401
DocumentDBVectorSearch,
)
from langchain_community.vectorstores.duckdb import (
DuckDB, # noqa: F401
DuckDB,
)
from langchain_community.vectorstores.ecloud_vector_search import (
EcloudESVectorStore, # noqa: F401
EcloudESVectorStore,
)
from langchain_community.vectorstores.elastic_vector_search import (
ElasticKnnSearch, # noqa: F401
ElasticVectorSearch, # noqa: F401
ElasticKnnSearch,
ElasticVectorSearch,
)
from langchain_community.vectorstores.elasticsearch import (
ElasticsearchStore, # noqa: F401
ElasticsearchStore,
)
from langchain_community.vectorstores.epsilla import (
Epsilla, # noqa: F401
Epsilla,
)
from langchain_community.vectorstores.faiss import (
FAISS, # noqa: F401
FAISS,
)
from langchain_community.vectorstores.hanavector import (
HanaDB, # noqa: F401
HanaDB,
)
from langchain_community.vectorstores.hologres import (
Hologres, # noqa: F401
Hologres,
)
from langchain_community.vectorstores.infinispanvs import (
InfinispanVS, # noqa: F401
InfinispanVS,
)
from langchain_community.vectorstores.inmemory import (
InMemoryVectorStore, # noqa: F401
InMemoryVectorStore,
)
from langchain_community.vectorstores.kdbai import (
KDBAI, # noqa: F401
KDBAI,
)
from langchain_community.vectorstores.kinetica import (
DistanceStrategy, # noqa: F401
Kinetica, # noqa: F401
KineticaSettings, # noqa: F401
DistanceStrategy,
Kinetica,
KineticaSettings,
)
from langchain_community.vectorstores.lancedb import (
LanceDB, # noqa: F401
LanceDB,
)
from langchain_community.vectorstores.lantern import (
Lantern, # noqa: F401
Lantern,
)
from langchain_community.vectorstores.llm_rails import (
LLMRails, # noqa: F401
LLMRails,
)
from langchain_community.vectorstores.marqo import (
Marqo, # noqa: F401
Marqo,
)
from langchain_community.vectorstores.matching_engine import (
MatchingEngine, # noqa: F401
MatchingEngine,
)
from langchain_community.vectorstores.meilisearch import (
Meilisearch, # noqa: F401
Meilisearch,
)
from langchain_community.vectorstores.milvus import (
Milvus, # noqa: F401
Milvus,
)
from langchain_community.vectorstores.momento_vector_index import (
MomentoVectorIndex, # noqa: F401
MomentoVectorIndex,
)
from langchain_community.vectorstores.mongodb_atlas import (
MongoDBAtlasVectorSearch, # noqa: F401
MongoDBAtlasVectorSearch,
)
from langchain_community.vectorstores.myscale import (
MyScale, # noqa: F401
MyScaleSettings, # noqa: F401
MyScale,
MyScaleSettings,
)
from langchain_community.vectorstores.neo4j_vector import (
Neo4jVector, # noqa: F401
Neo4jVector,
)
from langchain_community.vectorstores.opensearch_vector_search import (
OpenSearchVectorSearch, # noqa: F401
OpenSearchVectorSearch,
)
from langchain_community.vectorstores.pathway import (
PathwayVectorClient, # noqa: F401
PathwayVectorClient,
)
from langchain_community.vectorstores.pgembedding import (
PGEmbedding, # noqa: F401
PGEmbedding,
)
from langchain_community.vectorstores.pgvector import (
PGVector, # noqa: F401
PGVector,
)
from langchain_community.vectorstores.pinecone import (
Pinecone, # noqa: F401
Pinecone,
)
from langchain_community.vectorstores.qdrant import (
Qdrant, # noqa: F401
Qdrant,
)
from langchain_community.vectorstores.redis import (
Redis, # noqa: F401
Redis,
)
from langchain_community.vectorstores.relyt import (
Relyt, # noqa: F401
Relyt,
)
from langchain_community.vectorstores.rocksetdb import (
Rockset, # noqa: F401
Rockset,
)
from langchain_community.vectorstores.scann import (
ScaNN, # noqa: F401
ScaNN,
)
from langchain_community.vectorstores.semadb import (
SemaDB, # noqa: F401
SemaDB,
)
from langchain_community.vectorstores.singlestoredb import (
SingleStoreDB, # noqa: F401
SingleStoreDB,
)
from langchain_community.vectorstores.sklearn import (
SKLearnVectorStore, # noqa: F401
SKLearnVectorStore,
)
from langchain_community.vectorstores.sqlitevss import (
SQLiteVSS, # noqa: F401
SQLiteVSS,
)
from langchain_community.vectorstores.starrocks import (
StarRocks, # noqa: F401
StarRocks,
)
from langchain_community.vectorstores.supabase import (
SupabaseVectorStore, # noqa: F401
SupabaseVectorStore,
)
from langchain_community.vectorstores.surrealdb import (
SurrealDBStore, # noqa: F401
SurrealDBStore,
)
from langchain_community.vectorstores.tair import (
Tair, # noqa: F401
Tair,
)
from langchain_community.vectorstores.tencentvectordb import (
TencentVectorDB, # noqa: F401
TencentVectorDB,
)
from langchain_community.vectorstores.thirdai_neuraldb import (
NeuralDBVectorStore, # noqa: F401
NeuralDBVectorStore,
)
from langchain_community.vectorstores.tidb_vector import (
TiDBVectorStore, # noqa: F401
TiDBVectorStore,
)
from langchain_community.vectorstores.tigris import (
Tigris, # noqa: F401
Tigris,
)
from langchain_community.vectorstores.tiledb import (
TileDB, # noqa: F401
TileDB,
)
from langchain_community.vectorstores.timescalevector import (
TimescaleVector, # noqa: F401
TimescaleVector,
)
from langchain_community.vectorstores.typesense import (
Typesense, # noqa: F401
Typesense,
)
from langchain_community.vectorstores.upstash import (
UpstashVectorStore, # noqa: F401
UpstashVectorStore,
)
from langchain_community.vectorstores.usearch import (
USearch, # noqa: F401
USearch,
)
from langchain_community.vectorstores.vald import (
Vald, # noqa: F401
Vald,
)
from langchain_community.vectorstores.vdms import (
VDMS, # noqa: F401
VDMS,
)
from langchain_community.vectorstores.vearch import (
Vearch, # noqa: F401
Vearch,
)
from langchain_community.vectorstores.vectara import (
Vectara, # noqa: F401
Vectara,
)
from langchain_community.vectorstores.vespa import (
VespaStore, # noqa: F401
VespaStore,
)
from langchain_community.vectorstores.vlite import (
VLite, # noqa: F401
VLite,
)
from langchain_community.vectorstores.weaviate import (
Weaviate, # noqa: F401
Weaviate,
)
from langchain_community.vectorstores.yellowbrick import (
Yellowbrick, # noqa: F401
Yellowbrick,
)
from langchain_community.vectorstores.zep import (
ZepVectorStore, # noqa: F401
ZepVectorStore,
)
from langchain_community.vectorstores.zilliz import (
Zilliz, # noqa: F401
Zilliz,
)
__all__ = [

@ -27,7 +27,7 @@ class XataVectorStore(VectorStore):
) -> None:
"""Initialize with Xata client."""
try:
from xata.client import XataClient # noqa: F401
from xata.client import XataClient
except ImportError:
raise ImportError(
"Could not import xata python package. "

@ -81,7 +81,7 @@ class LLMMathChain(Chain):
return [self.output_key]
def _evaluate_expression(self, expression: str) -> str:
import numexpr # noqa: F401
import numexpr
try:
local_dict = {"pi": math.pi, "e": math.e}

@ -46,7 +46,7 @@ class JsonEditDistanceEvaluator(StringEvaluator):
self._string_distance = string_distance
else:
try:
from rapidfuzz import distance as rfd # noqa: F401
from rapidfuzz import distance as rfd
except ImportError:
raise ImportError(
"The default string_distance operator for the "

@ -74,7 +74,7 @@ class JsonSchemaEvaluator(StringEvaluator):
return node
def _validate(self, prediction: Any, schema: Any) -> dict:
from jsonschema import ValidationError, validate # noqa: F401
from jsonschema import ValidationError, validate
try:
validate(instance=prediction, schema=schema)

@ -275,7 +275,7 @@ class OpenAIEmbeddings(BaseModel, Embeddings):
# If tiktoken flag set to False
if not self.tiktoken_enabled:
try:
from transformers import AutoTokenizer # noqa: F401
from transformers import AutoTokenizer
except ImportError:
raise ValueError(
"Could not import transformers python package. "
