langchain[patch]: remove unused imports (#14680)

Co-authored-by: Bagatur <baskaryan@gmail.com>
pull/14784/head
Harrison Chase 5 months ago committed by GitHub
parent a0064330b1
commit 16399fd61d
No known key found for this signature in database
GPG Key ID: 4AEE18F83AFDEB23

@ -0,0 +1,71 @@
import os
from langchain_community.graphs import Neo4jGraph
from langchain_community.graphs.neo4j_graph import (
node_properties_query,
rel_properties_query,
rel_query,
)
def test_cypher_return_correct_schema() -> None:
    """Integration test: verify the Neo4j schema-introspection queries.

    Seeds a live Neo4j instance with a tiny known graph, then checks that
    the node-property, relationship-property, and relationship queries
    exported by ``langchain_community.graphs.neo4j_graph`` return the
    expected schema descriptions.

    Requires NEO4J_URI / NEO4J_USERNAME / NEO4J_PASSWORD in the environment.
    """
    # Connection settings come from the environment; fail fast if absent.
    neo4j_url = os.environ.get("NEO4J_URI")
    neo4j_user = os.environ.get("NEO4J_USERNAME")
    neo4j_pass = os.environ.get("NEO4J_PASSWORD")
    assert neo4j_url is not None
    assert neo4j_user is not None
    assert neo4j_pass is not None

    graph = Neo4jGraph(
        url=neo4j_url,
        username=neo4j_user,
        password=neo4j_pass,
    )

    # Start from an empty database so the schema is fully determined
    # by the fixture data created below.
    graph.query("MATCH (n) DETACH DELETE n")

    # Fixture: three labeled nodes and two REL_TYPE relationships,
    # one of which carries a property.
    graph.query(
        """
CREATE (la:LabelA {property_a: 'a'})
CREATE (lb:LabelB)
CREATE (lc:LabelC)
MERGE (la)-[:REL_TYPE]-> (lb)
MERGE (la)-[:REL_TYPE {rel_prop: 'abc'}]-> (lc)
"""
    )
    # Refresh schema information
    graph.refresh_schema()

    node_properties = graph.query(node_properties_query)
    relationships_properties = graph.query(rel_properties_query)
    relationships = graph.query(rel_query)

    expected_node_properties = [
        {
            "output": {
                "properties": [{"property": "property_a", "type": "STRING"}],
                "labels": "LabelA",
            }
        }
    ]
    expected_relationships_properties = [
        {
            "output": {
                "type": "REL_TYPE",
                "properties": [{"property": "rel_prop", "type": "STRING"}],
            }
        }
    ]
    expected_relationships = [
        {"output": {"start": "LabelA", "type": "REL_TYPE", "end": "LabelB"}},
        {"output": {"start": "LabelA", "type": "REL_TYPE", "end": "LabelC"}},
    ]

    assert node_properties == expected_node_properties
    assert relationships_properties == expected_relationships_properties
    # Neo4j gives no ordering guarantee, so normalize by the "end" label
    # before comparing relationship rows.
    observed_relationships = sorted(
        relationships, key=lambda row: row["output"]["end"]
    )
    assert observed_relationships == expected_relationships

@ -7,9 +7,6 @@ from langchain_community.adapters.openai import (
ChoiceChunk,
Completions,
IndexableBaseModel,
_convert_message_chunk,
_convert_message_chunk_to_delta,
_has_assistant_message,
chat,
convert_dict_to_message,
convert_message_to_dict,
@ -26,10 +23,7 @@ __all__ = [
"convert_dict_to_message",
"convert_message_to_dict",
"convert_openai_messages",
"_convert_message_chunk",
"_convert_message_chunk_to_delta",
"ChatCompletion",
"_has_assistant_message",
"convert_messages_for_finetuning",
"Completions",
"Chat",

@ -1,6 +1,5 @@
from langchain_community.agent_toolkits.file_management.toolkit import (
_FILE_TOOLS,
FileManagementToolkit,
)
__all__ = ["_FILE_TOOLS", "FileManagementToolkit"]
__all__ = ["FileManagementToolkit"]

@ -5,25 +5,15 @@ from langchain_community.agent_toolkits.openapi.planner import (
RequestsPatchToolWithParsing,
RequestsPostToolWithParsing,
RequestsPutToolWithParsing,
_create_api_controller_agent,
_create_api_controller_tool,
_create_api_planner_tool,
_get_default_llm_chain,
_get_default_llm_chain_factory,
create_openapi_agent,
)
__all__ = [
"MAX_RESPONSE_LENGTH",
"_get_default_llm_chain",
"_get_default_llm_chain_factory",
"RequestsGetToolWithParsing",
"RequestsPostToolWithParsing",
"RequestsPatchToolWithParsing",
"RequestsPutToolWithParsing",
"RequestsDeleteToolWithParsing",
"_create_api_planner_tool",
"_create_api_controller_agent",
"_create_api_controller_tool",
"create_openapi_agent",
]

@ -20,7 +20,7 @@ class OpenAIToolAgentAction(AgentActionMessageLog):
def parse_ai_message_to_openai_tool_action(
message: BaseMessage
message: BaseMessage,
) -> Union[List[AgentAction], AgentFinish]:
"""Parse an AI message potentially containing tool_calls."""
if not isinstance(message, AIMessage):

@ -1,17 +1,6 @@
from langchain_community.cache import (
ASTRA_DB_CACHE_DEFAULT_COLLECTION_NAME,
ASTRA_DB_SEMANTIC_CACHE_DEFAULT_THRESHOLD,
ASTRA_DB_SEMANTIC_CACHE_EMBEDDING_CACHE_SIZE,
CASSANDRA_CACHE_DEFAULT_TABLE_NAME,
CASSANDRA_CACHE_DEFAULT_TTL_SECONDS,
CASSANDRA_SEMANTIC_CACHE_DEFAULT_DISTANCE_METRIC,
CASSANDRA_SEMANTIC_CACHE_DEFAULT_SCORE_THRESHOLD,
CASSANDRA_SEMANTIC_CACHE_DEFAULT_TABLE_NAME,
CASSANDRA_SEMANTIC_CACHE_DEFAULT_TTL_SECONDS,
CASSANDRA_SEMANTIC_CACHE_EMBEDDING_CACHE_SIZE,
AstraDBCache,
AstraDBSemanticCache,
Base,
CassandraCache,
CassandraSemanticCache,
FullLLMCache,
@ -25,23 +14,10 @@ from langchain_community.cache import (
SQLAlchemyMd5Cache,
SQLiteCache,
UpstashRedisCache,
_dump_generations_to_json,
_dumps_generations,
_ensure_cache_exists,
_hash,
_load_generations_from_json,
_loads_generations,
_validate_ttl,
)
__all__ = [
"_hash",
"_dump_generations_to_json",
"_load_generations_from_json",
"_dumps_generations",
"_loads_generations",
"InMemoryCache",
"Base",
"FullLLMCache",
"SQLAlchemyCache",
"SQLiteCache",
@ -49,24 +25,11 @@ __all__ = [
"RedisCache",
"RedisSemanticCache",
"GPTCache",
"_ensure_cache_exists",
"_validate_ttl",
"MomentoCache",
"CASSANDRA_CACHE_DEFAULT_TABLE_NAME",
"CASSANDRA_CACHE_DEFAULT_TTL_SECONDS",
"CassandraCache",
"CASSANDRA_SEMANTIC_CACHE_DEFAULT_DISTANCE_METRIC",
"CASSANDRA_SEMANTIC_CACHE_DEFAULT_SCORE_THRESHOLD",
"CASSANDRA_SEMANTIC_CACHE_DEFAULT_TABLE_NAME",
"CASSANDRA_SEMANTIC_CACHE_DEFAULT_TTL_SECONDS",
"CASSANDRA_SEMANTIC_CACHE_EMBEDDING_CACHE_SIZE",
"CassandraSemanticCache",
"FullMd5LLMCache",
"SQLAlchemyMd5Cache",
"ASTRA_DB_CACHE_DEFAULT_COLLECTION_NAME",
"AstraDBCache",
"ASTRA_DB_SEMANTIC_CACHE_DEFAULT_THRESHOLD",
"ASTRA_DB_CACHE_DEFAULT_COLLECTION_NAME",
"ASTRA_DB_SEMANTIC_CACHE_EMBEDDING_CACHE_SIZE",
"AstraDBSemanticCache",
]

@ -1,19 +1,7 @@
from langchain_community.callbacks.arthur_callback import (
COMPLETION_TOKENS,
DURATION,
FINISH_REASON,
PROMPT_TOKENS,
TOKEN_USAGE,
ArthurCallbackHandler,
_lazy_load_arthur,
)
__all__ = [
"PROMPT_TOKENS",
"COMPLETION_TOKENS",
"TOKEN_USAGE",
"FINISH_REASON",
"DURATION",
"_lazy_load_arthur",
"ArthurCallbackHandler",
]

@ -1,6 +1,5 @@
from langchain_community.callbacks.clearml_callback import (
ClearMLCallbackHandler,
import_clearml,
)
__all__ = ["import_clearml", "ClearMLCallbackHandler"]
__all__ = ["ClearMLCallbackHandler"]

@ -1,17 +1,7 @@
from langchain_community.callbacks.comet_ml_callback import (
LANGCHAIN_MODEL_NAME,
CometCallbackHandler,
_fetch_text_complexity_metrics,
_get_experiment,
_summarize_metrics_for_generated_outputs,
import_comet_ml,
)
__all__ = [
"LANGCHAIN_MODEL_NAME",
"import_comet_ml",
"_get_experiment",
"_fetch_text_complexity_metrics",
"_summarize_metrics_for_generated_outputs",
"CometCallbackHandler",
]

@ -1,6 +1,5 @@
from langchain_community.callbacks.context_callback import (
ContextCallbackHandler,
import_context,
)
__all__ = ["import_context", "ContextCallbackHandler"]
__all__ = ["ContextCallbackHandler"]

@ -1,7 +1,5 @@
from langchain_community.callbacks.flyte_callback import (
FlyteCallbackHandler,
analyze_text,
import_flytekit,
)
__all__ = ["import_flytekit", "analyze_text", "FlyteCallbackHandler"]
__all__ = ["FlyteCallbackHandler"]

@ -2,13 +2,9 @@ from langchain_community.callbacks.human import (
AsyncHumanApprovalCallbackHandler,
HumanApprovalCallbackHandler,
HumanRejectedException,
_default_approve,
_default_true,
)
__all__ = [
"_default_approve",
"_default_true",
"HumanRejectedException",
"HumanApprovalCallbackHandler",
"AsyncHumanApprovalCallbackHandler",

@ -1,13 +1,7 @@
from langchain_community.callbacks.infino_callback import (
InfinoCallbackHandler,
get_num_tokens,
import_infino,
import_tiktoken,
)
__all__ = [
"import_infino",
"import_tiktoken",
"get_num_tokens",
"InfinoCallbackHandler",
]

@ -1,35 +1,7 @@
from langchain_community.callbacks.llmonitor_callback import (
DEFAULT_API_URL,
PARAMS_TO_CAPTURE,
LLMonitorCallbackHandler,
UserContextManager,
_get_user_id,
_get_user_props,
_parse_input,
_parse_lc_message,
_parse_lc_messages,
_parse_lc_role,
_parse_output,
_serialize,
identify,
user_ctx,
user_props_ctx,
)
__all__ = [
"DEFAULT_API_URL",
"user_ctx",
"user_props_ctx",
"PARAMS_TO_CAPTURE",
"UserContextManager",
"identify",
"_serialize",
"_parse_input",
"_parse_output",
"_parse_lc_role",
"_get_user_id",
"_get_user_props",
"_parse_lc_message",
"_parse_lc_messages",
"LLMonitorCallbackHandler",
]

@ -3,11 +3,9 @@ from langchain_community.callbacks.mlflow_callback import (
MlflowLogger,
analyze_text,
construct_html_from_prompt_and_generation,
import_mlflow,
)
__all__ = [
"import_mlflow",
"analyze_text",
"construct_html_from_prompt_and_generation",
"MlflowLogger",

@ -1,13 +1,3 @@
from langchain_community.callbacks.openai_info import (
MODEL_COST_PER_1K_TOKENS,
OpenAICallbackHandler,
get_openai_token_cost_for_model,
standardize_model_name,
)
from langchain_community.callbacks.openai_info import OpenAICallbackHandler
__all__ = [
"MODEL_COST_PER_1K_TOKENS",
"standardize_model_name",
"get_openai_token_cost_for_model",
"OpenAICallbackHandler",
]
__all__ = ["OpenAICallbackHandler"]

@ -1,6 +1,5 @@
from langchain_community.callbacks.promptlayer_callback import (
PromptLayerCallbackHandler,
_lazy_import_promptlayer,
)
__all__ = ["_lazy_import_promptlayer", "PromptLayerCallbackHandler"]
__all__ = ["PromptLayerCallbackHandler"]

@ -1,6 +1,5 @@
from langchain_community.callbacks.sagemaker_callback import (
SageMakerCallbackHandler,
save_json,
)
__all__ = ["save_json", "SageMakerCallbackHandler"]
__all__ = ["SageMakerCallbackHandler"]

@ -8,11 +8,9 @@ from langchain_community.callbacks.streamlit.streamlit_callback_handler import (
LLMThoughtState,
StreamlitCallbackHandler,
ToolRecord,
_convert_newlines,
)
__all__ = [
"_convert_newlines",
"CHECKMARK_EMOJI",
"THINKING_EMOJI",
"HISTORY_EMOJI",

@ -1,7 +1,6 @@
from langchain_community.callbacks.tracers.comet import (
CometTracer,
_get_run_type,
import_comet_llm_api,
)
__all__ = ["_get_run_type", "import_comet_llm_api", "CometTracer"]
__all__ = ["import_comet_llm_api", "CometTracer"]

@ -3,12 +3,10 @@ from langchain_community.callbacks.tracers.wandb import (
RunProcessor,
WandbRunArgs,
WandbTracer,
_serialize_io,
)
__all__ = [
"PRINT_WARNINGS",
"_serialize_io",
"RunProcessor",
"WandbRunArgs",
"WandbTracer",

@ -1,6 +1,5 @@
from langchain_community.callbacks.trubrics_callback import (
TrubricsCallbackHandler,
_convert_message_to_dict,
)
__all__ = ["_convert_message_to_dict", "TrubricsCallbackHandler"]
__all__ = ["TrubricsCallbackHandler"]

@ -1,15 +1,7 @@
from langchain_community.callbacks.wandb_callback import (
WandbCallbackHandler,
analyze_text,
construct_html_from_prompt_and_generation,
import_wandb,
load_json_to_dict,
)
__all__ = [
"import_wandb",
"load_json_to_dict",
"analyze_text",
"construct_html_from_prompt_and_generation",
"WandbCallbackHandler",
]

@ -1,7 +1,5 @@
from langchain_community.callbacks.whylabs_callback import (
WhyLabsCallbackHandler,
diagnostic_logger,
import_langkit,
)
__all__ = ["diagnostic_logger", "import_langkit", "WhyLabsCallbackHandler"]
__all__ = ["WhyLabsCallbackHandler"]

@ -136,7 +136,7 @@ def convert_python_function_to_ernie_function(
def convert_to_ernie_function(
function: Union[Dict[str, Any], Type[BaseModel], Callable]
function: Union[Dict[str, Any], Type[BaseModel], Callable],
) -> Dict[str, Any]:
"""Convert a raw function/class to an Ernie function.

@ -140,7 +140,7 @@ def convert_python_function_to_openai_function(
def convert_to_openai_function(
function: Union[Dict[str, Any], Type[BaseModel], Callable]
function: Union[Dict[str, Any], Type[BaseModel], Callable],
) -> Dict[str, Any]:
"""Convert a raw function/class to an OpenAI function.

@ -1,7 +1,5 @@
from langchain_community.chat_loaders.gmail import (
GMailLoader,
_extract_email_content,
_get_message_data,
)
__all__ = ["_extract_email_content", "_get_message_data", "GMailLoader"]
__all__ = ["GMailLoader"]

@ -1,11 +1,9 @@
from langchain_community.chat_models.anthropic import (
ChatAnthropic,
_convert_one_message_to_text,
convert_messages_to_prompt_anthropic,
)
__all__ = [
"_convert_one_message_to_text",
"convert_messages_to_prompt_anthropic",
"ChatAnthropic",
]

@ -1,7 +1,5 @@
from langchain_community.chat_models.anyscale import (
DEFAULT_API_BASE,
DEFAULT_MODEL,
ChatAnyscale,
)
__all__ = ["DEFAULT_API_BASE", "DEFAULT_MODEL", "ChatAnyscale"]
__all__ = ["ChatAnyscale"]

@ -1,17 +1,7 @@
from langchain_community.chat_models.baichuan import (
DEFAULT_API_BASE,
ChatBaichuan,
_convert_delta_to_message_chunk,
_convert_dict_to_message,
_convert_message_to_dict,
_signature,
)
__all__ = [
"DEFAULT_API_BASE",
"_convert_message_to_dict",
"_convert_dict_to_message",
"_convert_delta_to_message_chunk",
"_signature",
"ChatBaichuan",
]

@ -1,7 +1,5 @@
from langchain_community.chat_models.baidu_qianfan_endpoint import (
QianfanChatEndpoint,
_convert_dict_to_message,
convert_message_to_dict,
)
__all__ = ["convert_message_to_dict", "_convert_dict_to_message", "QianfanChatEndpoint"]
__all__ = ["QianfanChatEndpoint"]

@ -1,7 +1,6 @@
from langchain_core.language_models.chat_models import (
BaseChatModel,
SimpleChatModel,
_get_verbosity,
agenerate_from_stream,
generate_from_stream,
)
@ -11,5 +10,4 @@ __all__ = [
"SimpleChatModel",
"generate_from_stream",
"agenerate_from_stream",
"_get_verbosity",
]

@ -1,7 +1,5 @@
from langchain_community.chat_models.cohere import (
ChatCohere,
get_cohere_chat_request,
get_role,
)
__all__ = ["get_role", "get_cohere_chat_request", "ChatCohere"]
__all__ = ["ChatCohere"]

@ -1,3 +1,3 @@
from langchain_community.chat_models.ernie import ErnieBotChat, _convert_message_to_dict
from langchain_community.chat_models.ernie import ErnieBotChat
__all__ = ["_convert_message_to_dict", "ErnieBotChat"]
__all__ = ["ErnieBotChat"]

@ -1,7 +1,5 @@
from langchain_community.chat_models.everlyai import (
DEFAULT_API_BASE,
DEFAULT_MODEL,
ChatEverlyAI,
)
__all__ = ["DEFAULT_API_BASE", "DEFAULT_MODEL", "ChatEverlyAI"]
__all__ = ["ChatEverlyAI"]

@ -1,17 +1,7 @@
from langchain_community.chat_models.fireworks import (
ChatFireworks,
_convert_delta_to_message_chunk,
_create_retry_decorator,
completion_with_retry,
conditional_decorator,
convert_dict_to_message,
)
__all__ = [
"_convert_delta_to_message_chunk",
"convert_dict_to_message",
"ChatFireworks",
"conditional_decorator",
"completion_with_retry",
"_create_retry_decorator",
]

@ -1,7 +1,5 @@
from langchain_community.chat_models.gigachat import (
GigaChat,
_convert_dict_to_message,
_convert_message_to_dict,
)
__all__ = ["_convert_dict_to_message", "_convert_message_to_dict", "GigaChat"]
__all__ = ["GigaChat"]

@ -1,19 +1,6 @@
from langchain_community.chat_models.google_palm import (
ChatGooglePalm,
ChatGooglePalmError,
_create_retry_decorator,
_messages_to_prompt_dict,
_response_to_result,
_truncate_at_stop_tokens,
chat_with_retry,
)
__all__ = [
"ChatGooglePalmError",
"_truncate_at_stop_tokens",
"_response_to_result",
"_messages_to_prompt_dict",
"_create_retry_decorator",
"chat_with_retry",
"ChatGooglePalm",
]
__all__ = ["ChatGooglePalm", "ChatGooglePalmError"]

@ -1,7 +1,5 @@
from langchain_community.chat_models.human import (
HumanInputChatModel,
_collect_yaml_input,
_display_messages,
)
__all__ = ["_display_messages", "_collect_yaml_input", "HumanInputChatModel"]
__all__ = ["HumanInputChatModel"]

@ -1,21 +1,7 @@
from langchain_community.chat_models.hunyuan import (
DEFAULT_API_BASE,
DEFAULT_PATH,
ChatHunyuan,
_convert_delta_to_message_chunk,
_convert_dict_to_message,
_convert_message_to_dict,
_create_chat_result,
_signature,
)
__all__ = [
"DEFAULT_API_BASE",
"DEFAULT_PATH",
"_convert_message_to_dict",
"_convert_dict_to_message",
"_convert_delta_to_message_chunk",
"_signature",
"_create_chat_result",
"ChatHunyuan",
]

@ -3,4 +3,4 @@ from langchain_community.chat_models.javelin_ai_gateway import (
ChatParams,
)
__all__ = ["ChatParams", "ChatJavelinAIGateway"]
__all__ = ["ChatJavelinAIGateway", "ChatParams"]

@ -1,15 +1,7 @@
from langchain_community.chat_models.jinachat import (
JinaChat,
_convert_delta_to_message_chunk,
_convert_dict_to_message,
_convert_message_to_dict,
_create_retry_decorator,
)
__all__ = [
"_create_retry_decorator",
"_convert_delta_to_message_chunk",
"_convert_dict_to_message",
"_convert_message_to_dict",
"JinaChat",
]

@ -1,7 +1,5 @@
from langchain_community.chat_models.konko import (
DEFAULT_API_BASE,
DEFAULT_MODEL,
ChatKonko,
)
__all__ = ["DEFAULT_API_BASE", "DEFAULT_MODEL", "ChatKonko"]
__all__ = ["ChatKonko"]

@ -1,17 +1,3 @@
from langchain_community.chat_models.litellm import (
ChatLiteLLM,
ChatLiteLLMException,
_convert_delta_to_message_chunk,
_convert_dict_to_message,
_convert_message_to_dict,
_create_retry_decorator,
)
from langchain_community.chat_models.litellm import ChatLiteLLM, ChatLiteLLMException
__all__ = [
"ChatLiteLLMException",
"_create_retry_decorator",
"_convert_dict_to_message",
"_convert_delta_to_message_chunk",
"_convert_message_to_dict",
"ChatLiteLLM",
]
__all__ = ["ChatLiteLLM", "ChatLiteLLMException"]

@ -1,6 +1,5 @@
from langchain_community.chat_models.meta import (
_convert_one_message_to_text_llama,
convert_messages_to_prompt_llama,
)
__all__ = ["_convert_one_message_to_text_llama", "convert_messages_to_prompt_llama"]
__all__ = ["convert_messages_to_prompt_llama"]

@ -1,7 +1,5 @@
from langchain_community.chat_models.minimax import (
MiniMaxChat,
_parse_chat_history,
_parse_message,
)
__all__ = ["_parse_message", "_parse_chat_history", "MiniMaxChat"]
__all__ = ["MiniMaxChat"]

@ -3,4 +3,4 @@ from langchain_community.chat_models.mlflow_ai_gateway import (
ChatParams,
)
__all__ = ["ChatParams", "ChatMLflowAIGateway"]
__all__ = ["ChatMLflowAIGateway", "ChatParams"]

@ -1,6 +1,5 @@
from langchain_community.chat_models.ollama import (
ChatOllama,
_stream_response_to_chat_generation_chunk,
)
__all__ = ["_stream_response_to_chat_generation_chunk", "ChatOllama"]
__all__ = ["ChatOllama"]

@ -1,13 +1,7 @@
from langchain_community.chat_models.openai import (
ChatOpenAI,
_convert_delta_to_message_chunk,
_create_retry_decorator,
_import_tiktoken,
)
__all__ = [
"_import_tiktoken",
"_create_retry_decorator",
"_convert_delta_to_message_chunk",
"ChatOpenAI",
]

@ -1,17 +1,7 @@
from langchain_community.chat_models.tongyi import (
ChatTongyi,
_convert_delta_to_message_chunk,
_create_retry_decorator,
_stream_response_to_generation_chunk,
convert_dict_to_message,
convert_message_to_dict,
)
__all__ = [
"convert_dict_to_message",
"convert_message_to_dict",
"_stream_response_to_generation_chunk",
"_create_retry_decorator",
"_convert_delta_to_message_chunk",
"ChatTongyi",
]

@ -1,15 +1,7 @@
from langchain_community.chat_models.vertexai import (
ChatVertexAI,
_ChatHistory,
_get_question,
_parse_chat_history,
_parse_examples,
)
__all__ = [
"_ChatHistory",
"_parse_chat_history",
"_parse_examples",
"_get_question",
"ChatVertexAI",
]

@ -1,7 +1,6 @@
from langchain_community.chat_models.volcengine_maas import (
VolcEngineMaasChat,
_convert_message_to_dict,
convert_dict_to_message,
)
__all__ = ["_convert_message_to_dict", "convert_dict_to_message", "VolcEngineMaasChat"]
__all__ = ["convert_dict_to_message", "VolcEngineMaasChat"]

@ -1,7 +1,5 @@
from langchain_community.chat_models.yandex import (
ChatYandexGPT,
_parse_chat_history,
_parse_message,
)
__all__ = ["_parse_message", "_parse_chat_history", "ChatYandexGPT"]
__all__ = ["ChatYandexGPT"]

@ -1,6 +1,5 @@
from langchain_community.document_loaders.arcgis_loader import (
_NOT_PROVIDED,
ArcGISLoader,
)
__all__ = ["_NOT_PROVIDED", "ArcGISLoader"]
__all__ = ["ArcGISLoader"]

@ -1,7 +1,5 @@
from langchain_community.document_loaders.async_html import (
AsyncHtmlLoader,
_build_metadata,
default_header_template,
)
__all__ = ["default_header_template", "_build_metadata", "AsyncHtmlLoader"]
__all__ = ["AsyncHtmlLoader"]

@ -1,17 +1,7 @@
from langchain_community.document_loaders.base_o365 import (
CHUNK_SIZE,
O365BaseLoader,
_FileType,
_O365Settings,
_O365TokenStorage,
fetch_mime_types,
)
__all__ = [
"CHUNK_SIZE",
"_O365Settings",
"_O365TokenStorage",
"_FileType",
"fetch_mime_types",
"O365BaseLoader",
]

@ -1,7 +1,5 @@
from langchain_community.document_loaders.blob_loaders.file_system import (
FileSystemBlobLoader,
T,
_make_iterator,
)
__all__ = ["T", "_make_iterator", "FileSystemBlobLoader"]
__all__ = ["FileSystemBlobLoader"]

@ -1,7 +1,5 @@
from langchain_community.document_loaders.concurrent import (
DEFAULT,
ConcurrentLoader,
_PathLike,
)
__all__ = ["_PathLike", "DEFAULT", "ConcurrentLoader"]
__all__ = ["ConcurrentLoader"]

@ -1,7 +1,5 @@
from langchain_community.document_loaders.directory import (
FILE_LOADER_TYPE,
DirectoryLoader,
_is_visible,
)
__all__ = ["FILE_LOADER_TYPE", "_is_visible", "DirectoryLoader"]
__all__ = ["DirectoryLoader"]

@ -1,25 +1,7 @@
from langchain_community.document_loaders.docugami import (
DEFAULT_API_ENDPOINT,
DOCUMENT_NAME_KEY,
DOCUMENT_SOURCE_KEY,
ID_KEY,
PROJECTS_KEY,
STRUCTURE_KEY,
TABLE_NAME,
TAG_KEY,
XPATH_KEY,
DocugamiLoader,
)
__all__ = [
"TABLE_NAME",
"XPATH_KEY",
"ID_KEY",
"DOCUMENT_SOURCE_KEY",
"DOCUMENT_NAME_KEY",
"STRUCTURE_KEY",
"TAG_KEY",
"PROJECTS_KEY",
"DEFAULT_API_ENDPOINT",
"DocugamiLoader",
]

@ -1,7 +1,5 @@
from langchain_community.document_loaders.generic import (
DEFAULT,
GenericLoader,
_PathLike,
)
__all__ = ["_PathLike", "DEFAULT", "GenericLoader"]
__all__ = ["GenericLoader"]

@ -1,3 +1,3 @@
from langchain_community.document_loaders.googledrive import SCOPES, GoogleDriveLoader
from langchain_community.document_loaders.googledrive import GoogleDriveLoader
__all__ = ["SCOPES", "GoogleDriveLoader"]
__all__ = ["GoogleDriveLoader"]

@ -1,3 +1,3 @@
from langchain_community.document_loaders.ifixit import IFIXIT_BASE_URL, IFixitLoader
from langchain_community.document_loaders.ifixit import IFixitLoader
__all__ = ["IFIXIT_BASE_URL", "IFixitLoader"]
__all__ = ["IFixitLoader"]

@ -1,3 +1,3 @@
from langchain_community.document_loaders.iugu import IUGU_ENDPOINTS, IuguLoader
from langchain_community.document_loaders.iugu import IuguLoader
__all__ = ["IUGU_ENDPOINTS", "IuguLoader"]
__all__ = ["IuguLoader"]

@ -1,3 +1,3 @@
from langchain_community.document_loaders.joplin import LINK_NOTE_TEMPLATE, JoplinLoader
from langchain_community.document_loaders.joplin import JoplinLoader
__all__ = ["LINK_NOTE_TEMPLATE", "JoplinLoader"]
__all__ = ["JoplinLoader"]

@ -1,6 +1,5 @@
from langchain_community.document_loaders.mastodon import (
MastodonTootsLoader,
_dependable_mastodon_import,
)
__all__ = ["_dependable_mastodon_import", "MastodonTootsLoader"]
__all__ = ["MastodonTootsLoader"]

@ -1,6 +1,5 @@
from langchain_community.document_loaders.modern_treasury import (
MODERN_TREASURY_ENDPOINTS,
ModernTreasuryLoader,
)
__all__ = ["MODERN_TREASURY_ENDPOINTS", "ModernTreasuryLoader"]
__all__ = ["ModernTreasuryLoader"]

@ -1,9 +1,5 @@
from langchain_community.document_loaders.notiondb import (
BLOCK_URL,
DATABASE_URL,
NOTION_BASE_URL,
PAGE_URL,
NotionDBLoader,
)
__all__ = ["NOTION_BASE_URL", "DATABASE_URL", "PAGE_URL", "BLOCK_URL", "NotionDBLoader"]
__all__ = ["NotionDBLoader"]

@ -1,6 +1,5 @@
from langchain_community.document_loaders.onedrive_file import (
CHUNK_SIZE,
OneDriveFileLoader,
)
__all__ = ["CHUNK_SIZE", "OneDriveFileLoader"]
__all__ = ["OneDriveFileLoader"]

@ -1,6 +1,5 @@
from langchain_community.document_loaders.onenote import (
OneNoteLoader,
_OneNoteGraphSettings,
)
__all__ = ["_OneNoteGraphSettings", "OneNoteLoader"]
__all__ = ["OneNoteLoader"]

@ -3,4 +3,4 @@ from langchain_community.document_loaders.parsers.grobid import (
ServerUnavailableException,
)
__all__ = ["ServerUnavailableException", "GrobidParser"]
__all__ = ["GrobidParser", "ServerUnavailableException"]

@ -1,6 +1,4 @@
from langchain_community.document_loaders.parsers.pdf import (
_PDF_FILTER_WITH_LOSS,
_PDF_FILTER_WITHOUT_LOSS,
AmazonTextractPDFParser,
DocumentIntelligenceParser,
PDFMinerParser,
@ -12,8 +10,6 @@ from langchain_community.document_loaders.parsers.pdf import (
)
__all__ = [
"_PDF_FILTER_WITH_LOSS",
"_PDF_FILTER_WITHOUT_LOSS",
"extract_from_images_with_rapidocr",
"PyPDFParser",
"PDFMinerParser",

@ -1,7 +1,5 @@
from langchain_community.document_loaders.parsers.registry import (
_REGISTRY,
_get_default_parser,
get_parser,
)
__all__ = ["_get_default_parser", "_REGISTRY", "get_parser"]
__all__ = ["get_parser"]

@ -1,3 +1,3 @@
from langchain_community.document_loaders.quip import _MAXIMUM_TITLE_LENGTH, QuipLoader
from langchain_community.document_loaders.quip import QuipLoader
__all__ = ["_MAXIMUM_TITLE_LENGTH", "QuipLoader"]
__all__ = ["QuipLoader"]

@ -1,13 +1,7 @@
from langchain_community.document_loaders.readthedocs import (
ReadTheDocsLoader,
_get_clean_text,
_get_link_ratio,
_process_element,
)
__all__ = [
"ReadTheDocsLoader",
"_get_clean_text",
"_get_link_ratio",
"_process_element",
]

@ -1,6 +1,5 @@
from langchain_community.document_loaders.recursive_url_loader import (
RecursiveUrlLoader,
_metadata_extractor,
)
__all__ = ["_metadata_extractor", "RecursiveUrlLoader"]
__all__ = ["RecursiveUrlLoader"]

@ -1,6 +1,5 @@
from langchain_community.document_loaders.reddit import (
RedditPostsLoader,
_dependable_praw_import,
)
__all__ = ["_dependable_praw_import", "RedditPostsLoader"]
__all__ = ["RedditPostsLoader"]

@ -1,7 +1,5 @@
from langchain_community.document_loaders.rocksetdb import (
ColumnNotFoundError,
RocksetLoader,
default_joiner,
)
__all__ = ["default_joiner", "ColumnNotFoundError", "RocksetLoader"]
__all__ = ["RocksetLoader"]

@ -1,15 +1,7 @@
from langchain_community.document_loaders.sitemap import (
SitemapLoader,
_batch_block,
_default_meta_function,
_default_parsing_function,
_extract_scheme_and_domain,
)
__all__ = [
"_default_parsing_function",
"_default_meta_function",
"_batch_block",
"_extract_scheme_and_domain",
"SitemapLoader",
]

@ -1,6 +1,5 @@
from langchain_community.document_loaders.spreedly import (
SPREEDLY_ENDPOINTS,
SpreedlyLoader,
)
__all__ = ["SPREEDLY_ENDPOINTS", "SpreedlyLoader"]
__all__ = ["SpreedlyLoader"]

@ -1,3 +1,3 @@
from langchain_community.document_loaders.stripe import STRIPE_ENDPOINTS, StripeLoader
from langchain_community.document_loaders.stripe import StripeLoader
__all__ = ["STRIPE_ENDPOINTS", "StripeLoader"]
__all__ = ["StripeLoader"]

@ -1,6 +1,5 @@
from langchain_community.document_loaders.twitter import (
TwitterTweetLoader,
_dependable_tweepy_import,
)
__all__ = ["_dependable_tweepy_import", "TwitterTweetLoader"]
__all__ = ["TwitterTweetLoader"]

@ -1,7 +1,5 @@
from langchain_community.document_loaders.web_base import (
WebBaseLoader,
_build_metadata,
default_header_template,
)
__all__ = ["default_header_template", "_build_metadata", "WebBaseLoader"]
__all__ = ["WebBaseLoader"]

@ -1,19 +1,11 @@
from langchain_community.document_loaders.youtube import (
ALLOWED_NETLOCK,
ALLOWED_SCHEMAS,
SCOPES,
GoogleApiClient,
GoogleApiYoutubeLoader,
YoutubeLoader,
_parse_video_id,
)
__all__ = [
"SCOPES",
"GoogleApiClient",
"ALLOWED_SCHEMAS",
"ALLOWED_NETLOCK",
"_parse_video_id",
"YoutubeLoader",
"GoogleApiYoutubeLoader",
"GoogleApiClient",
]

@ -1,6 +1,5 @@
from langchain_community.document_transformers.beautiful_soup_transformer import (
BeautifulSoupTransformer,
get_navigable_strings,
)
__all__ = ["BeautifulSoupTransformer", "get_navigable_strings"]
__all__ = ["BeautifulSoupTransformer"]

@ -2,18 +2,16 @@ from langchain_community.document_transformers.embeddings_redundant_filter impor
EmbeddingsClusteringFilter,
EmbeddingsRedundantFilter,
_DocumentWithState,
_filter_cluster_embeddings,
_filter_similar_embeddings,
_get_embeddings_from_stateful_docs,
get_stateful_documents,
)
__all__ = [
"EmbeddingsRedundantFilter",
"EmbeddingsClusteringFilter",
"_DocumentWithState",
"get_stateful_documents",
"_filter_similar_embeddings",
"_get_embeddings_from_stateful_docs",
"_filter_cluster_embeddings",
"EmbeddingsRedundantFilter",
"EmbeddingsClusteringFilter",
"_filter_similar_embeddings",
]

@ -1,6 +1,5 @@
from langchain_community.document_transformers.long_context_reorder import (
LongContextReorder,
_litm_reordering,
)
__all__ = ["_litm_reordering", "LongContextReorder"]
__all__ = ["LongContextReorder"]

@ -1,8 +1,5 @@
from langchain_community.embeddings.bookend import (
API_URL,
DEFAULT_TASK,
PATH,
BookendEmbeddings,
)
__all__ = ["API_URL", "DEFAULT_TASK", "PATH", "BookendEmbeddings"]
__all__ = ["BookendEmbeddings"]

@ -1,6 +1,5 @@
from langchain_community.embeddings.cloudflare_workersai import (
DEFAULT_MODEL_NAME,
CloudflareWorkersAIEmbeddings,
)
__all__ = ["DEFAULT_MODEL_NAME", "CloudflareWorkersAIEmbeddings"]
__all__ = ["CloudflareWorkersAIEmbeddings"]

@ -1,7 +1,5 @@
from langchain_community.embeddings.dashscope import (
DashScopeEmbeddings,
_create_retry_decorator,
embed_with_retry,
)
__all__ = ["_create_retry_decorator", "embed_with_retry", "DashScopeEmbeddings"]
__all__ = ["DashScopeEmbeddings"]

@ -1,3 +1,3 @@
from langchain_community.embeddings.databricks import DatabricksEmbeddings, _chunk
from langchain_community.embeddings.databricks import DatabricksEmbeddings
__all__ = ["_chunk", "DatabricksEmbeddings"]
__all__ = ["DatabricksEmbeddings"]

@ -1,6 +1,5 @@
from langchain_community.embeddings.deepinfra import (
DEFAULT_MODEL_ID,
DeepInfraEmbeddings,
)
__all__ = ["DEFAULT_MODEL_ID", "DeepInfraEmbeddings"]
__all__ = ["DeepInfraEmbeddings"]

@ -1,13 +1,7 @@
from langchain_community.embeddings.embaas import (
EMBAAS_API_URL,
MAX_BATCH_SIZE,
EmbaasEmbeddings,
EmbaasEmbeddingsPayload,
)
__all__ = [
"MAX_BATCH_SIZE",
"EMBAAS_API_URL",
"EmbaasEmbeddingsPayload",
"EmbaasEmbeddings",
]

@ -1,7 +1,5 @@
from langchain_community.embeddings.google_palm import (
GooglePalmEmbeddings,
_create_retry_decorator,
embed_with_retry,
)
__all__ = ["_create_retry_decorator", "embed_with_retry", "GooglePalmEmbeddings"]
__all__ = ["GooglePalmEmbeddings"]

@ -1,11 +1,4 @@
from langchain_community.embeddings.huggingface import (
DEFAULT_BGE_MODEL,
DEFAULT_EMBED_INSTRUCTION,
DEFAULT_INSTRUCT_MODEL,
DEFAULT_MODEL_NAME,
DEFAULT_QUERY_BGE_INSTRUCTION_EN,
DEFAULT_QUERY_BGE_INSTRUCTION_ZH,
DEFAULT_QUERY_INSTRUCTION,
HuggingFaceBgeEmbeddings,
HuggingFaceEmbeddings,
HuggingFaceInferenceAPIEmbeddings,
@ -13,13 +6,6 @@ from langchain_community.embeddings.huggingface import (
)
__all__ = [
"DEFAULT_MODEL_NAME",
"DEFAULT_INSTRUCT_MODEL",
"DEFAULT_BGE_MODEL",
"DEFAULT_EMBED_INSTRUCTION",
"DEFAULT_QUERY_INSTRUCTION",
"DEFAULT_QUERY_BGE_INSTRUCTION_EN",
"DEFAULT_QUERY_BGE_INSTRUCTION_ZH",
"HuggingFaceEmbeddings",
"HuggingFaceInstructEmbeddings",
"HuggingFaceBgeEmbeddings",

@ -1,7 +1,5 @@
from langchain_community.embeddings.huggingface_hub import (
DEFAULT_MODEL,
VALID_TASKS,
HuggingFaceHubEmbeddings,
)
__all__ = ["DEFAULT_MODEL", "VALID_TASKS", "HuggingFaceHubEmbeddings"]
__all__ = ["HuggingFaceHubEmbeddings"]

@ -1,6 +1,5 @@
from langchain_community.embeddings.javelin_ai_gateway import (
JavelinAIGatewayEmbeddings,
_chunk,
)
__all__ = ["_chunk", "JavelinAIGatewayEmbeddings"]
__all__ = ["JavelinAIGatewayEmbeddings"]

@ -1,15 +1,7 @@
from langchain_community.embeddings.localai import (
LocalAIEmbeddings,
_async_retry_decorator,
_check_response,
_create_retry_decorator,
embed_with_retry,
)
__all__ = [
"_create_retry_decorator",
"_async_retry_decorator",
"_check_response",
"embed_with_retry",
"LocalAIEmbeddings",
]

@ -1,7 +1,5 @@
from langchain_community.embeddings.minimax import (
MiniMaxEmbeddings,
_create_retry_decorator,
embed_with_retry,
)
__all__ = ["_create_retry_decorator", "embed_with_retry", "MiniMaxEmbeddings"]
__all__ = ["MiniMaxEmbeddings"]

@ -1,3 +1,3 @@
from langchain_community.embeddings.mlflow import MlflowEmbeddings, _chunk
from langchain_community.embeddings.mlflow import MlflowEmbeddings
__all__ = ["_chunk", "MlflowEmbeddings"]
__all__ = ["MlflowEmbeddings"]

Some files were not shown because too many files have changed in this diff Show More

Loading…
Cancel
Save