diff --git a/libs/experimental/langchain_experimental/autonomous_agents/autogpt/prompt.py b/libs/experimental/langchain_experimental/autonomous_agents/autogpt/prompt.py
index a8c775635f..ba8fd70487 100644
--- a/libs/experimental/langchain_experimental/autonomous_agents/autogpt/prompt.py
+++ b/libs/experimental/langchain_experimental/autonomous_agents/autogpt/prompt.py
@@ -13,6 +13,8 @@ from langchain_experimental.autonomous_agents.autogpt.prompt_generator import ge
 
 
 class AutoGPTPrompt(BaseChatPromptTemplate, BaseModel):
+    """Prompt for AutoGPT."""
+
     ai_name: str
     ai_role: str
     tools: List[BaseTool]
diff --git a/libs/langchain/langchain/agents/agent_iterator.py b/libs/langchain/langchain/agents/agent_iterator.py
index 829e00c9ea..36b11f7b05 100644
--- a/libs/langchain/langchain/agents/agent_iterator.py
+++ b/libs/langchain/langchain/agents/agent_iterator.py
@@ -38,6 +38,8 @@ logger = logging.getLogger(__name__)
 
 
 class BaseAgentExecutorIterator(ABC):
+    """Base class for AgentExecutorIterator."""
+
     @abstractmethod
     def build_callback_manager(self) -> None:
         pass
@@ -57,6 +59,8 @@ def rebuild_callback_manager_on_set(
 
 
 class AgentExecutorIterator(BaseAgentExecutorIterator):
+    """Iterator for AgentExecutor."""
+
     def __init__(
         self,
         agent_executor: AgentExecutor,
diff --git a/libs/langchain/langchain/callbacks/base.py b/libs/langchain/langchain/callbacks/base.py
index 85ea6c96ae..dcf3766d8f 100644
--- a/libs/langchain/langchain/callbacks/base.py
+++ b/libs/langchain/langchain/callbacks/base.py
@@ -468,7 +468,7 @@ class AsyncCallbackHandler(BaseCallbackHandler):
 
 
 class BaseCallbackManager(CallbackManagerMixin):
-    """Base callback manager that can be used to handle callbacks from LangChain."""
+    """Base callback manager that handles callbacks from LangChain."""
 
     def __init__(
         self,
diff --git a/libs/langchain/langchain/callbacks/flyte_callback.py b/libs/langchain/langchain/callbacks/flyte_callback.py
index 5d80682383..7e22d58e98 100644
--- a/libs/langchain/langchain/callbacks/flyte_callback.py
+++ b/libs/langchain/langchain/callbacks/flyte_callback.py
@@ -93,7 +93,7 @@ def analyze_text(
 
 
 class FlyteCallbackHandler(BaseMetadataCallbackHandler, BaseCallbackHandler):
-    """This callback handler is designed specifically for usage within a Flyte task."""
+    """Callback handler that is used within a Flyte task."""
 
     def __init__(self) -> None:
         """Initialize callback handler."""
diff --git a/libs/langchain/langchain/callbacks/manager.py b/libs/langchain/langchain/callbacks/manager.py
index bc26656f09..3e2ba86b71 100644
--- a/libs/langchain/langchain/callbacks/manager.py
+++ b/libs/langchain/langchain/callbacks/manager.py
@@ -1004,7 +1004,7 @@ class AsyncCallbackManagerForRetrieverRun(
 
 
 class CallbackManager(BaseCallbackManager):
-    """Callback manager that can be used to handle callbacks from langchain."""
+    """Callback manager that handles callbacks from LangChain."""
 
     def on_llm_start(
         self,
@@ -1273,7 +1273,7 @@ class CallbackManager(BaseCallbackManager):
 
 
 class AsyncCallbackManager(BaseCallbackManager):
-    """Async callback manager that can be used to handle callbacks from LangChain."""
+    """Async callback manager that handles callbacks from LangChain."""
 
     @property
     def is_async(self) -> bool:
diff --git a/libs/langchain/langchain/callbacks/streamlit/__init__.py b/libs/langchain/langchain/callbacks/streamlit/__init__.py
index 44774a92d1..e82104a223 100644
--- a/libs/langchain/langchain/callbacks/streamlit/__init__.py
+++ b/libs/langchain/langchain/callbacks/streamlit/__init__.py
@@ -22,7 +22,9 @@ def StreamlitCallbackHandler(
     collapse_completed_thoughts: bool = True,
     thought_labeler: Optional[LLMThoughtLabeler] = None,
 ) -> BaseCallbackHandler:
-    """Construct a new StreamlitCallbackHandler. This CallbackHandler is geared towards
+    """Callback Handler that writes to a Streamlit app.
+
+    This CallbackHandler is geared towards
     use with a LangChain Agent; it displays the Agent's LLM and tool-usage "thoughts"
     inside a series of Streamlit expanders.
 
diff --git a/libs/langchain/langchain/callbacks/tracers/schemas.py b/libs/langchain/langchain/callbacks/tracers/schemas.py
index e4a90225f5..c9de9e6ae9 100644
--- a/libs/langchain/langchain/callbacks/tracers/schemas.py
+++ b/libs/langchain/langchain/callbacks/tracers/schemas.py
@@ -31,7 +31,7 @@ class TracerSessionV1(TracerSessionV1Base):
 
 
 class TracerSessionBase(TracerSessionV1Base):
-    """A creation class for TracerSession."""
+    """Base class for TracerSession."""
 
     tenant_id: UUID
 
diff --git a/libs/langchain/langchain/chains/graph_qa/neptune_cypher.py b/libs/langchain/langchain/chains/graph_qa/neptune_cypher.py
index 8cc83bdf7d..ae7baa9b21 100644
--- a/libs/langchain/langchain/chains/graph_qa/neptune_cypher.py
+++ b/libs/langchain/langchain/chains/graph_qa/neptune_cypher.py
@@ -20,6 +20,7 @@ INTERMEDIATE_STEPS_KEY = "intermediate_steps"
 
 
 def extract_cypher(text: str) -> str:
+    """Extract Cypher code from text using Regex."""
     # The pattern to find Cypher code enclosed in triple backticks
     pattern = r"```(.*?)```"
 
diff --git a/libs/langchain/langchain/chains/transform.py b/libs/langchain/langchain/chains/transform.py
index 13e1e65aaa..969796ffe7 100644
--- a/libs/langchain/langchain/chains/transform.py
+++ b/libs/langchain/langchain/chains/transform.py
@@ -13,7 +13,7 @@ logger = logging.getLogger(__name__)
 
 
 class TransformChain(Chain):
-    """Chain transform chain output.
+    """Chain that transforms the chain output.
 
     Example:
         .. code-block:: python
diff --git a/libs/langchain/langchain/chat_models/anthropic.py b/libs/langchain/langchain/chat_models/anthropic.py
index cf94096e9b..5090e9a0ab 100644
--- a/libs/langchain/langchain/chat_models/anthropic.py
+++ b/libs/langchain/langchain/chat_models/anthropic.py
@@ -22,7 +22,7 @@ from langchain.schema.output import ChatGenerationChunk
 
 
 class ChatAnthropic(BaseChatModel, _AnthropicCommon):
-    r"""Wrapper around Anthropic's large language model.
+    """Anthropic's large language chat model.
 
     To use, you should have the ``anthropic`` python package installed, and the
     environment variable ``ANTHROPIC_API_KEY`` set with your API key, or pass
diff --git a/libs/langchain/langchain/chat_models/azure_openai.py b/libs/langchain/langchain/chat_models/azure_openai.py
index 31095fe518..cf841d68e3 100644
--- a/libs/langchain/langchain/chat_models/azure_openai.py
+++ b/libs/langchain/langchain/chat_models/azure_openai.py
@@ -14,7 +14,9 @@ logger = logging.getLogger(__name__)
 
 
 class AzureChatOpenAI(ChatOpenAI):
-    """Wrapper around Azure OpenAI Chat Completion API. To use this class you
+    """Wrapper around Azure OpenAI Chat Completion API.
+
+    To use this class you
     must have a deployed model on Azure OpenAI. Use `deployment_name` in the
     constructor to refer to the "Model deployment name" in the Azure portal.
 
diff --git a/libs/langchain/langchain/chat_models/human.py b/libs/langchain/langchain/chat_models/human.py
index 20fb099c20..1f9bf6eaf1 100644
--- a/libs/langchain/langchain/chat_models/human.py
+++ b/libs/langchain/langchain/chat_models/human.py
@@ -67,7 +67,7 @@ def _collect_yaml_input(
 
 
 class HumanInputChatModel(BaseChatModel):
-    """ChatModel wrapper which returns user input as the response.."""
+    """ChatModel which returns user input as the response."""
 
     input_func: Callable = Field(default_factory=lambda: _collect_yaml_input)
     message_func: Callable = Field(default_factory=lambda: _display_messages)
diff --git a/libs/langchain/langchain/chat_models/jinachat.py b/libs/langchain/langchain/chat_models/jinachat.py
index 30fee86126..fd1225a9ff 100644
--- a/libs/langchain/langchain/chat_models/jinachat.py
+++ b/libs/langchain/langchain/chat_models/jinachat.py
@@ -133,8 +133,8 @@ def _convert_message_to_dict(message: BaseMessage) -> dict:
 
 
 class JinaChat(BaseChatModel):
-    """JinaChat is a wrapper for Jina AI's LLM service, providing cost-effective
-    image chat capabilities in comparison to other LLM APIs.
+    """Wrapper for Jina AI's LLM service, providing cost-effective
+    image chat capabilities.
 
     To use, you should have the ``openai`` python package installed, and the
     environment variable ``JINACHAT_API_KEY`` set to your API key, which you
diff --git a/libs/langchain/langchain/document_loaders/epub.py b/libs/langchain/langchain/document_loaders/epub.py
index 05dfaea103..2c2b49721e 100644
--- a/libs/langchain/langchain/document_loaders/epub.py
+++ b/libs/langchain/langchain/document_loaders/epub.py
@@ -8,7 +8,8 @@ from langchain.document_loaders.unstructured import (
 
 
 class UnstructuredEPubLoader(UnstructuredFileLoader):
-    """UnstructuredEPubLoader uses unstructured to load EPUB files.
+    """Loader that uses Unstructured to load EPUB files.
+
     You can run the loader in one of two modes: "single" and "elements".
     If you use "single" mode, the document will be returned as a single
     langchain Document object. If you use "elements" mode, the unstructured
diff --git a/libs/langchain/langchain/document_loaders/html.py b/libs/langchain/langchain/document_loaders/html.py
index 940d879960..dce2697076 100644
--- a/libs/langchain/langchain/document_loaders/html.py
+++ b/libs/langchain/langchain/document_loaders/html.py
@@ -5,7 +5,8 @@ from langchain.document_loaders.unstructured import UnstructuredFileLoader
 
 
 class UnstructuredHTMLLoader(UnstructuredFileLoader):
-    """UnstructuredHTMLLoader uses unstructured to load HTML files.
+    """Loader that uses Unstructured to load HTML files.
+
     You can run the loader in one of two modes: "single" and "elements".
     If you use "single" mode, the document will be returned as a single
     langchain Document object. If you use "elements" mode, the unstructured
diff --git a/libs/langchain/langchain/document_loaders/image.py b/libs/langchain/langchain/document_loaders/image.py
index b47339a228..9a31bd00c6 100644
--- a/libs/langchain/langchain/document_loaders/image.py
+++ b/libs/langchain/langchain/document_loaders/image.py
@@ -5,7 +5,8 @@ from langchain.document_loaders.unstructured import UnstructuredFileLoader
 
 
 class UnstructuredImageLoader(UnstructuredFileLoader):
-    """UnstructuredImageLoader uses unstructured to load PNG and JPG files.
+    """Loader that uses Unstructured to load PNG and JPG files.
+
     You can run the loader in one of two modes: "single" and "elements".
     If you use "single" mode, the document will be returned as a single
     langchain Document object. If you use "elements" mode, the unstructured
diff --git a/libs/langchain/langchain/document_loaders/markdown.py b/libs/langchain/langchain/document_loaders/markdown.py
index b091208287..820b5b53de 100644
--- a/libs/langchain/langchain/document_loaders/markdown.py
+++ b/libs/langchain/langchain/document_loaders/markdown.py
@@ -5,7 +5,8 @@ from langchain.document_loaders.unstructured import UnstructuredFileLoader
 
 
 class UnstructuredMarkdownLoader(UnstructuredFileLoader):
-    """UnstructuredMarkdownLoader uses unstructured to load markdown files.
+    """Loader that uses Unstructured to load markdown files.
+
     You can run the loader in one of two modes: "single" and "elements".
     If you use "single" mode, the document will be returned as a single
     langchain Document object. If you use "elements" mode, the unstructured
diff --git a/libs/langchain/langchain/document_loaders/tencent_cos_directory.py b/libs/langchain/langchain/document_loaders/tencent_cos_directory.py
index b51f60ce93..5b3a878536 100644
--- a/libs/langchain/langchain/document_loaders/tencent_cos_directory.py
+++ b/libs/langchain/langchain/document_loaders/tencent_cos_directory.py
@@ -7,7 +7,7 @@ from langchain.document_loaders.tencent_cos_file import TencentCOSFileLoader
 
 
 class TencentCOSDirectoryLoader(BaseLoader):
-    """Loading logic for loading documents from Tencent Cloud COS."""
+    """Loader for Tencent Cloud COS directory."""
 
     def __init__(self, conf: Any, bucket: str, prefix: str = ""):
         """Initialize with COS config, bucket and prefix.
diff --git a/libs/langchain/langchain/document_loaders/tencent_cos_file.py b/libs/langchain/langchain/document_loaders/tencent_cos_file.py
index a64220eaf9..8391ec54c8 100644
--- a/libs/langchain/langchain/document_loaders/tencent_cos_file.py
+++ b/libs/langchain/langchain/document_loaders/tencent_cos_file.py
@@ -9,7 +9,7 @@ from langchain.document_loaders.unstructured import UnstructuredFileLoader
 
 
 class TencentCOSFileLoader(BaseLoader):
-    """Loading logic for loading documents from Tencent Cloud COS."""
+    """Loader for Tencent Cloud COS file."""
 
     def __init__(self, conf: Any, bucket: str, key: str):
         """Initialize with COS config, bucket and key name.
diff --git a/libs/langchain/langchain/document_loaders/unstructured.py b/libs/langchain/langchain/document_loaders/unstructured.py
index 4b9c191c80..1c729d1e71 100644
--- a/libs/langchain/langchain/document_loaders/unstructured.py
+++ b/libs/langchain/langchain/document_loaders/unstructured.py
@@ -34,7 +34,7 @@ def validate_unstructured_version(min_unstructured_version: str) -> None:
 
 
 class UnstructuredBaseLoader(BaseLoader, ABC):
-    """Loader that uses unstructured to load files."""
+    """Loader that uses Unstructured to load files."""
 
     def __init__(
         self,
@@ -130,7 +130,9 @@ class UnstructuredBaseLoader(BaseLoader, ABC):
 
 
 class UnstructuredFileLoader(UnstructuredBaseLoader):
-    """UnstructuredFileLoader uses unstructured to load files. The file loader uses the
+    """Loader that uses Unstructured to load files.
+
+    The file loader uses the
     unstructured partition function and will automatically detect the file type.
     You can run the loader in one of two modes: "single" and "elements".
     If you use "single" mode, the document will be returned as a single
@@ -209,7 +211,8 @@ def get_elements_from_api(
 
 
 class UnstructuredAPIFileLoader(UnstructuredFileLoader):
-    """UnstructuredAPIFileLoader uses the Unstructured API to load files.
+    """Loader that uses the Unstructured API to load files.
+
     By default, the loader makes a call to the hosted Unstructured API.
     If you are running the unstructured API locally, you can change the API rule by
     passing in the url parameter when you initialize the loader.
@@ -272,7 +275,9 @@ class UnstructuredAPIFileLoader(UnstructuredFileLoader):
 
 
 class UnstructuredFileIOLoader(UnstructuredBaseLoader):
-    """UnstructuredFileIOLoader uses unstructured to load files. The file loader
+    """Loader that uses Unstructured to load files.
+
+    The file loader
     uses the unstructured partition function and will automatically detect the file
     type. You can run the loader in one of two modes: "single" and "elements".
     If you use "single" mode, the document will be returned as a single
@@ -317,7 +322,8 @@
 
 
 class UnstructuredAPIFileIOLoader(UnstructuredFileIOLoader):
-    """UnstructuredAPIFileIOLoader uses the Unstructured API to load files.
+    """Loader that uses the Unstructured API to load files.
+
     By default, the loader makes a call to the hosted Unstructured API.
     If you are running the unstructured API locally, you can change the API rule by
     passing in the url parameter when you initialize the loader.
diff --git a/libs/langchain/langchain/document_transformers/doctran_text_extract.py b/libs/langchain/langchain/document_transformers/doctran_text_extract.py
index fc618dab33..0de109b31d 100644
--- a/libs/langchain/langchain/document_transformers/doctran_text_extract.py
+++ b/libs/langchain/langchain/document_transformers/doctran_text_extract.py
@@ -5,7 +5,7 @@ from langchain.utils import get_from_env
 
 
 class DoctranPropertyExtractor(BaseDocumentTransformer):
-    """Extracts properties from text documents using doctran.
+    """Extract properties from text documents using doctran.
 
     Arguments:
         properties: A list of the properties to extract.
diff --git a/libs/langchain/langchain/document_transformers/doctran_text_qa.py b/libs/langchain/langchain/document_transformers/doctran_text_qa.py
index c77c9dfe8a..84f286a6ce 100644
--- a/libs/langchain/langchain/document_transformers/doctran_text_qa.py
+++ b/libs/langchain/langchain/document_transformers/doctran_text_qa.py
@@ -5,7 +5,7 @@ from langchain.utils import get_from_env
 
 
 class DoctranQATransformer(BaseDocumentTransformer):
-    """Extracts QA from text documents using doctran.
+    """Extract QA from text documents using doctran.
 
     Arguments:
         openai_api_key: OpenAI API key. Can also be specified via environment variable
diff --git a/libs/langchain/langchain/document_transformers/doctran_text_translate.py b/libs/langchain/langchain/document_transformers/doctran_text_translate.py
index 219c8fd4d2..f3793cee98 100644
--- a/libs/langchain/langchain/document_transformers/doctran_text_translate.py
+++ b/libs/langchain/langchain/document_transformers/doctran_text_translate.py
@@ -5,7 +5,7 @@ from langchain.utils import get_from_env
 
 
 class DoctranTextTranslator(BaseDocumentTransformer):
-    """Translates text documents using doctran.
+    """Translate text documents using doctran.
 
     Arguments:
         openai_api_key: OpenAI API key. Can also be specified via environment variable
diff --git a/libs/langchain/langchain/evaluation/qa/eval_chain.py b/libs/langchain/langchain/evaluation/qa/eval_chain.py
index a0c8d93d03..90b5e8d5dc 100644
--- a/libs/langchain/langchain/evaluation/qa/eval_chain.py
+++ b/libs/langchain/langchain/evaluation/qa/eval_chain.py
@@ -1,4 +1,4 @@
-"""LLM Chain specifically for evaluating question answering."""
+"""LLM Chains for evaluating question answering."""
 from __future__ import annotations
 
 import re
@@ -50,7 +50,7 @@ def _parse_string_eval_output(text: str) -> dict:
 
 
 class QAEvalChain(LLMChain, StringEvaluator, LLMEvalChain):
-    """LLM Chain specifically for evaluating question answering."""
+    """LLM Chain for evaluating question answering."""
 
     output_key: str = "results"  #: :meta private:
 
@@ -184,7 +184,7 @@ class QAEvalChain(LLMChain, StringEvaluator, LLMEvalChain):
 
 
 class ContextQAEvalChain(LLMChain, StringEvaluator, LLMEvalChain):
-    """LLM Chain specifically for evaluating QA w/o GT based on context"""
+    """LLM Chain for evaluating QA without ground truth, based on context."""
 
     @property
     def requires_reference(self) -> bool:
@@ -308,7 +308,7 @@ class ContextQAEvalChain(LLMChain, StringEvaluator, LLMEvalChain):
 
 
 class CotQAEvalChain(ContextQAEvalChain):
-    """LLM Chain specifically for evaluating QA using chain of thought reasoning."""
+    """LLM Chain for evaluating QA using chain of thought reasoning."""
 
     @property
     def evaluation_name(self) -> str:
diff --git a/libs/langchain/langchain/evaluation/qa/generate_chain.py b/libs/langchain/langchain/evaluation/qa/generate_chain.py
index 1a2bc24cd2..cf521b7892 100644
--- a/libs/langchain/langchain/evaluation/qa/generate_chain.py
+++ b/libs/langchain/langchain/evaluation/qa/generate_chain.py
@@ -1,4 +1,4 @@
-"""LLM Chain specifically for generating examples for question answering."""
+"""LLM Chain for generating examples for question answering."""
 from __future__ import annotations
 
 from typing import Any
@@ -9,7 +9,7 @@ from langchain.schema.language_model import BaseLanguageModel
 
 
 class QAGenerateChain(LLMChain):
-    """LLM Chain specifically for generating examples for question answering."""
+    """LLM Chain for generating examples for question answering."""
 
     @classmethod
     def from_llm(cls, llm: BaseLanguageModel, **kwargs: Any) -> QAGenerateChain:
diff --git a/libs/langchain/langchain/experimental/autonomous_agents/autogpt/output_parser.py b/libs/langchain/langchain/experimental/autonomous_agents/autogpt/output_parser.py
index e060495d73..d1d655029c 100644
--- a/libs/langchain/langchain/experimental/autonomous_agents/autogpt/output_parser.py
+++ b/libs/langchain/langchain/experimental/autonomous_agents/autogpt/output_parser.py
@@ -7,11 +7,15 @@ from langchain.schema import BaseOutputParser
 
 
 class AutoGPTAction(NamedTuple):
+    """Action returned by AutoGPTOutputParser."""
+
     name: str
     args: Dict
 
 
 class BaseAutoGPTOutputParser(BaseOutputParser):
+    """Base output parser for AutoGPT."""
+
     @abstractmethod
     def parse(self, text: str) -> AutoGPTAction:
         """Return AutoGPTAction"""
@@ -36,6 +40,8 @@ def preprocess_json_input(input_str: str) -> str:
 
 
 class AutoGPTOutputParser(BaseAutoGPTOutputParser):
+    """Output parser for AutoGPT."""
+
     def parse(self, text: str) -> AutoGPTAction:
         try:
             parsed = json.loads(text, strict=False)
diff --git a/libs/langchain/langchain/experimental/autonomous_agents/autogpt/prompt.py b/libs/langchain/langchain/experimental/autonomous_agents/autogpt/prompt.py
index f6645a1dba..0d1ec626a6 100644
--- a/libs/langchain/langchain/experimental/autonomous_agents/autogpt/prompt.py
+++ b/libs/langchain/langchain/experimental/autonomous_agents/autogpt/prompt.py
@@ -13,6 +13,8 @@ from langchain.vectorstores.base import VectorStoreRetriever
 
 
 class AutoGPTPrompt(BaseChatPromptTemplate, BaseModel):
+    """Prompt for AutoGPT."""
+
     ai_name: str
     ai_role: str
     tools: List[BaseTool]
diff --git a/libs/langchain/langchain/experimental/autonomous_agents/autogpt/prompt_generator.py b/libs/langchain/langchain/experimental/autonomous_agents/autogpt/prompt_generator.py
index b8014eb943..81e93a7300 100644
--- a/libs/langchain/langchain/experimental/autonomous_agents/autogpt/prompt_generator.py
+++ b/libs/langchain/langchain/experimental/autonomous_agents/autogpt/prompt_generator.py
@@ -123,7 +123,7 @@ class PromptGenerator:
 
 
 def get_prompt(tools: List[BaseTool]) -> str:
-    """This function generates a prompt string.
+    """Generate a prompt string.
 
     It includes various constraints, commands, resources, and performance
     evaluations.
diff --git a/libs/langchain/langchain/experimental/cpal/constants.py b/libs/langchain/langchain/experimental/cpal/constants.py
index 1ab620130d..8d51af705b 100644
--- a/libs/langchain/langchain/experimental/cpal/constants.py
+++ b/libs/langchain/langchain/experimental/cpal/constants.py
@@ -2,6 +2,8 @@ from enum import Enum
 
 
 class Constant(Enum):
+    """Enum for constants used in the CPAL."""
+
     narrative_input = "narrative_input"
     chain_answer = "chain_answer"  # natural language answer
     chain_data = "chain_data"  # pydantic instance
diff --git a/libs/langchain/langchain/graphs/arangodb_graph.py b/libs/langchain/langchain/graphs/arangodb_graph.py
index 8771953d59..69b78dea39 100644
--- a/libs/langchain/langchain/graphs/arangodb_graph.py
+++ b/libs/langchain/langchain/graphs/arangodb_graph.py
@@ -136,7 +136,7 @@ def get_arangodb_client(
     username: Optional[str] = None,
     password: Optional[str] = None,
 ) -> Any:
-    """Convenience method that gets Arango DB from credentials.
+    """Get the Arango DB client from credentials.
 
     Args:
         url: Arango DB url. Can be passed in as named arg or set as environment
diff --git a/libs/langchain/langchain/prompts/base.py b/libs/langchain/langchain/prompts/base.py
index 698c552f5c..95d83256bf 100644
--- a/libs/langchain/langchain/prompts/base.py
+++ b/libs/langchain/langchain/prompts/base.py
@@ -110,7 +110,7 @@ class StringPromptValue(PromptValue):
 
 
 class StringPromptTemplate(BasePromptTemplate, ABC):
-    """String prompt should expose the format method, returning a prompt."""
+    """String prompt that exposes the format method, returning a prompt."""
 
     def format_prompt(self, **kwargs: Any) -> PromptValue:
         """Create Chat Messages."""
diff --git a/libs/langchain/langchain/retrievers/google_cloud_enterprise_search.py b/libs/langchain/langchain/retrievers/google_cloud_enterprise_search.py
index c170d42086..2acb01b363 100644
--- a/libs/langchain/langchain/retrievers/google_cloud_enterprise_search.py
+++ b/libs/langchain/langchain/retrievers/google_cloud_enterprise_search.py
@@ -18,7 +18,8 @@ if TYPE_CHECKING:
 
 
 class GoogleCloudEnterpriseSearchRetriever(BaseRetriever):
-    """Wrapper around Google Cloud Enterprise Search Service API.
+    """Retriever for the Google Cloud Enterprise Search Service API.
+
     For the detailed explanation of the Enterprise Search concepts
     and configuration parameters refer to the product documentation.
 
diff --git a/libs/langchain/langchain/retrievers/self_query/deeplake.py b/libs/langchain/langchain/retrievers/self_query/deeplake.py
index 109b5b6461..f50272b308 100644
--- a/libs/langchain/langchain/retrievers/self_query/deeplake.py
+++ b/libs/langchain/langchain/retrievers/self_query/deeplake.py
@@ -26,6 +26,7 @@ OPERATOR_TO_TQL = {
 
 
 def can_cast_to_float(string: str) -> bool:
+    """Check if a string can be cast to a float."""
     try:
         float(string)
         return True
diff --git a/libs/langchain/langchain/retrievers/web_research.py b/libs/langchain/langchain/retrievers/web_research.py
index 008df92e0b..5011289270 100644
--- a/libs/langchain/langchain/retrievers/web_research.py
+++ b/libs/langchain/langchain/retrievers/web_research.py
@@ -67,6 +67,8 @@ class QuestionListOutputParser(PydanticOutputParser):
 
 
 class WebResearchRetriever(BaseRetriever):
+    """Retriever for web research based on the Google Search API."""
+
     # Inputs
     vectorstore: VectorStore = Field(
         ..., description="Vector store for storing web pages"
diff --git a/libs/langchain/langchain/tools/amadeus/base.py b/libs/langchain/langchain/tools/amadeus/base.py
index c2df21f0c5..fc0c907e9c 100644
--- a/libs/langchain/langchain/tools/amadeus/base.py
+++ b/libs/langchain/langchain/tools/amadeus/base.py
@@ -13,4 +13,6 @@ if TYPE_CHECKING:
 
 
 class AmadeusBaseTool(BaseTool):
+    """Base Tool for Amadeus."""
+
     client: Client = Field(default_factory=authenticate)
diff --git a/libs/langchain/langchain/tools/amadeus/closest_airport.py b/libs/langchain/langchain/tools/amadeus/closest_airport.py
index de3b7cc700..bc8854b167 100644
--- a/libs/langchain/langchain/tools/amadeus/closest_airport.py
+++ b/libs/langchain/langchain/tools/amadeus/closest_airport.py
@@ -12,6 +12,8 @@ from langchain.tools.amadeus.base import AmadeusBaseTool
 
 
 class ClosestAirportSchema(BaseModel):
+    """Schema for the AmadeusClosestAirport tool."""
+
     location: str = Field(
         description=(
             " The location for which you would like to find the nearest airport "
@@ -29,6 +31,8 @@ class ClosestAirportSchema(BaseModel):
 
 
 class AmadeusClosestAirport(AmadeusBaseTool):
+    """Tool for finding the closest airport to a particular location."""
+
     name: str = "closest_airport"
     description: str = (
         "Use this tool to find the closest airport to a particular location."
diff --git a/libs/langchain/langchain/tools/amadeus/flight_search.py b/libs/langchain/langchain/tools/amadeus/flight_search.py
index b7bcd3c45a..8690d53d0c 100644
--- a/libs/langchain/langchain/tools/amadeus/flight_search.py
+++ b/libs/langchain/langchain/tools/amadeus/flight_search.py
@@ -14,6 +14,8 @@ logger = logging.getLogger(__name__)
 
 
 class FlightSearchSchema(BaseModel):
+    """Schema for the AmadeusFlightSearch tool."""
+
     originLocationCode: str = Field(
         description=(
             " The three letter International Air Transport "
@@ -53,6 +55,8 @@ class FlightSearchSchema(BaseModel):
 
 
 class AmadeusFlightSearch(AmadeusBaseTool):
+    """Tool for searching for a single flight between two airports."""
+
     name: str = "single_flight_search"
     description: str = (
         " Use this tool to search for a single flight between the origin and "
diff --git a/libs/langchain/langchain/tools/github/tool.py b/libs/langchain/langchain/tools/github/tool.py
index 8d3ca9ce65..92706d6aad 100644
--- a/libs/langchain/langchain/tools/github/tool.py
+++ b/libs/langchain/langchain/tools/github/tool.py
@@ -20,6 +20,8 @@ from langchain.utilities.github import GitHubAPIWrapper
 
 
 class GitHubAction(BaseTool):
+    """Tool for interacting with the GitHub API."""
+
     api_wrapper: GitHubAPIWrapper = Field(default_factory=GitHubAPIWrapper)
     mode: str
     name = ""
diff --git a/libs/langchain/langchain/utils/strings.py b/libs/langchain/langchain/utils/strings.py
index 24741257ac..3e866f059f 100644
--- a/libs/langchain/langchain/utils/strings.py
+++ b/libs/langchain/langchain/utils/strings.py
@@ -36,4 +36,5 @@ def stringify_dict(data: dict) -> str:
 
 
 def comma_list(items: List[Any]) -> str:
+    """Convert a list to a comma-separated string."""
     return ", ".join(str(item) for item in items)