mirror of https://github.com/hwchase17/langchain
synced 2024-11-08 07:10:35 +00:00

docstrings: docstore and client (#6783)

updated docstrings in `docstore/` and `client/` @baskaryan

This commit is contained in:
parent f5bd88757e
commit 560c4dfc98
@@ -1,9 +1,16 @@
 """LangChain + Client."""
 from langchain.client.runner_utils import (
+    InputFormatError,
     arun_on_dataset,
     arun_on_examples,
     run_on_dataset,
     run_on_examples,
 )
 
-__all__ = ["arun_on_dataset", "run_on_dataset", "arun_on_examples", "run_on_examples"]
+__all__ = [
+    "InputFormatError",
+    "arun_on_dataset",
+    "run_on_dataset",
+    "arun_on_examples",
+    "run_on_examples",
+]
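Note: with the import and the expanded `__all__` above, `InputFormatError` can be caught directly from `langchain.client` instead of `langchain.client.runner_utils`. A minimal sketch (the error message is illustrative, not the library's wording):

    from langchain.client import InputFormatError

    try:
        # The runner raises this when an example's inputs cannot be mapped to
        # prompts or chat messages; raised here only to demonstrate catching it.
        raise InputFormatError("example inputs did not match the expected format")
    except InputFormatError as err:
        print(f"bad example inputs: {err}")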
@@ -51,8 +51,7 @@ class InputFormatError(Exception):
 
 
 def _get_prompts(inputs: Dict[str, Any]) -> List[str]:
-    """
-    Get prompts from inputs.
+    """Get prompts from inputs.
 
     Args:
         inputs: The input dictionary.
@@ -99,8 +98,7 @@ def _get_prompts(inputs: Dict[str, Any]) -> List[str]:
 
 
 def _get_messages(inputs: Dict[str, Any]) -> List[List[BaseMessage]]:
-    """
-    Get Chat Messages from inputs.
+    """Get Chat Messages from inputs.
 
     Args:
         inputs: The input dictionary.
@@ -143,8 +141,7 @@ async def _arun_llm(
     callbacks: Callbacks = None,
     input_mapper: Optional[Callable[[Dict], Any]] = None,
 ) -> Union[LLMResult, ChatResult]:
-    """
-    Asynchronously run the language model.
+    """Asynchronously run the language model.
 
     Args:
         llm: The language model to run.
@@ -203,8 +200,7 @@ async def _arun_llm_or_chain(
     callbacks: Optional[List[BaseCallbackHandler]] = None,
     input_mapper: Optional[Callable[[Dict], Any]] = None,
 ) -> Union[List[dict], List[str], List[LLMResult], List[ChatResult]]:
-    """
-    Asynchronously run the Chain or language model.
+    """Asynchronously run the Chain or language model.
 
     Args:
         example: The example to run.
@@ -264,8 +260,7 @@ async def _gather_with_concurrency(
         [Sequence[BaseCallbackHandler], Dict], Coroutine[Any, Any, Any]
     ],
 ) -> List[Any]:
-    """
-    Run coroutines with a concurrency limit.
+    """Run coroutines with a concurrency limit.
 
     Args:
         n: The maximum number of concurrent tasks.
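Note: `_gather_with_concurrency` is internal to the runner; the following is a generic, self-contained sketch of the semaphore pattern a helper like this typically uses, not LangChain's actual implementation:

    import asyncio
    from typing import Any, Coroutine, List

    async def gather_with_concurrency(n: int, *coros: Coroutine[Any, Any, Any]) -> List[Any]:
        # Let at most n coroutines run at the same time.
        semaphore = asyncio.Semaphore(n)

        async def limited(coro: Coroutine[Any, Any, Any]) -> Any:
            async with semaphore:
                return await coro

        return await asyncio.gather(*(limited(c) for c in coros))

    async def demo() -> None:
        async def work(i: int) -> int:
            await asyncio.sleep(0.01)
            return i * i

        print(await gather_with_concurrency(2, *(work(i) for i in range(5))))

    asyncio.run(demo())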
@@ -503,7 +498,8 @@ def run_llm_or_chain(
         callbacks: Optional callbacks to use during the run.
 
     Returns:
-        A list of outputs.
+        Union[List[dict], List[str], List[LLMResult], List[ChatResult]]:
+            The outputs of the model or chain.
     """
     if callbacks:
         previous_example_ids = [
@@ -670,8 +666,8 @@ async def arun_on_dataset(
         project_name: Name of the project to store the traces in.
             Defaults to {dataset_name}-{chain class name}-{datetime}.
         verbose: Whether to print progress.
-        client: Client to use to read the dataset. If not provided, a new
-            client will be created using the credentials in the environment.
+        client: Client to use to read the dataset. If not provided,
+            a new client will be created using the credentials in the environment.
         tags: Tags to add to each run in the project.
         run_evaluators: Evaluators to run on the results of the chain.
         input_mapper: A function to map to the inputs dictionary from an Example
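Note: a hedged sketch of how the arguments documented above fit together for `arun_on_dataset`; the dataset name, factory body, and mapper keys are hypothetical:

    import asyncio

    from langchain.client import arun_on_dataset

    def make_chain():
        # Hypothetical factory: the runner builds a fresh chain per example so
        # state does not carry over between runs.
        raise NotImplementedError("construct your chain here")

    def map_example_inputs(inputs: dict) -> dict:
        # input_mapper turns an Example's inputs dict into whatever the chain
        # expects; the key names here are purely illustrative.
        return {"input": inputs.get("question", "")}

    async def main() -> None:
        # Assumed call shape, taken from the Args in this docstring.
        await arun_on_dataset(
            "my-dataset",                    # hypothetical dataset name
            make_chain,
            project_name="my-dataset-eval",  # default: {dataset_name}-{chain class name}-{datetime}
            tags=["docstring-demo"],
            input_mapper=map_example_inputs,
        )

    # Requires a real dataset and tracing credentials in the environment:
    # asyncio.run(main())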
@@ -725,15 +721,14 @@ def run_on_dataset(
         llm_or_chain_factory: Language model or Chain constructor to run
             over the dataset. The Chain constructor is used to permit
             independent calls on each example without carrying over state.
-        concurrency_level: Number of async workers to run in parallel.
         num_repetitions: Number of times to run the model on each example.
             This is useful when testing success rates or generating confidence
             intervals.
         project_name: Name of the project to store the traces in.
             Defaults to {dataset_name}-{chain class name}-{datetime}.
         verbose: Whether to print progress.
-        client: Client to use to access the dataset. If None, a new client
-            will be created using the credentials in the environment.
+        client: Client to use to access the dataset. If None,
+            a new client will be created using the credentials in the environment.
         tags: Tags to add to each run in the project.
         run_evaluators: Evaluators to run on the results of the chain.
         input_mapper: A function to map to the inputs dictionary from an Example
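Note: the synchronous `run_on_dataset` documents the same knobs minus the async concurrency setting; a hedged call-site sketch with every concrete value made up:

    from langchain.client import run_on_dataset

    def make_chain():
        # Hypothetical factory so each example is evaluated by a fresh chain.
        raise NotImplementedError("construct your chain here")

    # Assumed call shape based on the Args above; with no `client` argument a new
    # one is created from credentials in the environment.
    # run_on_dataset(
    #     "my-dataset",           # hypothetical dataset name
    #     make_chain,
    #     num_repetitions=3,      # repeat examples, e.g. to estimate success rates
    #     verbose=True,
    #     tags=["docstring-demo"],
    # )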
@@ -5,8 +5,7 @@ from langchain.schema import Document
 
 
 class DocstoreFn(Docstore):
-    """
-    Langchain Docstore via arbitrary lookup function.
+    """Langchain Docstore via arbitrary lookup function.
 
     This is useful when:
     * it's expensive to construct an InMemoryDocstore/dict
@@ -21,6 +20,14 @@ class DocstoreFn(Docstore):
         self._lookup_fn = lookup_fn
 
     def search(self, search: str) -> Document:
+        """Search for a document.
+
+        Args:
+            search: search string
+
+        Returns:
+            Document if found, else error message.
+        """
         r = self._lookup_fn(search)
         if isinstance(r, str):
             # NOTE: assume the search string is the source ID
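Note: the newly documented `search` simply defers to the lookup function given at construction time. A self-contained sketch; the module path is assumed from the `docstore/` layout this commit touches:

    from langchain.docstore.arbitrary_fn import DocstoreFn
    from langchain.schema import Document

    # Any callable mapping a search string to a Document (or a plain string)
    # works, e.g. a dict lookup standing in for an expensive external store.
    pages = {"doc-1": Document(page_content="hello world", metadata={"source": "doc-1"})}

    store = DocstoreFn(lookup_fn=lambda key: pages[key])
    # Assuming search returns the Document unchanged when the lookup yields one:
    print(store.search("doc-1").page_content)  # -> "hello world"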
@@ -13,14 +13,28 @@ class InMemoryDocstore(Docstore, AddableMixin):
         self._dict = _dict if _dict is not None else {}
 
     def add(self, texts: Dict[str, Document]) -> None:
-        """Add texts to in memory dictionary."""
+        """Add texts to in memory dictionary.
+
+        Args:
+            texts: dictionary of id -> document.
+
+        Returns:
+            None
+        """
         overlapping = set(texts).intersection(self._dict)
         if overlapping:
             raise ValueError(f"Tried to add ids that already exist: {overlapping}")
         self._dict = {**self._dict, **texts}
 
     def search(self, search: str) -> Union[str, Document]:
-        """Search via direct lookup."""
+        """Search via direct lookup.
+
+        Args:
+            search: id of a document to search for.
+
+        Returns:
+            Document if found, else error message.
+        """
         if search not in self._dict:
             return f"ID {search} not found."
         else:
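Note: the expanded docstrings match the behaviour visible in the surrounding lines: `add` rejects ids that already exist and `search` returns either the stored Document or an error string. A short sketch, assuming the usual import path:

    from langchain.docstore.in_memory import InMemoryDocstore
    from langchain.schema import Document

    store = InMemoryDocstore({"1": Document(page_content="first")})
    store.add({"2": Document(page_content="second")})   # new ids are merged in
    # store.add({"1": Document(page_content="dupe")})   # raises ValueError: ids already exist

    print(store.search("2"))        # -> the stored Document
    print(store.search("missing"))  # -> "ID missing not found."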
@@ -25,6 +25,11 @@ class Wikipedia(Docstore):
 
         If page exists, return the page summary, and a PageWithLookups object.
         If page does not exist, return similar entries.
+
+        Args:
+            search: search string.
+
+        Returns: a Document object or error message.
         """
         import wikipedia
 
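Note: `Wikipedia.search` wraps the third-party `wikipedia` package (imported lazily in the body above). A hedged usage sketch, assuming the usual module path and `pip install wikipedia`:

    from langchain.docstore.wikipedia import Wikipedia

    docstore = Wikipedia()
    result = docstore.search("Alan Turing")
    # Per the docstring: a page-backed Document on a hit, otherwise a string
    # listing similar entries.
    print(result)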