From d18b0caf0e00414e066c9903c8df72bb5bcf9998 Mon Sep 17 00:00:00 2001
From: Davis Chase <130488702+dev2049@users.noreply.github.com>
Date: Tue, 25 Apr 2023 11:40:41 -0700
Subject: [PATCH] Add Anthropic default request timeout (#3540)

thanks @hitflame!

---------

Co-authored-by: Wenqiang Zhao
Co-authored-by: delta@com
---
 langchain/llms/anthropic.py                    | 10 ++++++++--
 tests/integration_tests/llms/test_anthropic.py |  4 ++--
 2 files changed, 10 insertions(+), 4 deletions(-)

diff --git a/langchain/llms/anthropic.py b/langchain/llms/anthropic.py
index e609627967..1301b3d794 100644
--- a/langchain/llms/anthropic.py
+++ b/langchain/llms/anthropic.py
@@ -1,6 +1,6 @@
 """Wrapper around Anthropic APIs."""
 import re
-from typing import Any, Callable, Dict, Generator, List, Mapping, Optional
+from typing import Any, Callable, Dict, Generator, List, Mapping, Optional, Tuple, Union
 
 from pydantic import BaseModel, Extra, root_validator
 
@@ -28,6 +28,9 @@ class _AnthropicCommon(BaseModel):
     streaming: bool = False
     """Whether to stream the results."""
 
+    default_request_timeout: Optional[Union[float, Tuple[float, float]]] = None
+    """Timeout for requests to Anthropic Completion API. Default is 600 seconds."""
+
     anthropic_api_key: Optional[str] = None
 
     HUMAN_PROMPT: Optional[str] = None
@@ -43,7 +46,10 @@ class _AnthropicCommon(BaseModel):
         try:
             import anthropic
 
-            values["client"] = anthropic.Client(anthropic_api_key)
+            values["client"] = anthropic.Client(
+                api_key=anthropic_api_key,
+                default_request_timeout=values["default_request_timeout"],
+            )
             values["HUMAN_PROMPT"] = anthropic.HUMAN_PROMPT
             values["AI_PROMPT"] = anthropic.AI_PROMPT
             values["count_tokens"] = anthropic.count_tokens
diff --git a/tests/integration_tests/llms/test_anthropic.py b/tests/integration_tests/llms/test_anthropic.py
index 8c7717cfc7..2e81f2970d 100644
--- a/tests/integration_tests/llms/test_anthropic.py
+++ b/tests/integration_tests/llms/test_anthropic.py
@@ -11,14 +11,14 @@ from tests.unit_tests.callbacks.fake_callback_handler import FakeCallbackHandler
 
 
 def test_anthropic_call() -> None:
     """Test valid call to anthropic."""
-    llm = Anthropic(model="bare-nano-0")
+    llm = Anthropic(model="test")
     output = llm("Say foo:")
     assert isinstance(output, str)
 
 
 def test_anthropic_streaming() -> None:
     """Test streaming tokens from anthropic."""
-    llm = Anthropic(model="bare-nano-0")
+    llm = Anthropic(model="test")
     generator = llm.stream("I'm Pickle Rick")
     assert isinstance(generator, Generator)