Add Anthropic default request timeout (#3540)

thanks @hitflame!

---------

Co-authored-by: Wenqiang Zhao <hitzhaowenqiang@sina.com>
Co-authored-by: delta@com <delta@com>
Davis Chase committed via GitHub
parent b49ee372f1
commit d18b0caf0e

@@ -1,6 +1,6 @@
 """Wrapper around Anthropic APIs."""
 import re
-from typing import Any, Callable, Dict, Generator, List, Mapping, Optional
+from typing import Any, Callable, Dict, Generator, List, Mapping, Optional, Tuple, Union
 
 from pydantic import BaseModel, Extra, root_validator
 
@@ -28,6 +28,9 @@ class _AnthropicCommon(BaseModel):
     streaming: bool = False
     """Whether to stream the results."""
 
+    default_request_timeout: Optional[Union[float, Tuple[float, float]]] = None
+    """Timeout for requests to Anthropic Completion API. Default is 600 seconds."""
+
     anthropic_api_key: Optional[str] = None
 
     HUMAN_PROMPT: Optional[str] = None
 
@@ -43,7 +46,10 @@ class _AnthropicCommon(BaseModel):
         try:
             import anthropic
 
-            values["client"] = anthropic.Client(anthropic_api_key)
+            values["client"] = anthropic.Client(
+                api_key=anthropic_api_key,
+                default_request_timeout=values["default_request_timeout"],
+            )
             values["HUMAN_PROMPT"] = anthropic.HUMAN_PROMPT
             values["AI_PROMPT"] = anthropic.AI_PROMPT
             values["count_tokens"] = anthropic.count_tokens

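For anyone picking this up, a minimal usage sketch of the new option. The model name and prompt below are illustrative placeholders rather than part of this change, and the tuple form is assumed to map to the underlying HTTP client's (connect, read) timeouts.

from langchain.llms import Anthropic

# Give up on a single completion request after 30 seconds instead of
# waiting for the client's 600-second default.
llm = Anthropic(model="claude-v1", default_request_timeout=30.0)
print(llm("Say foo:"))

# A (connect, read) pair is also accepted, matching the
# Union[float, Tuple[float, float]] annotation on the new field.
llm = Anthropic(model="claude-v1", default_request_timeout=(5.0, 30.0))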
@@ -11,14 +11,14 @@ from tests.unit_tests.callbacks.fake_callback_handler import FakeCallbackHandler
 
 
 def test_anthropic_call() -> None:
     """Test valid call to anthropic."""
-    llm = Anthropic(model="bare-nano-0")
+    llm = Anthropic(model="test")
     output = llm("Say foo:")
     assert isinstance(output, str)
 
 
 def test_anthropic_streaming() -> None:
     """Test streaming tokens from anthropic."""
-    llm = Anthropic(model="bare-nano-0")
+    llm = Anthropic(model="test")
     generator = llm.stream("I'm Pickle Rick")
     assert isinstance(generator, Generator)

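The touched tests only swap the model name. A dedicated check for the new field could look roughly like the sketch below; it is not part of this commit, and the test name and timeout value are assumptions.

def test_anthropic_default_request_timeout() -> None:
    """Sketch: the timeout passed at construction should round-trip onto the wrapper."""
    llm = Anthropic(model="test", default_request_timeout=2.0)
    assert llm.default_request_timeout == 2.0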