delete the default model value from langchain and discard the need fo… (#24915)

- description: I remove the requirement that `QIANFAN_AK` must exist and drop the
default model name that langchain hard-codes, because the underlying `qianfan`
SDK powering the langchain component already supplies a default model name of
its own.

---------

Co-authored-by: Chester Curme <chester.curme@gmail.com>
This commit is contained in:
Dobiichi-Origami 2024-08-06 22:11:05 +08:00 committed by GitHub
parent 293a4a78de
commit 061ed250f6
No known key found for this signature in database
GPG Key ID: B5690EEEBB952194
4 changed files with 15 additions and 12 deletions

View File

@ -346,7 +346,9 @@ class QianfanChatEndpoint(BaseChatModel):
client: Any #: :meta private:
qianfan_ak: SecretStr = Field(alias="api_key")
# It could be empty due to the use of Console API
    # And they're not listed here
qianfan_ak: Optional[SecretStr] = Field(default=None, alias="api_key")
"""Qianfan API KEY"""
qianfan_sk: Optional[SecretStr] = Field(default=None, alias="secret_key")
"""Qianfan SECRET KEY"""
@ -365,13 +367,13 @@ class QianfanChatEndpoint(BaseChatModel):
In the case of other model, passing these params will not affect the result.
"""
model: str = "ERNIE-Lite-8K"
model: Optional[str] = Field(default=None)
"""Model name.
you could get from https://cloud.baidu.com/doc/WENXINWORKSHOP/s/Nlks5zkzu
preset models are mapping to an endpoint.
`model` will be ignored if `endpoint` is set.
Default is ERNIE-Lite-8K.
Default is set by `qianfan` SDK, not here
"""
endpoint: Optional[str] = None
@ -386,16 +388,12 @@ class QianfanChatEndpoint(BaseChatModel):
def validate_environment(cls, values: Dict) -> Dict:
values["qianfan_ak"] = convert_to_secret_str(
get_from_dict_or_env(
values,
["qianfan_ak", "api_key"],
"QIANFAN_AK",
values, ["qianfan_ak", "api_key"], "QIANFAN_AK", default=""
)
)
values["qianfan_sk"] = convert_to_secret_str(
get_from_dict_or_env(
values,
["qianfan_sk", "secret_key"],
"QIANFAN_SK",
values, ["qianfan_sk", "secret_key"], "QIANFAN_SK", default=""
)
)

View File

@ -55,7 +55,7 @@ class QianfanEmbeddingsEndpoint(BaseModel, Embeddings):
chunk_size: int = 16
"""Chunk size when multiple texts are input"""
model: str = "Embedding-V1"
model: Optional[str] = Field(default=None)
"""Model name
you could get from https://cloud.baidu.com/doc/WENXINWORKSHOP/s/Nlks5zkzu

View File

@ -55,12 +55,14 @@ class QianfanLLMEndpoint(LLM):
streaming: Optional[bool] = False
"""Whether to stream the results or not."""
model: str = "ERNIE-Bot-turbo"
model: Optional[str] = Field(default=None)
"""Model name.
you could get from https://cloud.baidu.com/doc/WENXINWORKSHOP/s/Nlks5zkzu
preset models are mapping to an endpoint.
`model` will be ignored if `endpoint` is set
Default is set by `qianfan` SDK, not here
"""
endpoint: Optional[str] = None

View File

@ -306,7 +306,10 @@ def test_functions_call() -> None:
def test_rate_limit() -> None:
chat = QianfanChatEndpoint(model="ERNIE-Bot", init_kwargs={"query_per_second": 2}) # type: ignore[call-arg]
assert chat.client._client._rate_limiter._sync_limiter._query_per_second == 2
assert (
chat.client._client._rate_limiter._internal_qps_rate_limiter._sync_limiter._query_per_second
== 1.8
)
responses = chat.batch(
[
[HumanMessage(content="Hello")],