mirror of https://github.com/hwchase17/langchain
HuggingFaceTextGenInference bug fix: Multiple values for keyword argument (#8044)
Fixed the bug causing: `TypeError: generate() got multiple values for keyword argument 'stop_sequences'` ```python res = await self.async_client.generate( prompt, **self._default_params, stop_sequences=stop, **kwargs, ) ``` The above throws an error because `stop_sequences` is also in `self._default_params`. --------- Co-authored-by: Bagatur <baskaryan@gmail.com>pull/7880/head
parent
ed6a5532ac
commit
ebc5ff2948
@ -0,0 +1,19 @@
|
||||
from langchain import HuggingFaceTextGenInference
|
||||
|
||||
|
||||
def test_invocation_params_stop_sequences() -> None:
    """Verify that runtime stop sequences are merged into the invocation
    params without mutating the model's default ``stop_sequences``."""
    llm = HuggingFaceTextGenInference()
    assert llm._default_params["stop_sequences"] == []

    # No runtime stop list: defaults pass through unchanged.
    assert llm._invocation_params(None)["stop_sequences"] == []
    assert llm._default_params["stop_sequences"] == []

    # Runtime stop list only: it becomes the effective stop list,
    # and the defaults are still untouched afterwards.
    assert llm._invocation_params(["stop"])["stop_sequences"] == ["stop"]
    assert llm._default_params["stop_sequences"] == []

    # Defaults plus runtime stop list: concatenated in order,
    # with the configured defaults left intact.
    llm = HuggingFaceTextGenInference(stop_sequences=["."])
    assert llm._invocation_params(["stop"])["stop_sequences"] == [".", "stop"]
    assert llm._default_params["stop_sequences"] == ["."]
|
Loading…
Reference in New Issue