mirror of
https://github.com/hwchase17/langchain
synced 2024-11-08 07:10:35 +00:00
Update cohere.py (#795)
When stop tokens are set in the Cohere LLM constructor, they are currently not stripped from the response; Cohere's endpoint returns them, so they should be stripped to keep the output consistent with other LLM endpoints.
This commit is contained in:
parent
cacf4091c0
commit
ae5695ad32
@ -122,6 +122,6 @@ class Cohere(LLM, BaseModel):
|
|||||||
text = response.generations[0].text
|
text = response.generations[0].text
|
||||||
# If stop tokens are provided, Cohere's endpoint returns them.
|
# If stop tokens are provided, Cohere's endpoint returns them.
|
||||||
# In order to make this consistent with other endpoints, we strip them.
|
# In order to make this consistent with other endpoints, we strip them.
|
||||||
if stop is not None:
|
if stop is not None or self.stop is not None:
|
||||||
text = enforce_stop_tokens(text, stop)
|
text = enforce_stop_tokens(text, params["stop_sequences"])
|
||||||
return text
|
return text
|
||||||
|
Loading…
Reference in New Issue
Block a user