Update cohere.py (#795)

When stop tokens are set in the Cohere LLM constructor, they are currently
not stripped from the response; this commit strips them so behavior matches other endpoints.
This commit is contained in:
bair82 2023-01-30 23:55:44 +01:00 committed by GitHub
parent cacf4091c0
commit ae5695ad32
No known key found for this signature in database
GPG Key ID: 4AEE18F83AFDEB23

View File

@ -122,6 +122,6 @@ class Cohere(LLM, BaseModel):
         text = response.generations[0].text
         # If stop tokens are provided, Cohere's endpoint returns them.
         # In order to make this consistent with other endpoints, we strip them.
-        if stop is not None:
-            text = enforce_stop_tokens(text, stop)
+        if stop is not None or self.stop is not None:
+            text = enforce_stop_tokens(text, params["stop_sequences"])
         return text