langchain/langchain/llms/cohere.py


"""Wrapper around Cohere APIs."""
import os
from typing import Any, Dict, List, Optional
from pydantic import BaseModel, Extra, root_validator
from langchain.llms.base import LLM, CompletionOutput
from langchain.llms.utils import enforce_stop_tokens
class Cohere(BaseModel, LLM):
"""Wrapper around Cohere large language models.
To use, you should have the ``cohere`` python package installed, and the
environment variable ``COHERE_API_KEY`` set with your API key.
Example:
.. code-block:: python
from langchain import Cohere
cohere = Cohere(model="small")
"""
    client: Any  #: :meta private:

    model: str = "small"
    """Model name to use."""

    max_tokens: int = 256
    """Denotes the number of tokens to predict per generation."""

    temperature: float = 0.75
    """A non-negative float that tunes the degree of randomness in generation."""

    k: int = 0
    """Number of most likely tokens to consider at each step."""

    p: int = 1
    """Total probability mass of tokens to consider at each step."""

    frequency_penalty: int = 0
    """Penalizes repeated tokens according to frequency."""

    presence_penalty: int = 0
    """Penalizes repeated tokens."""

    num_generations: int = 1
    """Number of generations to return."""

    return_likelihoods: bool = True
    """Whether to return the likelihoods of the generated tokens."""

    class Config:
        """Configuration for this pydantic object."""

        extra = Extra.forbid
    @root_validator()
    def validate_environment(cls, values: Dict) -> Dict:
        """Validate that the api key and python package exist in the environment."""
        if "COHERE_API_KEY" not in os.environ:
            raise ValueError(
                "Did not find Cohere API key, please add an environment variable"
                " `COHERE_API_KEY` which contains it."
            )
        try:
            import cohere

            values["client"] = cohere.Client(os.environ["COHERE_API_KEY"])
        except ImportError:
            raise ValueError(
                "Could not import cohere python package. "
                "Please install it with `pip install cohere`."
            )
        return values
    def generate(
        self, prompt: str, stop: Optional[List[str]] = None
    ) -> List[CompletionOutput]:
        """Call out to Cohere's generate endpoint.

        Args:
            prompt: The prompt to pass into the model.
            stop: Optional list of stop words to use when generating.

        Returns:
            The list of completions generated by the model.

        Example:
            .. code-block:: python

                response = cohere("Tell me a joke.")
        """
        response = self.client.generate(
            model=self.model,
            prompt=prompt,
            max_tokens=self.max_tokens,
            temperature=self.temperature,
            k=self.k,
            p=self.p,
            frequency_penalty=self.frequency_penalty,
            presence_penalty=self.presence_penalty,
            stop_sequences=stop,
            num_generations=self.num_generations,
            return_likelihoods="GENERATION" if self.return_likelihoods else None,
        )
        results = []
        for generation in response.generations:
            txt = generation.text
            if stop is not None:
                # If stop tokens are provided, Cohere's endpoint returns them.
                # In order to make this consistent with other endpoints, we strip them.
                txt = enforce_stop_tokens(txt, stop)
            N = len(generation.token_likelihoods)
            logprobs = [token.likelihood / N for token in generation.token_likelihoods]
            results.append(
                CompletionOutput(
                    text=txt,
                    logprobs=logprobs,
                )
            )
        return results
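

# A minimal usage sketch, not part of the original module: it assumes the
# ``cohere`` package is installed and ``COHERE_API_KEY`` is set in the
# environment, and it only touches fields and methods defined above.
if __name__ == "__main__":
    llm = Cohere(model="small", max_tokens=32)
    completions = llm.generate("Tell me a joke.", stop=["\n"])
    for completion in completions:
        # Each CompletionOutput carries the generated text and the scaled
        # per-token likelihoods computed in ``generate`` above.
        print(completion.text)
        print(completion.logprobs)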