forked from Archives/langchain
Add replicate take 2 (#2077)
This PR adds a replicate integration to langchain. It's an updated version of https://github.com/hwchase17/langchain/pull/1993, but with updates to match latest replicate-python code. https://github.com/replicate/replicate-python. --------- Co-authored-by: Harrison Chase <hw.chase.17@gmail.com> Co-authored-by: Zeke Sikelianos <zeke@sikelianos.com>
parent
a9e1043673
commit
f16c1fb6df
@ -0,0 +1,47 @@
|
|||||||
|
# Replicate
|
||||||
|
This page covers how to run models on Replicate within LangChain.
|
||||||
|
|
||||||
|
## Installation and Setup
|
||||||
|
- Create a [Replicate](https://replicate.com) account. Get your API key and set it as an environment variable (`REPLICATE_API_TOKEN`)
|
||||||
|
- Install the [Replicate python client](https://github.com/replicate/replicate-python) with `pip install replicate`
|
||||||
|
|
||||||
|
## Calling a model
|
||||||
|
|
||||||
|
Find a model on the [Replicate explore page](https://replicate.com/explore), and then paste in the model name and version in this format: `owner-name/model-name:version`
|
||||||
|
|
||||||
|
For example, for this [flan-t5 model](https://replicate.com/daanelson/flan-t5), click on the API tab. The model name/version would be: `daanelson/flan-t5:04e422a9b85baed86a4f24981d7f9953e20c5fd82f6103b74ebc431588e1cec8`
|
||||||
|
|
||||||
|
Only the `model` param is required, but any other model parameters can also be passed in with the format `input={model_param: value, ...}`
|
||||||
|
|
||||||
|
|
||||||
|
For example, if we were running stable diffusion and wanted to change the image dimensions:
|
||||||
|
|
||||||
|
```python
|
||||||
|
Replicate(model="stability-ai/stable-diffusion:db21e45d3f7023abc2a46ee38a23973f6dce16bb082a930b0c49861f96d1e5bf", input={'image_dimensions': '512x512'})
|
||||||
|
```
|
||||||
|
|
||||||
|
*Note that only the first output of a model will be returned.*
|
||||||
|
From here, we can initialize our model:
|
||||||
|
|
||||||
|
```python
|
||||||
|
llm = Replicate(model="daanelson/flan-t5:04e422a9b85baed86a4f24981d7f9953e20c5fd82f6103b74ebc431588e1cec8")
|
||||||
|
```
|
||||||
|
|
||||||
|
And run it:
|
||||||
|
|
||||||
|
```python
|
||||||
|
prompt = """
|
||||||
|
Answer the following yes/no question by reasoning step by step.
|
||||||
|
Can a dog drive a car?
|
||||||
|
"""
|
||||||
|
llm(prompt)
|
||||||
|
```
|
||||||
|
|
||||||
|
We can call any Replicate model (not just LLMs) using this syntax. For example, we can call [Stable Diffusion](https://replicate.com/stability-ai/stable-diffusion):
|
||||||
|
|
||||||
|
```python
|
||||||
|
text2image = Replicate(model="stability-ai/stable-diffusion:db21e45d3f7023abc2a46ee38a23973f6dce16bb082a930b0c49861f96d1e5bf",
                       input={'image_dimensions': '512x512'})

image_output = text2image("A cat riding a motorcycle by Picasso")
|
||||||
|
```
|
File diff suppressed because one or more lines are too long
@ -0,0 +1,108 @@
|
|||||||
|
"""Wrapper around Replicate API."""
|
||||||
|
import logging
|
||||||
|
from typing import Any, Dict, List, Mapping, Optional
|
||||||
|
|
||||||
|
from pydantic import BaseModel, Extra, Field, root_validator
|
||||||
|
|
||||||
|
from langchain.llms.base import LLM
|
||||||
|
from langchain.utils import get_from_dict_or_env
|
||||||
|
|
||||||
|
logger = logging.getLogger(__name__)
|
||||||
|
|
||||||
|
|
||||||
|
class Replicate(LLM, BaseModel):
    """Wrapper around Replicate models.

    To use, you should have the ``replicate`` python package installed,
    and the environment variable ``REPLICATE_API_TOKEN`` set with your API token.
    You can find your token here: https://replicate.com/account

    The model param is required, but any other model parameters can also
    be passed in with the format input={model_param: value, ...}

    Example:
        .. code-block:: python

            from langchain.llms import Replicate
            replicate = Replicate(
                model="stability-ai/stable-diffusion:"
                "27b93a2413e7f36cd83da926f3656280b2931564ff050bf9575f1fdf9bcd7478",
                input={"image_dimensions": "512x512"},
            )
    """

    # Required, in the form "owner-name/model-name:version".
    model: str
    # Extra model inputs forwarded on every call, e.g. {"image_dimensions": "512x512"}.
    input: Dict[str, Any] = Field(default_factory=dict)
    # Catch-all for parameters supplied directly to the constructor;
    # populated by build_extra below.
    model_kwargs: Dict[str, Any] = Field(default_factory=dict)
    replicate_api_token: Optional[str] = None

    class Config:
        """Configuration for this pydantic config."""

        extra = Extra.forbid

    @root_validator(pre=True)
    def build_extra(cls, values: Dict[str, Any]) -> Dict[str, Any]:
        """Build extra kwargs from additional params that were passed in."""
        all_required_field_names = {field.alias for field in cls.__fields__.values()}

        extra = values.get("model_kwargs", {})
        for field_name in list(values):
            if field_name not in all_required_field_names:
                if field_name in extra:
                    raise ValueError(f"Found {field_name} supplied twice.")
                logger.warning(
                    f"""{field_name} was transferred to model_kwargs.
                    Please confirm that {field_name} is what you intended."""
                )
                extra[field_name] = values.pop(field_name)
        values["model_kwargs"] = extra
        return values

    @root_validator()
    def validate_environment(cls, values: Dict) -> Dict:
        """Validate that api key and python package exists in environment."""
        # Look up the pydantic field name ("replicate_api_token") in the
        # constructor kwargs first, then fall back to the REPLICATE_API_TOKEN
        # environment variable.  Using the env-var name as the dict key would
        # make an explicitly-passed token invisible here.
        replicate_api_token = get_from_dict_or_env(
            values, "replicate_api_token", "REPLICATE_API_TOKEN"
        )
        values["replicate_api_token"] = replicate_api_token
        return values

    @property
    def _identifying_params(self) -> Mapping[str, Any]:
        """Get the identifying parameters."""
        # Include the model name so two wrappers over different models are
        # distinguishable, matching the other LLM integrations.
        return {
            "model": self.model,
            **{"model_kwargs": self.model_kwargs},
        }

    @property
    def _llm_type(self) -> str:
        """Return type of model."""
        return "replicate"

    def _call(self, prompt: str, stop: Optional[List[str]] = None) -> str:
        """Call to replicate endpoint.

        Note that only the first output of the model is returned.
        """
        try:
            import replicate as replicate_python
        except ImportError:
            raise ValueError(
                "Could not import replicate python package. "
                "Please install it with `pip install replicate`."
            )

        # get the model and version
        model_str, version_str = self.model.split(":")
        model = replicate_python.models.get(model_str)
        version = model.versions.get(version_str)

        # sort through the openapi schema to get the name of the first input;
        # the prompt is sent to the model under that key
        input_properties = sorted(
            version.openapi_schema["components"]["schemas"]["Input"][
                "properties"
            ].items(),
            key=lambda item: item[1].get("x-order", 0),
        )
        first_input_name = input_properties[0][0]

        # Merge the prompt with user-supplied inputs.  model_kwargs collected
        # by build_extra would otherwise be silently dropped.
        inputs = {first_input_name: prompt, **self.input, **self.model_kwargs}

        outputs = replicate_python.run(self.model, input=inputs)
        return outputs[0]
|
@ -0,0 +1,10 @@
|
|||||||
|
"""Test Replicate API wrapper."""
|
||||||
|
|
||||||
|
from langchain.llms.replicate import Replicate
|
||||||
|
|
||||||
|
|
||||||
|
def test_replicate_call() -> None:
    """Test valid call to Replicate.

    Integration test: requires the ``replicate`` package and a
    ``REPLICATE_API_TOKEN`` environment variable.
    """
    # `model` is a required field on the wrapper; Replicate() without it
    # raises a pydantic ValidationError before the API is ever reached.
    llm = Replicate(
        model="daanelson/flan-t5:"
        "04e422a9b85baed86a4f24981d7f9953e20c5fd82f6103b74ebc431588e1cec8"
    )
    output = llm("Say foo:")
    assert isinstance(output, str)
|
Loading…
Reference in New Issue