Compare commits


7 Commits

| Author | SHA1 | Message | Date |
| --- | --- | --- | --- |
| Harrison Chase | 37ee1b76ae | Merge branch 'master' into harrison/replicate | 2023-03-27 19:53:26 -07:00 |
| Harrison Chase | 6a4fa37fe1 | Merge branch 'cbh123-add-replicate' into harrison/replicate | 2023-03-27 18:54:12 -07:00 |
| Harrison Chase | da04e26d12 | stash | 2023-03-27 18:54:03 -07:00 |
| Charlie Holtz | 6ff3b473c4 | fix typo | 2023-03-27 12:30:08 -07:00 |
| Charlie Holtz | 9eefc205e9 | merge in latest integrations.rst changes | 2023-03-27 12:00:25 -07:00 |
| Charlie Holtz | e862b2f85c | Merge branch 'master' of https://github.com/cbh123/langchain into add-replicate | 2023-03-27 11:59:08 -07:00 |
| Charlie Holtz | cefe277a35 | initial commit: replicate readme; add replicate to integration; simple replicate test; rename api key to token, and update to latest python client; update replicate docs | 2023-03-26 14:27:38 -07:00 |
5 changed files with 520 additions and 0 deletions


@@ -0,0 +1,49 @@
# Replicate
This page covers how to use the Replicate ecosystem within LangChain.
## Installation and Setup
- Create a [Replicate](https://replicate.com) account. Get your API token and set it as an environment variable (`REPLICATE_API_TOKEN`), either in your shell or from Python as sketched below this list
- Install the [Replicate Python client](https://github.com/replicate/replicate-python) with `pip install replicate`
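If you prefer to set the token from Python rather than your shell, a minimal sketch (the token value is a placeholder, not a real credential):
```python
import os

# Placeholder: substitute the token from https://replicate.com/account
os.environ["REPLICATE_API_TOKEN"] = "<your-replicate-api-token>"
```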
## Calling a model
Find a model on the [Replicate explore page](https://replicate.com/explore), then paste the model name and version in the format `model_name:version`.
For example, for this [flan-t5 model](https://replicate.com/daanelson/flan-t5), click on the API tab. The model name/version would be: `daanelson/flan-t5:04e422a9b85baed86a4f24981d7f9953e20c5fd82f6103b74ebc431588e1cec8`
Only the `model` param is required, but we can add other model params when initializing.
For example, if we were running stable diffusion and wanted to change the image dimensions:
```python
Replicate(
    model="stability-ai/stable-diffusion:db21e45d3f7023abc2a46ee38a23973f6dce16bb082a930b0c49861f96d1e5bf",
    image_dimensions="512x512",
)
```
*Note that only the first output of a model will be returned.*
From here, we can initialize our model:
```python
llm = Replicate(model="daanelson/flan-t5:04e422a9b85baed86a4f24981d7f9953e20c5fd82f6103b74ebc431588e1cec8")
```
And run it:
```python
prompt = """
Answer the following yes/no question by reasoning step by step.
Can a dog drive a car?
"""
llm(prompt)
```
We can call any Replicate model (not just LLMs) using this syntax. For example, we can call stable diffusion:
```python
text2image = Replicate(
    model="stability-ai/stable-diffusion:db21e45d3f7023abc2a46ee38a23973f6dce16bb082a930b0c49861f96d1e5bf",
    image_dimensions="512x512",
)
image_output = text2image("A cat riding a motorcycle by Picasso")
image_output
```
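For an image model like stable diffusion, the first returned output is typically a URL to the generated image rather than the image bytes. A hedged sketch for fetching it, assuming `image_output` is a URL string and using `requests` and `Pillow` (neither is a LangChain dependency):
```python
import requests
from io import BytesIO
from PIL import Image

# Assumes image_output from the snippet above is a URL to the generated image
response = requests.get(image_output)
response.raise_for_status()
image = Image.open(BytesIO(response.content))
image.show()
```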

File diff suppressed because one or more lines are too long


@@ -19,6 +19,7 @@ from langchain.llms.nlpcloud import NLPCloud
from langchain.llms.openai import AzureOpenAI, OpenAI, OpenAIChat
from langchain.llms.petals import Petals
from langchain.llms.promptlayer_openai import PromptLayerOpenAI, PromptLayerOpenAIChat
from langchain.llms.replicate import Replicate
from langchain.llms.sagemaker_endpoint import SagemakerEndpoint
from langchain.llms.self_hosted import SelfHostedPipeline
from langchain.llms.self_hosted_hugging_face import SelfHostedHuggingFaceLLM
@@ -45,6 +46,7 @@ __all__ = [
"HuggingFacePipeline",
"AI21",
"AzureOpenAI",
"Replicate",
"SelfHostedPipeline",
"SelfHostedHuggingFaceLLM",
"PromptLayerOpenAI",
@@ -72,6 +74,7 @@ type_to_cls_dict: Dict[str, Type[BaseLLM]] = {
"petals": Petals,
"huggingface_pipeline": HuggingFacePipeline,
"azure": AzureOpenAI,
"replicate": Replicate,
"self_hosted": SelfHostedPipeline,
"self_hosted_hugging_face": SelfHostedHuggingFaceLLM,
"stochasticai": StochasticAI,


@@ -0,0 +1,96 @@
"""Wrapper around Replicate API."""
import logging
from typing import Any, Dict, List, Mapping, Optional
from pydantic import BaseModel, Extra, Field, root_validator
from langchain.llms.base import LLM
from langchain.utils import get_from_dict_or_env
logger = logging.getLogger(__name__)
class Replicate(LLM, BaseModel):
"""Wrapper around Replicate models.
To use, you should have the ``replicate`` python package installed,
and the environment variable ``REPLICATE_API_TOKEN`` set with your API token.
You can find your token here: https://replicate.com/account
model_name and model_version are required params, but any other model parameters
can be passed in here.
Example:
.. code-block:: python
from langchain.llms import Replicate
replicate = Replicate(model=...)
"""
model: str
model_kwargs: Dict[str, Any] = Field(default_factory=dict)
"""Holds any model parameters valid for `create` call not
explicitly specified."""
replicate_api_token: Optional[str] = None
class Config:
"""Configuration for this pydantic config."""
extra = Extra.forbid
@root_validator(pre=True)
def build_extra(cls, values: Dict[str, Any]) -> Dict[str, Any]:
"""Build extra kwargs from additional params that were passed in."""
all_required_field_names = {field.alias for field in cls.__fields__.values()}
extra = values.get("model_kwargs", {})
for field_name in list(values):
if field_name not in all_required_field_names:
if field_name in extra:
raise ValueError(f"Found {field_name} supplied twice.")
logger.warning(
f"""{field_name} was transfered to model_kwargs.
Please confirm that {field_name} is what you intended."""
)
extra[field_name] = values.pop(field_name)
values["model_kwargs"] = extra
return values
@root_validator()
def validate_environment(cls, values: Dict) -> Dict:
"""Validate that api key and python package exists in environment."""
replicate_api_token = get_from_dict_or_env(
values, "REPLICATE_API_TOKEN", "REPLICATE_API_TOKEN"
)
values["replicate_api_token"] = replicate_api_token
return values
@property
def _identifying_params(self) -> Mapping[str, Any]:
"""Get the identifying parameters."""
return {
**{"model": self.model},
**{"model_kwargs": self.model_kwargs},
}
@property
def _llm_type(self) -> str:
"""Return type of model."""
return "replicate"
def _call(self, prompt: str, stop: Optional[List[str]] = None) -> str:
"""Call to replicate endpoint."""
try:
import replicate as replicate_python
except ImportError:
raise ValueError(
"Could not import replicate python package. "
"Please install it with `pip install replicate`."
)
params = self.model_kwargs or {}
inputs = {"prompt": prompt, **params}
outputs = replicate_python.run(self.model, input=inputs)
return outputs[0]
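To illustrate the `build_extra` validator above: any keyword argument that is not a declared field is moved into `model_kwargs` (with a warning logged). A minimal sketch, reusing the stable diffusion model from the docs page and assuming `REPLICATE_API_TOKEN` is set in the environment:
```python
from langchain.llms import Replicate

# image_dimensions is not a declared field, so build_extra moves it into model_kwargs
llm = Replicate(
    model="stability-ai/stable-diffusion:db21e45d3f7023abc2a46ee38a23973f6dce16bb082a930b0c49861f96d1e5bf",
    image_dimensions="512x512",
)
print(llm.model_kwargs)  # {'image_dimensions': '512x512'}
```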


@@ -0,0 +1,10 @@
"""Test Replicate API wrapper."""
from langchain.llms.replicate import Replicate
def test_replicate_call() -> None:
"""Test valid call to Replicate."""
llm = Replicate()
output = llm("Say foo:")
assert isinstance(output, str)