mirror of
https://github.com/hwchase17/langchain.git
synced 2026-03-18 11:07:36 +00:00
initial commit
Add Replicate integration: ecosystem README page, integration notebook, and a simple integration test; rename the API key to a token and update to the latest Replicate python client.
This commit is contained in:
49
docs/ecosystem/replicate.md
Normal file
49
docs/ecosystem/replicate.md
Normal file
@@ -0,0 +1,49 @@
|
||||
# Replicate
|
||||
This page covers how to use the Replicate ecosystem within LangChain.
|
||||
|
||||
## Installation and Setup
|
||||
- Create a [Replicate](https://replicate.com) account. Get your api key and set it as an environment variable (`REPLICATE_API_TOKEN`)
|
||||
- Install the [Replicate python client](https://github.com/replicate/replicate-python) with `pip install replicate`
|
||||
|
||||
## Calling a model
|
||||
|
||||
Find a model on the [replicate explore page](https://replicate.com/explore), and then paste in the model name and version in this format: `owner-name/model-name:version`
|
||||
|
||||
For example, for this [flan-t5 model]( https://replicate.com/daanelson/flan-t5), click on the API tab. The model name/version would be: `daanelson/flan-t5:04e422a9b85baed86a4f24981d7f9953e20c5fd82f6103b74ebc431588e1cec8`
|
||||
|
||||
Only the `model` param is required, but we can add other model params when initializing.
|
||||
|
||||
For example, if we were running stable diffusion and wanted to change the image dimensions:
|
||||
|
||||
```python
|
||||
Replicate(model="stability-ai/stable-diffusion:db21e45d3f7023abc2a46ee38a23973f6dce16bb082a930b0c49861f96d1e5bf",
|
||||
image_dimensions='512x512')
|
||||
```
|
||||
|
||||
*Note that only the first output of a model will be returned.*
|
||||
|
||||
From here, we can initialize our model:
|
||||
|
||||
```python
|
||||
llm = Replicate(model="daanelson/flan-t5:04e422a9b85baed86a4f24981d7f9953e20c5fd82f6103b74ebc431588e1cec8")
|
||||
```
|
||||
|
||||
And run it:
|
||||
|
||||
```python
|
||||
prompt = """
|
||||
Answer the following yes/no question by reasoning step by step.
|
||||
Can a dog drive a car?
|
||||
"""
|
||||
llm(prompt)
|
||||
```
|
||||
|
||||
We can call any replicate model (not just LLMs) using this syntax. For example, we can call stable diffusion:
|
||||
|
||||
```python
|
||||
text2image = Replicate(model="stability-ai/stable-diffusion:db21e45d3f7023abc2a46ee38a23973f6dce16bb082a930b0c49861f96d1e5bf",
|
||||
                       image_dimensions='512x512')
|
||||
|
||||
image_output = text2image("A cat riding a motorcycle by Picasso")
|
||||
image_output
|
||||
```
|
||||
@@ -39,6 +39,8 @@ The examples here are all "how-to" guides for how to integrate with various LLM
|
||||
|
||||
`Self-Hosted Models (via Runhouse) <./integrations/self_hosted_examples.html>`_: Covers how to run models on existing or on-demand remote compute with LangChain.
|
||||
|
||||
`Replicate <./integrations/replicate_example.html>`_: Covers how to utilize the Replicate wrapper.
|
||||
|
||||
|
||||
.. toctree::
|
||||
:maxdepth: 1
|
||||
|
||||
374
docs/modules/llms/integrations/replicate.ipynb
Normal file
374
docs/modules/llms/integrations/replicate.ipynb
Normal file
File diff suppressed because one or more lines are too long
@@ -19,6 +19,7 @@ from langchain.llms.nlpcloud import NLPCloud
|
||||
from langchain.llms.openai import AzureOpenAI, OpenAI, OpenAIChat
|
||||
from langchain.llms.petals import Petals
|
||||
from langchain.llms.promptlayer_openai import PromptLayerOpenAI, PromptLayerOpenAIChat
|
||||
from langchain.llms.replicate import Replicate
|
||||
from langchain.llms.sagemaker_endpoint import SagemakerEndpoint
|
||||
from langchain.llms.self_hosted import SelfHostedPipeline
|
||||
from langchain.llms.self_hosted_hugging_face import SelfHostedHuggingFaceLLM
|
||||
@@ -45,6 +46,7 @@ __all__ = [
|
||||
"HuggingFacePipeline",
|
||||
"AI21",
|
||||
"AzureOpenAI",
|
||||
"Replicate",
|
||||
"SelfHostedPipeline",
|
||||
"SelfHostedHuggingFaceLLM",
|
||||
"PromptLayerOpenAI",
|
||||
@@ -72,6 +74,7 @@ type_to_cls_dict: Dict[str, Type[BaseLLM]] = {
|
||||
"petals": Petals,
|
||||
"huggingface_pipeline": HuggingFacePipeline,
|
||||
"azure": AzureOpenAI,
|
||||
"replicate": Replicate,
|
||||
"self_hosted": SelfHostedPipeline,
|
||||
"self_hosted_hugging_face": SelfHostedHuggingFaceLLM,
|
||||
"stochasticai": StochasticAI,
|
||||
|
||||
100
langchain/llms/replicate.py
Normal file
100
langchain/llms/replicate.py
Normal file
@@ -0,0 +1,100 @@
|
||||
"""Wrapper around Replicate API."""
|
||||
import logging
|
||||
from typing import Any, Dict, List, Mapping, Optional
|
||||
|
||||
from pydantic import BaseModel, Extra, Field, root_validator
|
||||
|
||||
from langchain.llms.base import LLM
|
||||
from langchain.llms.utils import enforce_stop_tokens
|
||||
from langchain.utils import get_from_dict_or_env
|
||||
|
||||
logger = logging.getLogger(__name__)
|
||||
|
||||
|
||||
class Replicate(LLM, BaseModel):
    """Wrapper around Replicate models.

    To use, you should have the ``replicate`` python package installed,
    and the environment variable ``REPLICATE_API_TOKEN`` set with your API token.
    You can find your token here: https://replicate.com/account

    ``model`` is a required param in the ``owner/name:version`` format shown on
    a model's API tab at replicate.com; any other model parameters can be
    passed in here and are forwarded to the model as inputs.

    Example:
        .. code-block:: python
            from langchain.llms import Replicate
            replicate = Replicate(model="stability-ai/stable-diffusion:27b93a2413e7f36cd83da926f3656280b2931564ff050bf9575f1fdf9bcd7478")
    """

    # Model identifier in "owner/name:version" form.
    model: str = ""

    model_kwargs: Dict[str, Any] = Field(default_factory=dict)
    """Holds any model parameters valid for `create` call not
    explicitly specified."""

    # API token; resolved from the REPLICATE_API_TOKEN env var if not supplied.
    replicate_api_token: Optional[str] = None

    class Config:
        """Configuration for this pydantic config."""

        extra = Extra.forbid

    @root_validator(pre=True)
    def build_extra(cls, values: Dict[str, Any]) -> Dict[str, Any]:
        """Build extra kwargs from additional params that were passed in.

        Any constructor kwarg that is not a declared field is moved into
        ``model_kwargs`` (with a warning) so it can be forwarded to the model.
        """
        all_required_field_names = {field.alias for field in cls.__fields__.values()}

        extra = values.get("model_kwargs", {})
        for field_name in list(values):
            if field_name not in all_required_field_names:
                if field_name in extra:
                    raise ValueError(f"Found {field_name} supplied twice.")
                logger.warning(
                    f"""{field_name} was transferred to model_kwargs.
                    Please confirm that {field_name} is what you intended."""
                )
                extra[field_name] = values.pop(field_name)
        values["model_kwargs"] = extra
        return values

    @root_validator()
    def validate_environment(cls, values: Dict) -> Dict:
        """Validate that api key and python package exists in environment."""
        # Look the token up under the pydantic field name first, then fall
        # back to the REPLICATE_API_TOKEN environment variable.
        replicate_api_token = get_from_dict_or_env(
            values, "replicate_api_token", "REPLICATE_API_TOKEN"
        )
        values["replicate_api_token"] = replicate_api_token
        return values

    @property
    def _identifying_params(self) -> Mapping[str, Any]:
        """Get the identifying parameters."""
        return {
            **{"model": self.model},
            **{"model_kwargs": self.model_kwargs},
        }

    @property
    def _llm_type(self) -> str:
        """Return type of model."""
        return "replicate"

    def _call(self, prompt: str, stop: Optional[List[str]] = None) -> str:
        """Call to replicate endpoint.

        Args:
            prompt: The prompt to pass to the model.
            stop: Optional stop sequences; output is truncated at the first
                occurrence of any of them.

        Returns:
            The first output produced by the model.
        """
        try:
            import replicate as replicate_python
        except ImportError:
            raise ValueError(
                "Could not import replicate python package. "
                "Please install it with `pip install replicate`."
            )
        params = self.model_kwargs or {}

        inputs = {"prompt": prompt, **params}

        # replicate.run expects the model inputs under the `input` keyword,
        # not spread as top-level kwargs.
        outputs = replicate_python.run(self.model, input=inputs)
        # Note: only the first output of the model is returned.
        text = outputs[0]
        if stop is not None:
            # Replicate has no native stop-sequence support, so we cut the
            # text off client-side.
            text = enforce_stop_tokens(text, stop)
        return text
|
||||
10
tests/integration_tests/llms/test_replicate.py
Normal file
10
tests/integration_tests/llms/test_replicate.py
Normal file
@@ -0,0 +1,10 @@
|
||||
"""Test Replicate API wrapper."""
|
||||
|
||||
from langchain.llms.replicate import Replicate
|
||||
|
||||
|
||||
def test_replicate_call() -> None:
    """Smoke-test that Replicate produces a string completion."""
    model = Replicate()
    result = model("Say foo:")
    assert isinstance(result, str)
|
||||
Reference in New Issue
Block a user