Improve credential handling to allow passing in constructors (#79)
Addresses the issue in #76 by using either the relevant environment variable, if set, or a string passed in the constructor. Prefers the constructor string over the environment variable, which seemed like the natural choice to me.
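Concretely, each wrapper gains an optional pydantic field that defaults to the relevant environment variable and is then checked in its `root_validator`. A minimal sketch of that pattern, using a made-up `ExampleLLM` class and `EXAMPLE_API_KEY` variable rather than the actual LangChain classes:

```python
import os
from typing import Any, Dict, Optional

from pydantic import BaseModel, root_validator  # pydantic v1 style, matching the diff


class ExampleLLM(BaseModel):
    """Illustrative wrapper showing the credential-handling pattern in this PR."""

    client: Any = None  #: :meta private:
    # Default is read from the environment when the class is defined; an explicit
    # constructor argument overrides it.
    example_api_key: Optional[str] = os.environ.get("EXAMPLE_API_KEY")

    @root_validator()
    def validate_environment(cls, values: Dict) -> Dict:
        """Validate that an api key was provided one way or the other."""
        example_api_key = values.get("example_api_key")
        if example_api_key is None or example_api_key == "":
            raise ValueError(
                "Did not find API key, please add an environment variable"
                " `EXAMPLE_API_KEY` which contains it, or pass `example_api_key`"
                " as a named parameter."
            )
        return values
```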
This commit is contained in:
parent 9679bdc34c
commit 54e325be2f
README.md (27 lines changed)
@@ -2,9 +2,7 @@
⚡ Building applications with LLMs through composability ⚡

[](https://github.com/hwchase17/langchain/actions/workflows/lint.yml) [](https://github.com/hwchase17/langchain/actions/workflows/test.yml) [](https://opensource.org/licenses/MIT) [](https://twitter.com/langchainai) [](https://discord.gg/6adMQxSpJS)

## Quick Install
@@ -20,6 +18,7 @@ combine them with other sources of computation or knowledge.
This library is aimed at assisting in the development of those types of applications.
It aims to create:

1. a comprehensive collection of pieces you would ever want to combine
2. a flexible interface for combining pieces into a single comprehensive "chain"
3. a schema for easily saving and sharing those chains
@@ -30,23 +29,23 @@ Besides the installation of this python package, you will also need to install p
Note: the reason these packages are not included in the dependencies by default is that as we imagine scaling this package, we do not want to force dependencies that are not needed.

-The following use cases require specific installs and environment variables:
+The following use cases require specific installs and api keys:

-- *OpenAI*:
+- _OpenAI_:
  - Install requirements with `pip install openai`
-  - Set the following environment variable: `OPENAI_API_KEY`
+  - Get an OpenAI api key and either set it as an environment variable (`OPENAI_API_KEY`) or pass it to the LLM constructor as `openai_api_key`.
-- *Cohere*:
+- _Cohere_:
  - Install requirements with `pip install cohere`
-  - Set the following environment variable: `COHERE_API_KEY`
+  - Get a Cohere api key and either set it as an environment variable (`COHERE_API_KEY`) or pass it to the LLM constructor as `cohere_api_key`.
-- *HuggingFace Hub*
+- _HuggingFace Hub_
  - Install requirements with `pip install huggingface_hub`
-  - Set the following environment variable: `HUGGINGFACEHUB_API_TOKEN`
+  - Get a HuggingFace Hub api token and either set it as an environment variable (`HUGGINGFACEHUB_API_TOKEN`) or pass it to the LLM constructor as `huggingfacehub_api_token`.
-- *SerpAPI*:
+- _SerpAPI_:
  - Install requirements with `pip install google-search-results`
-  - Set the following environment variable: `SERPAPI_API_KEY`
+  - Get a SerpAPI api key and either set it as an environment variable (`SERPAPI_API_KEY`) or pass it to the LLM constructor as `serpapi_api_key`.
-- *NatBot*:
+- _NatBot_:
  - Install requirements with `pip install playwright`
-- *Wikipedia*:
+- _Wikipedia_:
  - Install requirements with `pip install wikipedia`

## 🚀 What can I do with this
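Taken together, the README changes above mean a key can be supplied either through the environment or directly at construction time. A minimal sketch, assuming the top-level `OpenAI` and `Cohere` exports (the key strings are placeholders):

```python
from langchain import Cohere, OpenAI

# Option 1: rely on OPENAI_API_KEY / COHERE_API_KEY already being set in the environment.
llm = OpenAI()

# Option 2: pass the key directly; an explicit constructor argument takes precedence.
llm = OpenAI(openai_api_key="my-openai-key")
cohere = Cohere(cohere_api_key="my-cohere-key")
```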
@@ -92,6 +92,7 @@ class SelfAskWithSearchChain(Chain, BaseModel):
    input_key: str = "question"  #: :meta private:
    output_key: str = "answer"  #: :meta private:

    class Config:
        """Configuration for this pydantic object."""
@@ -4,7 +4,7 @@ Heavily borrowed from https://github.com/ofirpress/self-ask
"""
import os
import sys
-from typing import Any, Dict, List
+from typing import Any, Dict, List, Optional

from pydantic import BaseModel, Extra, root_validator
@@ -29,7 +29,8 @@ class SerpAPIChain(Chain, BaseModel):
    """Chain that calls SerpAPI.

    To use, you should have the ``google-search-results`` python package installed,
-   and the environment variable ``SERPAPI_API_KEY`` set with your API key.
+   and the environment variable ``SERPAPI_API_KEY`` set with your API key, or pass
+   `serpapi_api_key` as a named parameter to the constructor.

    Example:
        .. code-block:: python
@@ -42,6 +43,8 @@ class SerpAPIChain(Chain, BaseModel):
    input_key: str = "search_query"  #: :meta private:
    output_key: str = "search_result"  #: :meta private:

+   serpapi_api_key: Optional[str] = os.environ.get("SERPAPI_API_KEY")
+
    class Config:
        """Configuration for this pydantic object."""
@@ -66,10 +69,13 @@ class SerpAPIChain(Chain, BaseModel):
    @root_validator()
    def validate_environment(cls, values: Dict) -> Dict:
        """Validate that api key and python package exists in environment."""
-       if "SERPAPI_API_KEY" not in os.environ:
+       serpapi_api_key = values.get("serpapi_api_key")
+
+       if serpapi_api_key is None or serpapi_api_key == "":
            raise ValueError(
                "Did not find SerpAPI API key, please add an environment variable"
-               " `SERPAPI_API_KEY` which contains it."
+               " `SERPAPI_API_KEY` which contains it, or pass `serpapi_api_key` as a named"
+               " parameter to the constructor."
            )
        try:
            from serpapi import GoogleSearch
@@ -84,7 +90,7 @@ class SerpAPIChain(Chain, BaseModel):
    def _run(self, inputs: Dict[str, Any]) -> Dict[str, str]:
        params = {
-           "api_key": os.environ["SERPAPI_API_KEY"],
+           "api_key": self.serpapi_api_key,
            "engine": "google",
            "q": inputs[self.input_key],
            "google_domain": "google.com",
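A hedged usage sketch of the updated chain: `serpapi_api_key` and the `search_query`/`search_result` keys come from the hunks above, while the top-level `SerpAPIChain` import and calling the chain with an input dict are assumptions about the surrounding library:

```python
from langchain import SerpAPIChain

# Key from SERPAPI_API_KEY (read when the module is imported), or passed explicitly.
search = SerpAPIChain(serpapi_api_key="my-serpapi-key")

# Assuming the base Chain is callable with a dict keyed by input_key.
result = search({"search_query": "capital of France"})["search_result"]
```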
@@ -1,6 +1,6 @@
"""Wrapper around OpenAI embedding models."""
import os
-from typing import Any, Dict, List
+from typing import Any, Dict, List, Optional

from pydantic import BaseModel, Extra, root_validator
@@ -11,19 +11,22 @@ class OpenAIEmbeddings(BaseModel, Embeddings):
    """Wrapper around OpenAI embedding models.

    To use, you should have the ``openai`` python package installed, and the
-   environment variable ``OPENAI_API_KEY`` set with your API key.
+   environment variable ``OPENAI_API_KEY`` set with your API key or pass it
+   as a named parameter to the constructor.

    Example:
        .. code-block:: python

            from langchain.embeddings import OpenAIEmbeddings
-           openai = OpenAIEmbeddings(model_name="davinci")
+           openai = OpenAIEmbeddings(model_name="davinci", openai_api_key="my-api-key")
    """

    client: Any  #: :meta private:
    model_name: str = "babbage"
    """Model name to use."""

+   openai_api_key: Optional[str] = os.environ.get("OPENAI_API_KEY")
+
    class Config:
        """Configuration for this pydantic object."""
@@ -32,14 +35,18 @@ class OpenAIEmbeddings(BaseModel, Embeddings):
    @root_validator()
    def validate_environment(cls, values: Dict) -> Dict:
        """Validate that api key and python package exists in environment."""
-       if "OPENAI_API_KEY" not in os.environ:
+       openai_api_key = values.get("openai_api_key")
+
+       if openai_api_key is None or openai_api_key == "":
            raise ValueError(
                "Did not find OpenAI API key, please add an environment variable"
-               " `OPENAI_API_KEY` which contains it."
+               " `OPENAI_API_KEY` which contains it, or pass `openai_api_key` as a"
+               " named parameter."
            )
        try:
            import openai
+
+           openai.api_key = openai_api_key
            values["client"] = openai.Embedding
        except ImportError:
            raise ValueError(
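A small usage sketch for the embeddings wrapper; the constructor arguments mirror the docstring example in the diff, while `embed_query`/`embed_documents` are assumed to be the standard `Embeddings` interface methods and the key is a placeholder:

```python
from langchain.embeddings import OpenAIEmbeddings

# Key from OPENAI_API_KEY, or passed explicitly as below.
embeddings = OpenAIEmbeddings(model_name="davinci", openai_api_key="my-api-key")
query_vector = embeddings.embed_query("How are credentials passed?")
doc_vectors = embeddings.embed_documents(["first document", "second document"])
```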
@@ -12,13 +12,14 @@ class Cohere(BaseModel, LLM):
    """Wrapper around Cohere large language models.

    To use, you should have the ``cohere`` python package installed, and the
-   environment variable ``COHERE_API_KEY`` set with your API key.
+   environment variable ``COHERE_API_KEY`` set with your API key, or pass
+   it as a named parameter to the constructor.

    Example:
        .. code-block:: python

            from langchain import Cohere
-           cohere = Cohere(model="gptd-instruct-tft")
+           cohere = Cohere(model="gptd-instruct-tft", cohere_api_key="my-api-key")
    """

    client: Any  #: :meta private:
@@ -43,6 +44,8 @@ class Cohere(BaseModel, LLM):
    presence_penalty: int = 0
    """Penalizes repeated tokens."""

+   cohere_api_key: Optional[str] = os.environ.get("COHERE_API_KEY")
+
    class Config:
        """Configuration for this pydantic object."""
@@ -51,15 +54,18 @@ class Cohere(BaseModel, LLM):
    @root_validator()
    def validate_environment(cls, values: Dict) -> Dict:
        """Validate that api key and python package exists in environment."""
-       if "COHERE_API_KEY" not in os.environ:
+       cohere_api_key = values.get("cohere_api_key")
+
+       if cohere_api_key is None or cohere_api_key == "":
            raise ValueError(
                "Did not find Cohere API key, please add an environment variable"
-               " `COHERE_API_KEY` which contains it."
+               " `COHERE_API_KEY` which contains it, or pass `cohere_api_key`"
+               " as a named parameter."
            )
        try:
            import cohere

-           values["client"] = cohere.Client(os.environ["COHERE_API_KEY"])
+           values["client"] = cohere.Client(cohere_api_key)
        except ImportError:
            raise ValueError(
                "Could not import cohere python package. "
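A usage sketch matching the updated Cohere docstring; the key string is a placeholder, and the prompt call assumes the base `LLM` object is callable on a prompt string:

```python
from langchain import Cohere

# Key from COHERE_API_KEY (read at import time), or passed explicitly as below.
cohere = Cohere(model="gptd-instruct-tft", cohere_api_key="my-api-key")

# Assuming the base LLM interface exposes __call__(prompt).
text = cohere("Say hello in one short sentence.")
```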
@@ -14,7 +14,8 @@ class HuggingFaceHub(BaseModel, LLM):
    """Wrapper around HuggingFaceHub models.

    To use, you should have the ``huggingface_hub`` python package installed, and the
-   environment variable ``HUGGINGFACEHUB_API_TOKEN`` set with your API token.
+   environment variable ``HUGGINGFACEHUB_API_TOKEN`` set with your API token, or pass
+   it as a named parameter to the constructor.

    Only supports task `text-generation` for now.
@@ -22,7 +23,7 @@ class HuggingFaceHub(BaseModel, LLM):
        .. code-block:: python

            from langchain import HuggingFaceHub
-           hf = HuggingFaceHub(repo_id="gpt2")
+           hf = HuggingFaceHub(repo_id="gpt2", huggingfacehub_api_token="my-api-key")
    """

    client: Any  #: :meta private:
@@ -37,6 +38,8 @@ class HuggingFaceHub(BaseModel, LLM):
    num_return_sequences: int = 1
    """How many completions to generate for each prompt."""

+   huggingfacehub_api_token: Optional[str] = os.environ.get("HUGGINGFACEHUB_API_TOKEN")
+
    class Config:
        """Configuration for this pydantic object."""
@@ -45,10 +48,13 @@ class HuggingFaceHub(BaseModel, LLM):
    @root_validator()
    def validate_environment(cls, values: Dict) -> Dict:
        """Validate that api key and python package exists in environment."""
-       if "HUGGINGFACEHUB_API_TOKEN" not in os.environ:
+       huggingfacehub_api_token = values.get("huggingfacehub_api_token")
+
+       if huggingfacehub_api_token is None or huggingfacehub_api_token == "":
            raise ValueError(
                "Did not find HuggingFace API token, please add an environment variable"
-               " `HUGGINGFACEHUB_API_TOKEN` which contains it."
+               " `HUGGINGFACEHUB_API_TOKEN` which contains it, or pass"
+               " `huggingfacehub_api_token` as a named parameter."
            )
        try:
            from huggingface_hub.inference_api import InferenceApi
@@ -56,7 +62,7 @@ class HuggingFaceHub(BaseModel, LLM):
            repo_id = values.get("repo_id", DEFAULT_REPO_ID)
            values["client"] = InferenceApi(
                repo_id=repo_id,
-               token=os.environ["HUGGINGFACEHUB_API_TOKEN"],
+               token=huggingfacehub_api_token,
                task="text-generation",
            )
        except ImportError:
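A usage sketch matching the updated HuggingFaceHub docstring; the token value is a placeholder and the prompt call assumes the base `LLM` callable interface:

```python
from langchain import HuggingFaceHub

# Token from HUGGINGFACEHUB_API_TOKEN, or passed explicitly as below.
hf = HuggingFaceHub(repo_id="gpt2", huggingfacehub_api_token="my-api-key")

# Assuming the base LLM interface exposes __call__(prompt).
completion = hf("The meaning of life is")
```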
@@ -38,6 +38,8 @@ class OpenAI(BaseModel, LLM):
    best_of: int = 1
    """Generates best_of completions server-side and returns the "best"."""

+   openai_api_key: Optional[str] = os.environ.get("OPENAI_API_KEY")
+
    class Config:
        """Configuration for this pydantic object."""
@@ -46,14 +48,18 @@ class OpenAI(BaseModel, LLM):
    @root_validator()
    def validate_environment(cls, values: Dict) -> Dict:
        """Validate that api key and python package exists in environment."""
-       if "OPENAI_API_KEY" not in os.environ:
+       openai_api_key = values.get("openai_api_key")
+
+       if openai_api_key is None or openai_api_key == "":
            raise ValueError(
                "Did not find OpenAI API key, please add an environment variable"
-               " `OPENAI_API_KEY` which contains it."
+               " `OPENAI_API_KEY` which contains it, or pass `openai_api_key`"
+               " as a named parameter."
            )
        try:
            import openai
+
+           openai.api_key = openai_api_key
            values["client"] = openai.Completion
        except ImportError:
            raise ValueError(
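Finally, a usage sketch for the OpenAI LLM wrapper; `openai_api_key` and `best_of` come from the hunks above, while the top-level `OpenAI` import and the prompt call are assumptions about the surrounding library:

```python
from langchain import OpenAI

# Key from OPENAI_API_KEY, or passed explicitly; the explicit value wins.
llm = OpenAI(best_of=2, openai_api_key="my-api-key")

# Assuming the base LLM interface exposes __call__(prompt).
answer = llm("Suggest a name for a credentials helper.")
```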