Mirror of https://github.com/hwchase17/langchain.git
Synced 2026-02-10 03:00:59 +00:00

Compare commits (1 commit): langchain-... → eugene/com...

Commit: 0f3b569583
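Every hunk in this commit applies the same migration: environment lookups move out of `root_validator` / `pre_init` validator bodies and into the field declarations themselves, via `from_env` handed to pydantic's `Field(default_factory=...)`. A minimal sketch of the helper's behavior as it is used below (an approximation for orientation, not the exact `langchain_core.utils.from_env` implementation):

import os
from typing import Callable, Optional

def from_env(key: str, *, default: Optional[str] = None) -> Callable[[], Optional[str]]:
    """Return a zero-argument callable that reads `key` from the environment.

    Handing this callable to pydantic's `Field(default_factory=...)` defers the
    lookup to model instantiation, which is what lets the validator blocks in
    the hunks below be deleted. (Assumed behavior: the real helper raises a
    descriptive error when the variable is unset and no default is given.)
    """
    def read() -> Optional[str]:
        return os.environ.get(key, default)

    return read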
@@ -10,7 +10,7 @@ from typing import TYPE_CHECKING, Dict, Optional, Set
 import requests
 from langchain_core.messages import BaseMessage
 from langchain_core.pydantic_v1 import Field, SecretStr, root_validator
-from langchain_core.utils import convert_to_secret_str, get_from_dict_or_env
+from langchain_core.utils import convert_to_secret_str, from_env, get_from_dict_or_env

 from langchain_community.adapters.openai import convert_message_to_dict
 from langchain_community.chat_models.openai import (
@@ -64,7 +64,9 @@ class ChatAnyscale(ChatOpenAI):
     """AnyScale Endpoints API keys."""
     model_name: str = Field(default=DEFAULT_MODEL, alias="model")
     """Model name to use."""
-    anyscale_api_base: str = Field(default=DEFAULT_API_BASE)
+    anyscale_api_base: str = Field(
+        default_factory=from_env("ANYSCALE_API_BASE", default=DEFAULT_API_BASE)
+    )
     """Base URL path for API requests,
     leave blank if not using a proxy or service emulator."""
     anyscale_proxy: Optional[str] = None
@@ -112,12 +114,6 @@ class ChatAnyscale(ChatOpenAI):
                 "ANYSCALE_API_KEY",
             )
         )
-        values["anyscale_api_base"] = get_from_dict_or_env(
-            values,
-            "anyscale_api_base",
-            "ANYSCALE_API_BASE",
-            default=DEFAULT_API_BASE,
-        )
         values["openai_proxy"] = get_from_dict_or_env(
             values,
             "anyscale_proxy",
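The observable effect for this first file, sketched as a usage example (placeholder key and URL; assumes the `langchain_community` import path of this branch):

import os

os.environ["ANYSCALE_API_BASE"] = "https://proxy.example.com/v1"  # hypothetical proxy URL

from langchain_community.chat_models import ChatAnyscale

# Previously the root validator copied ANYSCALE_API_BASE into `values`;
# now the field's default_factory reads it at construction time, and an
# explicit keyword argument still overrides the environment.
chat = ChatAnyscale(anyscale_api_key="esecret_placeholder")  # placeholder key
assert chat.anyscale_api_base == "https://proxy.example.com/v1"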
@@ -49,6 +49,7 @@ from langchain_core.runnables import Runnable
 from langchain_core.tools import BaseTool
 from langchain_core.utils import (
     convert_to_secret_str,
+    from_env,
     get_from_dict_or_env,
     get_pydantic_field_names,
 )
@@ -348,7 +349,10 @@ class ChatBaichuan(BaseChatModel):
     def lc_serializable(self) -> bool:
         return True

-    baichuan_api_base: str = Field(default=DEFAULT_API_BASE, alias="base_url")
+    baichuan_api_base: str = Field(
+        default_factory=from_env("BAICHUAN_API_BASE", default=DEFAULT_API_BASE),
+        alias="base_url",
+    )
     """Baichuan custom endpoints"""
     baichuan_api_key: SecretStr = Field(alias="api_key")
     """Baichuan API Key"""
@@ -408,12 +412,6 @@ class ChatBaichuan(BaseChatModel):

     @root_validator(pre=True)
     def validate_environment(cls, values: Dict) -> Dict:
-        values["baichuan_api_base"] = get_from_dict_or_env(
-            values,
-            "baichuan_api_base",
-            "BAICHUAN_API_BASE",
-            DEFAULT_API_BASE,
-        )
         values["baichuan_api_key"] = convert_to_secret_str(
             get_from_dict_or_env(
                 values,
@@ -22,6 +22,7 @@ from langchain_core.outputs import ChatGeneration, ChatGenerationChunk, ChatResu
 from langchain_core.pydantic_v1 import Field, SecretStr, root_validator
 from langchain_core.utils import (
     convert_to_secret_str,
+    from_env,
     get_from_dict_or_env,
 )

@@ -88,7 +89,9 @@ class ChatCoze(BaseChatModel):
     def lc_serializable(self) -> bool:
         return True

-    coze_api_base: str = Field(default=DEFAULT_API_BASE)
+    coze_api_base: str = Field(
+        default_factory=from_env("COZE_API_BASE", default=DEFAULT_API_BASE)
+    )
     """Coze custom endpoints"""
     coze_api_key: Optional[SecretStr] = None
     """Coze API Key"""
@@ -118,12 +121,6 @@ class ChatCoze(BaseChatModel):

     @root_validator(pre=True)
     def validate_environment(cls, values: Dict) -> Dict:
-        values["coze_api_base"] = get_from_dict_or_env(
-            values,
-            "coze_api_base",
-            "COZE_API_BASE",
-            DEFAULT_API_BASE,
-        )
         values["coze_api_key"] = convert_to_secret_str(
             get_from_dict_or_env(
                 values,
@@ -57,7 +57,7 @@ from langchain_core.outputs import (
 from langchain_core.pydantic_v1 import BaseModel, Field, root_validator
 from langchain_core.runnables import Runnable
 from langchain_core.tools import BaseTool
-from langchain_core.utils import get_from_dict_or_env
+from langchain_core.utils import from_env, get_from_dict_or_env
 from langchain_core.utils.function_calling import convert_to_openai_tool

 from langchain_community.utilities.requests import Requests
@@ -203,7 +203,9 @@ class ChatDeepInfra(BaseChatModel):
     # client: Any #: :meta private:
     model_name: str = Field(default="meta-llama/Llama-2-70b-chat-hf", alias="model")
     """Model name to use."""
-    deepinfra_api_token: Optional[str] = None
+    deepinfra_api_token: Optional[str] = Field(
+        default_factory=from_env("DEEPINFRA_API_TOKEN", default=api_key)
+    )
     request_timeout: Optional[float] = Field(default=None, alias="timeout")
     temperature: Optional[float] = 1
     model_kwargs: Dict[str, Any] = Field(default_factory=dict)
@@ -297,12 +299,6 @@ class ChatDeepInfra(BaseChatModel):
             "DEEPINFRA_API_KEY",
             default="",
         )
-        values["deepinfra_api_token"] = get_from_dict_or_env(
-            values,
-            "deepinfra_api_token",
-            "DEEPINFRA_API_TOKEN",
-            default=api_key,
-        )
         return values

     @root_validator(pre=False, skip_on_failure=True)
@@ -14,7 +14,7 @@ from langchain_core.messages import (
 )
 from langchain_core.outputs import ChatGeneration, ChatResult
 from langchain_core.pydantic_v1 import root_validator
-from langchain_core.utils import get_from_dict_or_env
+from langchain_core.utils import from_env, get_from_dict_or_env

 logger = logging.getLogger(__name__)

@@ -74,13 +74,17 @@ class ErnieBotChat(BaseChatModel):

    """

-    ernie_api_base: Optional[str] = None
+    ernie_api_base: Optional[str] = Field(
+        default_factory=from_env("ERNIE_API_BASE", default="https://aip.baidubce.com")
+    )
     """Baidu application custom endpoints"""

-    ernie_client_id: Optional[str] = None
+    ernie_client_id: Optional[str] = Field(default_factory=from_env("ERNIE_CLIENT_ID"))
     """Baidu application client id"""

-    ernie_client_secret: Optional[str] = None
+    ernie_client_secret: Optional[str] = Field(
+        default_factory=from_env("ERNIE_CLIENT_SECRET")
+    )
     """Baidu application client secret"""

     access_token: Optional[str] = None
@@ -110,19 +114,6 @@ class ErnieBotChat(BaseChatModel):

     @root_validator(pre=True)
     def validate_environment(cls, values: Dict) -> Dict:
-        values["ernie_api_base"] = get_from_dict_or_env(
-            values, "ernie_api_base", "ERNIE_API_BASE", "https://aip.baidubce.com"
-        )
-        values["ernie_client_id"] = get_from_dict_or_env(
-            values,
-            "ernie_client_id",
-            "ERNIE_CLIENT_ID",
-        )
-        values["ernie_client_secret"] = get_from_dict_or_env(
-            values,
-            "ernie_client_secret",
-            "ERNIE_CLIENT_SECRET",
-        )
         return values

     def _chat(self, payload: object) -> dict:
@@ -31,7 +31,7 @@ from langchain_core.language_models.llms import create_base_retry_decorator
 from langchain_core.messages import AIMessageChunk, BaseMessage, BaseMessageChunk
 from langchain_core.outputs import ChatGeneration, ChatGenerationChunk, ChatResult
 from langchain_core.pydantic_v1 import BaseModel, Field, SecretStr, root_validator
-from langchain_core.utils import convert_to_secret_str, get_from_dict_or_env
+from langchain_core.utils import convert_to_secret_str, from_env, get_from_dict_or_env

 from langchain_community.adapters.openai import (
     convert_dict_to_message,
@@ -151,7 +151,9 @@ class GPTRouter(BaseChatModel):

     client: Any = Field(default=None, exclude=True) #: :meta private:
     models_priority_list: List[GPTRouterModel] = Field(min_items=1)
-    gpt_router_api_base: str = Field(default=None)
+    gpt_router_api_base: str = Field(
+        default_factory=from_env("GPT_ROUTER_API_BASE", default=DEFAULT_API_BASE_URL)
+    )
     """WriteSonic GPTRouter custom endpoint"""
     gpt_router_api_key: Optional[SecretStr] = None
     """WriteSonic GPTRouter API Key"""
@@ -169,13 +171,6 @@ class GPTRouter(BaseChatModel):

     @root_validator(allow_reuse=True)
     def validate_environment(cls, values: Dict) -> Dict:
-        values["gpt_router_api_base"] = get_from_dict_or_env(
-            values,
-            "gpt_router_api_base",
-            "GPT_ROUTER_API_BASE",
-            DEFAULT_API_BASE_URL,
-        )
-
         values["gpt_router_api_key"] = convert_to_secret_str(
             get_from_dict_or_env(
                 values,
@@ -21,6 +21,7 @@ from langchain_core.outputs import ChatGeneration, ChatGenerationChunk, ChatResu
 from langchain_core.pydantic_v1 import Field, SecretStr, root_validator
 from langchain_core.utils import (
     convert_to_secret_str,
+    from_env,
     get_from_dict_or_env,
     get_pydantic_field_names,
     pre_init,
@@ -98,9 +99,11 @@ class ChatHunyuan(BaseChatModel):
     def lc_serializable(self) -> bool:
         return True

-    hunyuan_app_id: Optional[int] = None
+    hunyuan_app_id: Optional[int] = Field(default_factory=from_env("HUNYUAN_APP_ID"))
     """Hunyuan App ID"""
-    hunyuan_secret_id: Optional[str] = None
+    hunyuan_secret_id: Optional[str] = Field(
+        default_factory=from_env("HUNYUAN_SECRET_ID")
+    )
     """Hunyuan Secret ID"""
     hunyuan_secret_key: Optional[SecretStr] = None
     """Hunyuan Secret Key"""
@@ -165,16 +168,6 @@ class ChatHunyuan(BaseChatModel):

     @pre_init
     def validate_environment(cls, values: Dict) -> Dict:
-        values["hunyuan_app_id"] = get_from_dict_or_env(
-            values,
-            "hunyuan_app_id",
-            "HUNYUAN_APP_ID",
-        )
-        values["hunyuan_secret_id"] = get_from_dict_or_env(
-            values,
-            "hunyuan_secret_id",
-            "HUNYUAN_SECRET_ID",
-        )
         values["hunyuan_secret_key"] = convert_to_secret_str(
             get_from_dict_or_env(
                 values,
@@ -55,7 +55,7 @@ from langchain_core.outputs import (
 from langchain_core.pydantic_v1 import BaseModel, Field
 from langchain_core.runnables import Runnable
 from langchain_core.tools import BaseTool
-from langchain_core.utils import get_from_dict_or_env, pre_init
+from langchain_core.utils import from_env, get_from_dict_or_env, pre_init
 from langchain_core.utils.function_calling import convert_to_openai_tool

 logger = logging.getLogger(__name__)
@@ -219,12 +219,24 @@ class ChatLiteLLM(BaseChatModel):
     model: str = "gpt-3.5-turbo"
     model_name: Optional[str] = None
     """Model name to use."""
-    openai_api_key: Optional[str] = None
-    azure_api_key: Optional[str] = None
-    anthropic_api_key: Optional[str] = None
-    replicate_api_key: Optional[str] = None
-    cohere_api_key: Optional[str] = None
-    openrouter_api_key: Optional[str] = None
+    openai_api_key: Optional[str] = Field(
+        default_factory=from_env("OPENAI_API_KEY", default="")
+    )
+    azure_api_key: Optional[str] = Field(
+        default_factory=from_env("AZURE_API_KEY", default="")
+    )
+    anthropic_api_key: Optional[str] = Field(
+        default_factory=from_env("ANTHROPIC_API_KEY", default="")
+    )
+    replicate_api_key: Optional[str] = Field(
+        default_factory=from_env("REPLICATE_API_KEY", default="")
+    )
+    cohere_api_key: Optional[str] = Field(
+        default_factory=from_env("COHERE_API_KEY", default="")
+    )
+    openrouter_api_key: Optional[str] = Field(
+        default_factory=from_env("OPENROUTER_API_KEY", default="")
+    )
     streaming: bool = False
     api_base: Optional[str] = None
     organization: Optional[str] = None
@@ -302,24 +314,6 @@ class ChatLiteLLM(BaseChatModel):
                 "Please install it with `pip install litellm`"
             )

-        values["openai_api_key"] = get_from_dict_or_env(
-            values, "openai_api_key", "OPENAI_API_KEY", default=""
-        )
-        values["azure_api_key"] = get_from_dict_or_env(
-            values, "azure_api_key", "AZURE_API_KEY", default=""
-        )
-        values["anthropic_api_key"] = get_from_dict_or_env(
-            values, "anthropic_api_key", "ANTHROPIC_API_KEY", default=""
-        )
-        values["replicate_api_key"] = get_from_dict_or_env(
-            values, "replicate_api_key", "REPLICATE_API_KEY", default=""
-        )
-        values["openrouter_api_key"] = get_from_dict_or_env(
-            values, "openrouter_api_key", "OPENROUTER_API_KEY", default=""
-        )
-        values["cohere_api_key"] = get_from_dict_or_env(
-            values, "cohere_api_key", "COHERE_API_KEY", default=""
-        )
         values["huggingface_api_key"] = get_from_dict_or_env(
             values, "huggingface_api_key", "HUGGINGFACE_API_KEY", default=""
         )
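Because the six LiteLLM keys now default from the environment, the change is easy to exercise in isolation; a pytest-style sketch (a hypothetical test, not part of the commit, and it assumes `litellm` is installed since the surviving validator still imports it):

from langchain_community.chat_models import ChatLiteLLM

def test_env_defaults(monkeypatch):
    monkeypatch.setenv("OPENAI_API_KEY", "test-key")
    monkeypatch.delenv("COHERE_API_KEY", raising=False)

    llm = ChatLiteLLM(model="gpt-3.5-turbo")
    assert llm.openai_api_key == "test-key"  # read by from_env at instantiation
    assert llm.cohere_api_key == ""          # falls back to default=""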
@@ -3,7 +3,12 @@
 from typing import Dict

 from langchain_core.pydantic_v1 import Field, SecretStr
-from langchain_core.utils import convert_to_secret_str, get_from_dict_or_env, pre_init
+from langchain_core.utils import (
+    convert_to_secret_str,
+    from_env,
+    get_from_dict_or_env,
+    pre_init,
+)

 from langchain_community.chat_models.openai import ChatOpenAI
 from langchain_community.utils.openai import is_openai_v1
@@ -31,9 +36,13 @@ class ChatOctoAI(ChatOpenAI):
             chat = ChatOctoAI(model_name="mixtral-8x7b-instruct")
     """

-    octoai_api_base: str = Field(default=DEFAULT_API_BASE)
+    octoai_api_base: str = Field(
+        default_factory=from_env("OCTOAI_API_BASE", default=DEFAULT_API_BASE)
+    )
     octoai_api_token: SecretStr = Field(default=None)
-    model_name: str = Field(default=DEFAULT_MODEL)
+    model_name: str = Field(
+        default_factory=from_env("MODEL_NAME", default=DEFAULT_MODEL)
+    )

     @property
     def _llm_type(self) -> str:
@@ -51,21 +60,9 @@ class ChatOctoAI(ChatOpenAI):
     @pre_init
     def validate_environment(cls, values: Dict) -> Dict:
         """Validate that api key and python package exists in environment."""
-        values["octoai_api_base"] = get_from_dict_or_env(
-            values,
-            "octoai_api_base",
-            "OCTOAI_API_BASE",
-            default=DEFAULT_API_BASE,
-        )
         values["octoai_api_token"] = convert_to_secret_str(
             get_from_dict_or_env(values, "octoai_api_token", "OCTOAI_API_TOKEN")
         )
-        values["model_name"] = get_from_dict_or_env(
-            values,
-            "model_name",
-            "MODEL_NAME",
-            default=DEFAULT_MODEL,
-        )

         try:
             import openai
@@ -47,6 +47,7 @@ from langchain_core.outputs import ChatGeneration, ChatGenerationChunk, ChatResu
 from langchain_core.pydantic_v1 import BaseModel, Field, root_validator
 from langchain_core.runnables import Runnable
 from langchain_core.utils import (
+    from_env,
     get_from_dict_or_env,
     get_pydantic_field_names,
     pre_init,
@@ -205,7 +206,9 @@ class ChatOpenAI(BaseChatModel):
     # When updating this to use a SecretStr
     # Check for classes that derive from this class (as some of them
     # may assume openai_api_key is a str)
-    openai_api_key: Optional[str] = Field(default=None, alias="api_key")
+    openai_api_key: Optional[str] = Field(
+        default_factory=from_env("OPENAI_API_KEY"), alias="api_key"
+    )
     """Automatically inferred from env var `OPENAI_API_KEY` if not provided."""
     openai_api_base: Optional[str] = Field(default=None, alias="base_url")
     """Base URL path for API requests, leave blank if not using a proxy or service
@@ -213,7 +216,9 @@ class ChatOpenAI(BaseChatModel):
     openai_organization: Optional[str] = Field(default=None, alias="organization")
     """Automatically inferred from env var `OPENAI_ORG_ID` if not provided."""
     # to support explicit proxy for OpenAI
-    openai_proxy: Optional[str] = None
+    openai_proxy: Optional[str] = Field(
+        default_factory=from_env("OPENAI_PROXY", default="")
+    )
     request_timeout: Union[float, Tuple[float, float], Any, None] = Field(
         default=None, alias="timeout"
     )
@@ -283,9 +288,6 @@ class ChatOpenAI(BaseChatModel):
         if values["n"] > 1 and values["streaming"]:
             raise ValueError("n must be 1 when streaming.")

-        values["openai_api_key"] = get_from_dict_or_env(
-            values, "openai_api_key", "OPENAI_API_KEY"
-        )
         # Check OPENAI_ORGANIZATION for backwards compatibility.
         values["openai_organization"] = (
             values["openai_organization"]
@@ -295,12 +297,6 @@ class ChatOpenAI(BaseChatModel):
         values["openai_api_base"] = values["openai_api_base"] or os.getenv(
             "OPENAI_API_BASE"
         )
-        values["openai_proxy"] = get_from_dict_or_env(
-            values,
-            "openai_proxy",
-            "OPENAI_PROXY",
-            default="",
-        )
         try:
             import openai

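The reason the deleted lookups are safe to drop is pydantic v1's evaluation order: a `pre=True` root validator sees only what the caller passed, and any field still missing afterwards is filled by its `default_factory` during field validation; either way, an explicit keyword argument suppresses the factory. A standalone sketch of that contract (plain pydantic here; inside langchain the import would be `langchain_core.pydantic_v1`, and `from_env` below is a stand-in):

import os

from pydantic import BaseModel, Field

def from_env(key, *, default=None):
    # stand-in for langchain_core.utils.from_env
    return lambda: os.environ.get(key, default)

class Demo(BaseModel):
    api_key: str = Field(default_factory=from_env("DEMO_API_KEY", default=""))

os.environ["DEMO_API_KEY"] = "abc"
assert Demo().api_key == "abc"               # factory reads the env var
assert Demo(api_key="xyz").api_key == "xyz"  # explicit value wins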
@@ -36,7 +36,11 @@ from langchain_core.messages import (
 )
 from langchain_core.outputs import ChatGeneration, ChatGenerationChunk, ChatResult
 from langchain_core.pydantic_v1 import Field, root_validator
-from langchain_core.utils import get_from_dict_or_env, get_pydantic_field_names
+from langchain_core.utils import (
+    from_env,
+    get_from_dict_or_env,
+    get_pydantic_field_names,
+)

 logger = logging.getLogger(__name__)

@@ -67,7 +71,9 @@ class ChatPerplexity(BaseChatModel):
     """What sampling temperature to use."""
     model_kwargs: Dict[str, Any] = Field(default_factory=dict)
     """Holds any model parameters valid for `create` call not explicitly specified."""
-    pplx_api_key: Optional[str] = Field(None, alias="api_key")
+    pplx_api_key: Optional[str] = Field(
+        None, alias="api_key", default_factory=from_env("PPLX_API_KEY")
+    )
     """Base URL path for API requests,
     leave blank if not using a proxy or service emulator."""
     request_timeout: Optional[Union[float, Tuple[float, float]]] = Field(
@@ -119,9 +125,6 @@ class ChatPerplexity(BaseChatModel):
     @root_validator(allow_reuse=True)
     def validate_environment(cls, values: Dict) -> Dict:
         """Validate that api key and python package exists in environment."""
-        values["pplx_api_key"] = get_from_dict_or_env(
-            values, "pplx_api_key", "PPLX_API_KEY"
-        )
         try:
             import openai
         except ImportError:
@@ -14,6 +14,7 @@ from langchain_core.outputs import ChatGeneration, ChatResult
 from langchain_core.pydantic_v1 import Field, SecretStr, root_validator
 from langchain_core.utils import (
     convert_to_secret_str,
+    from_env,
     get_from_dict_or_env,
     get_pydantic_field_names,
     pre_init,
@@ -111,19 +112,31 @@ class ChatSnowflakeCortex(BaseChatModel):
     cumulative probabilities. Value should be ranging between 0.0 and 1.0.
     """

-    snowflake_username: Optional[str] = Field(default=None, alias="username")
+    snowflake_username: Optional[str] = Field(
+        default_factory=from_env("SNOWFLAKE_USERNAME"), alias="username"
+    )
     """Automatically inferred from env var `SNOWFLAKE_USERNAME` if not provided."""
     snowflake_password: Optional[SecretStr] = Field(default=None, alias="password")
     """Automatically inferred from env var `SNOWFLAKE_PASSWORD` if not provided."""
-    snowflake_account: Optional[str] = Field(default=None, alias="account")
+    snowflake_account: Optional[str] = Field(
+        default_factory=from_env("SNOWFLAKE_ACCOUNT"), alias="account"
+    )
     """Automatically inferred from env var `SNOWFLAKE_ACCOUNT` if not provided."""
-    snowflake_database: Optional[str] = Field(default=None, alias="database")
+    snowflake_database: Optional[str] = Field(
+        default_factory=from_env("SNOWFLAKE_DATABASE"), alias="database"
+    )
     """Automatically inferred from env var `SNOWFLAKE_DATABASE` if not provided."""
-    snowflake_schema: Optional[str] = Field(default=None, alias="schema")
+    snowflake_schema: Optional[str] = Field(
+        default_factory=from_env("SNOWFLAKE_SCHEMA"), alias="schema"
+    )
     """Automatically inferred from env var `SNOWFLAKE_SCHEMA` if not provided."""
-    snowflake_warehouse: Optional[str] = Field(default=None, alias="warehouse")
+    snowflake_warehouse: Optional[str] = Field(
+        default_factory=from_env("SNOWFLAKE_WAREHOUSE"), alias="warehouse"
+    )
     """Automatically inferred from env var `SNOWFLAKE_WAREHOUSE` if not provided."""
-    snowflake_role: Optional[str] = Field(default=None, alias="role")
+    snowflake_role: Optional[str] = Field(
+        default_factory=from_env("SNOWFLAKE_ROLE"), alias="role"
+    )
     """Automatically inferred from env var `SNOWFLAKE_ROLE` if not provided."""

     @root_validator(pre=True)
@@ -146,27 +159,9 @@ class ChatSnowflakeCortex(BaseChatModel):
                 "`pip install snowflake-snowpark-python`"
             )

-        values["snowflake_username"] = get_from_dict_or_env(
-            values, "snowflake_username", "SNOWFLAKE_USERNAME"
-        )
         values["snowflake_password"] = convert_to_secret_str(
             get_from_dict_or_env(values, "snowflake_password", "SNOWFLAKE_PASSWORD")
         )
-        values["snowflake_account"] = get_from_dict_or_env(
-            values, "snowflake_account", "SNOWFLAKE_ACCOUNT"
-        )
-        values["snowflake_database"] = get_from_dict_or_env(
-            values, "snowflake_database", "SNOWFLAKE_DATABASE"
-        )
-        values["snowflake_schema"] = get_from_dict_or_env(
-            values, "snowflake_schema", "SNOWFLAKE_SCHEMA"
-        )
-        values["snowflake_warehouse"] = get_from_dict_or_env(
-            values, "snowflake_warehouse", "SNOWFLAKE_WAREHOUSE"
-        )
-        values["snowflake_role"] = get_from_dict_or_env(
-            values, "snowflake_role", "SNOWFLAKE_ROLE"
-        )

         connection_params = {
             "account": values["snowflake_account"],
@@ -37,6 +37,7 @@ from langchain_core.outputs import (
 )
 from langchain_core.pydantic_v1 import Field, root_validator
 from langchain_core.utils import (
+    from_env,
     get_from_dict_or_env,
     get_pydantic_field_names,
 )
@@ -228,10 +229,16 @@ class ChatSparkLLM(BaseChatModel):
     spark_api_secret: Optional[str] = Field(default=None, alias="api_secret")
     """Automatically inferred from env var `IFLYTEK_SPARK_API_SECRET`
     if not provided."""
-    spark_api_url: Optional[str] = Field(default=None, alias="api_url")
+    spark_api_url: Optional[str] = Field(
+        default_factory=from_env("IFLYTEK_SPARK_API_URL", default=SPARK_API_URL),
+        alias="api_url",
+    )
     """Base URL path for API requests, leave blank if not using a proxy or service
     emulator."""
-    spark_llm_domain: Optional[str] = Field(default=None, alias="model")
+    spark_llm_domain: Optional[str] = Field(
+        default_factory=from_env("IFLYTEK_SPARK_LLM_DOMAIN", default=SPARK_LLM_DOMAIN),
+        alias="model",
+    )
     """Model name to use."""
     spark_user_id: str = "lc_user"
     streaming: bool = False
@@ -294,18 +301,6 @@ class ChatSparkLLM(BaseChatModel):
             ["spark_api_secret", "api_secret"],
             "IFLYTEK_SPARK_API_SECRET",
         )
-        values["spark_api_url"] = get_from_dict_or_env(
-            values,
-            "spark_api_url",
-            "IFLYTEK_SPARK_API_URL",
-            SPARK_API_URL,
-        )
-        values["spark_llm_domain"] = get_from_dict_or_env(
-            values,
-            "spark_llm_domain",
-            "IFLYTEK_SPARK_LLM_DOMAIN",
-            SPARK_LLM_DOMAIN,
-        )

         # put extra params into model_kwargs
         default_values = {
@@ -42,6 +42,7 @@ from langchain_core.messages import (
 from langchain_core.outputs import ChatGeneration, ChatGenerationChunk, ChatResult
 from langchain_core.pydantic_v1 import BaseModel, Field, root_validator
 from langchain_core.utils import (
+    from_env,
     get_from_dict_or_env,
     get_pydantic_field_names,
     pre_init,
@@ -85,7 +86,9 @@ class ChatYuan2(BaseChatModel):
     model_kwargs: Dict[str, Any] = Field(default_factory=dict)
     """Holds any model parameters valid for `create` call not explicitly specified."""

-    yuan2_api_key: Optional[str] = Field(default="EMPTY", alias="api_key")
+    yuan2_api_key: Optional[str] = Field(
+        default="EMPTY", alias="api_key", default_factory=from_env("YUAN2_API_KEY")
+    )
     """Automatically inferred from env var `YUAN2_API_KEY` if not provided."""

     yuan2_api_base: Optional[str] = Field(
@@ -170,9 +173,6 @@ class ChatYuan2(BaseChatModel):
     @pre_init
     def validate_environment(cls, values: Dict) -> Dict:
         """Validate that api key and python package exists in environment."""
-        values["yuan2_api_key"] = get_from_dict_or_env(
-            values, "yuan2_api_key", "YUAN2_API_KEY"
-        )

         try:
             import openai
@@ -32,7 +32,7 @@ from langchain_core.messages import (
 )
 from langchain_core.outputs import ChatGeneration, ChatGenerationChunk, ChatResult
 from langchain_core.pydantic_v1 import BaseModel, Field, root_validator
-from langchain_core.utils import get_from_dict_or_env
+from langchain_core.utils import from_env, get_from_dict_or_env

 logger = logging.getLogger(__name__)

@@ -335,7 +335,10 @@ class ChatZhipuAI(BaseChatModel):
     # client:
     zhipuai_api_key: Optional[str] = Field(default=None, alias="api_key")
     """Automatically inferred from env var `ZHIPUAI_API_KEY` if not provided."""
-    zhipuai_api_base: Optional[str] = Field(default=None, alias="api_base")
+    zhipuai_api_base: Optional[str] = Field(
+        default_factory=from_env("ZHIPUAI_API_BASE", default=ZHIPUAI_API_BASE),
+        alias="api_base",
+    )
     """Base URL path for API requests, leave blank if not using a proxy or service
     emulator.
     """
@@ -382,9 +385,6 @@ class ChatZhipuAI(BaseChatModel):
         values["zhipuai_api_key"] = get_from_dict_or_env(
             values, ["zhipuai_api_key", "api_key"], "ZHIPUAI_API_KEY"
         )
-        values["zhipuai_api_base"] = get_from_dict_or_env(
-            values, "zhipuai_api_base", "ZHIPUAI_API_BASE", default=ZHIPUAI_API_BASE
-        )

         return values

@@ -6,7 +6,7 @@ from typing import Any, Dict, List, Optional, Sequence, Union
 from langchain_core.callbacks.base import Callbacks
 from langchain_core.documents import BaseDocumentCompressor, Document
 from langchain_core.pydantic_v1 import Extra, Field, root_validator
-from langchain_core.utils import get_from_dict_or_env
+from langchain_core.utils import from_env, get_from_dict_or_env


 class DashScopeRerank(BaseDocumentCompressor):
@@ -21,7 +21,9 @@ class DashScopeRerank(BaseDocumentCompressor):
     top_n: Optional[int] = 3
     """Number of documents to return."""

-    dashscope_api_key: Optional[str] = Field(None, alias="api_key")
+    dashscope_api_key: Optional[str] = Field(
+        None, alias="api_key", default_factory=from_env("DASHSCOPE_API_KEY")
+    )
     """DashScope API key. Must be specified directly or via environment variable
     DASHSCOPE_API_KEY."""

@@ -46,9 +48,6 @@ class DashScopeRerank(BaseDocumentCompressor):
             )

         values["client"] = dashscope.TextReRank
-        values["dashscope_api_key"] = get_from_dict_or_env(
-            values, "dashscope_api_key", "DASHSCOPE_API_KEY"
-        )
         values["model"] = dashscope.TextReRank.Models.gte_rerank

         return values
@@ -6,7 +6,7 @@ from typing import Any, Dict, List, Optional, Sequence, Union
 from langchain_core.callbacks.base import Callbacks
 from langchain_core.documents import BaseDocumentCompressor, Document
 from langchain_core.pydantic_v1 import Extra, root_validator
-from langchain_core.utils import get_from_dict_or_env
+from langchain_core.utils import from_env, get_from_dict_or_env


 class VolcengineRerank(BaseDocumentCompressor):
@@ -15,11 +15,11 @@ class VolcengineRerank(BaseDocumentCompressor):
     client: Any = None
     """Volcengine client to use for compressing documents."""

-    ak: Optional[str] = None
+    ak: Optional[str] = Field(default_factory=from_env("VOLC_API_AK"))
     """Access Key ID.
     https://www.volcengine.com/docs/84313/1254553"""

-    sk: Optional[str] = None
+    sk: Optional[str] = Field(default_factory=from_env("VOLC_API_SK"))
     """Secret Access Key.
     https://www.volcengine.com/docs/84313/1254553"""

@@ -53,9 +53,6 @@ class VolcengineRerank(BaseDocumentCompressor):
                 "or `pip install --user volcengine`."
             )

-        values["ak"] = get_from_dict_or_env(values, "ak", "VOLC_API_AK")
-        values["sk"] = get_from_dict_or_env(values, "sk", "VOLC_API_SK")
-
         values["client"] = VikingDBService(
             host="api-vikingdb.volces.com",
             region="cn-beijing",
@@ -5,7 +5,12 @@ from __future__ import annotations
 from typing import Dict

 from langchain_core.pydantic_v1 import Field, SecretStr
-from langchain_core.utils import convert_to_secret_str, get_from_dict_or_env, pre_init
+from langchain_core.utils import (
+    convert_to_secret_str,
+    from_env,
+    get_from_dict_or_env,
+    pre_init,
+)

 from langchain_community.embeddings.openai import OpenAIEmbeddings
 from langchain_community.utils.openai import is_openai_v1
@@ -21,7 +26,9 @@ class AnyscaleEmbeddings(OpenAIEmbeddings):
     """AnyScale Endpoints API keys."""
     model: str = Field(default=DEFAULT_MODEL)
     """Model name to use."""
-    anyscale_api_base: str = Field(default=DEFAULT_API_BASE)
+    anyscale_api_base: str = Field(
+        default_factory=from_env("ANYSCALE_API_BASE", default=DEFAULT_API_BASE)
+    )
     """Base URL path for API requests."""
     tiktoken_enabled: bool = False
     """Set this to False for non-OpenAI implementations of the embeddings API"""
@@ -44,12 +51,6 @@ class AnyscaleEmbeddings(OpenAIEmbeddings):
                 "ANYSCALE_API_KEY",
             )
         )
-        values["anyscale_api_base"] = get_from_dict_or_env(
-            values,
-            "anyscale_api_base",
-            "ANYSCALE_API_BASE",
-            default=DEFAULT_API_BASE,
-        )
         try:
             import openai

@@ -11,7 +11,7 @@ from typing import (

 from langchain_core.embeddings import Embeddings
 from langchain_core.pydantic_v1 import BaseModel, Extra, root_validator
-from langchain_core.utils import get_from_dict_or_env
+from langchain_core.utils import from_env, get_from_dict_or_env
 from requests.exceptions import HTTPError
 from tenacity import (
     before_sleep_log,
@@ -104,7 +104,9 @@ class DashScopeEmbeddings(BaseModel, Embeddings):
     client: Any #: :meta private:
     """The DashScope client."""
     model: str = "text-embedding-v1"
-    dashscope_api_key: Optional[str] = None
+    dashscope_api_key: Optional[str] = Field(
+        default_factory=from_env("DASHSCOPE_API_KEY")
+    )
     max_retries: int = 5
     """Maximum number of retries to make when generating."""

@@ -118,9 +120,6 @@ class DashScopeEmbeddings(BaseModel, Embeddings):
        import dashscope

         """Validate that api key and python package exists in environment."""
-        values["dashscope_api_key"] = get_from_dict_or_env(
-            values, "dashscope_api_key", "DASHSCOPE_API_KEY"
-        )
         dashscope.api_key = values["dashscope_api_key"]
         try:
             import dashscope
@@ -8,7 +8,7 @@ from langchain_core._api.deprecation import deprecated
 from langchain_core.embeddings import Embeddings
 from langchain_core.pydantic_v1 import BaseModel
 from langchain_core.runnables.config import run_in_executor
-from langchain_core.utils import get_from_dict_or_env, pre_init
+from langchain_core.utils import from_env, get_from_dict_or_env, pre_init

 logger = logging.getLogger(__name__)

@@ -20,9 +20,13 @@ logger = logging.getLogger(__name__)
 class ErnieEmbeddings(BaseModel, Embeddings):
     """`Ernie Embeddings V1` embedding models."""

-    ernie_api_base: Optional[str] = None
-    ernie_client_id: Optional[str] = None
-    ernie_client_secret: Optional[str] = None
+    ernie_api_base: Optional[str] = Field(
+        default_factory=from_env("ERNIE_API_BASE", default="https://aip.baidubce.com")
+    )
+    ernie_client_id: Optional[str] = Field(default_factory=from_env("ERNIE_CLIENT_ID"))
+    ernie_client_secret: Optional[str] = Field(
+        default_factory=from_env("ERNIE_CLIENT_SECRET")
+    )
     access_token: Optional[str] = None

     chunk_size: int = 16
@@ -33,19 +37,6 @@ class ErnieEmbeddings(BaseModel, Embeddings):

     @pre_init
     def validate_environment(cls, values: Dict) -> Dict:
-        values["ernie_api_base"] = get_from_dict_or_env(
-            values, "ernie_api_base", "ERNIE_API_BASE", "https://aip.baidubce.com"
-        )
-        values["ernie_client_id"] = get_from_dict_or_env(
-            values,
-            "ernie_client_id",
-            "ERNIE_CLIENT_ID",
-        )
-        values["ernie_client_secret"] = get_from_dict_or_env(
-            values,
-            "ernie_client_secret",
-            "ERNIE_CLIENT_SECRET",
-        )
         return values

     def _embedding(self, json: object) -> dict:
@@ -2,7 +2,7 @@ from typing import Any, Dict, List, Optional

 from langchain_core.embeddings import Embeddings
 from langchain_core.pydantic_v1 import BaseModel, Extra, root_validator
-from langchain_core.utils import get_from_dict_or_env
+from langchain_core.utils import from_env, get_from_dict_or_env
 from packaging.version import parse

 __all__ = ["GradientEmbeddings"]
@@ -31,10 +31,14 @@ class GradientEmbeddings(BaseModel, Embeddings):
     model: str
     "Underlying gradient.ai model id."

-    gradient_workspace_id: Optional[str] = None
+    gradient_workspace_id: Optional[str] = Field(
+        default_factory=from_env("GRADIENT_WORKSPACE_ID")
+    )
     "Underlying gradient.ai workspace_id."

-    gradient_access_token: Optional[str] = None
+    gradient_access_token: Optional[str] = Field(
+        default_factory=from_env("GRADIENT_ACCESS_TOKEN")
+    )
     """gradient.ai API Token, which can be generated by going to
         https://auth.gradient.ai/select-workspace
         and selecting "Access tokens" under the profile drop-down.
@@ -59,13 +63,6 @@ class GradientEmbeddings(BaseModel, Embeddings):
     def validate_environment(cls, values: Dict) -> Dict:
         """Validate that api key and python package exists in environment."""

-        values["gradient_access_token"] = get_from_dict_or_env(
-            values, "gradient_access_token", "GRADIENT_ACCESS_TOKEN"
-        )
-        values["gradient_workspace_id"] = get_from_dict_or_env(
-            values, "gradient_workspace_id", "GRADIENT_WORKSPACE_ID"
-        )
-
         values["gradient_api_url"] = get_from_dict_or_env(
             values, "gradient_api_url", "GRADIENT_API_URL"
         )
@@ -18,6 +18,7 @@ from typing import (
 from langchain_core.embeddings import Embeddings
 from langchain_core.pydantic_v1 import BaseModel, Extra, Field, root_validator
 from langchain_core.utils import (
+    from_env,
     get_from_dict_or_env,
     get_pydantic_field_names,
     pre_init,
@@ -144,14 +145,22 @@ class LocalAIEmbeddings(BaseModel, Embeddings):
     client: Any #: :meta private:
     model: str = "text-embedding-ada-002"
     deployment: str = model
-    openai_api_version: Optional[str] = None
-    openai_api_base: Optional[str] = None
+    openai_api_version: Optional[str] = Field(
+        default_factory=from_env("OPENAI_API_VERSION", default=default_api_version)
+    )
+    openai_api_base: Optional[str] = Field(
+        default_factory=from_env("OPENAI_API_BASE", default="")
+    )
     # to support explicit proxy for LocalAI
-    openai_proxy: Optional[str] = None
+    openai_proxy: Optional[str] = Field(
+        default_factory=from_env("OPENAI_PROXY", default="")
+    )
     embedding_ctx_length: int = 8191
     """The maximum number of tokens to embed at once."""
-    openai_api_key: Optional[str] = None
-    openai_organization: Optional[str] = None
+    openai_api_key: Optional[str] = Field(default_factory=from_env("OPENAI_API_KEY"))
+    openai_organization: Optional[str] = Field(
+        default_factory=from_env("OPENAI_ORGANIZATION", default="")
+    )
     allowed_special: Union[Literal["all"], Set[str]] = set()
     disallowed_special: Union[Literal["all"], Set[str], Sequence[str]] = "all"
     chunk_size: int = 1000
@@ -200,35 +209,8 @@ class LocalAIEmbeddings(BaseModel, Embeddings):
     @pre_init
     def validate_environment(cls, values: Dict) -> Dict:
         """Validate that api key and python package exists in environment."""
-        values["openai_api_key"] = get_from_dict_or_env(
-            values, "openai_api_key", "OPENAI_API_KEY"
-        )
-        values["openai_api_base"] = get_from_dict_or_env(
-            values,
-            "openai_api_base",
-            "OPENAI_API_BASE",
-            default="",
-        )
-        values["openai_proxy"] = get_from_dict_or_env(
-            values,
-            "openai_proxy",
-            "OPENAI_PROXY",
-            default="",
-        )
-
-        default_api_version = ""
-        values["openai_api_version"] = get_from_dict_or_env(
-            values,
-            "openai_api_version",
-            "OPENAI_API_VERSION",
-            default=default_api_version,
-        )
-        values["openai_organization"] = get_from_dict_or_env(
-            values,
-            "openai_organization",
-            "OPENAI_ORGANIZATION",
-            default="",
-        )
         try:
             import openai

@@ -1,7 +1,12 @@
 from typing import Dict

 from langchain_core.pydantic_v1 import Field, SecretStr
-from langchain_core.utils import convert_to_secret_str, get_from_dict_or_env, pre_init
+from langchain_core.utils import (
+    convert_to_secret_str,
+    from_env,
+    get_from_dict_or_env,
+    pre_init,
+)

 from langchain_community.embeddings.openai import OpenAIEmbeddings
 from langchain_community.utils.openai import is_openai_v1
@@ -22,9 +27,11 @@ class OctoAIEmbeddings(OpenAIEmbeddings):

     octoai_api_token: SecretStr = Field(default=None)
     """OctoAI Endpoints API keys."""
-    endpoint_url: str = Field(default=DEFAULT_API_BASE)
+    endpoint_url: str = Field(
+        default_factory=from_env("ENDPOINT_URL", default=DEFAULT_API_BASE)
+    )
     """Base URL path for API requests."""
-    model: str = Field(default=DEFAULT_MODEL)
+    model: str = Field(default_factory=from_env("MODEL", default=DEFAULT_MODEL))
     """Model name to use."""
     tiktoken_enabled: bool = False
     """Set this to False for non-OpenAI implementations of the embeddings API"""
@@ -41,21 +48,9 @@ class OctoAIEmbeddings(OpenAIEmbeddings):
     @pre_init
     def validate_environment(cls, values: dict) -> dict:
         """Validate that api key and python package exists in environment."""
-        values["endpoint_url"] = get_from_dict_or_env(
-            values,
-            "endpoint_url",
-            "ENDPOINT_URL",
-            default=DEFAULT_API_BASE,
-        )
         values["octoai_api_token"] = convert_to_secret_str(
             get_from_dict_or_env(values, "octoai_api_token", "OCTOAI_API_TOKEN")
         )
-        values["model"] = get_from_dict_or_env(
-            values,
-            "model",
-            "MODEL",
-            default=DEFAULT_MODEL,
-        )

         try:
             import openai
@@ -23,6 +23,7 @@ from langchain_core._api.deprecation import deprecated
 from langchain_core.embeddings import Embeddings
 from langchain_core.pydantic_v1 import BaseModel, Extra, Field, root_validator
 from langchain_core.utils import (
+    from_env,
     get_from_dict_or_env,
     get_pydantic_field_names,
     pre_init,
@@ -195,19 +196,28 @@ class OpenAIEmbeddings(BaseModel, Embeddings):
     # to support Azure OpenAI Service custom deployment names
     deployment: Optional[str] = model
     # TODO: Move to AzureOpenAIEmbeddings.
-    openai_api_version: Optional[str] = Field(default=None, alias="api_version")
+    openai_api_version: Optional[str] = Field(
+        default_factory=from_env("OPENAI_API_VERSION", default=default_api_version),
+        alias="api_version",
+    )
     """Automatically inferred from env var `OPENAI_API_VERSION` if not provided."""
     # to support Azure OpenAI Service custom endpoints
     openai_api_base: Optional[str] = Field(default=None, alias="base_url")
     """Base URL path for API requests, leave blank if not using a proxy or service
     emulator."""
     # to support Azure OpenAI Service custom endpoints
-    openai_api_type: Optional[str] = None
+    openai_api_type: Optional[str] = Field(
+        default_factory=from_env("OPENAI_API_TYPE", default="")
+    )
     # to support explicit proxy for OpenAI
-    openai_proxy: Optional[str] = None
+    openai_proxy: Optional[str] = Field(
+        default_factory=from_env("OPENAI_PROXY", default="")
+    )
     embedding_ctx_length: int = 8191
     """The maximum number of tokens to embed at once."""
-    openai_api_key: Optional[str] = Field(default=None, alias="api_key")
+    openai_api_key: Optional[str] = Field(
+        default_factory=from_env("OPENAI_API_KEY"), alias="api_key"
+    )
     """Automatically inferred from env var `OPENAI_API_KEY` if not provided."""
     openai_organization: Optional[str] = Field(default=None, alias="organization")
     """Automatically inferred from env var `OPENAI_ORG_ID` if not provided."""
@@ -289,24 +299,9 @@ class OpenAIEmbeddings(BaseModel, Embeddings):
     @pre_init
     def validate_environment(cls, values: Dict) -> Dict:
         """Validate that api key and python package exists in environment."""
-        values["openai_api_key"] = get_from_dict_or_env(
-            values, "openai_api_key", "OPENAI_API_KEY"
-        )
         values["openai_api_base"] = values["openai_api_base"] or os.getenv(
             "OPENAI_API_BASE"
         )
-        values["openai_api_type"] = get_from_dict_or_env(
-            values,
-            "openai_api_type",
-            "OPENAI_API_TYPE",
-            default="",
-        )
-        values["openai_proxy"] = get_from_dict_or_env(
-            values,
-            "openai_proxy",
-            "OPENAI_PROXY",
-            default="",
-        )
         if values["openai_api_type"] in ("azure", "azure_ad", "azuread"):
             default_api_version = "2023-05-15"
             # Azure OpenAI embedding models allow a maximum of 16 texts
@@ -315,12 +310,6 @@ class OpenAIEmbeddings(BaseModel, Embeddings):
             values["chunk_size"] = min(values["chunk_size"], 16)
         else:
             default_api_version = ""
-        values["openai_api_version"] = get_from_dict_or_env(
-            values,
-            "openai_api_version",
-            "OPENAI_API_VERSION",
-            default=default_api_version,
-        )
         # Check OPENAI_ORGANIZATION for backwards compatibility.
         values["openai_organization"] = (
             values["openai_organization"]
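As with the chat model earlier, the embeddings client now resolves its environment configuration through field defaults; a sketch with a placeholder key (the Azure-dependent `OPENAI_API_VERSION` default still depends on the surviving validator logic above):

import os

os.environ["OPENAI_API_KEY"] = "sk-placeholder"  # placeholder value

from langchain_community.embeddings import OpenAIEmbeddings

emb = OpenAIEmbeddings()
# openai_api_key is populated by from_env("OPENAI_API_KEY") at construction;
# openai_proxy falls back to "" exactly as the deleted validator block did.
assert emb.openai_api_key == "sk-placeholder"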
@@ -5,7 +5,7 @@ from typing import Any, Dict, List, Optional

 from langchain_core.embeddings import Embeddings
 from langchain_core.pydantic_v1 import BaseModel
-from langchain_core.utils import get_from_dict_or_env, pre_init
+from langchain_core.utils import from_env, get_from_dict_or_env, pre_init

 logger = logging.getLogger(__name__)

@@ -13,11 +13,11 @@ logger = logging.getLogger(__name__)
 class VolcanoEmbeddings(BaseModel, Embeddings):
     """`Volcengine Embeddings` embedding models."""

-    volcano_ak: Optional[str] = None
+    volcano_ak: Optional[str] = Field(default_factory=from_env("VOLC_ACCESSKEY"))
     """volcano access key
     learn more from: https://www.volcengine.com/docs/6459/76491#ak-sk"""

-    volcano_sk: Optional[str] = None
+    volcano_sk: Optional[str] = Field(default_factory=from_env("VOLC_SECRETKEY"))
     """volcano secret key
     learn more from: https://www.volcengine.com/docs/6459/76491#ak-sk"""

@@ -66,16 +66,6 @@ class VolcanoEmbeddings(BaseModel, Embeddings):
             ValueError: volcengine package not found, please install it with
                 `pip install volcengine`
         """
-        values["volcano_ak"] = get_from_dict_or_env(
-            values,
-            "volcano_ak",
-            "VOLC_ACCESSKEY",
-        )
-        values["volcano_sk"] = get_from_dict_or_env(
-            values,
-            "volcano_sk",
-            "VOLC_SECRETKEY",
-        )

         try:
             from volcengine.maas import MaasService
@@ -23,6 +23,7 @@ from langchain_core.prompt_values import PromptValue
 from langchain_core.pydantic_v1 import Field, SecretStr, root_validator
 from langchain_core.utils import (
     check_package_version,
+    from_env,
     get_from_dict_or_env,
     get_pydantic_field_names,
     pre_init,
@@ -57,7 +58,11 @@ class _AnthropicCommon(BaseLanguageModel):
     max_retries: int = 2
     """Number of retries allowed for requests sent to the Anthropic Completion API."""

-    anthropic_api_url: Optional[str] = None
+    anthropic_api_url: Optional[str] = Field(
+        default_factory=from_env(
+            "ANTHROPIC_API_URL", default="https://api.anthropic.com"
+        )
+    )

     anthropic_api_key: Optional[SecretStr] = None

@@ -82,12 +87,6 @@ class _AnthropicCommon(BaseLanguageModel):
             get_from_dict_or_env(values, "anthropic_api_key", "ANTHROPIC_API_KEY")
         )
-        # Get custom api url from environment.
-        values["anthropic_api_url"] = get_from_dict_or_env(
-            values,
-            "anthropic_api_url",
-            "ANTHROPIC_API_URL",
-            default="https://api.anthropic.com",
-        )

         try:
             import anthropic
@@ -15,7 +15,12 @@ from langchain_core.callbacks import (
 )
 from langchain_core.outputs import Generation, GenerationChunk, LLMResult
 from langchain_core.pydantic_v1 import Field, SecretStr
-from langchain_core.utils import convert_to_secret_str, get_from_dict_or_env, pre_init
+from langchain_core.utils import (
+    convert_to_secret_str,
+    from_env,
+    get_from_dict_or_env,
+    pre_init,
+)

 from langchain_community.llms.openai import (
     BaseOpenAI,
@@ -83,9 +88,13 @@ class Anyscale(BaseOpenAI):
     """

     """Key word arguments to pass to the model."""
-    anyscale_api_base: str = Field(default=DEFAULT_BASE_URL)
+    anyscale_api_base: str = Field(
+        default_factory=from_env("ANYSCALE_API_BASE", default=DEFAULT_BASE_URL)
+    )
     anyscale_api_key: SecretStr = Field(default=None)
-    model_name: str = Field(default=DEFAULT_MODEL)
+    model_name: str = Field(
+        default_factory=from_env("MODEL_NAME", default=DEFAULT_MODEL)
+    )

     prefix_messages: List = Field(default_factory=list)
@@ -96,21 +105,9 @@ class Anyscale(BaseOpenAI):
     @pre_init
     def validate_environment(cls, values: Dict) -> Dict:
         """Validate that api key and python package exists in environment."""
-        values["anyscale_api_base"] = get_from_dict_or_env(
-            values,
-            "anyscale_api_base",
-            "ANYSCALE_API_BASE",
-            default=DEFAULT_BASE_URL,
-        )
         values["anyscale_api_key"] = convert_to_secret_str(
             get_from_dict_or_env(values, "anyscale_api_key", "ANYSCALE_API_KEY")
         )
-        values["model_name"] = get_from_dict_or_env(
-            values,
-            "model_name",
-            "MODEL_NAME",
-            default=DEFAULT_MODEL,
-        )

         try:
             import openai
@@ -8,7 +8,12 @@ import requests
 from langchain_core.callbacks import CallbackManagerForLLMRun
 from langchain_core.language_models.llms import LLM
 from langchain_core.pydantic_v1 import Field, SecretStr
-from langchain_core.utils import convert_to_secret_str, get_from_dict_or_env, pre_init
+from langchain_core.utils import (
+    convert_to_secret_str,
+    from_env,
+    get_from_dict_or_env,
+    pre_init,
+)

 from langchain_community.llms.utils import enforce_stop_tokens

@@ -28,7 +33,12 @@ class BaichuanLLM(LLM):
     timeout: int = 60
     model_kwargs: Dict[str, Any] = Field(default_factory=dict)

-    baichuan_api_host: Optional[str] = None
+    baichuan_api_host: Optional[str] = Field(
+        default_factory=from_env(
+            "BAICHUAN_API_HOST",
+            default="https://api.baichuan-ai.com/v1/chat/completions",
+        )
+    )
     baichuan_api_key: Optional[SecretStr] = None

     @pre_init
@@ -36,12 +46,6 @@ class BaichuanLLM(LLM):
         values["baichuan_api_key"] = convert_to_secret_str(
             get_from_dict_or_env(values, "baichuan_api_key", "BAICHUAN_API_KEY")
         )
-        values["baichuan_api_host"] = get_from_dict_or_env(
-            values,
-            "baichuan_api_host",
-            "BAICHUAN_API_HOST",
-            default="https://api.baichuan-ai.com/v1/chat/completions",
-        )
         return values

     @property
@@ -22,7 +22,7 @@ from langchain_core.callbacks import (
 from langchain_core.language_models.llms import LLM
 from langchain_core.outputs import GenerationChunk
 from langchain_core.pydantic_v1 import BaseModel, Extra, Field
-from langchain_core.utils import get_from_dict_or_env, pre_init
+from langchain_core.utils import from_env, get_from_dict_or_env, pre_init

 from langchain_community.llms.utils import enforce_stop_tokens
 from langchain_community.utilities.anthropic import (
@@ -297,7 +297,9 @@ class BedrockBase(BaseModel, ABC):

     client: Any = Field(exclude=True) #: :meta private:

-    region_name: Optional[str] = None
+    region_name: Optional[str] = Field(
+        default_factory=from_env("AWS_DEFAULT_REGION", default=session.region_name)
+    )
     """The aws region e.g., `us-west-2`. Fallsback to AWS_DEFAULT_REGION env variable
     or region specified in ~/.aws/config in case it is not provided here.
     """
@@ -406,13 +408,6 @@ class BedrockBase(BaseModel, ABC):
             # use default credentials
             session = boto3.Session()

-        values["region_name"] = get_from_dict_or_env(
-            values,
-            "region_name",
-            "AWS_DEFAULT_REGION",
-            default=session.region_name,
-        )
-
         client_params = {}
         if values["region_name"]:
             client_params["region_name"] = values["region_name"]
@@ -10,7 +10,7 @@ from langchain_core.callbacks import (
 )
 from langchain_core.language_models.llms import LLM
 from langchain_core.pydantic_v1 import Extra, Field, root_validator
-from langchain_core.utils import get_from_dict_or_env, pre_init
+from langchain_core.utils import from_env, get_from_dict_or_env, pre_init

 from langchain_community.llms.utils import enforce_stop_tokens
 from langchain_community.utilities.requests import Requests
@@ -33,7 +33,7 @@ class EdenAI(LLM):

     base_url: str = "https://api.edenai.run/v2"

-    edenai_api_key: Optional[str] = None
+    edenai_api_key: Optional[str] = Field(default_factory=from_env("EDENAI_API_KEY"))

     feature: Literal["text", "image"] = "text"
     """Which generative feature to use, use text by default"""
@@ -76,9 +76,6 @@ class EdenAI(LLM):
     @pre_init
     def validate_environment(cls, values: Dict) -> Dict:
         """Validate that api key exists in environment."""
-        values["edenai_api_key"] = get_from_dict_or_env(
-            values, "edenai_api_key", "EDENAI_API_KEY"
-        )
         return values

     @root_validator(pre=True)
@@ -12,7 +12,7 @@ from langchain_core.callbacks import (
 from langchain_core.language_models.llms import BaseLLM
 from langchain_core.outputs import Generation, LLMResult
 from langchain_core.pydantic_v1 import Extra, Field, root_validator
-from langchain_core.utils import get_from_dict_or_env
+from langchain_core.utils import from_env, get_from_dict_or_env

 from langchain_community.llms.utils import enforce_stop_tokens

@@ -54,10 +54,14 @@ class GradientLLM(BaseLLM):
     model_id: str = Field(alias="model", min_length=2)
     "Underlying gradient.ai model id (base or fine-tuned)."

-    gradient_workspace_id: Optional[str] = None
+    gradient_workspace_id: Optional[str] = Field(
+        default_factory=from_env("GRADIENT_WORKSPACE_ID")
+    )
     "Underlying gradient.ai workspace_id."

-    gradient_access_token: Optional[str] = None
+    gradient_access_token: Optional[str] = Field(
+        default_factory=from_env("GRADIENT_ACCESS_TOKEN")
+    )
     """gradient.ai API Token, which can be generated by going to
         https://auth.gradient.ai/select-workspace
        and selecting "Access tokens" under the profile drop-down.
@@ -83,13 +87,6 @@ class GradientLLM(BaseLLM):
     def validate_environment(cls, values: Dict) -> Dict:
         """Validate that api key and python package exists in environment."""

-        values["gradient_access_token"] = get_from_dict_or_env(
-            values, "gradient_access_token", "GRADIENT_ACCESS_TOKEN"
-        )
-        values["gradient_workspace_id"] = get_from_dict_or_env(
-            values, "gradient_workspace_id", "GRADIENT_WORKSPACE_ID"
-        )
-
         if (
             values["gradient_access_token"] is None
             or len(values["gradient_access_token"]) < 10
@@ -16,7 +16,12 @@ from langchain_core.callbacks import (
)
from langchain_core.language_models.llms import LLM
from langchain_core.pydantic_v1 import BaseModel, Field, SecretStr, root_validator
from langchain_core.utils import convert_to_secret_str, get_from_dict_or_env, pre_init
from langchain_core.utils import (
    convert_to_secret_str,
    from_env,
    get_from_dict_or_env,
    pre_init,
)

from langchain_community.llms.utils import enforce_stop_tokens

@@ -68,8 +73,12 @@ class MinimaxCommon(BaseModel):
    """Total probability mass of tokens to consider at each step."""
    model_kwargs: Dict[str, Any] = Field(default_factory=dict)
    """Holds any model parameters valid for `create` call not explicitly specified."""
    minimax_api_host: Optional[str] = None
    minimax_group_id: Optional[str] = None
    minimax_api_host: Optional[str] = Field(
        default_factory=from_env("MINIMAX_API_HOST", default="https://api.minimax.chat")
    )
    minimax_group_id: Optional[str] = Field(
        default_factory=from_env("MINIMAX_GROUP_ID")
    )
    minimax_api_key: Optional[SecretStr] = None

    @pre_init
@@ -78,16 +87,7 @@ class MinimaxCommon(BaseModel):
        values["minimax_api_key"] = convert_to_secret_str(
            get_from_dict_or_env(values, "minimax_api_key", "MINIMAX_API_KEY")
        )
        values["minimax_group_id"] = get_from_dict_or_env(
            values, "minimax_group_id", "MINIMAX_GROUP_ID"
        )
        # Get custom api url from environment.
        values["minimax_api_host"] = get_from_dict_or_env(
            values,
            "minimax_api_host",
            "MINIMAX_API_HOST",
            default="https://api.minimax.chat",
        )
        values["_client"] = _MinimaxEndpointClient(  # type: ignore[call-arg]
            host=values["minimax_api_host"],
            api_key=values["minimax_api_key"],

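To make the effect of the MinimaxCommon change concrete, here is a small demo; MinimaxHostDemo is an illustrative stand-in, not the library class, and it reuses the from_env_sketch helper sketched above. The environment is consulted at instantiation time rather than inside a root validator:

import os

from langchain_core.pydantic_v1 import BaseModel, Field


class MinimaxHostDemo(BaseModel):
    minimax_api_host: str = Field(
        default_factory=from_env_sketch(
            "MINIMAX_API_HOST", default="https://api.minimax.chat"
        )
    )


os.environ.pop("MINIMAX_API_HOST", None)
print(MinimaxHostDemo().minimax_api_host)  # https://api.minimax.chat

os.environ["MINIMAX_API_HOST"] = "https://proxy.example"
print(MinimaxHostDemo().minimax_api_host)  # https://proxy.example
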
@@ -1,7 +1,12 @@
from typing import Any, Dict

from langchain_core.pydantic_v1 import Field, SecretStr
from langchain_core.utils import convert_to_secret_str, get_from_dict_or_env, pre_init
from langchain_core.utils import (
    convert_to_secret_str,
    from_env,
    get_from_dict_or_env,
    pre_init,
)

from langchain_community.llms.openai import BaseOpenAI
from langchain_community.utils.openai import is_openai_v1
@@ -35,9 +40,13 @@ class OctoAIEndpoint(BaseOpenAI):
    """

    """Key word arguments to pass to the model."""
    octoai_api_base: str = Field(default=DEFAULT_BASE_URL)
    octoai_api_base: str = Field(
        default_factory=from_env("OCTOAI_API_BASE", default=DEFAULT_BASE_URL)
    )
    octoai_api_token: SecretStr = Field(default=None)
    model_name: str = Field(default=DEFAULT_MODEL)
    model_name: str = Field(
        default_factory=from_env("MODEL_NAME", default=DEFAULT_MODEL)
    )

    @classmethod
    def is_lc_serializable(cls) -> bool:
@@ -69,21 +78,9 @@ class OctoAIEndpoint(BaseOpenAI):
    @pre_init
    def validate_environment(cls, values: Dict) -> Dict:
        """Validate that api key and python package exists in environment."""
        values["octoai_api_base"] = get_from_dict_or_env(
            values,
            "octoai_api_base",
            "OCTOAI_API_BASE",
            default=DEFAULT_BASE_URL,
        )
        values["octoai_api_token"] = convert_to_secret_str(
            get_from_dict_or_env(values, "octoai_api_token", "OCTOAI_API_TOKEN")
        )
        values["model_name"] = get_from_dict_or_env(
            values,
            "model_name",
            "MODEL_NAME",
            default=DEFAULT_MODEL,
        )

        try:
            import openai

@@ -30,6 +30,7 @@ from langchain_core.language_models.llms import BaseLLM, create_base_retry_decor
from langchain_core.outputs import Generation, GenerationChunk, LLMResult
from langchain_core.pydantic_v1 import Field, root_validator
from langchain_core.utils import (
    from_env,
    get_from_dict_or_env,
    get_pydantic_field_names,
    pre_init,
@@ -201,7 +202,9 @@ class BaseOpenAI(BaseLLM):
    # When updating this to use a SecretStr
    # Check for classes that derive from this class (as some of them
    # may assume openai_api_key is a str)
    openai_api_key: Optional[str] = Field(default=None, alias="api_key")
    openai_api_key: Optional[str] = Field(
        default_factory=from_env("OPENAI_API_KEY"), alias="api_key"
    )
    """Automatically inferred from env var `OPENAI_API_KEY` if not provided."""
    openai_api_base: Optional[str] = Field(default=None, alias="base_url")
    """Base URL path for API requests, leave blank if not using a proxy or service
@@ -209,7 +212,9 @@ class BaseOpenAI(BaseLLM):
    openai_organization: Optional[str] = Field(default=None, alias="organization")
    """Automatically inferred from env var `OPENAI_ORG_ID` if not provided."""
    # to support explicit proxy for OpenAI
    openai_proxy: Optional[str] = None
    openai_proxy: Optional[str] = Field(
        default_factory=from_env("OPENAI_PROXY", default="")
    )
    batch_size: int = 20
    """Batch size to use when passing multiple documents to generate."""
    request_timeout: Union[float, Tuple[float, float], Any, None] = Field(
@@ -283,18 +288,9 @@ class BaseOpenAI(BaseLLM):
        if values["streaming"] and values["best_of"] > 1:
            raise ValueError("Cannot stream results when best_of > 1.")

        values["openai_api_key"] = get_from_dict_or_env(
            values, "openai_api_key", "OPENAI_API_KEY"
        )
        values["openai_api_base"] = values["openai_api_base"] or os.getenv(
            "OPENAI_API_BASE"
        )
        values["openai_proxy"] = get_from_dict_or_env(
            values,
            "openai_proxy",
            "OPENAI_PROXY",
            default="",
        )
        values["openai_organization"] = (
            values["openai_organization"]
            or os.getenv("OPENAI_ORG_ID")

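One behavioral detail worth noting in the BaseOpenAI hunk: a default_factory only runs when the caller supplies no value, so an explicit api_key= argument still wins over the environment, matching the old dict-before-env order of get_from_dict_or_env. A hedged demo (the class below is illustrative, not the library's, and again uses from_env_sketch in place of the real helper):

import os

from langchain_core.pydantic_v1 import BaseModel, Field


class OpenAIKeyDemo(BaseModel):
    openai_api_key: str = Field(
        default_factory=from_env_sketch("OPENAI_API_KEY", default=""),
        alias="api_key",
    )


os.environ["OPENAI_API_KEY"] = "sk-from-env"
print(OpenAIKeyDemo().openai_api_key)                       # sk-from-env
print(OpenAIKeyDemo(api_key="sk-explicit").openai_api_key)  # sk-explicit
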
@@ -6,7 +6,7 @@ from langchain_core.callbacks.manager import CallbackManagerForLLMRun
from langchain_core.language_models.llms import LLM
from langchain_core.outputs import GenerationChunk
from langchain_core.pydantic_v1 import Extra
from langchain_core.utils import get_from_dict_or_env, pre_init
from langchain_core.utils import from_env, get_from_dict_or_env, pre_init


class SVEndpointHandler:
@@ -201,7 +201,9 @@ class Sambaverse(LLM):
    sambaverse_api_key: str = ""
    """sambaverse api key"""

    sambaverse_model_name: Optional[str] = None
    sambaverse_model_name: Optional[str] = Field(
        default_factory=from_env("SAMBAVERSE_MODEL_NAME")
    )
    """sambaverse expert model to use"""

    model_kwargs: Optional[dict] = None
@@ -231,9 +233,6 @@ class Sambaverse(LLM):
        values["sambaverse_api_key"] = get_from_dict_or_env(
            values, "sambaverse_api_key", "SAMBAVERSE_API_KEY"
        )
        values["sambaverse_model_name"] = get_from_dict_or_env(
            values, "sambaverse_model_name", "SAMBAVERSE_MODEL_NAME"
        )
        return values

    @property

@@ -18,7 +18,7 @@ from langchain_core.callbacks import CallbackManagerForLLMRun
from langchain_core.language_models.llms import LLM
from langchain_core.outputs import GenerationChunk
from langchain_core.pydantic_v1 import Field
from langchain_core.utils import get_from_dict_or_env, pre_init
from langchain_core.utils import from_env, get_from_dict_or_env, pre_init

logger = logging.getLogger(__name__)

@@ -42,11 +42,23 @@ class SparkLLM(LLM):
    """

    client: Any = None  #: :meta private:
    spark_app_id: Optional[str] = None
    spark_api_key: Optional[str] = None
    spark_api_secret: Optional[str] = None
    spark_api_url: Optional[str] = None
    spark_llm_domain: Optional[str] = None
    spark_app_id: Optional[str] = Field(
        default_factory=from_env("IFLYTEK_SPARK_APP_ID")
    )
    spark_api_key: Optional[str] = Field(
        default_factory=from_env("IFLYTEK_SPARK_API_KEY")
    )
    spark_api_secret: Optional[str] = Field(
        default_factory=from_env("IFLYTEK_SPARK_API_SECRET")
    )
    spark_api_url: Optional[str] = Field(
        default_factory=from_env(
            "IFLYTEK_SPARK_API_URL", default="wss://spark-api.xf-yun.com/v3.1/chat"
        )
    )
    spark_llm_domain: Optional[str] = Field(
        default_factory=from_env("IFLYTEK_SPARK_LLM_DOMAIN", default="generalv3")
    )
    spark_user_id: str = "lc_user"
    streaming: bool = False
    request_timeout: int = 30
@@ -56,33 +68,6 @@ class SparkLLM(LLM):

    @pre_init
    def validate_environment(cls, values: Dict) -> Dict:
        values["spark_app_id"] = get_from_dict_or_env(
            values,
            "spark_app_id",
            "IFLYTEK_SPARK_APP_ID",
        )
        values["spark_api_key"] = get_from_dict_or_env(
            values,
            "spark_api_key",
            "IFLYTEK_SPARK_API_KEY",
        )
        values["spark_api_secret"] = get_from_dict_or_env(
            values,
            "spark_api_secret",
            "IFLYTEK_SPARK_API_SECRET",
        )
        values["spark_api_url"] = get_from_dict_or_env(
            values,
            "spark_api_url",
            "IFLYTEK_SPARK_API_URL",
            "wss://spark-api.xf-yun.com/v3.1/chat",
        )
        values["spark_llm_domain"] = get_from_dict_or_env(
            values,
            "spark_llm_domain",
            "IFLYTEK_SPARK_LLM_DOMAIN",
            "generalv3",
        )
        # put extra params into model_kwargs
        values["model_kwargs"]["temperature"] = values["temperature"] or cls.temperature
        values["model_kwargs"]["top_k"] = values["top_k"] or cls.top_k

@@ -25,7 +25,7 @@ from langchain_core.callbacks import (
from langchain_core.language_models.llms import BaseLLM
from langchain_core.outputs import Generation, GenerationChunk, LLMResult
from langchain_core.pydantic_v1 import Field
from langchain_core.utils import get_from_dict_or_env, pre_init
from langchain_core.utils import from_env, get_from_dict_or_env, pre_init
from requests.exceptions import HTTPError
from tenacity import (
    before_sleep_log,
@@ -184,7 +184,9 @@ class Tongyi(BaseLLM):
    top_p: float = 0.8
    """Total probability mass of tokens to consider at each step."""

    dashscope_api_key: Optional[str] = None
    dashscope_api_key: Optional[str] = Field(
        default_factory=from_env("DASHSCOPE_API_KEY")
    )
    """Dashscope api key provide by Alibaba Cloud."""

    streaming: bool = False
@@ -201,9 +203,6 @@ class Tongyi(BaseLLM):
    @pre_init
    def validate_environment(cls, values: Dict) -> Dict:
        """Validate that api key and python package exists in environment."""
        values["dashscope_api_key"] = get_from_dict_or_env(
            values, "dashscope_api_key", "DASHSCOPE_API_KEY"
        )
        try:
            import dashscope
        except ImportError:

@@ -9,7 +9,7 @@ from langchain_core.callbacks import CallbackManagerForRetrieverRun
from langchain_core.documents import Document
from langchain_core.pydantic_v1 import BaseModel, Extra, Field, root_validator
from langchain_core.retrievers import BaseRetriever
from langchain_core.utils import get_from_dict_or_env
from langchain_core.utils import from_env, get_from_dict_or_env

from langchain_community.utilities.vertexai import get_client_info

@@ -26,9 +26,11 @@ if TYPE_CHECKING:
class _BaseGoogleVertexAISearchRetriever(BaseModel):
    project_id: str
    """Google Cloud Project ID."""
    data_store_id: Optional[str] = None
    data_store_id: Optional[str] = Field(default_factory=from_env("DATA_STORE_ID"))
    """Vertex AI Search data store ID."""
    search_engine_id: Optional[str] = None
    search_engine_id: Optional[str] = Field(
        default_factory=from_env("SEARCH_ENGINE_ID")
    )
    """Vertex AI Search app ID."""
    location_id: str = "global"
    """Vertex AI Search data store location."""
@@ -66,17 +68,6 @@ class _BaseGoogleVertexAISearchRetriever(BaseModel):
        ) from exc

        values["project_id"] = get_from_dict_or_env(values, "project_id", "PROJECT_ID")

        try:
            values["data_store_id"] = get_from_dict_or_env(
                values, "data_store_id", "DATA_STORE_ID"
            )
            values["search_engine_id"] = get_from_dict_or_env(
                values, "search_engine_id", "SEARCH_ENGINE_ID"
            )
        except Exception:
            pass

        return values

    @property

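The retriever hunk is the one place where the old code tolerated missing variables via try/except. A field-level default factory preserves that tolerance only if the helper returns None rather than raising when the variable is unset; under that assumption (baked into from_env_sketch, and not verified against the real from_env), the Optional semantics survive:

import os
from typing import Optional

from langchain_core.pydantic_v1 import BaseModel, Field


class DataStoreDemo(BaseModel):  # illustrative stand-in for the retriever base
    data_store_id: Optional[str] = Field(
        default_factory=from_env_sketch("DATA_STORE_ID")
    )


os.environ.pop("DATA_STORE_ID", None)
print(DataStoreDemo().data_store_id)  # None, instead of an exception
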
@@ -8,7 +8,7 @@ import requests
from langchain_core.callbacks import CallbackManagerForToolRun
from langchain_core.pydantic_v1 import root_validator
from langchain_core.tools import BaseTool
from langchain_core.utils import get_from_dict_or_env
from langchain_core.utils import from_env, get_from_dict_or_env

logger = logging.getLogger(__name__)

@@ -23,7 +23,7 @@ class EdenaiTool(BaseTool):

    feature: str
    subfeature: str
    edenai_api_key: Optional[str] = None
    edenai_api_key: Optional[str] = Field(default_factory=from_env("EDENAI_API_KEY"))
    is_async: bool = False

    providers: List[str]
@@ -32,9 +32,6 @@ class EdenaiTool(BaseTool):
    @root_validator(allow_reuse=True)
    def validate_environment(cls, values: Dict) -> Dict:
        """Validate that api key exists in environment."""
        values["edenai_api_key"] = get_from_dict_or_env(
            values, "edenai_api_key", "EDENAI_API_KEY"
        )
        return values

    @staticmethod

@@ -4,7 +4,7 @@ from typing import Any, Dict, List, Optional

import requests
from langchain_core.pydantic_v1 import BaseModel, Extra, root_validator
from langchain_core.utils import get_from_dict_or_env
from langchain_core.utils import from_env, get_from_dict_or_env


class AlphaVantageAPIWrapper(BaseModel):
@@ -16,7 +16,9 @@ class AlphaVantageAPIWrapper(BaseModel):
    2. Save your API KEY into ALPHAVANTAGE_API_KEY env variable
    """

    alphavantage_api_key: Optional[str] = None
    alphavantage_api_key: Optional[str] = Field(
        default_factory=from_env("ALPHAVANTAGE_API_KEY")
    )

    class Config:
        """Configuration for this pydantic object."""
@@ -26,9 +28,6 @@ class AlphaVantageAPIWrapper(BaseModel):
    @root_validator(pre=True)
    def validate_environment(cls, values: Dict) -> Dict:
        """Validate that api key exists in environment."""
        values["alphavantage_api_key"] = get_from_dict_or_env(
            values, "alphavantage_api_key", "ALPHAVANTAGE_API_KEY"
        )
        return values

    def search_symbols(self, keywords: str) -> Dict[str, Any]:

@@ -7,7 +7,7 @@ from typing import Any, Dict, List, Mapping, Optional, Tuple, Type, Union

import requests
from langchain_core.pydantic_v1 import BaseModel, Extra, root_validator
from langchain_core.utils import get_from_dict_or_env
from langchain_core.utils import from_env, get_from_dict_or_env

DEFAULT_URL = "https://api.clickup.com/api/v2"

@@ -276,7 +276,9 @@ def fetch_list_id(space_id: int, folder_id: int, access_token: str) -> Optional[
class ClickupAPIWrapper(BaseModel):
    """Wrapper for Clickup API."""

    access_token: Optional[str] = None
    access_token: Optional[str] = Field(
        default_factory=from_env("CLICKUP_ACCESS_TOKEN")
    )
    team_id: Optional[str] = None
    space_id: Optional[str] = None
    folder_id: Optional[str] = None
@@ -326,9 +328,6 @@ class ClickupAPIWrapper(BaseModel):
    @root_validator(pre=True)
    def validate_environment(cls, values: Dict) -> Dict:
        """Validate that api key and python package exists in environment."""
        values["access_token"] = get_from_dict_or_env(
            values, "access_token", "CLICKUP_ACCESS_TOKEN"
        )
        values["team_id"] = fetch_team_id(values["access_token"])
        values["space_id"] = fetch_space_id(values["team_id"], values["access_token"])
        values["folder_id"] = fetch_folder_id(

@@ -6,6 +6,7 @@ from typing import Any, Dict, Mapping, Optional, Tuple, Union

from langchain_core.pydantic_v1 import BaseModel, Extra, Field, root_validator
from langchain_core.utils import (
    from_env,
    get_from_dict_or_env,
    get_pydantic_field_names,
)
@@ -30,7 +31,9 @@ class DallEAPIWrapper(BaseModel):
    async_client: Any = Field(default=None, exclude=True)  #: :meta private:
    model_name: str = Field(default="dall-e-2", alias="model")
    model_kwargs: Dict[str, Any] = Field(default_factory=dict)
    openai_api_key: Optional[str] = Field(default=None, alias="api_key")
    openai_api_key: Optional[str] = Field(
        default_factory=from_env("OPENAI_API_KEY"), alias="api_key"
    )
    """Automatically inferred from env var `OPENAI_API_KEY` if not provided."""
    openai_api_base: Optional[str] = Field(default=None, alias="base_url")
    """Base URL path for API requests, leave blank if not using a proxy or service
@@ -38,7 +41,9 @@ class DallEAPIWrapper(BaseModel):
    openai_organization: Optional[str] = Field(default=None, alias="organization")
    """Automatically inferred from env var `OPENAI_ORG_ID` if not provided."""
    # to support explicit proxy for OpenAI
    openai_proxy: Optional[str] = None
    openai_proxy: Optional[str] = Field(
        default_factory=from_env("OPENAI_PROXY", default="")
    )
    request_timeout: Union[float, Tuple[float, float], Any, None] = Field(
        default=None, alias="timeout"
    )
@@ -93,9 +98,6 @@ class DallEAPIWrapper(BaseModel):
    @root_validator(pre=False, skip_on_failure=True)
    def validate_environment(cls, values: Dict) -> Dict:
        """Validate that api key and python package exists in environment."""
        values["openai_api_key"] = get_from_dict_or_env(
            values, "openai_api_key", "OPENAI_API_KEY"
        )
        # Check OPENAI_ORGANIZATION for backwards compatibility.
        values["openai_organization"] = (
            values["openai_organization"]
@@ -106,12 +108,6 @@ class DallEAPIWrapper(BaseModel):
        values["openai_api_base"] = values["openai_api_base"] or os.getenv(
            "OPENAI_API_BASE"
        )
        values["openai_proxy"] = get_from_dict_or_env(
            values,
            "openai_proxy",
            "OPENAI_PROXY",
            default="",
        )

        try:
            import openai

@@ -4,7 +4,7 @@ from typing import Dict, List, Optional

import requests
from langchain_core.pydantic_v1 import BaseModel, Extra, root_validator
from langchain_core.utils import get_from_dict_or_env
from langchain_core.utils import from_env, get_from_dict_or_env
from requests.adapters import HTTPAdapter
from urllib3.util import Retry

@@ -12,7 +12,7 @@ from urllib3.util import Retry
class InfobipAPIWrapper(BaseModel):
    """Wrapper for Infobip API for messaging."""

    infobip_api_key: Optional[str] = None
    infobip_api_key: Optional[str] = Field(default_factory=from_env("INFOBIP_API_KEY"))
    infobip_base_url: Optional[str] = "https://api.infobip.com"

    class Config:
@@ -23,9 +23,6 @@ class InfobipAPIWrapper(BaseModel):
    @root_validator(pre=True)
    def validate_environment(cls, values: Dict) -> Dict:
        """Validate that api key exists in environment."""
        values["infobip_api_key"] = get_from_dict_or_env(
            values, "infobip_api_key", "INFOBIP_API_KEY"
        )
        values["infobip_base_url"] = get_from_dict_or_env(
            values, "infobip_base_url", "INFOBIP_BASE_URL"
        )

@@ -3,7 +3,7 @@
from typing import Any, Dict, Optional

from langchain_core.pydantic_v1 import BaseModel, Extra, root_validator
from langchain_core.utils import get_from_dict_or_env
from langchain_core.utils import from_env, get_from_dict_or_env


class TwilioAPIWrapper(BaseModel):
@@ -31,7 +31,7 @@ class TwilioAPIWrapper(BaseModel):
    """Twilio account string identifier."""
    auth_token: Optional[str] = None
    """Twilio auth token."""
    from_number: Optional[str] = None
    from_number: Optional[str] = Field(default_factory=from_env("TWILIO_FROM_NUMBER"))
    """A Twilio phone number in [E.164](https://www.twilio.com/docs/glossary/what-e164)
    format, an
    [alphanumeric sender ID](https://www.twilio.com/docs/sms/send-messages#use-an-alphanumeric-sender-id),
@@ -61,9 +61,6 @@ class TwilioAPIWrapper(BaseModel):
        )
        account_sid = get_from_dict_or_env(values, "account_sid", "TWILIO_ACCOUNT_SID")
        auth_token = get_from_dict_or_env(values, "auth_token", "TWILIO_AUTH_TOKEN")
        values["from_number"] = get_from_dict_or_env(
            values, "from_number", "TWILIO_FROM_NUMBER"
        )
        values["client"] = Client(account_sid, auth_token)
        return values

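Every file in this commit follows the same recipe: move the environment lookup out of the root validator and into the field declaration as a from_env default factory, then delete the now-redundant validator lines. As a closing sketch (class names are illustrative, and from_env_sketch is the hedged helper from earlier standing in for the real from_env), the same migration applied to a wrapper of one's own looks like:

from typing import Dict, Optional

from langchain_core.pydantic_v1 import BaseModel, Field, root_validator
from langchain_core.utils import get_from_dict_or_env


class MyWrapperBefore(BaseModel):
    """Old style: the env lookup happens inside a validator."""

    my_api_key: Optional[str] = None

    @root_validator(pre=True)
    def validate_environment(cls, values: Dict) -> Dict:
        values["my_api_key"] = get_from_dict_or_env(
            values, "my_api_key", "MY_API_KEY"
        )
        return values


class MyWrapperAfter(BaseModel):
    """New style: the field declares its own env fallback, so the
    validator body above can simply be deleted, as the hunks here do."""

    my_api_key: Optional[str] = Field(default_factory=from_env_sketch("MY_API_KEY"))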