mirror of https://github.com/hwchase17/langchain.git

update
@@ -26,6 +26,7 @@ from langchain_core.utils import (
     get_pydantic_field_names,
     pre_init,
 )
+from pydantic import Field

 logger = logging.getLogger(__name__)

@@ -99,10 +100,12 @@ class ChatHunyuan(BaseChatModel):
     def lc_serializable(self) -> bool:
         return True

-    hunyuan_app_id: Optional[int] = Field(default_factory=from_env("HUNYUAN_APP_ID"))
+    hunyuan_app_id: Optional[int] = Field(
+        default_factory=from_env("HUNYUAN_APP_ID", default=None)
+    )
     """Hunyuan App ID"""
     hunyuan_secret_id: Optional[str] = Field(
-        default_factory=from_env("HUNYUAN_SECRET_ID")
+        default_factory=from_env("HUNYUAN_SECRET_ID", default=None)
     )
     """Hunyuan Secret ID"""
     hunyuan_secret_key: Optional[SecretStr] = None

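The hunks above and below all make the same two moves: `Field` is now imported from `pydantic` directly instead of (or alongside) the `langchain_core.pydantic_v1` shim, and every `Optional` env-backed field passes `default=None` to `from_env`, so the generated `default_factory` quietly returns `None` when the variable is unset (the change suggests the real helper would otherwise treat a missing variable as an error). A minimal, self-contained sketch of the pattern follows; this `from_env` is a simplified stand-in for `langchain_core.utils.from_env`, and the `Settings` model and `MY_APP_KEY` variable are illustrative only, not part of the commit.

```python
import os
from typing import Callable, Optional

from pydantic import BaseModel, Field


def from_env(key: str, *, default: Optional[str] = None) -> Callable[[], Optional[str]]:
    """Simplified stand-in for langchain_core.utils.from_env.

    Returns a zero-argument factory that reads ``key`` from the environment
    and falls back to ``default`` when the variable is not set.
    """

    def read() -> Optional[str]:
        return os.environ.get(key, default)

    return read


class Settings(BaseModel):
    # Mirrors fields like hunyuan_secret_id above: optional, env-backed,
    # and simply None when neither a value nor the env var is supplied.
    my_app_key: Optional[str] = Field(
        default_factory=from_env("MY_APP_KEY", default=None)
    )


print(Settings().my_app_key)                # None if MY_APP_KEY is unset
print(Settings(my_app_key="k").my_app_key)  # "k" -- the factory is never called
```

The same factory shape is what `from_env("MINIMAX_API_HOST", default="https://api.minimax.chat")` relies on further down, just with a non-`None` fallback.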
@@ -57,6 +57,7 @@ from langchain_core.runnables import Runnable
 from langchain_core.tools import BaseTool
 from langchain_core.utils import from_env, get_from_dict_or_env, pre_init
 from langchain_core.utils.function_calling import convert_to_openai_tool
+from pydantic import Field

 logger = logging.getLogger(__name__)

@@ -9,6 +9,7 @@ from langchain_core.utils import (
     get_from_dict_or_env,
     pre_init,
 )
+from pydantic import Field

 from langchain_community.chat_models.openai import ChatOpenAI
 from langchain_community.utils.openai import is_openai_v1

@@ -48,9 +48,11 @@ from langchain_core.pydantic_v1 import BaseModel, Field, root_validator
 from langchain_core.runnables import Runnable
 from langchain_core.utils import (
+    from_env,
     get_from_dict_or_env,
     get_pydantic_field_names,
     pre_init,
 )
+from pydantic import Field

 from langchain_community.adapters.openai import (
     convert_dict_to_message,
@@ -206,7 +208,7 @@ class ChatOpenAI(BaseChatModel):
     # Check for classes that derive from this class (as some of them
     # may assume openai_api_key is a str)
     openai_api_key: Optional[str] = Field(
-        default_factory=from_env("OPENAI_API_KEY"), alias="api_key"
+        default_factory=from_env("OPENAI_API_KEY", default=None), alias="api_key"
     )
     """Automatically inferred from env var `OPENAI_API_KEY` if not provided."""
     openai_api_base: Optional[str] = Field(default=None, alias="base_url")

@@ -20,6 +20,7 @@ from langchain_core.utils import (
     pre_init,
 )
 from langchain_core.utils.utils import build_extra_kwargs
+from pydantic import Field

 SUPPORTED_ROLES: List[str] = [
     "system",
@@ -113,29 +114,29 @@ class ChatSnowflakeCortex(BaseChatModel):
     """

     snowflake_username: Optional[str] = Field(
-        default_factory=from_env("SNOWFLAKE_USERNAME"), alias="username"
+        default_factory=from_env("SNOWFLAKE_USERNAME", default=None), alias="username"
     )
     """Automatically inferred from env var `SNOWFLAKE_USERNAME` if not provided."""
     snowflake_password: Optional[SecretStr] = Field(default=None, alias="password")
     """Automatically inferred from env var `SNOWFLAKE_PASSWORD` if not provided."""
     snowflake_account: Optional[str] = Field(
-        default_factory=from_env("SNOWFLAKE_ACCOUNT"), alias="account"
+        default_factory=from_env("SNOWFLAKE_ACCOUNT", default=None), alias="account"
     )
     """Automatically inferred from env var `SNOWFLAKE_ACCOUNT` if not provided."""
     snowflake_database: Optional[str] = Field(
-        default_factory=from_env("SNOWFLAKE_DATABASE"), alias="database"
+        default_factory=from_env("SNOWFLAKE_DATABASE", default=None), alias="database"
     )
     """Automatically inferred from env var `SNOWFLAKE_DATABASE` if not provided."""
     snowflake_schema: Optional[str] = Field(
-        default_factory=from_env("SNOWFLAKE_SCHEMA"), alias="schema"
+        default_factory=from_env("SNOWFLAKE_SCHEMA", default=None), alias="schema"
     )
     """Automatically inferred from env var `SNOWFLAKE_SCHEMA` if not provided."""
     snowflake_warehouse: Optional[str] = Field(
-        default_factory=from_env("SNOWFLAKE_WAREHOUSE"), alias="warehouse"
+        default_factory=from_env("SNOWFLAKE_WAREHOUSE", default=None), alias="warehouse"
     )
     """Automatically inferred from env var `SNOWFLAKE_WAREHOUSE` if not provided."""
     snowflake_role: Optional[str] = Field(
-        default_factory=from_env("SNOWFLAKE_ROLE"), alias="role"
+        default_factory=from_env("SNOWFLAKE_ROLE", default=None), alias="role"
     )
     """Automatically inferred from env var `SNOWFLAKE_ROLE` if not provided."""

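The Snowflake fields above combine the env-var factory with a pydantic `alias`, which is worth spelling out: the factory only runs when the field is not supplied at construction time, so an explicit `username=...` argument wins over `SNOWFLAKE_USERNAME`. A rough sketch under the same assumptions as the previous example; `SnowflakeLikeSettings` is a hypothetical model, and `from_env` is again a simplified stand-in for the real helper.

```python
import os
from typing import Callable, Optional

from pydantic import BaseModel, Field


def from_env(key: str, *, default: Optional[str] = None) -> Callable[[], Optional[str]]:
    # Simplified stand-in for langchain_core.utils.from_env (see previous sketch).
    def read() -> Optional[str]:
        return os.environ.get(key, default)

    return read


class SnowflakeLikeSettings(BaseModel):
    # Hypothetical model echoing ChatSnowflakeCortex.snowflake_username above.
    snowflake_username: Optional[str] = Field(
        default_factory=from_env("SNOWFLAKE_USERNAME", default=None), alias="username"
    )


os.environ.pop("SNOWFLAKE_USERNAME", None)
print(SnowflakeLikeSettings().snowflake_username)                  # None: nothing supplied, no error
print(SnowflakeLikeSettings(username="alice").snowflake_username)  # "alice": explicit value, factory never runs

os.environ["SNOWFLAKE_USERNAME"] = "bob"
print(SnowflakeLikeSettings().snowflake_username)                  # "bob": read from the environment
```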
@@ -11,6 +11,7 @@ from langchain_core.utils import (
     get_from_dict_or_env,
     pre_init,
 )
+from pydantic import Field

 from langchain_community.embeddings.openai import OpenAIEmbeddings
 from langchain_community.utils.openai import is_openai_v1

@@ -6,9 +6,10 @@ from typing import Dict, List, Optional
 import requests
 from langchain_core._api.deprecation import deprecated
 from langchain_core.embeddings import Embeddings
-from langchain_core.pydantic_v1 import BaseModel, Field
+from langchain_core.pydantic_v1 import BaseModel
 from langchain_core.runnables.config import run_in_executor
-from langchain_core.utils import from_env, pre_init
+from langchain_core.utils import from_env, get_from_dict_or_env, pre_init
+from pydantic import Field

 logger = logging.getLogger(__name__)

@@ -23,9 +24,11 @@ class ErnieEmbeddings(BaseModel, Embeddings):
     ernie_api_base: Optional[str] = Field(
         default_factory=from_env("ERNIE_API_BASE", default="https://aip.baidubce.com")
     )
-    ernie_client_id: Optional[str] = Field(default_factory=from_env("ERNIE_CLIENT_ID"))
+    ernie_client_id: Optional[str] = Field(
+        default_factory=from_env("ERNIE_CLIENT_ID", default=None)
+    )
     ernie_client_secret: Optional[str] = Field(
-        default_factory=from_env("ERNIE_CLIENT_SECRET")
+        default_factory=from_env("ERNIE_CLIENT_SECRET", default=None)
     )
     access_token: Optional[str] = None

@@ -7,6 +7,7 @@ from langchain_core.utils import (
     get_from_dict_or_env,
     pre_init,
 )
+from pydantic import Field

 from langchain_community.embeddings.openai import OpenAIEmbeddings
 from langchain_community.utils.openai import is_openai_v1

@@ -3,8 +3,9 @@ from typing import Dict, Generator, List, Optional

 import requests
 from langchain_core.embeddings import Embeddings
-from langchain_core.pydantic_v1 import BaseModel, Field
+from langchain_core.pydantic_v1 import BaseModel
 from langchain_core.utils import from_env, get_from_dict_or_env, pre_init
+from pydantic import Field


 class SambaStudioEmbeddings(BaseModel, Embeddings):

@@ -4,8 +4,9 @@ import logging
 from typing import Any, Dict, List, Optional

 from langchain_core.embeddings import Embeddings
-from langchain_core.pydantic_v1 import BaseModel, Field
-from langchain_core.utils import from_env, pre_init
+from langchain_core.pydantic_v1 import BaseModel
+from langchain_core.utils import from_env, get_from_dict_or_env, pre_init
+from pydantic import Field

 logger = logging.getLogger(__name__)

@@ -13,11 +14,15 @@ logger = logging.getLogger(__name__)
 class VolcanoEmbeddings(BaseModel, Embeddings):
     """`Volcengine Embeddings` embedding models."""

-    volcano_ak: Optional[str] = Field(default_factory=from_env("VOLC_ACCESSKEY"))
+    volcano_ak: Optional[str] = Field(
+        default_factory=from_env("VOLC_ACCESSKEY", default=None)
+    )
     """volcano access key
     learn more from: https://www.volcengine.com/docs/6459/76491#ak-sk"""

-    volcano_sk: Optional[str] = Field(default_factory=from_env("VOLC_SECRETKEY"))
+    volcano_sk: Optional[str] = Field(
+        default_factory=from_env("VOLC_SECRETKEY", default=None)
+    )
     """volcano secret key
     learn more from: https://www.volcengine.com/docs/6459/76491#ak-sk"""

@@ -29,6 +29,7 @@ from langchain_core.utils import (
     pre_init,
 )
 from langchain_core.utils.utils import build_extra_kwargs, convert_to_secret_str
+from pydantic import Field


 class _AnthropicCommon(BaseLanguageModel):

@@ -21,6 +21,7 @@ from langchain_core.utils import (
     get_from_dict_or_env,
     pre_init,
 )
+from pydantic import Field

 from langchain_community.llms.openai import (
     BaseOpenAI,

@@ -14,6 +14,7 @@ from langchain_core.utils import (
     get_from_dict_or_env,
     pre_init,
 )
+from pydantic import Field

 from langchain_community.llms.utils import enforce_stop_tokens

@@ -10,8 +10,9 @@ from langchain_core.callbacks import (
 )
 from langchain_core.language_models.llms import LLM
 from langchain_core.pydantic_v1 import Field, root_validator
-from langchain_core.utils import from_env, pre_init
+from langchain_core.utils import from_env, get_from_dict_or_env, pre_init
 from langchain_core.utils.pydantic import get_fields
+from pydantic import Field

 from langchain_community.llms.utils import enforce_stop_tokens
 from langchain_community.utilities.requests import Requests
@@ -34,7 +35,9 @@ class EdenAI(LLM):

     base_url: str = "https://api.edenai.run/v2"

-    edenai_api_key: Optional[str] = Field(default_factory=from_env("EDENAI_API_KEY"))
+    edenai_api_key: Optional[str] = Field(
+        default_factory=from_env("EDENAI_API_KEY", default=None)
+    )

     feature: Literal["text", "image"] = "text"
     """Which generative feature to use, use text by default"""

@@ -22,6 +22,7 @@ from langchain_core.utils import (
     get_from_dict_or_env,
     pre_init,
 )
+from pydantic import Field

 from langchain_community.llms.utils import enforce_stop_tokens

@@ -77,7 +78,7 @@ class MinimaxCommon(BaseModel):
         default_factory=from_env("MINIMAX_API_HOST", default="https://api.minimax.chat")
     )
     minimax_group_id: Optional[str] = Field(
-        default_factory=from_env("MINIMAX_GROUP_ID")
+        default_factory=from_env("MINIMAX_GROUP_ID", default=None)
     )
     minimax_api_key: Optional[SecretStr] = None

@@ -5,7 +5,8 @@ import requests
 from langchain_core.callbacks import CallbackManagerForLLMRun
 from langchain_core.language_models.llms import LLM
 from langchain_core.pydantic_v1 import Field
-from langchain_core.utils import from_env, pre_init
+from langchain_core.utils import from_env, get_from_dict_or_env, pre_init
+from pydantic import Field

 logger = logging.getLogger(__name__)

@@ -7,6 +7,7 @@ from langchain_core.utils import (
     get_from_dict_or_env,
     pre_init,
 )
+from pydantic import Field

 from langchain_community.llms.openai import BaseOpenAI
 from langchain_community.utils.openai import is_openai_v1

@@ -37,6 +37,7 @@ from langchain_core.utils import (
 )
 from langchain_core.utils.pydantic import get_fields
 from langchain_core.utils.utils import build_extra_kwargs
+from pydantic import Field

 from langchain_community.utils.openai import is_openai_v1

@@ -204,7 +205,7 @@ class BaseOpenAI(BaseLLM):
     # Check for classes that derive from this class (as some of them
     # may assume openai_api_key is a str)
     openai_api_key: Optional[str] = Field(
-        default_factory=from_env("OPENAI_API_KEY"), alias="api_key"
+        default_factory=from_env("OPENAI_API_KEY", default=None), alias="api_key"
     )
     """Automatically inferred from env var `OPENAI_API_KEY` if not provided."""
     openai_api_base: Optional[str] = Field(default=None, alias="base_url")

@@ -6,7 +6,8 @@ import requests
 from langchain_core.callbacks import CallbackManagerForLLMRun
 from langchain_core.language_models.llms import LLM
 from langchain_core.outputs import GenerationChunk
-from langchain_core.utils import pre_init
+from langchain_core.utils import from_env, get_from_dict_or_env, pre_init
+from pydantic import Field

 from langchain_community.llms.utils import enforce_stop_tokens

@@ -5,8 +5,8 @@ import requests
 from langchain_core.callbacks.manager import CallbackManagerForLLMRun
 from langchain_core.language_models.llms import LLM
 from langchain_core.outputs import GenerationChunk
-from langchain_core.pydantic_v1 import Field
 from langchain_core.utils import from_env, get_from_dict_or_env, pre_init
+from pydantic import Field


 class SVEndpointHandler:
@@ -204,7 +204,7 @@ class Sambaverse(LLM):
     """sambaverse api key"""

     sambaverse_model_name: Optional[str] = Field(
-        default_factory=from_env("SAMBAVERSE_MODEL_NAME")
+        default_factory=from_env("SAMBAVERSE_MODEL_NAME", default=None)
     )
     """sambaverse expert model to use"""

@@ -18,7 +18,8 @@ from langchain_core.callbacks import CallbackManagerForLLMRun
 from langchain_core.language_models.llms import LLM
 from langchain_core.outputs import GenerationChunk
 from langchain_core.pydantic_v1 import Field
-from langchain_core.utils import from_env, pre_init
+from langchain_core.utils import from_env, get_from_dict_or_env, pre_init
+from pydantic import Field

 logger = logging.getLogger(__name__)

@@ -43,13 +44,13 @@ class SparkLLM(LLM):

     client: Any = None  #: :meta private:
     spark_app_id: Optional[str] = Field(
-        default_factory=from_env("IFLYTEK_SPARK_APP_ID")
+        default_factory=from_env("IFLYTEK_SPARK_APP_ID", default=None)
     )
     spark_api_key: Optional[str] = Field(
-        default_factory=from_env("IFLYTEK_SPARK_API_KEY")
+        default_factory=from_env("IFLYTEK_SPARK_API_KEY", default=None)
     )
     spark_api_secret: Optional[str] = Field(
-        default_factory=from_env("IFLYTEK_SPARK_API_SECRET")
+        default_factory=from_env("IFLYTEK_SPARK_API_SECRET", default=None)
     )
     spark_api_url: Optional[str] = Field(
         default_factory=from_env(

@@ -2,7 +2,7 @@ from typing import Any, Dict, List, Optional

 from langchain_core.callbacks import CallbackManagerForRetrieverRun
 from langchain_core.documents import Document
-from langchain_core.pydantic_v1 import Field, SecretStr
+from langchain_core.pydantic_v1 import SecretStr
 from langchain_core.retrievers import BaseRetriever
 from langchain_core.utils import (
     convert_to_secret_str,
@@ -10,6 +10,7 @@ from langchain_core.utils import (
     get_from_dict_or_env,
     pre_init,
 )
+from pydantic import Field

 from langchain_community.utilities.arcee import ArceeWrapper, DALMFilter

@@ -6,7 +6,8 @@ from langchain_core._api.deprecation import deprecated
 from langchain_core.callbacks import CallbackManagerForRetrieverRun
 from langchain_core.documents import Document
 from langchain_core.retrievers import BaseRetriever
-from langchain_core.utils import pre_init
+from langchain_core.utils import from_env, get_from_dict_or_env, pre_init
+from pydantic import Field

 from langchain_community.utilities.vertexai import get_client_info
