community[patch]: Upgrade pydantic extra (#25185)

Upgrade to using a literal for specifying `extra`, which is the
recommended approach in pydantic 2.

This also works correctly in pydantic v1:

```python
from pydantic.v1 import BaseModel

class Foo(BaseModel, extra="forbid"):
    x: int

Foo(x=5, y=1)  # raises a ValidationError: extra fields are not permitted
```

And the equivalent form using a `Config` class:
```python
from pydantic.v1 import BaseModel

class Foo(BaseModel):
    x: int

    class Config:
        extra = "forbid"

Foo(x=5, y=1)  # also raises a ValidationError
```
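
For comparison, pydantic 2's native API expresses the same constraint via `model_config` (shown only as a sketch; the code in this repo stays on the `pydantic_v1` compatibility layer):

```python
from pydantic import BaseModel, ConfigDict


class Foo(BaseModel):
    # Equivalent of extra="forbid" in pydantic 2's native configuration style.
    model_config = ConfigDict(extra="forbid")

    x: int


Foo(x=5, y=1)  # raises a ValidationError as well
```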


## Enum -> literal using grit pattern:

```
engine marzano(0.1)
language python
or {
    `extra=Extra.allow` => `extra="allow"`,
    `extra=Extra.forbid` => `extra="forbid"`,
    `extra=Extra.ignore` => `extra="ignore"`
}
```
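
Applied to a `Config` class, the pattern only swaps the enum member for the equivalent string; a representative before/after (illustrative, mirroring the diffs below):

```python
# Before
class Config:
    extra = Extra.forbid
    arbitrary_types_allowed = True


# After
class Config:
    extra = "forbid"
    arbitrary_types_allowed = True
```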

Re-sorted the attributes in `Config` and removed doc-strings, in case we
need to go back and forth between pydantic v1 and v2 during the 0.3
release. (This will reduce merge conflicts.)


## Sort attributes in Config:

```
engine marzano(0.1)
language python

function sort($values) js {
    return $values.text.split(',').sort().join("\n");
}

class_definition($name, $body) as $C where {
    $name <: `Config`,
    $body <: block($statements),
    $values = [],
    $statements <: some bubble($values) assignment() as $A where {
        $values += $A
    },
    $body => sort($values),
}
```
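
For example, a `Config` block such as the one in `DashScopeRerank` ends up with its assignments in alphabetical order (illustrative):

```python
# Before
class Config:
    extra = "forbid"
    arbitrary_types_allowed = True
    allow_population_by_field_name = True


# After
class Config:
    allow_population_by_field_name = True
    arbitrary_types_allowed = True
    extra = "forbid"
```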
Commit bf5193bb99 (parent 11adc09e02)
Author: Eugene Yurtsev, 2024-08-08 13:20:39 -04:00, committed by GitHub
GPG Key ID: B5690EEEBB952194
216 changed files with 302 additions and 763 deletions

@@ -54,12 +54,8 @@ class AINetworkToolkit(BaseToolkit):
         return values
     class Config:
-        """Pydantic config."""
-        # Allow extra fields. This is needed for the `interface` field.
-        validate_all = True
-        # Allow arbitrary types. This is needed for the `interface` field.
         arbitrary_types_allowed = True
+        validate_all = True
     def get_tools(self) -> List[BaseTool]:
         """Get the tools in the toolkit."""

@@ -27,9 +27,6 @@ class AmadeusToolkit(BaseToolkit):
     llm: Optional[BaseLanguageModel] = Field(default=None)
     class Config:
-        """Pydantic config."""
-        # Allow extra fields. This is needed for the `client` field.
         arbitrary_types_allowed = True
     def get_tools(self) -> List[BaseTool]:

@@ -25,9 +25,6 @@ class CassandraDatabaseToolkit(BaseToolkit):
     db: CassandraDatabase = Field(exclude=True)
     class Config:
-        """Configuration for this pydantic object."""
-        # Allow arbitrary types. This is needed for the `db` field.
         arbitrary_types_allowed = True
     def get_tools(self) -> List[BaseTool]:

@@ -32,8 +32,6 @@ class FinancialDatasetsToolkit(BaseToolkit):
         self.api_wrapper = api_wrapper
     class Config:
-        """Pydantic config."""
         arbitrary_types_allowed = True
     def get_tools(self) -> List[BaseTool]:

@@ -46,8 +46,6 @@ class GmailToolkit(BaseToolkit):
     api_resource: Resource = Field(default_factory=build_resource_service)
     class Config:
-        """Pydantic config."""
         arbitrary_types_allowed = True
     def get_tools(self) -> List[BaseTool]:

@@ -26,8 +26,6 @@ class MultionToolkit(BaseToolkit):
     """
     class Config:
-        """Pydantic config."""
         arbitrary_types_allowed = True
     def get_tools(self) -> List[BaseTool]:

@@ -41,8 +41,6 @@ class O365Toolkit(BaseToolkit):
     account: Account = Field(default_factory=authenticate)
     class Config:
-        """Pydantic config."""
         arbitrary_types_allowed = True
     def get_tools(self) -> List[BaseTool]:

@@ -4,7 +4,7 @@ from __future__ import annotations
 from typing import TYPE_CHECKING, List, Optional, Type, cast
-from langchain_core.pydantic_v1 import Extra, root_validator
+from langchain_core.pydantic_v1 import root_validator
 from langchain_core.tools import BaseTool, BaseToolkit
 from langchain_community.tools.playwright.base import (
@@ -69,10 +69,8 @@ class PlayWrightBrowserToolkit(BaseToolkit):
     async_browser: Optional["AsyncBrowser"] = None
     class Config:
-        """Configuration for this pydantic object."""
-        extra = Extra.forbid
         arbitrary_types_allowed = True
+        extra = "forbid"
     @root_validator(pre=True)
     def validate_imports_and_browser_provided(cls, values: dict) -> dict:

@@ -64,8 +64,6 @@ class PowerBIToolkit(BaseToolkit):
     tiktoken_model_name: Optional[str] = None
     class Config:
-        """Configuration for this pydantic object."""
         arbitrary_types_allowed = True
     def get_tools(self) -> List[BaseTool]:

@@ -26,8 +26,6 @@ class SlackToolkit(BaseToolkit):
     client: WebClient = Field(default_factory=login)
     class Config:
-        """Pydantic config."""
         arbitrary_types_allowed = True
     def get_tools(self) -> List[BaseTool]:

@@ -28,8 +28,6 @@ class SparkSQLToolkit(BaseToolkit):
     llm: BaseLanguageModel = Field(exclude=True)
     class Config:
-        """Configuration for this pydantic object."""
         arbitrary_types_allowed = True
     def get_tools(self) -> List[BaseTool]:

@@ -84,8 +84,6 @@ class SQLDatabaseToolkit(BaseToolkit):
         return self.db.dialect
     class Config:
-        """Configuration for this pydantic object."""
         arbitrary_types_allowed = True
     def get_tools(self) -> List[BaseTool]:

@@ -7,7 +7,7 @@ from typing import Any, Dict, List, Optional
 from langchain.chains import LLMChain
 from langchain.chains.base import Chain
 from langchain_core.callbacks import CallbackManagerForChainRun
-from langchain_core.pydantic_v1 import Extra, Field, root_validator
+from langchain_core.pydantic_v1 import Field, root_validator
 from langchain_community.utilities.requests import TextRequestsWrapper
@@ -39,10 +39,8 @@ class LLMRequestsChain(Chain):
     output_key: str = "output"  #: :meta private:
     class Config:
-        """Configuration for this pydantic object."""
-        extra = Extra.forbid
         arbitrary_types_allowed = True
+        extra = "forbid"
     @property
     def input_keys(self) -> List[str]:

@@ -19,7 +19,7 @@ from langchain_core.callbacks import (
 )
 from langchain_core.documents import Document
 from langchain_core.language_models import BaseLanguageModel
-from langchain_core.pydantic_v1 import Extra, Field, validator
+from langchain_core.pydantic_v1 import Field, validator
 from langchain_core.vectorstores import VectorStoreRetriever
 from langchain_community.chains.pebblo_retrieval.enforcement_filters import (
@@ -205,11 +205,9 @@ class PebbloRetrievalQA(Chain):
         return {self.output_key: answer}
     class Config:
-        """Configuration for this pydantic object."""
-        extra = Extra.forbid
-        arbitrary_types_allowed = True
         allow_population_by_field_name = True
+        arbitrary_types_allowed = True
+        extra = "forbid"
     @property
     def input_keys(self) -> List[str]:

@@ -92,8 +92,6 @@ class ChatAnthropic(BaseChatModel, _AnthropicCommon):
     """
     class Config:
-        """Configuration for this pydantic object."""
         allow_population_by_field_name = True
         arbitrary_types_allowed = True

@@ -376,8 +376,6 @@ class ChatBaichuan(BaseChatModel):
     """Holds any model parameters valid for API call not explicitly specified."""
     class Config:
-        """Configuration for this pydantic object."""
         allow_population_by_field_name = True
     @root_validator(pre=True)

@@ -380,8 +380,6 @@ class QianfanChatEndpoint(BaseChatModel):
     """Endpoint of the Qianfan LLM, required if custom model used."""
     class Config:
-        """Configuration for this pydantic object."""
         allow_population_by_field_name = True
     @root_validator(pre=True)

@@ -16,7 +16,6 @@ from langchain_core.messages import (
     SystemMessage,
 )
 from langchain_core.outputs import ChatGeneration, ChatGenerationChunk, ChatResult
-from langchain_core.pydantic_v1 import Extra
 from langchain_community.chat_models.anthropic import (
     convert_messages_to_prompt_anthropic,
@@ -233,9 +232,7 @@ class BedrockChat(BaseChatModel, BedrockBase):
         return attributes
     class Config:
-        """Configuration for this pydantic object."""
-        extra = Extra.forbid
+        extra = "forbid"
     def _stream(
         self,

@@ -118,8 +118,6 @@ class ChatCohere(BaseChatModel, BaseCohere):
     """
     class Config:
-        """Configuration for this pydantic object."""
         allow_population_by_field_name = True
         arbitrary_types_allowed = True

@@ -112,8 +112,6 @@ class ChatCoze(BaseChatModel):
     the client needs to assemble the final reply based on the type of message. """
     class Config:
-        """Configuration for this pydantic object."""
         allow_population_by_field_name = True
     @root_validator(pre=True)

@@ -13,7 +13,7 @@ from langchain_core.messages import (
     BaseMessage,
 )
 from langchain_core.outputs import ChatGeneration, ChatResult
-from langchain_core.pydantic_v1 import Extra, Field, SecretStr, root_validator
+from langchain_core.pydantic_v1 import Field, SecretStr, root_validator
 from langchain_core.utils import convert_to_secret_str, get_from_dict_or_env
 from langchain_community.utilities.requests import Requests
@@ -71,9 +71,7 @@ class ChatDappierAI(BaseChatModel):
     dappier_api_key: Optional[SecretStr] = Field(None, description="Dappier API Token")
     class Config:
-        """Configuration for this pydantic object."""
-        extra = Extra.forbid
+        extra = "forbid"
     @root_validator(pre=True)
     def validate_environment(cls, values: Dict) -> Dict:

@@ -49,7 +49,6 @@ from langchain_core.output_parsers.openai_tools import (
 from langchain_core.outputs import ChatGeneration, ChatGenerationChunk, ChatResult
 from langchain_core.pydantic_v1 import (
     BaseModel,
-    Extra,
     Field,
     SecretStr,
 )
@@ -298,9 +297,7 @@ class ChatEdenAI(BaseChatModel):
     edenai_api_key: Optional[SecretStr] = Field(None, description="EdenAI API Token")
     class Config:
-        """Configuration for this pydantic object."""
-        extra = Extra.forbid
+        extra = "forbid"
     @pre_init
     def validate_environment(cls, values: Dict) -> Dict:

@@ -133,8 +133,6 @@ class ChatHunyuan(BaseChatModel):
     """Holds any model parameters valid for API call not explicitly specified."""
     class Config:
-        """Configuration for this pydantic object."""
         allow_population_by_field_name = True
     @root_validator(pre=True)

@@ -18,14 +18,14 @@ from langchain_core.outputs import (
     ChatGeneration,
     ChatResult,
 )
-from langchain_core.pydantic_v1 import BaseModel, Extra, Field, SecretStr
+from langchain_core.pydantic_v1 import BaseModel, Field, SecretStr
 logger = logging.getLogger(__name__)
 # Ignoring type because below is valid pydantic code
 # Unexpected keyword argument "extra" for "__init_subclass__" of "object" [call-arg]
-class ChatParams(BaseModel, extra=Extra.allow):
+class ChatParams(BaseModel, extra="allow"):
     """Parameters for the `Javelin AI Gateway` LLM."""
     temperature: float = 0.0
@@ -69,8 +69,6 @@ class ChatJavelinAIGateway(BaseChatModel):
     """The API key for the Javelin AI Gateway."""
     class Config:
-        """Configuration for this pydantic object."""
         allow_population_by_field_name = True
     def __init__(self, **kwargs: Any):

@@ -189,8 +189,6 @@ class JinaChat(BaseChatModel):
     """Maximum number of tokens to generate."""
     class Config:
-        """Configuration for this pydantic object."""
         allow_population_by_field_name = True
     @root_validator(pre=True)

@@ -544,8 +544,6 @@ class KineticaSqlResponse(BaseModel):
     """The Pandas dataframe containing the fetched data."""
     class Config:
-        """Configuration for this pydantic object."""
         arbitrary_types_allowed = True
@@ -585,8 +583,6 @@ class KineticaSqlOutputParser(BaseOutputParser[KineticaSqlResponse]):
     """ Kinetica DB connection. """
     class Config:
-        """Configuration for this pydantic object."""
         arbitrary_types_allowed = True
     def parse(self, text: str) -> KineticaSqlResponse:

@@ -85,8 +85,6 @@ class LlamaEdgeChatService(BaseChatModel):
     """Whether to stream the results or not."""
     class Config:
-        """Configuration for this pydantic object."""
         allow_population_by_field_name = True
     @root_validator(pre=True)

@@ -384,8 +384,6 @@ class MiniMaxChat(BaseChatModel):
     """Whether to stream the results or not."""
     class Config:
-        """Configuration for this pydantic object."""
         allow_population_by_field_name = True
     @root_validator(pre=True, allow_reuse=True)

@@ -18,14 +18,14 @@ from langchain_core.outputs import (
     ChatGeneration,
     ChatResult,
 )
-from langchain_core.pydantic_v1 import BaseModel, Extra
+from langchain_core.pydantic_v1 import BaseModel
 logger = logging.getLogger(__name__)
 # Ignoring type because below is valid pydantic code
 # Unexpected keyword argument "extra" for "__init_subclass__" of "object" [call-arg]
-class ChatParams(BaseModel, extra=Extra.allow):
+class ChatParams(BaseModel, extra="allow"):
     """Parameters for the `MLflow AI Gateway` LLM."""
     temperature: float = 0.0

@@ -38,7 +38,7 @@ from langchain_core.output_parsers.openai_tools import (
     PydanticToolsParser,
 )
 from langchain_core.outputs import ChatGeneration, ChatGenerationChunk, ChatResult
-from langchain_core.pydantic_v1 import BaseModel, Extra
+from langchain_core.pydantic_v1 import BaseModel
 from langchain_core.runnables import Runnable
 from langchain_core.tools import BaseTool
 from langchain_core.utils.function_calling import convert_to_openai_function
@@ -500,9 +500,7 @@ class ChatOCIGenAI(BaseChatModel, OCIGenAIBase):
     """  # noqa: E501
     class Config:
-        """Configuration for this pydantic object."""
-        extra = Extra.forbid
+        extra = "forbid"
     @property
     def _llm_type(self) -> str:

@@ -245,8 +245,6 @@ class ChatOpenAI(BaseChatModel):
     """Optional httpx.Client."""
     class Config:
-        """Configuration for this pydantic object."""
         allow_population_by_field_name = True
     @root_validator(pre=True)

@@ -82,8 +82,6 @@ class ChatPerplexity(BaseChatModel):
     """Maximum number of tokens to generate."""
     class Config:
-        """Configuration for this pydantic object."""
         allow_population_by_field_name = True
     @property

@@ -40,7 +40,6 @@ from langchain_core.messages import (
 from langchain_core.outputs import ChatGeneration, ChatGenerationChunk, ChatResult
 from langchain_core.pydantic_v1 import (
     BaseModel,
-    Extra,
     Field,
     SecretStr,
 )
@@ -299,11 +298,9 @@ class ChatPremAI(BaseChatModel, BaseModel):
     client: Any
     class Config:
-        """Configuration for this pydantic object."""
-        extra = Extra.forbid
         allow_population_by_field_name = True
         arbitrary_types_allowed = True
+        extra = "forbid"
     @pre_init
     def validate_environments(cls, values: Dict) -> Dict:

@@ -31,8 +31,6 @@ class SolarChat(SolarCommon, ChatOpenAI):
     # this is needed to match ChatOpenAI superclass
     class Config:
-        """Configuration for this pydantic object."""
         allow_population_by_field_name = True
         arbitrary_types_allowed = True
         extra = "ignore"

@@ -246,8 +246,6 @@ class ChatSparkLLM(BaseChatModel):
     """Holds any model parameters valid for API call not explicitly specified."""
     class Config:
-        """Configuration for this pydantic object."""
         allow_population_by_field_name = True
     @root_validator(pre=True)

@@ -458,8 +458,6 @@ class ChatTongyi(BaseChatModel):
     """Maximum number of retries to make when generating."""
     class Config:
-        """Configuration for this pydantic object."""
         allow_population_by_field_name = True
     @property

@@ -121,8 +121,6 @@ class ChatYuan2(BaseChatModel):
     """The penalty to apply to repeated tokens."""
     class Config:
-        """Configuration for this pydantic object."""
         allow_population_by_field_name = True
     @property

@@ -373,8 +373,6 @@ class ChatZhipuAI(BaseChatModel):
     """Maximum number of tokens to generate."""
     class Config:
-        """Configuration for this pydantic object."""
         allow_population_by_field_name = True
     @root_validator(pre=True)

@@ -1,6 +1,6 @@
 from typing import Any, Dict, List, Tuple
-from langchain_core.pydantic_v1 import BaseModel, Extra, Field
+from langchain_core.pydantic_v1 import BaseModel, Field
 from langchain_community.cross_encoders.base import BaseCrossEncoder
@@ -46,9 +46,7 @@ class HuggingFaceCrossEncoder(BaseModel, BaseCrossEncoder):
     )
     class Config:
-        """Configuration for this pydantic object."""
-        extra = Extra.forbid
+        extra = "forbid"
     def score(self, text_pairs: List[Tuple[str, str]]) -> List[float]:
         """Compute similarity scores using a HuggingFace transformer model.

@@ -1,7 +1,7 @@
 import json
 from typing import Any, Dict, List, Optional, Tuple
-from langchain_core.pydantic_v1 import BaseModel, Extra, root_validator
+from langchain_core.pydantic_v1 import BaseModel, root_validator
 from langchain_community.cross_encoders.base import BaseCrossEncoder
@@ -90,10 +90,8 @@ class SagemakerEndpointCrossEncoder(BaseModel, BaseCrossEncoder):
     """
     class Config:
-        """Configuration for this pydantic object."""
-        extra = Extra.forbid
         arbitrary_types_allowed = True
+        extra = "forbid"
     @root_validator(pre=True)
     def validate_environment(cls, values: Dict) -> Dict:

@@ -5,7 +5,7 @@ from typing import Any, Dict, List, Optional, Sequence, Union
 from langchain_core.callbacks.base import Callbacks
 from langchain_core.documents import BaseDocumentCompressor, Document
-from langchain_core.pydantic_v1 import Extra, Field, root_validator
+from langchain_core.pydantic_v1 import Field, root_validator
 from langchain_core.utils import get_from_dict_or_env
@@ -26,11 +26,9 @@ class DashScopeRerank(BaseDocumentCompressor):
     DASHSCOPE_API_KEY."""
     class Config:
-        """Configuration for this pydantic object."""
-        extra = Extra.forbid
-        arbitrary_types_allowed = True
         allow_population_by_field_name = True
+        arbitrary_types_allowed = True
+        extra = "forbid"
     @root_validator(pre=True)
     def validate_environment(cls, values: Dict) -> Dict:

@@ -4,7 +4,7 @@ from typing import TYPE_CHECKING, Dict, Optional, Sequence
 from langchain_core.callbacks.manager import Callbacks
 from langchain_core.documents import BaseDocumentCompressor, Document
-from langchain_core.pydantic_v1 import Extra, root_validator
+from langchain_core.pydantic_v1 import root_validator
 if TYPE_CHECKING:
     from flashrank import Ranker, RerankRequest
@@ -34,10 +34,8 @@ class FlashrankRerank(BaseDocumentCompressor):
     """Prefix for flashrank_rerank metadata keys"""
     class Config:
-        """Configuration for this pydantic object."""
-        extra = Extra.forbid
         arbitrary_types_allowed = True
+        extra = "forbid"
     @root_validator(pre=True)
     def validate_environment(cls, values: Dict) -> Dict:

@@ -6,7 +6,7 @@ from typing import Any, Dict, List, Optional, Sequence, Union
 import requests
 from langchain_core.callbacks import Callbacks
 from langchain_core.documents import BaseDocumentCompressor, Document
-from langchain_core.pydantic_v1 import Extra, root_validator
+from langchain_core.pydantic_v1 import root_validator
 from langchain_core.utils import get_from_dict_or_env
 JINA_API_URL: str = "https://api.jina.ai/v1/rerank"
@@ -28,10 +28,8 @@ class JinaRerank(BaseDocumentCompressor):
     """Identifier for the application making the request."""
     class Config:
-        """Configuration for this pydantic object."""
-        extra = Extra.forbid
         arbitrary_types_allowed = True
+        extra = "forbid"
     @root_validator(pre=True)
     def validate_environment(cls, values: Dict) -> Dict:

@@ -72,10 +72,8 @@ class LLMLinguaCompressor(BaseDocumentCompressor):
         return values
     class Config:
-        """Configuration for this pydantic object."""
-        extra = "forbid"
         arbitrary_types_allowed = True
+        extra = "forbid"
     @staticmethod
     def _format_context(docs: Sequence[Document]) -> List[str]:

@@ -7,7 +7,7 @@ from typing import TYPE_CHECKING, Any, Dict, Optional, Sequence
 from langchain.retrievers.document_compressors.base import BaseDocumentCompressor
 from langchain_core.callbacks.manager import Callbacks
 from langchain_core.documents import Document
-from langchain_core.pydantic_v1 import Extra, Field, PrivateAttr, root_validator
+from langchain_core.pydantic_v1 import Field, PrivateAttr, root_validator
 from langchain_core.utils import get_from_dict_or_env
 if TYPE_CHECKING:
@@ -37,10 +37,8 @@ class RankLLMRerank(BaseDocumentCompressor):
     _retriever: Any = PrivateAttr()
     class Config:
-        """Configuration for this pydantic object."""
-        extra = Extra.forbid
         arbitrary_types_allowed = True
+        extra = "forbid"
     @root_validator(pre=True)
     def validate_environment(cls, values: Dict) -> Dict:

@@ -5,7 +5,7 @@ from typing import Any, Dict, List, Optional, Sequence, Union
 from langchain_core.callbacks.base import Callbacks
 from langchain_core.documents import BaseDocumentCompressor, Document
-from langchain_core.pydantic_v1 import Extra, root_validator
+from langchain_core.pydantic_v1 import root_validator
 from langchain_core.utils import get_from_dict_or_env
@@ -33,11 +33,9 @@ class VolcengineRerank(BaseDocumentCompressor):
     """Number of documents to return."""
     class Config:
-        """Configuration for this pydantic object."""
-        extra = Extra.forbid
-        arbitrary_types_allowed = True
         allow_population_by_field_name = True
+        arbitrary_types_allowed = True
+        extra = "forbid"
     @root_validator(pre=True)
     def validate_environment(cls, values: Dict) -> Dict:

@@ -38,9 +38,9 @@ class _O365Settings(BaseSettings):
     client_secret: SecretStr = Field(..., env="O365_CLIENT_SECRET")
     class Config:
-        env_prefix = ""
         case_sentive = False
         env_file = ".env"
+        env_prefix = ""
 class _O365TokenStorage(BaseSettings):

@@ -23,8 +23,6 @@ class OneDriveFileLoader(BaseLoader, BaseModel):
     class Config:
         arbitrary_types_allowed = True
-        """Allow arbitrary types. This is needed for the File type. Default is True.
-        See https://pydantic-docs.helpmanual.io/usage/types/#arbitrary-types-allowed"""
     def load(self) -> List[Document]:
         """Load Documents"""

@@ -21,11 +21,9 @@ class _OneNoteGraphSettings(BaseSettings):
     client_secret: SecretStr = Field(..., env="MS_GRAPH_CLIENT_SECRET")
     class Config:
-        """Config for OneNoteGraphSettings."""
-        env_prefix = ""
         case_sentive = False
         env_file = ".env"
+        env_prefix = ""
 class OneNoteLoader(BaseLoader, BaseModel):

@@ -154,8 +154,6 @@ class EmbeddingsRedundantFilter(BaseDocumentTransformer, BaseModel):
     to be considered redundant."""
     class Config:
-        """Configuration for this pydantic object."""
         arbitrary_types_allowed = True
     def transform_documents(
@@ -204,8 +202,6 @@ class EmbeddingsClusteringFilter(BaseDocumentTransformer, BaseModel):
     """
     class Config:
-        """Configuration for this pydantic object."""
         arbitrary_types_allowed = True
     def transform_documents(

@@ -30,8 +30,6 @@ class LongContextReorder(BaseDocumentTransformer, BaseModel):
     See: https://arxiv.org/abs//2307.03172"""
     class Config:
-        """Configuration for this pydantic object."""
         arbitrary_types_allowed = True
     def transform_documents(

@@ -59,8 +59,6 @@ class BaichuanTextEmbeddings(BaseModel, Embeddings):
     """Chunk size when multiple texts are input"""
     class Config:
-        """Configuration for this pydantic object."""
         allow_population_by_field_name = True
     @root_validator(allow_reuse=True)

@@ -6,7 +6,7 @@ from typing import Any, Dict, List, Optional
 import numpy as np
 from langchain_core._api.deprecation import deprecated
 from langchain_core.embeddings import Embeddings
-from langchain_core.pydantic_v1 import BaseModel, Extra, root_validator
+from langchain_core.pydantic_v1 import BaseModel, root_validator
 from langchain_core.runnables.config import run_in_executor
@@ -75,9 +75,7 @@ class BedrockEmbeddings(BaseModel, Embeddings):
     """Whether the embeddings should be normalized to unit vectors"""
     class Config:
-        """Configuration for this pydantic object."""
-        extra = Extra.forbid
+        extra = "forbid"
     @root_validator(pre=False, skip_on_failure=True)
     def validate_environment(cls, values: Dict) -> Dict:

@@ -2,7 +2,7 @@ import logging
 from typing import Any, Dict, List, Optional
 from langchain_core.embeddings import Embeddings
-from langchain_core.pydantic_v1 import BaseModel, Extra, Field, root_validator
+from langchain_core.pydantic_v1 import BaseModel, Field, root_validator
 logger = logging.getLogger(__name__)
@@ -44,9 +44,7 @@ class ClarifaiEmbeddings(BaseModel, Embeddings):
     api_base: str = "https://api.clarifai.com"
     class Config:
-        """Configuration for this pydantic object."""
-        extra = Extra.forbid
+        extra = "forbid"
     @root_validator(pre=True)
     def validate_environment(cls, values: Dict) -> Dict:

@@ -2,7 +2,7 @@ from typing import Any, Dict, List
 import requests
 from langchain_core.embeddings import Embeddings
-from langchain_core.pydantic_v1 import BaseModel, Extra
+from langchain_core.pydantic_v1 import BaseModel
 DEFAULT_MODEL_NAME = "@cf/baai/bge-base-en-v1.5"
@@ -44,9 +44,7 @@ class CloudflareWorkersAIEmbeddings(BaseModel, Embeddings):
         self.headers = {"Authorization": f"Bearer {self.api_token}"}
     class Config:
-        """Configuration for this pydantic object."""
-        extra = Extra.forbid
+        extra = "forbid"
     def embed_documents(self, texts: List[str]) -> List[List[float]]:
         """Compute doc embeddings using Cloudflare Workers AI.

@@ -4,7 +4,7 @@ from typing import Dict, List, Optional, cast
 import requests
 from langchain_core.embeddings import Embeddings
-from langchain_core.pydantic_v1 import BaseModel, Extra, SecretStr, root_validator
+from langchain_core.pydantic_v1 import BaseModel, SecretStr, root_validator
 from langchain_core.utils import convert_to_secret_str, get_from_dict_or_env
@@ -54,7 +54,7 @@ class ClovaEmbeddings(BaseModel, Embeddings):
     """Application ID for identifying your application."""
     class Config:
-        extra = Extra.forbid
+        extra = "forbid"
     @root_validator(pre=True, allow_reuse=True)
     def validate_environment(cls, values: Dict) -> Dict:

@@ -2,7 +2,7 @@ from typing import Any, Dict, List, Optional
 from langchain_core._api.deprecation import deprecated
 from langchain_core.embeddings import Embeddings
-from langchain_core.pydantic_v1 import BaseModel, Extra, root_validator
+from langchain_core.pydantic_v1 import BaseModel, root_validator
 from langchain_core.utils import get_from_dict_or_env
 from langchain_community.llms.cohere import _create_retry_decorator
@@ -50,9 +50,7 @@ class CohereEmbeddings(BaseModel, Embeddings):
     """Identifier for the application making the request."""
     class Config:
-        """Configuration for this pydantic object."""
-        extra = Extra.forbid
+        extra = "forbid"
     @root_validator(pre=True)
     def validate_environment(cls, values: Dict) -> Dict:

@@ -10,7 +10,7 @@ from typing import (
 )
 from langchain_core.embeddings import Embeddings
-from langchain_core.pydantic_v1 import BaseModel, Extra, root_validator
+from langchain_core.pydantic_v1 import BaseModel, root_validator
 from langchain_core.utils import get_from_dict_or_env
 from requests.exceptions import HTTPError
 from tenacity import (
@@ -109,9 +109,7 @@ class DashScopeEmbeddings(BaseModel, Embeddings):
     """Maximum number of retries to make when generating."""
     class Config:
-        """Configuration for this pydantic object."""
-        extra = Extra.forbid
+        extra = "forbid"
     @root_validator(pre=True)
     def validate_environment(cls, values: Dict) -> Dict:

@@ -2,7 +2,7 @@ from typing import Any, Dict, List, Mapping, Optional
 import requests
 from langchain_core.embeddings import Embeddings
-from langchain_core.pydantic_v1 import BaseModel, Extra
+from langchain_core.pydantic_v1 import BaseModel
 from langchain_core.utils import get_from_dict_or_env, pre_init
 DEFAULT_MODEL_ID = "sentence-transformers/clip-ViT-B-32"
@@ -55,9 +55,7 @@ class DeepInfraEmbeddings(BaseModel, Embeddings):
     """Batch size for embedding requests."""
     class Config:
-        """Configuration for this pydantic object."""
-        extra = Extra.forbid
+        extra = "forbid"
     @pre_init
     def validate_environment(cls, values: Dict) -> Dict:

@@ -3,7 +3,6 @@ from typing import Any, Dict, List, Optional
 from langchain_core.embeddings import Embeddings
 from langchain_core.pydantic_v1 import (
     BaseModel,
-    Extra,
     Field,
     SecretStr,
 )
@@ -30,9 +29,7 @@ class EdenAiEmbeddings(BaseModel, Embeddings):
     """
     class Config:
-        """Configuration for this pydantic object."""
-        extra = Extra.forbid
+        extra = "forbid"
     @pre_init
     def validate_environment(cls, values: Dict) -> Dict:

@@ -2,7 +2,7 @@ from typing import Any, Dict, List, Mapping, Optional
 import requests
 from langchain_core.embeddings import Embeddings
-from langchain_core.pydantic_v1 import BaseModel, Extra, SecretStr
+from langchain_core.pydantic_v1 import BaseModel, SecretStr
 from langchain_core.utils import convert_to_secret_str, get_from_dict_or_env, pre_init
 from requests.adapters import HTTPAdapter, Retry
 from typing_extensions import NotRequired, TypedDict
@@ -57,9 +57,7 @@ class EmbaasEmbeddings(BaseModel, Embeddings):
     timeout: Optional[int] = 30
     class Config:
-        """Configuration for this pydantic object."""
-        extra = Extra.forbid
+        extra = "forbid"
     @pre_init
     def validate_environment(cls, values: Dict) -> Dict:

@@ -4,7 +4,7 @@ from typing import Any, Dict, List, Literal, Optional
 import numpy as np
 from langchain_core.embeddings import Embeddings
-from langchain_core.pydantic_v1 import BaseModel, Extra
+from langchain_core.pydantic_v1 import BaseModel
 from langchain_core.utils import pre_init
 MIN_VERSION = "0.2.0"
@@ -67,9 +67,7 @@ class FastEmbedEmbeddings(BaseModel, Embeddings):
     _model: Any  # : :meta private:
     class Config:
-        """Configuration for this pydantic object."""
-        extra = Extra.allow
+        extra = "allow"
     @pre_init
     def validate_environment(cls, values: Dict) -> Dict:

@@ -1,7 +1,7 @@
 from typing import Any, Dict, List, Optional
 from langchain_core.embeddings import Embeddings
-from langchain_core.pydantic_v1 import BaseModel, Extra, root_validator
+from langchain_core.pydantic_v1 import BaseModel, root_validator
 from langchain_core.utils import get_from_dict_or_env
 from packaging.version import parse
@@ -51,9 +51,7 @@ class GradientEmbeddings(BaseModel, Embeddings):
     # LLM call kwargs
     class Config:
-        """Configuration for this pydantic object."""
-        extra = Extra.forbid
+        extra = "forbid"
     @root_validator(allow_reuse=True)
     def validate_environment(cls, values: Dict) -> Dict:

@@ -4,7 +4,7 @@ from typing import Any, Dict, List, Optional
 import requests
 from langchain_core._api import deprecated, warn_deprecated
 from langchain_core.embeddings import Embeddings
-from langchain_core.pydantic_v1 import BaseModel, Extra, Field, SecretStr
+from langchain_core.pydantic_v1 import BaseModel, Field, SecretStr
 DEFAULT_MODEL_NAME = "sentence-transformers/all-mpnet-base-v2"
 DEFAULT_INSTRUCT_MODEL = "hkunlp/instructor-large"
@@ -81,9 +81,7 @@ class HuggingFaceEmbeddings(BaseModel, Embeddings):
     )
     class Config:
-        """Configuration for this pydantic object."""
-        extra = Extra.forbid
+        extra = "forbid"
     def embed_documents(self, texts: List[str]) -> List[List[float]]:
         """Compute doc embeddings using a HuggingFace transformer model.
@@ -185,9 +183,7 @@ class HuggingFaceInstructEmbeddings(BaseModel, Embeddings):
            self.show_progress = self.encode_kwargs.pop("show_progress_bar")
     class Config:
-        """Configuration for this pydantic object."""
-        extra = Extra.forbid
+        extra = "forbid"
     def embed_documents(self, texts: List[str]) -> List[List[float]]:
         """Compute doc embeddings using a HuggingFace instruct model.
@@ -314,9 +310,7 @@ class HuggingFaceBgeEmbeddings(BaseModel, Embeddings):
            self.show_progress = self.encode_kwargs.pop("show_progress_bar")
     class Config:
-        """Configuration for this pydantic object."""
-        extra = Extra.forbid
+        extra = "forbid"
     def embed_documents(self, texts: List[str]) -> List[List[float]]:
         """Compute doc embeddings using a HuggingFace transformer model.

@@ -3,7 +3,7 @@ from typing import Any, Dict, List, Optional
 from langchain_core._api import deprecated
 from langchain_core.embeddings import Embeddings
-from langchain_core.pydantic_v1 import BaseModel, Extra, root_validator
+from langchain_core.pydantic_v1 import BaseModel, root_validator
 from langchain_core.utils import get_from_dict_or_env
 DEFAULT_MODEL = "sentence-transformers/all-mpnet-base-v2"
@@ -48,9 +48,7 @@ class HuggingFaceHubEmbeddings(BaseModel, Embeddings):
     huggingfacehub_api_token: Optional[str] = None
     class Config:
-        """Configuration for this pydantic object."""
-        extra = Extra.forbid
+        extra = "forbid"
     @root_validator(pre=True)
     def validate_environment(cls, values: Dict) -> Dict:

@@ -8,7 +8,7 @@ import aiohttp
 import numpy as np
 import requests
 from langchain_core.embeddings import Embeddings
-from langchain_core.pydantic_v1 import BaseModel, Extra, root_validator
+from langchain_core.pydantic_v1 import BaseModel, root_validator
 from langchain_core.utils import get_from_dict_or_env
 __all__ = ["InfinityEmbeddings"]
@@ -45,9 +45,7 @@ class InfinityEmbeddings(BaseModel, Embeddings):
     # LLM call kwargs
     class Config:
-        """Configuration for this pydantic object."""
-        extra = Extra.forbid
+        extra = "forbid"
     @root_validator(allow_reuse=True)
     def validate_environment(cls, values: Dict) -> Dict:

@@ -5,7 +5,7 @@ from logging import getLogger
 from typing import Any, Dict, List, Optional
 from langchain_core.embeddings import Embeddings
-from langchain_core.pydantic_v1 import BaseModel, Extra, root_validator
+from langchain_core.pydantic_v1 import BaseModel, root_validator
 __all__ = ["InfinityEmbeddingsLocal"]
@@ -58,9 +58,7 @@ class InfinityEmbeddingsLocal(BaseModel, Embeddings):
     # LLM call kwargs
     class Config:
-        """Configuration for this pydantic object."""
-        extra = Extra.forbid
+        extra = "forbid"
     @root_validator(allow_reuse=True)
     def validate_environment(cls, values: Dict) -> Dict:

@@ -4,7 +4,7 @@
 from typing import Any, Dict, List, Optional
 from langchain_core.embeddings import Embeddings
-from langchain_core.pydantic_v1 import BaseModel, Extra, Field
+from langchain_core.pydantic_v1 import BaseModel, Field
 DEFAULT_BGE_MODEL = "BAAI/bge-small-en-v1.5"
 DEFAULT_QUERY_BGE_INSTRUCTION_EN = (
@@ -107,9 +107,7 @@ class IpexLLMBgeEmbeddings(BaseModel, Embeddings):
            self.query_instruction = DEFAULT_QUERY_BGE_INSTRUCTION_ZH
     class Config:
-        """Configuration for this pydantic object."""
-        extra = Extra.forbid
+        extra = "forbid"
     def embed_documents(self, texts: List[str]) -> List[List[float]]:
         """Compute doc embeddings using a HuggingFace transformer model.

@@ -3,7 +3,7 @@ import os
 from typing import Any, Dict, List, Optional
 from langchain_core.embeddings import Embeddings
-from langchain_core.pydantic_v1 import BaseModel, Extra
+from langchain_core.pydantic_v1 import BaseModel
 class QuantizedBgeEmbeddings(BaseModel, Embeddings):
@@ -119,9 +119,7 @@ class QuantizedBgeEmbeddings(BaseModel, Embeddings):
     )
     class Config:
-        """Configuration for this pydantic object."""
-        extra = Extra.allow
+        extra = "allow"
     def _embed(self, inputs: Any) -> Any:
         import torch

@@ -3,7 +3,7 @@ import sys
 from typing import Any, List
 from langchain_core.embeddings import Embeddings
-from langchain_core.pydantic_v1 import BaseModel, Extra
+from langchain_core.pydantic_v1 import BaseModel
 class JohnSnowLabsEmbeddings(BaseModel, Embeddings):
@@ -59,9 +59,7 @@ class JohnSnowLabsEmbeddings(BaseModel, Embeddings):
            raise Exception("Failure loading model") from exc
     class Config:
-        """Configuration for this pydantic object."""
-        extra = Extra.forbid
+        extra = "forbid"
     def embed_documents(self, texts: List[str]) -> List[List[float]]:
         """Compute doc embeddings using a JohnSnowLabs transformer model.

@@ -2,7 +2,7 @@ from typing import Any, Dict, List, Optional
 import numpy as np
 from langchain_core.embeddings import Embeddings
-from langchain_core.pydantic_v1 import BaseModel, Extra
+from langchain_core.pydantic_v1 import BaseModel
 from langchain_core.utils import pre_init
 LASER_MULTILINGUAL_MODEL: str = "laser2"
@@ -38,9 +38,7 @@ class LaserEmbeddings(BaseModel, Embeddings):
     _encoder_pipeline: Any  # : :meta private:
     class Config:
-        """Configuration for this pydantic object."""
-        extra = Extra.forbid
+        extra = "forbid"
     @pre_init
     def validate_environment(cls, values: Dict) -> Dict:

@@ -1,7 +1,7 @@
 from typing import Any, Dict, List, Optional
 from langchain_core.embeddings import Embeddings
-from langchain_core.pydantic_v1 import BaseModel, Extra, Field, root_validator
+from langchain_core.pydantic_v1 import BaseModel, Field, root_validator
 class LlamaCppEmbeddings(BaseModel, Embeddings):
@@ -58,9 +58,7 @@ class LlamaCppEmbeddings(BaseModel, Embeddings):
     """Print verbose output to stderr."""
     class Config:
-        """Configuration for this pydantic object."""
-        extra = Extra.forbid
+        extra = "forbid"
     @root_validator(pre=False, skip_on_failure=True)
     def validate_environment(cls, values: Dict) -> Dict:

@@ -4,7 +4,7 @@ from typing import Dict, List, Optional
 import requests
 from langchain_core.embeddings import Embeddings
-from langchain_core.pydantic_v1 import BaseModel, Extra, SecretStr
+from langchain_core.pydantic_v1 import BaseModel, SecretStr
 from langchain_core.utils import convert_to_secret_str, get_from_dict_or_env, pre_init
@@ -33,9 +33,7 @@ class LLMRailsEmbeddings(BaseModel, Embeddings):
     """LLMRails API key."""
     class Config:
-        """Configuration for this pydantic object."""
-        extra = Extra.forbid
+        extra = "forbid"
     @pre_init
     def validate_environment(cls, values: Dict) -> Dict:

@@ -16,7 +16,7 @@ from typing import (
 )
 from langchain_core.embeddings import Embeddings
-from langchain_core.pydantic_v1 import BaseModel, Extra, Field, root_validator
+from langchain_core.pydantic_v1 import BaseModel, Field, root_validator
 from langchain_core.utils import (
     get_from_dict_or_env,
     get_pydantic_field_names,
@@ -167,9 +167,7 @@ class LocalAIEmbeddings(BaseModel, Embeddings):
     """Holds any model parameters valid for `create` call not explicitly specified."""
     class Config:
-        """Configuration for this pydantic object."""
-        extra = Extra.forbid
+        extra = "forbid"
     @root_validator(pre=True)
     def build_extra(cls, values: Dict[str, Any]) -> Dict[str, Any]:

@@ -5,7 +5,7 @@ from typing import Any, Callable, Dict, List, Optional
 import requests
 from langchain_core.embeddings import Embeddings
-from langchain_core.pydantic_v1 import BaseModel, Extra, Field, SecretStr
+from langchain_core.pydantic_v1 import BaseModel, Field, SecretStr
 from langchain_core.utils import convert_to_secret_str, get_from_dict_or_env, pre_init
 from tenacity import (
     before_sleep_log,
@@ -118,10 +118,8 @@ class MiniMaxEmbeddings(BaseModel, Embeddings):
     """API Key for MiniMax API."""
     class Config:
-        """Configuration for this pydantic object."""
-        extra = Extra.forbid
         allow_population_by_field_name = True
+        extra = "forbid"
     @pre_init
     def validate_environment(cls, values: Dict) -> Dict:

@@ -1,7 +1,7 @@
 from typing import Any, List, Optional
 from langchain_core.embeddings import Embeddings
-from langchain_core.pydantic_v1 import BaseModel, Extra
+from langchain_core.pydantic_v1 import BaseModel
 class ModelScopeEmbeddings(BaseModel, Embeddings):
@@ -40,9 +40,7 @@ class ModelScopeEmbeddings(BaseModel, Embeddings):
     )
     class Config:
-        """Configuration for this pydantic object."""
-        extra = Extra.forbid
+        extra = "forbid"
     def embed_documents(self, texts: List[str]) -> List[List[float]]:
         """Compute doc embeddings using a modelscope embedding model.

@@ -2,7 +2,7 @@ from typing import Any, Dict, List, Mapping, Optional, Tuple
 import requests
 from langchain_core.embeddings import Embeddings
-from langchain_core.pydantic_v1 import BaseModel, Extra, root_validator
+from langchain_core.pydantic_v1 import BaseModel, root_validator
 from langchain_core.utils import get_from_dict_or_env
@@ -42,9 +42,7 @@ class MosaicMLInstructorEmbeddings(BaseModel, Embeddings):
     mosaicml_api_token: Optional[str] = None
     class Config:
-        """Configuration for this pydantic object."""
-        extra = Extra.forbid
+        extra = "forbid"
     @root_validator(pre=True)
     def validate_environment(cls, values: Dict) -> Dict:

View File

@@ -2,7 +2,7 @@ from enum import Enum
from typing import Any, Dict, Iterator, List, Mapping, Optional from typing import Any, Dict, Iterator, List, Mapping, Optional
from langchain_core.embeddings import Embeddings from langchain_core.embeddings import Embeddings
from langchain_core.pydantic_v1 import BaseModel, Extra from langchain_core.pydantic_v1 import BaseModel
from langchain_core.utils import pre_init from langchain_core.utils import pre_init
CUSTOM_ENDPOINT_PREFIX = "ocid1.generativeaiendpoint" CUSTOM_ENDPOINT_PREFIX = "ocid1.generativeaiendpoint"
@@ -86,9 +86,7 @@ class OCIGenAIEmbeddings(BaseModel, Embeddings):
per request""" per request"""
class Config: class Config:
"""Configuration for this pydantic object.""" extra = "forbid"
extra = Extra.forbid
@pre_init @pre_init
def validate_environment(cls, values: Dict) -> Dict: # pylint: disable=no-self-argument def validate_environment(cls, values: Dict) -> Dict: # pylint: disable=no-self-argument

View File

@@ -3,7 +3,7 @@ from typing import Any, Dict, List, Mapping, Optional
import requests import requests
from langchain_core.embeddings import Embeddings from langchain_core.embeddings import Embeddings
from langchain_core.pydantic_v1 import BaseModel, Extra from langchain_core.pydantic_v1 import BaseModel
logger = logging.getLogger(__name__) logger = logging.getLogger(__name__)
@@ -142,9 +142,7 @@ class OllamaEmbeddings(BaseModel, Embeddings):
return {**{"model": self.model}, **self._default_params} return {**{"model": self.model}, **self._default_params}
class Config: class Config:
"""Configuration for this pydantic object.""" extra = "forbid"
extra = Extra.forbid
def _process_emb_response(self, input: str) -> List[float]: def _process_emb_response(self, input: str) -> List[float]:
"""Process a response from the API. """Process a response from the API.

View File

@@ -21,7 +21,7 @@ from typing import (
import numpy as np import numpy as np
from langchain_core._api.deprecation import deprecated from langchain_core._api.deprecation import deprecated
from langchain_core.embeddings import Embeddings from langchain_core.embeddings import Embeddings
from langchain_core.pydantic_v1 import BaseModel, Extra, Field, root_validator from langchain_core.pydantic_v1 import BaseModel, Field, root_validator
from langchain_core.utils import ( from langchain_core.utils import (
get_from_dict_or_env, get_from_dict_or_env,
get_pydantic_field_names, get_pydantic_field_names,
@@ -255,10 +255,8 @@ class OpenAIEmbeddings(BaseModel, Embeddings):
"""Optional httpx.Client.""" """Optional httpx.Client."""
class Config: class Config:
"""Configuration for this pydantic object."""
extra = Extra.forbid
allow_population_by_field_name = True allow_population_by_field_name = True
extra = "forbid"
@root_validator(pre=True) @root_validator(pre=True)
def build_extra(cls, values: Dict[str, Any]) -> Dict[str, Any]: def build_extra(cls, values: Dict[str, Any]) -> Dict[str, Any]:

View File

@@ -2,7 +2,7 @@ from pathlib import Path
from typing import Any, Dict, List from typing import Any, Dict, List
from langchain_core.embeddings import Embeddings from langchain_core.embeddings import Embeddings
from langchain_core.pydantic_v1 import BaseModel, Extra, Field from langchain_core.pydantic_v1 import BaseModel, Field
DEFAULT_QUERY_INSTRUCTION = ( DEFAULT_QUERY_INSTRUCTION = (
"Represent the question for retrieving supporting documents: " "Represent the question for retrieving supporting documents: "
@@ -255,9 +255,7 @@ class OpenVINOEmbeddings(BaseModel, Embeddings):
return all_embeddings return all_embeddings
class Config: class Config:
"""Configuration for this pydantic object.""" extra = "forbid"
extra = Extra.forbid
def embed_documents(self, texts: List[str]) -> List[List[float]]: def embed_documents(self, texts: List[str]) -> List[List[float]]:
"""Compute doc embeddings using a HuggingFace transformer model. """Compute doc embeddings using a HuggingFace transformer model.

View File

@@ -1,7 +1,7 @@
from typing import Any, Dict, List, Optional from typing import Any, Dict, List, Optional
from langchain_core.embeddings import Embeddings from langchain_core.embeddings import Embeddings
from langchain_core.pydantic_v1 import BaseModel, Extra from langchain_core.pydantic_v1 import BaseModel
class QuantizedBiEncoderEmbeddings(BaseModel, Embeddings): class QuantizedBiEncoderEmbeddings(BaseModel, Embeddings):
@@ -101,9 +101,7 @@ For more information, please visit:
self.transformer_model.eval() self.transformer_model.eval()
class Config: class Config:
"""Configuration for this pydantic object.""" extra = "allow"
extra = Extra.allow
def _embed(self, inputs: Any) -> Any: def _embed(self, inputs: Any) -> Any:
try: try:
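
Unlike most classes in this diff, this one uses `extra = "allow"`, so unknown constructor arguments are kept on the instance instead of being rejected. A minimal, hypothetical example of that behavior under pydantic v1:

```python
from pydantic.v1 import BaseModel


class Flexible(BaseModel):
    model_name: str

    class Config:
        extra = "allow"


m = Flexible(model_name="bi-encoder", batch_size=32)
assert m.batch_size == 32        # extra fields are stored, not dropped or rejected
assert "batch_size" in m.dict()  # and they show up in serialization
```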

View File

@@ -14,7 +14,7 @@ import traceback
from typing import TYPE_CHECKING, Any, Dict, List, Optional from typing import TYPE_CHECKING, Any, Dict, List, Optional
from langchain_core.embeddings import Embeddings from langchain_core.embeddings import Embeddings
from langchain_core.pydantic_v1 import BaseModel, Extra from langchain_core.pydantic_v1 import BaseModel
if TYPE_CHECKING: if TYPE_CHECKING:
from oracledb import Connection from oracledb import Connection
@@ -38,9 +38,7 @@ class OracleEmbeddings(BaseModel, Embeddings):
super().__init__(**kwargs) super().__init__(**kwargs)
class Config: class Config:
"""Configuration for this pydantic object.""" extra = "forbid"
extra = Extra.forbid
""" """
1 - user needs to have create procedure, 1 - user needs to have create procedure,

View File

@@ -4,7 +4,7 @@ from typing import Any, List
import requests import requests
from langchain_core.embeddings import Embeddings from langchain_core.embeddings import Embeddings
from langchain_core.pydantic_v1 import BaseModel, Extra from langchain_core.pydantic_v1 import BaseModel
logger = logging.getLogger(__name__) logger = logging.getLogger(__name__)
@@ -24,9 +24,7 @@ class OVHCloudEmbeddings(BaseModel, Embeddings):
region: str = "kepler" region: str = "kepler"
class Config: class Config:
"""Configuration for this pydantic object.""" extra = "forbid"
extra = Extra.forbid
def __init__(self, **kwargs: Any): def __init__(self, **kwargs: Any):
super().__init__(**kwargs) super().__init__(**kwargs)

View File

@@ -1,7 +1,7 @@
from typing import Any, Dict, List, Optional from typing import Any, Dict, List, Optional
from langchain_core.embeddings import Embeddings from langchain_core.embeddings import Embeddings
from langchain_core.pydantic_v1 import BaseModel, Extra from langchain_core.pydantic_v1 import BaseModel
from langchain_core.utils import pre_init from langchain_core.utils import pre_init
from langchain_community.llms.sagemaker_endpoint import ContentHandlerBase from langchain_community.llms.sagemaker_endpoint import ContentHandlerBase
@@ -111,10 +111,8 @@ class SagemakerEndpointEmbeddings(BaseModel, Embeddings):
""" """
class Config: class Config:
"""Configuration for this pydantic object."""
extra = Extra.forbid
arbitrary_types_allowed = True arbitrary_types_allowed = True
extra = "forbid"
@pre_init @pre_init
def validate_environment(cls, values: Dict) -> Dict: def validate_environment(cls, values: Dict) -> Dict:
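
Here `arbitrary_types_allowed = True` stays next to the new `extra = "forbid"` literal: the class has fields typed with plain (non-pydantic) classes, which v1 only accepts when that flag is set. A hedged sketch using a stand-in handler class:

```python
from pydantic.v1 import BaseModel


class ContentHandler:
    """Plain Python class, not a pydantic model and with no validator."""


class Endpoint(BaseModel):
    endpoint_name: str
    content_handler: ContentHandler

    class Config:
        arbitrary_types_allowed = True  # without this, declaring the field raises at class definition
        extra = "forbid"


Endpoint(endpoint_name="my-endpoint", content_handler=ContentHandler())
```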

View File

@@ -1,7 +1,6 @@
from typing import Any, Callable, List from typing import Any, Callable, List
from langchain_core.embeddings import Embeddings from langchain_core.embeddings import Embeddings
from langchain_core.pydantic_v1 import Extra
from langchain_community.llms.self_hosted import SelfHostedPipeline from langchain_community.llms.self_hosted import SelfHostedPipeline
@@ -67,9 +66,7 @@ class SelfHostedEmbeddings(SelfHostedPipeline, Embeddings):
"""Any kwargs to pass to the model's inference function.""" """Any kwargs to pass to the model's inference function."""
class Config: class Config:
"""Configuration for this pydantic object.""" extra = "forbid"
extra = Extra.forbid
def embed_documents(self, texts: List[str]) -> List[List[float]]: def embed_documents(self, texts: List[str]) -> List[List[float]]:
"""Compute doc embeddings using a HuggingFace transformer model. """Compute doc embeddings using a HuggingFace transformer model.

View File

@@ -6,7 +6,7 @@ from typing import Any, Callable, Dict, List, Optional
import requests import requests
from langchain_core._api import deprecated from langchain_core._api import deprecated
from langchain_core.embeddings import Embeddings from langchain_core.embeddings import Embeddings
from langchain_core.pydantic_v1 import BaseModel, Extra, SecretStr from langchain_core.pydantic_v1 import BaseModel, SecretStr
from langchain_core.utils import convert_to_secret_str, get_from_dict_or_env, pre_init from langchain_core.utils import convert_to_secret_str, get_from_dict_or_env, pre_init
from tenacity import ( from tenacity import (
before_sleep_log, before_sleep_log,
@@ -76,9 +76,7 @@ class SolarEmbeddings(BaseModel, Embeddings):
"""API Key for Solar API.""" """API Key for Solar API."""
class Config: class Config:
"""Configuration for this pydantic object.""" extra = "forbid"
extra = Extra.forbid
@pre_init @pre_init
def validate_environment(cls, values: Dict) -> Dict: def validate_environment(cls, values: Dict) -> Dict:

View File

@@ -2,7 +2,7 @@ import importlib.util
from typing import Any, Dict, List, Optional from typing import Any, Dict, List, Optional
from langchain_core.embeddings import Embeddings from langchain_core.embeddings import Embeddings
from langchain_core.pydantic_v1 import BaseModel, Extra, root_validator from langchain_core.pydantic_v1 import BaseModel, root_validator
class SpacyEmbeddings(BaseModel, Embeddings): class SpacyEmbeddings(BaseModel, Embeddings):
@@ -23,9 +23,7 @@ class SpacyEmbeddings(BaseModel, Embeddings):
nlp: Optional[Any] = None nlp: Optional[Any] = None
class Config: class Config:
"""Configuration for this pydantic object.""" extra = "forbid"
extra = Extra.forbid # Forbid extra attributes during model initialization
@root_validator(pre=True) @root_validator(pre=True)
def validate_environment(cls, values: Dict) -> Dict: def validate_environment(cls, values: Dict) -> Dict:

View File

@@ -116,8 +116,6 @@ class SparkLLMTextEmbeddings(BaseModel, Embeddings):
If "query", it belongs to query Embedding.""" If "query", it belongs to query Embedding."""
class Config: class Config:
"""Configuration for this pydantic object"""
allow_population_by_field_name = True allow_population_by_field_name = True
@root_validator(allow_reuse=True) @root_validator(allow_reuse=True)

View File

@@ -1,7 +1,7 @@
from typing import Any, List from typing import Any, List
from langchain_core.embeddings import Embeddings from langchain_core.embeddings import Embeddings
from langchain_core.pydantic_v1 import BaseModel, Extra from langchain_core.pydantic_v1 import BaseModel
DEFAULT_MODEL_URL = "https://tfhub.dev/google/universal-sentence-encoder-multilingual/3" DEFAULT_MODEL_URL = "https://tfhub.dev/google/universal-sentence-encoder-multilingual/3"
@@ -44,9 +44,7 @@ class TensorflowHubEmbeddings(BaseModel, Embeddings):
self.embed = tensorflow_hub.load(self.model_url) self.embed = tensorflow_hub.load(self.model_url)
class Config: class Config:
"""Configuration for this pydantic object.""" extra = "forbid"
extra = Extra.forbid
def embed_documents(self, texts: List[str]) -> List[List[float]]: def embed_documents(self, texts: List[str]) -> List[List[float]]:
"""Compute doc embeddings using a TensorflowHub embedding model. """Compute doc embeddings using a TensorflowHub embedding model.

View File

@@ -17,7 +17,7 @@ import aiohttp
import numpy as np import numpy as np
import requests import requests
from langchain_core.embeddings import Embeddings from langchain_core.embeddings import Embeddings
from langchain_core.pydantic_v1 import BaseModel, Extra, root_validator from langchain_core.pydantic_v1 import BaseModel, root_validator
from langchain_core.utils import get_from_dict_or_env from langchain_core.utils import get_from_dict_or_env
__all__ = ["TextEmbedEmbeddings"] __all__ = ["TextEmbedEmbeddings"]
@@ -60,9 +60,7 @@ class TextEmbedEmbeddings(BaseModel, Embeddings):
"""TextEmbed client.""" """TextEmbed client."""
class Config: class Config:
"""Configuration for this pydantic object.""" extra = "forbid"
extra = Extra.forbid
@root_validator(pre=False, skip_on_failure=True) @root_validator(pre=False, skip_on_failure=True)
def validate_environment(cls, values: Dict) -> Dict: def validate_environment(cls, values: Dict) -> Dict:

View File

@@ -16,7 +16,7 @@ from typing import (
import requests import requests
from langchain_core._api.deprecation import deprecated from langchain_core._api.deprecation import deprecated
from langchain_core.embeddings import Embeddings from langchain_core.embeddings import Embeddings
from langchain_core.pydantic_v1 import BaseModel, Extra, SecretStr, root_validator from langchain_core.pydantic_v1 import BaseModel, SecretStr, root_validator
from langchain_core.utils import convert_to_secret_str, get_from_dict_or_env from langchain_core.utils import convert_to_secret_str, get_from_dict_or_env
from tenacity import ( from tenacity import (
before_sleep_log, before_sleep_log,
@@ -100,9 +100,7 @@ class VoyageEmbeddings(BaseModel, Embeddings):
raised if any given text exceeds the context length.""" raised if any given text exceeds the context length."""
class Config: class Config:
"""Configuration for this pydantic object.""" extra = "forbid"
extra = Extra.forbid
@root_validator(pre=True) @root_validator(pre=True)
def validate_environment(cls, values: Dict) -> Dict: def validate_environment(cls, values: Dict) -> Dict:

View File

@@ -72,8 +72,6 @@ class YandexGPTEmbeddings(BaseModel, Embeddings):
_grpc_metadata: Sequence _grpc_metadata: Sequence
class Config: class Config:
"""Configuration for this pydantic object."""
allow_population_by_field_name = True allow_population_by_field_name = True
@pre_init @pre_init

View File

@@ -3,7 +3,7 @@ from typing import Any, Dict, List, Optional, cast
import requests import requests
from langchain_core.callbacks import CallbackManagerForLLMRun from langchain_core.callbacks import CallbackManagerForLLMRun
from langchain_core.language_models.llms import LLM from langchain_core.language_models.llms import LLM
from langchain_core.pydantic_v1 import BaseModel, Extra, SecretStr from langchain_core.pydantic_v1 import BaseModel, SecretStr
from langchain_core.utils import convert_to_secret_str, get_from_dict_or_env, pre_init from langchain_core.utils import convert_to_secret_str, get_from_dict_or_env, pre_init
@@ -69,9 +69,7 @@ class AI21(LLM):
"""Base url to use, if None decides based on model name.""" """Base url to use, if None decides based on model name."""
class Config: class Config:
"""Configuration for this pydantic object.""" extra = "forbid"
extra = Extra.forbid
@pre_init @pre_init
def validate_environment(cls, values: Dict) -> Dict: def validate_environment(cls, values: Dict) -> Dict:

View File

@@ -2,7 +2,7 @@ from typing import Any, Dict, List, Optional, Sequence
from langchain_core.callbacks import CallbackManagerForLLMRun from langchain_core.callbacks import CallbackManagerForLLMRun
from langchain_core.language_models.llms import LLM from langchain_core.language_models.llms import LLM
from langchain_core.pydantic_v1 import Extra, SecretStr from langchain_core.pydantic_v1 import SecretStr
from langchain_core.utils import convert_to_secret_str, get_from_dict_or_env, pre_init from langchain_core.utils import convert_to_secret_str, get_from_dict_or_env, pre_init
from langchain_community.llms.utils import enforce_stop_tokens from langchain_community.llms.utils import enforce_stop_tokens
@@ -163,9 +163,7 @@ class AlephAlpha(LLM):
by de-prioritizing your request below concurrent ones.""" by de-prioritizing your request below concurrent ones."""
class Config: class Config:
"""Configuration for this pydantic object.""" extra = "forbid"
extra = Extra.forbid
@pre_init @pre_init
def validate_environment(cls, values: Dict) -> Dict: def validate_environment(cls, values: Dict) -> Dict:

View File

@@ -3,7 +3,6 @@ from typing import Any, Dict, List, Mapping, Optional
import requests import requests
from langchain_core.callbacks import CallbackManagerForLLMRun from langchain_core.callbacks import CallbackManagerForLLMRun
from langchain_core.language_models.llms import LLM from langchain_core.language_models.llms import LLM
from langchain_core.pydantic_v1 import Extra
from langchain_community.llms.utils import enforce_stop_tokens from langchain_community.llms.utils import enforce_stop_tokens
@@ -45,9 +44,7 @@ class AmazonAPIGateway(LLM):
""" """
class Config: class Config:
"""Configuration for this pydantic object.""" extra = "forbid"
extra = Extra.forbid
@property @property
def _identifying_params(self) -> Mapping[str, Any]: def _identifying_params(self) -> Mapping[str, Any]:

View File

@@ -181,8 +181,6 @@ class Anthropic(LLM, _AnthropicCommon):
""" """
class Config: class Config:
"""Configuration for this pydantic object."""
allow_population_by_field_name = True allow_population_by_field_name = True
arbitrary_types_allowed = True arbitrary_types_allowed = True

View File

@@ -2,7 +2,7 @@ from typing import Any, Dict, List, Optional, Union, cast
from langchain_core.callbacks import CallbackManagerForLLMRun from langchain_core.callbacks import CallbackManagerForLLMRun
from langchain_core.language_models.llms import LLM from langchain_core.language_models.llms import LLM
from langchain_core.pydantic_v1 import Extra, SecretStr, root_validator from langchain_core.pydantic_v1 import SecretStr, root_validator
from langchain_core.utils import convert_to_secret_str, get_from_dict_or_env from langchain_core.utils import convert_to_secret_str, get_from_dict_or_env
from langchain_community.utilities.arcee import ArceeWrapper, DALMFilter from langchain_community.utilities.arcee import ArceeWrapper, DALMFilter
@@ -52,9 +52,7 @@ class Arcee(LLM):
"""Keyword arguments to pass to the model.""" """Keyword arguments to pass to the model."""
class Config: class Config:
"""Configuration for this pydantic object.""" extra = "forbid"
extra = Extra.forbid
underscore_attrs_are_private = True underscore_attrs_are_private = True
@property @property
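
This class keeps `underscore_attrs_are_private = True` together with the new `extra = "forbid"` literal. In pydantic v1 that option turns underscore-prefixed attributes into private attributes that sit outside validation and serialization; a rough sketch with hypothetical names:

```python
from typing import Any, Optional

from pydantic.v1 import BaseModel


class Client(BaseModel):
    model: str
    _client: Optional[Any] = None  # becomes a private attribute, not a field

    class Config:
        extra = "forbid"
        underscore_attrs_are_private = True


c = Client(model="dalm")
c._client = object()              # settable after init, bypasses validation
assert "_client" not in c.dict()  # and excluded from dict()/json()
```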

View File

@@ -5,7 +5,7 @@ from typing import Any, Dict, List, Mapping, Optional, Union, cast
import requests import requests
from langchain_core.callbacks import CallbackManagerForLLMRun from langchain_core.callbacks import CallbackManagerForLLMRun
from langchain_core.language_models.llms import LLM from langchain_core.language_models.llms import LLM
from langchain_core.pydantic_v1 import Extra, root_validator from langchain_core.pydantic_v1 import root_validator
from langchain_core.utils import get_from_dict_or_env from langchain_core.utils import get_from_dict_or_env
from langchain_community.llms.utils import enforce_stop_tokens from langchain_community.llms.utils import enforce_stop_tokens
@@ -123,9 +123,7 @@ class Aviary(LLM):
version: Optional[str] = None version: Optional[str] = None
class Config: class Config:
"""Configuration for this pydantic object.""" extra = "forbid"
extra = Extra.forbid
@root_validator(pre=True) @root_validator(pre=True)
def validate_environment(cls, values: Dict) -> Dict: def validate_environment(cls, values: Dict) -> Dict:

View File

@@ -3,7 +3,7 @@ from typing import Any, Dict, List, Mapping, Optional, cast
from langchain_core.callbacks import CallbackManagerForLLMRun from langchain_core.callbacks import CallbackManagerForLLMRun
from langchain_core.language_models.llms import LLM from langchain_core.language_models.llms import LLM
from langchain_core.pydantic_v1 import Extra, Field, SecretStr, root_validator from langchain_core.pydantic_v1 import Field, SecretStr, root_validator
from langchain_core.utils import convert_to_secret_str, get_from_dict_or_env, pre_init from langchain_core.utils import convert_to_secret_str, get_from_dict_or_env, pre_init
from langchain_community.llms.utils import enforce_stop_tokens from langchain_community.llms.utils import enforce_stop_tokens
@@ -41,9 +41,7 @@ class Banana(LLM):
banana_api_key: Optional[SecretStr] = None banana_api_key: Optional[SecretStr] = None
class Config: class Config:
"""Configuration for this pydantic config.""" extra = "forbid"
extra = Extra.forbid
@root_validator(pre=True) @root_validator(pre=True)
def build_extra(cls, values: Dict[str, Any]) -> Dict[str, Any]: def build_extra(cls, values: Dict[str, Any]) -> Dict[str, Any]:

Some files were not shown because too many files have changed in this diff.