diff --git a/libs/community/langchain_community/chat_models/oci_generative_ai.py b/libs/community/langchain_community/chat_models/oci_generative_ai.py
index 85a3e763b8b..a900e488ffa 100644
--- a/libs/community/langchain_community/chat_models/oci_generative_ai.py
+++ b/libs/community/langchain_community/chat_models/oci_generative_ai.py
@@ -548,6 +548,9 @@ class ChatOCIGenAI(BaseChatModel, OCIGenAIBase):
 
         chat_params = {**_model_kwargs, **kwargs, **oci_params}
 
+        if not self.model_id:
+            raise ValueError("Model ID is required to chat")
+
         if self.model_id.startswith(CUSTOM_ENDPOINT_PREFIX):
             serving_mode = models.DedicatedServingMode(endpoint_id=self.model_id)
         else:
diff --git a/libs/community/langchain_community/document_compressors/llmlingua_filter.py b/libs/community/langchain_community/document_compressors/llmlingua_filter.py
index 9e05c9318ea..0c88a045180 100644
--- a/libs/community/langchain_community/document_compressors/llmlingua_filter.py
+++ b/libs/community/langchain_community/document_compressors/llmlingua_filter.py
@@ -8,7 +8,7 @@ from langchain_core.documents import Document
 from langchain_core.documents.compressor import (
     BaseDocumentCompressor,
 )
-from pydantic import ConfigDict, model_validator
+from pydantic import ConfigDict, Field, model_validator
 
 DEFAULT_LLM_LINGUA_INSTRUCTION = (
     "Given this documents, please answer the final question"
@@ -35,9 +35,9 @@ class LLMLinguaCompressor(BaseDocumentCompressor):
     """The target number of compressed tokens"""
     rank_method: str = "longllmlingua"
     """The ranking method to use"""
-    model_config: dict = {}
+    model_configuration: dict = Field(default_factory=dict, alias="model_config")
     """Custom configuration for the model"""
-    open_api_config: dict = {}
+    open_api_config: dict = Field(default_factory=dict)
     """open_api configuration"""
     instruction: str = DEFAULT_LLM_LINGUA_INSTRUCTION
     """The instruction for the LLM"""
@@ -75,6 +75,7 @@ class LLMLinguaCompressor(BaseDocumentCompressor):
     model_config = ConfigDict(
         arbitrary_types_allowed=True,
         extra="forbid",
+        populate_by_name=True,
     )
 
     @staticmethod
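Note on the `llmlingua_filter.py` hunks above: in pydantic 2, `model_config` is a reserved class attribute that holds the model's configuration, so a *field* with that name collides with it. Renaming the field and keeping the old spelling as an alias (together with `populate_by_name=True`) preserves the public constructor keyword. A minimal standalone sketch of the pattern, with an illustrative class name rather than the real `LLMLinguaCompressor`:

```python
# Sketch of the rename-plus-alias pattern (hypothetical class, not the
# actual LLMLinguaCompressor): pydantic 2 reserves `model_config` for
# class configuration, so the field gets a new name and an alias.
from pydantic import BaseModel, ConfigDict, Field


class Compressor(BaseModel):
    model_config = ConfigDict(populate_by_name=True)

    # Field renamed; `alias` keeps the old constructor keyword working.
    model_configuration: dict = Field(default_factory=dict, alias="model_config")


# Both spellings populate the same field:
assert Compressor(model_config={"device": "cpu"}).model_configuration == {"device": "cpu"}
assert Compressor(model_configuration={"device": "cpu"}).model_configuration == {"device": "cpu"}
```

Because both spellings validate to the same field, existing callers that pass `model_config=...` keep working unchanged.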
diff --git a/libs/community/langchain_community/document_loaders/onenote.py b/libs/community/langchain_community/document_loaders/onenote.py
index 33022e8dbcf..c58640e0846 100644
--- a/libs/community/langchain_community/document_loaders/onenote.py
+++ b/libs/community/langchain_community/document_loaders/onenote.py
@@ -1,7 +1,7 @@
 """Loads data from OneNote Notebooks"""
 
 from pathlib import Path
-from typing import Dict, Iterator, List, Optional
+from typing import Any, Dict, Iterator, List, Optional
 
 import requests
 from langchain_core.documents import Document
@@ -10,6 +10,7 @@ from pydantic import (
     Field,
     FilePath,
     SecretStr,
+    model_validator,
 )
 from pydantic_settings import BaseSettings, SettingsConfigDict
 
@@ -17,13 +18,14 @@ from langchain_community.document_loaders.base import BaseLoader
 
 
 class _OneNoteGraphSettings(BaseSettings):
-    client_id: str = Field(..., alias="MS_GRAPH_CLIENT_ID")
-    client_secret: SecretStr = Field(..., alias="MS_GRAPH_CLIENT_SECRET")
+    client_id: str = Field(...)
+    client_secret: SecretStr = Field(...)
 
     model_config = SettingsConfigDict(
         case_sensitive=False,
+        populate_by_name=True,
         env_file=".env",
-        env_prefix="",
+        env_prefix="MS_GRAPH_",
     )
 
 
@@ -51,6 +53,14 @@ class OneNoteLoader(BaseLoader, BaseModel):
     object_ids: Optional[List[str]] = None
     """ The IDs of the objects to load data from."""
 
+    @model_validator(mode="before")
+    @classmethod
+    def init(cls, values: Dict) -> Any:
+        """Initialize the class."""
+        if "settings" in values and isinstance(values["settings"], dict):
+            values["settings"] = _OneNoteGraphSettings(**values["settings"])
+        return values
+
     def lazy_load(self) -> Iterator[Document]:
         """
         Get pages from OneNote notebooks.
diff --git a/libs/community/langchain_community/embeddings/oci_generative_ai.py b/libs/community/langchain_community/embeddings/oci_generative_ai.py
index ff735b90512..72c4059d2be 100644
--- a/libs/community/langchain_community/embeddings/oci_generative_ai.py
+++ b/libs/community/langchain_community/embeddings/oci_generative_ai.py
@@ -181,6 +181,9 @@ class OCIGenAIEmbeddings(BaseModel, Embeddings):
         """
         from oci.generative_ai_inference import models
 
+        if not self.model_id:
+            raise ValueError("Model ID is required to embed documents")
+
         if self.model_id.startswith(CUSTOM_ENDPOINT_PREFIX):
             serving_mode = models.DedicatedServingMode(endpoint_id=self.model_id)
         else:
diff --git a/libs/community/langchain_community/llms/exllamav2.py b/libs/community/langchain_community/llms/exllamav2.py
index 95de8468e19..d91a38034c2 100644
--- a/libs/community/langchain_community/llms/exllamav2.py
+++ b/libs/community/langchain_community/llms/exllamav2.py
@@ -1,4 +1,4 @@
-from typing import Any, Dict, Iterator, List, Optional
+from typing import Any, Callable, Dict, Iterator, List, Optional
 
 from langchain_core.callbacks import CallbackManagerForLLMRun
 from langchain_core.language_models import LLM
@@ -41,7 +41,7 @@ class ExLlamaV2(LLM):
     settings: Any = None
 
     # Langchain parameters
-    logfunc: callable = print
+    logfunc: Callable = print
     stop_sequences: List[str] = Field("")
     """Sequences that immediately will stop the generator."""
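The `onenote.py` change replaces the per-field environment aliases with a single `env_prefix`, and the new `model_validator` lets callers keep passing `settings` as a plain dict. A hedged sketch of how `env_prefix` resolves fields, using a standalone model that mirrors `_OneNoteGraphSettings` (values illustrative):

```python
# Sketch of env_prefix resolution (hypothetical standalone model mirroring
# _OneNoteGraphSettings): each field is looked up as MS_GRAPH_<FIELD_NAME>,
# so no per-field alias is needed.
import os

from pydantic import SecretStr
from pydantic_settings import BaseSettings, SettingsConfigDict


class GraphSettings(BaseSettings):
    model_config = SettingsConfigDict(case_sensitive=False, env_prefix="MS_GRAPH_")

    client_id: str
    client_secret: SecretStr


os.environ["MS_GRAPH_CLIENT_ID"] = "CLIENT_ID"
os.environ["MS_GRAPH_CLIENT_SECRET"] = "CLIENT_SECRET"

settings = GraphSettings()  # fields resolved from the MS_GRAPH_* variables
assert settings.client_id == "CLIENT_ID"
```

Separately, the `exllamav2.py` fix swaps the builtin `callable` (a function, not a type) for `typing.Callable`, which is a valid annotation that pydantic 2 can actually validate.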
" diff --git a/libs/community/langchain_community/tools/connery/models.py b/libs/community/langchain_community/tools/connery/models.py index 92554ea754d..537f58bea13 100644 --- a/libs/community/langchain_community/tools/connery/models.py +++ b/libs/community/langchain_community/tools/connery/models.py @@ -1,4 +1,4 @@ -from typing import List, Optional +from typing import Any, List, Optional from pydantic import BaseModel @@ -15,7 +15,7 @@ class Parameter(BaseModel): key: str title: str description: Optional[str] = None - type: str + type: Any validation: Optional[Validation] = None diff --git a/libs/community/langchain_community/tools/connery/tool.py b/libs/community/langchain_community/tools/connery/tool.py index b5c9d5f5f79..d74bfc1743e 100644 --- a/libs/community/langchain_community/tools/connery/tool.py +++ b/libs/community/langchain_community/tools/connery/tool.py @@ -154,8 +154,8 @@ class ConneryAction(BaseTool): type = param.type dynamic_input_fields[param.key] = ( - str, - Field(default, title=title, description=description, type=type), + type, + Field(default, title=title, description=description), ) InputModel = create_model("InputSchema", **dynamic_input_fields) diff --git a/libs/community/langchain_community/tools/e2b_data_analysis/tool.py b/libs/community/langchain_community/tools/e2b_data_analysis/tool.py index 7e780c60a64..f9705086a98 100644 --- a/libs/community/langchain_community/tools/e2b_data_analysis/tool.py +++ b/libs/community/langchain_community/tools/e2b_data_analysis/tool.py @@ -84,7 +84,7 @@ class E2BDataAnalysisToolArguments(BaseModel): python_code: str = Field( ..., - example="print('Hello World')", + examples=["print('Hello World')"], description=( "The python script to be evaluated. " "The contents will be in main.py. " diff --git a/libs/community/tests/unit_tests/chat_models/test_oci_generative_ai.py b/libs/community/tests/unit_tests/chat_models/test_oci_generative_ai.py index a59893e8b21..15ca91eea58 100644 --- a/libs/community/tests/unit_tests/chat_models/test_oci_generative_ai.py +++ b/libs/community/tests/unit_tests/chat_models/test_oci_generative_ai.py @@ -23,7 +23,11 @@ def test_llm_chat(monkeypatch: MonkeyPatch, test_model_id: str) -> None: oci_gen_ai_client = MagicMock() llm = ChatOCIGenAI(model_id=test_model_id, client=oci_gen_ai_client) - provider = llm.model_id.split(".")[0].lower() + model_id = llm.model_id + if model_id is None: + raise ValueError("Model ID is required for OCI Generative AI LLM service.") + + provider = model_id.split(".")[0].lower() def mocked_response(*args): # type: ignore[no-untyped-def] response_text = "Assistant chat reply." 
diff --git a/libs/community/langchain_community/tools/e2b_data_analysis/tool.py b/libs/community/langchain_community/tools/e2b_data_analysis/tool.py
index 7e780c60a64..f9705086a98 100644
--- a/libs/community/langchain_community/tools/e2b_data_analysis/tool.py
+++ b/libs/community/langchain_community/tools/e2b_data_analysis/tool.py
@@ -84,7 +84,7 @@ class E2BDataAnalysisToolArguments(BaseModel):
 
     python_code: str = Field(
         ...,
-        example="print('Hello World')",
+        examples=["print('Hello World')"],
         description=(
             "The python script to be evaluated. "
             "The contents will be in main.py. "
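Both this hunk and the `bearly/tool.py` one above make the same fix: pydantic 2 replaces the `example=` keyword with `examples=`, which takes a list and is emitted into the generated JSON schema. A minimal sketch on a standalone model, not the actual tool schema:

```python
# Sketch of the example -> examples rename (hypothetical standalone model):
# pydantic 2 expects a list and includes it in the JSON schema output.
from pydantic import BaseModel, Field


class Args(BaseModel):
    python_code: str = Field(..., examples=["print('Hello World')"])


schema = Args.model_json_schema()
assert schema["properties"]["python_code"]["examples"] == ["print('Hello World')"]
```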
diff --git a/libs/community/tests/unit_tests/chat_models/test_oci_generative_ai.py b/libs/community/tests/unit_tests/chat_models/test_oci_generative_ai.py
index a59893e8b21..15ca91eea58 100644
--- a/libs/community/tests/unit_tests/chat_models/test_oci_generative_ai.py
+++ b/libs/community/tests/unit_tests/chat_models/test_oci_generative_ai.py
@@ -23,7 +23,11 @@ def test_llm_chat(monkeypatch: MonkeyPatch, test_model_id: str) -> None:
     oci_gen_ai_client = MagicMock()
     llm = ChatOCIGenAI(model_id=test_model_id, client=oci_gen_ai_client)
 
-    provider = llm.model_id.split(".")[0].lower()
+    model_id = llm.model_id
+    if model_id is None:
+        raise ValueError("Model ID is required for OCI Generative AI LLM service.")
+
+    provider = model_id.split(".")[0].lower()
 
     def mocked_response(*args):  # type: ignore[no-untyped-def]
         response_text = "Assistant chat reply."
diff --git a/libs/community/tests/unit_tests/document_loaders/test_onenote.py b/libs/community/tests/unit_tests/document_loaders/test_onenote.py
index 7d8e1590ccc..40862ab2672 100644
--- a/libs/community/tests/unit_tests/document_loaders/test_onenote.py
+++ b/libs/community/tests/unit_tests/document_loaders/test_onenote.py
@@ -15,7 +15,6 @@ def test_initialization() -> None:
     os.environ["MS_GRAPH_CLIENT_SECRET"] = "CLIENT_SECRET"
 
     loader = OneNoteLoader(
-        settings={"client_id": "", "client_secret": ""},
         notebook_name="test_notebook",
         section_name="test_section",
         page_title="test_title",
@@ -40,7 +39,6 @@ def test_load(mocker: MockerFixture) -> None:
         return_value=mocker.MagicMock(json=lambda: {"value": []}, links=None),
     )
     loader = OneNoteLoader(
-        settings={"client_id": "", "client_secret": ""},
         notebook_name="test_notebook",
         section_name="test_section",
         page_title="test_title",
@@ -57,7 +55,6 @@ def test_load(mocker: MockerFixture) -> None:
         ),
     )
     loader = OneNoteLoader(
-        settings={"client_id": "", "client_secret": ""},
         object_ids=["test_id"],
         access_token="access_token",
     )
@@ -85,7 +82,6 @@ def test_msal_import(monkeypatch: MonkeyPatch, mocker: MockerFixture) -> None:
         return_value=FakeConfidentialClientApplication(),
     )
     loader = OneNoteLoader(
-        settings={"client_id": "", "client_secret": ""},
         notebook_name="test_notebook",
         section_name="test_section",
         page_title="test_title",
@@ -99,7 +95,6 @@ def test_url() -> None:
     os.environ["MS_GRAPH_CLIENT_SECRET"] = "CLIENT_SECRET"
 
     loader = OneNoteLoader(
-        settings={"client_id": "", "client_secret": ""},
         notebook_name="test_notebook",
         section_name="test_section",
         page_title="test_title",
@@ -115,7 +110,6 @@ def test_url() -> None:
     )
 
     loader = OneNoteLoader(
-        settings={"client_id": "", "client_secret": ""},
         notebook_name="test_notebook",
         section_name="test_section",
         access_token="access_token",
@@ -129,7 +123,6 @@ def test_url() -> None:
     )
 
     loader = OneNoteLoader(
-        settings={"client_id": "", "client_secret": ""},
         notebook_name="test_notebook",
         access_token="access_token",
         onenote_api_base_url="https://graph.microsoft.com/v1.0/me/onenote",
@@ -141,7 +134,6 @@ def test_url() -> None:
     )
 
     loader = OneNoteLoader(
-        settings={"client_id": "", "client_secret": ""},
         section_name="test_section",
         access_token="access_token",
         onenote_api_base_url="https://graph.microsoft.com/v1.0/me/onenote",
@@ -153,7 +145,6 @@ def test_url() -> None:
     )
 
     loader = OneNoteLoader(
-        settings={"client_id": "", "client_secret": ""},
         section_name="test_section",
         page_title="test_title",
         access_token="access_token",
@@ -167,7 +158,6 @@ def test_url() -> None:
     )
 
     loader = OneNoteLoader(
-        settings={"client_id": "", "client_secret": ""},
         page_title="test_title",
         access_token="access_token",
         onenote_api_base_url="https://graph.microsoft.com/v1.0/me/onenote",
diff --git a/libs/community/tests/unit_tests/llms/test_oci_generative_ai.py b/libs/community/tests/unit_tests/llms/test_oci_generative_ai.py
index df600d4d775..d0472abf86c 100644
--- a/libs/community/tests/unit_tests/llms/test_oci_generative_ai.py
+++ b/libs/community/tests/unit_tests/llms/test_oci_generative_ai.py
@@ -22,7 +22,12 @@ def test_llm_complete(monkeypatch: MonkeyPatch, test_model_id: str) -> None:
     oci_gen_ai_client = MagicMock()
     llm = OCIGenAI(model_id=test_model_id, client=oci_gen_ai_client)
 
-    provider = llm.model_id.split(".")[0].lower()
+    model_id = llm.model_id
+
+    if model_id is None:
+        raise ValueError("Model ID is required for OCI Generative AI LLM service.")
+
+    provider = model_id.split(".")[0].lower()
 
     def mocked_response(*args):  # type: ignore[no-untyped-def]
         response_text = "This is the completion."
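The two OCI test changes share one pattern: `model_id` is declared `Optional[str]`, so the tests bind it to a local and raise on `None`, which lets mypy narrow the type before `.split(".")` is called. A minimal sketch of the narrowing, with a hypothetical helper name:

```python
# Why the tests guard before .split(): a None check narrows Optional[str]
# to str, so the attribute access type-checks under mypy.
from typing import Optional


def provider_of(model_id: Optional[str]) -> str:
    if model_id is None:
        raise ValueError("Model ID is required for OCI Generative AI LLM service.")
    return model_id.split(".")[0].lower()  # model_id is now `str` for mypy


assert provider_of("cohere.command-r") == "cohere"
```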