community[patch]: Fix more issues (#26116)

This PR resolves additional type-checking issues and fixes several bugs.
Eugene Yurtsev
2024-09-05 16:31:21 -04:00
committed by GitHub
parent 0cc6584889
commit e02b093d81
12 changed files with 43 additions and 27 deletions

View File

@@ -548,6 +548,9 @@ class ChatOCIGenAI(BaseChatModel, OCIGenAIBase):
         chat_params = {**_model_kwargs, **kwargs, **oci_params}
 
+        if not self.model_id:
+            raise ValueError("Model ID is required to chat")
+
         if self.model_id.startswith(CUSTOM_ENDPOINT_PREFIX):
             serving_mode = models.DedicatedServingMode(endpoint_id=self.model_id)
         else:

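Note on the guard added above: `model_id` is declared `Optional[str]`, so type checkers flag string methods on it until a None check narrows the type. A minimal standalone sketch of the pattern (the `get_provider` helper is hypothetical, not part of this diff):

from typing import Optional


def get_provider(model_id: Optional[str]) -> str:
    # Raising on a falsy value narrows Optional[str] to str for the
    # type checker, so the string methods below type-check cleanly.
    if not model_id:
        raise ValueError("Model ID is required to chat")
    return model_id.split(".")[0].lower()


print(get_provider("cohere.command"))  # -> cohere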
View File

@@ -8,7 +8,7 @@ from langchain_core.documents import Document
 from langchain_core.documents.compressor import (
     BaseDocumentCompressor,
 )
-from pydantic import ConfigDict, model_validator
+from pydantic import ConfigDict, Field, model_validator
 
 DEFAULT_LLM_LINGUA_INSTRUCTION = (
     "Given this documents, please answer the final question"
@@ -35,9 +35,9 @@ class LLMLinguaCompressor(BaseDocumentCompressor):
     """The target number of compressed tokens"""
     rank_method: str = "longllmlingua"
     """The ranking method to use"""
-    model_config: dict = {}
+    model_configuration: dict = Field(default_factory=dict, alias="model_config")
     """Custom configuration for the model"""
-    open_api_config: dict = {}
+    open_api_config: dict = Field(default_factory=dict)
     """open_api configuration"""
     instruction: str = DEFAULT_LLM_LINGUA_INSTRUCTION
     """The instruction for the LLM"""
@@ -75,6 +75,7 @@ class LLMLinguaCompressor(BaseDocumentCompressor):
     model_config = ConfigDict(
         arbitrary_types_allowed=True,
         extra="forbid",
+        populate_by_name=True,
     )
 
     @staticmethod

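Note on the rename above: pydantic v2 reserves the `model_config` attribute for model configuration, so the field becomes `model_configuration` while the alias keeps the old constructor keyword working. A minimal sketch of the alias plus `populate_by_name` mechanics, using a hypothetical `Example` model:

from pydantic import BaseModel, ConfigDict, Field


class Example(BaseModel):
    model_config = ConfigDict(populate_by_name=True)

    # The alias accepts the old keyword; populate_by_name also allows the new name.
    configuration: dict = Field(default_factory=dict, alias="config")


assert Example(config={"device": "cpu"}).configuration == {"device": "cpu"}
assert Example(configuration={"device": "cpu"}).configuration == {"device": "cpu"}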
View File

@@ -1,7 +1,7 @@
 """Loads data from OneNote Notebooks"""
 
 from pathlib import Path
-from typing import Dict, Iterator, List, Optional
+from typing import Any, Dict, Iterator, List, Optional
 
 import requests
 from langchain_core.documents import Document
@@ -10,6 +10,7 @@ from pydantic import (
     Field,
     FilePath,
     SecretStr,
+    model_validator,
 )
 from pydantic_settings import BaseSettings, SettingsConfigDict
@@ -17,13 +18,14 @@ from langchain_community.document_loaders.base import BaseLoader
 
 class _OneNoteGraphSettings(BaseSettings):
-    client_id: str = Field(..., alias="MS_GRAPH_CLIENT_ID")
-    client_secret: SecretStr = Field(..., alias="MS_GRAPH_CLIENT_SECRET")
+    client_id: str = Field(...)
+    client_secret: SecretStr = Field(...)
 
     model_config = SettingsConfigDict(
         case_sensitive=False,
+        populate_by_name=True,
         env_file=".env",
-        env_prefix="",
+        env_prefix="MS_GRAPH_",
     )
@@ -51,6 +53,14 @@ class OneNoteLoader(BaseLoader, BaseModel):
     object_ids: Optional[List[str]] = None
     """ The IDs of the objects to load data from."""
 
+    @model_validator(mode="before")
+    @classmethod
+    def init(cls, values: Dict) -> Any:
+        """Initialize the class."""
+        if "settings" in values and isinstance(values["settings"], dict):
+            values["settings"] = _OneNoteGraphSettings(**values["settings"])
+        return values
+
     def lazy_load(self) -> Iterator[Document]:
         """
         Get pages from OneNote notebooks.

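Two behaviors above are worth noting: `env_prefix="MS_GRAPH_"` makes plain field names resolve to prefixed environment variables (replacing the per-field aliases), and the new before-mode validator lets callers keep passing `settings` as a plain dict. A minimal sketch of the env_prefix behavior (the `GraphSettings` name is illustrative):

import os

from pydantic import SecretStr
from pydantic_settings import BaseSettings, SettingsConfigDict


class GraphSettings(BaseSettings):
    model_config = SettingsConfigDict(case_sensitive=False, env_prefix="MS_GRAPH_")

    client_id: str
    client_secret: SecretStr


# With the prefix, client_id is read from MS_GRAPH_CLIENT_ID.
os.environ["MS_GRAPH_CLIENT_ID"] = "my-app-id"
os.environ["MS_GRAPH_CLIENT_SECRET"] = "my-secret"
print(GraphSettings().client_id)  # -> my-app-id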
View File

@@ -181,6 +181,9 @@ class OCIGenAIEmbeddings(BaseModel, Embeddings):
         """
         from oci.generative_ai_inference import models
 
+        if not self.model_id:
+            raise ValueError("Model ID is required to embed documents")
+
         if self.model_id.startswith(CUSTOM_ENDPOINT_PREFIX):
             serving_mode = models.DedicatedServingMode(endpoint_id=self.model_id)
         else:

View File

@@ -1,4 +1,4 @@
-from typing import Any, Dict, Iterator, List, Optional
+from typing import Any, Callable, Dict, Iterator, List, Optional
 
 from langchain_core.callbacks import CallbackManagerForLLMRun
 from langchain_core.language_models import LLM
@@ -41,7 +41,7 @@ class ExLlamaV2(LLM):
     settings: Any = None
 
     # Langchain parameters
-    logfunc: callable = print
+    logfunc: Callable = print
     stop_sequences: List[str] = Field("")
     """Sequences that immediately will stop the generator."""

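The `callable` → `Callable` fix above matters because the builtin `callable` is a predicate function, not a type, so mypy rejects it as an annotation; `typing.Callable` is the accepted spelling. A minimal sketch:

from typing import Callable

logfunc: Callable = print  # annotation mypy accepts; builtin `callable` here would not be
logfunc("hello")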
View File

@@ -37,7 +37,7 @@ class BearlyInterpreterToolArguments(BaseModel):
     python_code: str = Field(
         ...,
-        example="print('Hello World')",
+        examples=["print('Hello World')"],
         description=(
             "The pure python script to be evaluated. "
             "The contents will be in main.py. "

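pydantic v2 expects `examples` as a list on `Field`; the v1-style singular `example=` is treated as a deprecated extra keyword. A minimal sketch showing where the value lands in the generated JSON schema (the `Args` model is hypothetical):

from pydantic import BaseModel, Field


class Args(BaseModel):
    python_code: str = Field(..., examples=["print('Hello World')"])


# The list is emitted under the property's "examples" key.
print(Args.model_json_schema()["properties"]["python_code"]["examples"])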
View File

@@ -1,4 +1,4 @@
-from typing import List, Optional
+from typing import Any, List, Optional
 
 from pydantic import BaseModel
 
@@ -15,7 +15,7 @@ class Parameter(BaseModel):
     key: str
     title: str
     description: Optional[str] = None
-    type: str
+    type: Any
 
     validation: Optional[Validation] = None

View File

@@ -154,8 +154,8 @@ class ConneryAction(BaseTool):
             type = param.type
 
             dynamic_input_fields[param.key] = (
-                str,
-                Field(default, title=title, description=description, type=type),
+                type,
+                Field(default, title=title, description=description),
             )
 
         InputModel = create_model("InputSchema", **dynamic_input_fields)

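For context on the `create_model` call above: each field is declared as a `(type, default-or-FieldInfo)` tuple, so threading the parameter's declared type through (instead of hard-coding `str`) changes both validation and the generated schema. A minimal sketch with hypothetical parameters:

from pydantic import Field, create_model

# Each entry is (type, FieldInfo); the first element now carries the real type.
InputModel = create_model(
    "InputSchema",
    count=(int, Field(0, title="Count", description="How many items")),
    name=(str, Field(..., title="Name", description="Item name")),
)

print(InputModel(count=3, name="widget"))  # -> count=3 name='widget'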
View File

@@ -84,7 +84,7 @@ class E2BDataAnalysisToolArguments(BaseModel):
     python_code: str = Field(
         ...,
-        example="print('Hello World')",
+        examples=["print('Hello World')"],
         description=(
             "The python script to be evaluated. "
             "The contents will be in main.py. "

View File

@@ -23,7 +23,11 @@ def test_llm_chat(monkeypatch: MonkeyPatch, test_model_id: str) -> None:
     oci_gen_ai_client = MagicMock()
     llm = ChatOCIGenAI(model_id=test_model_id, client=oci_gen_ai_client)
-    provider = llm.model_id.split(".")[0].lower()
+    model_id = llm.model_id
+    if model_id is None:
+        raise ValueError("Model ID is required for OCI Generative AI LLM service.")
+
+    provider = model_id.split(".")[0].lower()
 
     def mocked_response(*args):  # type: ignore[no-untyped-def]
         response_text = "Assistant chat reply."

View File

@@ -15,7 +15,6 @@ def test_initialization() -> None:
     os.environ["MS_GRAPH_CLIENT_SECRET"] = "CLIENT_SECRET"
 
     loader = OneNoteLoader(
-        settings={"client_id": "", "client_secret": ""},
         notebook_name="test_notebook",
         section_name="test_section",
         page_title="test_title",
@@ -40,7 +39,6 @@ def test_load(mocker: MockerFixture) -> None:
         return_value=mocker.MagicMock(json=lambda: {"value": []}, links=None),
     )
     loader = OneNoteLoader(
-        settings={"client_id": "", "client_secret": ""},
         notebook_name="test_notebook",
         section_name="test_section",
         page_title="test_title",
@@ -57,7 +55,6 @@ def test_load(mocker: MockerFixture) -> None:
         ),
     )
     loader = OneNoteLoader(
-        settings={"client_id": "", "client_secret": ""},
         object_ids=["test_id"],
         access_token="access_token",
     )
@@ -85,7 +82,6 @@ def test_msal_import(monkeypatch: MonkeyPatch, mocker: MockerFixture) -> None:
         return_value=FakeConfidentialClientApplication(),
     )
     loader = OneNoteLoader(
-        settings={"client_id": "", "client_secret": ""},
         notebook_name="test_notebook",
         section_name="test_section",
         page_title="test_title",
@@ -99,7 +95,6 @@ def test_url() -> None:
     os.environ["MS_GRAPH_CLIENT_SECRET"] = "CLIENT_SECRET"
 
     loader = OneNoteLoader(
-        settings={"client_id": "", "client_secret": ""},
         notebook_name="test_notebook",
         section_name="test_section",
         page_title="test_title",
@@ -115,7 +110,6 @@ def test_url() -> None:
     )
     loader = OneNoteLoader(
-        settings={"client_id": "", "client_secret": ""},
         notebook_name="test_notebook",
         section_name="test_section",
         access_token="access_token",
@@ -129,7 +123,6 @@ def test_url() -> None:
     )
     loader = OneNoteLoader(
-        settings={"client_id": "", "client_secret": ""},
         notebook_name="test_notebook",
         access_token="access_token",
         onenote_api_base_url="https://graph.microsoft.com/v1.0/me/onenote",
@@ -141,7 +134,6 @@ def test_url() -> None:
     )
     loader = OneNoteLoader(
-        settings={"client_id": "", "client_secret": ""},
         section_name="test_section",
         access_token="access_token",
         onenote_api_base_url="https://graph.microsoft.com/v1.0/me/onenote",
@@ -153,7 +145,6 @@ def test_url() -> None:
     )
     loader = OneNoteLoader(
-        settings={"client_id": "", "client_secret": ""},
         section_name="test_section",
         page_title="test_title",
         access_token="access_token",
@@ -167,7 +158,6 @@ def test_url() -> None:
     )
     loader = OneNoteLoader(
-        settings={"client_id": "", "client_secret": ""},
         page_title="test_title",
         access_token="access_token",
         onenote_api_base_url="https://graph.microsoft.com/v1.0/me/onenote",

View File

@@ -22,7 +22,12 @@ def test_llm_complete(monkeypatch: MonkeyPatch, test_model_id: str) -> None:
     oci_gen_ai_client = MagicMock()
     llm = OCIGenAI(model_id=test_model_id, client=oci_gen_ai_client)
-    provider = llm.model_id.split(".")[0].lower()
+    model_id = llm.model_id
+
+    if model_id is None:
+        raise ValueError("Model ID is required for OCI Generative AI LLM service.")
+
+    provider = model_id.split(".")[0].lower()
 
     def mocked_response(*args):  # type: ignore[no-untyped-def]
         response_text = "This is the completion."