Mirror of https://github.com/hwchase17/langchain.git (synced 2025-07-04 04:07:54 +00:00)

commit d9ce5f4464 (parent d6e2af40eb)

    lint
@@ -28,7 +28,7 @@ from typing_extensions import Self
 if TYPE_CHECKING:
     import openai
-    from openai.types.beta.threads import ThreadMessage
+    from openai.types.beta.threads import ThreadMessage  # type: ignore[attr-defined]
     from openai.types.beta.threads.required_action_function_tool_call import (
         RequiredActionFunctionToolCall,
     )
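Note: `ThreadMessage` is only exported by older openai SDKs, so mypy run against a newer SDK reports `attr-defined` on this import; the ignore keeps the annotation-only import working for older versions. A minimal sketch of the pattern (not this commit's code; the `Any` runtime fallback is illustrative only):

from typing import TYPE_CHECKING, Any

if TYPE_CHECKING:
    # Only evaluated by type checkers; flagged on SDKs that dropped the name.
    from openai.types.beta.threads import ThreadMessage  # type: ignore[attr-defined]
else:
    ThreadMessage = Any  # runtime placeholder so newer SDKs still import cleanly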
@@ -590,7 +590,8 @@ class OpenAIAssistantRunnable(RunnableSerializable[Dict, OutputType]):
         isinstance(content, openai.types.beta.threads.TextContentBlock)
         if version_gte_1_14
         else isinstance(
-            content, openai.types.beta.threads.MessageContentText
+            content,
+            openai.types.beta.threads.MessageContentText,  # type: ignore[attr-defined]
         )
     )
     for content in answer
@@ -743,7 +744,8 @@ class OpenAIAssistantRunnable(RunnableSerializable[Dict, OutputType]):
         isinstance(content, openai.types.beta.threads.TextContentBlock)
         if version_gte_1_14
         else isinstance(
-            content, openai.types.beta.threads.MessageContentText
+            content,
+            openai.types.beta.threads.MessageContentText,  # type: ignore[attr-defined]
         )
     )
     for content in answer
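Note: both hunks above branch on `version_gte_1_14`, since openai 1.14 is where `MessageContentText` gave way to `TextContentBlock`. A sketch of how such a gate could be computed (not necessarily the repo's helper; `packaging` is an assumption):

from packaging.version import parse

import openai

# True on SDKs that expose TextContentBlock; False on older SDKs that still
# ship MessageContentText.
version_gte_1_14 = parse(openai.__version__) >= parse("1.14")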
@@ -236,7 +236,9 @@ class FlareChain(Chain):
                 "Please install langchain-openai."
                 "pip install langchain-openai"
             )
-        llm = ChatOpenAI(max_tokens=max_generation_len, logprobs=True, temperature=0)
+        llm = ChatOpenAI(
+            max_completion_tokens=max_generation_len, logprobs=True, temperature=0
+        )
         response_chain = PROMPT | llm
         question_gen_chain = QUESTION_GENERATOR_PROMPT | llm | StrOutputParser()
         return cls(
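Note: the hunk swaps `max_tokens` for `max_completion_tokens`, the newer Chat Completions parameter name. A standalone sketch mirroring the updated call (assumes langchain-openai is installed and OPENAI_API_KEY is set):

from langchain_openai import ChatOpenAI

# Same keyword as in the hunk; 32 is an illustrative generation budget.
llm = ChatOpenAI(max_completion_tokens=32, logprobs=True, temperature=0)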
@@ -65,7 +65,7 @@ class OpenAIModerationChain(Chain):
         except ValueError:
             values["openai_pre_1_0"] = True
         if values["openai_pre_1_0"]:
-            values["client"] = openai.Moderation
+            values["client"] = openai.Moderation  # type: ignore[attr-defined]
         else:
             values["client"] = openai.OpenAI(api_key=openai_api_key)
             values["async_client"] = openai.AsyncOpenAI(api_key=openai_api_key)
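Note: `openai.Moderation` only exists in openai < 1.0, hence the `attr-defined` ignore; on 1.x the chain switches to the client objects. A sketch of the two client styles (the version check here is an assumption, not the chain's exact logic):

from packaging.version import parse

import openai

openai_pre_1_0 = parse(openai.__version__) < parse("1.0")
if openai_pre_1_0:
    client = openai.Moderation  # type: ignore[attr-defined]  # removed in openai>=1.0
else:
    client = openai.OpenAI()  # assumes OPENAI_API_KEY is set in the environment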
@@ -30,7 +30,9 @@ def _embedding_factory() -> Embeddings:
        from langchain_openai import OpenAIEmbeddings
    except ImportError:
        try:
-            from langchain_community.embeddings.openai import OpenAIEmbeddings
+            from langchain_community.embeddings.openai import (  # type: ignore[no-redef]
+                OpenAIEmbeddings,
+            )
        except ImportError:
            raise ImportError(
                "Could not import OpenAIEmbeddings. Please install the "
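Note: the `no-redef` ignore is needed because the fallback branch rebinds a name already imported in the `try` branch, and mypy treats the second import as a redefinition even though only one branch ever executes. The pattern in isolation:

try:
    # Preferred import from the dedicated integration package.
    from langchain_openai import OpenAIEmbeddings
except ImportError:
    # Fallback rebinds the same name, which mypy flags without the ignore.
    from langchain_community.embeddings.openai import (  # type: ignore[no-redef]
        OpenAIEmbeddings,
    )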
@@ -89,7 +91,9 @@ class _EmbeddingDistanceChainMixin(Chain):
            pass

        try:
-            from langchain_community.embeddings.openai import OpenAIEmbeddings
+            from langchain_community.embeddings.openai import (  # type: ignore[no-redef]
+                OpenAIEmbeddings,
+            )

            types_.append(OpenAIEmbeddings)
        except ImportError:
@@ -135,7 +135,9 @@ def load_evaluator(
        from langchain_openai import ChatOpenAI
    except ImportError:
        try:
-            from langchain_community.chat_models.openai import ChatOpenAI
+            from langchain_community.chat_models.openai import (  # type: ignore[no-redef]
+                ChatOpenAI,
+            )
        except ImportError:
            raise ImportError(
                "Could not import langchain_openai or fallback onto "
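Note: `load_evaluator` needs `ChatOpenAI` importable because it constructs a default LLM when none is supplied. A brief usage sketch (assumes an OpenAI key is configured and either langchain-openai or the community fallback is installed):

from langchain.evaluation import EvaluatorType, load_evaluator

# With no llm argument, load_evaluator falls back to a default ChatOpenAI,
# resolved through the import chain patched in the hunk above.
evaluator = load_evaluator(EvaluatorType.QA)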