Mirror of https://github.com/imartinez/privateGPT.git
Fix mypy

commit 70ca241a8b
parent a7b18058b5
@@ -113,7 +113,9 @@ class LLMComponent:
                 )
             case "tensorrt":
                 try:
-                    from llama_index.llms.nvidia_tensorrt import LocalTensorRTLLM  # type: ignore
+                    from llama_index.llms.nvidia_tensorrt import (  # type: ignore
+                        LocalTensorRTLLM,
+                    )
                 except ImportError as e:
                     raise ImportError(
                         "Nvidia TensorRTLLM dependencies not found, install with `poetry install --extras llms-nvidia-tensorrt`"
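For readability, here is a minimal sketch of how the guarded import reads after this hunk, lifted out of the real LLMComponent match statement into a standalone helper (the function name is hypothetical, not from the repo). The point of the parenthesized form is that the # type: ignore comment sits on the first line of the statement, which is where mypy reports the missing-stub error, so the suppression still covers the whole import.

def _load_tensorrt_llm_class():
    # Optional dependency: only importable when the tensorrt extra is installed.
    try:
        from llama_index.llms.nvidia_tensorrt import (  # type: ignore
            LocalTensorRTLLM,
        )
    except ImportError as e:
        # Re-raise with an actionable install hint, as the real component does.
        raise ImportError(
            "Nvidia TensorRTLLM dependencies not found, install with "
            "`poetry install --extras llms-nvidia-tensorrt`"
        ) from e
    return LocalTensorRTLLM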
@@ -81,7 +81,9 @@ class DataSettings(BaseModel):
 
 
 class LLMSettings(BaseModel):
-    mode: Literal["llamacpp", "openai", "openailike", "sagemaker", "mock", "ollama", "tensorrt"]
+    mode: Literal[
+        "llamacpp", "openai", "openailike", "sagemaker", "mock", "ollama", "tensorrt"
+    ]
     max_new_tokens: int = Field(
         256,
         description="The maximum number of token that the LLM is authorized to generate in one completion.",
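In context, the wrapped annotation reads as in the trimmed sketch below. The real LLMSettings declares further fields; the imports from pydantic and typing are an assumption based on the BaseModel and Field usage visible in the hunk. Wrapping the Literal keeps the list of allowed modes on one readable line without exceeding the line-length limit.

from typing import Literal

from pydantic import BaseModel, Field


class LLMSettings(BaseModel):
    # Allowed backends; values outside this Literal fail validation.
    mode: Literal[
        "llamacpp", "openai", "openailike", "sagemaker", "mock", "ollama", "tensorrt"
    ]
    max_new_tokens: int = Field(
        256,
        description="The maximum number of tokens that the LLM is authorized to generate in one completion.",
    )


# Usage: configuration values are validated against the Literal on construction.
settings = LLMSettings(mode="tensorrt")
assert settings.max_new_tokens == 256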