Mirror of https://github.com/imartinez/privateGPT.git (synced 2025-08-24 18:19:11 +00:00)
refines get_model_label()

refines get_model_label(): removes the reliance on the PGPT_PROFILES environment variable and instead uses settings().llm.mode. Possible modes: "local", "openai", "openailike", "sagemaker", "mock", "ollama".
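For context, a minimal sketch of the new lookup in isolation. settings() and the field names (llm.mode, ollama.model, and so on) come from the diff below; the make_settings() stub and the sample model names are hypothetical, just enough to make the snippet run on its own:

# Hedged sketch of the refactored lookup; make_settings() is a hypothetical
# stand-in for privateGPT's settings(), which is really built from YAML profiles.
from types import SimpleNamespace

def make_settings(mode: str) -> SimpleNamespace:
    # Only the fields read by get_model_label() are stubbed out here;
    # the model names are illustrative, not project defaults.
    return SimpleNamespace(
        llm=SimpleNamespace(mode=mode),
        local=SimpleNamespace(llm_hf_model_file="example-7b.Q4_K_M.gguf"),
        openai=SimpleNamespace(model="gpt-3.5-turbo"),
        sagemaker=SimpleNamespace(llm_endpoint_name="example-endpoint"),
        ollama=SimpleNamespace(model="llama2"),
    )

def model_label(config_settings: SimpleNamespace) -> str:
    # Same shape as the new get_model_label(): a dict keyed on llm.mode,
    # raising ValueError (instead of returning None) for unknown modes.
    llm_mode = config_settings.llm.mode
    model_mapping = {
        "local": config_settings.local.llm_hf_model_file,
        "openai": config_settings.openai.model,
        "openailike": config_settings.openai.model,
        "sagemaker": config_settings.sagemaker.llm_endpoint_name,
        "mock": llm_mode,
        "ollama": config_settings.ollama.model,
    }
    try:
        return model_mapping[llm_mode]
    except KeyError:
        raise ValueError(f"Invalid 'llm mode': {llm_mode}") from None

print(model_label(make_settings("ollama")))  # llama2
try:
    model_label(make_settings("vllm"))  # handled by the old PGPT_PROFILES branch
except ValueError as err:
    print(err)  # Invalid 'llm mode': vllm

Note that the old code mapped PGPT_PROFILES=vllm to settings().openai.model; after this commit "vllm" is not a recognized llm.mode, so that setup would presumably use mode "openailike" instead.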
parent 5620248aae
commit 133c1da13a
@@ -1,7 +1,6 @@
-"""This file should be imported only and only if you want to run the UI locally."""
+"""This file should be imported if and only if you want to run the UI locally."""
 import itertools
 import logging
-import os
 import time
 from collections.abc import Iterable
 from pathlib import Path
@@ -410,15 +409,40 @@ class PrivateGptUi:
                         inputs=system_prompt_input,
                     )
 
-                def get_model_label() -> str | None:
-                    # Determine the model label based on PGPT_PROFILES env variable.
-                    pgpt_profiles = os.environ.get("PGPT_PROFILES")
-                    if pgpt_profiles == "ollama":
-                        return settings().ollama.model
-                    elif pgpt_profiles == "vllm":
-                        return settings().openai.model
-                    else:
-                        return None
+                def get_model_label() -> str:
+                    """Get model label from llm mode setting YAML.
+
+                    Raises:
+                        ValueError: If an invalid 'llm_mode' is encountered.
+
+                    Returns:
+                        str: The corresponding model label.
+                    """
+                    # Get model label from llm mode setting YAML
+                    # Labels: local, openai, openailike, sagemaker, mock, ollama
+                    config_settings = settings()
+                    if config_settings is None:
+                        raise ValueError("Settings are not configured.")
+
+                    # Get llm_mode from settings
+                    llm_mode = config_settings.llm.mode
+
+                    # Mapping of 'llm_mode' to corresponding model labels
+                    model_mapping = {
+                        "local": config_settings.local.llm_hf_model_file,
+                        "openai": config_settings.openai.model,
+                        "openailike": config_settings.openai.model,
+                        "sagemaker": config_settings.sagemaker.llm_endpoint_name,
+                        "mock": llm_mode,
+                        "ollama": config_settings.ollama.model,
+                    }
+
+                    try:
+                        return model_mapping[llm_mode]
+                    except KeyError:
+                        raise ValueError(
+                            f"Invalid 'llm mode': {llm_mode}"
+                        ) from None
 
                 with gr.Column(scale=7, elem_id="col"):
                     # Determine the model label based on the value of PGPT_PROFILES
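One caller-side consequence, sketched below: code that used to treat a None label as "no model to show" must now be prepared for a ValueError instead. The wrapper takes the function as an argument purely so the sketch is self-contained; the name safe_label is hypothetical:

# Hypothetical adapter mapping the new ValueError contract back onto the old
# "None means no label" contract, for callers written against the old API.
import logging
from collections.abc import Callable

def safe_label(get_label: Callable[[], str]) -> str | None:
    try:
        return get_label()
    except ValueError as err:
        logging.getLogger(__name__).warning("Could not resolve model label: %s", err)
        return None

# usage inside PrivateGptUi: label = safe_label(get_model_label)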