Update ui.py

Related to Issue: Add Model Information to ChatInterface label in private_gpt/ui/ui.py #1647

Introduces a new function `get_model_label` that dynamically determines the model label based on the `PGPT_PROFILES` environment variable. The function returns the model name when the variable is set to either "ollama" or "vllm", and `None` otherwise.

The `get_model_label` function is then used to build the label text for the chatbot interface, combining the LLM mode with the model label when one is available. This change lets the UI display the correct model name for the user's configuration.

Please review the changes and let me know if you have any feedback or suggestions. Thank you!
This commit is contained in:
Ingrid Stevens 2024-02-24 15:15:12 +01:00 committed by GitHub
parent 12f3a39e8a
commit 5620248aae
No known key found for this signature in database
GPG Key ID: B5690EEEBB952194

View File

@ -1,6 +1,7 @@
"""This file should be imported only and only if you want to run the UI locally."""
import itertools
import logging
import os
import time
from collections.abc import Iterable
from pathlib import Path
@ -409,11 +410,30 @@ class PrivateGptUi:
inputs=system_prompt_input,
)
def get_model_label() -> str | None:
# Determine the model label based on PGPT_PROFILES env variable.
pgpt_profiles = os.environ.get("PGPT_PROFILES")
if pgpt_profiles == "ollama":
return settings().ollama.model
elif pgpt_profiles == "vllm":
return settings().openai.model
else:
return None
with gr.Column(scale=7, elem_id="col"):
# Determine the model label based on the value of PGPT_PROFILES
model_label = get_model_label()
if model_label is not None:
label_text = (
f"LLM: {settings().llm.mode} | Model: {model_label}"
)
else:
label_text = f"LLM: {settings().llm.mode}"
_ = gr.ChatInterface(
self._chat,
chatbot=gr.Chatbot(
label=f"LLM: {settings().llm.mode}",
label=label_text,
show_copy_button=True,
elem_id="chatbot",
render=False,