feat(llm): drop default_system_prompt (#1385)
As discussed on Discord, the decision has been made to remove the system prompts by default, to better separate API and UI usage. A concurrent PR (#1353) enables setting a system prompt dynamically in the UI. Therefore, UI users who want a custom system prompt can specify one directly in the UI, and API users can pass one directly in the messages they send to the API. In light of these two use cases, it becomes clear that a default system_prompt does not need to exist.
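For illustration, here is a minimal sketch of the API-side use case described above: the server no longer injects a default system prompt, so the client supplies its own as a regular message. The endpoint path, port, and response shape are assumptions based on privateGPT's OpenAI-compatible API defaults, not part of this change.

import requests

# Hypothetical client call against a locally running instance
# (URL/port are assumptions, not defined by this commit).
response = requests.post(
    "http://localhost:8001/v1/chat/completions",
    json={
        "messages": [
            # The caller includes the system prompt explicitly.
            {"role": "system", "content": "You are a concise assistant."},
            {"role": "user", "content": "Summarize this repository."},
        ]
    },
    timeout=60,
)
# Assuming an OpenAI-style response shape:
print(response.json()["choices"][0]["message"]["content"])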
@@ -23,10 +23,7 @@ class LLMComponent:
             case "local":
                 from llama_index.llms import LlamaCPP
 
-                prompt_style_cls = get_prompt_style(settings.local.prompt_style)
-                prompt_style = prompt_style_cls(
-                    default_system_prompt=settings.local.default_system_prompt
-                )
+                prompt_style = get_prompt_style(settings.local.prompt_style)
 
                 self.llm = LlamaCPP(
                     model_path=str(models_path / settings.local.llm_hf_model_file),
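For context, a sketch of how the resulting prompt_style instance is likely consumed just below this hunk; the keyword wiring into LlamaCPP is an assumption about the elided arguments, shown only to illustrate why a ready-made instance now suffices. (The remaining hunks below are in the prompt-style helper module.)

# Assumed continuation of the call above: the factory now returns a
# configured instance, whose bound methods can be handed to the LLM directly.
prompt_style = get_prompt_style(settings.local.prompt_style)
self.llm = LlamaCPP(
    model_path=str(models_path / settings.local.llm_hf_model_file),
    messages_to_prompt=prompt_style.messages_to_prompt,      # assumed wiring
    completion_to_prompt=prompt_style.completion_to_prompt,  # assumed wiring
)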
@@ -5,7 +5,6 @@ from typing import Any, Literal
 
 from llama_index.llms import ChatMessage, MessageRole
 from llama_index.llms.llama_utils import (
-    DEFAULT_SYSTEM_PROMPT,
     completion_to_prompt,
     messages_to_prompt,
 )
@@ -29,7 +28,6 @@ class AbstractPromptStyle(abc.ABC):
     series of messages into a prompt.
     """
 
-    @abc.abstractmethod
     def __init__(self, *args: Any, **kwargs: Any) -> None:
         logger.debug("Initializing prompt_style=%s", self.__class__.__name__)
 
@@ -52,15 +50,6 @@ class AbstractPromptStyle(abc.ABC):
         return prompt
 
 
-class AbstractPromptStyleWithSystemPrompt(AbstractPromptStyle, abc.ABC):
-    _DEFAULT_SYSTEM_PROMPT = DEFAULT_SYSTEM_PROMPT
-
-    def __init__(self, default_system_prompt: str | None) -> None:
-        super().__init__()
-        logger.debug("Got default_system_prompt='%s'", default_system_prompt)
-        self.default_system_prompt = default_system_prompt
-
-
 class DefaultPromptStyle(AbstractPromptStyle):
     """Default prompt style that uses the defaults from llama_utils.
 
@@ -83,7 +72,7 @@ class DefaultPromptStyle(AbstractPromptStyle):
         return ""
 
 
-class Llama2PromptStyle(AbstractPromptStyleWithSystemPrompt):
+class Llama2PromptStyle(AbstractPromptStyle):
     """Simple prompt style that just uses the default llama_utils functions.
 
     It transforms the sequence of messages into a prompt that should look like:
@@ -94,18 +83,14 @@ class Llama2PromptStyle(AbstractPromptStyleWithSystemPrompt):
     ```
     """
 
-    def __init__(self, default_system_prompt: str | None = None) -> None:
-        # If no system prompt is given, the default one of the implementation is used.
-        super().__init__(default_system_prompt=default_system_prompt)
-
     def _messages_to_prompt(self, messages: Sequence[ChatMessage]) -> str:
-        return messages_to_prompt(messages, self.default_system_prompt)
+        return messages_to_prompt(messages)
 
     def _completion_to_prompt(self, completion: str) -> str:
-        return completion_to_prompt(completion, self.default_system_prompt)
+        return completion_to_prompt(completion)
 
 
-class TagPromptStyle(AbstractPromptStyleWithSystemPrompt):
+class TagPromptStyle(AbstractPromptStyle):
     """Tag prompt style (used by Vigogne) that uses the prompt style `<|ROLE|>`.
 
     It transforms the sequence of messages into a prompt that should look like:
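Dropping the explicit system prompt is safe for Llama2PromptStyle because llama_index's llama_utils helpers fall back to their own built-in default when none is passed (as the removed comment noted). A rough sketch of that behavior, based on the llama_index version in use at the time; the exact template rendering is approximate:

from llama_index.llms import ChatMessage, MessageRole
from llama_index.llms.llama_utils import messages_to_prompt

# With no system_prompt argument, llama_utils wraps its own internal
# DEFAULT_SYSTEM_PROMPT into the llama2 template, roughly:
#   <s> [INST] <<SYS>>
#    ...library default system prompt...
#   <</SYS>>
#
#    Hello! [/INST]
prompt = messages_to_prompt([ChatMessage(role=MessageRole.USER, content="Hello!")])
print(prompt)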
@@ -119,37 +104,8 @@ class TagPromptStyle(AbstractPromptStyleWithSystemPrompt):
     FIXME: should we add surrounding `<s>` and `</s>` tags, like in llama2?
     """
 
-    def __init__(self, default_system_prompt: str | None = None) -> None:
-        # We have to define a default system prompt here as the LLM will not
-        # use the default llama_utils functions.
-        default_system_prompt = default_system_prompt or self._DEFAULT_SYSTEM_PROMPT
-        super().__init__(default_system_prompt)
-        self.system_prompt: str = default_system_prompt
-
-    def _messages_to_prompt(self, messages: Sequence[ChatMessage]) -> str:
-        messages = list(messages)
-        if messages[0].role != MessageRole.SYSTEM:
-            logger.info(
-                "Adding system_promt='%s' to the given messages as there are none given in the session",
-                self.system_prompt,
-            )
-            messages = [
-                ChatMessage(content=self.system_prompt, role=MessageRole.SYSTEM),
-                *messages,
-            ]
-        return self._format_messages_to_prompt(messages)
-
-    def _completion_to_prompt(self, completion: str) -> str:
-        return (
-            f"<|system|>: {self.system_prompt.strip()}\n"
-            f"<|user|>: {completion.strip()}\n"
-            "<|assistant|>: "
-        )
-
-    @staticmethod
-    def _format_messages_to_prompt(messages: list[ChatMessage]) -> str:
+    def _messages_to_prompt(self, messages: Sequence[ChatMessage]) -> str:
         """Format message to prompt with `<|ROLE|>: MSG` style."""
-        assert messages[0].role == MessageRole.SYSTEM
         prompt = ""
         for message in messages:
             role = message.role
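After this hunk, TagPromptStyle no longer prepends any system message of its own. A quick sketch of the expected output via the public wrapper on AbstractPromptStyle; the loop body is elided from the hunk, so the rendering below follows the `<|ROLE|>: MSG` docstring rather than the actual implementation:

style = TagPromptStyle()
print(style.messages_to_prompt([
    ChatMessage(role=MessageRole.USER, content="Hello!"),
]))
# Expected per the docstring (no <|system|> line is added implicitly):
# <|user|>: Hello!
# <|assistant|>: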
@@ -161,19 +117,24 @@ class TagPromptStyle(AbstractPromptStyleWithSystemPrompt):
         prompt += "<|assistant|>: "
         return prompt
 
+    def _completion_to_prompt(self, completion: str) -> str:
+        return self._messages_to_prompt(
+            [ChatMessage(content=completion, role=MessageRole.USER)]
+        )
+
 
 def get_prompt_style(
     prompt_style: Literal["default", "llama2", "tag"] | None
-) -> type[AbstractPromptStyle]:
+) -> AbstractPromptStyle:
     """Get the prompt style to use from the given string.
 
     :param prompt_style: The prompt style to use.
     :return: The prompt style to use.
     """
     if prompt_style is None or prompt_style == "default":
-        return DefaultPromptStyle
+        return DefaultPromptStyle()
     elif prompt_style == "llama2":
-        return Llama2PromptStyle
+        return Llama2PromptStyle()
     elif prompt_style == "tag":
-        return TagPromptStyle
+        return TagPromptStyle()
     raise ValueError(f"Unknown prompt_style='{prompt_style}'")
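To summarize the signature change: get_prompt_style now returns a ready-to-use instance rather than a class that callers had to instantiate with a default_system_prompt. A small usage sketch against the code above:

style = get_prompt_style("llama2")  # an instance, not a class
assert isinstance(style, Llama2PromptStyle)

try:
    get_prompt_style("alpaca")  # type: ignore[arg-type]
except ValueError as err:
    print(err)  # Unknown prompt_style='alpaca'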