fix: mypy/black

This commit is contained in:
Javier Martinez 2024-09-26 09:40:24 +02:00
parent 0cd016a732
commit 4b5c3362f1
No known key found for this signature in database
4 changed files with 20 additions and 18 deletions

View File

@@ -286,8 +286,9 @@ class ChatMLPromptStyle(AbstractPromptStyle):
def get_prompt_style( def get_prompt_style(
prompt_style: Literal["default", "llama2", "llama3", "tag", "mistral", "chatml"] prompt_style: (
| None Literal["default", "llama2", "llama3", "tag", "mistral", "chatml"] | None
)
) -> AbstractPromptStyle: ) -> AbstractPromptStyle:
"""Get the prompt style to use from the given string. """Get the prompt style to use from the given string.

View File

@@ -90,9 +90,9 @@ class SummarizeService:
# Add context documents to summarize # Add context documents to summarize
if use_context: if use_context:
# 1. Recover all ref docs # 1. Recover all ref docs
ref_docs: dict[ ref_docs: dict[str, RefDocInfo] | None = (
str, RefDocInfo self.storage_context.docstore.get_all_ref_doc_info()
] | None = self.storage_context.docstore.get_all_ref_doc_info() )
if ref_docs is None: if ref_docs is None:
raise ValueError("No documents have been ingested yet.") raise ValueError("No documents have been ingested yet.")

View File

@@ -136,19 +136,19 @@ class LLMSettings(BaseModel):
0.1, 0.1,
description="The temperature of the model. Increasing the temperature will make the model answer more creatively. A value of 0.1 would be more factual.", description="The temperature of the model. Increasing the temperature will make the model answer more creatively. A value of 0.1 would be more factual.",
) )
prompt_style: Literal[ prompt_style: Literal["default", "llama2", "llama3", "tag", "mistral", "chatml"] = (
"default", "llama2", "llama3", "tag", "mistral", "chatml" Field(
] = Field( "llama2",
"llama2", description=(
description=( "The prompt style to use for the chat engine. "
"The prompt style to use for the chat engine. " "If `default` - use the default prompt style from the llama_index. It should look like `role: message`.\n"
"If `default` - use the default prompt style from the llama_index. It should look like `role: message`.\n" "If `llama2` - use the llama2 prompt style from the llama_index. Based on `<s>`, `[INST]` and `<<SYS>>`.\n"
"If `llama2` - use the llama2 prompt style from the llama_index. Based on `<s>`, `[INST]` and `<<SYS>>`.\n" "If `llama3` - use the llama3 prompt style from the llama_index."
"If `llama3` - use the llama3 prompt style from the llama_index." "If `tag` - use the `tag` prompt style. It should look like `<|role|>: message`. \n"
"If `tag` - use the `tag` prompt style. It should look like `<|role|>: message`. \n" "If `mistral` - use the `mistral` prompt style. It should look like <s>[INST] {System Prompt} [/INST]</s>[INST] { UserInstructions } [/INST]"
"If `mistral` - use the `mistral` prompt style. It should look like <s>[INST] {System Prompt} [/INST]</s>[INST] { UserInstructions } [/INST]" "`llama2` is the historic behaviour. `default` might work better with your custom models."
"`llama2` is the historic behaviour. `default` might work better with your custom models." ),
), )
) )

View File

@@ -1,4 +1,5 @@
"""This file should be imported if and only if you want to run the UI locally.""" """This file should be imported if and only if you want to run the UI locally."""
import base64 import base64
import logging import logging
import time import time