Changed similarity value to work with RAG Settings instead of LLM Settings

This commit is contained in:
Wesley Stewart 2024-03-15 12:00:50 +00:00
parent 89a8e2795e
commit b78a68f40f
3 changed files with 11 additions and 2 deletions

View File

@@ -118,7 +118,7 @@ class ChatService:
node_postprocessors=[
MetadataReplacementPostProcessor(target_metadata_key="window"),
SimilarityPostprocessor(
-                        similarity_cutoff=settings.llm.similarity_value
+                        similarity_cutoff=settings.rag.similarity_value
),
],
)

View File

@@ -107,6 +107,12 @@ class LLMSettings(BaseModel):
description="If set, any documents retrieved from the RAG must meet a certain score. Acceptable values are between 0 and 1.",
)
+class RagSettings(BaseModel):
+    similarity_value: float = Field(
+        None,
+        description="If set, any documents retrieved from the RAG must meet a certain score. Acceptable values are between 0 and 1.",
+    )
class VectorstoreSettings(BaseModel):
database: Literal["chroma", "qdrant", "pgvector"]
@@ -346,6 +352,7 @@ class Settings(BaseModel):
data: DataSettings
ui: UISettings
llm: LLMSettings
+    rag: RagSettings
embedding: EmbeddingSettings
llamacpp: LlamaCPPSettings
huggingface: HuggingFaceSettings

View File

@@ -40,7 +40,9 @@ llm:
max_new_tokens: 512
context_window: 3900
temperature: 0.1 # The temperature of the model. Increasing the temperature will make the model answer more creatively. A value of 0.1 would be more factual. (Default: 0.1)
-  #similarity_value: 0.4 #If set, any documents retrieved from the RAG must meet a certain score. Acceptable values are between 0 and 1.
+rag:
+  #similarity_value: 0.4 #Optional - If set, any documents retrieved from the RAG must meet a certain score. Acceptable values are between 0 and 1.
llamacpp:
prompt_style: "mistral"