Mirror of https://github.com/imartinez/privateGPT.git (synced 2025-07-13 15:14:09 +00:00)
Changed similarity value to work with RAG Settings instead of LLM Settings
parent 89a8e2795e
commit b78a68f40f
@@ -118,7 +118,7 @@ class ChatService:
             node_postprocessors=[
                 MetadataReplacementPostProcessor(target_metadata_key="window"),
                 SimilarityPostprocessor(
-                    similarity_cutoff=settings.llm.similarity_value
+                    similarity_cutoff=settings.rag.similarity_value
                 ),
             ],
         )
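For context, a minimal sketch of how llama-index's SimilarityPostprocessor drops retrieved nodes that fall below the cutoff. The import paths assume the llama_index.core layout and may differ between versions, and the 0.4 cutoff is just the illustrative value used in the commented-out config below, not something this commit hard-codes.

# Sketch: filtering retrieved nodes by similarity score.
from llama_index.core.postprocessor import SimilarityPostprocessor
from llama_index.core.schema import NodeWithScore, TextNode

nodes = [
    NodeWithScore(node=TextNode(text="relevant chunk"), score=0.82),
    NodeWithScore(node=TextNode(text="weak match"), score=0.15),
]

# 0.4 mirrors the example value in settings.yaml; any float in [0, 1] works.
postprocessor = SimilarityPostprocessor(similarity_cutoff=0.4)
kept = postprocessor.postprocess_nodes(nodes)
print([n.node.text for n in kept])  # only "relevant chunk" survives the cutoff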
@@ -107,6 +107,12 @@ class LLMSettings(BaseModel):
         description="If set, any documents retrieved from the RAG must meet a certain score. Acceptable values are between 0 and 1.",
     )
 
+class RagSettings(BaseModel):
+    similarity_value: float = Field(
+        None,
+        description="If set, any documents retrieved from the RAG must meet a certain score. Acceptable values are between 0 and 1.",
+    )
+
 
 class VectorstoreSettings(BaseModel):
     database: Literal["chroma", "qdrant", "pgvector"]
@@ -346,6 +352,7 @@ class Settings(BaseModel):
     data: DataSettings
     ui: UISettings
     llm: LLMSettings
+    rag: RagSettings
     embedding: EmbeddingSettings
     llamacpp: LlamaCPPSettings
     huggingface: HuggingFaceSettings
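As a rough sketch of how these pydantic models resolve the new value (simplified to the fields touched here; privateGPT's real Settings class has many more sections and is populated from the settings*.yaml profiles by its own loader):

# Simplified sketch of the settings structure added in this commit.
from typing import Optional
from pydantic import BaseModel, Field

class RagSettings(BaseModel):
    similarity_value: Optional[float] = Field(
        None,
        description="If set, retrieved documents must meet this score (0 to 1).",
    )

class Settings(BaseModel):
    rag: RagSettings

settings = Settings(**{"rag": {"similarity_value": 0.4}})
print(settings.rag.similarity_value)  # 0.4; stays None when the key is left commented out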
@@ -40,7 +40,9 @@ llm:
   max_new_tokens: 512
   context_window: 3900
   temperature: 0.1 # The temperature of the model. Increasing the temperature will make the model answer more creatively. A value of 0.1 would be more factual. (Default: 0.1)
-  #similarity_value: 0.4 #If set, any documents retrieved from the RAG must meet a certain score. Acceptable values are between 0 and 1.
+
+rag:
+  #similarity_value: 0.4 #Optional - If set, any documents retrieved from the RAG must meet a certain score. Acceptable values are between 0 and 1.
 
 llamacpp:
   prompt_style: "mistral"
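And a small sketch of how the new rag: block in settings.yaml surfaces once the value is uncommented. This uses plain pyyaml purely for illustration; privateGPT has its own settings loader and profile mechanism.

# Illustrative only: parsing a settings.yaml fragment with the new rag section.
import yaml

fragment = """
rag:
  similarity_value: 0.4
"""

data = yaml.safe_load(fragment)
print(data["rag"]["similarity_value"])  # 0.4, which ends up on settings.rag.similarity_value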