Fixed erroneous file to the correct file

Wesley Stewart 2024-03-12 19:36:20 +00:00
parent 4ebf8e8814
commit 6bb4d18ade


@@ -7,11 +7,9 @@ from llama_index.core.chat_engine.types import (
 )
 from llama_index.core.indices import VectorStoreIndex
 from llama_index.core.indices.postprocessor import MetadataReplacementPostProcessor
+from llama_index.core.postprocessor import SimilarityPostprocessor
+from llama_index.core.postprocessor import KeywordNodePostprocessor
 from llama_index.core.llms import ChatMessage, MessageRole
-from llama_index.core.postprocessor import (
-    KeywordNodePostprocessor,
-    SimilarityPostprocessor,
-)
 from llama_index.core.storage import StorageContext
 from llama_index.core.types import TokenGen
 from pydantic import BaseModel
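
For context on the two imports above: a minimal standalone sketch, not part of this commit, of how SimilarityPostprocessor and KeywordNodePostprocessor filter retrieved nodes. The sample nodes, scores, cutoff, and keywords are illustrative stand-ins, and KeywordNodePostprocessor needs spacy installed (pip install spacy), as a comment added later in this diff notes.

    from llama_index.core.postprocessor import (
        KeywordNodePostprocessor,
        SimilarityPostprocessor,
    )
    from llama_index.core.schema import NodeWithScore, TextNode

    # Pretend these came back from the vector index retriever (scores made up).
    nodes = [
        NodeWithScore(node=TextNode(text="PrivateGPT keeps all documents local."), score=0.82),
        NodeWithScore(node=TextNode(text="Unrelated boilerplate text."), score=0.31),
    ]

    # Drop nodes whose retrieval score falls below the cutoff.
    kept = SimilarityPostprocessor(similarity_cutoff=0.5).postprocess_nodes(nodes)

    # Then keep or drop nodes by keyword match (requires spacy).
    kept = KeywordNodePostprocessor(
        required_keywords=["PrivateGPT"],
        exclude_keywords=["boilerplate"],
    ).postprocess_nodes(kept)

    print([n.node.get_content() for n in kept])
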
@@ -26,7 +24,6 @@ from private_gpt.open_ai.extensions.context_filter import ContextFilter
 from private_gpt.server.chunks.chunks_service import Chunk
 from private_gpt.settings.settings import settings
 
-
 class Completion(BaseModel):
     response: str
     sources: list[Chunk] | None = None
@@ -107,45 +104,31 @@ class ChatService:
             vector_index_retriever = self.vector_store_component.get_retriever(
                 index=self.index, context_filter=context_filter
             )
-            # Initialize node_postporcessors
-            node_postprocessors_tmp: list[
-                MetadataReplacementPostProcessor
-                | SimilarityPostprocessor
-                | KeywordNodePostprocessor
-            ]
-            node_postprocessors_tmp = [
-                MetadataReplacementPostProcessor(target_metadata_key="window"),
-            ]
-            # If similarity value is set, use it. If not, dont add it
-            if settings().llm.similarity_value is not None:
-                node_postprocessors_tmp.append(
-                    SimilarityPostprocessor(
-                        similarity_cutoff=settings().llm.similarity_value
-                    )
-                )
-            # If similarity value is set, use it. If not, dont add it
-            # if settings().llm.keywords_include is not empty or
-            # settings().llm.keywords_exclude is not empty
+            #To use the keyword search, you must install spacy:
+            # pip install spacy
             if settings().llm.keywords_include or settings().llm.keywords_exclude:
-                node_postprocessors_tmp.append(
-                    KeywordNodePostprocessor(
-                        required_keywords=settings().llm.keywords_include,
-                        exclude_keywords=settings().llm.keywords_exclude,
-                    )
-                )
-            return ContextChatEngine.from_defaults(
-                system_prompt=system_prompt,
-                retriever=vector_index_retriever,
-                llm=self.llm_component.llm,  # Takes no effect at the moment
-                node_postprocessors=[
-                    MetadataReplacementPostProcessor(target_metadata_key="window"),
-                    SimilarityPostprocessor(
-                        similarity_cutoff=settings().llm.similarity_value
-                    ),
-                ],
-            )
+                return ContextChatEngine.from_defaults(
+                    system_prompt=system_prompt,
+                    retriever=vector_index_retriever,
+                    llm=self.llm_component.llm,  # Takes no effect at the moment
+                    node_postprocessors=[
+                        MetadataReplacementPostProcessor(target_metadata_key="window"),
+                        SimilarityPostprocessor(similarity_cutoff=settings().llm.similarity_value),
+                        KeywordNodePostprocessor(required_keywords=settings().llm.keywords_include, exclude_keywords=settings().llm.keywords_exclude),
+                    ],
+                )
+            else:
+                return ContextChatEngine.from_defaults(
+                    system_prompt=system_prompt,
+                    retriever=vector_index_retriever,
+                    llm=self.llm_component.llm,  # Takes no effect at the moment
+                    node_postprocessors=[
+                        MetadataReplacementPostProcessor(target_metadata_key="window"),
+                        SimilarityPostprocessor(similarity_cutoff=settings().llm.similarity_value),
+                    ],
+                )
         else:
             return SimpleChatEngine.from_defaults(
                 system_prompt=system_prompt,
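
The new branches above select node postprocessors from this fork's settings (settings().llm.similarity_value, settings().llm.keywords_include, settings().llm.keywords_exclude). A self-contained sketch of that selection logic, with hypothetical stand-in values in place of the real settings object; the resulting list is what gets passed as node_postprocessors to ContextChatEngine.from_defaults:

    from llama_index.core.indices.postprocessor import MetadataReplacementPostProcessor
    from llama_index.core.postprocessor import (
        KeywordNodePostprocessor,
        SimilarityPostprocessor,
    )

    # Hypothetical stand-ins for the settings().llm.* fields used above.
    similarity_value = 0.45
    keywords_include = ["invoice"]
    keywords_exclude: list[str] = []

    # Always replace node text with its sentence window and apply the
    # similarity cutoff, mirroring both branches of the new code.
    node_postprocessors = [
        MetadataReplacementPostProcessor(target_metadata_key="window"),
        SimilarityPostprocessor(similarity_cutoff=similarity_value),
    ]
    if keywords_include or keywords_exclude:
        # Keyword filtering requires spacy, per the comment added in this commit.
        node_postprocessors.append(
            KeywordNodePostprocessor(
                required_keywords=keywords_include,
                exclude_keywords=keywords_exclude,
            )
        )
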