diff --git a/private_gpt/server/chat/chat_service.py b/private_gpt/server/chat/chat_service.py
index 77afe114..da6f494e 100644
--- a/private_gpt/server/chat/chat_service.py
+++ b/private_gpt/server/chat/chat_service.py
@@ -21,8 +21,6 @@ from private_gpt.open_ai.extensions.context_filter import ContextFilter
 from private_gpt.server.chunks.chunks_service import Chunk
 from private_gpt.settings.settings import settings
 
-DEFAULT_CONTEXT_TEMPLATE = settings().rag.default_context_template
-
 
 class Completion(BaseModel):
     response: str
@@ -100,9 +98,11 @@ class ChatService:
         system_prompt: str | None = None,
         use_context: bool = False,
         context_filter: ContextFilter | None = None,
-        context_template: str | None = DEFAULT_CONTEXT_TEMPLATE,
+        context_template: str | None = None,
     ) -> BaseChatEngine:
         if use_context:
+            if context_template is None:
+                context_template = settings().rag.default_context_template
             vector_index_retriever = self.vector_store_component.get_retriever(
                 index=self.index, context_filter=context_filter
             )
diff --git a/settings.yaml b/settings.yaml
index 52ce505d..07a4b302 100644
--- a/settings.yaml
+++ b/settings.yaml
@@ -61,4 +61,8 @@ openai:
   model: gpt-3.5-turbo
 
 rag:
-  default_context_template: "Context information is below.\n--------------------\n{context_str}\n--------------------\n"
+  default_context_template: |
+    Context information is below.
+    --------------------
+    {context_str}
+    --------------------
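
The change above moves the template lookup from import time (a module-level `DEFAULT_CONTEXT_TEMPLATE`) to call time (resolved inside the chat-engine builder only when no template is passed), so an override in `settings.yaml` takes effect without reloading the module. Below is a minimal, self-contained sketch of that deferred-default pattern; the names `RagSettings`, `get_settings`, and `build_context_prompt` are illustrative stand-ins, not the actual private-gpt API.

```python
from dataclasses import dataclass


@dataclass
class RagSettings:
    # Stand-in for the rag.default_context_template value from settings.yaml.
    default_context_template: str = (
        "Context information is below.\n"
        "--------------------\n"
        "{context_str}\n"
        "--------------------\n"
    )


def get_settings() -> RagSettings:
    # Stand-in for settings().rag; in private-gpt this is loaded from settings.yaml.
    return RagSettings()


def build_context_prompt(context_str: str, context_template: str | None = None) -> str:
    # Fall back to the configured default only when no template was passed,
    # mirroring the `if context_template is None:` branch in the diff.
    if context_template is None:
        context_template = get_settings().default_context_template
    return context_template.format(context_str=context_str)


if __name__ == "__main__":
    print(build_context_prompt("Doc chunk 1\nDoc chunk 2"))
```

Reading the setting lazily also avoids constructing the settings object as a side effect of importing the module, which keeps imports cheap and makes the default easy to override in tests.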