diff --git a/private_gpt/components/llm/prompt/prompt_helper.py b/private_gpt/components/llm/prompt/prompt_helper.py
index b5f3fe5f..bd910175 100644
--- a/private_gpt/components/llm/prompt/prompt_helper.py
+++ b/private_gpt/components/llm/prompt/prompt_helper.py
@@ -1,3 +1,6 @@
+# Ignoring the mypy check in this file, given that this file is imported only if
+# running in local mode (and therefore the llama-cpp-python library is installed).
+# type: ignore
 """Helper to get your llama_index messages correctly serialized into a prompt.
 
 This set of classes and functions is used to format a series of