diff --git a/pilot/model/adapter.py b/pilot/model/adapter.py
index c9f5cb6f1..c914195d8 100644
--- a/pilot/model/adapter.py
+++ b/pilot/model/adapter.py
@@ -106,7 +106,7 @@ class FalconAdapater(BaseLLMAdaper):
     def loader(self, model_path: str, from_pretrained_kwagrs: dict):
         tokenizer = AutoTokenizer.from_pretrained(model_path, use_fast=False)
-        if QLORA == True:
+        if QLORA:
             model = AutoModelForCausalLM.from_pretrained(
                 model_path,
                 load_in_4bit=True, #quantize
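
The change replaces the explicit `== True` comparison with a plain truthiness check, which is the idiomatic Python form (PEP 8 / flake8 E712) and reads the same for any boolean flag. A minimal sketch of the pattern, assuming `QLORA` is a module-level bool derived from configuration (the exact definition in the project may differ):

    import os

    # Assumption for this sketch: QLORA is a real bool parsed from an
    # environment variable, so a bare truthiness check is sufficient.
    QLORA = os.getenv("QLORA", "False").lower() == "true"

    def load_model(model_path: str) -> None:
        # `if QLORA:` is equivalent to `if QLORA == True:` for a bool,
        # but avoids the redundant comparison against a literal.
        if QLORA:
            print(f"loading {model_path} with 4-bit (QLoRA) quantization")
        else:
            print(f"loading {model_path} without quantization")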