fix: set num_gpus reference for mps + cpu

This commit is contained in:
csunny 2023-08-03 16:54:58 +08:00
parent d67a6a642a
commit 743863d52b

View File

@ -117,8 +117,8 @@ def huggingface_loader(llm_adapter: BaseLLMAdaper, model_params: ModelParams):
max_memory = None
# if device is cpu or mps. gpu need to be zero
num_gpus = 0
if device == "cpu":
    kwargs = {"torch_dtype": torch.float32}
elif device == "cuda":