Mirror of https://github.com/hpcaitech/ColossalAI.git (synced 2025-12-24 04:52:45 +00:00)
[misc] add verbose arg for zero and op builder (#3552)

* [misc] add print verbose
* [gemini] add print verbose
* [zero] add print verbose for low level
* [misc] add print verbose for op builder
This commit is contained in:
@@ -7,7 +7,7 @@ import os
 import time
 from abc import ABC, abstractmethod
 from pathlib import Path
-from typing import List
+from typing import List, Optional
 
 from .utils import check_cuda_availability, check_system_pytorch_cuda_match, print_rank_0
 
@@ -138,7 +138,7 @@ class Builder(ABC):
         # make sure system CUDA and pytorch CUDA match, an error will raised inside the function if not
         check_system_pytorch_cuda_match(CUDA_HOME)
 
-    def load(self, verbose=True):
+    def load(self, verbose: Optional[bool] = None):
         """
         load the kernel during runtime. If the kernel is not built during pip install, it will build the kernel.
         If the kernel is built during runtime, it will be stored in `~/.cache/colossalai/torch_extensions/`. If the
@@ -149,6 +149,8 @@ class Builder(ABC):
         Args:
             verbose (bool, optional): show detailed info. Defaults to True.
         """
+        if verbose is None:
+            verbose = os.environ.get('CAI_KERNEL_VERBOSE', '0') == '1'
         # if the kernel has be compiled and cached, we directly use it
         if self.cached_op_module is not None:
             return self.cached_op_module
Reference in New Issue
Block a user