Mirror of https://github.com/hpcaitech/ColossalAI.git, synced 2025-10-28 20:30:42 +00:00
[misc] update pre-commit and run all files (#4752)

* [misc] update pre-commit
* [misc] run pre-commit
* [misc] remove useless configuration files
* [misc] ignore cuda for clang-format
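The setup.py changes below are purely mechanical restyling of the kind such hooks apply: single-quoted strings become double-quoted and long calls are laid out one argument per line. The exact hook set lives in the repository's .pre-commit-config.yaml, which is not part of this diff; that a formatter such as black produces this style is an inference from the output, not stated in the commit. A minimal self-contained illustration of the before/after style, using two lines that actually appear in the diff:

    import os

    # Pre-hook style (single quotes), as removed in the diff below:
    #   BUILD_CUDA_EXT = int(os.environ.get('CUDA_EXT', '0')) == 1
    # Post-hook style (double quotes), as added in the diff below:
    BUILD_CUDA_EXT = int(os.environ.get("CUDA_EXT", "0")) == 1
    IS_NIGHTLY = int(os.environ.get("NIGHTLY", "0")) == 1

    print(f"CUDA_EXT={BUILD_CUDA_EXT}, NIGHTLY={IS_NIGHTLY}")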
131 setup.py
@@ -15,8 +15,8 @@ from op_builder.utils import (
 )
 
 try:
     import torch
     from torch.utils.cpp_extension import CUDA_HOME, BuildExtension
+
     TORCH_AVAILABLE = True
 except ImportError:
     TORCH_AVAILABLE = False
@@ -26,14 +26,14 @@ except ImportError:
 MIN_PYTORCH_VERSION_MAJOR = 1
 MIN_PYTORCH_VERSION_MINOR = 10
 THIS_DIR = os.path.dirname(os.path.abspath(__file__))
-BUILD_CUDA_EXT = int(os.environ.get('CUDA_EXT', '0')) == 1
-IS_NIGHTLY = int(os.environ.get('NIGHTLY', '0')) == 1
+BUILD_CUDA_EXT = int(os.environ.get("CUDA_EXT", "0")) == 1
+IS_NIGHTLY = int(os.environ.get("NIGHTLY", "0")) == 1
 
 # a variable to store the op builder
 ext_modules = []
 
 # we do not support windows currently
-if sys.platform == 'win32':
+if sys.platform == "win32":
     raise RuntimeError("Windows is not supported yet. Please try again within the Windows Subsystem for Linux (WSL).")
 
 
@@ -64,7 +64,7 @@ def fetch_requirements(path) -> List[str]:
     Returns:
         The lines in the requirements file.
     """
-    with open(path, 'r') as fd:
+    with open(path, "r") as fd:
         return [r.strip() for r in fd.readlines()]
 
 
@@ -75,7 +75,7 @@ def fetch_readme() -> str:
     Returns:
         The lines in the README file.
     """
-    with open('README.md', encoding='utf-8') as f:
+    with open("README.md", encoding="utf-8") as f:
         return f.read()
 
 
@@ -89,21 +89,21 @@ def get_version() -> str:
 
     setup_file_path = os.path.abspath(__file__)
     project_path = os.path.dirname(setup_file_path)
-    version_txt_path = os.path.join(project_path, 'version.txt')
-    version_py_path = os.path.join(project_path, 'colossalai/version.py')
+    version_txt_path = os.path.join(project_path, "version.txt")
+    version_py_path = os.path.join(project_path, "colossalai/version.py")
 
     with open(version_txt_path) as f:
         version = f.read().strip()
 
     # write version into version.py
-    with open(version_py_path, 'w') as f:
+    with open(version_py_path, "w") as f:
         f.write(f"__version__ = '{version}'\n")
 
     # look for pytorch and cuda version
     if BUILD_CUDA_EXT:
         torch_major, torch_minor, _ = get_pytorch_version()
-        torch_version = f'{torch_major}.{torch_minor}'
-        cuda_version = '.'.join(get_cuda_bare_metal_version(CUDA_HOME))
+        torch_version = f"{torch_major}.{torch_minor}"
+        cuda_version = ".".join(get_cuda_bare_metal_version(CUDA_HOME))
     else:
         torch_version = None
         cuda_version = None
@@ -112,12 +112,12 @@ def get_version() -> str:
         if torch_version:
             f.write(f'torch = "{torch_version}"\n')
         else:
-            f.write('torch = None\n')
+            f.write("torch = None\n")
 
         if cuda_version:
             f.write(f'cuda = "{cuda_version}"\n')
         else:
-            f.write('cuda = None\n')
+            f.write("cuda = None\n")
 
     return version
 
@@ -127,6 +127,7 @@ if BUILD_CUDA_EXT:
     set_cuda_arch_list(CUDA_HOME)
 
     from op_builder import ALL_OPS
+
     op_names = []
 
     # load all builders
@@ -135,7 +136,7 @@ if BUILD_CUDA_EXT:
         ext_modules.append(builder_cls().builder())
 
     # show log
-    op_name_list = ', '.join(op_names)
+    op_name_list = ", ".join(op_names)
     print(f"[extension] loaded builders for {op_name_list}")
 
 # always put not nightly branch as the if branch
@@ -143,56 +144,62 @@ if BUILD_CUDA_EXT:
 # and it will mess up with the dependency graph insights
 if not IS_NIGHTLY:
     version = get_version()
-    package_name = 'colossalai'
+    package_name = "colossalai"
 else:
     # use date as the nightly version
-    version = datetime.today().strftime('%Y.%m.%d')
-    package_name = 'colossalai-nightly'
+    version = datetime.today().strftime("%Y.%m.%d")
+    package_name = "colossalai-nightly"
 
-setup(name=package_name,
-      version=version,
-      packages=find_packages(exclude=(
-          'op_builder',
-          'benchmark',
-          'docker',
-          'tests',
-          'docs',
-          'examples',
-          'tests',
-          'scripts',
-          'requirements',
-          '*.egg-info',
-      )),
-      description='An integrated large-scale model training system with efficient parallelization techniques',
-      long_description=fetch_readme(),
-      long_description_content_type='text/markdown',
-      license='Apache Software License 2.0',
-      url='https://www.colossalai.org',
-      project_urls={
-          'Forum': 'https://github.com/hpcaitech/ColossalAI/discussions',
-          'Bug Tracker': 'https://github.com/hpcaitech/ColossalAI/issues',
-          'Examples': 'https://github.com/hpcaitech/ColossalAI-Examples',
-          'Documentation': 'http://colossalai.readthedocs.io',
-          'Github': 'https://github.com/hpcaitech/ColossalAI',
-      },
-      ext_modules=ext_modules,
-      cmdclass={'build_ext': BuildExtension} if ext_modules else {},
-      install_requires=fetch_requirements('requirements/requirements.txt'),
-      entry_points='''
+setup(
+    name=package_name,
+    version=version,
+    packages=find_packages(
+        exclude=(
+            "op_builder",
+            "benchmark",
+            "docker",
+            "tests",
+            "docs",
+            "examples",
+            "tests",
+            "scripts",
+            "requirements",
+            "*.egg-info",
+        )
+    ),
+    description="An integrated large-scale model training system with efficient parallelization techniques",
+    long_description=fetch_readme(),
+    long_description_content_type="text/markdown",
+    license="Apache Software License 2.0",
+    url="https://www.colossalai.org",
+    project_urls={
+        "Forum": "https://github.com/hpcaitech/ColossalAI/discussions",
+        "Bug Tracker": "https://github.com/hpcaitech/ColossalAI/issues",
+        "Examples": "https://github.com/hpcaitech/ColossalAI-Examples",
+        "Documentation": "http://colossalai.readthedocs.io",
+        "Github": "https://github.com/hpcaitech/ColossalAI",
+    },
+    ext_modules=ext_modules,
+    cmdclass={"build_ext": BuildExtension} if ext_modules else {},
+    install_requires=fetch_requirements("requirements/requirements.txt"),
+    entry_points="""
         [console_scripts]
         colossalai=colossalai.cli:cli
-      ''',
-      python_requires='>=3.6',
-      classifiers=[
-          'Programming Language :: Python :: 3',
-          'License :: OSI Approved :: Apache Software License',
-          'Environment :: GPU :: NVIDIA CUDA',
-          'Topic :: Scientific/Engineering :: Artificial Intelligence',
-          'Topic :: System :: Distributed Computing',
-      ],
-      package_data={
-          'colossalai': [
-              '_C/*.pyi', 'kernel/cuda_native/csrc/*', 'kernel/cuda_native/csrc/kernel/*',
-              'kernel/cuda_native/csrc/kernels/include/*'
-          ]
-      })
+        """,
+    python_requires=">=3.6",
+    classifiers=[
+        "Programming Language :: Python :: 3",
+        "License :: OSI Approved :: Apache Software License",
+        "Environment :: GPU :: NVIDIA CUDA",
+        "Topic :: Scientific/Engineering :: Artificial Intelligence",
+        "Topic :: System :: Distributed Computing",
+    ],
+    package_data={
+        "colossalai": [
+            "_C/*.pyi",
+            "kernel/cuda_native/csrc/*",
+            "kernel/cuda_native/csrc/kernel/*",
+            "kernel/cuda_native/csrc/kernels/include/*",
+        ]
+    },
+)
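Beyond the quote changes, the extension-loading hunks above only restyle existing logic: when CUDA_EXT=1, each op builder contributes one compiled extension and the loaded op names are printed. A standalone sketch of that pattern follows; the empty ALL_OPS placeholder and the loop header are illustrative assumptions, since the hunk around line 136 begins inside the loop and op_builder itself is not part of this diff.

    import os

    BUILD_CUDA_EXT = int(os.environ.get("CUDA_EXT", "0")) == 1  # same gate as in setup.py

    ext_modules = []

    if BUILD_CUDA_EXT:
        # Placeholder for op_builder.ALL_OPS: a mapping from op name to builder class.
        ALL_OPS = {}

        op_names = []
        for name, builder_cls in ALL_OPS.items():  # assumed loop shape; the hunk starts inside it
            op_names.append(name)
            ext_modules.append(builder_cls().builder())

        # show log, as in the diff
        op_name_list = ", ".join(op_names)
        print(f"[extension] loaded builders for {op_name_list}")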