Mirror of https://github.com/hpcaitech/ColossalAI.git, synced 2025-09-08 12:30:42 +00:00
[misc] update pre-commit and run all files (#4752)
* [misc] update pre-commit
* [misc] run pre-commit
* [misc] remove useless configuration files
* [misc] ignore cuda for clang-format
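As a rough illustration of what the bullets above describe, a pre-commit configuration that runs Black on Python files and clang-format on C/C++ sources while skipping CUDA sources might look like the sketch below. The hook revisions and the exclude pattern are assumptions for illustration, not the repository's actual .pre-commit-config.yaml.

# Illustrative sketch only; hook revisions and the exclude pattern are assumed.
repos:
  - repo: https://github.com/psf/black
    rev: 23.9.1                 # assumed revision
    hooks:
      - id: black
  - repo: https://github.com/pre-commit/mirrors-clang-format
    rev: v16.0.6                # assumed revision
    hooks:
      - id: clang-format
        exclude: '.*\.cu$'      # skip CUDA sources, per "ignore cuda for clang-format"

With a config like this in place, `pre-commit run --all-files` reformats every tracked file in one pass, which is what produces mechanical diffs like the one below.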
@@ -21,8 +21,23 @@ from .layout import Layout
 from .sharding_spec import ShardingSpec
 
 __all__ = [
-    'is_distributed_tensor', 'distribute_tensor', 'to_global', 'is_sharded', 'shard_rowwise', 'shard_colwise',
-    'sharded_tensor_to_param', 'compute_global_numel', 'get_sharding_spec', 'get_global_shape', 'get_device_mesh',
-    'redistribute', 'get_layout', 'is_customized_distributed_tensor', 'distribute_tensor_with_customization',
-    'to_global_for_customized_distributed_tensor', 'customized_distributed_tensor_to_param', 'Layout', 'ShardingSpec'
+    "is_distributed_tensor",
+    "distribute_tensor",
+    "to_global",
+    "is_sharded",
+    "shard_rowwise",
+    "shard_colwise",
+    "sharded_tensor_to_param",
+    "compute_global_numel",
+    "get_sharding_spec",
+    "get_global_shape",
+    "get_device_mesh",
+    "redistribute",
+    "get_layout",
+    "is_customized_distributed_tensor",
+    "distribute_tensor_with_customization",
+    "to_global_for_customized_distributed_tensor",
+    "customized_distributed_tensor_to_param",
+    "Layout",
+    "ShardingSpec",
 ]