# ColossalAI/colossalai/tensor/d_tensor/__init__.py
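"""Public API of the ``d_tensor`` package.

This module re-exports the distributed tensor helpers exposed by the ``api``
module -- creation and sharding utilities (``distribute_tensor``,
``shard_rowwise``, ``shard_colwise``), inspection helpers (``get_layout``,
``get_sharding_spec``, ``get_device_mesh``, ``get_global_shape``,
``compute_global_numel``), conversion helpers (``to_global``,
``sharded_tensor_to_param``), and their customized distributed tensor
counterparts -- together with the ``Layout`` and ``ShardingSpec`` classes.
"""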

from .api import (
compute_global_numel,
customized_distributed_tensor_to_param,
distribute_tensor,
distribute_tensor_with_customization,
get_device_mesh,
get_global_shape,
get_layout,
get_sharding_spec,
init_as_dtensor,
init_tensor_as_customization_distributed,
is_customized_distributed_tensor,
is_distributed_tensor,
is_sharded,
redistribute,
shard_colwise,
shard_rowwise,
sharded_tensor_to_param,
to_global,
to_global_for_customized_distributed_tensor,
)
from .layout import Layout
from .sharding_spec import ShardingSpec

__all__ = [
"is_distributed_tensor",
"distribute_tensor",
"init_as_dtensor",
"to_global",
"is_sharded",
"shard_rowwise",
"shard_colwise",
"sharded_tensor_to_param",
"compute_global_numel",
"get_sharding_spec",
"get_global_shape",
"get_device_mesh",
"redistribute",
"get_layout",
"is_customized_distributed_tensor",
"distribute_tensor_with_customization",
"init_tensor_as_customization_distributed",
"to_global_for_customized_distributed_tensor",
"customized_distributed_tensor_to_param",
"Layout",
"ShardingSpec",
]
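
# A minimal usage sketch, kept as a comment so importing this package stays
# side-effect free. It assumes ``torch.distributed`` is already initialized and
# that the default process group is used for sharding; the exact signatures
# below are assumptions for illustration, not a reference.
#
#   import torch
#   from colossalai.tensor.d_tensor import (
#       shard_rowwise,
#       is_distributed_tensor,
#       get_sharding_spec,
#       to_global,
#   )
#
#   weight = torch.randn(1024, 1024)
#   sharded = shard_rowwise(weight)      # shard dim 0 across the process group (assumed default group)
#   assert is_distributed_tensor(sharded)
#   spec = get_sharding_spec(sharded)    # inspect how the tensor is sharded
#   full = to_global(sharded)            # gather the shards back into the full tensor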