[autoparallel] fix spelling error (#2270)

Author: YuliangLiu0306
Date: 2023-01-03 16:13:00 +08:00
Committed by: GitHub
Parent: af32022f74
Commit: fb87322773

10 changed files with 46 additions and 46 deletions
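
For quick reference, the renamed symbols resolve to the following imports after this commit. This is a minimal sketch that reuses only the module paths and class names appearing verbatim in the "+" lines of the hunks below; it assumes colossalai is installed at this commit or later.

# Corrected handler imports after this commit (taken from the "+" lines below).
from colossalai.auto_parallel.tensor_shard.node_handler.getattr_handler import GetattrHandler
from colossalai.auto_parallel.tensor_shard.node_handler.output_handler import OutputHandler
from colossalai.auto_parallel.tensor_shard.node_handler.placeholder_handler import PlaceholderHandler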

View File

@@ -1,7 +1,7 @@
 import torch
 import torch.nn as nn
-from colossalai.auto_parallel.tensor_shard.node_handler.getatrr_handler import GetattrHandler
+from colossalai.auto_parallel.tensor_shard.node_handler.getattr_handler import GetattrHandler
 from colossalai.auto_parallel.tensor_shard.sharding_strategy import OperationData, OperationDataType, StrategiesVector
 from colossalai.device.device_mesh import DeviceMesh
 from colossalai.fx import ColoGraphModule, ColoTracer

View File

@@ -7,7 +7,7 @@ import torch.nn as nn
 from colossalai.auto_parallel.tensor_shard.node_handler.getitem_handler import GetItemHandler
 from colossalai.auto_parallel.tensor_shard.node_handler.linear_handler import LinearFunctionHandler
-from colossalai.auto_parallel.tensor_shard.node_handler.placeholder_handler import PlacehodlerHandler
+from colossalai.auto_parallel.tensor_shard.node_handler.placeholder_handler import PlaceholderHandler
 from colossalai.auto_parallel.tensor_shard.node_handler.reshape_handler import ReshapeHandler
 from colossalai.auto_parallel.tensor_shard.sharding_strategy import OperationData, OperationDataType, StrategiesVector
 from colossalai.device.device_mesh import DeviceMesh
@@ -145,7 +145,7 @@ def test_getitem_from_tuple_handler():
     split_strategies_vector = StrategiesVector(split_node)
     # build handler
-    input_handler = PlacehodlerHandler(
+    input_handler = PlaceholderHandler(
         node=input_node,
         device_mesh=device_mesh,
         strategies_vector=input_strategies_vector,

View File

@@ -1,7 +1,7 @@
 import torch
 import torch.nn as nn
-from colossalai.auto_parallel.tensor_shard.node_handler.output_handler import OuputHandler
+from colossalai.auto_parallel.tensor_shard.node_handler.output_handler import OutputHandler
 from colossalai.auto_parallel.tensor_shard.sharding_strategy import OperationData, OperationDataType, StrategiesVector
 from colossalai.device.device_mesh import DeviceMesh
 from colossalai.fx import ColoGraphModule, ColoTracer
@@ -39,10 +39,10 @@ def test_output_handler(output_option):
     output_strategies_vector = StrategiesVector(output_node)
     # build handler
-    otuput_handler = OuputHandler(node=output_node,
-                                  device_mesh=device_mesh,
-                                  strategies_vector=output_strategies_vector,
-                                  output_option=output_option)
+    otuput_handler = OutputHandler(node=output_node,
+                                   device_mesh=device_mesh,
+                                   strategies_vector=output_strategies_vector,
+                                   output_option=output_option)
     otuput_handler.register_strategy(compute_resharding_cost=False)
     # check operation data mapping

View File

@@ -1,7 +1,7 @@
 import torch
 import torch.nn as nn
-from colossalai.auto_parallel.tensor_shard.node_handler.placeholder_handler import PlacehodlerHandler
+from colossalai.auto_parallel.tensor_shard.node_handler.placeholder_handler import PlaceholderHandler
 from colossalai.auto_parallel.tensor_shard.sharding_strategy import OperationData, OperationDataType, StrategiesVector
 from colossalai.device.device_mesh import DeviceMesh
 from colossalai.fx import ColoGraphModule, ColoTracer
@@ -36,7 +36,7 @@ def test_placeholder_handler(placeholder_option):
     placeholder_node = list(graph.nodes)[0]
     placeholder_strategies_vector = StrategiesVector(placeholder_node)
     # build handler
-    placeholder_handler = PlacehodlerHandler(node=placeholder_node,
+    placeholder_handler = PlaceholderHandler(node=placeholder_node,
                                              device_mesh=device_mesh,
                                              strategies_vector=placeholder_strategies_vector,
                                              placeholder_option=placeholder_option)