mirror of
https://github.com/hpcaitech/ColossalAI.git
synced 2025-09-06 11:32:10 +00:00
[doc] Fix typo under colossalai and doc(#3618)
* Fixed several spelling errors under colossalai * Fix the spelling error in colossalai and docs directory * Cautiously changed the spelling errors under the example folder * Update runtime_preparation_pass.py revert autograft to autograd * Update search_chunk.py utile to until * Update check_installation.py change misteach to mismatch in line 91 * Update 1D_tensor_parallel.md revert to perceptron * Update 2D_tensor_parallel.md revert to perceptron in line 73 * Update 2p5D_tensor_parallel.md revert to perceptron in line 71 * Update 3D_tensor_parallel.md revert to perceptron in line 80 * Update README.md revert to resnet in line 42 * Update reorder_graph.py revert to indice in line 7 * Update p2p.py revert to megatron in line 94 * Update initialize.py revert to torchrun in line 198 * Update routers.py change to detailed in line 63 * Update routers.py change to detailed in line 146 * Update README.md revert random number in line 402
This commit is contained in:
@@ -155,7 +155,7 @@ def split_module(
|
||||
use_partition = partitions[use_partition_name]
|
||||
use_partition.outputs.setdefault(def_node.name)
|
||||
|
||||
# split nodes into parititons
|
||||
# split nodes into partitions
|
||||
for node in m.graph.nodes:
|
||||
orig_nodes[node.name] = node
|
||||
|
||||
@@ -198,7 +198,7 @@ def split_module(
|
||||
if len(sorted_partitions) != len(partitions):
|
||||
raise RuntimeError("cycle exists between partitions!")
|
||||
|
||||
# add placeholders to parititons
|
||||
# add placeholders to partitions
|
||||
for partition_name in sorted_partitions:
|
||||
partition = partitions[partition_name]
|
||||
for input in partition.inputs:
|
||||
|
Reference in New Issue
Block a user