mirror of
https://github.com/hpcaitech/ColossalAI.git
synced 2025-09-13 13:11:05 +00:00
[nfc] fix typo colossalai/cli fx kernel (#3847)
* fix typo colossalai/autochunk auto_parallel amp * fix typo colossalai/auto_parallel nn utils etc. * fix typo colossalai/auto_parallel autochunk fx/passes etc. * fix typo docs/ * change placememt_policy to placement_policy in docs/ and examples/ * fix typo colossalai/ applications/ * fix typo colossalai/cli fx kernel
This commit is contained in:
@@ -51,7 +51,7 @@ class BiasAdditionModule(ABC):
     For example:
     The kwargs for conv2d module is {} because the attributes like 'padding' or 'groups' are
-    considered during module initilizing. However, we need to consider those attributes as kwargs
+    considered during module initializing. However, we need to consider those attributes as kwargs
     in F.conv2d.
     """
     pass

@@ -295,7 +295,7 @@ class ColoTracer(Tracer):
     @staticmethod
     def forward(ctx, run_function, preserve_rng_state, *args):
-        # signal that the current tracing occurs within activaton checkpoint part
+        # signal that the current tracing occurs within activation checkpoint part
         self.inside_torch_checkpoint_func = True
         out = run_function(*args)
         self.inside_torch_checkpoint_func = False

@@ -92,7 +92,7 @@ class ColoTracer(Tracer):
     return proxy

     # if graph is traced for auto parallelism module, some extra node will be added during
-    # graph construction to deal with the compatability between bias addition and all reduce.
+    # graph construction to deal with the compatibility between bias addition and all reduce.

     # if no extra manipulation is applied, we just pass the origin arguments to create_proxy function
     # to create node on computation graph

@@ -208,7 +208,7 @@ class ColoTracer(Tracer):
     self.proxy_cls = ColoProxy
     self.tracer_type = TracerType.META
     else:
-        raise ValueError(f"Unrecognised tracer type {tracer_type}")
+        raise ValueError(f"Unrecognized tracer type {tracer_type}")

     def _meta_data_computing(self, kind, target, args, kwargs):

@@ -445,7 +445,7 @@ class ColoTracer(Tracer):
     @staticmethod
     def forward(ctx, run_function, preserve_rng_state, *args):
-        # signal that the current tracing occurs within activaton checkpoint part
+        # signal that the current tracing occurs within activation checkpoint part
         self.inside_torch_checkpoint_func = True
         out = run_function(*args)
         self.inside_torch_checkpoint_func = False
Reference in New Issue
Block a user