Mirror of https://github.com/hpcaitech/ColossalAI.git (synced 2025-09-06 11:32:10 +00:00)
[nfc] fix typo colossalai/cli fx kernel (#3847)
* fix typo colossalai/autochunk auto_parallel amp
* fix typo colossalai/auto_parallel nn utils etc.
* fix typo colossalai/auto_parallel autochunk fx/passes etc.
* fix typo docs/
* change placememt_policy to placement_policy in docs/ and examples/
* fix typo colossalai/ applications/
* fix typo colossalai/cli fx kernel
```diff
@@ -92,7 +92,7 @@ class ColoTracer(Tracer):
             return proxy
 
         # if graph is traced for auto parallelism module, some extra node will be added during
-        # graph construction to deal with the compatability between bias addition and all reduce.
+        # graph construction to deal with the compatibility between bias addition and all reduce.
 
         # if no extra manipulation is applied, we just pass the origin arguments to create_proxy function
         # to create node on computation graph
```
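The comments in this hunk describe `ColoTracer`'s `create_proxy` path: extra nodes are inserted only when bias addition must be reconciled with all-reduce; otherwise the original arguments are forwarded unchanged. Below is a minimal sketch of that pass-through case using the stock `torch.fx.Tracer.create_proxy` API; the `PassThroughTracer` name and the `TinyNet` example module are hypothetical, not ColossalAI code:

```python
import torch
from torch.fx import Tracer


class PassThroughTracer(Tracer):
    # Sketch of the "no extra manipulation" branch described above: the
    # original arguments are handed straight to create_proxy so the node
    # lands on the computation graph unmodified.
    def create_proxy(self, kind, target, args, kwargs,
                     name=None, type_expr=None, proxy_factory_fn=None):
        proxy = super().create_proxy(kind, target, args, kwargs,
                                     name, type_expr, proxy_factory_fn)
        return proxy


class TinyNet(torch.nn.Module):
    def forward(self, x):
        return torch.relu(x + 1)


graph = PassThroughTracer().trace(TinyNet())
print(graph)  # nodes: placeholder x, add, relu, output
```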
```diff
@@ -208,7 +208,7 @@ class ColoTracer(Tracer):
             self.proxy_cls = ColoProxy
             self.tracer_type = TracerType.META
         else:
-            raise ValueError(f"Unrecognised tracer type {tracer_type}")
+            raise ValueError(f"Unrecognized tracer type {tracer_type}")
 
     def _meta_data_computing(self, kind, target, args, kwargs):
```
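The surrounding code dispatches on a `TracerType` enum and fails loudly on unrecognized members, which is why a consistent spelling in the error message matters when grepping logs. A self-contained sketch of that dispatch pattern follows; the `REG` member, `ToyTracer`, and the stand-in proxy classes are assumptions, not ColossalAI's real definitions:

```python
from enum import Enum, auto


class TracerType(Enum):
    REG = auto()   # assumed non-meta member; META appears in the hunk above
    META = auto()


class _RegProxy:   # stand-ins for the real proxy classes
    pass


class _MetaProxy:
    pass


class ToyTracer:
    def switch_type(self, tracer_type: TracerType) -> None:
        # Mirror of the if/elif/else dispatch above: every recognized
        # member configures the proxy class; anything else raises instead
        # of silently producing a mis-configured tracer.
        if tracer_type is TracerType.REG:
            self.proxy_cls = _RegProxy
            self.tracer_type = TracerType.REG
        elif tracer_type is TracerType.META:
            self.proxy_cls = _MetaProxy
            self.tracer_type = TracerType.META
        else:
            raise ValueError(f"Unrecognized tracer type {tracer_type}")
```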
```diff
@@ -445,7 +445,7 @@ class ColoTracer(Tracer):
     @staticmethod
     def forward(ctx, run_function, preserve_rng_state, *args):
-        # signal that the current tracing occurs within activaton checkpoint part
+        # signal that the current tracing occurs within activation checkpoint part
         self.inside_torch_checkpoint_func = True
         out = run_function(*args)
         self.inside_torch_checkpoint_func = False
```
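This last hunk brackets the checkpointed call with a boolean flag so that nodes traced inside the region can later be marked as activation-checkpointed. Here is a sketch of that flag pattern in isolation (class and method names are hypothetical); unlike the original, it uses `try/finally` so the flag is also reset if `run_function` raises:

```python
class CheckpointAwareTracer:
    """Toy version of the flag pattern in the hunk above."""

    def __init__(self):
        self.inside_torch_checkpoint_func = False
        self.recorded_calls = []

    def run_checkpointed(self, run_function, *args):
        # signal that the current tracing occurs within the activation
        # checkpoint part, then clear the flag once tracing leaves it
        self.inside_torch_checkpoint_func = True
        try:
            return run_function(*args)
        finally:
            self.inside_torch_checkpoint_func = False

    def record(self, name):
        # calls recorded while the flag is set are tagged for recomputation
        self.recorded_calls.append((name, self.inside_torch_checkpoint_func))


tracer = CheckpointAwareTracer()
tracer.run_checkpointed(lambda: tracer.record("linear"))
tracer.record("relu")
print(tracer.recorded_calls)  # [('linear', True), ('relu', False)]
```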