diff --git a/colossalai/fx/profiler/experimental/profiler_module/embedding.py b/colossalai/fx/profiler/experimental/profiler_module/embedding.py
index dca6f9453..a1ade5d3a 100644
--- a/colossalai/fx/profiler/experimental/profiler_module/embedding.py
+++ b/colossalai/fx/profiler/experimental/profiler_module/embedding.py
@@ -1,5 +1,7 @@
 from typing import Tuple
+
 import torch
+
 from ..registry import meta_profiler_module


@@ -8,4 +10,4 @@ def torch_nn_embedding(self: torch.nn.Embedding, input: torch.Tensor) -> Tuple[i
     # nn.Embedding is a dictionary lookup, so technically it has 0 FLOPs. (https://discuss.pytorch.org/t/correct-way-to-calculate-flops-in-model/67198/6)
     flops = 0
     macs = 0
-    return flops, macs
\ No newline at end of file
+    return flops, macs
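
For reference, a sketch of `embedding.py` as it should read after this patch. Everything below is taken directly from the hunks except the `@meta_profiler_module.register(torch.nn.Embedding)` decorator, which is an assumption inferred from the registry import and the hunk-header context (it sits between the two hunks and is not shown in the diff). The profiler contract here is to return a `(flops, macs)` pair for the module's forward pass; since `nn.Embedding.forward` is an index lookup into the weight table, both counts are zero regardless of input shape.

```python
from typing import Tuple

import torch

from ..registry import meta_profiler_module


# Assumed registration line: the registry import strongly suggests this
# decorator pattern, but it falls outside the hunks shown in the diff above.
@meta_profiler_module.register(torch.nn.Embedding)
def torch_nn_embedding(self: torch.nn.Embedding, input: torch.Tensor) -> Tuple[int, int]:
    # nn.Embedding is a dictionary lookup, so technically it has 0 FLOPs.
    # (https://discuss.pytorch.org/t/correct-way-to-calculate-flops-in-model/67198/6)
    flops = 0
    macs = 0
    return flops, macs
```

Note that the relative import (`from ..registry import ...`) means this module only runs in place inside the `profiler_module` package; the functional change in the patch itself is purely cosmetic (blank lines between import groups and a trailing newline at EOF).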