[fx] added activation checkpointing annotation (#1349)

* [fx] added activation checkpointing annotation

* polish code

* polish code
Author: Frank Lee
Date: 2022-07-21 11:14:28 +08:00
Committed by: GitHub
Parent: 051592c64e
Commit: 05fae1fd56
2 changed files with 113 additions and 4 deletions

@@ -0,0 +1,62 @@
import torch
import torch.nn as nn
from torch.fx import GraphModule
from torch.utils.checkpoint import checkpoint

from colossalai.fx import ColoTracer


class MLP(torch.nn.Module):

    def __init__(self):
        super().__init__()
        self.linear1 = torch.nn.Linear(4, 4)
        self.linear2 = torch.nn.Linear(4, 4)

    def forward(self, x):
        x = self.linear1(x)
        x = self.linear2(x)
        return x


# Simple module for demonstration
class MyModule(torch.nn.Module):

    def __init__(self):
        super().__init__()
        self.mlp_1 = MLP()
        self.mlp_2 = MLP()
        self.output = torch.nn.Linear(4, 4)

    def forward(self, x):
        # each checkpointed call should become its own annotated region
        x = checkpoint(self.mlp_1, x)
        x = checkpoint(self.mlp_2, x)
        x = self.output(x)
        return x


def test_activation_checkpoint_annotation():
    module = MyModule()

    # test tracing with activation checkpoint
    tracer = ColoTracer(trace_act_ckpt=True)
    graph = tracer.trace(module)
    gm = GraphModule(module, graph)

    # nodes inside the first checkpointed call are tagged with region index 0
    for node in gm.graph.nodes:
        if node.name in ['mlp_1_linear1', 'mlp_1_linear2']:
            assert getattr(node, 'activation_checkpoint', -1) == 0

    # nodes inside the second checkpointed call are tagged with region index 1
    for node in gm.graph.nodes:
        if node.name in ['mlp_2_linear1', 'mlp_2_linear2']:
            assert getattr(node, 'activation_checkpoint', -1) == 1

    # test tracing with annotation disabled: no node carries the attribute
    tracer = ColoTracer(trace_act_ckpt=False)
    graph = tracer.trace(module)
    gm = GraphModule(module, graph)

    for node in gm.graph.nodes:
        assert not hasattr(node, 'activation_checkpoint')


if __name__ == '__main__':
    test_activation_checkpoint_annotation()
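
For context, a downstream pass that consumes these annotations might bucket nodes by their `activation_checkpoint` index to recover the checkpoint regions. The sketch below is illustrative only: it assumes the annotated `gm` produced by the test above, and `group_checkpoint_regions` is a hypothetical helper, not part of the colossalai.fx API.

from collections import defaultdict

def group_checkpoint_regions(gm: GraphModule):
    # Hypothetical helper: bucket graph nodes by the integer index that
    # ColoTracer attaches as `activation_checkpoint` during tracing.
    # Nodes without the attribute fall outside any checkpoint region.
    regions = defaultdict(list)
    for node in gm.graph.nodes:
        ckpt_idx = getattr(node, 'activation_checkpoint', None)
        if ckpt_idx is not None:
            regions[ckpt_idx].append(node)
    return dict(regions)

Run against the annotated `gm` above, this would yield region 0 holding the `mlp_1_*` nodes and region 1 holding the `mlp_2_*` nodes, matching the assertions in the test.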