[Gemini] free and allocate cuda memory by tensor.storage, add grad hook (#2040)

Author: Zihao
Date: 2022-11-30 15:57:45 +08:00
Committed by: GitHub
Parent: 1e885329f4
Commit: 6a9158f1fa
4 changed files with 40 additions and 18 deletions


@@ -16,7 +16,7 @@ def run_fwd_bwd(model, data, label, criterion, enable_autocast=False, dtype=torc
     model.backward(loss)
 def run_param_wrapper_testing():
-    test_models = ['simple_net']
+    test_models = ['simple_net', 'repeated_computed_layers', 'nested_model']
     for model_name in test_models:
         get_components_func = non_distributed_component_funcs.get_callable(model_name)
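
Background sketch (not part of the diff above): the commit title refers to freeing and re-allocating CUDA memory through a tensor's storage and to registering a gradient hook. Below is a minimal PyTorch illustration of that general pattern; the helper names free_storage, alloc_storage, and register_grad_hook are chosen here for illustration and are not taken from the ColossalAI source.

```python
import torch

def free_storage(data: torch.Tensor) -> None:
    # Shrink the underlying storage to zero elements. The tensor keeps its
    # shape/dtype/device metadata, but the CUDA payload is released.
    if data.storage().size() > 0:
        data.storage().resize_(0)

def alloc_storage(data: torch.Tensor) -> None:
    # Re-grow the storage so the tensor can hold real data again
    # (contents are undefined until written).
    if data.storage().size() == 0:
        data.storage().resize_(data.numel())

def register_grad_hook(param: torch.nn.Parameter):
    # A tensor-level hook fires as soon as the gradient for `param` is
    # produced during backward, which is a natural point to hand the
    # gradient off and then free the parameter's storage.
    def hook(grad: torch.Tensor):
        # ... move or accumulate `grad` elsewhere here if needed ...
        return grad
    return param.register_hook(hook)

if __name__ == "__main__":
    device = "cuda" if torch.cuda.is_available() else "cpu"
    p = torch.nn.Parameter(torch.randn(4, 4, device=device))
    register_grad_hook(p)
    (p * 2).sum().backward()   # hook fires here
    free_storage(p.data)       # payload released, metadata kept
    alloc_storage(p.data)      # memory re-allocated for later use
```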