Mirror of https://github.com/hpcaitech/ColossalAI.git
[Gemini] free and allocate cuda memory by tensor.storage, add grad hook (#2040)
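The title names two mechanisms introduced by this PR. Below is a minimal plain-PyTorch sketch of both ideas, not the PR's actual implementation: releasing and re-allocating a CUDA tensor's memory through its underlying storage, and registering a gradient hook on a parameter. The helper names free_storage/alloc_storage and the hook body are illustrative assumptions.

import torch


def free_storage(t: torch.Tensor) -> None:
    # Shrinking the underlying storage to zero elements releases the CUDA
    # memory while the tensor object (shape/dtype metadata) stays alive.
    if t.storage().size() > 0:
        t.storage().resize_(0)


def alloc_storage(t: torch.Tensor) -> None:
    # Grow the storage back before the tensor is used again; the previous
    # contents are not preserved.
    if t.storage().size() == 0:
        t.storage().resize_(t.numel())


if torch.cuda.is_available():
    p = torch.nn.Parameter(torch.randn(1024, 1024, device='cuda'))

    # Grad hook: fires with the gradient during backward; a Gemini-style
    # runtime would move or bookkeep the gradient here.
    p.register_hook(lambda grad: grad)

    free_storage(p.data)    # parameter now holds no CUDA memory
    alloc_storage(p.data)   # memory is re-allocated on demand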
@@ -16,7 +16,7 @@ def run_fwd_bwd(model, data, label, criterion, enable_autocast=False, dtype=torc
     model.backward(loss)
 
 def run_param_wrapper_testing():
-    test_models = ['simple_net']
+    test_models = ['simple_net', 'repeated_computed_layers', 'nested_model']
 
     for model_name in test_models:
         get_components_func = non_distributed_component_funcs.get_callable(model_name)
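For context, the run_fwd_bwd helper named in the hunk header follows roughly this shape. Only its signature and the model.backward(loss) call are visible in the diff, so the autocast wrapping, the loss computation, and the torch.half default (truncated to "torc" in the header) are assumptions.

import torch


def run_fwd_bwd(model, data, label, criterion, enable_autocast=False, dtype=torch.half):
    # Assumed body: compute the loss under (optional) autocast, then let the
    # wrapped Gemini/ZeRO model drive the backward pass via model.backward.
    # The dtype default is an assumption; it is truncated in the diff header.
    with torch.cuda.amp.autocast(enabled=enable_autocast, dtype=dtype):
        if criterion:
            output = model(data)
            loss = criterion(output, label)
        else:
            loss = model(data, label)
    model.backward(loss)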