Mirror of https://github.com/hpcaitech/ColossalAI.git, synced 2025-09-16 14:41:53 +00:00.
[pipeline] set optimizer to optional in execute_pipeline (#4630)

* set optimizer to optional in execute_pipeline
* arrange device and mixed precision in booster init
* fix execute_pipeline in booster.py

This commit is contained in:
@@ -110,9 +110,9 @@ def examine_pp(num_micro_batches):
         torch_loss.backward()

-        pp_ret = schedule.forward_backward_step(sharded_model,
-                                                pp_optimizer,
-                                                iter(input_list),
-                                                criterion,
-                                                return_loss=True,
-                                                return_outputs=True)
+        pp_ret = schedule.forward_backward_step(sharded_model,
+                                                iter(input_list),
+                                                criterion,
+                                                pp_optimizer,
+                                                return_loss=True,
+                                                return_outputs=True)
@@ -90,9 +90,9 @@ def examine_pp():
         torch_loss.backward()

-        pp_ret = schedule.forward_backward_step(sharded_model,
-                                                pp_optimizer,
-                                                iter(input_list),
-                                                criterion,
-                                                return_loss=True,
-                                                return_outputs=True)
+        pp_ret = schedule.forward_backward_step(sharded_model,
+                                                iter(input_list),
+                                                criterion,
+                                                pp_optimizer,
+                                                return_loss=True,
+                                                return_outputs=True)
Reference in New Issue
Block a user