Mirror of https://github.com/hpcaitech/ColossalAI.git (synced 2025-09-02 01:28:31 +00:00)
[pipeline] set optimizer to optional in execute_pipeline (#4630)
* set optimizer to optional in execute_pipeline
* arrange device and mixed precision in booster init
* fix execute_pipeline in booster.py
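For reference, a minimal sketch of what the booster-side change amounts to: the optimizer parameter of execute_pipeline gains a None default, so forward-only callers (evaluation) can omit it. The booster.py hunk is not shown on this page, so the signature below is an assumption inferred from the call sites changed in this diff, not the exact library code.

# Hedged sketch only: exact signature and defaults are assumptions, not the
# real booster.py code, which is not part of the hunks shown here.
from typing import Callable, Iterable, Optional

import torch.nn as nn
from torch.optim import Optimizer


def execute_pipeline(data_iter: Iterable,
                     model: nn.Module,
                     criterion: Callable,
                     optimizer: Optional[Optimizer] = None,  # now optional; evaluation passes nothing
                     return_loss: bool = True,
                     return_outputs: bool = False) -> dict:
    ...  # forward through the pipeline schedule; optimizer-driven steps only when one is given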
@@ -46,7 +46,6 @@ def move_to_cuda(batch):
 @torch.no_grad()
 def evaluate_model(
     model: nn.Module,
-    optimizer,
     criterion,
     test_dataloader: Union[DataLoader, List[DataLoader]],
     num_labels: int,
@@ -71,12 +70,7 @@ def evaluate_model(
     current_rank = dist.get_rank()
     #TODO pass dataloader to execute_pipeline directly
     batch = iter([batch])
-    outputs = booster.execute_pipeline(batch,
-                                       model,
-                                       criterion,
-                                       optimizer,
-                                       return_loss=True,
-                                       return_outputs=True)
+    outputs = booster.execute_pipeline(batch, model, criterion, return_loss=True, return_outputs=True)

     if booster.plugin.stage_manager.is_last_stage():
         val_loss = outputs["loss"]
@@ -304,7 +298,7 @@ def main():
     for epoch in range(NUM_EPOCHS):
         train_epoch(epoch, model, optimizer, _criterion, lr_scheduler, train_dataloader, booster, coordinator)

-    results = evaluate_model(model, optimizer, _criterion, test_dataloader, data_builder.num_labels, args.task,
+    results = evaluate_model(model, _criterion, test_dataloader, data_builder.num_labels, args.task,
                              data_builder.eval_splits, booster, coordinator)

     if coordinator.is_master():
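Taken together, the evaluation path now drives the pipeline without touching an optimizer at all. A condensed usage sketch follows; the evaluate_step wrapper name and the surrounding booster/plugin setup are assumptions, while the execute_pipeline call itself mirrors the new line in the hunk above.

import torch


@torch.no_grad()
def evaluate_step(batch, model, criterion, booster):
    # Wrap the single batch in an iterator, as the example script does
    # (its own TODO: pass the dataloader to execute_pipeline directly).
    batch = iter([batch])
    # No optimizer argument any more -- evaluation is forward-only.
    outputs = booster.execute_pipeline(batch, model, criterion, return_loss=True, return_outputs=True)
    # Loss and outputs are only materialized on the last pipeline stage.
    if booster.plugin.stage_manager.is_last_stage():
        return outputs["loss"]
    return None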