Files
ColossalAI/colossalai/booster/plugin/pp_plugin_base.py
Hongxin Liu 261eab02fb [plugin] add 3d parallel plugin (#4295)
* [amp] add mixed precision optimizer

* [plugin] add 3d parallel plugin

* [booster] support pipeline

* [plugin] 3d parallel plugin support clip grad norm

* [shardformer] fix sharder and add plugin test

* [plugin] rename 3d parallel plugin

* [ci] support testmon core pkg change detection (#4305)

* [hotfix] debug testmon

* [hotfix] fix llama

* [hotfix] fix p2p bugs

* [hotfix] fix requirements
2023-08-15 23:25:14 +08:00

22 lines
620 B
Python

from abc import abstractmethod
from typing import Any, Callable, Iterator
import torch
from colossalai.interface import ModelWrapper, OptimizerWrapper
from .plugin_base import Plugin
class PipelinePluginBase(Plugin):
    """Interface for booster plugins that execute models with pipeline parallelism.

    Concrete subclasses must provide :meth:`execute_pipeline`, which the
    booster calls instead of a plain forward/backward when a pipeline
    schedule is in use.
    """

    @abstractmethod
    def execute_pipeline(self,
                         data_iter: Iterator,
                         model: ModelWrapper,
                         criterion: Callable[[Any, Any], torch.Tensor],
                         optimizer: OptimizerWrapper,
                         return_loss: bool = True,
                         return_outputs: bool = False) -> dict:
        """Run the pipeline schedule over batches drawn from ``data_iter``.

        Args:
            data_iter: Iterator yielding the input batches to feed the pipeline.
            model: The wrapped model to execute.
            criterion: Callable returning a scalar ``torch.Tensor`` loss from two
                arguments (presumably model outputs and the input batch — confirm
                against concrete implementations).
            optimizer: The wrapped optimizer used during the pipeline pass.
            return_loss: If ``True``, include the loss in the returned dict.
            return_outputs: If ``True``, include model outputs in the returned dict.

        Returns:
            A dict of results; exact keys are defined by the concrete plugin.
        """
        ...