Mirror of https://github.com/hpcaitech/ColossalAI.git
Synced 2025-08-15 14:43:13 +00:00
Merge branch 'dev/zero-offload' into offload

commit e893f88a4f
```diff
@@ -110,4 +110,6 @@ class DistCrossEntropy(Function):
 def cross_entropy_1d(
     vocab_logits: torch.Tensor, labels: torch.Tensor, ignore_index: int = -100, process_group: ProcessGroup = None
 ) -> torch.Tensor:
     return DistCrossEntropy.apply(vocab_logits, labels, ignore_index, process_group)
```
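For context, a minimal usage sketch of the updated helper follows. The shapes, names, and the flattening step are illustrative assumptions, not taken from this commit; a real tensor-parallel run would have `torch.distributed` initialized and would pass the tensor-parallel process group instead of `None`.

```python
# Hypothetical usage of cross_entropy_1d after this change. Shapes and the
# flatten step are assumptions for illustration. In a real run, `vocab_logits`
# holds only this rank's vocabulary shard, `labels` holds global token ids,
# and `process_group` is the initialized tensor-parallel ProcessGroup.
import torch
from colossalai.shardformer.layer import cross_entropy_1d

logits = torch.randn(4, 128, 8000)          # (batch, seq_len, vocab_shard_size)
labels = torch.randint(0, 8000, (4, 128))   # token ids; -100 marks ignored positions

loss = cross_entropy_1d(
    logits.view(-1, logits.size(-1)),       # (batch * seq_len, vocab_shard_size)
    labels.view(-1),
    ignore_index=-100,
    process_group=None,                     # falls back to the default group
)
```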
```diff
@@ -24,10 +24,7 @@ from transformers.models.llama.modeling_llama import (
 from transformers.utils import logging

 from colossalai.pipeline.stage_manager import PipelineStageManager
-from colossalai.shardformer.layer._operation import (
-    gather_forward_split_backward,
-    split_forward_gather_backward,
-)
+from colossalai.shardformer.layer._operation import gather_forward_split_backward, split_forward_gather_backward
 from colossalai.shardformer.shard import ShardConfig

 from ..layer import ColoAttention, cross_entropy_1d
```
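The collapsed import touches the two autograd helpers used for sequence parallelism: identity-like ops in the forward pass whose backward performs the opposite communication. The sketch below shows the pattern they enable; the dimension index, the stand-in computation, and the exact argument order are assumptions to be checked against the installed `_operation` module.

```python
# Minimal sketch of the split/gather sequence-parallel pattern (assumed
# signatures: (input_, dim, process_group); verify against the installed
# colossalai.shardformer.layer._operation).
import torch
from colossalai.shardformer.layer._operation import gather_forward_split_backward, split_forward_gather_backward

def sequence_parallel_block(hidden_states: torch.Tensor, process_group) -> torch.Tensor:
    # Forward: each rank keeps its slice of the sequence dimension (dim=1);
    # backward: gradients are gathered so upstream sees the full sequence.
    local_states = split_forward_gather_backward(hidden_states, 1, process_group)
    local_states = local_states * 2.0  # stand-in for per-rank computation
    # Forward: reassemble the full sequence; backward: split the gradient again.
    return gather_forward_split_backward(local_states, 1, process_group)
```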