mirror of
https://github.com/hpcaitech/ColossalAI.git
synced 2025-09-10 21:40:02 +00:00
[hotfix] Fix examples no pad token & auto parallel codegen bug; (#5606)
* fix no pad token bug
* fixed some auto parallel codegen bugs, but might not run on torch 2.1

Co-authored-by: Edenzzzz <wtan45@wisc.edu>
This commit is contained in:
@@ -3,7 +3,8 @@ from typing import Dict, List, Tuple
 import torch
 from torch.fx import Node
 
-from colossalai.zero.legacy.gemini.tensor_utils import alloc_storage, free_storage
+from colossalai.utils.common import free_storage
+from colossalai.zero.gemini.chunk.chunk import alloc_storage
 
 
 class Region:
Reference in New Issue
Block a user