[hotfix] Fix missing pad token in examples & auto parallel codegen bug (#5606)

* fix missing pad token bug in the example scripts

* fix some auto parallel codegen bugs, though the result might not run on torch 2.1

---------

Co-authored-by: Edenzzzz <wtan45@wisc.edu>
Author: Edenzzzz
Date: 2024-04-18 18:15:50 +08:00
Committed by: GitHub
Parent: a0ad587c24
Commit: d83c633ca6
6 changed files with 8 additions and 5 deletions
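
The pad-token fix touches the example scripts and is not among the hunks shown below. As a hedged sketch only (the checkpoint name is hypothetical), the usual pattern for this kind of fix is to fall back to the EOS token when the tokenizer ships without a pad token:

    from transformers import AutoTokenizer

    # Hypothetical checkpoint, for illustration only.
    tokenizer = AutoTokenizer.from_pretrained("meta-llama/Llama-2-7b-hf")

    # Common fix when a tokenizer has no pad token: reuse the EOS token
    # so batched padding in the example script does not raise an error.
    if tokenizer.pad_token is None:
        tokenizer.pad_token = tokenizer.eos_token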

@@ -5,7 +5,7 @@ import torch
 import torch.nn as nn
 from colossalai.utils import _cast_float
-from colossalai.zero.legacy.gemini.tensor_utils import free_storage
+from colossalai.utils.common import free_storage
 from .region_manager import RegionManager
 from .util import GlobalRuntimeInfo

@@ -3,7 +3,8 @@ from typing import Dict, List, Tuple
 import torch
 from torch.fx import Node
-from colossalai.zero.legacy.gemini.tensor_utils import alloc_storage, free_storage
+from colossalai.utils.common import free_storage
+from colossalai.zero.gemini.chunk.chunk import alloc_storage
 class Region:
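
For context, a minimal usage sketch of the relocated helpers after this change; what the functions do is an assumption inferred from their names and their previous Gemini location (both are assumed to take a tensor and release or re-allocate its underlying storage):

    import torch

    # New import locations introduced by this hotfix.
    from colossalai.utils.common import free_storage
    from colossalai.zero.gemini.chunk.chunk import alloc_storage

    buf = torch.empty(1024, 1024)
    free_storage(buf)   # assumed: shrink the tensor's underlying storage to zero bytes
    alloc_storage(buf)  # assumed: re-allocate the storage so the tensor can be used again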