Mirror of https://github.com/hpcaitech/ColossalAI.git, synced 2025-08-08 11:27:24 +00:00
[pre-commit.ci] auto fixes from pre-commit.com hooks
for more information, see https://pre-commit.ci
This commit is contained in:
parent 0dede489d6
commit 89917e247b
@@ -1,3 +1,4 @@
+import warnings
 from typing import List, Optional, Tuple, Union
 
 import torch
@@ -21,7 +22,6 @@ from transformers.models.falcon.modeling_falcon import (
     build_alibi_tensor,
 )
 from transformers.utils import logging
-import warnings
 
 from colossalai.pipeline.stage_manager import PipelineStageManager
 from colossalai.shardformer.shard import ShardConfig
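
The two hunks above are a single import reorder: import warnings moves from the middle of the transformers import block up to the top of the file. This matches the stdlib / third-party / first-party grouping that isort-style pre-commit hooks enforce (an assumption; the repository's exact hook configuration is not shown in this commit). A minimal sketch of the resulting order, with the group labels added here for illustration:

# 1. Standard library
import warnings
from typing import List, Optional, Tuple, Union

# 2. Third-party packages
import torch
from transformers.utils import logging

# 3. First-party packages
from colossalai.pipeline.stage_manager import PipelineStageManager
from colossalai.shardformer.shard import ShardConfig
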
@@ -322,7 +322,7 @@ class FalconPipelineForwards:
 
         hidden_states = outputs[0]
         if use_cache is True:
-            next_decoder_cache = outputs[1]
+            outputs[1]
 
         if output_attentions:
             all_self_attentions = all_self_attentions + (outputs[2 if use_cache else 1],)
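
The last hunk replaces the assignment next_decoder_cache = outputs[1] with the bare expression outputs[1]. Dropping a write-only binding while keeping its right-hand side is the characteristic rewrite of an autoflake-style remove-unused-variables fixer (an assumption; the hook that produced it is not named in this commit): evaluating the expression is preserved, while the never-read name disappears. A minimal sketch with a hypothetical function name:

def forward_sketch(outputs, use_cache, output_attentions, all_self_attentions):
    # Hypothetical reduction of the FalconPipelineForwards code path
    # touched by the hunk above.
    hidden_states = outputs[0]
    if use_cache is True:
        # Before the fix: next_decoder_cache = outputs[1]
        # The name was never read afterwards, so only the expression is kept:
        outputs[1]
    if output_attentions:
        all_self_attentions = all_self_attentions + (outputs[2 if use_cache else 1],)
    return hidden_states, all_self_attentions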