Mirror of https://github.com/hpcaitech/ColossalAI.git
[moe] fix mixtral forward default value (#5329)
parent b60be18dcc
commit 956b561b54
@@ -437,7 +437,7 @@ class MixtralPipelineForwards:
         use_cache: Optional[bool] = None,
         output_attentions: Optional[bool] = None,
         output_hidden_states: Optional[bool] = None,
-        output_router_logits: Optional[bool] = True,
+        output_router_logits: Optional[bool] = None,
         return_dict: Optional[bool] = None,
         stage_manager: Optional[PipelineStageManager] = None,
         hidden_states: Optional[torch.FloatTensor] = None,
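Why the default moves from True to None: in Hugging Face-style forwards, a None argument usually means "fall back to the model config", so a hard-coded True would override whatever the user set in the config. The sketch below illustrates that resolution pattern only; the function name and config fields are assumptions for illustration, not the actual ColossalAI or transformers code.

    # Minimal sketch of None-means-use-config default resolution in a forward.
    # Names (resolve_forward_defaults, config.output_router_logits) are hypothetical.
    from typing import Optional

    def resolve_forward_defaults(
        config,
        output_router_logits: Optional[bool] = None,
        output_attentions: Optional[bool] = None,
    ):
        # With a None default, an omitted argument falls back to the config value;
        # a True default would silently ignore config.output_router_logits.
        output_router_logits = (
            output_router_logits
            if output_router_logits is not None
            else config.output_router_logits
        )
        output_attentions = (
            output_attentions
            if output_attentions is not None
            else config.output_attentions
        )
        return output_router_logits, output_attentions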