Mirror of https://github.com/hpcaitech/ColossalAI.git, synced 2025-09-11 13:59:08 +00:00
[misc] update pre-commit and run all files (#4752)
* [misc] update pre-commit
* [misc] run pre-commit
* [misc] remove useless configuration files
* [misc] ignore cuda for clang-format
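For context on the last two bullets, below is a minimal sketch of how a `.pre-commit-config.yaml` can run black on Python files while excluding CUDA sources from clang-format. The hook choices, revisions, and exclude pattern here are assumptions for illustration only; they are not the exact configuration introduced by this commit.

# Hypothetical .pre-commit-config.yaml sketch (illustrative, not the PR's actual file)
repos:
  - repo: https://github.com/psf/black
    rev: 23.9.1                  # placeholder revision
    hooks:
      - id: black                # reformats Python (quote style, call layout, etc.)

  - repo: https://github.com/pre-commit/mirrors-clang-format
    rev: v16.0.6                 # placeholder revision
    hooks:
      - id: clang-format
        types_or: [c, c++]
        exclude: '.*\.cu$'       # "ignore cuda for clang-format": skip .cu sources

With such a setup, `pre-commit run --all-files` applies the hooks to the whole tree, which is what produces mechanical diffs like the one below.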
@@ -27,7 +27,7 @@ def data_gen_for_conditional_generation():
     # labels = tokenizer("Das Haus ist wunderbar.", return_tensors="pt").input_ids
     data = data_gen_for_encoder_only()
     labels = torch.Tensor([[644, 4598, 229, 19250, 5, 1, 644, 4598, 229, 19250, 5, 1, 229, 19250, 5, 1]]).long()
-    data['labels'] = labels
+    data["labels"] = labels
     return data


@@ -36,7 +36,7 @@ def data_gen_for_t5_model():
     # decoder_input_ids = model._shift_right(input_ids)
     data = data_gen_for_encoder_only()
     decoder_input_ids = torch.Tensor([[0, 13959, 1566, 12, 2968, 10, 37, 629, 19, 1627, 5, 5, 19, 1627, 5, 5]]).long()
-    data['decoder_input_ids'] = decoder_input_ids
+    data["decoder_input_ids"] = decoder_input_ids
     return data


@@ -55,21 +55,27 @@ config = transformers.T5Config(d_model=128, num_layers=2, dropout_rate=0, decode
 # transformers.T5Model,
 # transformers.T5ForConditionalGeneration,
 # transformers.T5EncoderModel,
-model_zoo.register(name='transformers_t5',
-                   model_fn=lambda: transformers.T5Model(config),
-                   data_gen_fn=data_gen_for_t5_model,
-                   output_transform_fn=output_transform_fn,
-                   loss_fn=loss_fn_for_t5_model,
-                   model_attribute=ModelAttribute(has_control_flow=True))
-model_zoo.register(name='transformers_t5_for_conditional_generation',
-                   model_fn=lambda: transformers.T5ForConditionalGeneration(config),
-                   data_gen_fn=data_gen_for_conditional_generation,
-                   output_transform_fn=output_transform_fn,
-                   loss_fn=loss_fn_for_conditional_generation,
-                   model_attribute=ModelAttribute(has_control_flow=True))
-model_zoo.register(name='transformers_t5_encoder_model',
-                   model_fn=lambda: transformers.T5EncoderModel(config),
-                   data_gen_fn=data_gen_for_encoder_only,
-                   output_transform_fn=output_transform_fn,
-                   loss_fn=loss_fn_for_encoder_only,
-                   model_attribute=ModelAttribute(has_control_flow=True))
+model_zoo.register(
+    name="transformers_t5",
+    model_fn=lambda: transformers.T5Model(config),
+    data_gen_fn=data_gen_for_t5_model,
+    output_transform_fn=output_transform_fn,
+    loss_fn=loss_fn_for_t5_model,
+    model_attribute=ModelAttribute(has_control_flow=True),
+)
+model_zoo.register(
+    name="transformers_t5_for_conditional_generation",
+    model_fn=lambda: transformers.T5ForConditionalGeneration(config),
+    data_gen_fn=data_gen_for_conditional_generation,
+    output_transform_fn=output_transform_fn,
+    loss_fn=loss_fn_for_conditional_generation,
+    model_attribute=ModelAttribute(has_control_flow=True),
+)
+model_zoo.register(
+    name="transformers_t5_encoder_model",
+    model_fn=lambda: transformers.T5EncoderModel(config),
+    data_gen_fn=data_gen_for_encoder_only,
+    output_transform_fn=output_transform_fn,
+    loss_fn=loss_fn_for_encoder_only,
+    model_attribute=ModelAttribute(has_control_flow=True),
+)