[test] fix gemini checkpoint and gpt test (#4620)

This commit is contained in:
Hongxin Liu
2023-09-05 16:02:23 +08:00
committed by GitHub
parent e71d245293
commit bd18678478
2 changed files with 2 additions and 3 deletions

View File

@@ -32,7 +32,7 @@ def exam_from_pretrained(plugin_type: str, model_name: str, shard=True, size_per
elif plugin_type == 'zero':
plugin = LowLevelZeroPlugin(stage=2, max_norm=1.0, initial_scale=32)
elif plugin_type == 'gemini':
-        plugin = GeminiPlugin(placement_policy='cuda', precision="fp16", initial_scale=32)
+        plugin = GeminiPlugin(precision="fp16", initial_scale=32)
else:
raise ValueError(f"Plugin with type {plugin_type} is invalid, please check your argument.")