support UniEval and add CHRF metric (#3924)

Co-authored-by: Yuanchen Xu <yuanchen.xu00@gmail.com>
Author: Yuanchen
Date: 2023-06-08 17:38:47 +08:00 (committed by GitHub)
Parent: 33eef714db
Commit: 21c4c0b1a0
12 changed files with 978 additions and 61 deletions

@@ -40,7 +40,7 @@ def main(args):
     # initialize evaluator
     evaluator = Evaluator(metrics_per_category, battle_prompt, gpt_evaluation_prompt, args.gpt_model,
-                          config["language"])
+                          config["language"], config.get("path_for_UniEval", None))
     if len(args.model_name_list) == 2:
         answers1 = jload(args.answer_file_list[0])
         answers2 = jload(args.answer_file_list[1])
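
The patched call reads the new key with config.get("path_for_UniEval", None), so existing configs without a UniEval model path continue to work and simply pass None through to the Evaluator. The sketch below illustrates a config carrying the new key; apart from "language" and "path_for_UniEval", which appear in the diff, the keys, values, and path are assumptions for illustration, not the exact schema used by the evaluation script.

# Hypothetical evaluation config. Only "language" and "path_for_UniEval"
# come from the diff above; everything else here is an assumed example.
config = {
    "language": "en",
    "path_for_UniEval": "path/to/unieval_checkpoints",  # optional; leave out to pass None
}

# The evaluator is then constructed as in the patched main() (names taken from the diff):
# evaluator = Evaluator(metrics_per_category, battle_prompt, gpt_evaluation_prompt,
#                       args.gpt_model, config["language"],
#                       config.get("path_for_UniEval", None))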