rm torch.cuda.synchronize

yuehuayingxueluo 2024-01-09 15:53:04 +08:00 committed by FrankLeeeee
parent fab294c7f4
commit 10e3c9f923


@@ -198,8 +198,6 @@ class RequestHandler:
if type in config_dict and config_dict[type] is not None:
logits = logit_processor(type, logits, config_dict[type])
- torch.cuda.synchronize()
# calculate probs
probs = torch.softmax(logits, dim=-1, dtype=torch.float)
logprobs = torch.log_softmax(logits, dim=-1, dtype=torch.float)
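For context, a minimal sketch of why the explicit synchronization can be dropped (illustrative only, not the repository's code; the function name and tensor shapes are assumptions): PyTorch issues CUDA kernels asynchronously on the current stream, and kernels on the same stream execute in the order they were launched, so the softmax/log_softmax calls already see the fully processed logits without a torch.cuda.synchronize() in between.

import torch

def compute_probs(logits: torch.Tensor):
    # Kernels on the same CUDA stream run in launch order, so the softmax
    # below operates on the already-processed logits; no explicit
    # torch.cuda.synchronize() is required here.
    probs = torch.softmax(logits, dim=-1, dtype=torch.float)
    logprobs = torch.log_softmax(logits, dim=-1, dtype=torch.float)
    return probs, logprobs

# Illustrative usage: batch of 4 sequences over a 32000-token vocabulary.
device = "cuda" if torch.cuda.is_available() else "cpu"
probs, logprobs = compute_probs(torch.randn(4, 32000, device=device))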