fix: default proxyllm generator function (#971)

This commit is contained in:
xtyuns 2023-12-25 20:00:04 +08:00 committed by GitHub
parent 0c46c339ca
commit 048fb6c402
No known key found for this signature in database
GPG Key ID: 4AEE18F83AFDEB23

View File

@@ -34,7 +34,7 @@ def proxyllm_generate_stream(
model_name = model_params.model_name
default_error_message = f"{model_name} LLM is not supported"
generator_function = generator_mapping.get(
-        model_name, lambda: default_error_message
+        model_name, lambda *args: [default_error_message]
)
yield from generator_function(model, tokenizer, params, device, context_len)