feat(model): Support claude proxy models (#2155)

This commit is contained in:
Fangyin Cheng
2024-11-26 19:47:28 +08:00
committed by GitHub
parent 9d8673a02f
commit 61509dc5ea
20 changed files with 508 additions and 157 deletions

View File

@@ -1,8 +1,9 @@
from __future__ import annotations
import logging
from typing import TYPE_CHECKING, List, Optional, Union
from typing import TYPE_CHECKING, Any, Dict, List, Optional, Union
from dbgpt.core import ModelRequest, ModelRequestContext
from dbgpt.model.parameter import ProxyModelParameters
from dbgpt.model.proxy.base import ProxyLLMClient
from dbgpt.model.utils.token_utils import ProxyTokenizerWrapper
@@ -41,3 +42,30 @@ class ProxyModel:
int: token count, -1 if failed
"""
return self._tokenizer.count_token(messages, model_name)
def parse_model_request(
    params: Dict[str, Any], default_model: str, stream: bool = True
) -> ModelRequest:
    """Build a :class:`ModelRequest` from a raw request-parameter dict.

    Args:
        params (Dict[str, Any]): request params; must contain "messages",
            other keys ("temperature", "max_new_tokens", "stop", "top_p",
            "user_name", "request_id") are optional
        default_model (str): default model name
        stream (bool, optional): whether stream. Defaults to True.

    Returns:
        ModelRequest: the parsed model request
    """
    # Bind the getter once; absent optional keys resolve to None.
    get = params.get
    ctx = ModelRequestContext(
        stream=stream,
        user_name=get("user_name"),
        request_id=get("request_id"),
    )
    # "messages" is mandatory — a direct index raises KeyError if missing.
    return ModelRequest.build_request(
        default_model,
        messages=params["messages"],
        temperature=get("temperature"),
        context=ctx,
        max_new_tokens=get("max_new_tokens"),
        stop=get("stop"),
        top_p=get("top_p"),
    )