DB-GPT/dbgpt/model/proxy/llms/gitee.py
Aries-ckt 576da34e92 feat(model): Support Gitee models (#2257)
Co-authored-by: Fangyin Cheng <staneyffer@gmail.com>
2024-12-31 19:24:24 +08:00


import os
from typing import TYPE_CHECKING, Any, Dict, Optional, Union

from dbgpt.model.proxy.llms.proxy_model import ProxyModel, parse_model_request

from .chatgpt import OpenAILLMClient

if TYPE_CHECKING:
    from httpx._types import ProxiesTypes
    from openai import AsyncAzureOpenAI, AsyncOpenAI

    ClientType = Union[AsyncAzureOpenAI, AsyncOpenAI]

_GITEE_DEFAULT_MODEL = "Qwen2.5-72B-Instruct"


async def gitee_generate_stream(
    model: ProxyModel, tokenizer, params, device, context_len=2048
):
    """Stream chat completions from the Gitee proxy model."""
    client: GiteeLLMClient = model.proxy_llm_client
    request = parse_model_request(params, client.default_model, stream=True)
    async for r in client.generate_stream(request):
        yield r


class GiteeLLMClient(OpenAILLMClient):
    """Gitee LLM Client.

    Gitee's API is compatible with OpenAI's API, so we inherit from OpenAILLMClient.
    """

    def __init__(
        self,
        api_key: Optional[str] = None,
        api_base: Optional[str] = None,
        api_type: Optional[str] = None,
        api_version: Optional[str] = None,
        model: Optional[str] = None,
        proxies: Optional["ProxiesTypes"] = None,
        timeout: Optional[int] = 240,
        model_alias: Optional[str] = "gitee_proxyllm",
        context_length: Optional[int] = None,
        openai_client: Optional["ClientType"] = None,
        openai_kwargs: Optional[Dict[str, Any]] = None,
        **kwargs
    ):
        # Fall back to environment configuration and Gitee's OpenAI-compatible
        # endpoint when no explicit values are passed.
        api_base = (
            api_base or os.getenv("GITEE_API_BASE") or "https://ai.gitee.com/v1"
        )
        api_key = api_key or os.getenv("GITEE_API_KEY")
        model = model or _GITEE_DEFAULT_MODEL
        if not context_length:
            # Models advertising a 200k window get the larger context; otherwise
            # use a conservative 4096-token default.
            if "200k" in model:
                context_length = 200 * 1024
            else:
                context_length = 4096

        if not api_key:
            raise ValueError(
                "Gitee API key is required, please set 'GITEE_API_KEY' in environment "
                "or pass it as an argument."
            )

        super().__init__(
            api_key=api_key,
            api_base=api_base,
            api_type=api_type,
            api_version=api_version,
            model=model,
            proxies=proxies,
            timeout=timeout,
            model_alias=model_alias,
            context_length=context_length,
            openai_client=openai_client,
            openai_kwargs=openai_kwargs,
            **kwargs
        )

    @property
    def default_model(self) -> str:
        model = self._model
        if not model:
            model = _GITEE_DEFAULT_MODEL
        return model
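
Below is a minimal usage sketch, not part of the file above. It assumes GITEE_API_KEY is exported and that the params dict handed to parse_model_request carries an OpenAI-style messages list; in DB-GPT the model worker normally builds that dict, so its exact shape here is an illustrative assumption, as is reading the final ModelOutput's text field.

import asyncio

from dbgpt.model.proxy.llms.gitee import GiteeLLMClient
from dbgpt.model.proxy.llms.proxy_model import parse_model_request


async def _demo():
    # With no arguments, the client falls back to GITEE_API_KEY / GITEE_API_BASE
    # from the environment and to Qwen2.5-72B-Instruct as the default model.
    client = GiteeLLMClient()
    # Hypothetical request params; normally assembled by DB-GPT's worker layer.
    params = {"messages": [{"role": "user", "content": "Hello from DB-GPT"}]}
    request = parse_model_request(params, client.default_model, stream=True)
    final = None
    async for output in client.generate_stream(request):
        final = output  # keep only the last streamed chunk
    if final is not None:
        print(final.text)


asyncio.run(_demo())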