Mirror of https://github.com/csunny/DB-GPT.git (synced 2025-09-10 13:29:35 +00:00)
refactor: The first refactored version for sdk release (#907)
Co-authored-by: chengfangyin2 <chengfangyin3@jd.com>
dbgpt/_private/llm_metadata.py — new file, 43 lines added
@@ -0,0 +1,43 @@
from dbgpt._private.pydantic import Field, BaseModel

DEFAULT_CONTEXT_WINDOW = 3900
DEFAULT_NUM_OUTPUTS = 256


class LLMMetadata(BaseModel):
    context_window: int = Field(
        default=DEFAULT_CONTEXT_WINDOW,
        description=(
            "Total number of tokens the model can be input and output for one response."
        ),
    )
    num_output: int = Field(
        default=DEFAULT_NUM_OUTPUTS,
        description="Number of tokens the model can output when generating a response.",
    )
    is_chat_model: bool = Field(
        default=False,
        description=(
            "Set True if the model exposes a chat interface (i.e. can be passed a"
            " sequence of messages, rather than text), like OpenAI's"
            " /v1/chat/completions endpoint."
        ),
    )
    is_function_calling_model: bool = Field(
        default=False,
        # SEE: https://openai.com/blog/function-calling-and-other-api-updates
        description=(
            "Set True if the model supports function calling messages, similar to"
            " OpenAI's function calling API. For example, converting 'Email Anya to"
            " see if she wants to get coffee next Friday' to a function call like"
            " `send_email(to: string, body: string)`."
        ),
    )
    model_name: str = Field(
        default="unknown",
        description=(
            "The model's name used for logging, testing, and sanity checking. For some"
            " models this can be automatically discerned. For other models, like"
            " locally loaded models, this must be manually specified."
        ),
    )
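As a quick orientation for SDK users, below is a minimal usage sketch of the new model (assuming `dbgpt._private.pydantic` re-exports standard pydantic behavior; the model name and token limits are illustrative values, not taken from this commit):

from dbgpt._private.llm_metadata import LLMMetadata

# Illustrative metadata for a hypothetical locally loaded chat model;
# unspecified fields (e.g. is_function_calling_model) keep their defaults.
metadata = LLMMetadata(
    context_window=4096,
    num_output=512,
    is_chat_model=True,
    model_name="vicuna-13b-v1.5",
)

print(metadata.model_name, metadata.context_window, metadata.is_chat_model)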