feat(core): Support i18n (#1327)

Author: Fangyin Cheng
Date: 2024-03-25 20:15:39 +08:00
Committed by: GitHub
Parent: fa06be64c1
Commit: fcc325d411

179 changed files with 12052 additions and 69512 deletions

@@ -21,7 +21,7 @@ from dbgpt.app.openapi.api_view_model import (
 from dbgpt.app.scene import BaseChat, ChatFactory, ChatScene
 from dbgpt.component import ComponentType
 from dbgpt.configs.model_config import KNOWLEDGE_UPLOAD_ROOT_PATH
-from dbgpt.core.awel import CommonLLMHttpRequestBody, CommonLLMHTTPRequestContext
+from dbgpt.core.awel import CommonLLMHttpRequestBody
 from dbgpt.core.schema.api import (
     ChatCompletionResponseStreamChoice,
     ChatCompletionStreamResponse,
@@ -355,17 +355,21 @@ async def chat_completions(
             media_type="text/event-stream",
         )
     elif dialogue.chat_mode == ChatScene.ChatFlow.value():
-        flow_ctx = CommonLLMHTTPRequestContext(
-            conv_uid=dialogue.conv_uid,
-            chat_mode=dialogue.chat_mode,
-            user_name=dialogue.user_name,
-            sys_code=dialogue.sys_code,
-        )
         flow_req = CommonLLMHttpRequestBody(
             model=dialogue.model_name,
             messages=dialogue.user_input,
             stream=True,
-            context=flow_ctx,
+            # context=flow_ctx,
+            # temperature=
+            # max_new_tokens=
+            # enable_vis=
+            conv_uid=dialogue.conv_uid,
+            span_id=root_tracer.get_current_span_id(),
+            chat_mode=dialogue.chat_mode,
+            chat_param=dialogue.select_param,
+            user_name=dialogue.user_name,
+            sys_code=dialogue.sys_code,
+            incremental=dialogue.incremental,
         )
         return StreamingResponse(
             flow_service.chat_flow(dialogue.select_param, flow_req),
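
The hunk above flattens the request: the fields that used to travel on a separate CommonLLMHTTPRequestContext become keyword arguments of CommonLLMHttpRequestBody itself. A minimal sketch of how a caller builds the new request, using the field names shown in the hunk; the concrete values are hypothetical:

    from dbgpt.core.awel import CommonLLMHttpRequestBody

    # Hypothetical values; the keyword names mirror the added lines above.
    flow_req = CommonLLMHttpRequestBody(
        model="proxyllm",          # model that serves the flow
        messages="Hello, DB-GPT",  # the raw user input
        stream=True,
        conv_uid="conv-123",       # fields formerly carried by the
        chat_mode="chat_flow",     # removed CommonLLMHTTPRequestContext
        chat_param="my_flow",
        user_name="alice",
        sys_code="dbgpt",
        incremental=False,
    )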

@@ -19,7 +19,7 @@ from dbgpt.app.openapi.api_v1.api_v1 import (
 from dbgpt.app.scene import BaseChat, ChatScene
 from dbgpt.client.schema import ChatCompletionRequestBody, ChatMode
 from dbgpt.component import logger
-from dbgpt.core.awel import CommonLLMHttpRequestBody, CommonLLMHTTPRequestContext
+from dbgpt.core.awel import CommonLLMHttpRequestBody
 from dbgpt.core.schema.api import (
     ChatCompletionResponse,
     ChatCompletionResponseChoice,
@@ -253,18 +253,7 @@ async def chat_flow_stream_wrapper(
         token (APIToken): token
     """
     flow_service = get_chat_flow()
-    flow_ctx = CommonLLMHTTPRequestContext(
-        conv_uid=request.conv_uid,
-        chat_mode=request.chat_mode,
-        user_name=request.user_name,
-        sys_code=request.sys_code,
-    )
-    flow_req = CommonLLMHttpRequestBody(
-        model=request.model,
-        messages=request.chat_param,
-        stream=True,
-        context=flow_ctx,
-    )
+    flow_req = CommonLLMHttpRequestBody(**request.dict())
     async for output in flow_service.chat_flow(request.chat_param, flow_req):
         if output.startswith("data: [DONE]"):
             yield output
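
Because ChatCompletionRequestBody shares its field names (model, chat_param, conv_uid, chat_mode, user_name, sys_code, ...) with the flattened CommonLLMHttpRequestBody, the whole two-object construction collapses into a single dictionary unpack. A sketch of the before/after, assuming both are Pydantic models whose default config ignores unknown keys:

    # Before: copy shared fields onto a context object, then attach it.
    # flow_ctx = CommonLLMHTTPRequestContext(conv_uid=request.conv_uid, ...)
    # flow_req = CommonLLMHttpRequestBody(..., context=flow_ctx)

    # After: serialize the incoming request and unpack it directly;
    # .dict() is the Pydantic v1-style model-to-dict method used above.
    flow_req = CommonLLMHttpRequestBody(**request.dict())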