From 3a7a2cbbb826d657490960629ea47f581962bfff Mon Sep 17 00:00:00 2001 From: Fangyin Cheng Date: Wed, 27 Mar 2024 12:50:05 +0800 Subject: [PATCH] feat: Run AWEL flow in CLI (#1341) --- dbgpt/app/openapi/api_v1/api_v1.py | 2 +- dbgpt/app/openapi/api_v2.py | 70 +-- dbgpt/cli/cli_scripts.py | 33 +- dbgpt/client/_cli.py | 247 +++++++++++ dbgpt/client/client.py | 98 ++-- dbgpt/client/flow.py | 8 +- dbgpt/core/awel/operators/base.py | 6 + dbgpt/core/awel/trigger/iterator_trigger.py | 8 +- dbgpt/core/interface/llm.py | 6 + .../core/interface/operators/llm_operator.py | 6 +- dbgpt/core/schema/api.py | 8 + dbgpt/model/utils/chatgpt_utils.py | 5 +- dbgpt/serve/flow/api/endpoints.py | 8 +- dbgpt/serve/flow/service/service.py | 417 +++++++++++++----- dbgpt/util/console/__init__.py | 3 + dbgpt/util/console/console.py | 71 +++ dbgpt/util/dbgpts/cli.py | 77 +++- dbgpt/util/dbgpts/loader.py | 7 +- dbgpt/util/dbgpts/repo.py | 136 ++++-- i18n/Makefile | 22 +- i18n/locales/fr/LC_MESSAGES/dbgpt_client.mo | Bin 0 -> 363 bytes i18n/locales/fr/LC_MESSAGES/dbgpt_client.po | 78 ++++ i18n/locales/fr/LC_MESSAGES/dbgpt_model.mo | Bin 2940 -> 2944 bytes i18n/locales/fr/LC_MESSAGES/dbgpt_model.po | 15 +- i18n/locales/ja/LC_MESSAGES/dbgpt_client.mo | Bin 0 -> 357 bytes i18n/locales/ja/LC_MESSAGES/dbgpt_client.po | 78 ++++ i18n/locales/ja/LC_MESSAGES/dbgpt_model.mo | Bin 989 -> 2345 bytes i18n/locales/ja/LC_MESSAGES/dbgpt_model.po | 98 +--- i18n/locales/ko/LC_MESSAGES/dbgpt_client.mo | Bin 0 -> 357 bytes i18n/locales/ko/LC_MESSAGES/dbgpt_client.po | 78 ++++ i18n/locales/ko/LC_MESSAGES/dbgpt_model.mo | Bin 2846 -> 2800 bytes i18n/locales/ko/LC_MESSAGES/dbgpt_model.po | 14 +- i18n/locales/ru/LC_MESSAGES/dbgpt_client.mo | Bin 0 -> 438 bytes i18n/locales/ru/LC_MESSAGES/dbgpt_client.po | 79 ++++ i18n/locales/ru/LC_MESSAGES/dbgpt_model.mo | Bin 3454 -> 3457 bytes i18n/locales/ru/LC_MESSAGES/dbgpt_model.po | 9 +- .../locales/zh_CN/LC_MESSAGES/dbgpt_client.mo | Bin 0 -> 1393 bytes .../locales/zh_CN/LC_MESSAGES/dbgpt_client.po | 62 +++ i18n/locales/zh_CN/LC_MESSAGES/dbgpt_model.mo | Bin 907 -> 2709 bytes i18n/locales/zh_CN/LC_MESSAGES/dbgpt_model.po | 53 +-- i18n/translate_util.py | 66 ++- setup.py | 8 +- 42 files changed, 1454 insertions(+), 422 deletions(-) create mode 100644 dbgpt/client/_cli.py create mode 100644 dbgpt/util/console/__init__.py create mode 100644 dbgpt/util/console/console.py create mode 100644 i18n/locales/fr/LC_MESSAGES/dbgpt_client.mo create mode 100644 i18n/locales/fr/LC_MESSAGES/dbgpt_client.po create mode 100644 i18n/locales/ja/LC_MESSAGES/dbgpt_client.mo create mode 100644 i18n/locales/ja/LC_MESSAGES/dbgpt_client.po create mode 100644 i18n/locales/ko/LC_MESSAGES/dbgpt_client.mo create mode 100644 i18n/locales/ko/LC_MESSAGES/dbgpt_client.po create mode 100644 i18n/locales/ru/LC_MESSAGES/dbgpt_client.mo create mode 100644 i18n/locales/ru/LC_MESSAGES/dbgpt_client.po create mode 100644 i18n/locales/zh_CN/LC_MESSAGES/dbgpt_client.mo create mode 100644 i18n/locales/zh_CN/LC_MESSAGES/dbgpt_client.po diff --git a/dbgpt/app/openapi/api_v1/api_v1.py b/dbgpt/app/openapi/api_v1/api_v1.py index a121bdc3b..f782e5655 100644 --- a/dbgpt/app/openapi/api_v1/api_v1.py +++ b/dbgpt/app/openapi/api_v1/api_v1.py @@ -372,7 +372,7 @@ async def chat_completions( incremental=dialogue.incremental, ) return StreamingResponse( - flow_service.chat_flow(dialogue.select_param, flow_req), + flow_service.chat_stream_flow_str(dialogue.select_param, flow_req), headers=headers, media_type="text/event-stream", ) diff --git 
a/dbgpt/app/openapi/api_v2.py b/dbgpt/app/openapi/api_v2.py index bd283bc17..14cf2f554 100644 --- a/dbgpt/app/openapi/api_v2.py +++ b/dbgpt/app/openapi/api_v2.py @@ -2,11 +2,11 @@ import json import re import time import uuid -from typing import Optional +from typing import AsyncIterator, Optional from fastapi import APIRouter, Body, Depends, HTTPException from fastapi.security import HTTPAuthorizationCredentials, HTTPBearer -from starlette.responses import StreamingResponse +from starlette.responses import JSONResponse, StreamingResponse from dbgpt.app.openapi.api_v1.api_v1 import ( CHAT_FACTORY, @@ -27,6 +27,7 @@ from dbgpt.core.schema.api import ( ChatCompletionStreamResponse, ChatMessage, DeltaMessage, + ErrorResponse, UsageInfo, ) from dbgpt.model.cluster.apiserver.api import APISettings @@ -114,11 +115,14 @@ async def chat_completions( media_type="text/event-stream", ) elif request.chat_mode == ChatMode.CHAT_AWEL_FLOW.value: - return StreamingResponse( - chat_flow_stream_wrapper(request), - headers=headers, - media_type="text/event-stream", - ) + if not request.stream: + return await chat_flow_wrapper(request) + else: + return StreamingResponse( + chat_flow_stream_wrapper(request), + headers=headers, + media_type="text/event-stream", + ) elif ( request.chat_mode is None or request.chat_mode == ChatMode.CHAT_NORMAL.value @@ -244,35 +248,43 @@ async def chat_app_stream_wrapper(request: ChatCompletionRequestBody = None): yield "data: [DONE]\n\n" +async def chat_flow_wrapper(request: ChatCompletionRequestBody): + flow_service = get_chat_flow() + flow_req = CommonLLMHttpRequestBody(**request.dict()) + flow_uid = request.chat_param + output = await flow_service.safe_chat_flow(flow_uid, flow_req) + if not output.success: + return JSONResponse( + ErrorResponse(message=output.text, code=output.error_code).dict(), + status_code=400, + ) + else: + choice_data = ChatCompletionResponseChoice( + index=0, + message=ChatMessage(role="assistant", content=output.text), + ) + if output.usage: + usage = UsageInfo(**output.usage) + else: + usage = UsageInfo() + return ChatCompletionResponse( + id=request.conv_uid, choices=[choice_data], model=request.model, usage=usage + ) + + async def chat_flow_stream_wrapper( - request: ChatCompletionRequestBody = None, -): + request: ChatCompletionRequestBody, +) -> AsyncIterator[str]: """chat app stream Args: request (OpenAPIChatCompletionRequest): request - token (APIToken): token """ flow_service = get_chat_flow() flow_req = CommonLLMHttpRequestBody(**request.dict()) - async for output in flow_service.chat_flow(request.chat_param, flow_req): - if output.startswith("data: [DONE]"): - yield output - if output.startswith("data:"): - output = output[len("data: ") :] - choice_data = ChatCompletionResponseStreamChoice( - index=0, - delta=DeltaMessage(role="assistant", content=output), - ) - chunk = ChatCompletionStreamResponse( - id=request.conv_uid, - choices=[choice_data], - model=request.model, - created=int(time.time()), - ) - chat_completion_response = ( - f"data: {chunk.json(exclude_unset=True, ensure_ascii=False)}\n\n" - ) - yield chat_completion_response + flow_uid = request.chat_param + + async for output in flow_service.chat_stream_openai(flow_uid, flow_req): + yield output def check_chat_request(request: ChatCompletionRequestBody = Body()): diff --git a/dbgpt/cli/cli_scripts.py b/dbgpt/cli/cli_scripts.py index da3225083..bcf065639 100644 --- a/dbgpt/cli/cli_scripts.py +++ b/dbgpt/cli/cli_scripts.py @@ -60,19 +60,26 @@ def db(): @click.group() def new(): - 
"""New a template""" + """New a template.""" pass @click.group() def app(): - """Manage your apps(dbgpts)""" + """Manage your apps(dbgpts).""" pass @click.group() def repo(): - """The repository to install the dbgpts from""" + """The repository to install the dbgpts from.""" + pass + + +@click.group() +def run(): + """Run your dbgpts.""" + pass stop_all_func_list = [] @@ -92,6 +99,7 @@ cli.add_command(db) cli.add_command(new) cli.add_command(app) cli.add_command(repo) +cli.add_command(run) add_command_alias(stop_all, name="all", parent_group=stop) try: @@ -162,8 +170,13 @@ except ImportError as e: try: from dbgpt.util.dbgpts.cli import add_repo from dbgpt.util.dbgpts.cli import install as app_install - from dbgpt.util.dbgpts.cli import list_all_apps as app_list - from dbgpt.util.dbgpts.cli import list_repos, new_dbgpts, remove_repo + from dbgpt.util.dbgpts.cli import list_all_apps as app_list_remote + from dbgpt.util.dbgpts.cli import ( + list_installed_apps, + list_repos, + new_dbgpts, + remove_repo, + ) from dbgpt.util.dbgpts.cli import uninstall as app_uninstall from dbgpt.util.dbgpts.cli import update_repo @@ -173,12 +186,20 @@ try: add_command_alias(update_repo, name="update", parent_group=repo) add_command_alias(app_install, name="install", parent_group=app) add_command_alias(app_uninstall, name="uninstall", parent_group=app) - add_command_alias(app_list, name="list-remote", parent_group=app) + add_command_alias(app_list_remote, name="list-remote", parent_group=app) + add_command_alias(list_installed_apps, name="list", parent_group=app) add_command_alias(new_dbgpts, name="app", parent_group=new) except ImportError as e: logging.warning(f"Integrating dbgpt dbgpts command line tool failed: {e}") +try: + from dbgpt.client._cli import run_flow + + add_command_alias(run_flow, name="flow", parent_group=run) +except ImportError as e: + logging.warning(f"Integrating dbgpt client command line tool failed: {e}") + def main(): return cli() diff --git a/dbgpt/client/_cli.py b/dbgpt/client/_cli.py new file mode 100644 index 000000000..fa73aa69e --- /dev/null +++ b/dbgpt/client/_cli.py @@ -0,0 +1,247 @@ +"""CLI for DB-GPT client.""" + +import functools +import json +import time +import uuid +from typing import Any, Dict + +import click + +from dbgpt.util import get_or_create_event_loop +from dbgpt.util.console import CliLogger +from dbgpt.util.i18n_utils import _ + +from .client import Client +from .flow import list_flow + +cl = CliLogger() + + +def add_base_flow_options(func): + """Add base flow options to the command.""" + + @click.option( + "-n", + "--name", + type=str, + default=None, + required=False, + help=_("The name of the AWEL flow"), + ) + @click.option( + "--uid", + type=str, + default=None, + required=False, + help=_("The uid of the AWEL flow"), + ) + @functools.wraps(func) + def _wrapper(*args, **kwargs): + return func(*args, **kwargs) + + return _wrapper + + +def add_chat_options(func): + """Add chat options to the command.""" + + @click.option( + "-m", + "--messages", + type=str, + default=None, + required=False, + help=_("The messages to run AWEL flow"), + ) + @click.option( + "--model", + type=str, + default=None, + required=False, + help=_("The model name of AWEL flow"), + ) + @click.option( + "-s", + "--stream", + type=bool, + default=False, + required=False, + is_flag=True, + help=_("Whether use stream mode to run AWEL flow"), + ) + @click.option( + "-t", + "--temperature", + type=float, + default=None, + required=False, + help=_("The temperature to run AWEL flow"), + ) + 
@click.option( + "--max_new_tokens", + type=int, + default=None, + required=False, + help=_("The max new tokens to run AWEL flow"), + ) + @click.option( + "--conv_uid", + type=str, + default=None, + required=False, + help=_("The conversation id of the AWEL flow"), + ) + @click.option( + "-d", + "--data", + type=str, + default=None, + required=False, + help=_("The json data to run AWEL flow, if set, will overwrite other options"), + ) + @click.option( + "-e", + "--extra", + type=str, + default=None, + required=False, + help=_("The extra json data to run AWEL flow."), + ) + @click.option( + "-i", + "--interactive", + type=bool, + default=False, + required=False, + is_flag=True, + help=_("Whether use interactive mode to run AWEL flow"), + ) + @functools.wraps(func) + def _wrapper(*args, **kwargs): + return func(*args, **kwargs) + + return _wrapper + + +@click.command(name="flow") +@add_base_flow_options +@add_chat_options +def run_flow(name: str, uid: str, data: str, interactive: bool, **kwargs): + """Run a AWEL flow.""" + client = Client() + + loop = get_or_create_event_loop() + res = loop.run_until_complete(list_flow(client, name, uid)) + + if not res: + cl.error("Flow not found with the given name or uid", exit_code=1) + if len(res) > 1: + cl.error("More than one flow found", exit_code=1) + flow = res[0] + json_data = _parse_json_data(data, **kwargs) + json_data["chat_param"] = flow.uid + json_data["chat_mode"] = "chat_flow" + stream = "stream" in json_data and str(json_data["stream"]).lower() in ["true", "1"] + if stream: + loop.run_until_complete(_chat_stream(client, interactive, json_data)) + else: + loop.run_until_complete(_chat(client, interactive, json_data)) + + +def _parse_json_data(data: str, **kwargs): + json_data = {} + if data: + try: + json_data = json.loads(data) + except Exception as e: + cl.error(f"Invalid JSON data: {data}, {e}", exit_code=1) + if "extra" in kwargs and kwargs["extra"]: + try: + extra = json.loads(kwargs["extra"]) + kwargs["extra"] = extra + except Exception as e: + cl.error(f"Invalid extra JSON data: {kwargs['extra']}, {e}", exit_code=1) + for k, v in kwargs.items(): + if v is not None and k not in json_data: + json_data[k] = v + if "model" not in json_data: + json_data["model"] = "__empty__model__" + return json_data + + +async def _chat_stream(client: Client, interactive: bool, json_data: Dict[str, Any]): + user_input = json_data.get("messages", "") + if "conv_uid" not in json_data and interactive: + json_data["conv_uid"] = str(uuid.uuid4()) + first_message = True + while True: + try: + if interactive and not user_input: + cl.print("Type 'exit' or 'quit' to exit.") + while not user_input: + user_input = cl.ask("You") + if user_input.lower() in ["exit", "quit", "q"]: + break + start_time = time.time() + json_data["messages"] = user_input + if first_message: + cl.info("You: " + user_input) + cl.info("Chat stream started") + cl.debug(f"JSON data: {json.dumps(json_data, ensure_ascii=False)}") + full_text = "" + cl.print("Bot: ") + async for out in client.chat_stream(**json_data): + if out.choices: + text = out.choices[0].delta.content + if text: + full_text += text + cl.print(text, end="") + end_time = time.time() + time_cost = round(end_time - start_time, 2) + cl.success(f"\n:tada: Chat stream finished, timecost: {time_cost} s") + except Exception as e: + cl.error(f"Chat stream failed: {e}", exit_code=1) + finally: + first_message = False + if interactive: + user_input = "" + else: + break + + +async def _chat(client: Client, interactive: bool, json_data: 
Dict[str, Any]): + user_input = json_data.get("messages", "") + if "conv_uid" not in json_data and interactive: + json_data["conv_uid"] = str(uuid.uuid4()) + first_message = True + while True: + try: + if interactive and not user_input: + cl.print("Type 'exit' or 'quit' to exit.") + while not user_input: + user_input = cl.ask("You") + if user_input.lower() in ["exit", "quit", "q"]: + break + start_time = time.time() + json_data["messages"] = user_input + if first_message: + cl.info("You: " + user_input) + + cl.info("Chat started") + cl.debug(f"JSON data: {json.dumps(json_data, ensure_ascii=False)}") + res = await client.chat(**json_data) + cl.print("Bot: ") + if res.choices: + text = res.choices[0].message.content + cl.markdown(text) + time_cost = round(time.time() - start_time, 2) + cl.success(f"\n:tada: Chat stream finished, timecost: {time_cost} s") + except Exception as e: + cl.error(f"Chat failed: {e}", exit_code=1) + finally: + first_message = False + if interactive: + user_input = "" + else: + break diff --git a/dbgpt/client/client.py b/dbgpt/client/client.py index eccbd26ae..aaec050c8 100644 --- a/dbgpt/client/client.py +++ b/dbgpt/client/client.py @@ -1,7 +1,8 @@ """This module contains the client for the DB-GPT API.""" +import atexit import json import os -from typing import Any, AsyncGenerator, List, Optional, Union +from typing import Any, AsyncGenerator, Dict, List, Optional, Union from urllib.parse import urlparse import httpx @@ -98,6 +99,7 @@ class Client: self._http_client = httpx.AsyncClient( headers=headers, timeout=timeout if timeout else httpx.Timeout(None) ) + atexit.register(self.close) async def chat( self, @@ -113,6 +115,7 @@ class Client: span_id: Optional[str] = None, incremental: bool = True, enable_vis: bool = True, + **kwargs, ) -> ChatCompletionResponse: """ Chat Completion. @@ -187,6 +190,7 @@ class Client: span_id: Optional[str] = None, incremental: bool = True, enable_vis: bool = True, + **kwargs, ) -> AsyncGenerator[ChatCompletionStreamResponse, None]: """ Chat Stream Completion. @@ -238,10 +242,23 @@ class Client: incremental=incremental, enable_vis=enable_vis, ) + async for chat_completion_response in self._chat_stream(request.dict()): + yield chat_completion_response + + async def _chat_stream( + self, data: Dict[str, Any] + ) -> AsyncGenerator[ChatCompletionStreamResponse, None]: + """Chat Stream Completion. + + Args: + data: dict, The data to send to the API. + Returns: + AsyncGenerator[dict, None]: The chat completion response. + """ async with self._http_client.stream( method="POST", url=self._api_url + "/chat/completions", - json=request.dict(), + json=data, headers={}, ) as response: if response.status_code == 200: @@ -250,7 +267,11 @@ class Client: if line.strip() == "data: [DONE]": break if line.startswith("data:"): - json_data = json.loads(line[len("data: ") :]) + if line.startswith("data: "): + sse_data = line[len("data: ") :] + else: + sse_data = line[len("data:") :] + json_data = json.loads(sse_data) chat_completion_response = ChatCompletionStreamResponse( **json_data ) @@ -265,21 +286,20 @@ class Client: except Exception as e: raise e - async def get(self, path: str, *args): + async def get(self, path: str, *args, **kwargs): """Get method. Args: path: str, The path to get. args: Any, The arguments to pass to the get method. 
""" - try: - response = await self._http_client.get( - f"{self._api_url}/{CLIENT_SERVE_PATH}{path}", - *args, - ) - return response - finally: - await self._http_client.aclose() + kwargs = {k: v for k, v in kwargs.items() if v is not None} + response = await self._http_client.get( + f"{self._api_url}/{CLIENT_SERVE_PATH}{path}", + *args, + params=kwargs, + ) + return response async def post(self, path: str, args): """Post method. @@ -288,13 +308,10 @@ class Client: path: str, The path to post. args: Any, The arguments to pass to the post """ - try: - return await self._http_client.post( - f"{self._api_url}/{CLIENT_SERVE_PATH}{path}", - json=args, - ) - finally: - await self._http_client.aclose() + return await self._http_client.post( + f"{self._api_url}/{CLIENT_SERVE_PATH}{path}", + json=args, + ) async def post_param(self, path: str, args): """Post method. @@ -303,13 +320,10 @@ class Client: path: str, The path to post. args: Any, The arguments to pass to the post """ - try: - return await self._http_client.post( - f"{self._api_url}/{CLIENT_SERVE_PATH}{path}", - params=args, - ) - finally: - await self._http_client.aclose() + return await self._http_client.post( + f"{self._api_url}/{CLIENT_SERVE_PATH}{path}", + params=args, + ) async def patch(self, path: str, *args): """Patch method. @@ -329,26 +343,20 @@ class Client: path: str, The path to put. args: Any, The arguments to pass to the put. """ - try: - return await self._http_client.put( - f"{self._api_url}/{CLIENT_SERVE_PATH}{path}", json=args - ) - finally: - await self._http_client.aclose() + return await self._http_client.put( + f"{self._api_url}/{CLIENT_SERVE_PATH}{path}", json=args + ) async def delete(self, path: str, *args): """Delete method. Args: path: str, The path to delete. - args: Any, The arguments to pass to the delete. + args: Any, The arguments to pass to delete. """ - try: - return await self._http_client.delete( - f"{self._api_url}/{CLIENT_SERVE_PATH}{path}", *args - ) - finally: - await self._http_client.aclose() + return await self._http_client.delete( + f"{self._api_url}/{CLIENT_SERVE_PATH}{path}", *args + ) async def head(self, path: str, *args): """Head method. @@ -359,6 +367,18 @@ class Client: """ return self._http_client.head(self._api_url + path, *args) + def close(self): + """Close the client.""" + from dbgpt.util import get_or_create_event_loop + + if not self._http_client.is_closed: + loop = get_or_create_event_loop() + loop.run_until_complete(self._http_client.aclose()) + + async def aclose(self): + """Close the client.""" + await self._http_client.aclose() + def is_valid_url(api_url: Any) -> bool: """Check if the given URL is valid. diff --git a/dbgpt/client/flow.py b/dbgpt/client/flow.py index f5206b2d1..15a453593 100644 --- a/dbgpt/client/flow.py +++ b/dbgpt/client/flow.py @@ -93,19 +93,23 @@ async def get_flow(client: Client, flow_id: str) -> FlowPanel: raise ClientException(f"Failed to get flow: {e}") -async def list_flow(client: Client) -> List[FlowPanel]: +async def list_flow( + client: Client, name: str | None = None, uid: str | None = None +) -> List[FlowPanel]: """ List flows. Args: client (Client): The dbgpt client. + name (str): The name of the flow. + uid (str): The uid of the flow. Returns: List[FlowPanel]: The list of flow panels. Raises: ClientException: If the request failed. 
""" try: - res = await client.get("/awel/flows") + res = await client.get("/awel/flows", **{"name": name, "uid": uid}) result: Result = res.json() if result["success"]: return [FlowPanel(**flow) for flow in result["data"]["items"]] diff --git a/dbgpt/core/awel/operators/base.py b/dbgpt/core/awel/operators/base.py index 76f712b9e..f9ba12169 100644 --- a/dbgpt/core/awel/operators/base.py +++ b/dbgpt/core/awel/operators/base.py @@ -127,6 +127,8 @@ class BaseOperator(DAGNode, ABC, Generic[OUT], metaclass=BaseOperatorMeta): """ streaming_operator: bool = False + incremental_output: bool = False + output_format: Optional[str] = None def __init__( self, @@ -147,6 +149,10 @@ class BaseOperator(DAGNode, ABC, Generic[OUT], metaclass=BaseOperatorMeta): from dbgpt.core.awel import DefaultWorkflowRunner runner = DefaultWorkflowRunner() + if "incremental_output" in kwargs: + self.incremental_output = bool(kwargs["incremental_output"]) + if "output_format" in kwargs: + self.output_format = kwargs["output_format"] self._runner: WorkflowRunner = runner self._dag_ctx: Optional[DAGContext] = None diff --git a/dbgpt/core/awel/trigger/iterator_trigger.py b/dbgpt/core/awel/trigger/iterator_trigger.py index 617216408..abcafc204 100644 --- a/dbgpt/core/awel/trigger/iterator_trigger.py +++ b/dbgpt/core/awel/trigger/iterator_trigger.py @@ -132,9 +132,11 @@ class IteratorTrigger(Trigger[List[Tuple[Any, Any]]]): task_id = self.node_id async def call_stream(call_data: Any): - async for out in await end_node.call_stream(call_data): - yield out - await dag._after_dag_end(end_node.current_event_loop_task_id) + try: + async for out in await end_node.call_stream(call_data): + yield out + finally: + await dag._after_dag_end(end_node.current_event_loop_task_id) async def run_node(call_data: Any) -> Tuple[Any, Any]: async with semaphore: diff --git a/dbgpt/core/interface/llm.py b/dbgpt/core/interface/llm.py index 98b549ba3..d87482af2 100644 --- a/dbgpt/core/interface/llm.py +++ b/dbgpt/core/interface/llm.py @@ -161,6 +161,7 @@ class ModelOutput: error_code: int """The error code of the model inference. If the model inference is successful, the error code is 0.""" + incremental: bool = False model_context: Optional[Dict] = None finish_reason: Optional[str] = None usage: Optional[Dict[str, Any]] = None @@ -171,6 +172,11 @@ class ModelOutput: """Convert the model output to dict.""" return asdict(self) + @property + def success(self) -> bool: + """Check if the model inference is successful.""" + return self.error_code == 0 + _ModelMessageType = Union[List[ModelMessage], List[Dict[str, Any]]] diff --git a/dbgpt/core/interface/operators/llm_operator.py b/dbgpt/core/interface/operators/llm_operator.py index 73b857e50..c716bad66 100644 --- a/dbgpt/core/interface/operators/llm_operator.py +++ b/dbgpt/core/interface/operators/llm_operator.py @@ -470,6 +470,8 @@ class CommonStreamingOutputOperator(TransformStreamAbsOperator[ModelOutput, str] Transform model output to the string output to show in DB-GPT chat flow page. 
""" + output_format = "SSE" + metadata = ViewMetadata( label=_("Common Streaming Output Operator"), name="common_streaming_output_operator", @@ -510,8 +512,8 @@ class CommonStreamingOutputOperator(TransformStreamAbsOperator[ModelOutput, str] yield f"data:{error_msg}" return decoded_unicode = model_output.text.replace("\ufffd", "") - msg = decoded_unicode.replace("\n", "\\n") - yield f"data:{msg}\n\n" + # msg = decoded_unicode.replace("\n", "\\n") + yield f"data:{decoded_unicode}\n\n" class StringOutput2ModelOutputOperator(MapOperator[str, ModelOutput]): diff --git a/dbgpt/core/schema/api.py b/dbgpt/core/schema/api.py index 647987b0f..1904d841b 100644 --- a/dbgpt/core/schema/api.py +++ b/dbgpt/core/schema/api.py @@ -114,3 +114,11 @@ class ChatCompletionResponse(BaseModel): ..., description="Chat completion response choices" ) usage: UsageInfo = Field(..., description="Usage info") + + +class ErrorResponse(BaseModel): + """Error response entity.""" + + object: str = Field("error", description="Object type") + message: str = Field(..., description="Error message") + code: int = Field(..., description="Error code") diff --git a/dbgpt/model/utils/chatgpt_utils.py b/dbgpt/model/utils/chatgpt_utils.py index 683cbbfd8..057a04bf5 100644 --- a/dbgpt/model/utils/chatgpt_utils.py +++ b/dbgpt/model/utils/chatgpt_utils.py @@ -154,6 +154,9 @@ def _build_openai_client(init_params: OpenAIParameters) -> Tuple[str, ClientType class OpenAIStreamingOutputOperator(TransformStreamAbsOperator[ModelOutput, str]): """Transform ModelOutput to openai stream format.""" + incremental_output = True + output_format = "SSE" + metadata = ViewMetadata( label=_("OpenAI Streaming Output Operator"), name="openai_streaming_output_operator", @@ -177,7 +180,7 @@ class OpenAIStreamingOutputOperator(TransformStreamAbsOperator[ModelOutput, str] str, is_list=True, description=_( - "The model output after transform to openai stream format" + "The model output after transformed to openai stream format." 
), ) ], diff --git a/dbgpt/serve/flow/api/endpoints.py b/dbgpt/serve/flow/api/endpoints.py index c3939ceb8..746254f59 100644 --- a/dbgpt/serve/flow/api/endpoints.py +++ b/dbgpt/serve/flow/api/endpoints.py @@ -180,6 +180,8 @@ async def query_page( sys_code: Optional[str] = Query(default=None, description="system code"), page: int = Query(default=1, description="current page"), page_size: int = Query(default=20, description="page size"), + name: Optional[str] = Query(default=None, description="flow name"), + uid: Optional[str] = Query(default=None, description="flow uid"), service: Service = Depends(get_service), ) -> Result[PaginationResult[ServerResponse]]: """Query Flow entities @@ -189,13 +191,17 @@ async def query_page( sys_code (Optional[str]): The system code page (int): The page number page_size (int): The page size + name (Optional[str]): The flow name + uid (Optional[str]): The flow uid service (Service): The service Returns: ServerResponse: The response """ return Result.succ( service.get_list_by_page( - {"user_name": user_name, "sys_code": sys_code}, page, page_size + {"user_name": user_name, "sys_code": sys_code, "name": name, "uid": uid}, + page, + page_size, ) ) diff --git a/dbgpt/serve/flow/service/service.py b/dbgpt/serve/flow/service/service.py index 372525f4e..07d7f5191 100644 --- a/dbgpt/serve/flow/service/service.py +++ b/dbgpt/serve/flow/service/service.py @@ -1,11 +1,13 @@ import json import logging +import time import traceback -from typing import Any, List, Optional, cast +from typing import Any, AsyncIterator, List, Optional, cast import schedule from fastapi import HTTPException +from dbgpt._private.pydantic import model_to_json from dbgpt.component import SystemApp from dbgpt.core.awel import ( DAG, @@ -22,6 +24,13 @@ from dbgpt.core.awel.flow.flow_factory import ( ) from dbgpt.core.awel.trigger.http_trigger import CommonLLMHttpTrigger from dbgpt.core.interface.llm import ModelOutput +from dbgpt.core.schema.api import ( + ChatCompletionResponse, + ChatCompletionResponseChoice, + ChatCompletionResponseStreamChoice, + ChatCompletionStreamResponse, + DeltaMessage, +) from dbgpt.serve.core import BaseService from dbgpt.storage.metadata import BaseDao from dbgpt.storage.metadata._base_dao import QUERY_SPEC @@ -365,39 +374,117 @@ class Service(BaseService[ServeEntity, ServeRequest, ServerResponse]): """ return self.dao.get_list_page(request, page, page_size) - async def chat_flow( - self, - flow_uid: str, - request: CommonLLMHttpRequestBody, - incremental: bool = False, - ): + async def chat_stream_flow_str( + self, flow_uid: str, request: CommonLLMHttpRequestBody + ) -> AsyncIterator[str]: + """Stream chat with the AWEL flow. 
+ + Args: + flow_uid (str): The flow uid + request (CommonLLMHttpRequestBody): The request + """ + # Must be non-incremental + request.incremental = False + async for output in self.safe_chat_stream_flow(flow_uid, request): + text = output.text + # if text: + # text = text.replace("\n", "\\n") + if output.error_code != 0: + yield f"data:[SERVER_ERROR]{text}\n\n" + break + else: + yield f"data:{text}\n\n" + + async def chat_stream_openai( + self, flow_uid: str, request: CommonLLMHttpRequestBody + ) -> AsyncIterator[str]: + conv_uid = request.conv_uid + choice_data = ChatCompletionResponseStreamChoice( + index=0, + delta=DeltaMessage(role="assistant"), + finish_reason=None, + ) + chunk = ChatCompletionStreamResponse( + id=conv_uid, choices=[choice_data], model=request.model + ) + yield f"data: {chunk.json(exclude_unset=True, ensure_ascii=False)}\n\n" + + request.incremental = True + async for output in self.safe_chat_stream_flow(flow_uid, request): + if not output.success: + yield f"data: {json.dumps(output.to_dict(), ensure_ascii=False)}\n\n" + yield "data: [DONE]\n\n" + return + choice_data = ChatCompletionResponseStreamChoice( + index=0, + delta=DeltaMessage(role="assistant", content=output.text), + ) + chunk = ChatCompletionStreamResponse( + id=conv_uid, + choices=[choice_data], + model=request.model, + ) + json_data = model_to_json(chunk, exclude_unset=True, ensure_ascii=False) + yield f"data: {json_data}\n\n" + yield "data: [DONE]\n\n" + + async def safe_chat_flow( + self, flow_uid: str, request: CommonLLMHttpRequestBody + ) -> ModelOutput: """Chat with the AWEL flow. Args: flow_uid (str): The flow uid request (CommonLLMHttpRequestBody): The request - incremental (bool): Whether to return the result incrementally + + Returns: + ModelOutput: The output """ + incremental = request.incremental try: - async for output in self._call_chat_flow(flow_uid, request, incremental): + task = await self._get_callable_task(flow_uid) + return await _safe_chat_with_dag_task(task, request) + except HTTPException as e: + return ModelOutput(error_code=1, text=e.detail, incremental=incremental) + except Exception as e: + return ModelOutput(error_code=1, text=str(e), incremental=incremental) + + async def safe_chat_stream_flow( + self, flow_uid: str, request: CommonLLMHttpRequestBody + ) -> AsyncIterator[ModelOutput]: + """Stream chat with the AWEL flow. + + Args: + flow_uid (str): The flow uid + request (CommonLLMHttpRequestBody): The request + + Returns: + AsyncIterator[ModelOutput]: The output + """ + incremental = request.incremental + try: + task = await self._get_callable_task(flow_uid) + async for output in _safe_chat_stream_with_dag_task( + task, request, incremental + ): yield output except HTTPException as e: - yield f"data:[SERVER_ERROR]{e.detail}\n\n" + yield ModelOutput(error_code=1, text=e.detail, incremental=incremental) except Exception as e: - yield f"data:[SERVER_ERROR]{str(e)}\n\n" + yield ModelOutput(error_code=1, text=str(e), incremental=incremental) - async def _call_chat_flow( + async def _get_callable_task( self, flow_uid: str, - request: CommonLLMHttpRequestBody, - incremental: bool = False, - ): - """Chat with the AWEL flow. + ) -> BaseOperator: + """Return the callable task. 
- Args: - flow_uid (str): The flow uid - request (CommonLLMHttpRequestBody): The request - incremental (bool): Whether to return the result incrementally + Returns: + BaseOperator: The callable task + + Raises: + HTTPException: If the flow is not found + ValueError: If the flow is not a chat flow or the leaf node is not found. """ flow = self.get({"uid": flow_uid}) if not flow: @@ -416,10 +503,7 @@ class Service(BaseService[ServeEntity, ServeRequest, ServerResponse]): leaf_nodes = dag.leaf_nodes if len(leaf_nodes) != 1: raise ValueError("Chat Flow just support one leaf node in dag") - end_node = cast(BaseOperator, leaf_nodes[0]) - async for output in _chat_with_dag_task(end_node, request, incremental): - yield output - await dag._after_dag_end(end_node.current_event_loop_task_id) + return cast(BaseOperator, leaf_nodes[0]) def _parse_flow_category(self, dag: DAG) -> FlowCategory: """Parse the flow category @@ -470,99 +554,202 @@ def _is_chat_flow_type(output_obj: Any, is_class: bool = False) -> bool: return isinstance(output_obj, chat_types) -async def _chat_with_dag_task( +async def _safe_chat_with_dag_task(task: BaseOperator, request: Any) -> ModelOutput: + """Chat with the DAG task.""" + try: + finish_reason = None + usage = None + metrics = None + error_code = 0 + text = "" + async for output in _safe_chat_stream_with_dag_task(task, request, False): + finish_reason = output.finish_reason + usage = output.usage + metrics = output.metrics + error_code = output.error_code + text = output.text + return ModelOutput( + error_code=error_code, + text=text, + metrics=metrics, + usage=usage, + finish_reason=finish_reason, + ) + except Exception as e: + return ModelOutput(error_code=1, text=str(e), incremental=False) + + +async def _safe_chat_stream_with_dag_task( task: BaseOperator, - request: CommonLLMHttpRequestBody, - incremental: bool = False, -): - """Chat with the DAG task. 
+ request: Any, + incremental: bool, +) -> AsyncIterator[ModelOutput]: + """Chat with the DAG task.""" + try: + async for output in _chat_stream_with_dag_task(task, request, incremental): + yield output + except Exception as e: + yield ModelOutput(error_code=1, text=str(e), incremental=incremental) + finally: + if task.streaming_operator: + if task.dag: + await task.dag._after_dag_end(task.current_event_loop_task_id) - Args: - task (BaseOperator): The task - request (CommonLLMHttpRequestBody): The request - """ - if request.stream and task.streaming_operator: + +async def _chat_stream_with_dag_task( + task: BaseOperator, + request: Any, + incremental: bool, +) -> AsyncIterator[ModelOutput]: + """Chat with the DAG task.""" + is_sse = task.output_format and task.output_format.upper() == "SSE" + if not task.streaming_operator: try: - from dbgpt.model.utils.chatgpt_utils import OpenAIStreamingOutputOperator - except ImportError: - OpenAIStreamingOutputOperator = None - if incremental: - async for output in await task.call_stream(request): - yield output - else: - if OpenAIStreamingOutputOperator and isinstance( - task, OpenAIStreamingOutputOperator - ): - from dbgpt.core.schema.api import ChatCompletionResponseStreamChoice - - previous_text = "" - async for output in await task.call_stream(request): - if not isinstance(output, str): - yield "data:[SERVER_ERROR]The output is not a stream format\n\n" - return - if output == "data: [DONE]\n\n": - return - json_data = "".join(output.split("data: ")[1:]) - dict_data = json.loads(json_data) - if "choices" not in dict_data: - error_msg = dict_data.get("text", "Unknown error") - yield f"data:[SERVER_ERROR]{error_msg}\n\n" - return - choices = dict_data["choices"] - if choices: - choice = choices[0] - delta_data = ChatCompletionResponseStreamChoice(**choice) - if delta_data.delta.content: - previous_text += delta_data.delta.content - if previous_text: - full_text = previous_text.replace("\n", "\\n") - yield f"data:{full_text}\n\n" - else: - async for output in await task.call_stream(request): - str_msg = "" - should_return = False - if isinstance(output, str): - if output.strip(): - str_msg = output - elif isinstance(output, ModelOutput): - if output.error_code != 0: - str_msg = f"[SERVER_ERROR]{output.text}" - should_return = True - else: - str_msg = output.text - else: - str_msg = ( - f"[SERVER_ERROR]The output is not a valid format" - f"({type(output)})" - ) - should_return = True - if str_msg: - str_msg = str_msg.replace("\n", "\\n") - yield f"data:{str_msg}\n\n" - if should_return: - return + result = await task.call(request) + model_output = _parse_single_output(result, is_sse) + model_output.incremental = incremental + yield model_output + except Exception as e: + yield ModelOutput(error_code=1, text=str(e), incremental=incremental) else: - result = await task.call(request) - str_msg = "" - if result is None: - str_msg = "[SERVER_ERROR]The result is None!" 
- elif isinstance(result, str): - str_msg = result - elif isinstance(result, ModelOutput): - if result.error_code != 0: - str_msg = f"[SERVER_ERROR]{result.text}" - else: - str_msg = result.text - elif isinstance(result, CommonLLMHttpResponseBody): - if result.error_code != 0: - str_msg = f"[SERVER_ERROR]{result.text}" - else: - str_msg = result.text - elif isinstance(result, dict): - str_msg = json.dumps(result, ensure_ascii=False) - else: - str_msg = f"[SERVER_ERROR]The result is not a valid format({type(result)})" + from dbgpt.model.utils.chatgpt_utils import OpenAIStreamingOutputOperator - if str_msg: - str_msg = str_msg.replace("\n", "\\n") - yield f"data:{str_msg}\n\n" + if OpenAIStreamingOutputOperator and isinstance( + task, OpenAIStreamingOutputOperator + ): + full_text = "" + async for output in await task.call_stream(request): + model_output = _parse_openai_output(output) + # The output of the OpenAI streaming API is incremental + full_text += model_output.text + model_output.incremental = incremental + model_output.text = model_output.text if incremental else full_text + yield model_output + if not model_output.success: + break + else: + full_text = "" + previous_text = "" + async for output in await task.call_stream(request): + model_output = _parse_single_output(output, is_sse) + model_output.incremental = incremental + if task.incremental_output: + # Output is incremental, append the text + full_text += model_output.text + else: + # Output is not incremental, last output is the full text + full_text = model_output.text + if not incremental: + # Return the full text + model_output.text = full_text + else: + # Return the incremental text + delta_text = full_text[len(previous_text) :] + previous_text = ( + full_text + if len(full_text) > len(previous_text) + else previous_text + ) + model_output.text = delta_text + yield model_output + if not model_output.success: + break + + +def _parse_single_output(output: Any, is_sse: bool) -> ModelOutput: + """Parse the single output.""" + finish_reason = None + usage = None + metrics = None + if output is None: + error_code = 1 + text = "The output is None!" 
+ elif isinstance(output, str): + if is_sse: + sse_output = _parse_sse_data(output) + if sse_output is None: + error_code = 1 + text = "The output is not a SSE format" + else: + error_code = 0 + text = sse_output + else: + error_code = 0 + text = output + elif isinstance(output, ModelOutput): + error_code = output.error_code + text = output.text + finish_reason = output.finish_reason + usage = output.usage + metrics = output.metrics + elif isinstance(output, CommonLLMHttpResponseBody): + error_code = output.error_code + text = output.text + elif isinstance(output, dict): + error_code = 0 + text = json.dumps(output, ensure_ascii=False) + else: + error_code = 1 + text = f"The output is not a valid format({type(output)})" + return ModelOutput( + error_code=error_code, + text=text, + finish_reason=finish_reason, + usage=usage, + metrics=metrics, + ) + + +def _parse_openai_output(output: Any) -> ModelOutput: + """Parse the OpenAI output.""" + text = "" + if not isinstance(output, str): + return ModelOutput( + error_code=1, + text="The output is not a stream format", + ) + if output.strip() == "data: [DONE]" or output.strip() == "data:[DONE]": + return ModelOutput(error_code=0, text="") + if not output.startswith("data:"): + return ModelOutput( + error_code=1, + text="The output is not a stream format", + ) + + sse_output = _parse_sse_data(output) + if sse_output is None: + return ModelOutput(error_code=1, text="The output is not a SSE format") + json_data = sse_output.strip() + try: + dict_data = json.loads(json_data) + except Exception as e: + return ModelOutput( + error_code=1, + text=f"Invalid JSON data: {json_data}, {e}", + ) + if "choices" not in dict_data: + return ModelOutput( + error_code=1, + text=dict_data.get("text", "Unknown error"), + ) + choices = dict_data["choices"] + finish_reason: Optional[str] = None + if choices: + choice = choices[0] + delta_data = ChatCompletionResponseStreamChoice(**choice) + if delta_data.delta.content: + text = delta_data.delta.content + finish_reason = delta_data.finish_reason + return ModelOutput(error_code=0, text=text, finish_reason=finish_reason) + + +def _parse_sse_data(output: str) -> Optional[str]: + if output.startswith("data:"): + if output.startswith("data: "): + output = output[6:] + else: + output = output[5:] + + return output + else: + return None diff --git a/dbgpt/util/console/__init__.py b/dbgpt/util/console/__init__.py new file mode 100644 index 000000000..10ea12650 --- /dev/null +++ b/dbgpt/util/console/__init__.py @@ -0,0 +1,3 @@ +from .console import CliLogger # noqa: F401 + +__ALL__ = ["CliLogger"] diff --git a/dbgpt/util/console/console.py b/dbgpt/util/console/console.py new file mode 100644 index 000000000..709a2e91a --- /dev/null +++ b/dbgpt/util/console/console.py @@ -0,0 +1,71 @@ +"""Console utility functions for CLI.""" +import dataclasses +import sys +from functools import lru_cache +from typing import Any + +from rich.console import Console +from rich.markdown import Markdown +from rich.prompt import Prompt +from rich.theme import Theme + + +@dataclasses.dataclass +class Output: + """Output file.""" + + title: str + file: str + + +def _get_theme(): + return Theme( + { + "success": "green", + "info": "bright_blue", + "warning": "bright_yellow", + "error": "red", + } + ) + + +@lru_cache(maxsize=None) +def get_console(output: Output | None = None) -> Console: + return Console( + force_terminal=True, + color_system="standard", + theme=_get_theme(), + file=output.file if output else None, + ) + + +class CliLogger: + def 
__init__(self, output: Output | None = None): + self.console = get_console(output) + + def success(self, msg: str, **kwargs): + self.console.print(f"[success]{msg}[/]", **kwargs) + + def info(self, msg: str, **kwargs): + self.console.print(f"[info]{msg}[/]", **kwargs) + + def warning(self, msg: str, **kwargs): + self.console.print(f"[warning]{msg}[/]", **kwargs) + + def error(self, msg: str, exit_code: int = 0, **kwargs): + self.console.print(f"[error]{msg}[/]", **kwargs) + if exit_code != 0: + sys.exit(exit_code) + + def debug(self, msg: str, **kwargs): + self.console.print(f"[cyan]{msg}[/]", **kwargs) + + def print(self, *objects: Any, sep: str = " ", end: str = "\n", **kwargs): + self.console.print(*objects, sep=sep, end=end, **kwargs) + + def markdown(self, msg: str, **kwargs): + md = Markdown(msg) + self.console.print(md, **kwargs) + + def ask(self, msg: str, **kwargs): + return Prompt.ask(msg, **kwargs) diff --git a/dbgpt/util/dbgpts/cli.py b/dbgpt/util/dbgpts/cli.py index 9192689e8..dd7b35748 100644 --- a/dbgpt/util/dbgpts/cli.py +++ b/dbgpt/util/dbgpts/cli.py @@ -1,12 +1,14 @@ import functools import subprocess -import sys from pathlib import Path import click +from ..console import CliLogger from .base import DEFAULT_PACKAGE_TYPES +cl = CliLogger() + def check_poetry_installed(): try: @@ -18,13 +20,13 @@ def check_poetry_installed(): stderr=subprocess.DEVNULL, ) except (subprocess.CalledProcessError, FileNotFoundError): - print("Poetry is not installed. Please install Poetry to proceed.") - print( - "Visit https://python-poetry.org/docs/#installation for installation " - "instructions." - ) + cl.error("Poetry is not installed. Please install Poetry to proceed.") # Exit with error - sys.exit(1) + cl.error( + "Visit https://python-poetry.org/docs/#installation for installation " + "instructions.", + exit_code=1, + ) def add_tap_options(func): @@ -43,15 +45,41 @@ def add_tap_options(func): return wrapper +def add_add_common_options(func): + @click.option( + "-r", + "--repo", + type=str, + default=None, + required=False, + help="The repository to install the dbgpts from", + ) + @click.option( + "-U", + "--update", + type=bool, + required=False, + default=False, + is_flag=True, + help="Whether to update the repo", + ) + @functools.wraps(func) + def wrapper(*args, **kwargs): + return func(*args, **kwargs) + + return wrapper + + @click.command(name="install") -@add_tap_options +@add_add_common_options @click.argument("name", type=str) -def install(repo: str | None, name: str): +def install(repo: str | None, update: bool, name: str): """Install your dbgpts(operators,agents,workflows or apps)""" - from .repo import install + from .repo import _install_default_repos_if_no_repos, install check_poetry_installed() - install(name, repo) + _install_default_repos_if_no_repos() + install(name, repo, with_update=update) @click.command(name="uninstall") @@ -63,20 +91,33 @@ def uninstall(name: str): uninstall(name) -@click.command(name="list") -def list_all_apps(): - """List all installed dbgpts""" - from .repo import list_repo_apps +@click.command(name="list-remote") +@add_add_common_options +def list_all_apps( + repo: str | None, + update: bool, +): + """List all available dbgpts""" + from .repo import _install_default_repos_if_no_repos, list_repo_apps - list_repo_apps() + _install_default_repos_if_no_repos() + list_repo_apps(repo, with_update=update) + + +@click.command(name="list") +def list_installed_apps(): + """List all installed dbgpts""" + from .repo import list_installed_apps + + 
list_installed_apps() @click.command(name="list") def list_repos(): """List all repos""" - from .repo import list_repos + from .repo import _print_repos - print("\n".join(list_repos())) + _print_repos() @click.command(name="add") diff --git a/dbgpt/util/dbgpts/loader.py b/dbgpt/util/dbgpts/loader.py index 5628c95ab..b6d53fa6f 100644 --- a/dbgpt/util/dbgpts/loader.py +++ b/dbgpt/util/dbgpts/loader.py @@ -41,6 +41,7 @@ class BasePackage(BaseModel): ) root: str = Field(..., description="The root of the package") repo: str = Field(..., description="The repository of the package") + package: str = Field(..., description="The package name(like name in pypi)") @classmethod def build_from(cls, values: Dict[str, Any], ext_dict: Dict[str, Any]): @@ -131,6 +132,7 @@ class InstalledPackage(BaseModel): name: str = Field(..., description="The name of the package") repo: str = Field(..., description="The repository of the package") root: str = Field(..., description="The root of the package") + package: str = Field(..., description="The package name(like name in pypi)") def _get_classes_from_module(module): @@ -160,6 +162,7 @@ def _parse_package_metadata(package: InstalledPackage) -> BasePackage: ext_metadata[key] = value pkg_dict["root"] = package.root pkg_dict["repo"] = package.repo + pkg_dict["package"] = package.package if pkg_dict["package_type"] == "flow": return FlowPackage.build_from(pkg_dict, ext_metadata) elif pkg_dict["package_type"] == "operator": @@ -186,7 +189,9 @@ def _load_installed_package(path: str) -> List[InstalledPackage]: name = metadata["name"] repo = metadata["repo"] packages.append( - InstalledPackage(name=name, repo=repo, root=str(full_path)) + InstalledPackage( + name=name, repo=repo, root=str(full_path), package=package + ) ) return packages diff --git a/dbgpt/util/dbgpts/repo.py b/dbgpt/util/dbgpts/repo.py index c01a198ab..13b63570b 100644 --- a/dbgpt/util/dbgpts/repo.py +++ b/dbgpt/util/dbgpts/repo.py @@ -1,13 +1,14 @@ import functools -import logging import os import shutil import subprocess from pathlib import Path from typing import List, Tuple -import click +from rich.table import Table +from ..console import CliLogger +from ..i18n_utils import _ from .base import ( DBGPTS_METADATA_FILE, DBGPTS_REPO_HOME, @@ -17,9 +18,9 @@ from .base import ( INSTALL_METADATA_FILE, _print_path, ) +from .loader import _load_package_from_path -logger = logging.getLogger("dbgpt_cli") - +cl = CliLogger() _DEFAULT_REPO = "eosphoros/dbgpts" @@ -45,9 +46,10 @@ def list_repos() -> List[str]: def _get_repo_path(repo: str) -> Path: repo_arr = repo.split("/") if len(repo_arr) != 2: - raise ValueError( + cl.error( f"Invalid repo name '{repo}', repo name must split by '/', " - f"eg.(eosphoros/dbgpts)." 
+ f"eg.(eosphoros/dbgpts).", + exit_code=1, ) return Path(DBGPTS_REPO_HOME) / repo_arr[0] / repo_arr[1] @@ -63,6 +65,35 @@ def _list_repos_details() -> List[Tuple[str, str]]: return results +def _print_repos(): + """Print all repos""" + repos = _list_repos_details() + repos.sort(key=lambda x: (x[0], x[1])) + table = Table(title=_("Repos")) + table.add_column(_("Repository"), justify="right", style="cyan", no_wrap=True) + table.add_column(_("Path"), justify="right", style="green") + for repo, full_path in repos: + if full_path.startswith(str(Path.home())): + full_path = full_path.replace(str(Path.home()), "~") + table.add_row(repo, full_path) + cl.print(table) + + +def _install_default_repos_if_no_repos(): + """Install the default repos if no repos exist.""" + has_repos = False + for repo, full_path in _list_repos_details(): + if os.path.exists(full_path): + has_repos = True + break + if not has_repos: + repo_url = DEFAULT_REPO_MAP[_DEFAULT_REPO] + cl.info( + f"No repos found, installing default repos {_DEFAULT_REPO} from {repo_url}" + ) + add_repo(_DEFAULT_REPO, repo_url) + + def add_repo(repo: str, repo_url: str, branch: str | None = None): """Add a new repo @@ -73,13 +104,14 @@ def add_repo(repo: str, repo_url: str, branch: str | None = None): """ exist_repos = list_repos() if repo in exist_repos and repo_url not in DEFAULT_REPO_MAP.values(): - raise ValueError(f"The repo '{repo}' already exists.") + cl.error(f"The repo '{repo}' already exists.", exit_code=1) repo_arr = repo.split("/") if len(repo_arr) != 2: - raise ValueError( + cl.error( f"Invalid repo name '{repo}', repo name must split by '/', " - f"eg.(eosphoros/dbgpts)." + "eg.(eosphoros/dbgpts).", + exit_code=1, ) repo_name = repo_arr[1] repo_group_dir = os.path.join(DBGPTS_REPO_HOME, repo_arr[0]) @@ -99,12 +131,12 @@ def remove_repo(repo: str): """ repo_path = _get_repo_path(repo) if not os.path.exists(repo_path): - raise ValueError(f"The repo '{repo}' does not exist.") + cl.error(f"The repo '{repo}' does not exist.", exit_code=1) if os.path.islink(repo_path): os.unlink(repo_path) else: shutil.rmtree(repo_path) - logger.info(f"Repo '{repo}' removed successfully.") + cl.info(f"Repo '{repo}' removed successfully.") def clone_repo( @@ -132,11 +164,11 @@ def clone_repo( subprocess.run(clone_command, check=True) if branch: - click.echo( + cl.info( f"Repo '{repo}' cloned from {repo_url} with branch '{branch}' successfully." 
) else: - click.echo(f"Repo '{repo}' cloned from {repo_url} successfully.") + cl.info(f"Repo '{repo}' cloned from {repo_url} successfully.") def update_repo(repo: str): @@ -145,20 +177,20 @@ Args: repo (str): The name of the repo """ - print(f"Updating repo '{repo}'...") + cl.info(f"Updating repo '{repo}'...") repo_path = os.path.join(DBGPTS_REPO_HOME, repo) if not os.path.exists(repo_path): if repo in DEFAULT_REPO_MAP: add_repo(repo, DEFAULT_REPO_MAP[repo]) if not os.path.exists(repo_path): - raise ValueError(f"The repo '{repo}' does not exist.") + cl.error(f"The repo '{repo}' does not exist.", exit_code=1) else: - raise ValueError(f"The repo '{repo}' does not exist.") + cl.error(f"The repo '{repo}' does not exist.", exit_code=1) os.chdir(repo_path) if not os.path.exists(".git"): - logger.info(f"Repo '{repo}' is not a git repository.") + cl.info(f"Repo '{repo}' is not a git repository.") return - logger.info(f"Updating repo '{repo}'...") + cl.info(f"Updating repo '{repo}'...") subprocess.run(["git", "pull"], check=False) @@ -176,8 +208,7 @@ """ repo_info = check_with_retry(name, repo, with_update=with_update, is_first=True) if not repo_info: - click.echo(f"The specified dbgpt '{name}' does not exist.", err=True) - return + cl.error(f"The specified dbgpt '{name}' does not exist.", exit_code=1) repo, dbgpt_path = repo_info _copy_and_install(repo, name, dbgpt_path) @@ -190,38 +221,34 @@ def uninstall(name: str): """ install_path = INSTALL_DIR / name if not install_path.exists(): - click.echo( - f"The dbgpt '{name}' has not been installed yet.", - err=True, - ) - return + cl.error(f"The dbgpt '{name}' has not been installed yet.", exit_code=1) os.chdir(install_path) subprocess.run(["pip", "uninstall", name, "-y"], check=True) shutil.rmtree(install_path) - logger.info(f"dbgpt '{name}' uninstalled successfully.") + cl.success(f"dbgpt '{name}' uninstalled successfully.") def _copy_and_install(repo: str, name: str, package_path: Path): if not package_path.exists(): - raise ValueError( - f"The specified dbgpt '{name}' does not exist in the {repo} tap." 
+ cl.error( + f"The specified dbgpt '{name}' does not exist in the {repo} tap.", + exit_code=1, ) install_path = INSTALL_DIR / name if install_path.exists(): - click.echo( + cl.error( f"The dbgpt '{name}' has already been installed" f"({_print_path(install_path)}).", - err=True, + exit_code=1, ) - return try: shutil.copytree(package_path, install_path) - logger.info(f"Installing dbgpts '{name}' from {repo}...") + cl.info(f"Installing dbgpts '{name}' from {repo}...") os.chdir(install_path) subprocess.run(["poetry", "install"], check=True) _write_install_metadata(name, repo, install_path) - click.echo(f"Installed dbgpts at {_print_path(install_path)}.") - click.echo(f"dbgpts '{name}' installed successfully.") + cl.success(f"Installed dbgpts at {_print_path(install_path)}.") + cl.success(f"dbgpts '{name}' installed successfully.") except Exception as e: if install_path.exists(): shutil.rmtree(install_path) @@ -257,10 +284,9 @@ def check_with_retry( """ repos = _list_repos_details() if spec_repo: - repos = list(filter(lambda x: x[0] == repo, repos)) + repos = list(filter(lambda x: x[0] == spec_repo, repos)) if not repos: - logger.error(f"The specified repo '{spec_repo}' does not exist.") - return + cl.error(f"The specified repo '{spec_repo}' does not exist.", exit_code=1) if is_first and with_update: for repo in repos: update_repo(repo[0]) @@ -288,11 +314,17 @@ def list_repo_apps(repo: str | None = None, with_update: bool = True): if repo: repos = list(filter(lambda x: x[0] == repo, repos)) if not repos: - logger.error(f"The specified repo '{repo}' does not exist.") - return + cl.error(f"The specified repo '{repo}' does not exist.", exit_code=1) if with_update: for repo in repos: update_repo(repo[0]) + table = Table(title=_("dbgpts In All Repos")) + + table.add_column(_("Repository"), justify="right", style="cyan", no_wrap=True) + table.add_column(_("Type"), style="magenta") + table.add_column(_("Name"), justify="right", style="green") + + data = [] for repo in repos: repo_path = Path(repo[1]) for package in DEFAULT_PACKAGES: @@ -304,4 +336,28 @@ def list_repo_apps(repo: str | None = None, with_update: bool = True): and dbgpt_path.is_dir() and dbgpt_metadata_path.exists() ): - click.echo(f"{app}({repo[0]}/{package}/{app})") + data.append((repo[0], package, app)) + # Sort by repo name, package name, and app name + data.sort(key=lambda x: (x[0], x[1], x[2])) + for repo, package, app in data: + table.add_row(repo, package, app) + cl.print(table) + + +def list_installed_apps(): + """List all installed dbgpts""" + packages = _load_package_from_path(INSTALL_DIR) + table = Table(title=_("Installed dbgpts")) + + table.add_column(_("Name"), justify="right", style="cyan", no_wrap=True) + table.add_column(_("Type"), style="blue") + table.add_column(_("Repository"), style="magenta") + table.add_column(_("Path"), justify="right", style="green") + + packages.sort(key=lambda x: (x.package, x.package_type, x.repo)) + for package in packages: + str_path = package.root + if str_path.startswith(str(Path.home())): + str_path = str_path.replace(str(Path.home()), "~") + table.add_row(package.package, package.package_type, package.repo, str_path) + cl.print(table) diff --git a/i18n/Makefile b/i18n/Makefile index 7c4fd835a..38c75406f 100644 --- a/i18n/Makefile +++ b/i18n/Makefile @@ -10,14 +10,18 @@ SUBPACKAGES = agent app cli client configs core datasource model rag serve stora # Default: English (en) LANGUAGES = zh_CN ja ko fr ru +# Command line arguments for language and module, use LANGUAGES and SUBPACKAGES as 
default if not specified +LANGUAGE ?= $(LANGUAGES) +MODULE ?= $(SUBPACKAGES) + .PHONY: help pot po mo clean all: pot po mo -# Generate POT files for each existing subpackage +# Generate POT files for the specified or existing subpackage pot: @echo "Generating POT files for subpackages..." - @$(foreach pkg,$(SUBPACKAGES),\ + @$(foreach pkg,$(MODULE),\ if [ -d $(SRC_DIR)/$(pkg) ]; then \ echo "Processing $(pkg) package..."; \ mkdir -p $(LOCALEDIR)/pot; \ @@ -27,10 +31,10 @@ pot: fi; \ ) -# Update or create new .po files for each language +# Update or create new .po files for the specified language and module po: pot - @for lang in $(LANGUAGES); do \ - for pkg in $(SUBPACKAGES); do \ + @for lang in $(LANGUAGE); do \ + for pkg in $(MODULE); do \ if [ -f $(LOCALEDIR)/pot/$(DOMAIN)_$$pkg.pot ]; then \ mkdir -p $(LOCALEDIR)/$$lang/LC_MESSAGES; \ if [ -f $(LOCALEDIR)/$$lang/LC_MESSAGES/$(DOMAIN)_$$pkg.po ]; then \ @@ -46,11 +50,10 @@ po: pot done \ done - -# Compile .po files to .mo files +# Compile .po files to .mo files for the specified language and module mo: - @for lang in $(LANGUAGES); do \ - for pkg in $(SUBPACKAGES); do \ + @for lang in $(LANGUAGE); do \ + for pkg in $(MODULE); do \ if [ -f $(LOCALEDIR)/$$lang/LC_MESSAGES/$(DOMAIN)_$$pkg.po ]; then \ echo "Compiling $$lang MO file for $$pkg..."; \ msgfmt -o $(LOCALEDIR)/$$lang/LC_MESSAGES/$(DOMAIN)_$$pkg.mo $(LOCALEDIR)/$$lang/LC_MESSAGES/$(DOMAIN)_$$pkg.po; \ @@ -80,3 +83,4 @@ help: @echo "3. Translate the strings in .po files using a PO file editor." @echo "4. Run 'make mo' to compile the translated .po files into .mo files." @echo "5. Run 'make clean' to clean up if necessary." + @echo "Use 'LANGUAGE= MODULE=' to specify a language and a module." diff --git a/i18n/locales/fr/LC_MESSAGES/dbgpt_client.mo b/i18n/locales/fr/LC_MESSAGES/dbgpt_client.mo new file mode 100644 index 0000000000000000000000000000000000000000..491103e71e54041ea7b141c005cdfd9b8fc383a6 GIT binary patch literal 363 zcmYL@K~BRk5Jih%RTX~*a2`roQ1%B!-!|zo<39*Q;xajlv8bn{l(V0kuU{A^i zo;ziHt%7wDMs2Wb6?CEMdIJ?2bQCYiXsT9K$a7R{3C0@SzDZa)Hk;1U-I~#bU}I|n z8fw33Xnw%2aM{#Kn*kK>%6SaK*L=d0ov&MCg^s7Du%$MYgyc;Jmes9OHJ@0w_B6Tq UyZT`~fd6uMV&E2{(*f)K0yp+% 1);\n" + +#: ../dbgpt/client/_cli.py:30 +#, fuzzy +msgid "The name of the AWEL flow" +msgstr "Le nom du flux" + +#: ../dbgpt/client/_cli.py:37 +#, fuzzy +msgid "The uid of the AWEL flow" +msgstr "L'UID du flux" + +#: ../dbgpt/client/_cli.py:55 +#, fuzzy +msgid "The messages to run AWEL flow" +msgstr "Les messages du flux" + +#: ../dbgpt/client/_cli.py:62 +#, fuzzy +msgid "The model name of AWEL flow" +msgstr "Le modèle du flux" + +#: ../dbgpt/client/_cli.py:71 +msgid "Whether use stream mode to run AWEL flow" +msgstr "" + +#: ../dbgpt/client/_cli.py:79 +#, fuzzy +msgid "The temperature to run AWEL flow" +msgstr "La température du flux" + +#: ../dbgpt/client/_cli.py:86 +#, fuzzy +msgid "The max new tokens to run AWEL flow" +msgstr "Le nombre maximal de nouveaux tokens du flux" + +#: ../dbgpt/client/_cli.py:93 +#, fuzzy +msgid "The conversation id of the AWEL flow" +msgstr "L'identifiant de conversation du flux" + +#: ../dbgpt/client/_cli.py:101 +msgid "The json data to run AWEL flow, if set, will overwrite other options" +msgstr "" + +#: ../dbgpt/client/_cli.py:109 +#, fuzzy +msgid "The extra json data to run AWEL flow." 
+msgstr "Les données JSON supplémentaires du flux" + +#: ../dbgpt/client/_cli.py:118 +#, fuzzy +msgid "Whether use interactive mode to run AWEL flow" +msgstr "La température du flux" + +#~ msgid "Whether to stream the flow, default is False" +#~ msgstr "Indique si le flux doit être diffusé en continu, par défaut False" + +#~ msgid "The json data of the flow" +#~ msgstr "Les données JSON du flux" diff --git a/i18n/locales/fr/LC_MESSAGES/dbgpt_model.mo b/i18n/locales/fr/LC_MESSAGES/dbgpt_model.mo index 7822cce8217a0b25b61ed56047382913b1201ac6..4d8d19101cbec46f45036b7c3be0ff8cd69ca5ac 100644 GIT binary patch delta 391 zcmXZYy-UMT6vp8W!EZ{B{@eC_?jgxqfJNPtQ zmqw+!B`si;`gTStki;vD8kv5HG*28-SG!DM|3Ciw$S<1>2rj+gj_<_C^) z5(B5dHGB^n}bMkQ1E+6rw{M$-MhPY<^Gl*MuTk;F-?(G+(idZaUR>ajMrGgCp^Ps za(40}eokZyE5uKxMapzo3m@SinEjK691~(nf1m_JEHIB#yC;=UBuMYU6j* z!krnBO*}#!w1)?HgG=~|+UNuI{unpVVv2pNqAu34YuRRZ1a$IeT)`o__=bJ_ME!y0 ztOyIIzcq!-lh(PhW31;S@r`d|(ThSa>@?cJh1ZL^jiBWH&_N(RIozv*J#jDydtBiJ5?hTsJ4c|^+`6r&$?f0O NPeuvlL(VssyOWSU@CEQ;un4{d z9tA1*BiIW520jJux(l{o9BctiP{x}Fp9a4Np9Cx5yWsED_k-IA*^Zbd_#DPRzMGKe z!RufW{26>4Y`TY#4A=oa3YNeWcmsSDeDPjF4uajFTt5TK@gKl9!Cyg{$L{+Gc?s+T zWj;qh13U&w`#(UWLtySVq(ws2XQ_*951>EjKM{iT$Ybc(j;|BsAV|q?Qdij?3gG|V zU-K;YAsa#x5-o6p?lJTtvp9`2+g3A-;GJ7WF2^hyO{Xo4l6E$r7IK_+b;W5ar!k$s z3@e?bIE8~jqhQhYHf3)jN%KaU<*3c9qNZnFAw4G3qn$LG=%j6`%{IQjMsPPYLUSOr#*>SceL{mRv&7M_QfdEi<)KVdBmqh)zZ}d9ILPOxsMHQc>VVg z_Ql@$=Ky`&Vrm}SqCEv}7LcCFER`D;>1BC?vn|x&S%w|@2AgZN=2yzQ>NJUURr&1I z$kM{7hOH;kBoN#PGExUPPK{ftZVwn%9+KjCV6Uqh4V+LpuJTRR8UwUo21dc`R#Ep? zt!Ugo>p%?hc9vnM2yReSR#mE6RdW0Jpt5Yrvnr>m#r$Kp3#MsU97*;MVkXc}F2~xu zsd5dcTTPc_IX5E>4UJ8^>%;goMw-LnFiBX(Vbq|~nN|)k%hn7%LKD$F`=Wbe^gyh) zud}B+)XQWLC2nW5w9;0{*h19Ptu2I@m&LXra+_X~(?@*d{(8gVnov&{X zBSxr8wYid%g68B=N2t6q5Kv30x!fSlFkRLFOT$pl6mWf%B#I=04eD%lEJ6>fp?GIJ zww7jNeK;g98cJD74w{J1*%97g=2T5@p{cBD;R3YoPj)EHYj*y*GE0f+DI<+SjL_zO zjfWDsf~Dq^4xFrw$fo~lw>GxW;ITE_5+cI;RCu0nuM2NUxL1TzS{a@#k587*k5-OP z5#fF*JbXulw;(ON8Q*SNc%#Cb7w%O!7#7~7a4|6_?VJ*dQMi|+se4|yFqP|XEsX?a zE9}b;lE&cVzE5!$>b*& zg*&nG`9isLMmVR0b3r)o3wOMH?yK?%WO=E4wu<$At5(aHfQNQVhAZaa_6Q zzYVO9ivNfIEhzN#rJnVL`6LjgJbbM@HdR^rXt{K%yl?`7V;UObFh;hLy4L1D$i8WO z#8?X+{PWz_u13f8w&%*1 zc}(PWBgu0Hi|1DKw3aY=-sH7rPTw%~P0r@Ip(it`?5alQU&q3lE(dPf7(?$QJ<(2i z(lRp7a3h>&&p;*b9aUy2L0GrKE(_~iSS4XsguN@D^6_}|d2>>|K48MeyeRCeYVo1E ycUJF(eMx5PLRj}?b6$PjlkyRD^Zy&fj@7t?2F{eftM_NupKK@L537yQWcdau*Qe?L diff --git a/i18n/locales/ja/LC_MESSAGES/dbgpt_model.po b/i18n/locales/ja/LC_MESSAGES/dbgpt_model.po index 7d9f0b8b4..5a7f0078e 100644 --- a/i18n/locales/ja/LC_MESSAGES/dbgpt_model.po +++ b/i18n/locales/ja/LC_MESSAGES/dbgpt_model.po @@ -8,7 +8,7 @@ msgid "" msgstr "" "Project-Id-Version: PACKAGE VERSION\n" "Report-Msgid-Bugs-To: \n" -"POT-Creation-Date: 2024-03-24 22:53+0800\n" +"POT-Creation-Date: 2024-03-26 05:55+0800\n" "PO-Revision-Date: 2024-03-23 16:45+0800\n" "Last-Translator: Automatically generated\n" "Language-Team: none\n" @@ -18,138 +18,78 @@ msgstr "" "Content-Transfer-Encoding: 8bit\n" "Plural-Forms: nplurals=1; plural=0;\n" -#: ../dbgpt/model/cluster/client.py:20 -#, fuzzy -msgid "Default LLM Client" -msgstr "LLM クライアント。" - #: ../dbgpt/model/cluster/client.py:23 msgid "Default LLM client(Connect to your DB-GPT model serving)" -msgstr "" +msgstr "デフォルトのLLMクライアント(DB-GPTモデルサービングに接続)" #: ../dbgpt/model/cluster/client.py:26 ../dbgpt/model/cluster/client.py:127 msgid "Auto Convert Message" -msgstr "" +msgstr "メッセージの自動変換" #: ../dbgpt/model/cluster/client.py:32 ../dbgpt/model/cluster/client.py:133 msgid "" "Whether to auto convert the messages that are 
not supported by the LLM to a " "compatible format" -msgstr "" - -#: ../dbgpt/model/cluster/client.py:113 -#, fuzzy -msgid "Remote LLM Client" -msgstr "LLM クライアント。" +msgstr "LLMでサポートされていないメッセージを互換性のある形式に自動変換するかどうか" #: ../dbgpt/model/cluster/client.py:116 msgid "Remote LLM client(Connect to the remote DB-GPT model serving)" -msgstr "" +msgstr "リモートLLMクライアント(リモートDB-GPTモデルサービングに接続)" #: ../dbgpt/model/cluster/client.py:119 msgid "Controller Address" -msgstr "" +msgstr "コントローラーアドレス" #: ../dbgpt/model/cluster/client.py:123 msgid "http://127.0.0.1:8000" -msgstr "" - -#: ../dbgpt/model/cluster/client.py:124 -msgid "Model controller address" -msgstr "" - -#: ../dbgpt/model/proxy/llms/chatgpt.py:48 -#, fuzzy -msgid "OpenAI LLM Client" -msgstr "LLM クライアント。" +msgstr "モデルコントローラーアドレス" #: ../dbgpt/model/proxy/llms/chatgpt.py:53 msgid "OpenAI API Key" -msgstr "" +msgstr "OpenAI APIキー" #: ../dbgpt/model/proxy/llms/chatgpt.py:59 msgid "" "OpenAI API Key, not required if you have set OPENAI_API_KEY environment " "variable." msgstr "" +"OpenAI APIキー、OPENAI_API_KEY環境変数を設定している場合は必要ありません。" #: ../dbgpt/model/proxy/llms/chatgpt.py:64 msgid "OpenAI API Base" -msgstr "" +msgstr "OpenAI APIベース" #: ../dbgpt/model/proxy/llms/chatgpt.py:70 msgid "" "OpenAI API Base, not required if you have set OPENAI_API_BASE environment " "variable." msgstr "" - -#: ../dbgpt/model/operators/llm_operator.py:66 -msgid "LLM Operator" -msgstr "LLM 演算子" - -#: ../dbgpt/model/operators/llm_operator.py:69 -msgid "The LLM operator." -msgstr "LLM 演算子。" - -#: ../dbgpt/model/operators/llm_operator.py:72 -#: ../dbgpt/model/operators/llm_operator.py:120 -msgid "LLM Client" -msgstr "LLM クライアント" - -#: ../dbgpt/model/operators/llm_operator.py:77 -#: ../dbgpt/model/operators/llm_operator.py:125 -msgid "The LLM Client." -msgstr "LLM クライアント。" - -#: ../dbgpt/model/operators/llm_operator.py:82 -#: ../dbgpt/model/operators/llm_operator.py:130 -msgid "Model Request" -msgstr "モデルリクエスト" - -#: ../dbgpt/model/operators/llm_operator.py:85 -#: ../dbgpt/model/operators/llm_operator.py:133 -msgid "The model request." -msgstr "モデルリクエスト。" - -#: ../dbgpt/model/operators/llm_operator.py:90 -#: ../dbgpt/model/operators/llm_operator.py:138 -#: ../dbgpt/model/utils/chatgpt_utils.py:175 -msgid "Model Output" -msgstr "モデル出力" - -#: ../dbgpt/model/operators/llm_operator.py:93 -#: ../dbgpt/model/operators/llm_operator.py:141 -msgid "The model output." -msgstr "モデル出力。" +"OpenAI APIベース、OPENAI_API_BASE環境変数を設定している場合は必要ありません。" #: ../dbgpt/model/operators/llm_operator.py:113 msgid "Streaming LLM Operator" -msgstr "ストリーミング LLM 演算子" +msgstr "ストリーミングLLM演算子" #: ../dbgpt/model/operators/llm_operator.py:117 msgid "The streaming LLM operator." -msgstr "ストリーミング LLM 演算子。" +msgstr "ストリーミングLLM演算子。" #: ../dbgpt/model/utils/chatgpt_utils.py:158 -#, fuzzy msgid "OpenAI Streaming Output Operator" -msgstr "ストリーミング LLM 演算子" +msgstr "OpenAIストリーミング出力演算子" #: ../dbgpt/model/utils/chatgpt_utils.py:162 -#, fuzzy msgid "The OpenAI streaming LLM operator." -msgstr "ストリーミング LLM 演算子。" +msgstr "OpenAIストリーミングLLM演算子。" #: ../dbgpt/model/utils/chatgpt_utils.py:166 -#, fuzzy msgid "Upstream Model Output" -msgstr "モデル出力" +msgstr "上流モデル出力" #: ../dbgpt/model/utils/chatgpt_utils.py:170 -#, fuzzy msgid "The model output of upstream." -msgstr "モデル出力。" +msgstr "上流のモデル出力。" #: ../dbgpt/model/utils/chatgpt_utils.py:180 -msgid "The model output after transform to openai stream format" -msgstr "" +msgid "The model output after transformed to openai stream format." 
+msgstr "OpenAI ストリーム形式に変換されたモデル出力。" \ No newline at end of file diff --git a/i18n/locales/ko/LC_MESSAGES/dbgpt_client.mo b/i18n/locales/ko/LC_MESSAGES/dbgpt_client.mo new file mode 100644 index 0000000000000000000000000000000000000000..49af1843d743999934dab904d00beb0f95494ab9 GIT binary patch literal 357 zcmYL@u};G<5Qd9j^2p5Kfvp_4DH6d^hf-2hL1`31cat2!klK;$Ao?J@9?!xBr1qCS z`E39H-TfRNeGf?lub3Fp=QW+ZXQ#~BcPs-eDp;fQ+8W8T>Fj=bm-2O* zKP^^|EXUTm083x%3U0dELt!On*$Q&})O`kSRY1uj5nX~9Lv+Q(SVp6vh=pJe$_FT% zGQLs4I?1P9up1S0shVcXYc%L6UXjt%ovINER2#{SHMo0|{KK;4VwvvO9Gwd`vnHUS z_S=@`2mA^bZKJeFc=@KB$1r&=<`D0F{Ti?6AT_0}w5cVJUvywu(>c|^+`5gY$?f0O NPeuvz7{~EvU+-(#{9SezYwKN+zbQp+vL>J}QzcS|O~we;Qx9Mp#;_4*aSoH%AX1QPI1SA5f}SV>jcJe z9)oy{y3nPWzr!x-XPm%a)G#AXWy3QVre4MX?%)C*mg|DZH=RM!)>Mmxm>BhnwBR&$ z;1Z7Gwi&-cP4;MfF}|a||AV^m#u|}Dj3P;87w2&w^}P~y^MCQyRtAP}l6uP2d#Fc$ zhPuElYSITB!%x(sjdK1vKaQF-f%;tvbpuBU1M@Vt;S4rm78h_E8%0WT$ICnmkJya8euCf_>O6xJ zn8O%epa!}z-yg7>`5S6Nwk^_$lc@WbP!r$84m`jpp5QX)OT{cS2Sf&_x)0<0uuv-! z!5p?@3DbCF)}K%-{4jnS?cm@45!3{Ra0RE3iscL!@eLGf`mZgXLj9gGuA^4Ghq~WGO{9$4fnb>Zdd=D{x-TSDZLwCbE&Csu zlnx54-m!RHR@|5J#;v|Yf;A1U2We1ps#cw>>|4iHo8uOo!q)0$cEj0mi>ukZvsNhP Yv#vL6_Xi5Q?$)kbnX|wA-bm>B7Yv?2F8}}l diff --git a/i18n/locales/ko/LC_MESSAGES/dbgpt_model.po b/i18n/locales/ko/LC_MESSAGES/dbgpt_model.po index 14c3d330e..ad0f61596 100644 --- a/i18n/locales/ko/LC_MESSAGES/dbgpt_model.po +++ b/i18n/locales/ko/LC_MESSAGES/dbgpt_model.po @@ -8,7 +8,7 @@ msgid "" msgstr "" "Project-Id-Version: PACKAGE VERSION\n" "Report-Msgid-Bugs-To: \n" -"POT-Creation-Date: 2024-03-24 22:53+0800\n" +"POT-Creation-Date: 2024-03-26 05:55+0800\n" "PO-Revision-Date: 2024-03-23 16:45+0800\n" "Last-Translator: Automatically generated\n" "Language-Team: none\n" @@ -34,8 +34,7 @@ msgstr "메시지 자동 변환" msgid "" "Whether to auto convert the messages that are not supported by the LLM to a " "compatible format" -msgstr "" -"LLM에서 지원하지 않는 메시지를 호환되는 형식으로 자동 변환할지 여부" +msgstr "LLM에서 지원하지 않는 메시지를 호환되는 형식으로 자동 변환할지 여부" #: ../dbgpt/model/cluster/client.py:113 msgid "Remote LLM Client" @@ -117,11 +116,6 @@ msgstr "모델 요청." msgid "Model Output" msgstr "모델 출력" -#: ../dbgpt/model/operators/llm_operator.py:93 -#: ../dbgpt/model/operators/llm_operator.py:141 -msgid "The model output." -msgstr "모델 출력." - #: ../dbgpt/model/operators/llm_operator.py:113 msgid "Streaming LLM Operator" msgstr "스트리밍 LLM 연산자" @@ -147,5 +141,5 @@ msgid "The model output of upstream." msgstr "상류의 모델 출력." #: ../dbgpt/model/utils/chatgpt_utils.py:180 -msgid "The model output after transform to openai stream format" -msgstr "OpenAI 스트림 형식으로 변환된 모델 출력" +msgid "The model output after transformed to openai stream format." 
+msgstr "OpenAI 스트림 형식으로 변환된 모델 출력" \ No newline at end of file diff --git a/i18n/locales/ru/LC_MESSAGES/dbgpt_client.mo b/i18n/locales/ru/LC_MESSAGES/dbgpt_client.mo new file mode 100644 index 0000000000000000000000000000000000000000..f389d47bd1418efc68c4c063f9bd7784c4898db5 GIT binary patch literal 438 zcmYLD(N4lJ6xHa{9)0%VnkbQIwHqYPLN$oP5JeW8yw_3Z%xIUkGm`Kd{+{3B9Yi?E zNzd&)=id8!u>Z4z*hL;9kCA)Gb7YMWxy0r*zO`qA*yvB0Ix?J*sWdj%ib34#KlUCX zm`0PA;rQ8`$Wj|eMmEc5ucqt!tsj|_dEIRF3UpXssrhVFjhMLnhZ#|_0?y*OG6@+>RGNb2H{)oytt^0D9AvY zxy8-pscpp<`_O1WE$I>WeYgb*Xvg}72mh@uZ`X2_pk1HBa=DJ6hc+?e KVJyq!Nb5O!Mt literal 0 HcmV?d00001 diff --git a/i18n/locales/ru/LC_MESSAGES/dbgpt_client.po b/i18n/locales/ru/LC_MESSAGES/dbgpt_client.po new file mode 100644 index 000000000..8ccaa75d2 --- /dev/null +++ b/i18n/locales/ru/LC_MESSAGES/dbgpt_client.po @@ -0,0 +1,79 @@ +# Russian translations for PACKAGE package +# Английские переводы для пакета PACKAGE. +# Copyright (C) 2024 THE PACKAGE'S COPYRIGHT HOLDER +# This file is distributed under the same license as the PACKAGE package. +# Automatically generated, 2024. +# +msgid "" +msgstr "" +"Project-Id-Version: PACKAGE VERSION\n" +"Report-Msgid-Bugs-To: \n" +"POT-Creation-Date: 2024-03-27 09:43+0800\n" +"PO-Revision-Date: 2024-03-27 03:21+0800\n" +"Last-Translator: Automatically generated\n" +"Language-Team: none\n" +"Language: ru\n" +"MIME-Version: 1.0\n" +"Content-Type: text/plain; charset=UTF-8\n" +"Content-Transfer-Encoding: 8bit\n" +"Plural-Forms: nplurals=3; plural=(n%10==1 && n%100!=11 ? 0 : n%10>=2 && " +"n%10<=4 && (n%100<10 || n%100>=20) ? 1 : 2);\n" + +#: ../dbgpt/client/_cli.py:30 +#, fuzzy +msgid "The name of the AWEL flow" +msgstr "Имя потока" + +#: ../dbgpt/client/_cli.py:37 +#, fuzzy +msgid "The uid of the AWEL flow" +msgstr "UID потока" + +#: ../dbgpt/client/_cli.py:55 +#, fuzzy +msgid "The messages to run AWEL flow" +msgstr "Сообщения потока" + +#: ../dbgpt/client/_cli.py:62 +#, fuzzy +msgid "The model name of AWEL flow" +msgstr "Модель потока" + +#: ../dbgpt/client/_cli.py:71 +msgid "Whether use stream mode to run AWEL flow" +msgstr "" + +#: ../dbgpt/client/_cli.py:79 +#, fuzzy +msgid "The temperature to run AWEL flow" +msgstr "Температура потока" + +#: ../dbgpt/client/_cli.py:86 +#, fuzzy +msgid "The max new tokens to run AWEL flow" +msgstr "Максимальное количество новых токенов потока" + +#: ../dbgpt/client/_cli.py:93 +#, fuzzy +msgid "The conversation id of the AWEL flow" +msgstr "Идентификатор беседы потока" + +#: ../dbgpt/client/_cli.py:101 +msgid "The json data to run AWEL flow, if set, will overwrite other options" +msgstr "" + +#: ../dbgpt/client/_cli.py:109 +#, fuzzy +msgid "The extra json data to run AWEL flow." 
+msgstr "Дополнительные JSON-данные потока" + +#: ../dbgpt/client/_cli.py:118 +#, fuzzy +msgid "Whether use interactive mode to run AWEL flow" +msgstr "Температура потока" + +#~ msgid "Whether to stream the flow, default is False" +#~ msgstr "Потоковая передача, по умолчанию False" + +#~ msgid "The json data of the flow" +#~ msgstr "JSON-данные потока" diff --git a/i18n/locales/ru/LC_MESSAGES/dbgpt_model.mo b/i18n/locales/ru/LC_MESSAGES/dbgpt_model.mo index 6d1b8e047f81ee84cbc6d33b4315389573684d9c..057005a5000eb2d453ca17cd64e610dd29583899 100644 GIT binary patch delta 394 zcmXZYJxjw-6vpu&w5egkTh^TZD!Iw##M1n>sQd4V2r3iwnP(Og+ z;NVc9;^OKe^h@|1x(VVx>Tvm;b1wJZbMANH%DL?}mn4~{G=UXN;XaPy89I24V|b5y z`1+q`Ea}3KJlvrE*e^|C59ctOln!teNALkN_<}k7NUEuEf=>!X{K7PP13Zo^80XJe z$9IemmItLp+{9_T!6rW8JT46V4YGrA}nslcY1kzCxy1+3#7UgI;q zV)D@Oh{V8uzcqm5)dVZwb diff --git a/i18n/locales/ru/LC_MESSAGES/dbgpt_model.po b/i18n/locales/ru/LC_MESSAGES/dbgpt_model.po index 4bb5d7bc9..6bfdfa7da 100644 --- a/i18n/locales/ru/LC_MESSAGES/dbgpt_model.po +++ b/i18n/locales/ru/LC_MESSAGES/dbgpt_model.po @@ -8,7 +8,7 @@ msgid "" msgstr "" "Project-Id-Version: PACKAGE VERSION\n" "Report-Msgid-Bugs-To: \n" -"POT-Creation-Date: 2024-03-24 22:53+0800\n" +"POT-Creation-Date: 2024-03-26 05:55+0800\n" "PO-Revision-Date: 2024-03-23 16:45+0800\n" "Last-Translator: Automatically generated\n" "Language-Team: none\n" @@ -45,7 +45,8 @@ msgstr "Удаленный клиент LLM" #: ../dbgpt/model/cluster/client.py:116 msgid "Remote LLM client(Connect to the remote DB-GPT model serving)" -msgstr "Удаленный клиент LLM (Подключение к удаленной модели обслуживания DB-GPT)" +msgstr "" +"Удаленный клиент LLM (Подключение к удаленной модели обслуживания DB-GPT)" #: ../dbgpt/model/cluster/client.py:119 msgid "Controller Address" @@ -149,5 +150,5 @@ msgid "The model output of upstream." msgstr "Выход модели выше." #: ../dbgpt/model/utils/chatgpt_utils.py:180 -msgid "The model output after transform to openai stream format" -msgstr "Выход модели после преобразования в формат потока openai" +msgid "The model output after transformed to openai stream format." +msgstr "Выход модели после преобразования в формат потока OpenAI" \ No newline at end of file diff --git a/i18n/locales/zh_CN/LC_MESSAGES/dbgpt_client.mo b/i18n/locales/zh_CN/LC_MESSAGES/dbgpt_client.mo new file mode 100644 index 0000000000000000000000000000000000000000..d581af0f52988f5d21de5ab0f439a065d023e084 GIT binary patch literal 1393 zcmaiy%TE(Q9LHCDR}-U&5uf=PHSx7r&_q(>0il?xJcNQrv$oT+xZP=Xr+~(T8lMpm zjW$M+h{1vfX+8L=q9;$Dy%}R-cUw*#{0IE*f`Td4Nq+m;nfcD|JM)|Qx@y5)32ixI z1L7QFCE^RhX*5rgwt}0%ZD2iE2WsFJ@Hw~<{0y!Ke}Zel6=hfp?f}cdBcKe1!0q5I z&;xB2T!;S0`I6)Xzk$cWl?xxRW?rkh1hQyONPN|cqFXmZaeUNVQ+u@Lu%Fuf zEvgK!hZ0d!4ssuPnw#Vn)}=aYEM`+#Wn{jf~pKjQ!|pVLSu>($bgL>7jO>yJpcdz literal 0 HcmV?d00001 diff --git a/i18n/locales/zh_CN/LC_MESSAGES/dbgpt_client.po b/i18n/locales/zh_CN/LC_MESSAGES/dbgpt_client.po new file mode 100644 index 000000000..d0ed3f10a --- /dev/null +++ b/i18n/locales/zh_CN/LC_MESSAGES/dbgpt_client.po @@ -0,0 +1,62 @@ +# Chinese translations for PACKAGE package +# PACKAGE 软件包的简体中文翻译. +# Copyright (C) 2024 THE PACKAGE'S COPYRIGHT HOLDER +# This file is distributed under the same license as the PACKAGE package. +# Automatically generated, 2024. 
+# +msgid "" +msgstr "" +"Project-Id-Version: PACKAGE VERSION\n" +"Report-Msgid-Bugs-To: \n" +"POT-Creation-Date: 2024-03-27 09:43+0800\n" +"PO-Revision-Date: 2024-03-27 03:21+0800\n" +"Last-Translator: Automatically generated\n" +"Language-Team: none\n" +"Language: zh_CN\n" +"MIME-Version: 1.0\n" +"Content-Type: text/plain; charset=UTF-8\n" +"Content-Transfer-Encoding: 8bit\n" + +#: ../dbgpt/client/_cli.py:30 +msgid "The name of the AWEL flow" +msgstr "AWEL 工作流的名称" + +#: ../dbgpt/client/_cli.py:37 +msgid "The uid of the AWEL flow" +msgstr "AWEL 工作流的 UID" + +#: ../dbgpt/client/_cli.py:55 +msgid "The messages to run AWEL flow" +msgstr "运行 AWEL 工作流的消息" + +#: ../dbgpt/client/_cli.py:62 +msgid "The model name of AWEL flow" +msgstr "AWEL 工作流的模型名称" + +#: ../dbgpt/client/_cli.py:71 +msgid "Whether use stream mode to run AWEL flow" +msgstr "是否使用流模式运行 AWEL 工作流" + +#: ../dbgpt/client/_cli.py:79 +msgid "The temperature to run AWEL flow" +msgstr "运行 AWEL 工作流的温度" + +#: ../dbgpt/client/_cli.py:86 +msgid "The max new tokens to run AWEL flow" +msgstr "运行 AWEL 工作流的最大新标记数" + +#: ../dbgpt/client/_cli.py:93 +msgid "The conversation id of the AWEL flow" +msgstr "AWEL 工作流的对话 ID" + +#: ../dbgpt/client/_cli.py:101 +msgid "The json data to run AWEL flow, if set, will overwrite other options" +msgstr "用于运行 AWEL 工作流的 JSON 数据,如果设置,将覆盖其他选项" + +#: ../dbgpt/client/_cli.py:109 +msgid "The extra json data to run AWEL flow." +msgstr "用于运行 AWEL 工作流的额外 JSON 数据" + +#: ../dbgpt/client/_cli.py:118 +msgid "Whether use interactive mode to run AWEL flow" +msgstr "是否使用交互模式运行 AWEL 工作流" \ No newline at end of file diff --git a/i18n/locales/zh_CN/LC_MESSAGES/dbgpt_model.mo b/i18n/locales/zh_CN/LC_MESSAGES/dbgpt_model.mo index 6f08c52029238bc1ee42693c65d97c6e0de72701..f1d59fcffe1904f43fe525ee4e463694f24eaa01 100644 GIT binary patch literal 2709 zcma)-TTC2P7{|xji>~+DdaIffABrL~EMOxXiFx;wMXoZW~o z0@fCLp~OoBgB6HHw92B1Kr3xc)Cb>c;)4&S589bso|+gReenOC8Q5K*%8=dPe&3w) z-Ou6L=Jg*acpk^+QGD*aMNzhc|A6$2u2YmP;25|WobmZHSOxiaFaU176?;MEQw8sW z90DH#_k;I<8rTAMfp>#HfiHr;g7<+>u7?e<3MBb3xCeXh>V z4Q|+=D35_*koLEM6n7W64Lk~N1xLYGz!_iOvJrV8=chqhziX4{XFo{!zXVboNs#h7 z4^rM2K+5|^pTB@){~AdC{{~yZjkkI7%U~3;0g~Mxz=y%#eEBbs{A{}2TOR=*gnS63 z^(H9u0Lvkdf>htDJ{NqhLvXZy8%X&CL8?y;Nb$W1Qj4ipD7^x=vc7;^Po(D&4+_$d zc~UI&kWZ9c_5vl7^Mg{#o&xf@1K;=izo};QP_8mAkm5kNvQ9L6iw{Cm?!^bAg+``@ z$!bg^!!5xYxNU2lT!AbsGm+pH3&&y>w8~!Ip`{ao)z>$$nuN{`QKD*>s;98g;O&CJ zX1AHP*xqV&UsIGN%@|KGn_C&(=-jRp*`S||DQ;=Pw3G(9tT8Q8Y2gdayerM^MX9|I zm*&TX%|^JEg_~+wwPtf=nfxp>Ou;Nz(JdZhdI!bD;#!6y9Kjl!A_u~?ZP?vb9d3y* zZe(=JG?GY_VR0>qqA{;3g<3W9q$zlD zJBl8XbDR* z><-gPqA?g4tT!~Bc}5uNHQ|{r*=u&Nbjn+_@@=t(nvT)&R5uH))i=dTX75m(qhN^8 zWE!0h%$+c5 z-a?EHe-gp9Yl%cR>*NL|LJqce?*K#gMhLP*fyOS%wyBi>eXAka|T5Gh1jQ5Fo) zt$~B1M!Qo8LGaf_c`Bjl#%|Ug*DM^jJ+09f)T+Wd*?(@S5u@FVVaP+Q>WD4^O8)p4 z&WXwVr7zvncGms6&li@*=AQ<-lAKR2=Ei!8Zwya!WB!yg(dV8WK}gQ*H~Fc{xl2iDh-pZmnt*|0D z$RWEvcad&Z`bUCQ?&v&{as~%5EXBR2_1EW4xN~!!7O%a`3!c~TLQW3tST&wv`->Ad olywFtoatjST}*)ce%3wJOTo#rQG8mEi1YTmGj+ks;U-tjzjnK@g8%>k delta 358 zcmbO#+Ra{nPl#nI0}yZnu?!HG0I?ttp8;YJ5MX3r-~!TuK$-_g%Rp&OAk7Qpn*nJt zAngLA`GIsakOt|`hSIe_S_sJR2h!3&dJ&N30^)r@4AfuGaFPjP!F3=FvfvSveh;KU z2K@oj;y{{@8A7W8X$c_T4oZguX(=GT07wJPWMBni5CD0I4TxEQm;;DGo&W)$G6tZL zV89M`8j#5e6#!9_MVL({uVk^E%*o0#IiJ;VvlbgSGwB!mX97i^ZEbwIryoh~aYGXW0Gz@!V*mgE diff --git a/i18n/locales/zh_CN/LC_MESSAGES/dbgpt_model.po b/i18n/locales/zh_CN/LC_MESSAGES/dbgpt_model.po index 7395d669b..4ef30c0fd 100644 --- a/i18n/locales/zh_CN/LC_MESSAGES/dbgpt_model.po +++ b/i18n/locales/zh_CN/LC_MESSAGES/dbgpt_model.po @@ -8,7 +8,7 @@ msgid "" msgstr "" 
"Project-Id-Version: PACKAGE VERSION\n" "Report-Msgid-Bugs-To: \n" -"POT-Creation-Date: 2024-03-24 22:53+0800\n" +"POT-Creation-Date: 2024-03-26 05:29+0800\n" "PO-Revision-Date: 2024-03-23 16:45+0800\n" "Last-Translator: Automatically generated\n" "Language-Team: none\n" @@ -18,69 +18,66 @@ msgstr "" "Content-Transfer-Encoding: 8bit\n" #: ../dbgpt/model/cluster/client.py:20 -#, fuzzy msgid "Default LLM Client" -msgstr "这是 LLM 客户端。" +msgstr "默认 LLM 客户端" #: ../dbgpt/model/cluster/client.py:23 msgid "Default LLM client(Connect to your DB-GPT model serving)" -msgstr "" +msgstr "默认 LLM 客户端(连接到您的 DB-GPT 模型服务)" #: ../dbgpt/model/cluster/client.py:26 ../dbgpt/model/cluster/client.py:127 msgid "Auto Convert Message" -msgstr "" +msgstr "自动转换消息" #: ../dbgpt/model/cluster/client.py:32 ../dbgpt/model/cluster/client.py:133 msgid "" "Whether to auto convert the messages that are not supported by the LLM to a " "compatible format" -msgstr "" +msgstr "是否将 LLM 不支持的消息自动转换为兼容格式" #: ../dbgpt/model/cluster/client.py:113 -#, fuzzy msgid "Remote LLM Client" -msgstr "这是 LLM 客户端。" +msgstr "远程 LLM 客户端" #: ../dbgpt/model/cluster/client.py:116 msgid "Remote LLM client(Connect to the remote DB-GPT model serving)" -msgstr "" +msgstr "远程 LLM 客户端(连接到远程 DB-GPT 模型服务)" #: ../dbgpt/model/cluster/client.py:119 msgid "Controller Address" -msgstr "" +msgstr "控制器地址" #: ../dbgpt/model/cluster/client.py:123 msgid "http://127.0.0.1:8000" -msgstr "" +msgstr "http://127.0.0.1:8000" #: ../dbgpt/model/cluster/client.py:124 msgid "Model controller address" -msgstr "" +msgstr "模型控制器地址" #: ../dbgpt/model/proxy/llms/chatgpt.py:48 -#, fuzzy msgid "OpenAI LLM Client" -msgstr "这是 LLM 客户端。" +msgstr "OpenAI LLM 客户端" #: ../dbgpt/model/proxy/llms/chatgpt.py:53 msgid "OpenAI API Key" -msgstr "" +msgstr "OpenAI API 密钥" #: ../dbgpt/model/proxy/llms/chatgpt.py:59 msgid "" "OpenAI API Key, not required if you have set OPENAI_API_KEY environment " "variable." -msgstr "" +msgstr "OpenAI API 密钥,如果您已设置 OPENAI_API_KEY 环境变量,则不需要。" #: ../dbgpt/model/proxy/llms/chatgpt.py:64 msgid "OpenAI API Base" -msgstr "" +msgstr "OpenAI API 基础" #: ../dbgpt/model/proxy/llms/chatgpt.py:70 msgid "" "OpenAI API Base, not required if you have set OPENAI_API_BASE environment " "variable." -msgstr "" +msgstr "OpenAI API 基础,如果您已设置 OPENAI_API_BASE 环境变量,则不需要。" #: ../dbgpt/model/operators/llm_operator.py:66 msgid "LLM Operator" @@ -88,7 +85,7 @@ msgstr "LLM 算子" #: ../dbgpt/model/operators/llm_operator.py:69 msgid "The LLM operator." -msgstr "这个是 LLM 算子。" +msgstr "这是 LLM 算子。" #: ../dbgpt/model/operators/llm_operator.py:72 #: ../dbgpt/model/operators/llm_operator.py:120 @@ -127,28 +124,24 @@ msgstr "流式 LLM 算子" #: ../dbgpt/model/operators/llm_operator.py:117 msgid "The streaming LLM operator." -msgstr "这是流式 LLM 算子。" +msgstr "流式 LLM 算子。" #: ../dbgpt/model/utils/chatgpt_utils.py:158 -#, fuzzy msgid "OpenAI Streaming Output Operator" -msgstr "流式 LLM 算子" +msgstr "OpenAI 流式输出算子" #: ../dbgpt/model/utils/chatgpt_utils.py:162 -#, fuzzy msgid "The OpenAI streaming LLM operator." -msgstr "这是流式 LLM 算子。" +msgstr "OpenAI 流式 LLM 算子。" #: ../dbgpt/model/utils/chatgpt_utils.py:166 -#, fuzzy msgid "Upstream Model Output" -msgstr "模型输出" +msgstr "上游模型输出" #: ../dbgpt/model/utils/chatgpt_utils.py:170 -#, fuzzy msgid "The model output of upstream." -msgstr "这是模型的输出。" +msgstr "上游模型的输出。" #: ../dbgpt/model/utils/chatgpt_utils.py:180 -msgid "The model output after transform to openai stream format" -msgstr "" +msgid "The model output after transformed to openai stream format." 
+msgstr "转换为 OpenAI 流格式后的模型输出。" \ No newline at end of file diff --git a/i18n/translate_util.py b/i18n/translate_util.py index 37fcd5e2e..f02e543b8 100644 --- a/i18n/translate_util.py +++ b/i18n/translate_util.py @@ -1,6 +1,4 @@ -"""Translate the po file content to Chinese using GPT-4. - -""" +"""Translate the po file content to Chinese using LLM.""" from typing import List, Dict, Any import asyncio import os @@ -148,6 +146,7 @@ vocabulary_map = { "AWEL": "AWEL", "RAG": "RAG", "DB-GPT": "DB-GPT", + "AWEL flow": "AWEL 工作流", }, "default": { "Transformer": "Transformer", @@ -159,6 +158,7 @@ vocabulary_map = { "AWEL": "AWEL", "RAG": "RAG", "DB-GPT": "DB-GPT", + "AWEL flow": "AWEL flow", }, } @@ -383,8 +383,11 @@ async def run_translate_po_dag( "parallel_num": parallel_num, "model_name": model_name, } - result = await task.call({"file_path": full_path, "ext_dict": ext_dict}) - return result + try: + result = await task.call({"file_path": full_path, "ext_dict": ext_dict}) + return result + except Exception as e: + print(f"Error in {module_name}: {e}") if __name__ == "__main__": @@ -413,34 +416,51 @@ if __name__ == "__main__": } parser = argparse.ArgumentParser() - parser.add_argument("--modules", type=str, default=",".join(all_modules)) - parser.add_argument("--lang", type=str, default="zh_CN") + parser.add_argument( + "--modules", + type=str, + default=",".join(all_modules), + help="Modules to translate, 'all' for all modules, split by ','.", + ) + parser.add_argument( + "--lang", + type=str, + default="zh_CN", + help="Language to translate, 'all' for all languages, split by ','.", + ) parser.add_argument("--max_new_token", type=int, default=1024) parser.add_argument("--parallel_num", type=int, default=10) parser.add_argument("--model_name", type=str, default="gpt-3.5-turbo") args = parser.parse_args() print(f"args: {args}") + # model_name = "gpt-3.5-turbo" # model_name = "gpt-4" model_name = args.model_name # modules = ["app", "core", "model", "rag", "serve", "storage", "util"] - modules = args.modules.strip().split(",") + modules = all_modules if args.modules == "all" else args.modules.strip().split(",") max_new_token = args.max_new_token parallel_num = args.parallel_num - lang = args.lang - if lang not in lang_map: - raise ValueError(f"Language {lang} not supported.") - lang_desc = lang_map[lang] - for module in modules: - asyncio.run( - run_translate_po_dag( - save_translated_po_file_task, - lang, - lang_desc, - module, - max_new_token, - parallel_num, - model_name, + langs = lang_map.keys() if args.lang == "all" else args.lang.strip().split(",") + + for lang in langs: + if lang not in lang_map: + raise ValueError( + f"Language {lang} not supported, now only support {','.join(lang_map.keys())}." + ) + + for lang in langs: + lang_desc = lang_map[lang] + for module in modules: + asyncio.run( + run_translate_po_dag( + save_translated_po_file_task, + lang, + lang_desc, + module, + max_new_token, + parallel_num, + model_name, + ) ) - ) diff --git a/setup.py b/setup.py index a0efbd927..23990a107 100644 --- a/setup.py +++ b/setup.py @@ -382,6 +382,7 @@ def core_requires(): "psutil==5.9.4", "colorama==0.4.6", "tomlkit", + "rich", ] # Just use by DB-GPT internal, we should find the smallest dependency set for run # we core unit test. 
@@ -561,16 +562,17 @@ def all_datasource_requires():
setup_spec.extras["datasource"] = [
# "sqlparse==0.4.4",
"pymysql",
- # for doris
- # mysqlclient 2.2.x have pkg-config issue on 3.10+
- "mysqlclient==2.1.0",
]
+ # To install psycopg2 and mysqlclient on Ubuntu, install libpq-dev and
+ # libmysqlclient-dev first.
setup_spec.extras["datasource_all"] = setup_spec.extras["datasource"] + [
"pyspark",
"pymssql",
# install psycopg2-binary when you are in a virtual environment
# pip install psycopg2-binary
"psycopg2",
+ # mysqlclient 2.2.x has a pkg-config issue on Python 3.10+
+ "mysqlclient==2.1.0",
# pydoris is too old; we should find a replacement package.
"pydoris>=1.0.2,<2.0.0",
"clickhouse-connect",
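Since the heavier database drivers now live only in the datasource_all extra, a quick environment check can confirm they installed correctly; note that import names differ from PyPI names (mysqlclient imports as MySQLdb, clickhouse-connect as clickhouse_connect). A small self-contained check, not part of this patch:

    import importlib

    # PyPI name -> import name for drivers in the "datasource_all" extra above.
    # pyspark and pydoris are omitted here for brevity.
    DRIVERS = {
        "pymysql": "pymysql",
        "pymssql": "pymssql",
        "psycopg2": "psycopg2",
        "mysqlclient": "MySQLdb",
        "clickhouse-connect": "clickhouse_connect",
    }

    for pypi_name, import_name in DRIVERS.items():
        try:
            importlib.import_module(import_name)
            print(f"{pypi_name}: OK")
        except ImportError as exc:
            print(f"{pypi_name}: missing ({exc})")

On Ubuntu, libpq-dev and libmysqlclient-dev (per the comment above) are needed before psycopg2 and mysqlclient will build from source.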