doc(core): Add Trace document

docs/getting_started/observability.md (new file, 309 lines)
@@ -0,0 +1,309 @@
# Debugging
-------------

DB-GPT provides a set of tools to help you troubleshoot and resolve some of the issues you may encounter.

## Trace Logs

DB-GPT writes some critical system runtime information to trace logs. By default, these are located in `logs/dbgpt*.jsonl`.
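
Each line in these files is a single JSON-encoded trace span, so you can peek at the raw data with ordinary shell tools before reaching for the CLI. A minimal sketch, assuming the default `logs/` directory relative to where DB-GPT was started:

```bash
# List the trace files DB-GPT has written (assumes the default logs/ location)
ls logs/dbgpt*.jsonl

# Pretty-print the most recent span from the newest trace file;
# each line of a .jsonl file is one self-contained JSON object
tail -n 1 "$(ls -t logs/dbgpt*.jsonl | head -n 1)" | python -m json.tool
```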

DB-GPT also offers a command-line tool, `dbgpt trace`, to help you analyze these trace logs. You can see its specific usage with the command `dbgpt trace --help`.

## Viewing Chat Details

You can use the `dbgpt trace chat` command to view chat details. By default, it will display the latest chat message.

### Viewing Service Runtime Information

```bash
dbgpt trace chat --hide_conv
```

You will see an output like:

```
+------------------------+--------------------------+-----------------------------+------------------------------------+
| Config Key (Webserver) | Config Value (Webserver) | Config Key (EmbeddingModel) | Config Value (EmbeddingModel) |
+------------------------+--------------------------+-----------------------------+------------------------------------+
| host | 0.0.0.0 | model_name | text2vec |
| port | 5000 | model_path | /app/models/text2vec-large-chinese |
| daemon | False | device | cuda |
| share | False | normalize_embeddings | None |
| remote_embedding | False | | |
| log_level | None | | |
| light | False | | |
+------------------------+--------------------------+-----------------------------+------------------------------------+
+--------------------------+-----------------------------+----------------------------+------------------------------+
| Config Key (ModelWorker) | Config Value (ModelWorker) | Config Key (WorkerManager) | Config Value (WorkerManager) |
+--------------------------+-----------------------------+----------------------------+------------------------------+
| model_name | vicuna-13b-v1.5 | model_name | vicuna-13b-v1.5 |
| model_path | /app/models/vicuna-13b-v1.5 | model_path | /app/models/vicuna-13b-v1.5 |
| device | cuda | worker_type | None |
| model_type | huggingface | worker_class | None |
| prompt_template | None | model_type | huggingface |
| max_context_size | 4096 | host | 0.0.0.0 |
| num_gpus | None | port | 5000 |
| max_gpu_memory | None | daemon | False |
| cpu_offloading | False | limit_model_concurrency | 5 |
| load_8bit | False | standalone | True |
| load_4bit | False | register | True |
| quant_type | nf4 | worker_register_host | None |
| use_double_quant | True | controller_addr | http://127.0.0.1:5000 |
| compute_dtype | None | send_heartbeat | True |
| trust_remote_code | True | heartbeat_interval | 20 |
| verbose | False | log_level | None |
+--------------------------+-----------------------------+----------------------------+------------------------------+
```

### Viewing the Latest Chat Message

```bash
dbgpt trace chat --hide_run_params
```

You will see an output like:

```
+-------------------------------------------------------------------------------------------------------------------------------------------+
| Chat Trace Details |
+----------------+--------------------------------------------------------------------------------------------------------------------------+
| Key | Value Value |
+----------------+--------------------------------------------------------------------------------------------------------------------------+
| trace_id | 5d1900c3-5aad-4159-9946-fbb600666530 |
| span_id | 5d1900c3-5aad-4159-9946-fbb600666530:14772034-bed4-4b4e-b43f-fcf3a8aad6a7 |
| conv_uid | 5e456272-68ac-11ee-9fba-0242ac150003 |
| user_input | Who are you? |
| chat_mode | chat_normal |
| select_param | None |
| model_name | vicuna-13b-v1.5 |
| temperature | 0.6 |
| max_new_tokens | 1024 |
| echo | False |
| llm_adapter | FastChatLLMModelAdaperWrapper(fastchat.model.model_adapter.VicunaAdapter) |
| User prompt | A chat between a curious user and an artificial intelligence assistant. The assistant gives helpful, detailed, and polit |
| | e answers to the user's questions. USER: Who are you? ASSISTANT: |
| Model output | You can call me Vicuna, and I was trained by Large Model Systems Organization (LMSYS) researchers as a language model. |
+----------------+--------------------------------------------------------------------------------------------------------------------------+
```
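
The chat details are not limited to plain-text tables. The `--output` option (listed in the `dbgpt trace chat --help` output further below) accepts `text`, `html`, `csv`, `latex`, or `json`, so a sketch like the following would save the latest chat as an HTML table; the target file name is only an example:

```bash
# Render the latest chat details as HTML instead of a plain-text table
dbgpt trace chat --hide_run_params --output html > latest_chat.html
```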

### Viewing Chat Details and Call Chain

```bash
dbgpt trace chat --hide_run_params --tree
```

You will see an output like:

```

Invoke Trace Tree:

Operation: DB-GPT-Web-Entry (Start: 2023-10-12 03:06:43.180, End: None)
Operation: get_chat_instance (Start: 2023-10-12 03:06:43.258, End: None)
Operation: get_chat_instance (Start: 2023-10-12 03:06:43.258, End: 2023-10-12 03:06:43.424)
Operation: stream_generator (Start: 2023-10-12 03:06:43.425, End: None)
Operation: BaseChat.stream_call (Start: 2023-10-12 03:06:43.426, End: None)
Operation: WorkerManager.generate_stream (Start: 2023-10-12 03:06:43.426, End: None)
Operation: DefaultModelWorker.generate_stream (Start: 2023-10-12 03:06:43.428, End: None)
Operation: DefaultModelWorker_call.generate_stream_func (Start: 2023-10-12 03:06:43.430, End: None)
Operation: DefaultModelWorker_call.generate_stream_func (Start: 2023-10-12 03:06:43.430, End: 2023-10-12 03:06:48.518)
Operation: DefaultModelWorker.generate_stream (Start: 2023-10-12 03:06:43.428, End: 2023-10-12 03:06:48.518)
Operation: WorkerManager.generate_stream (Start: 2023-10-12 03:06:43.426, End: 2023-10-12 03:06:48.518)
Operation: BaseChat.stream_call (Start: 2023-10-12 03:06:43.426, End: 2023-10-12 03:06:48.519)
Operation: stream_generator (Start: 2023-10-12 03:06:43.425, End: 2023-10-12 03:06:48.519)
Operation: DB-GPT-Web-Entry (Start: 2023-10-12 03:06:43.180, End: 2023-10-12 03:06:43.257)
+-------------------------------------------------------------------------------------------------------------------------------------------+
| Chat Trace Details |
+----------------+--------------------------------------------------------------------------------------------------------------------------+
| Key | Value Value |
+----------------+--------------------------------------------------------------------------------------------------------------------------+
| trace_id | 5d1900c3-5aad-4159-9946-fbb600666530 |
| span_id | 5d1900c3-5aad-4159-9946-fbb600666530:14772034-bed4-4b4e-b43f-fcf3a8aad6a7 |
| conv_uid | 5e456272-68ac-11ee-9fba-0242ac150003 |
| user_input | Who are you? |
| chat_mode | chat_normal |
| select_param | None |
| model_name | vicuna-13b-v1.5 |
| temperature | 0.6 |
| max_new_tokens | 1024 |
| echo | False |
| llm_adapter | FastChatLLMModelAdaperWrapper(fastchat.model.model_adapter.VicunaAdapter) |
| User prompt | A chat between a curious user and an artificial intelligence assistant. The assistant gives helpful, detailed, and polit |
| | e answers to the user's questions. USER: Who are you? ASSISTANT: |
| Model output | You can call me Vicuna, and I was trained by Large Model Systems Organization (LMSYS) researchers as a language model. |
+----------------+--------------------------------------------------------------------------------------------------------------------------+
```

### Viewing Chat Details Based on trace_id

```bash
dbgpt trace chat --hide_run_params --trace_id ec30d733-7b35-4d61-b02e-2832fd2e29ff
```

You will see an output like:

```
+-------------------------------------------------------------------------------------------------------------------------------------------+
| Chat Trace Details |
+----------------+--------------------------------------------------------------------------------------------------------------------------+
| Key | Value Value |
+----------------+--------------------------------------------------------------------------------------------------------------------------+
| trace_id | ec30d733-7b35-4d61-b02e-2832fd2e29ff |
| span_id | ec30d733-7b35-4d61-b02e-2832fd2e29ff:0482a0c5-38b3-4b38-8101-e42489f90ccd |
| conv_uid | 87a722de-68ae-11ee-9fba-0242ac150003 |
| user_input | Hello |
| chat_mode | chat_normal |
| select_param | None |
| model_name | vicuna-13b-v1.5 |
| temperature | 0.6 |
| max_new_tokens | 1024 |
| echo | False |
| llm_adapter | FastChatLLMModelAdaperWrapper(fastchat.model.model_adapter.VicunaAdapter) |
| User prompt | A chat between a curious user and an artificial intelligence assistant. The assistant gives helpful, detailed, and polit |
| | e answers to the user's questions. USER: Hello ASSISTANT: |
| Model output | Hello! How can I help you today? Is there something specific you want to know or talk about? I'm here to answer any ques |
| | tions you might have, to the best of my ability. |
+----------------+--------------------------------------------------------------------------------------------------------------------------+
```

### More `chat` Usage

```bash
dbgpt trace chat --help
```

```
Usage: dbgpt trace chat [OPTIONS] [FILES]...

  Show conversation details

Options:
  --trace_id TEXT    Specify the trace ID to analyze. If None,
                     show latest conversation details
  --tree             Display trace spans as a tree
  --hide_conv        Hide your conversation details
  --hide_run_params  Hide run params
  --output [text|html|csv|latex|json]
                     The output format
  --help             Show this message and exit.
```
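
These options can be combined. The trailing `FILES` argument lets you point the command at specific trace files (for example, logs copied from another machine), and `--output json` switches to machine-readable output. A sketch using the conversation from the previous section; the file pattern shown is just the default location:

```bash
# Inspect one conversation from an explicitly given set of trace files,
# emitting JSON instead of a plain-text table
dbgpt trace chat \
    --trace_id ec30d733-7b35-4d61-b02e-2832fd2e29ff \
    --hide_run_params \
    --output json \
    logs/dbgpt*.jsonl
```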

## Viewing Call Tree Based on `trace_id`

```bash
dbgpt trace tree --trace_id ec30d733-7b35-4d61-b02e-2832fd2e29ff
```

You will see an output like:

```
Operation: DB-GPT-Web-Entry (Start: 2023-10-12 03:22:10.592, End: None)
Operation: get_chat_instance (Start: 2023-10-12 03:22:10.594, End: None)
Operation: get_chat_instance (Start: 2023-10-12 03:22:10.594, End: 2023-10-12 03:22:10.658)
Operation: stream_generator (Start: 2023-10-12 03:22:10.659, End: None)
Operation: BaseChat.stream_call (Start: 2023-10-12 03:22:10.659, End: None)
Operation: WorkerManager.generate_stream (Start: 2023-10-12 03:22:10.660, End: None)
Operation: DefaultModelWorker.generate_stream (Start: 2023-10-12 03:22:10.675, End: None)
Operation: DefaultModelWorker_call.generate_stream_func (Start: 2023-10-12 03:22:10.676, End: None)
Operation: DefaultModelWorker_call.generate_stream_func (Start: 2023-10-12 03:22:10.676, End: 2023-10-12 03:22:16.130)
Operation: DefaultModelWorker.generate_stream (Start: 2023-10-12 03:22:10.675, End: 2023-10-12 03:22:16.130)
Operation: WorkerManager.generate_stream (Start: 2023-10-12 03:22:10.660, End: 2023-10-12 03:22:16.130)
Operation: BaseChat.stream_call (Start: 2023-10-12 03:22:10.659, End: 2023-10-12 03:22:16.130)
Operation: stream_generator (Start: 2023-10-12 03:22:10.659, End: 2023-10-12 03:22:16.130)
Operation: DB-GPT-Web-Entry (Start: 2023-10-12 03:22:10.592, End: 2023-10-12 03:22:10.673)
```
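
A typical workflow is to look up a `trace_id` first and then inspect its call tree. A minimal sketch that combines this command with `dbgpt trace list` (described in the next section):

```bash
# 1. List recent chat traces and note the Trace ID you are interested in
dbgpt trace list --span_type chat

# 2. Display the full call tree for that trace
dbgpt trace tree --trace_id ec30d733-7b35-4d61-b02e-2832fd2e29ff
```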

## Listing Trace Information

### Listing All Trace Information

```bash
dbgpt trace list
```

You will see an output like:
```
+--------------------------------------+---------------------------------------------------------------------------+-----------------------------------+------------------+
| Trace ID | Span ID | Operation Name | Conversation UID |
+--------------------------------------+---------------------------------------------------------------------------+-----------------------------------+------------------+
| eaf4830f-976f-45a4-9a50-244f3ab6f9e1 | eaf4830f-976f-45a4-9a50-244f3ab6f9e1:f650065f-f761-4790-99f7-8109c15f756a | run_webserver | None |
| eaf4830f-976f-45a4-9a50-244f3ab6f9e1 | eaf4830f-976f-45a4-9a50-244f3ab6f9e1:b2ff279e-0557-4b2d-8959-85e25dcfe94e | EmbeddingLoader.load | None |
| eaf4830f-976f-45a4-9a50-244f3ab6f9e1 | eaf4830f-976f-45a4-9a50-244f3ab6f9e1:b2ff279e-0557-4b2d-8959-85e25dcfe94e | EmbeddingLoader.load | None |
| eaf4830f-976f-45a4-9a50-244f3ab6f9e1 | eaf4830f-976f-45a4-9a50-244f3ab6f9e1:3e8b1b9d-5ef2-4382-af62-6b2b21cc04fd | WorkerManager._start_local_worker | None |
| eaf4830f-976f-45a4-9a50-244f3ab6f9e1 | eaf4830f-976f-45a4-9a50-244f3ab6f9e1:3e8b1b9d-5ef2-4382-af62-6b2b21cc04fd | WorkerManager._start_local_worker | None |
| eaf4830f-976f-45a4-9a50-244f3ab6f9e1 | eaf4830f-976f-45a4-9a50-244f3ab6f9e1:4c280ec9-0fd6-4ee8-b79f-1afcab0f9901 | DefaultModelWorker.start | None |
+--------------------------------------+---------------------------------------------------------------------------+-----------------------------------+------------------+
```

### Listing Trace Information by Trace Type

```bash
dbgpt trace list --span_type chat
```

You will see an output like:
```
+--------------------------------------+---------------------------------------------------------------------------+-------------------+--------------------------------------+
| Trace ID | Span ID | Operation Name | Conversation UID |
+--------------------------------------+---------------------------------------------------------------------------+-------------------+--------------------------------------+
| 5d1900c3-5aad-4159-9946-fbb600666530 | 5d1900c3-5aad-4159-9946-fbb600666530:14772034-bed4-4b4e-b43f-fcf3a8aad6a7 | get_chat_instance | 5e456272-68ac-11ee-9fba-0242ac150003 |
| 5d1900c3-5aad-4159-9946-fbb600666530 | 5d1900c3-5aad-4159-9946-fbb600666530:14772034-bed4-4b4e-b43f-fcf3a8aad6a7 | get_chat_instance | 5e456272-68ac-11ee-9fba-0242ac150003 |
| ec30d733-7b35-4d61-b02e-2832fd2e29ff | ec30d733-7b35-4d61-b02e-2832fd2e29ff:0482a0c5-38b3-4b38-8101-e42489f90ccd | get_chat_instance | 87a722de-68ae-11ee-9fba-0242ac150003 |
| ec30d733-7b35-4d61-b02e-2832fd2e29ff | ec30d733-7b35-4d61-b02e-2832fd2e29ff:0482a0c5-38b3-4b38-8101-e42489f90ccd | get_chat_instance | 87a722de-68ae-11ee-9fba-0242ac150003 |
+--------------------------------------+---------------------------------------------------------------------------+-------------------+--------------------------------------+
```

### Searching Trace Information

```bash
dbgpt trace list --search Hello
```

You will see an output like:
```
+--------------------------------------+---------------------------------------------------------------------------+----------------------------------------------+--------------------------------------+
| Trace ID | Span ID | Operation Name | Conversation UID |
+--------------------------------------+---------------------------------------------------------------------------+----------------------------------------------+--------------------------------------+
| ec30d733-7b35-4d61-b02e-2832fd2e29ff | ec30d733-7b35-4d61-b02e-2832fd2e29ff:0482a0c5-38b3-4b38-8101-e42489f90ccd | get_chat_instance | 87a722de-68ae-11ee-9fba-0242ac150003 |
| ec30d733-7b35-4d61-b02e-2832fd2e29ff | ec30d733-7b35-4d61-b02e-2832fd2e29ff:0482a0c5-38b3-4b38-8101-e42489f90ccd | get_chat_instance | 87a722de-68ae-11ee-9fba-0242ac150003 |
| ec30d733-7b35-4d61-b02e-2832fd2e29ff | ec30d733-7b35-4d61-b02e-2832fd2e29ff:03de6c87-34d6-426a-85e8-7d46d475411e | BaseChat.stream_call | None |
| ec30d733-7b35-4d61-b02e-2832fd2e29ff | ec30d733-7b35-4d61-b02e-2832fd2e29ff:03de6c87-34d6-426a-85e8-7d46d475411e | BaseChat.stream_call | None |
| ec30d733-7b35-4d61-b02e-2832fd2e29ff | ec30d733-7b35-4d61-b02e-2832fd2e29ff:19593596-b4c7-4d15-a3c1-0924d86098dd | DefaultModelWorker_call.generate_stream_func | None |
| ec30d733-7b35-4d61-b02e-2832fd2e29ff | ec30d733-7b35-4d61-b02e-2832fd2e29ff:19593596-b4c7-4d15-a3c1-0924d86098dd | DefaultModelWorker_call.generate_stream_func | None |
+--------------------------------------+---------------------------------------------------------------------------+----------------------------------------------+--------------------------------------+
```

### More `list` Usage

```bash
dbgpt trace list --help
```

```
Usage: dbgpt trace list [OPTIONS] [FILES]...

  List your trace spans

Options:
  --trace_id TEXT        Specify the trace ID to list
  --span_id TEXT         Specify the Span ID to list.
  --span_type TEXT       Specify the Span Type to list.
  --parent_span_id TEXT  Specify the Parent Span ID to list.
  --search TEXT          Search trace_id, span_id, parent_span_id,
                         operation_name or content in metadata.
  -l, --limit INTEGER    Limit the number of recent span displayed.
  --start_time TEXT      Filter by start time. Format: "YYYY-MM-DD
                         HH:MM:SS.mmm"
  --end_time TEXT        Filter by end time. Format: "YYYY-MM-DD
                         HH:MM:SS.mmm"
  --desc                 Whether to use reverse sorting. By default,
                         sorting is based on start time.
  --output [text|html|csv|latex|json]
                         The output format
  --help                 Show this message and exit.
```
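
The filters can be combined to narrow down a busy trace file. A sketch that shows the five most recent chat spans inside a time window, in reverse start-time order; the timestamps are placeholders only:

```bash
# Five most recent chat spans within a time window, newest first
dbgpt trace list \
    --span_type chat \
    --start_time "2023-10-12 00:00:00.000" \
    --end_time "2023-10-12 23:59:59.999" \
    -l 5 \
    --desc
```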

@@ -53,6 +53,7 @@ Getting Started
   getting_started/concepts.md
   getting_started/tutorials.md
   getting_started/faq.rst
   getting_started/observability.md


Modules

docs/locales/zh_CN/LC_MESSAGES/getting_started/observability.po (new file, 121 lines)
@@ -0,0 +1,121 @@
# SOME DESCRIPTIVE TITLE.
# Copyright (C) 2023, csunny
# This file is distributed under the same license as the DB-GPT package.
# FIRST AUTHOR <EMAIL@ADDRESS>, 2023.
#
#, fuzzy
msgid ""
msgstr ""
"Project-Id-Version: DB-GPT 👏👏 0.3.9\n"
"Report-Msgid-Bugs-To: \n"
"POT-Creation-Date: 2023-10-12 11:54+0800\n"
"PO-Revision-Date: YEAR-MO-DA HO:MI+ZONE\n"
"Last-Translator: FULL NAME <EMAIL@ADDRESS>\n"
"Language: zh_CN\n"
"Language-Team: zh_CN <LL@li.org>\n"
"Plural-Forms: nplurals=1; plural=0;\n"
"MIME-Version: 1.0\n"
"Content-Type: text/plain; charset=utf-8\n"
"Content-Transfer-Encoding: 8bit\n"
"Generated-By: Babel 2.12.1\n"

#: ../../getting_started/observability.md:1 c88ce18295444597baa7355efc79ab15
msgid "Debugging"
msgstr ""

#: ../../getting_started/observability.md:4 522a4e83c62c493381ca3a452ced8ecf
msgid ""
"DB-GPT provides a set of tools to help you troubleshoot and resolve some "
"of the issues you may encounter."
msgstr ""
"DB-GPT 提供了一套工具来帮助你排查和解决一些遇到的问题。"

#: ../../getting_started/observability.md:7 7fe5b8ab29194e42b74a8ab3e77006c7
msgid "Trace Logs"
msgstr "追踪日志"

#: ../../getting_started/observability.md:9 8a55f7a2b5a247d49728969f179bf50d
msgid ""
"DB-GPT writes some critical system runtime information to trace logs. By "
"default, these are located in `logs/dbgpt*.jsonl`."
msgstr ""
"DB-GPT 会将一些系统运行的关键的信息写入到追踪日志中,默认情况下,在 `logs/dbgpt*.jsonl` 中。"

#: ../../getting_started/observability.md:11 5e2b847179e9427a8ae022b5338cbbd9
msgid ""
"DB-GPT also offers a command-line tool, `dbgpt trace`, to help you "
"analyze these trace logs. You can see its specific usage with the command"
" `dbgpt trace --help`."
msgstr ""
"DB-GPT 也提供了命令工具 `dbgpt trace` 命令来帮助你分析追踪日志,你可以使用命令 `dbgpt trace --help` 来查看具体的用法。"

#: ../../getting_started/observability.md:14 6e0c9c4ba6ac4eb49d9289b0d63f77fb
msgid "Viewing Chat Details"
msgstr "查看对话详情"

#: ../../getting_started/observability.md:16 b9badbab74de47f192ff117d7d36fa72
msgid ""
"You can use the `dbgpt trace chat` command to view chat details. By "
"default, it will display the latest chat message."
msgstr ""
"你可以使用 `dbgpt trace chat` 命令来查看对话信息,默认情况会显示你最新的一条对话信息。"

#: ../../getting_started/observability.md:18 55c7466bd80d43c9a355d87daf2a2be7
msgid "Viewing Service Runtime Information"
msgstr "查看服务运行信息"

#: ../../getting_started/observability.md:24
#: ../../getting_started/observability.md:66
#: ../../getting_started/observability.md:98
#: ../../getting_started/observability.md:146
#: ../../getting_started/observability.md:200
#: ../../getting_started/observability.md:229
#: ../../getting_started/observability.md:249
#: ../../getting_started/observability.md:267 18907a58a0c3493aa24c17e367309471
#: 387f08b6cd864a7682522b5a40863e79 5fe0baa5803d4ca5ad0e8cbd8a859c8c
#: 7abbfdd996444999a24cbad852d2e545 957bdf6826e045608c8c3ebd06c8fe76
#: acddf2cdf8c94bd6864d66739fa26459 f3fcaf2b47774779bad2feb3ef4318c4
#: f99bb110024443f68cc8b7f19956eff4
msgid "You will see an output like:"
msgstr "你将会看到类似的输出:"

#: ../../getting_started/observability.md:60 5c8c213a5bac434bb3defe6611a03813
msgid "Viewing the Latest Chat Message"
msgstr "查看最近的一条对话信息"

#: ../../getting_started/observability.md:92 ce19873d8e754173849d14eaeab963d2
msgid "Viewing Chat Details and Call Chain"
msgstr "查看对话信息和调用链路"

#: ../../getting_started/observability.md:140 36bcc37971ce4d6682f1ea32e2e9a980
msgid "Viewing Chat Details Based on trace_id"
msgstr "根据 `trace_id` 查看对应的对话信息"

#: ../../getting_started/observability.md:172 c74968492f7544758c9d95fa831c4fcf
msgid "More `chat` Usage"
msgstr "更多 `chat` 用法"

#: ../../getting_started/observability.md:194 c2e5a7e7b1ee40fea15790d66b79eb11
msgid "Viewing Call Tree Based on `trace_id`"
msgstr "根据 `trace_id` 查看调用树"

#: ../../getting_started/observability.md:220 6bd64d2ad0ce442e8e81aa1ae7dd2189
msgid "Listing Trace Information"
msgstr "列出追踪信息"

#: ../../getting_started/observability.md:222 ce643441e8744ab09fcbd4081d2adb4a
msgid "Listing All Trace Information"
msgstr "列出全部追踪信息"

#: ../../getting_started/observability.md:243 374376d81ed54bc2a450505abfe7dc6d
msgid "Listing Trace Information by Trace Type"
msgstr "根据追踪类型列出追踪信息"

#: ../../getting_started/observability.md:261 8fc08a9c924d47309dc7062811c4fb62
msgid "Searching Trace Information"
msgstr "搜索追踪信息"

#: ../../getting_started/observability.md:281 3681413d196144389431422010a7e30f
msgid "More `list` Usage"
msgstr "更多 `list` 用法"

@@ -174,6 +174,9 @@ def list(
def tree(trace_id: str, files):
    """Display trace links as a tree"""
    hierarchy = _view_trace_hierarchy(trace_id, files)
    if not hierarchy:
        _print_empty_message(files)
        return
    _print_trace_hierarchy(hierarchy)


@@ -193,6 +196,14 @@ def tree(trace_id: str, files):
    is_flag=True,
    help="Display trace spans as a tree",
)
@click.option(
    "--hide_conv",
    required=False,
    type=bool,
    default=False,
    is_flag=True,
    help="Hide your conversation details",
)
@click.option(
    "--hide_run_params",
    required=False,
@@ -209,7 +220,14 @@ def tree(trace_id: str, files):
    help="The output format",
)
@click.argument("files", nargs=-1, type=click.Path(exists=False, readable=True))
def chat(trace_id: str, tree: bool, hide_run_params: bool, output: str, files):
def chat(
    trace_id: str,
    tree: bool,
    hide_conv: bool,
    hide_run_params: bool,
    output: str,
    files,
):
    """Show conversation details"""
    from prettytable import PrettyTable

@@ -220,6 +238,9 @@ def chat(trace_id: str, tree: bool, hide_run_params: bool, output: str, files):
        spans, key=lambda span: _parse_datetime(span["start_time"]), reverse=True
    )
    spans = [sp for sp in spans]
    if not spans:
        _print_empty_message(files)
        return
    service_spans = {}
    service_names = set(SpanTypeRunName.values())
    found_trace_id = None
@@ -265,6 +286,8 @@ def chat(trace_id: str, tree: bool, hide_run_params: bool, output: str, files):
    else:
        for service_name, table in service_tables.items():
            print(table.get_formatted_string(out_format=output, **out_kwargs))
    if hide_conv:
        return

    if not found_trace_id:
        print(f"Can't found conversation with trace_id: {trace_id}")
@@ -375,6 +398,13 @@ def read_spans_from_files(files=None) -> Iterable[Dict]:
            yield json.loads(line)


def _print_empty_message(files=None):
    if not files:
        files = [_DEFAULT_FILE_PATTERN]
    file_names = ",".join(files)
    print(f"No trace span records found in your tracer files: {file_names}")


def _new_search_span_func(search: str):
    def func(span: Dict) -> bool:
        items = [span["trace_id"], span["span_id"], span["parent_span_id"]]
@@ -450,6 +480,8 @@ def _view_trace_hierarchy(trace_id, files=None):
    """Find and display the calls of the entire link based on the given trace_id"""
    spans = read_spans_from_files(files)
    trace_spans = [span for span in spans if span["trace_id"] == trace_id]
    if not trace_spans:
        return None
    hierarchy = _build_trace_hierarchy(trace_spans)
    return hierarchy