fix:client mypy error

aries_ckt 2024-03-20 16:22:38 +08:00
parent 7bc5c59a89
commit f43abf3155
19 changed files with 1814 additions and 80 deletions


@ -72,7 +72,6 @@ async def check_api_key(
@router.post("/v2/chat/completions", dependencies=[Depends(check_api_key)])
async def chat_completions(
request: ChatCompletionRequestBody = Body(),
flow_service: FlowService = Depends(get_chat_flow),
):
"""Chat V2 completions
Args:
@ -121,7 +120,9 @@ async def chat_completions(
media_type="text/event-stream",
)
elif (
request.chat_mode is None or request.chat_mode == ChatMode.CHAT_KNOWLEDGE.value
request.chat_mode is None
or request.chat_mode == ChatMode.CHAT_NORMAL.value
or request.chat_mode == ChatMode.CHAT_KNOWLEDGE.value
):
with root_tracer.start_span(
"get_chat_instance", span_type=SpanType.CHAT, metadata=request.dict()


@ -1,21 +1,49 @@
"""App Client API."""
from dbgpt.client.client import Client
from typing import List
from dbgpt.client.client import Client, ClientException
from dbgpt.client.schemas import AppModel
from dbgpt.serve.core import Result
async def get_app(client: Client, app_id: str):
async def get_app(client: Client, app_id: str) -> AppModel:
"""Get an app.
Args:
client (Client): The dbgpt client.
app_id (str): The app id.
Returns:
AppModel: The app model.
Raises:
ClientException: If the request failed.
"""
return await client.get("/apps/" + app_id)
try:
res = await client.get("/apps/" + app_id)
result: Result = res.json()
if result["success"]:
return AppModel(**result["data"])
else:
raise ClientException(status=result["err_code"], reason=result)
except Exception as e:
raise ClientException(f"Failed to get app: {e}")
async def list_app(client: Client):
async def list_app(client: Client) -> List[AppModel]:
"""List apps.
Args:
client (Client): The dbgpt client.
Returns:
List[AppModel]: The list of app models.
Raises:
ClientException: If the request failed.
"""
return await client.get("/apps")
try:
res = await client.get("/apps")
result: Result = res.json()
if result["success"]:
return [AppModel(**app) for app in result["data"]["app_list"]]
else:
raise ClientException(status=result["err_code"], reason=result)
except Exception as e:
raise ClientException(f"Failed to list apps: {e}")


@ -24,17 +24,11 @@ class ClientException(Exception):
reason: Optional[str], the reason for the exception.
http_resp: Optional[httpx.Response], the HTTP response object.
"""
reason = json.loads(reason)
if http_resp:
self.status = http_resp.status_code
self.reason = http_resp.content
self.body = http_resp.content
self.headers = None
else:
self.status = status
self.reason = reason
self.body = None
self.headers = None
self.status = status
self.reason = reason
self.http_resp = http_resp
self.headers = http_resp.headers if http_resp else None
self.body = http_resp.text if http_resp else None
def __str__(self):
"""Return the error message."""


@ -1,55 +1,114 @@
"""this module contains the flow client functions."""
from dbgpt.client.client import Client
from typing import List
from dbgpt.client.client import Client, ClientException
from dbgpt.core.awel.flow.flow_factory import FlowPanel
from dbgpt.serve.core import Result
async def create_flow(client: Client, flow: FlowPanel):
async def create_flow(client: Client, flow: FlowPanel) -> FlowPanel:
"""Create a new flow.
Args:
client (Client): The dbgpt client.
flow (FlowPanel): The flow panel.
"""
return await client.get("/awel/flows", flow.dict())
try:
res = await client.get("/awel/flows", flow.dict())
result: Result = res.json()
if result["success"]:
return FlowPanel(**result["data"])
else:
raise ClientException(status=result["err_code"], reason=result)
except Exception as e:
raise ClientException(f"Failed to create flow: {e}")
async def update_flow(client: Client, flow: FlowPanel):
async def update_flow(client: Client, flow: FlowPanel) -> FlowPanel:
"""Update a flow.
Args:
client (Client): The dbgpt client.
flow (FlowPanel): The flow panel.
Returns:
FlowPanel: The flow panel.
Raises:
ClientException: If the request failed.
"""
return await client.put("/awel/flows", flow.dict())
try:
res = await client.put("/awel/flows", flow.dict())
result: Result = res.json()
if result["success"]:
return FlowPanel(**result["data"])
else:
raise ClientException(status=result["err_code"], reason=result)
except Exception as e:
raise ClientException(f"Failed to update flow: {e}")
async def delete_flow(client: Client, flow_id: str):
async def delete_flow(client: Client, flow_id: str) -> FlowPanel:
"""
Delete a flow.
Args:
client (Client): The dbgpt client.
flow_id (str): The flow id.
Returns:
FlowPanel: The flow panel.
Raises:
ClientException: If the request failed.
"""
return await client.get("/awel/flows/" + flow_id)
try:
res = await client.delete("/awel/flows/" + flow_id)
result: Result = res.json()
if result["success"]:
return FlowPanel(**result["data"])
else:
raise ClientException(status=result["err_code"], reason=result)
except Exception as e:
raise ClientException(f"Failed to delete flow: {e}")
async def get_flow(client: Client, flow_id: str):
async def get_flow(client: Client, flow_id: str) -> FlowPanel:
"""
Get a flow.
Args:
client (Client): The dbgpt client.
flow_id (str): The flow id.
Returns:
FlowPanel: The flow panel.
Raises:
ClientException: If the request failed.
"""
return await client.get("/awel/flows/" + flow_id)
try:
res = await client.get("/awel/flows/" + flow_id)
result: Result = res.json()
if result["success"]:
return FlowPanel(**result["data"])
else:
raise ClientException(status=result["err_code"], reason=result)
except Exception as e:
raise ClientException(f"Failed to get flow: {e}")
async def list_flow(client: Client):
async def list_flow(client: Client) -> List[FlowPanel]:
"""
List flows.
Args:
client (Client): The dbgpt client.
Returns:
List[FlowPanel]: The list of flow panels.
Raises:
ClientException: If the request failed.
"""
return await client.get("/awel/flows")
try:
res = await client.get("/awel/flows")
result: Result = res.json()
if result["success"]:
return [FlowPanel(**flow) for flow in result["data"]["items"]]
else:
raise ClientException(status=result["err_code"], reason=result)
except Exception as e:
raise ClientException(f"Failed to list flows: {e}")


@ -1,104 +1,220 @@
"""Knowledge API client."""
import json
from typing import List
from dbgpt.client.client import Client
from dbgpt.client.client import Client, ClientException
from dbgpt.client.schemas import DocumentModel, SpaceModel, SyncModel
from dbgpt.serve.core import Result
async def create_space(client: Client, app_model: SpaceModel):
async def create_space(client: Client, space_model: SpaceModel) -> SpaceModel:
"""Create a new space.
Args:
client (Client): The dbgpt client.
app_model (SpaceModel): The app model.
space_model (SpaceModel): The space model.
Returns:
SpaceModel: The space model.
Raises:
ClientException: If the request failed.
"""
return await client.post("/knowledge/spaces", app_model.dict())
try:
res = await client.post("/knowledge/spaces", space_model.dict())
result: Result = res.json()
if result["success"]:
return SpaceModel(**result["data"])
else:
raise ClientException(status=result["err_code"], reason=result)
except Exception as e:
raise ClientException(f"Failed to create space: {e}")
async def update_space(client: Client, app_model: SpaceModel):
async def update_space(client: Client, space_model: SpaceModel) -> SpaceModel:
"""Update a document.
Args:
client (Client): The dbgpt client.
app_model (SpaceModel): The app model.
space_model (SpaceModel): The space model.
Returns:
SpaceModel: The space model.
Raises:
ClientException: If the request failed.
"""
return await client.put("/knowledge/spaces", app_model.dict())
try:
res = await client.put("/knowledge/spaces", space_model.dict())
result: Result = res.json()
if result["success"]:
return SpaceModel(**result["data"])
else:
raise ClientException(status=result["err_code"], reason=result)
except Exception as e:
raise ClientException(f"Failed to update space: {e}")
async def delete_space(client: Client, space_id: str):
async def delete_space(client: Client, space_id: str) -> SpaceModel:
"""Delete a space.
Args:
client (Client): The dbgpt client.
app_id (str): The app id.
space_id (str): The space id.
Returns:
SpaceModel: The space model.
Raises:
ClientException: If the request failed.
"""
return await client.delete("/knowledge/spaces/" + space_id)
try:
res = await client.delete("/knowledge/spaces/" + space_id)
result: Result = res.json()
if result["success"]:
return SpaceModel(**result["data"])
else:
raise ClientException(status=result["err_code"], reason=result)
except Exception as e:
raise ClientException(f"Failed to delete space: {e}")
async def get_space(client: Client, space_id: str):
async def get_space(client: Client, space_id: str) -> SpaceModel:
"""Get a document.
Args:
client (Client): The dbgpt client.
app_id (str): The app id.
space_id (str): The space id.
Returns:
SpaceModel: The space model.
Raises:
ClientException: If the request failed.
"""
return await client.get("/knowledge/spaces/" + space_id)
try:
res = await client.get("/knowledge/spaces/" + space_id)
result: Result = res.json()
if result["success"]:
return SpaceModel(**result["data"])
else:
raise ClientException(status=result["err_code"], reason=result)
except Exception as e:
raise ClientException(f"Failed to get space: {e}")
async def list_space(client: Client):
"""List apps.
async def list_space(client: Client) -> List[SpaceModel]:
"""List spaces.
Args:
client (Client): The dbgpt client.
Returns:
List[SpaceModel]: The list of space models.
Raises:
ClientException: If the request failed.
"""
return await client.get("/knowledge/spaces")
try:
res = await client.get("/knowledge/spaces")
result: Result = res.json()
if result["success"]:
return [SpaceModel(**space) for space in result["data"]["items"]]
else:
raise ClientException(status=result["err_code"], reason=result)
except Exception as e:
raise ClientException(f"Failed to list spaces: {e}")
async def create_document(client: Client, doc_model: DocumentModel):
"""Create a new space.
async def create_document(client: Client, doc_model: DocumentModel) -> DocumentModel:
"""Create a new document.
Args:
client (Client): The dbgpt client.
doc_model (SpaceModel): The document model.
"""
return await client.post_param("/knowledge/documents", doc_model.dict())
try:
res = await client.post_param("/knowledge/documents", doc_model.dict())
result: Result = res.json()
if result["success"]:
return DocumentModel(**result["data"])
else:
raise ClientException(status=result["err_code"], reason=result)
except Exception as e:
raise ClientException(f"Failed to create document: {e}")
async def delete_document(client: Client, document_id: str):
async def delete_document(client: Client, document_id: str) -> DocumentModel:
"""Delete a document.
Args:
client (Client): The dbgpt client.
app_id (str): The app id.
document_id (str): The document id.
Returns:
DocumentModel: The document model.
Raises:
ClientException: If the request failed.
"""
return await client.delete("/knowledge/documents/" + document_id)
try:
res = await client.delete("/knowledge/documents/" + document_id)
result: Result = res.json()
if result["success"]:
return DocumentModel(**result["data"])
else:
raise ClientException(status=result["err_code"], reason=result)
except Exception as e:
raise ClientException(f"Failed to delete document: {e}")
async def get_document(client: Client, document_id: str):
async def get_document(client: Client, document_id: str) -> DocumentModel:
"""Get a document.
Args:
client (Client): The dbgpt client.
app_id (str): The app id.
document_id (str): The document id.
Returns:
DocumentModel: The document model.
Raises:
ClientException: If the request failed.
"""
return await client.get("/knowledge/documents/" + document_id)
try:
res = await client.get("/knowledge/documents/" + document_id)
result: Result = res.json()
if result["success"]:
return DocumentModel(**result["data"])
else:
raise ClientException(status=result["err_code"], reason=result)
except Exception as e:
raise ClientException(f"Failed to get document: {e}")
async def list_document(client: Client):
async def list_document(client: Client) -> List[DocumentModel]:
"""List documents.
Args:
client (Client): The dbgpt client.
"""
return await client.get("/knowledge/documents")
try:
res = await client.get("/knowledge/documents")
result: Result = res.json()
if result["success"]:
return [DocumentModel(**document) for document in result["data"]["items"]]
else:
raise ClientException(status=result["err_code"], reason=result)
except Exception as e:
raise ClientException(f"Failed to list documents: {e}")
async def sync_document(client: Client, sync_model: SyncModel):
async def sync_document(client: Client, sync_model: SyncModel) -> List:
"""Sync document.
Args:
client (Client): The dbgpt client.
sync_model (SyncModel): The sync model.
Returns:
List: The list of document ids.
Raises:
ClientException: If the request failed.
"""
return await client.post(
"/knowledge/documents/sync", [json.loads(sync_model.json())]
)
try:
res = await client.post(
"/knowledge/documents/sync", [json.loads(sync_model.json())]
)
result: Result = res.json()
if result["success"]:
return result["data"]
else:
raise ClientException(status=result["err_code"], reason=result)
except Exception as e:
raise ClientException(f"Failed to list documents: {e}")


@ -21,7 +21,7 @@ class ChatCompletionRequestBody(BaseModel):
messages: Union[str, List[str]] = Field(
..., description="User input messages", examples=["Hello", "How are you?"]
)
stream: bool = Field(default=False, description="Whether return stream")
stream: bool = Field(default=True, description="Whether return stream")
temperature: Optional[float] = Field(
default=None,
@ -174,6 +174,10 @@ class AppModel(BaseModel):
class SpaceModel(BaseModel):
"""Space model."""
id: str = Field(
default=None,
description="space id",
)
name: str = Field(
default=None,
description="knowledge space name",
@ -190,6 +194,10 @@ class SpaceModel(BaseModel):
default=None,
description="space owner",
)
context: Optional[str] = Field(
default=None,
description="space argument context",
)
class DocumentModel(BaseModel):


@ -147,8 +147,8 @@ async def delete(uid: str, service: Service = Depends(get_service)) -> Result[No
Returns:
Result[None]: The response
"""
service.delete(uid)
return Result.succ(None)
inst = service.delete(uid)
return Result.succ(inst)
@router.get("/flows/{uid}")


@ -15,11 +15,17 @@ class SpaceServeRequest(BaseModel):
id: Optional[int] = Field(None, description="The space id")
name: str = Field(None, description="The space name")
"""vector_type: vector type"""
vector_type: str = Field(None, description="The vector type")
vector_type: str = Field("Chroma", description="The vector type")
"""desc: description"""
desc: str = Field(None, description="The description")
desc: Optional[str] = Field(None, description="The description")
"""owner: owner"""
owner: str = Field(None, description="The owner")
owner: Optional[str] = Field(None, description="The owner")
"""context: argument context"""
context: Optional[str] = Field(None, description="The context")
"""gmt_created: created time"""
gmt_created: Optional[str] = Field(None, description="The created time")
"""gmt_modified: modified time"""
gmt_modified: Optional[str] = Field(None, description="The modified time")
class DocumentServeRequest(BaseModel):


@ -38,7 +38,7 @@ class KnowledgeSpaceDao(BaseDao):
session.commit()
space_id = knowledge_space.id
session.close()
return space_id
return self.to_response(knowledge_space)
def get_knowledge_space(self, query: KnowledgeSpaceEntity):
"""Get knowledge space by query"""
@ -81,11 +81,21 @@ class KnowledgeSpaceDao(BaseDao):
def update_knowledge_space(self, space: KnowledgeSpaceEntity):
"""Update knowledge space"""
session = self.get_raw_session()
session.merge(space)
request = SpaceServeRequest(id=space.id)
update_request = self.to_request(space)
query = self._create_query_object(session, request)
entry = query.first()
if entry is None:
raise Exception("Invalid request")
for key, value in update_request.dict().items(): # type: ignore
if value is not None:
setattr(entry, key, value)
session.merge(entry)
session.commit()
session.close()
return True
return self.to_response(space)
def delete_knowledge_space(self, space: KnowledgeSpaceEntity):
"""Delete knowledge space"""
@ -127,6 +137,7 @@ class KnowledgeSpaceDao(BaseDao):
vector_type=entity.vector_type,
desc=entity.desc,
owner=entity.owner,
context=entity.context,
)
def to_response(self, entity: KnowledgeSpaceEntity) -> SpaceServeResponse:


@ -145,9 +145,7 @@ class Service(BaseService[KnowledgeSpaceEntity, SpaceServeRequest, SpaceServeRes
status_code=400,
detail=f"no space name named {request.name}",
)
space = spaces[0]
query_request = {"id": space.id}
update_obj = self._dao.update(query_request, update_request=request)
update_obj = self._dao.update_knowledge_space(self._dao.from_request(request))
return update_obj
async def create_document(

docs/docs/api/app.md (new file, 188 lines)

@ -0,0 +1,188 @@
# App
Get started with the App API
# Chat App
```python
POST /api/v2/chat/completions
```
### Examples
import Tabs from '@theme/Tabs';
import TabItem from '@theme/TabItem';
### Stream Chat App
<Tabs
defaultValue="python"
groupId="chat"
values={[
{label: 'Curl', value: 'curl'},
{label: 'Python', value: 'python'},
]
}>
<TabItem value="curl">
```shell
DBGPT_API_KEY="dbgpt"
APP_ID="{YOUR_APP_ID}"
curl -X POST "http://localhost:5000/api/v2/chat/completions" \
-H "Authorization: Bearer $DBGPT_API_KEY" \
-H "accept: application/json" \
-H "Content-Type: application/json" \
-d "{\"messages\":\"Hello\",\"model\":\"chatgpt_proxyllm\", \"chat_mode\": \"chat_app\", \"chat_param\": "$APP_ID"}"
```
</TabItem>
<TabItem value="python">
```python
from dbgpt.client.client import Client
DBGPT_API_KEY = "dbgpt"
APP_ID="{YOUR_APP_ID}"
client = Client(api_key=DBGPT_API_KEY)
response = client.chat_stream(messages="Introduce AWEL", model="chatgpt_proxyllm", chat_mode="chat_app", chat_param=APP_ID)
```
</TabItem>
</Tabs>
### Chat Completion Stream Response
```commandline
data: {"id": "109bfc28-fe87-452c-8e1f-d4fe43283b7d", "created": 1710919480, "model": "chatgpt_proxyllm", "choices": [{"index": 0, "delta": {"role": "assistant", "content": "```agent-plans\n[{\"name\": \"Introduce Awel\", \"num\": 2, \"status\": \"complete\", \"agent\": \"Human\", \"markdown\": \"```agent-messages\\n[{\\\"sender\\\": \\\"Summarizer\\\", \\\"receiver\\\": \\\"Human\\\", \\\"model\\\": \\\"chatgpt_proxyllm\\\", \\\"markdown\\\": \\\"Agentic Workflow Expression Language (AWEL) is a specialized language designed for developing large model applications with intelligent agent workflows. It offers flexibility and functionality, allowing developers to focus on business logic for LLMs applications without getting bogged down in model and environment details. AWEL uses a layered API design architecture, making it easier to work with. You can find examples and source code to get started with AWEL, and it supports various operators and environments. AWEL is a powerful tool for building native data applications through workflows and agents.\"}]\n```"}}]}
data: [DONE]
```
### Get App
```python
GET /api/v2/serve/apps/{app_id}
```
#### Query Parameters
________
<b>app_id</b> <font color="gray"> string </font> <font color="red"> Required </font>
app id
________
#### Response body
Return <a href="#the-app-object">App Object</a>
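A minimal Python sketch for fetching an app with the client helper; it assumes the app helpers are importable from `dbgpt.client.app`, mirroring the `dbgpt.client.flow` and `dbgpt.client.knowledge` helpers used elsewhere in these docs:
```python
from dbgpt.client.client import Client
from dbgpt.client.app import get_app  # assumed module path for the app helpers

DBGPT_API_KEY = "dbgpt"
APP_ID = "{YOUR_APP_ID}"

client = Client(api_key=DBGPT_API_KEY)
# get_app is an async helper: it returns an AppModel on success
# and raises ClientException if the request fails.
res = await get_app(client=client, app_id=APP_ID)
```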
### List App
```python
GET /api/v2/serve/apps
```
#### Response body
Return <a href="#the-app-object">App Object</a> List
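A matching sketch for listing apps, under the same assumption about the module path:
```python
from dbgpt.client.client import Client
from dbgpt.client.app import list_app  # assumed module path for the app helpers

DBGPT_API_KEY = "dbgpt"

client = Client(api_key=DBGPT_API_KEY)
# Returns a list of AppModel objects, or raises ClientException on failure.
res = await list_app(client=client)
```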
### The App Object
________
<b>id</b> <font color="gray"> string </font>
app id
________
<b>app_code</b> <font color="gray"> string </font>
app code
________
<b>app_name</b> <font color="gray"> string </font>
app name
________
<b>app_describe</b> <font color="gray"> string </font>
app describe
________
<b>team_mode</b> <font color="gray"> string </font>
team mode
________
<b>language</b> <font color="gray"> string </font>
language
________
<b>team_context</b> <font color="gray"> string </font>
team context
________
<b>user_code</b> <font color="gray"> string </font>
user code
________
<b>sys_code</b> <font color="gray"> string </font>
sys code
________
<b>is_collected</b> <font color="gray"> string </font>
is collected
________
<b>icon</b> <font color="gray"> string </font>
icon
________
<b>created_at</b> <font color="gray"> string </font>
created at
________
<b>updated_at</b> <font color="gray"> string </font>
updated at
________
<b>details</b> <font color="gray"> array </font>
app details (List[AppDetailModel])
________
### The App Detail Model
________
<b>app_code</b> <font color="gray"> string </font>
app code
________
<b>app_name</b> <font color="gray"> string </font>
app name
________
<b>agent_name</b> <font color="gray"> string </font>
agent name
________
<b>node_id</b> <font color="gray"> string </font>
node id
________
<b>resources</b> <font color="gray"> string </font>
resources
________
<b>prompt_template</b> <font color="gray"> string </font>
prompt template
________
<b>llm_strategy</b> <font color="gray"> string </font>
llm strategy
________
<b>llm_strategy_value</b> <font color="gray"> string </font>
llm strategy value
________
<b>created_at</b> <font color="gray"> string </font>
created at
________
<b>updated_at</b> <font color="gray"> string </font>
updated at
________

docs/docs/api/chat.md (new file, 280 lines)

@ -0,0 +1,280 @@
# Chat
Given a list of messages comprising a conversation, the model will return a response.
# Create Chat Completion
```python
POST /api/v2/chat/completions
```
### Examples
import Tabs from '@theme/Tabs';
import TabItem from '@theme/TabItem';
### Stream Chat Completion
<Tabs
defaultValue="python"
groupId="chat"
values={[
{label: 'Curl', value: 'curl'},
{label: 'Python', value: 'python'},
]
}>
<TabItem value="curl">
```shell
DBGPT_API_KEY="dbgpt"
curl -X POST "http://localhost:5000/api/v2/chat/completions" \
-H "Authorization: Bearer $DBGPT_API_KEY" \
-H "accept: application/json" \
-H "Content-Type: application/json" \
-d "{\"messages\":\"Hello\",\"model\":\"chatgpt_proxyllm\", \"stream\": true}"
```
</TabItem>
<TabItem value="python">
```python
from dbgpt.client.client import Client
DBGPT_API_KEY = "dbgpt"
client = Client(api_key=DBGPT_API_KEY)
response = client.chat_stream(messages="Hello", model="chatgpt_proxyllm")
```
</TabItem>
</Tabs>
### Chat Completion Stream Response
```commandline
data: {"id": "chatcmpl-ba6fb52e-e5b2-11ee-b031-acde48001122", "model": "chatgpt_proxyllm", "choices": [{"index": 0, "delta": {"role": "assistant", "content": "Hello"}}]}
data: {"id": "chatcmpl-ba6fb52e-e5b2-11ee-b031-acde48001122", "model": "chatgpt_proxyllm", "choices": [{"index": 0, "delta": {"role": "assistant", "content": "!"}}]}
data: {"id": "chatcmpl-ba6fb52e-e5b2-11ee-b031-acde48001122", "model": "chatgpt_proxyllm", "choices": [{"index": 0, "delta": {"role": "assistant", "content": " How"}}]}
data: {"id": "chatcmpl-ba6fb52e-e5b2-11ee-b031-acde48001122", "model": "chatgpt_proxyllm", "choices": [{"index": 0, "delta": {"role": "assistant", "content": " can"}}]}
data: {"id": "chatcmpl-ba6fb52e-e5b2-11ee-b031-acde48001122", "model": "chatgpt_proxyllm", "choices": [{"index": 0, "delta": {"role": "assistant", "content": " I"}}]}
data: {"id": "chatcmpl-ba6fb52e-e5b2-11ee-b031-acde48001122", "model": "chatgpt_proxyllm", "choices": [{"index": 0, "delta": {"role": "assistant", "content": " assist"}}]}
data: {"id": "chatcmpl-ba6fb52e-e5b2-11ee-b031-acde48001122", "model": "chatgpt_proxyllm", "choices": [{"index": 0, "delta": {"role": "assistant", "content": " you"}}]}
data: {"id": "chatcmpl-ba6fb52e-e5b2-11ee-b031-acde48001122", "model": "chatgpt_proxyllm", "choices": [{"index": 0, "delta": {"role": "assistant", "content": " today"}}]}
data: {"id": "chatcmpl-ba6fb52e-e5b2-11ee-b031-acde48001122", "model": "chatgpt_proxyllm", "choices": [{"index": 0, "delta": {"role": "assistant", "content": "?"}}]}
data: [DONE]
```
### Chat Completion
<Tabs
defaultValue="python"
groupId="chat"
values={[
{label: 'Curl', value: 'curl'},
{label: 'Python', value: 'python'},
]
}>
<TabItem value="curl">
```shell
DBGPT_API_KEY="dbgpt"
curl -X POST "http://localhost:5000/api/v2/chat/completions" \
-H "Authorization: Bearer $DBGPT_API_KEY" \
-H "accept: application/json" \
-H "Content-Type: application/json" \
-d "{\"messages\":\"Hello\",\"model\":\"chatgpt_proxyllm\", \"stream\": false}"
```
</TabItem>
<TabItem value="python">
```python
from dbgpt.client.client import Client
DBGPT_API_KEY = "dbgpt"
client = Client(api_key=DBGPT_API_KEY)
response = client.chat(messages="Hello", model="chatgpt_proxyllm")
```
</TabItem>
</Tabs>
### Chat Completion Response
```json
{
"id": "a8321543-52e9-47a5-a0b6-3d997463f6a3",
"object": "chat.completion",
"created": 1710826792,
"model": "chatgpt_proxyllm",
"choices": [
{
"index": 0,
"message": {
"role": "assistant",
"content": "Hello! How can I assist you today?"
},
"finish_reason": null
}
],
"usage": {
"prompt_tokens": 0,
"total_tokens": 0,
"completion_tokens": 0
}
}
```
### Request body
________
<b>messages</b> <font color="gray"> string </font> <font color="red"> Required </font>
A list of messages comprising the conversation so far.
________
<b>model</b> <font color="gray"> string </font> <font color="red"> Required </font>
ID of the model to use. See the model endpoint compatibility table for details on which models work with the Chat API.
________
<b>chat_mode</b> <font color="gray"> string </font> <font color="red"> Optional </font>
The DB-GPT chat mode, which can be one of the following: `chat_normal`, `chat_app`, `chat_knowledge`, `chat_flow`, default is `chat_normal`.
________
<b>chat_param</b> <font color="gray"> string </font> <font color="red"> Optional </font>
The chat param value for the chat mode: `{app_id}`, `{space_id}`, or `{flow_id}`; default is `None`.
________
<b>max_new_tokens</b> <font color="gray"> integer </font> <font color="red"> Optional </font>
The maximum number of tokens that can be generated in the chat completion.
The total length of input tokens and generated tokens is limited by the model's context length.
________
<b>stream</b> <font color="gray"> boolean </font> <font color="red"> Optional </font>
If set, partial message deltas will be sent.
Tokens will be sent as data-only server-sent events as they become available, with the stream terminated by a `data: [DONE]` message.
________
<b>temperature</b> <font color="gray"> number </font> <font color="red"> Optional </font>
What sampling temperature to use, between 0 and 2. Higher values like 0.8 will make the output more random, while lower values like 0.2 will make it more focused and deterministic.
________
<b>conv_uid</b> <font color="gray"> string </font> <font color="red"> Optional </font>
The conversation id of the model inference, default is `None`
________
<b>span_id</b> <font color="gray"> string </font> <font color="red"> Optional </font>
The span id of the model inference, default is `None`
________
<b>sys_code</b> <font color="gray"> string </font> <font color="red"> Optional </font>
The system code, default is `None`
________
<b>user_name</b> <font color="gray"> string </font> <font color="red"> Optional </font>
The web server user name, default is `None`
________
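As an illustration of how several of these optional parameters fit together, here is a minimal sketch; it assumes `client.chat` accepts `temperature` and `max_new_tokens` as keyword arguments, matching the request body fields above:
```python
from dbgpt.client.client import Client

DBGPT_API_KEY = "dbgpt"

client = Client(api_key=DBGPT_API_KEY)
# Non-stream completion; temperature and max_new_tokens are optional request
# body fields (see above) and are assumed to be forwarded as keyword arguments.
response = client.chat(
    messages="Hello",
    model="chatgpt_proxyllm",
    temperature=0.5,
    max_new_tokens=256,
)
```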
### Chat Stream Response Body
________
<b>id</b> <font color="gray"> string </font>
conv_uid of the conversation.
________
<b>model</b> <font color="gray"> string </font>
The model used for the chat completion.
________
<b>created</b> <font color="gray"> string </font>
The Unix timestamp (in seconds) of when the chat completion was created.
________
<b>choices</b> <font color="gray"> array </font>
A list of chat completion choices. Can be more than one if n is greater than 1.
- <b>index</b> <font color="gray"> integer </font>
The index of the choice in the list of choices.
- <b>delta</b> <font color="gray"> object </font>
The chat completion delta.
- <b>role</b> <font color="gray"> string </font>
The role of the speaker. Can be `user` or `assistant`.
- <b>content</b> <font color="gray"> string </font>
The content of the message.
- <b>finish_reason</b> <font color="gray"> string </font>
The reason the chat completion finished. Can be `max_tokens` or `stop`.
________
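The stream fields above are delivered chunk by chunk; a minimal consumption sketch, assuming `chat_stream` yields response chunks asynchronously:
```python
from dbgpt.client.client import Client

DBGPT_API_KEY = "dbgpt"

client = Client(api_key=DBGPT_API_KEY)
# Assumption: chat_stream yields one chunk per server-sent event; each chunk
# follows the stream response body described above.
async for chunk in client.chat_stream(messages="Hello", model="chatgpt_proxyllm"):
    print(chunk)
```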
### Chat Response Body
________
<b>id</b> <font color="gray"> string </font>
conv_uid of the conversation.
________
<b>model</b> <font color="gray"> string </font>
The model used for the chat completion.
________
<b>created</b> <font color="gray"> string </font>
The Unix timestamp (in seconds) of when the chat completion was created.
________
<b>object</b> <font color="gray"> string </font>
The object type of the chat completion.
________
<b>choices</b> <font color="gray"> array </font>
A list of chat completion choices. Can be more than one if n is greater than 1.
- <b>index</b> <font color="gray"> integer </font>
The index of the choice in the list of choices.
- <b>message</b> <font color="gray"> object </font>
The chat completion message.
- <b>role</b> <font color="gray"> string </font>
The role of the speaker. Can be `user` or `assistant`.
- <b>content</b> <font color="gray"> string </font>
The content of the message.
- <b>finish_reason</b> <font color="gray"> string </font>
The reason the chat completion finished. Can be `max_tokens` or `stop`.
________
<b>usage</b> <font color="gray"> object </font>
The usage statistics for the chat completion.
- <b>prompt_tokens</b> <font color="gray"> integer </font>
The number of tokens in the prompt.
- <b>total_tokens</b> <font color="gray"> integer </font>
The total number of tokens in the chat completion.
- <b>completion_tokens</b> <font color="gray"> integer </font>
The number of tokens in the chat completion.

docs/docs/api/flow.md (new file, 306 lines)

@ -0,0 +1,306 @@
# Flow
Get started with the Flow API
# Chat Flow
```python
POST /api/v2/chat/completions
```
### Examples
import Tabs from '@theme/Tabs';
import TabItem from '@theme/TabItem';
### Stream Chat Flow
<Tabs
defaultValue="python"
groupId="chat"
values={[
{label: 'Curl', value: 'curl'},
{label: 'Python', value: 'python'},
]
}>
<TabItem value="curl">
```shell
DBGPT_API_KEY="dbgpt"
FLOW_ID="{YOUR_FLOW_ID}"
curl -X POST "http://localhost:5000/api/v2/chat/completions" \
-H "Authorization: Bearer $DBGPT_API_KEY" \
-H "accept: application/json" \
-H "Content-Type: application/json" \
-d "{\"messages\":\"Hello\",\"model\":\"chatgpt_proxyllm\", \"chat_mode\": \"chat_flow\", \"chat_param\": "$FLOW_ID"}"
```
</TabItem>
<TabItem value="python">
```python
from dbgpt.client.client import Client
DBGPT_API_KEY = "dbgpt"
FLOW_ID="{YOUR_FLOW_ID}"
client = Client(api_key=DBGPT_API_KEY)
response = client.chat_stream(messages="Hello", model="chatgpt_proxyllm", chat_mode="chat_flow", chat_param=FLOW_ID)
```
</TabItem>
</Tabs>
#### Chat Completion Stream Response
```commandline
data: {"id": "579f8862-fc4b-481e-af02-a127e6d036c8", "created": 1710918094, "model": "chatgpt_proxyllm", "choices": [{"index": 0, "delta": {"role": "assistant", "content": "\n\n"}}]}
```
### Create Flow
```python
POST /api/v2/serve/awel/flows
```
#### Request body
Request <a href="#the-flow-object">Flow Object</a>
#### Response body
Return <a href="#the-flow-object">Flow Object</a>
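A minimal Python sketch for creating a flow with the client helper; `flow.json` is a hypothetical file holding an exported flow definition with the Flow Object fields described below:
```python
import json

from dbgpt.client.client import Client
from dbgpt.client.flow import create_flow
from dbgpt.core.awel.flow.flow_factory import FlowPanel

DBGPT_API_KEY = "dbgpt"

client = Client(api_key=DBGPT_API_KEY)
# flow.json is a hypothetical export containing the Flow Object fields
# (name, label, description, flow_data, ...) described below.
with open("flow.json") as f:
    flow = FlowPanel(**json.load(f))
res = await create_flow(client=client, flow=flow)
```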
### Update Flow
PUT /api/v2/serve/awel/flows
#### Request body
Request <a href="#the-flow-object">Flow Object</a>
#### Response body
Return <a href="#the-flow-object">Flow Object</a>
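Updating follows the same pattern with `update_flow`; a minimal sketch under the same assumption about the flow definition file:
```python
import json

from dbgpt.client.client import Client
from dbgpt.client.flow import update_flow
from dbgpt.core.awel.flow.flow_factory import FlowPanel

DBGPT_API_KEY = "dbgpt"

client = Client(api_key=DBGPT_API_KEY)
# flow.json is a hypothetical export of the flow to update, including its uid.
with open("flow.json") as f:
    flow = FlowPanel(**json.load(f))
res = await update_flow(client=client, flow=flow)
```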
### Delete Flow
```python
DELETE /api/v2/serve/awel/flows/{flow_id}
```
<Tabs
defaultValue="curl_update_knowledge"
groupId="chat1"
values={[
{label: 'Curl', value: 'curl_update_knowledge'},
{label: 'Python', value: 'python_update_knowledge'},
]
}>
<TabItem value="curl_update_knowledge">
```shell
DBGPT_API_KEY="dbgpt"
FLOW_ID="{YOUR_FLOW_ID}"
curl -X DELETE "http://localhost:5000/api/v2/serve/awel/flows/$FLOW_ID" \
-H "Authorization: Bearer $DBGPT_API_KEY"
```
</TabItem>
<TabItem value="python_update_knowledge">
```python
from dbgpt.client.client import Client
from dbgpt.client.flow import delete_flow
DBGPT_API_KEY = "dbgpt"
flow_id = "{your_flow_id}"
client = Client(api_key=DBGPT_API_KEY)
res = await delete_flow(client=client, flow_id=flow_id)
```
</TabItem>
</Tabs>
#### Delete Parameters
________
<b>uid</b> <font color="gray"> string </font> <font color="red"> Required </font>
flow id
________
#### Response body
Return <a href="#the-flow-object">Flow Object</a>
### Get Flow
```python
GET /api/v2/serve/awel/flows/{flow_id}
```
<Tabs
defaultValue="curl_get_knowledge"
groupId="chat1"
values={[
{label: 'Curl', value: 'curl_get_knowledge'},
{label: 'Python', value: 'python_get_knowledge'},
]
}>
<TabItem value="curl_get_knowledge">
```shell
DBGPT_API_KEY="dbgpt"
FLOW_ID="{YOUR_FLOW_ID}"
curl --location --request GET "http://localhost:5000/api/v2/serve/awel/flows/$FLOW_ID" \
--header "Authorization: Bearer $DBGPT_API_KEY"
```
</TabItem>
<TabItem value="python_get_knowledge">
```python
from dbgpt.client.client import Client
from dbgpt.client.flow import get_flow
DBGPT_API_KEY = "dbgpt"
flow_id = "{your_flow_id}"
client = Client(api_key=DBGPT_API_KEY)
res = await get_flow(client=client, flow_id=flow_id)
```
</TabItem>
</Tabs>
#### Query Parameters
________
<b>uid</b> <font color="gray"> string </font> <font color="red"> Required </font>
flow id
________
#### Response body
Return <a href="#the-flow-object">Flow Object</a>
### List Flow
```python
GET /api/v2/serve/awel/flows
```
<Tabs
defaultValue="curl_list_knowledge"
groupId="chat1"
values={[
{label: 'Curl', value: 'curl_list_knowledge'},
{label: 'Python', value: 'python_list_knowledge'},
]
}>
<TabItem value="curl_list_knowledge">
```shell
DBGPT_API_KEY="dbgpt"
curl -X GET "http://localhost:5000/api/v2/serve/awel/flows" \
-H "Authorization: Bearer $DBGPT_API_KEY" \
```
</TabItem>
<TabItem value="python_list_knowledge">
```python
from dbgpt.client.client import Client
from dbgpt.client.flow import list_flow
DBGPT_API_KEY = "dbgpt"
client = Client(api_key=DBGPT_API_KEY)
res = await list_flow(client=client)
```
</TabItem>
</Tabs>
#### Response body
Return <a href="#the-flow-object">Flow Object</a>
### The Flow Object
________
<b>uid</b> <font color="gray">string</font>
The unique id for the flow.
________
<b>name</b> <font color="gray">string</font>
The name of the flow.
________
<b>description</b> <font color="gray">string</font>
The description of the flow.
________
<b>label</b> <font color="gray">string</font>
The label of the flow.
________
<b>flow_category</b> <font color="gray">string</font>
The category of the flow. Default is FlowCategory.COMMON.
________
<b>flow_data</b> <font color="gray">object</font>
The flow data.
________
<b>state</b> <font color="gray">string</font>
The state of the flow. Default is INITIALIZING.
________
<b>error_message</b> <font color="gray">string</font>
The error message of the flow.
________
<b>source</b> <font color="gray">string</font>
The source of the flow. Default is DBGPT-WEB.
________
<b>source_url</b> <font color="gray">string</font>
The source url of the flow.
________
<b>version</b> <font color="gray">string</font>
The version of the flow. Default is 0.1.0.
________
<b>editable</b> <font color="gray">boolean</font>
Whether the flow is editable. Default is True.
________
<b>user_name</b> <font color="gray">string</font>
The user name of the flow.
________
<b>sys_code</b> <font color="gray">string</font>
The system code of the flow.
________
<b>dag_id</b> <font color="gray">string</font>
The dag id of the flow.
________
<b>gmt_created</b> <font color="gray">string</font>
The created time of the flow.
________
<b>gmt_modified</b> <font color="gray">string</font>
The modified time of the flow.
________


@ -0,0 +1,37 @@
# Introduction
This is the introduction to the DB-GPT API documentation. You can interact with the API through HTTP requests from any language, or via the official Python client bindings.
# Authentication
The DB-GPT API uses API keys for authentication. The keys allowed to access the API are configured on the server through the `API_KEYS` setting (see the note below); include one of them with every request.
Production requests must be routed through your own backend server where your API key can be securely loaded from an environment variable or key management service.
All API requests should include your API key in an Authorization HTTP header as follows:
```http
Authorization: Bearer DBGPT_API_KEY
```
Example with the DB-GPT API curl command:
```bash
curl "http://localhost:5000/api/v2/chat/completions" \
-H "Authorization: Bearer $DBGPT_API_KEY" \
```
Example with the DB-GPT Client Python package:
```python
from dbgpt.client.client import Client
DBGPT_API_KEY = "dbgpt"
client = Client(api_key=DBGPT_API_KEY)
```
Set the API key in the `.env` file as follows:
:::info note
API_KEYS - The list of API keys that are allowed to access the API. Multiple keys can be provided, separated by commas.
:::
```python
API_KEYS=dbgpt
```
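If a request is rejected (for example, because the key is not listed in `API_KEYS`), the Python client helpers raise `ClientException`; a minimal sketch using the flow helper shown elsewhere in these docs:
```python
from dbgpt.client.client import Client, ClientException
from dbgpt.client.flow import list_flow

DBGPT_API_KEY = "dbgpt"

client = Client(api_key=DBGPT_API_KEY)
try:
    flows = await list_flow(client=client)
except ClientException as e:
    # Any failed request (including an invalid API key) surfaces as ClientException.
    print(f"Request failed: {e}")
```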

docs/docs/api/knowledge.md (new file, 657 lines)

@ -0,0 +1,657 @@
# Knowledge
Get started with the Knowledge API
# Chat Knowledge Space
```python
POST /api/v2/chat/completions
```
### Examples
import Tabs from '@theme/Tabs';
import TabItem from '@theme/TabItem';
### Chat Knowledge
<Tabs
defaultValue="python"
groupId="chat"
values={[
{label: 'Curl', value: 'curl'},
{label: 'Python', value: 'python'},
]
}>
<TabItem value="curl">
```shell
DBGPT_API_KEY="dbgpt"
SPACE_NAME="{YOUR_SPACE_NAME}"
curl --location --request POST "http://127.0.0.1:5000/api/v2/chat/completions" \
--header "Authorization: Bearer $DBGPT_API_KEY" \
--header "Content-Type: application/json" \
--data-raw "{
    \"model\": \"chatgpt_proxyllm\",
    \"messages\": \"introduce awel\",
    \"chat_mode\": \"chat_knowledge\",
    \"chat_param\": \"$SPACE_NAME\"
}"
```
</TabItem>
<TabItem value="python">
```python
from dbgpt.client.client import Client
DBGPT_API_KEY = "dbgpt"
SPACE_NAME="{YOUR_SPACE_NAME}"
client = Client(api_key=DBGPT_API_KEY)
response = client.chat_stream(messages="Hello", model="chatgpt_proxyllm", chat_mode="chat_knowledge", chat_param=SPACE_NAME)
```
</TabItem>
</Tabs>
#### Chat Completion Response
```json
{
"id": "acb050ab-eb2c-4754-97e4-6f3b94b7dac2",
"object": "chat.completion",
"created": 1710917272,
"model": "chatgpt_proxyllm",
"choices": [
{
"index": 0,
"message": {
"role": "assistant",
"content": "Agentic Workflow Expression Language (AWEL) is a specialized language designed for developing large model applications with intelligent agent workflows. It offers flexibility and functionality, allowing developers to focus on business logic for LLMs applications without getting bogged down in model and environment details. AWEL uses a layered API design architecture, making it easier to work with. You can find examples and source code to get started with AWEL, and it supports various operators and environments. AWEL is a powerful tool for building native data applications through workflows and agents."
},
"finish_reason": null
}
],
"usage": {
"prompt_tokens": 0,
"total_tokens": 0,
"completion_tokens": 0
}
}
```
#### Chat Completion Stream Response
```commandline
data: {"id": "chatcmpl-86f60a0c-e686-11ee-9322-acde48001122", "model": "chatgpt_proxyllm", "choices": [{"index": 0, "delta": {"role": "assistant", "content": "AW"}}]}
data: {"id": "chatcmpl-86f60a0c-e686-11ee-9322-acde48001122", "model": "chatgpt_proxyllm", "choices": [{"index": 0, "delta": {"role": "assistant", "content": "EL"}}]}
data: {"id": "chatcmpl-86f60a0c-e686-11ee-9322-acde48001122", "model": "chatgpt_proxyllm", "choices": [{"index": 0, "delta": {"role": "assistant", "content": ","}}]}
data: {"id": "chatcmpl-86f60a0c-e686-11ee-9322-acde48001122", "model": "chatgpt_proxyllm", "choices": [{"index": 0, "delta": {"role": "assistant", "content": " which"}}]}
data: {"id": "chatcmpl-86f60a0c-e686-11ee-9322-acde48001122", "model": "chatgpt_proxyllm", "choices": [{"index": 0, "delta": {"role": "assistant", "content": " stands"}}]}
data: {"id": "chatcmpl-86f60a0c-e686-11ee-9322-acde48001122", "model": "chatgpt_proxyllm", "choices": [{"index": 0, "delta": {"role": "assistant", "content": " for"}}]}
data: {"id": "chatcmpl-86f60a0c-e686-11ee-9322-acde48001122", "model": "chatgpt_proxyllm", "choices": [{"index": 0, "delta": {"role": "assistant", "content": " Ag"}}]}
data: {"id": "chatcmpl-86f60a0c-e686-11ee-9322-acde48001122", "model": "chatgpt_proxyllm", "choices": [{"index": 0, "delta": {"role": "assistant", "content": "entic"}}]}
data: {"id": "chatcmpl-86f60a0c-e686-11ee-9322-acde48001122", "model": "chatgpt_proxyllm", "choices": [{"index": 0, "delta": {"role": "assistant", "content": " Workflow"}}]}
data: {"id": "chatcmpl-86f60a0c-e686-11ee-9322-acde48001122", "model": "chatgpt_proxyllm", "choices": [{"index": 0, "delta": {"role": "assistant", "content": " Expression"}}]}
data: {"id": "chatcmpl-86f60a0c-e686-11ee-9322-acde48001122", "model": "chatgpt_proxyllm", "choices": [{"index": 0, "delta": {"role": "assistant", "content": " Language"}}]}
data: {"id": "chatcmpl-86f60a0c-e686-11ee-9322-acde48001122", "model": "chatgpt_proxyllm", "choices": [{"index": 0, "delta": {"role": "assistant", "content": ","}}]}
data: {"id": "chatcmpl-86f60a0c-e686-11ee-9322-acde48001122", "model": "chatgpt_proxyllm", "choices": [{"index": 0, "delta": {"role": "assistant", "content": " is"}}]}
data: {"id": "chatcmpl-86f60a0c-e686-11ee-9322-acde48001122", "model": "chatgpt_proxyllm", "choices": [{"index": 0, "delta": {"role": "assistant", "content": " a"}}]}
data: {"id": "chatcmpl-86f60a0c-e686-11ee-9322-acde48001122", "model": "chatgpt_proxyllm", "choices": [{"index": 0, "delta": {"role": "assistant", "content": " powerful"}}]}
data: {"id": "chatcmpl-86f60a0c-e686-11ee-9322-acde48001122", "model": "chatgpt_proxyllm", "choices": [{"index": 0, "delta": {"role": "assistant", "content": " tool"}}]}
data: {"id": "chatcmpl-86f60a0c-e686-11ee-9322-acde48001122", "model": "chatgpt_proxyllm", "choices": [{"index": 0, "delta": {"role": "assistant", "content": " designed"}}]}
data: {"id": "chatcmpl-86f60a0c-e686-11ee-9322-acde48001122", "model": "chatgpt_proxyllm", "choices": [{"index": 0, "delta": {"role": "assistant", "content": " for"}}]}
data: {"id": "chatcmpl-86f60a0c-e686-11ee-9322-acde48001122", "model": "chatgpt_proxyllm", "choices": [{"index": 0, "delta": {"role": "assistant", "content": " developing"}}]}
data: {"id": "chatcmpl-86f60a0c-e686-11ee-9322-acde48001122", "model": "chatgpt_proxyllm", "choices": [{"index": 0, "delta": {"role": "assistant", "content": " large"}}]}
data: {"id": "chatcmpl-86f60a0c-e686-11ee-9322-acde48001122", "model": "chatgpt_proxyllm", "choices": [{"index": 0, "delta": {"role": "assistant", "content": " model"}}]}
data: {"id": "chatcmpl-86f60a0c-e686-11ee-9322-acde48001122", "model": "chatgpt_proxyllm", "choices": [{"index": 0, "delta": {"role": "assistant", "content": " applications"}}]}
data: {"id": "chatcmpl-86f60a0c-e686-11ee-9322-acde48001122", "model": "chatgpt_proxyllm", "choices": [{"index": 0, "delta": {"role": "assistant", "content": "."}}]}
data: {"id": "chatcmpl-86f60a0c-e686-11ee-9322-acde48001122", "model": "chatgpt_proxyllm", "choices": [{"index": 0, "delta": {"role": "assistant", "content": " It"}}]}
data: {"id": "chatcmpl-86f60a0c-e686-11ee-9322-acde48001122", "model": "chatgpt_proxyllm", "choices": [{"index": 0, "delta": {"role": "assistant", "content": " simpl"}}]}
data: {"id": "chatcmpl-86f60a0c-e686-11ee-9322-acde48001122", "model": "chatgpt_proxyllm", "choices": [{"index": 0, "delta": {"role": "assistant", "content": "ifies"}}]}
data: {"id": "chatcmpl-86f60a0c-e686-11ee-9322-acde48001122", "model": "chatgpt_proxyllm", "choices": [{"index": 0, "delta": {"role": "assistant", "content": " the"}}]}
data: {"id": "chatcmpl-86f60a0c-e686-11ee-9322-acde48001122", "model": "chatgpt_proxyllm", "choices": [{"index": 0, "delta": {"role": "assistant", "content": " process"}}]}
data: {"id": "chatcmpl-86f60a0c-e686-11ee-9322-acde48001122", "model": "chatgpt_proxyllm", "choices": [{"index": 0, "delta": {"role": "assistant", "content": " by"}}]}
data: {"id": "chatcmpl-86f60a0c-e686-11ee-9322-acde48001122", "model": "chatgpt_proxyllm", "choices": [{"index": 0, "delta": {"role": "assistant", "content": " allowing"}}]}
data: {"id": "chatcmpl-86f60a0c-e686-11ee-9322-acde48001122", "model": "chatgpt_proxyllm", "choices": [{"index": 0, "delta": {"role": "assistant", "content": " developers"}}]}
data: {"id": "chatcmpl-86f60a0c-e686-11ee-9322-acde48001122", "model": "chatgpt_proxyllm", "choices": [{"index": 0, "delta": {"role": "assistant", "content": " to"}}]}
data: {"id": "chatcmpl-86f60a0c-e686-11ee-9322-acde48001122", "model": "chatgpt_proxyllm", "choices": [{"index": 0, "delta": {"role": "assistant", "content": " focus"}}]}
data: {"id": "chatcmpl-86f60a0c-e686-11ee-9322-acde48001122", "model": "chatgpt_proxyllm", "choices": [{"index": 0, "delta": {"role": "assistant", "content": " on"}}]}
data: {"id": "chatcmpl-86f60a0c-e686-11ee-9322-acde48001122", "model": "chatgpt_proxyllm", "choices": [{"index": 0, "delta": {"role": "assistant", "content": " business"}}]}
data: {"id": "chatcmpl-86f60a0c-e686-11ee-9322-acde48001122", "model": "chatgpt_proxyllm", "choices": [{"index": 0, "delta": {"role": "assistant", "content": " logic"}}]}
data: {"id": "chatcmpl-86f60a0c-e686-11ee-9322-acde48001122", "model": "chatgpt_proxyllm", "choices": [{"index": 0, "delta": {"role": "assistant", "content": " without"}}]}
data: {"id": "chatcmpl-86f60a0c-e686-11ee-9322-acde48001122", "model": "chatgpt_proxyllm", "choices": [{"index": 0, "delta": {"role": "assistant", "content": " getting"}}]}
data: {"id": "chatcmpl-86f60a0c-e686-11ee-9322-acde48001122", "model": "chatgpt_proxyllm", "choices": [{"index": 0, "delta": {"role": "assistant", "content": " bog"}}]}
data: {"id": "chatcmpl-86f60a0c-e686-11ee-9322-acde48001122", "model": "chatgpt_proxyllm", "choices": [{"index": 0, "delta": {"role": "assistant", "content": "ged"}}]}
data: {"id": "chatcmpl-86f60a0c-e686-11ee-9322-acde48001122", "model": "chatgpt_proxyllm", "choices": [{"index": 0, "delta": {"role": "assistant", "content": " down"}}]}
data: {"id": "chatcmpl-86f60a0c-e686-11ee-9322-acde48001122", "model": "chatgpt_proxyllm", "choices": [{"index": 0, "delta": {"role": "assistant", "content": " in"}}]}
data: {"id": "chatcmpl-86f60a0c-e686-11ee-9322-acde48001122", "model": "chatgpt_proxyllm", "choices": [{"index": 0, "delta": {"role": "assistant", "content": " complex"}}]}
data: {"id": "chatcmpl-86f60a0c-e686-11ee-9322-acde48001122", "model": "chatgpt_proxyllm", "choices": [{"index": 0, "delta": {"role": "assistant", "content": " model"}}]}
data: {"id": "chatcmpl-86f60a0c-e686-11ee-9322-acde48001122", "model": "chatgpt_proxyllm", "choices": [{"index": 0, "delta": {"role": "assistant", "content": " and"}}]}
data: {"id": "chatcmpl-86f60a0c-e686-11ee-9322-acde48001122", "model": "chatgpt_proxyllm", "choices": [{"index": 0, "delta": {"role": "assistant", "content": " environment"}}]}
data: {"id": "chatcmpl-86f60a0c-e686-11ee-9322-acde48001122", "model": "chatgpt_proxyllm", "choices": [{"index": 0, "delta": {"role": "assistant", "content": " details"}}]}
data: {"id": "chatcmpl-86f60a0c-e686-11ee-9322-acde48001122", "model": "chatgpt_proxyllm", "choices": [{"index": 0, "delta": {"role": "assistant", "content": "."}}]}
data: {"id": "chatcmpl-86f60a0c-e686-11ee-9322-acde48001122", "model": "chatgpt_proxyllm", "choices": [{"index": 0, "delta": {"role": "assistant", "content": " AW"}}]}
data: {"id": "chatcmpl-86f60a0c-e686-11ee-9322-acde48001122", "model": "chatgpt_proxyllm", "choices": [{"index": 0, "delta": {"role": "assistant", "content": "EL"}}]}
data: {"id": "chatcmpl-86f60a0c-e686-11ee-9322-acde48001122", "model": "chatgpt_proxyllm", "choices": [{"index": 0, "delta": {"role": "assistant", "content": " offers"}}]}
data: {"id": "chatcmpl-86f60a0c-e686-11ee-9322-acde48001122", "model": "chatgpt_proxyllm", "choices": [{"index": 0, "delta": {"role": "assistant", "content": " great"}}]}
data: {"id": "chatcmpl-86f60a0c-e686-11ee-9322-acde48001122", "model": "chatgpt_proxyllm", "choices": [{"index": 0, "delta": {"role": "assistant", "content": " functionality"}}]}
data: {"id": "chatcmpl-86f60a0c-e686-11ee-9322-acde48001122", "model": "chatgpt_proxyllm", "choices": [{"index": 0, "delta": {"role": "assistant", "content": " and"}}]}
data: {"id": "chatcmpl-86f60a0c-e686-11ee-9322-acde48001122", "model": "chatgpt_proxyllm", "choices": [{"index": 0, "delta": {"role": "assistant", "content": " flexibility"}}]}
data: {"id": "chatcmpl-86f60a0c-e686-11ee-9322-acde48001122", "model": "chatgpt_proxyllm", "choices": [{"index": 0, "delta": {"role": "assistant", "content": " through"}}]}
data: {"id": "chatcmpl-86f60a0c-e686-11ee-9322-acde48001122", "model": "chatgpt_proxyllm", "choices": [{"index": 0, "delta": {"role": "assistant", "content": " its"}}]}
data: {"id": "chatcmpl-86f60a0c-e686-11ee-9322-acde48001122", "model": "chatgpt_proxyllm", "choices": [{"index": 0, "delta": {"role": "assistant", "content": " layered"}}]}
data: {"id": "chatcmpl-86f60a0c-e686-11ee-9322-acde48001122", "model": "chatgpt_proxyllm", "choices": [{"index": 0, "delta": {"role": "assistant", "content": " API"}}]}
data: {"id": "chatcmpl-86f60a0c-e686-11ee-9322-acde48001122", "model": "chatgpt_proxyllm", "choices": [{"index": 0, "delta": {"role": "assistant", "content": " design"}}]}
data: {"id": "chatcmpl-86f60a0c-e686-11ee-9322-acde48001122", "model": "chatgpt_proxyllm", "choices": [{"index": 0, "delta": {"role": "assistant", "content": " architecture"}}]}
data: {"id": "chatcmpl-86f60a0c-e686-11ee-9322-acde48001122", "model": "chatgpt_proxyllm", "choices": [{"index": 0, "delta": {"role": "assistant", "content": "."}}]}
data: {"id": "chatcmpl-86f60a0c-e686-11ee-9322-acde48001122", "model": "chatgpt_proxyllm", "choices": [{"index": 0, "delta": {"role": "assistant", "content": " It"}}]}
data: {"id": "chatcmpl-86f60a0c-e686-11ee-9322-acde48001122", "model": "chatgpt_proxyllm", "choices": [{"index": 0, "delta": {"role": "assistant", "content": " provides"}}]}
data: {"id": "chatcmpl-86f60a0c-e686-11ee-9322-acde48001122", "model": "chatgpt_proxyllm", "choices": [{"index": 0, "delta": {"role": "assistant", "content": " a"}}]}
data: {"id": "chatcmpl-86f60a0c-e686-11ee-9322-acde48001122", "model": "chatgpt_proxyllm", "choices": [{"index": 0, "delta": {"role": "assistant", "content": " set"}}]}
data: {"id": "chatcmpl-86f60a0c-e686-11ee-9322-acde48001122", "model": "chatgpt_proxyllm", "choices": [{"index": 0, "delta": {"role": "assistant", "content": " of"}}]}
data: {"id": "chatcmpl-86f60a0c-e686-11ee-9322-acde48001122", "model": "chatgpt_proxyllm", "choices": [{"index": 0, "delta": {"role": "assistant", "content": " intelligent"}}]}
data: {"id": "chatcmpl-86f60a0c-e686-11ee-9322-acde48001122", "model": "chatgpt_proxyllm", "choices": [{"index": 0, "delta": {"role": "assistant", "content": " agent"}}]}
data: {"id": "chatcmpl-86f60a0c-e686-11ee-9322-acde48001122", "model": "chatgpt_proxyllm", "choices": [{"index": 0, "delta": {"role": "assistant", "content": " workflow"}}]}
data: {"id": "chatcmpl-86f60a0c-e686-11ee-9322-acde48001122", "model": "chatgpt_proxyllm", "choices": [{"index": 0, "delta": {"role": "assistant", "content": " expression"}}]}
data: {"id": "chatcmpl-86f60a0c-e686-11ee-9322-acde48001122", "model": "chatgpt_proxyllm", "choices": [{"index": 0, "delta": {"role": "assistant", "content": " language"}}]}
data: {"id": "chatcmpl-86f60a0c-e686-11ee-9322-acde48001122", "model": "chatgpt_proxyllm", "choices": [{"index": 0, "delta": {"role": "assistant", "content": " that"}}]}
data: {"id": "chatcmpl-86f60a0c-e686-11ee-9322-acde48001122", "model": "chatgpt_proxyllm", "choices": [{"index": 0, "delta": {"role": "assistant", "content": " enhances"}}]}
data: {"id": "chatcmpl-86f60a0c-e686-11ee-9322-acde48001122", "model": "chatgpt_proxyllm", "choices": [{"index": 0, "delta": {"role": "assistant", "content": " efficiency"}}]}
data: {"id": "chatcmpl-86f60a0c-e686-11ee-9322-acde48001122", "model": "chatgpt_proxyllm", "choices": [{"index": 0, "delta": {"role": "assistant", "content": " in"}}]}
data: {"id": "chatcmpl-86f60a0c-e686-11ee-9322-acde48001122", "model": "chatgpt_proxyllm", "choices": [{"index": 0, "delta": {"role": "assistant", "content": " application"}}]}
data: {"id": "chatcmpl-86f60a0c-e686-11ee-9322-acde48001122", "model": "chatgpt_proxyllm", "choices": [{"index": 0, "delta": {"role": "assistant", "content": " development"}}]}
data: {"id": "chatcmpl-86f60a0c-e686-11ee-9322-acde48001122", "model": "chatgpt_proxyllm", "choices": [{"index": 0, "delta": {"role": "assistant", "content": "."}}]}
data: {"id": "chatcmpl-86f60a0c-e686-11ee-9322-acde48001122", "model": "chatgpt_proxyllm", "choices": [{"index": 0, "delta": {"role": "assistant", "content": " If"}}]}
data: {"id": "chatcmpl-86f60a0c-e686-11ee-9322-acde48001122", "model": "chatgpt_proxyllm", "choices": [{"index": 0, "delta": {"role": "assistant", "content": " you"}}]}
data: {"id": "chatcmpl-86f60a0c-e686-11ee-9322-acde48001122", "model": "chatgpt_proxyllm", "choices": [{"index": 0, "delta": {"role": "assistant", "content": " want"}}]}
data: {"id": "chatcmpl-86f60a0c-e686-11ee-9322-acde48001122", "model": "chatgpt_proxyllm", "choices": [{"index": 0, "delta": {"role": "assistant", "content": " to"}}]}
data: {"id": "chatcmpl-86f60a0c-e686-11ee-9322-acde48001122", "model": "chatgpt_proxyllm", "choices": [{"index": 0, "delta": {"role": "assistant", "content": " learn"}}]}
data: {"id": "chatcmpl-86f60a0c-e686-11ee-9322-acde48001122", "model": "chatgpt_proxyllm", "choices": [{"index": 0, "delta": {"role": "assistant", "content": " more"}}]}
data: {"id": "chatcmpl-86f60a0c-e686-11ee-9322-acde48001122", "model": "chatgpt_proxyllm", "choices": [{"index": 0, "delta": {"role": "assistant", "content": " about"}}]}
data: {"id": "chatcmpl-86f60a0c-e686-11ee-9322-acde48001122", "model": "chatgpt_proxyllm", "choices": [{"index": 0, "delta": {"role": "assistant", "content": " AW"}}]}
data: {"id": "chatcmpl-86f60a0c-e686-11ee-9322-acde48001122", "model": "chatgpt_proxyllm", "choices": [{"index": 0, "delta": {"role": "assistant", "content": "EL"}}]}
data: {"id": "chatcmpl-86f60a0c-e686-11ee-9322-acde48001122", "model": "chatgpt_proxyllm", "choices": [{"index": 0, "delta": {"role": "assistant", "content": ","}}]}
data: {"id": "chatcmpl-86f60a0c-e686-11ee-9322-acde48001122", "model": "chatgpt_proxyllm", "choices": [{"index": 0, "delta": {"role": "assistant", "content": " you"}}]}
data: {"id": "chatcmpl-86f60a0c-e686-11ee-9322-acde48001122", "model": "chatgpt_proxyllm", "choices": [{"index": 0, "delta": {"role": "assistant", "content": " can"}}]}
data: {"id": "chatcmpl-86f60a0c-e686-11ee-9322-acde48001122", "model": "chatgpt_proxyllm", "choices": [{"index": 0, "delta": {"role": "assistant", "content": " check"}}]}
data: {"id": "chatcmpl-86f60a0c-e686-11ee-9322-acde48001122", "model": "chatgpt_proxyllm", "choices": [{"index": 0, "delta": {"role": "assistant", "content": " out"}}]}
data: {"id": "chatcmpl-86f60a0c-e686-11ee-9322-acde48001122", "model": "chatgpt_proxyllm", "choices": [{"index": 0, "delta": {"role": "assistant", "content": " the"}}]}
data: {"id": "chatcmpl-86f60a0c-e686-11ee-9322-acde48001122", "model": "chatgpt_proxyllm", "choices": [{"index": 0, "delta": {"role": "assistant", "content": " built"}}]}
data: {"id": "chatcmpl-86f60a0c-e686-11ee-9322-acde48001122", "model": "chatgpt_proxyllm", "choices": [{"index": 0, "delta": {"role": "assistant", "content": "-in"}}]}
data: {"id": "chatcmpl-86f60a0c-e686-11ee-9322-acde48001122", "model": "chatgpt_proxyllm", "choices": [{"index": 0, "delta": {"role": "assistant", "content": " examples"}}]}
data: {"id": "chatcmpl-86f60a0c-e686-11ee-9322-acde48001122", "model": "chatgpt_proxyllm", "choices": [{"index": 0, "delta": {"role": "assistant", "content": " and"}}]}
data: {"id": "chatcmpl-86f60a0c-e686-11ee-9322-acde48001122", "model": "chatgpt_proxyllm", "choices": [{"index": 0, "delta": {"role": "assistant", "content": " resources"}}]}
data: {"id": "chatcmpl-86f60a0c-e686-11ee-9322-acde48001122", "model": "chatgpt_proxyllm", "choices": [{"index": 0, "delta": {"role": "assistant", "content": " available"}}]}
data: {"id": "chatcmpl-86f60a0c-e686-11ee-9322-acde48001122", "model": "chatgpt_proxyllm", "choices": [{"index": 0, "delta": {"role": "assistant", "content": " on"}}]}
data: {"id": "chatcmpl-86f60a0c-e686-11ee-9322-acde48001122", "model": "chatgpt_proxyllm", "choices": [{"index": 0, "delta": {"role": "assistant", "content": " platforms"}}]}
data: {"id": "chatcmpl-86f60a0c-e686-11ee-9322-acde48001122", "model": "chatgpt_proxyllm", "choices": [{"index": 0, "delta": {"role": "assistant", "content": " like"}}]}
data: {"id": "chatcmpl-86f60a0c-e686-11ee-9322-acde48001122", "model": "chatgpt_proxyllm", "choices": [{"index": 0, "delta": {"role": "assistant", "content": " Github"}}]}
data: {"id": "chatcmpl-86f60a0c-e686-11ee-9322-acde48001122", "model": "chatgpt_proxyllm", "choices": [{"index": 0, "delta": {"role": "assistant", "content": ","}}]}
data: {"id": "chatcmpl-86f60a0c-e686-11ee-9322-acde48001122", "model": "chatgpt_proxyllm", "choices": [{"index": 0, "delta": {"role": "assistant", "content": " Docker"}}]}
data: {"id": "chatcmpl-86f60a0c-e686-11ee-9322-acde48001122", "model": "chatgpt_proxyllm", "choices": [{"index": 0, "delta": {"role": "assistant", "content": "hub"}}]}
data: {"id": "chatcmpl-86f60a0c-e686-11ee-9322-acde48001122", "model": "chatgpt_proxyllm", "choices": [{"index": 0, "delta": {"role": "assistant", "content": ","}}]}
data: {"id": "chatcmpl-86f60a0c-e686-11ee-9322-acde48001122", "model": "chatgpt_proxyllm", "choices": [{"index": 0, "delta": {"role": "assistant", "content": " and"}}]}
data: {"id": "chatcmpl-86f60a0c-e686-11ee-9322-acde48001122", "model": "chatgpt_proxyllm", "choices": [{"index": 0, "delta": {"role": "assistant", "content": " more"}}]}
data: {"id": "chatcmpl-86f60a0c-e686-11ee-9322-acde48001122", "model": "chatgpt_proxyllm", "choices": [{"index": 0, "delta": {"role": "assistant", "content": "."}}]}
data: {"id": "chatcmpl-86f60a0c-e686-11ee-9322-acde48001122", "model": "chatgpt_proxyllm", "choices": [{"index": 0, "delta": {"role": "assistant", "content": "\n\n<references title=\"References\" references=\"[{&quot;name&quot;: &quot;AWEL_URL&quot;, &quot;chunks&quot;: [{&quot;id&quot;: 2526, &quot;content&quot;: &quot;Agentic Workflow Expression Language(AWEL) is a set of intelligent agent workflow expression language specially designed for large model applicationdevelopment. It provides great functionality and flexibility. Through the AWEL API, you can focus on the development of business logic for LLMs applicationswithout paying attention to cumbersome model and environment details.AWEL adopts a layered API design. AWEL's layered API design architecture is shown in the figure below.AWEL Design&quot;, &quot;meta_info&quot;: &quot;{'source': 'https://docs.dbgpt.site/docs/latest/awel/', 'title': 'AWEL(Agentic Workflow Expression Language) | DB-GPT', 'description': 'Agentic Workflow Expression Language(AWEL) is a set of intelligent agent workflow expression language specially designed for large model application', 'language': 'en-US'}&quot;, &quot;recall_score&quot;: 0.6579902643967029}, {&quot;id&quot;: 2531, &quot;content&quot;: &quot;ExamplesThe preliminary version of AWEL has alse been released, and we have provided some built-in usage examples.OperatorsExample of API-RAGYou can find source code from examples/awel/simple_rag_example.py&quot;, &quot;meta_info&quot;: &quot;{'source': 'https://docs.dbgpt.site/docs/latest/awel/', 'title': 'AWEL(Agentic Workflow Expression Language) | DB-GPT', 'description': 'Agentic Workflow Expression Language(AWEL) is a set of intelligent agent workflow expression language specially designed for large model application', 'language': 'en-US'}&quot;, &quot;recall_score&quot;: 0.5997033286385491}, {&quot;id&quot;: 2538, &quot;content&quot;: &quot;Stand-alone environmentRay environmentPreviousWhy use AWEL?NextReleased V0.5.0 | Develop native data applications through workflows and agentsAWEL DesignExamplesOperatorsExample of API-RAGAgentFream ExampleDSL ExampleCurrently supported operatorsExecutable environmentCommunityDiscordDockerhubGithubGithubHuggingFaceMoreHacker NewsTwitterCopyright © 2024 DB-GPT&quot;, &quot;meta_info&quot;: &quot;{'source': 'https://docs.dbgpt.site/docs/latest/awel/', 'title': 'AWEL(Agentic Workflow Expression Language) | DB-GPT', 'description': 'Agentic Workflow Expression Language(AWEL) is a set of intelligent agent workflow expression language specially designed for large model application', 'language': 'en-US'}&quot;, &quot;recall_score&quot;: 0.5980204530753225}]}]\" />"}}]}
data: [DONE]
```
### Create Knowledge Space
```python
POST /api/v2/serve/knowledge/spaces
```
<Tabs
defaultValue="curl_knowledge"
groupId="chat1"
values={[
{label: 'Curl', value: 'curl_knowledge'},
{label: 'Python', value: 'python_knowledge'},
]
}>
<TabItem value="curl_knowledge">
```shell
DBGPT_API_KEY="dbgpt"
curl --location --request POST 'http://localhost:5000/api/v2/serve/knowledge/spaces' \
--header "Authorization: Bearer $DBGPT_API_KEY" \
--header 'Content-Type: application/json' \
--data-raw '{"desc": "for client space desc", "name": "test_space_2", "owner": "dbgpt", "vector_type": "Chroma"
}'
```
</TabItem>
<TabItem value="python_knowledge">
```python
from dbgpt.client.client import Client
from dbgpt.client.knowledge import create_space
from dbgpt.client.schemas import SpaceModel
DBGPT_API_KEY = "dbgpt"
client = Client(api_key=DBGPT_API_KEY)
res = await create_space(client, SpaceModel(
name="test_space",
vector_type="Chroma",
desc="for client space",
owner="dbgpt"))
```
</TabItem>
</Tabs>
#### Request body
________
<b>name</b> <font color="gray"> string </font> <font color="red"> Required </font>
knowledge space name
________
<b>vector_type</b> <font color="gray"> string </font> <font color="red"> Required </font>
vector db type, such as `Chroma` or `Milvus`; the default is `Chroma`
________
<b>desc</b> <font color="gray"> string </font> <font color="red"> Optional </font>
description of the knowledge space
________
<b>owner</b> <font color="gray"> string </font> <font color="red"> Optional </font>
The owner of the knowledge space
________
<b>context</b> <font color="gray"> string </font> <font color="red"> Optional </font>
The argument context of the knowledge space
________
#### Response body
Return <a href="#the-space-object">Space Object</a>
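When the server rejects the request, the Python helper raises a `ClientException` instead of returning a result. The snippet below is a minimal sketch of handling a failed create, assuming the knowledge client follows the same error convention as the other client modules:
```python
from dbgpt.client.client import Client, ClientException
from dbgpt.client.knowledge import create_space
from dbgpt.client.schemas import SpaceModel

DBGPT_API_KEY = "dbgpt"
client = Client(api_key=DBGPT_API_KEY)
try:
    res = await create_space(client, SpaceModel(
        name="test_space",
        vector_type="Chroma",
        desc="for client space",
        owner="dbgpt"))
    print(res)
except ClientException as e:
    # The exception message carries the error returned by the server
    print(f"create_space failed: {e}")
```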
### Update Knowledge Space
```python
PUT /api/v2/serve/knowledge/spaces
```
<Tabs
defaultValue="curl_update_knowledge"
groupId="chat1"
values={[
{label: 'Curl', value: 'curl_update_knowledge'},
{label: 'Python', value: 'python_update_knowledge'},
]
}>
<TabItem value="curl_update_knowledge">
```shell
DBGPT_API_KEY="dbgpt"
curl --location --request PUT 'http://localhost:5000/api/v2/serve/knowledge/spaces' \
--header "Authorization: Bearer $DBGPT_API_KEY" \
--header 'Content-Type: application/json' \
--data-raw '{"desc": "for client space desc v2", "id": "49", "name": "test_space_2", "owner": "dbgpt", "vector_type": "Chroma"
}'
```
</TabItem>
<TabItem value="python_update_knowledge">
```python
from dbgpt.client.client import Client
from dbgpt.client.knowledge import update_space
from dbgpt.client.schemas import SpaceModel
DBGPT_API_KEY = "dbgpt"
client = Client(api_key=DBGPT_API_KEY)
res = await update_space(client, SpaceModel(
name="test_space",
vector_type="Chroma",
desc="for client space update",
owner="dbgpt"))
```
</TabItem>
</Tabs>
#### Request body
________
<b>id</b> <font color="gray"> string </font> <font color="red"> Required </font>
knowledge space id
________
<b>name</b> <font color="gray"> string </font> <font color="red"> Required </font>
knowledge space name
________
<b>vector_type</b> <font color="gray"> string </font> <font color="red"> Optional </font>
vector db type, such as `Chroma` or `Milvus`; the default is `Chroma`
________
<b>desc</b> <font color="gray"> string </font> <font color="red"> Optional </font>
description of the knowledge space
________
<b>owner</b> <font color="gray"> string </font> <font color="red"> Optional </font>
The owner of the knowledge space
________
<b>context</b> <font color="gray"> string </font> <font color="red"> Optional </font>
The argument context of the knowledge space
________
#### Response body
Return <a href="#the-space-object">Space Object</a>
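Note that the request body above lists `id` as required, while the Python example only sets the other fields. If `SpaceModel` accepts an `id` field (as the curl request implies), the update call would look like this hypothetical sketch, where `"49"` is a placeholder for an existing space id:
```python
from dbgpt.client.client import Client
from dbgpt.client.knowledge import update_space
from dbgpt.client.schemas import SpaceModel

DBGPT_API_KEY = "dbgpt"
client = Client(api_key=DBGPT_API_KEY)
# "49" is a placeholder; use the id of the space you want to update.
res = await update_space(client, SpaceModel(
    id="49",
    name="test_space",
    vector_type="Chroma",
    desc="for client space update",
    owner="dbgpt"))
```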
### Delete Knowledge Space
```python
DELETE /api/v2/serve/knowledge/spaces/{space_id}
```
<Tabs
defaultValue="curl_update_knowledge"
groupId="chat1"
values={[
    {label: 'Curl', value: 'curl_delete_knowledge'},
    {label: 'Python', value: 'python_delete_knowledge'},
]
}>
<TabItem value="curl_update_knowledge">
```shell
DBGPT_API_KEY="dbgpt"
SPACE_ID="{YOUR_SPACE_ID}"
curl -X DELETE "http://localhost:5000/api/v2/serve/knowledge/spaces/$SPACE_ID" \
-H "Authorization: Bearer $DBGPT_API_KEY" \
-H "accept: application/json" \
-H "Content-Type: application/json" \
```
</TabItem>
<TabItem value="python_update_knowledge">
```python
from dbgpt.client.client import Client
from dbgpt.client.knowledge import delete_space
DBGPT_API_KEY = "dbgpt"
space_id = "{your_space_id}"
client = Client(api_key=DBGPT_API_KEY)
res = await delete_space(client=client, space_id=space_id)
```
</TabItem>
</Tabs>
#### Path Parameters
________
<b>id</b> <font color="gray"> string </font> <font color="red"> Required </font>
knowledge space id
________
#### Response body
Return <a href="#the-space-object">Space Object</a>
### Get Knowledge Space
```python
GET /api/v2/serve/knowledge/spaces/{space_id}
```
<Tabs
defaultValue="curl_get_knowledge"
groupId="chat1"
values={[
{label: 'Curl', value: 'curl_get_knowledge'},
{label: 'Python', value: 'python_get_knowledge'},
]
}>
<TabItem value="curl_get_knowledge">
```shell
DBGPT_API_KEY="dbgpt"
SPACE_ID="{YOUR_SPACE_ID}"
curl --location --request GET "http://localhost:5000/api/v2/serve/knowledge/spaces/$SPACE_ID" \
--header "Authorization: Bearer $DBGPT_API_KEY"
```
</TabItem>
<TabItem value="python_get_knowledge">
```python
from dbgpt.client.client import Client
from dbgpt.client.knowledge import get_space
DBGPT_API_KEY = "dbgpt"
space_id = "{your_space_id}"
client = Client(api_key=DBGPT_API_KEY)
res = await get_space(client=client, space_id=space_id)
```
</TabItem>
</Tabs>
#### Path Parameters
________
<b>id</b> <font color="gray"> string </font> <font color="red"> Required </font>
knowledge space id
________
#### Response body
Return <a href="#the-space-object">Space Object</a>
### List Knowledge Spaces
```python
GET /api/v2/serve/knowledge/spaces
```
<Tabs
defaultValue="curl_list_knowledge"
groupId="chat1"
values={[
{label: 'Curl', value: 'curl_list_knowledge'},
{label: 'Python', value: 'python_list_knowledge'},
]
}>
<TabItem value="curl_list_knowledge">
```shell
DBGPT_API_KEY="dbgpt"
curl --location --request GET 'http://localhost:5000/api/v2/serve/knowledge/spaces' \
--header "Authorization: Bearer $DBGPT_API_KEY"
```
</TabItem>
<TabItem value="python_list_knowledge">
```python
from dbgpt.client.client import Client
from dbgpt.client.knowledge import list_space
DBGPT_API_KEY = "dbgpt"
client = Client(api_key=DBGPT_API_KEY)
res = await list_space(client=client)
```
</TabItem>
</Tabs>
#### Response body
Return <a href="#the-space-object">Space Object</a> List
### The Space Object
________
<b>id</b> <font color="gray"> string </font>
space id
________
<b>name</b> <font color="gray"> string </font>
knowledge space name
________
<b>vector_type</b> <font color="gray"> string </font>
vector db type, such as `Chroma` or `Milvus`; the default is `Chroma`
________
<b>desc</b> <font color="gray"> string </font> <font color="red"> Optional </font>
description of the knowledge space
________
<b>owner</b> <font color="gray"> string </font> <font color="red"> Optional </font>
The owner of the knowledge space
________
<b>context</b> <font color="gray"> string </font> <font color="red"> Optional </font>
The argument context of the knowledge space
________
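Putting the space APIs together, the following is a minimal end-to-end sketch. The space id passed to `get_space` and `delete_space` is a placeholder, and the exact return types of the helpers are not guaranteed here:
```python
import asyncio

from dbgpt.client.client import Client, ClientException
from dbgpt.client.knowledge import create_space, delete_space, get_space, list_space
from dbgpt.client.schemas import SpaceModel

DBGPT_API_KEY = "dbgpt"


async def main():
    client = Client(api_key=DBGPT_API_KEY)
    try:
        # Create a new space
        res = await create_space(client, SpaceModel(
            name="test_space_e2e",
            vector_type="Chroma",
            desc="end-to-end example space",
            owner="dbgpt"))
        print(res)

        # List all spaces
        print(await list_space(client))

        # Get and delete a space by id (replace the placeholder with a real id)
        space_id = "{your_space_id}"
        print(await get_space(client=client, space_id=space_id))
        print(await delete_space(client=client, space_id=space_id))
    except ClientException as e:
        print(f"request failed: {e}")


if __name__ == "__main__":
    asyncio.run(main())
```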
View File
@ -356,6 +356,39 @@ const sidebars = {
},
},
{
type: "category",
label: "API Reference",
collapsed: false,
collapsible: false,
items: [
{
type: 'doc',
id: 'api/introduction'
},
{
type: 'doc',
id: 'api/chat'
},
{
type: 'doc',
id: 'api/app'
},
{
type: 'doc',
id: 'api/flow'
},
{
type: 'doc',
id: 'api/knowledge'
},
],
link: {
type: 'generated-index',
slug: "api",
},
},
{
type: "category",
label: "Modules",
View File
@ -27,7 +27,7 @@ async def main():
DBGPT_API_KEY = "dbgpt"
client = Client(api_key=DBGPT_API_KEY)
res = await list_app(client)
print(res.json())
print(res)
if __name__ == "__main__":
View File
@ -1,6 +1,5 @@
import asyncio
from dbgpt.client.app import list_app
from dbgpt.client.client import Client
from dbgpt.client.flow import list_flow
@ -40,7 +39,7 @@ async def main():
DBGPT_API_KEY = "dbgpt"
client = Client(api_key=DBGPT_API_KEY)
res = await list_flow(client)
print(res.json())
print(res)
if __name__ == "__main__":
View File
@ -1,7 +1,8 @@
import asyncio
from dbgpt.client.client import Client
from dbgpt.client.knowledge import list_space
from dbgpt.client.knowledge import create_space
from dbgpt.client.schemas import SpaceModel
"""Client: Simple Knowledge CRUD example
@ -72,9 +73,20 @@ async def main():
DBGPT_API_KEY = "dbgpt"
client = Client(api_key=DBGPT_API_KEY)
res = await create_space(
client,
SpaceModel(
name="test_space_1",
vector_type="Chroma",
desc="for client space desc",
owner="dbgpt",
),
)
print(res)
# list all spaces
res = await list_space(client)
print(res.json())
# res = await list_space(client)
# print(res)
# get space
# res = await get_space(client, space_id='5')
@ -86,7 +98,8 @@ async def main():
# res = await update_space(client, SpaceModel(name="test_space", vector_type="Chroma", desc="for client space333", owner="dbgpt"))
# delete space
# res = await delete_space(client, space_id='37')
# res = await delete_space(client, space_id='31')
# print(res)
# list all documents
# res = await list_document(client)
@ -102,7 +115,7 @@ async def main():
# , doc_file=('your_file_name', open('{your_file_path}', 'rb'))))
# sync document
# res = await sync_document(client, sync_model=SyncModel(doc_id="153", space_id="40", model_name="text2vec", chunk_parameters=ChunkParameters(chunk_strategy="Automatic")))
# res = await sync_document(client, sync_model=SyncModel(doc_id="157", space_id="49", model_name="text2vec", chunk_parameters=ChunkParameters(chunk_strategy="Automatic")))
if __name__ == "__main__":