refactor: Modify default webserver port to 5670 (#1410)

This commit is contained in:
Fangyin Cheng 2024-04-12 11:47:24 +08:00 committed by GitHub
parent aea575e0b4
commit c3ae1915d2
No known key found for this signature in database
GPG Key ID: B5690EEEBB952194
26 changed files with 58 additions and 62 deletions

View File

@ -10,7 +10,7 @@
#*******************************************************************#
#** Webserver Port **#
#*******************************************************************#
WEB_SERVER_PORT=7860
# DBGPT_WEBSERVER_PORT=5670
#*******************************************************************#
#*** LLM PROVIDER ***#

View File

@ -26,7 +26,7 @@ class Config(metaclass=Singleton):
# Gradio language version: en, zh
self.LANGUAGE = os.getenv("LANGUAGE", "en")
self.WEB_SERVER_PORT = int(os.getenv("WEB_SERVER_PORT", 5000))
self.DBGPT_WEBSERVER_PORT = int(os.getenv("DBGPT_WEBSERVER_PORT", 5670))
self.debug_mode = False
self.skip_reprompt = False

View File

@ -254,14 +254,10 @@ class RetrieveSummaryAssistantAgent(ConversableAgent):
reply_message.success = is_success
return reply_message
async def verify(
self,
message: AgentMessage,
sender: Agent,
reviewer: Optional[Agent] = None,
**kwargs,
async def correctness_check(
self, message: AgentMessage
) -> Tuple[bool, Optional[str]]:
"""Verify the correctness of the message."""
"""Verify the correctness of the results."""
action_report = message.action_report
task_result = ""
if action_report:

View File

@ -140,9 +140,9 @@ def initialize_app(param: WebServerParameters = None, args: List[str] = None):
model_name = param.model_name or CFG.LLM_MODEL
param.model_name = model_name
param.port = param.port or CFG.WEB_SERVER_PORT
param.port = param.port or CFG.DBGPT_WEBSERVER_PORT
if not param.port:
param.port = 5000
param.port = 5670
print(param)

View File

@ -6,7 +6,7 @@ import click
from dbgpt.configs.model_config import DATASETS_DIR
_DEFAULT_API_ADDRESS: str = "http://127.0.0.1:5000"
_DEFAULT_API_ADDRESS: str = "http://127.0.0.1:5670"
API_ADDRESS: str = _DEFAULT_API_ADDRESS
logger = logging.getLogger("dbgpt_cli")

View File

@ -61,7 +61,7 @@ class Client:
Args:
api_base: Optional[str], a full URL for the DB-GPT API.
Defaults to the `http://localhost:5000/api/v2`.
Defaults to the `http://localhost:5670/api/v2`.
api_key: Optional[str], The dbgpt api key to use for authentication.
Defaults to None.
timeout: Optional[httpx._types.TimeoutTypes]: The timeout to use.
@ -77,14 +77,14 @@ class Client:
from dbgpt.client import Client
DBGPT_API_BASE = "http://localhost:5000/api/v2"
DBGPT_API_BASE = "http://localhost:5670/api/v2"
DBGPT_API_KEY = "dbgpt"
client = Client(api_base=DBGPT_API_BASE, api_key=DBGPT_API_KEY)
client.chat(model="chatgpt_proxyllm", messages="Hello?")
"""
if not api_base:
api_base = os.getenv(
"DBGPT_API_BASE", f"http://localhost:5000/{CLIENT_API_PATH}/{version}"
"DBGPT_API_BASE", f"http://localhost:5670/{CLIENT_API_PATH}/{version}"
)
if not api_key:
api_key = os.getenv("DBGPT_API_KEY")
@ -146,7 +146,7 @@ class Client:
from dbgpt.client import Client
DBGPT_API_BASE = "http://localhost:5000/api/v2"
DBGPT_API_BASE = "http://localhost:5670/api/v2"
DBGPT_API_KEY = "dbgpt"
client = Client(api_base=DBGPT_API_BASE, api_key=DBGPT_API_KEY)
res = await client.chat(model="chatgpt_proxyllm", messages="Hello?")
@ -222,7 +222,7 @@ class Client:
from dbgpt.client import Client
DBGPT_API_BASE = "http://localhost:5000/api/v2"
DBGPT_API_BASE = "http://localhost:5670/api/v2"
DBGPT_API_KEY = "dbgpt"
client = Client(api_base=DBGPT_API_BASE, api_key=DBGPT_API_KEY)
res = await client.chat_stream(model="chatgpt_proxyllm", messages="Hello?")

View File

@ -1022,7 +1022,7 @@ def initialize_worker_manager_in_client(
model_path: str = None,
run_locally: bool = True,
controller_addr: str = None,
local_port: int = 5000,
local_port: int = 5670,
embedding_model_name: str = None,
embedding_model_path: str = None,
start_listener: Callable[["WorkerManager"], None] = None,

View File

@ -23,7 +23,7 @@ model_path = LLM_MODEL_CONFIG[model_name]
# or vllm
model_type = "huggingface"
controller_addr = "http://127.0.0.1:5000"
controller_addr = "http://127.0.0.1:5670"
result_csv_file = None

View File

@ -66,7 +66,7 @@ def _run_current_with_gunicorn(app: str, config_path: str, kwargs: Dict):
env_to_app.update(os.environ)
app_env = EnvArgumentParser._kwargs_to_env_key_value(kwargs)
env_to_app.update(app_env)
cmd = f"uvicorn {app} --host 0.0.0.0 --port 5000"
cmd = f"uvicorn {app} --host 0.0.0.0 --port 5670"
if "windows" in platform.system().lower():
raise Exception("Not support on windows")
else: # macOS, Linux, and other Unix-like systems
@ -137,8 +137,8 @@ def _get_ports_by_cmdline_part(service_keys: List[str]) -> List[int]:
except (psutil.NoSuchProcess, psutil.AccessDenied, psutil.ZombieProcess):
pass
# Sort ports with preference for 8000 and 5000
ports.sort(key=lambda x: (x != 8000, x != 5000, x))
# Sort ports with preference for 8000 and 5670
ports.sort(key=lambda x: (x != 8000, x != 5670, x))
return ports

View File

@ -36,7 +36,7 @@ services:
env_file:
- .env.template
ports:
- 5000:5000/tcp
- 5670:5670/tcp
# webserver may be failed, it must wait all sqls in /docker-entrypoint-initdb.d execute finish.
restart: unless-stopped
networks:

View File

@ -62,6 +62,6 @@ RUN (if [ "${LANGUAGE}" = "zh" ]; \
fi;)
ENV PYTHONPATH "/app:$PYTHONPATH"
EXPOSE 5000
EXPOSE 5670
CMD ["dbgpt", "start", "webserver"]

View File

@ -30,7 +30,7 @@ import TabItem from '@theme/TabItem';
DBGPT_API_KEY=dbgpt
APP_ID={YOUR_APP_ID}
curl -X POST "http://localhost:5000/api/v2/chat/completions" \
curl -X POST "http://localhost:5670/api/v2/chat/completions" \
-H "Authorization: Bearer $DBGPT_API_KEY" \
-H "accept: application/json" \
-H "Content-Type: application/json" \
@ -87,7 +87,7 @@ GET /api/v2/serve/apps/{app_id}
```shell
DBGPT_API_KEY=dbgpt
APP_ID={YOUR_APP_ID}
curl -X GET "http://localhost:5000/api/v2/serve/apps/$APP_ID" -H "Authorization: Bearer $DBGPT_API_KEY"
curl -X GET "http://localhost:5670/api/v2/serve/apps/$APP_ID" -H "Authorization: Bearer $DBGPT_API_KEY"
```
</TabItem>
@ -139,7 +139,7 @@ GET /api/v2/serve/apps
```shell
DBGPT_API_KEY=dbgpt
curl -X GET 'http://localhost:5000/api/v2/serve/apps' -H "Authorization: Bearer $DBGPT_API_KEY"
curl -X GET 'http://localhost:5670/api/v2/serve/apps' -H "Authorization: Bearer $DBGPT_API_KEY"
```
</TabItem>

View File

@ -30,7 +30,7 @@ import TabItem from '@theme/TabItem';
```shell
DBGPT_API_KEY="dbgpt"
curl -X POST "http://localhost:5000/api/v2/chat/completions" \
curl -X POST "http://localhost:5670/api/v2/chat/completions" \
-H "Authorization: Bearer $DBGPT_API_KEY" \
-H "accept: application/json" \
-H "Content-Type: application/json" \
@ -94,7 +94,7 @@ data: [DONE]
```shell
DBGPT_API_KEY="dbgpt"
curl -X POST "http://localhost:5000/api/v2/chat/completions" \
curl -X POST "http://localhost:5670/api/v2/chat/completions" \
-H "Authorization: Bearer $DBGPT_API_KEY" \
-H "accept: application/json" \
-H "Content-Type: application/json" \

View File

@ -30,7 +30,7 @@ import TabItem from '@theme/TabItem';
DBGPT_API_KEY=dbgpt
DB_NAME="{your_db_name}"
curl -X POST "http://localhost:5000/api/v2/chat/completions" \
curl -X POST "http://localhost:5670/api/v2/chat/completions" \
-H "Authorization: Bearer $DBGPT_API_KEY" \
-H "accept: application/json" \
-H "Content-Type: application/json" \
@ -126,7 +126,7 @@ DELETE /api/v2/serve/datasources
DBGPT_API_KEY=dbgpt
DATASOURCE_ID={YOUR_DATASOURCE_ID}
curl -X DELETE "http://localhost:5000/api/v2/serve/datasources/$DATASOURCE_ID" \
curl -X DELETE "http://localhost:5670/api/v2/serve/datasources/$DATASOURCE_ID" \
-H "Authorization: Bearer $DBGPT_API_KEY" \
```
@ -180,7 +180,7 @@ GET /api/v2/serve/datasources/{datasource_id}
DBGPT_API_KEY=dbgpt
DATASOURCE_ID={YOUR_DATASOURCE_ID}
curl -X GET "http://localhost:5000/api/v2/serve/datasources/$DATASOURCE_ID" -H "Authorization: Bearer $DBGPT_API_KEY"
curl -X GET "http://localhost:5670/api/v2/serve/datasources/$DATASOURCE_ID" -H "Authorization: Bearer $DBGPT_API_KEY"
```
</TabItem>
@ -234,7 +234,7 @@ GET /api/v2/serve/datasources
```shell
DBGPT_API_KEY=dbgpt
curl -X GET "http://localhost:5000/api/v2/serve/datasources" -H "Authorization: Bearer $DBGPT_API_KEY"
curl -X GET "http://localhost:5670/api/v2/serve/datasources" -H "Authorization: Bearer $DBGPT_API_KEY"
```
</TabItem>

View File

@ -30,7 +30,7 @@ import TabItem from '@theme/TabItem';
DBGPT_API_KEY=dbgpt
FLOW_ID={YOUR_FLOW_ID}
curl -X POST "http://localhost:5000/api/v2/chat/completions" \
curl -X POST "http://localhost:5670/api/v2/chat/completions" \
-H "Authorization: Bearer $DBGPT_API_KEY" \
-H "accept: application/json" \
-H "Content-Type: application/json" \
@ -107,7 +107,7 @@ DELETE /api/v2/serve/awel/flows
DBGPT_API_KEY=dbgpt
FLOW_ID={YOUR_FLOW_ID}
curl -X DELETE "http://localhost:5000/api/v2/serve/awel/flows/$FLOW_ID" \
curl -X DELETE "http://localhost:5670/api/v2/serve/awel/flows/$FLOW_ID" \
-H "Authorization: Bearer $DBGPT_API_KEY" \
```
@ -161,7 +161,7 @@ GET /api/v2/serve/awel/flows/{flow_id}
DBGPT_API_KEY=dbgpt
FLOW_ID={YOUR_FLOW_ID}
curl -X GET "http://localhost:5000/api/v2/serve/awel/flows/$FLOW_ID" -H "Authorization: Bearer $DBGPT_API_KEY"
curl -X GET "http://localhost:5670/api/v2/serve/awel/flows/$FLOW_ID" -H "Authorization: Bearer $DBGPT_API_KEY"
```
</TabItem>
@ -215,7 +215,7 @@ GET /api/v2/serve/awel/flows
```shell
DBGPT_API_KEY=dbgpt
curl -X GET "http://localhost:5000/api/v2/serve/awel/flows" -H "Authorization: Bearer $DBGPT_API_KEY"
curl -X GET "http://localhost:5670/api/v2/serve/awel/flows" -H "Authorization: Bearer $DBGPT_API_KEY"
```
</TabItem>

View File

@ -15,7 +15,7 @@ All API requests should include your API key in an Authorization HTTP header as
Example with the DB-GPT API curl command:
```bash
curl "http://localhost:5000/api/v2/chat/completions" \
curl "http://localhost:5670/api/v2/chat/completions" \
-H "Authorization: Bearer $DBGPT_API_KEY" \
```
Example with the DB-GPT Client Python package:

View File

@ -30,7 +30,7 @@ import TabItem from '@theme/TabItem';
DBGPT_API_KEY=dbgpt
SPACE_NAME={YOUR_SPACE_NAME}
curl -X POST "http://localhost:5000/api/v2/chat/completions" \
curl -X POST "http://localhost:5670/api/v2/chat/completions" \
-H "Authorization: Bearer $DBGPT_API_KEY" \
-H "accept: application/json" \
-H "Content-Type: application/json" \
@ -334,7 +334,7 @@ POST /api/v2/serve/knowledge/spaces
```shell
DBGPT_API_KEY="dbgpt"
curl --location --request POST 'http://localhost:5000/api/v2/serve/knowledge/spaces' \
curl --location --request POST 'http://localhost:5670/api/v2/serve/knowledge/spaces' \
--header 'Authorization: Bearer $DBGPT_API_KEY' \
--header 'Content-Type: application/json' \
--data-raw '{"desc": "for client space desc", "name": "test_space_2", "owner": "dbgpt", "vector_type": "Chroma"
@ -410,7 +410,7 @@ PUT /api/v2/serve/knowledge/spaces
```shell
DBGPT_API_KEY="dbgpt"
curl --location --request PUT 'http://localhost:5000/api/v2/serve/knowledge/spaces' \
curl --location --request PUT 'http://localhost:5670/api/v2/serve/knowledge/spaces' \
--header 'Authorization: Bearer $DBGPT_API_KEY' \
--header 'Content-Type: application/json' \
--data-raw '{"desc": "for client space desc v2", "id": "49", "name": "test_space_2", "owner": "dbgpt", "vector_type": "Chroma"
@ -493,7 +493,7 @@ DELETE /api/v2/serve/knowledge/spaces
DBGPT_API_KEY=dbgpt
SPACE_ID={YOUR_SPACE_ID}
curl -X DELETE "http://localhost:5000/api/v2/serve/knowledge/spaces/$SPACE_ID" \
curl -X DELETE "http://localhost:5670/api/v2/serve/knowledge/spaces/$SPACE_ID" \
-H "Authorization: Bearer $DBGPT_API_KEY" \
-H "accept: application/json" \
-H "Content-Type: application/json" \
@ -548,7 +548,7 @@ GET /api/v2/serve/knowledge/spaces/{space_id}
```shell
DBGPT_API_KEY=dbgpt
SPACE_ID={YOUR_SPACE_ID}
curl -X GET "http://localhost:5000/api/v2/serve/knowledge/spaces/$SPACE_ID" -H "Authorization: Bearer $DBGPT_API_KEY"
curl -X GET "http://localhost:5670/api/v2/serve/knowledge/spaces/$SPACE_ID" -H "Authorization: Bearer $DBGPT_API_KEY"
```
</TabItem>
@ -600,7 +600,7 @@ GET /api/v2/serve/knowledge/spaces
```shell
DBGPT_API_KEY=dbgpt
curl -X GET 'http://localhost:5000/api/v2/serve/knowledge/spaces' -H "Authorization: Bearer $DBGPT_API_KEY"
curl -X GET 'http://localhost:5670/api/v2/serve/knowledge/spaces' -H "Authorization: Bearer $DBGPT_API_KEY"
```
</TabItem>

View File

@ -27,7 +27,7 @@ print(completion.choices[0].message.content)
## Application service layer API
The service layer API refers to the API exposed on port 5000 after starting the webserver, which is mainly focused on the application layer. It can be divided into the following parts according to categories
The service layer API refers to the API exposed on port 5670 after starting the webserver, which is mainly focused on the application layer. It can be divided into the following parts according to categories
- Chat API
- Editor API
@ -37,7 +37,7 @@ The service layer API refers to the API exposed on port 5000 after starting the
- Model API
:::info
Note: After starting the webserver, open http://127.0.0.1:5000/docs to view details
Note: After starting the webserver, open http://127.0.0.1:5670/docs to view details
Regarding the service layer API, in terms of strategy in the early days, we maintained the principle of minimum availability and openness. APIs that are stably exposed to the outside world will carry version information, such as
- /api/v1/
@ -164,5 +164,5 @@ Currently, due to frequent changes in Knowledge and Prompt, the relevant APIs ar
:::
More detailed interface parameters can be viewed at `http://127.0.0.1:5000/docs`
More detailed interface parameters can be viewed at `http://127.0.0.1:5670/docs`

View File

@ -52,7 +52,7 @@ Usage: dbgpt knowledge [OPTIONS] COMMAND [ARGS]...
Options:
--address TEXT Address of the Api server(If not set, try to read from
environment variable: API_ADDRESS). [default:
http://127.0.0.1:5000]
http://127.0.0.1:5670]
--help Show this message and exit.
Commands:
@ -374,7 +374,7 @@ INFO: Uvicorn running on http://0.0.0.0:8000 (Press CTRL+C to quit)
```
#### Webserver command
The front-end service can be started through `dbgpt start webserver`, the default port is 5000, and can be accessed through `http://127.0.0.1:5000`
The front-end service can be started through `dbgpt start webserver`, the default port is 5670, and can be accessed through `http://127.0.0.1:5670`
```python
~ dbgpt start webserver --help

View File

@ -58,7 +58,7 @@ with DAG("simple_dag_example") as dag:
Before performing access verification, the project needs to be started first: `python dbgpt/app/dbgpt_server.py`
```bash
% curl -X GET http://127.0.0.1:5000/api/v1/awel/trigger/examples/hello\?name\=zhangsan
% curl -X GET http://127.0.0.1:5670/api/v1/awel/trigger/examples/hello\?name\=zhangsan
"Hello, zhangsan, your age is 18"
```

View File

@ -15,7 +15,7 @@ You can try to use gradio's [network](https://github.com/gradio-app/gradio/blob/
import secrets
from gradio import networking
token=secrets.token_urlsafe(32)
local_port=5000
local_port=5670
url = networking.setup_tunnel('0.0.0.0', local_port, token)
print(f'Public url: {url}')
time.sleep(60 * 60 * 24)

View File

@ -44,7 +44,7 @@ You can view the specific usage through the command `bash docker/build_all_image
```python
docker run --ipc host --gpus all -d \
-p 5000:5000 \
-p 5670:5670 \
-e LOCAL_DB_TYPE=sqlite \
-e LOCAL_DB_PATH=data/default_sqlite.db \
-e LLM_MODEL=vicuna-13b-v1.5 \
@ -53,7 +53,7 @@ docker run --ipc host --gpus all -d \
--name dbgpt \
eosphorosai/dbgpt
```
Open the browser and visit [http://localhost:5000](http://localhost:5000)
Open the browser and visit [http://localhost:5670](http://localhost:5670)
- `-e LLM_MODEL=vicuna-13b-v1.5`, which means the base model uses `vicuna-13b-v1.5`. For more model usage, you can view the configuration in `/pilot/configs/model_config.LLM_MODEL_CONFIG`.
- `-v /data/models:/app/models`, specifies the model file to be mounted. The directory `/data/models` is mounted in `/app/models` of the container. Of course, it can be replaced with other paths.
@ -67,7 +67,7 @@ docker logs dbgpt -f
```python
docker run --ipc host --gpus all -d -p 3306:3306 \
-p 5000:5000 \
-p 5670:5670 \
-e LOCAL_DB_HOST=127.0.0.1 \
-e LOCAL_DB_PASSWORD=aa123456 \
-e MYSQL_ROOT_PASSWORD=aa123456 \
@ -77,7 +77,7 @@ docker run --ipc host --gpus all -d -p 3306:3306 \
--name db-gpt-allinone \
db-gpt-allinone
```
Open the browser and visit [http://localhost:5000](http://localhost:5000)
Open the browser and visit [http://localhost:5670](http://localhost:5670)
- `-e LLM_MODEL=vicuna-13b-v1.5`, which means the base model uses `vicuna-13b-v1.5`. For more model usage, you can view the configuration in `/pilot/configs/model_config.LLM_MODEL_CONFIG`.
- `-v /data/models:/app/models`, specifies the model file to be mounted. The directory `/data/models` is mounted in `/app/models` of the container. Of course, it can be replaced with other paths.
@ -92,7 +92,7 @@ docker logs db-gpt-allinone -f
PROXY_API_KEY="You api key"
PROXY_SERVER_URL="https://api.openai.com/v1/chat/completions"
docker run --gpus all -d -p 3306:3306 \
-p 5000:5000 \
-p 5670:5670 \
-e LOCAL_DB_HOST=127.0.0.1 \
-e LOCAL_DB_PASSWORD=aa123456 \
-e MYSQL_ROOT_PASSWORD=aa123456 \
@ -107,6 +107,6 @@ db-gpt-allinone
- `-e LLM_MODEL=proxyllm`, set the model to serve the third-party model service API, which can be openai or fastchat interface.
- `-v /data/models/text2vec-large-chinese:/app/models/text2vec-large-chinese`, sets the knowledge base embedding model to `text2vec`
Open the browser and visit [http://localhost:5000](http://localhost:5000)
Open the browser and visit [http://localhost:5670](http://localhost:5670)

View File

@ -23,4 +23,4 @@ For more configuration content, you can view the `docker-compose.yml` file
## Visit
Open the browser and visit [http://localhost:5000](http://localhost:5000)
Open the browser and visit [http://localhost:5670](http://localhost:5670)

View File

@ -458,4 +458,4 @@ python pilot/server/dbgpt_server.py
:::
## Visit website
Open the browser and visit [`http://localhost:5000`](http://localhost:5000)
Open the browser and visit [`http://localhost:5670`](http://localhost:5670)

View File

@ -168,13 +168,13 @@ python pilot/server/dbgpt_server.py
## Visit website
#### 1. Production model:
Open the browser and visit [`http://localhost:5000`](http://localhost:5000)
Open the browser and visit [`http://localhost:5670`](http://localhost:5670)
#### 2. Development mode:
```
cd web & npm install
cp .env.template .env
// set the API_BASE_URL to your DB-GPT server address, it usually is http://localhost:5000
// set the API_BASE_URL to your DB-GPT server address, it usually is http://localhost:5670
npm run dev
```
Open the browser and visit [`http://localhost:3000`](http://localhost:3000)

View File

@ -21,7 +21,7 @@
.. code-block:: shell
DBGPT_SERVER="http://127.0.0.1:5000"
DBGPT_SERVER="http://127.0.0.1:5670"
curl -X POST $DBGPT_SERVER/api/v1/awel/trigger/examples/rag/rewrite \
-H "Content-Type: application/json" -d '{
"query": "compare curry and james",