Mirror of https://github.com/csunny/DB-GPT.git, synced 2025-07-25 13:06:53 +00:00
added tunnel
commit c4bafe455a (parent c5b61e65b9)
@@ -1,23 +1,23 @@
 FROM nvidia/cuda:11.8.0-devel-ubuntu22.04
 
+WORKDIR /app
+
 RUN apt-get update && apt-get install -y \
     git \
     python3 \
     pip
 
-
-WORKDIR /app
-
-COPY . /app
-
-
 # upgrade pip
 RUN pip3 install --upgrade pip
 
+COPY ./requirements.txt /app/requirements.txt
+
 RUN pip install --no-cache-dir -r requirements.txt
 
 RUN python3 -m spacy download zh_core_web_sm
 
-EXPOSE 7860
-
-CMD ["python3", "pilot/server/webserver.py"]
+COPY . /app
+
+EXPOSE 7860
+
+EXPOSE 8000
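The unified Dockerfile above drops the baked-in CMD, so the process to run is chosen at start time (docker-compose supplies it per service via command:, as the compose changes below show). A minimal sketch of building and running the image by hand, assuming an illustrative tag name db-gpt and a GPU-enabled Docker host; the ./models and ./plugins bind mounts from the compose file are omitted for brevity:

# build the shared image from the repository root
docker build -t db-gpt .

# same image, different command: web UI on 7860 or model server on 8000
docker run --gpus all -p 7860:7860 db-gpt python3 pilot/server/webserver.py
docker run --gpus all -p 8000:8000 db-gpt python3 pilot/server/llmserver.py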
@@ -1,21 +0,0 @@
-FROM nvidia/cuda:11.8.0-devel-ubuntu22.04
-
-RUN apt-get update && apt-get install -y \
-    git \
-    python3 \
-    pip
-
-
-WORKDIR /app
-
-COPY . /app
-
-
-# upgrade pip
-RUN pip3 install --upgrade pip
-
-RUN pip install --no-cache-dir -r requirements.txt
-
-EXPOSE 8000
-
-CMD ["python3", "pilot/server/llmserver.py"]
@@ -16,11 +16,16 @@ services:
   webserver:
     build:
       context: .
-      dockerfile: Dockerfile-webserver
+      dockerfile: Dockerfile
+    command: python3 pilot/server/webserver.py
     environment:
       - MODEL_SERVER=http://llmserver:8000
       - LOCAL_DB_HOST=db
       - WEB_SERVER_PORT=7860
+      - ALLOWLISTED_PLUGINS=db_dashboard
+    depends_on:
+      - db
+      - llmserver
     volumes:
       - ./models:/app/models
       - ./plugins:/app/plugins
@@ -28,16 +33,19 @@ services:
     env_file:
       - .env.template
     ports:
-      - 7860:7860
+      - 7860:7860/tcp
     expose:
-      - 7860
+      - 7860/tcp
     restart: unless-stopped
   llmserver:
     build:
       context: .
-      dockerfile: Dockerfile-llmserver
+      dockerfile: Dockerfile
+    command: python3 pilot/server/llmserver.py
     environment:
       - LOCAL_DB_HOST=db
+    depends_on:
+      - db
     volumes:
       - ./models:/app/models
     env_file:
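Since webserver and llmserver now share one Dockerfile and differ only in their command:, the usual workflow is to rebuild once and let depends_on bring up the rest of the stack; a short sketch, assuming the Compose v2 CLI:

# rebuild and start the web UI plus its dependencies (db, llmserver)
docker compose up -d --build webserver

# follow both application logs
docker compose logs -f webserver llmserver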
@@ -52,7 +60,15 @@ services:
             - driver: nvidia
               device_ids: ['0']
               capabilities: [gpu]
+  tunnel:
+    image: cloudflare/cloudflared:latest
+    container_name: cloudflared-tunnel
+    restart: unless-stopped
+    environment:
+      - TUNNEL_URL=http://webserver:7860
+    command: tunnel --no-autoupdate
+    depends_on:
+      - webserver
 
 
 volumes:
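The new tunnel service points cloudflared at the internal webserver address. With only TUNNEL_URL set and no tunnel credentials, cloudflared falls back to a Cloudflare quick tunnel and prints a randomly generated trycloudflare.com URL in its log output, which appears to be the intent here. A sketch of bringing it up and finding that URL, again assuming the Compose v2 CLI:

# start the tunnel; its depends_on brings up the webserver as well
docker compose up -d tunnel

# the public *.trycloudflare.com URL is printed during cloudflared startup
docker compose logs tunnel

For a stable hostname one would instead create a named tunnel in the Cloudflare dashboard and pass its credentials (for example via a TUNNEL_TOKEN environment variable), but this commit only wires up the quick-tunnel form.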