From aeace47057bac4d418850ac0c167d48a80c1332c Mon Sep 17 00:00:00 2001 From: Koh Meng Hui Date: Wed, 16 Oct 2024 20:24:11 +0800 Subject: [PATCH] Fix 503 when private-gpt reaches the ollama service When running private-gpt with an external ollama API, the ollama service returns 503 on startup because the ollama proxy service (traefik) might not be ready yet. - Add a healthcheck to the ollama service that tests the connection to the external ollama API - The private-gpt-ollama service now depends on the ollama service being service_healthy --- docker-compose.yaml | 9 ++++++++- 1 file changed, 8 insertions(+), 1 deletion(-) diff --git a/docker-compose.yaml b/docker-compose.yaml index e1e8b88d..d81286d8 100644 --- a/docker-compose.yaml +++ b/docker-compose.yaml @@ -29,7 +29,8 @@ services: - ollama-cuda - ollama-api depends_on: - - ollama + ollama: + condition: service_healthy # Private-GPT service for the local mode # This service builds from a local Dockerfile and runs the application in local mode. @@ -60,6 +61,12 @@ services: # This will route requests to the Ollama service based on the profile. ollama: image: traefik:v2.10 + healthcheck: + test: ["CMD", "sh", "-c", "wget -q --spider http://ollama:11434 || exit 1"] + interval: 10s + retries: 3 + start_period: 5s + timeout: 5s ports: - "8080:8080" command: