version: '3.8'

services:
  app:
    build: .
    ports:
      - "8080:8080"
    volumes:
      - ./data:/app/data
      - ./config.yaml:/app/config.yaml
    depends_on:
      - litellm
    environment:
      - GIN_MODE=release
      - TZ=UTC
    networks:
      - byom-network
    healthcheck:
      test: ["CMD", "wget", "--spider", "http://localhost:8080/health"]
      interval: 30s
      timeout: 10s
      retries: 3

  litellm:
    image: ghcr.io/berriai/litellm:main
    ports:
      - "8000:8000"
    environment:
      - OPENAI_API_KEY=${OPENAI_API_KEY}
      - MODEL_CONFIG_PATH=/app/model_config.yaml
    volumes:
      - ./model_config.yaml:/app/model_config.yaml
    networks:
      - byom-network
    healthcheck:
      test: ["CMD", "curl", "-f", "http://localhost:8000/health"]
      interval: 30s
      timeout: 10s
      retries: 3

# Both services must share a defined network; without this top-level block,
# `docker compose up` fails with "service refers to undefined network byom-network".
networks:
  byom-network:
    driver: bridge
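The litellm service mounts a ./model_config.yaml from the host. A minimal sketch of that file, assuming LiteLLM's standard proxy model_list format; the model entry below is a placeholder, not part of the original setup:

# model_config.yaml (illustrative; adjust model names and providers to your deployment)
model_list:
  - model_name: gpt-4o                      # alias clients use when calling the proxy
    litellm_params:
      model: openai/gpt-4o                  # upstream provider/model
      api_key: os.environ/OPENAI_API_KEY    # read from the container environment

With both files in place, `docker compose up --build -d` starts the stack. The app container reaches the proxy at http://litellm:8000 over byom-network (assuming the proxy listens on port 8000, as the port mapping suggests), while the host sees the app on 8080 and the proxy on 8000.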