-
Notifications
You must be signed in to change notification settings - Fork 26
Expand file tree
/
Copy pathcompose.yaml
More file actions
25 lines (24 loc) · 812 Bytes
/
compose.yaml
File metadata and controls
25 lines (24 loc) · 812 Bytes
1
2
3
4
5
6
7
8
9
10
11
12
13
14
15
16
17
18
19
20
21
22
23
24
25
services:
  # Application service: built from ./app, exposed on host port 8000.
  app:
    build:
      context: ./app
      dockerfile: Dockerfile
    ports:
      - "8000:8000"  # quoted to avoid YAML sexagesimal parsing of port pairs
    restart: always
    environment:
      - ENDPOINT_URL=http://llm/api/v1/chat/completions  # endpoint to the Provider Service
      - MODEL=anthropic.claude-3-haiku-20240307-v1:0  # LLM model ID used in the Provider Service
      - OPENAI_API_KEY=FAKE_TOKEN  # the actual value will be ignored when using the Provider Service
    healthcheck:
      # Probe the app root with Python's stdlib (no curl/wget needed in the image).
      test: ["CMD", "python3", "-c", "import sys, urllib.request; urllib.request.urlopen(sys.argv[1]).read()", "http://localhost:8000/"]
      interval: 30s
      timeout: 5s
      retries: 3
      start_period: 5s

  # Provider Service
  # This service is used to route requests to the LLM API
  llm:
    provider:
      type: model
    x-defang-llm: true