-
Notifications
You must be signed in to change notification settings - Fork 0
Expand file tree
/
Copy path.env.example
More file actions
36 lines (25 loc) · 1012 Bytes
/
.env.example
File metadata and controls
36 lines (25 loc) · 1012 Bytes
1
2
3
4
5
6
7
8
9
10
11
12
13
14
15
16
17
18
19
20
21
22
23
24
25
26
27
28
29
30
31
32
33
34
35
36
# Example environment file — copy to .env and fill in secrets before use.

# Model name the client requests from the vLLM server.
MODEL_NAME=texttinyllama

# vLLM server endpoint. Lambda Cloud + README SSH tunnel: http://127.0.0.1:8000
VLLM_BASE_URL=http://127.0.0.1:8000
# TLS certificate verification toggle for the vLLM connection.
# NOTE(review): VLLM_BASE_URL above is plain http, so this setting likely only
# matters when pointing at an https endpoint — confirm against the client code.
VLLM_TLS_VERIFY=true
# Named server profile; "baseline" is the default — presumably selects a preset
# of vLLM launch options; verify against the server launch script.
VLLM_SERVER_PROFILE=baseline

# Cost accounting: GPU price in USD per hour (used when not fetched from the API).
GPU_HOURLY_COST_USD=2.5
# Lambda Cloud API credentials and instance selection — intentionally blank in
# this example; fill in privately in your local .env.
LAMBDA_CLOUD_API_KEY=
LAMBDA_INSTANCE_TYPE=
# Presumably switches cost lookup to the Lambda API instead of
# GPU_HOURLY_COST_USD — TODO confirm in the cost-tracking code.
LAMBDA_COST_USE_API=true

# gateway.py bind address (nginx upstream targets this port; add 8766, … for multiple workers)
GATEWAY_HOST=127.0.0.1
GATEWAY_PORT=8765

# Crew / OpenAI client → nginx LB (monitoring/nginx-gateway-lb.conf). Must match listen port there.
GATEWAY_LB_HOST=127.0.0.1
GATEWAY_LB_PORT=8780
# Explicit base wins; if unset, crew defaults to http://GATEWAY_LB_HOST:GATEWAY_LB_PORT/v1
GATEWAY_OPENAI_BASE=http://127.0.0.1:8780/v1
# Set false only if you point crew straight at gateway.py (no nginx)
# GATEWAY_USE_LOAD_BALANCER=false
# Port for the gateway's metrics endpoint — NOTE(review): ensure any scraper
# (e.g. Prometheus) config targets the same port.
GATEWAY_METRICS_PORT=9101

# OpenTelemetry OTLP export targets (4317 is the standard OTLP/gRPC port).
OTEL_EXPORTER_OTLP_ENDPOINT=http://127.0.0.1:4317
OTEL_EXPORTER_OTLP_TRACES_ENDPOINT=http://127.0.0.1:4317
OTEL_SERVICE_NAME=gateway
# "none" disables the OTel traces exporter even though endpoints are set above;
# change to e.g. "otlp" to actually emit traces.
OTEL_TRACES_EXPORTER=none
# Toggle for CrewAI's own tracing integration.
CREWAI_TRACING_ENABLED=true