# run-llama-stack.yaml
version: '3'
image_name: ollama-llama-stack-config
apis:
- agents
- inference
- safety
- telemetry
- tool_runtime
- vector_io
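
# Every API enabled above needs a matching provider in the `providers:`
# section below. Note that `safety` is listed but has no provider entry in
# this file; depending on the llama-stack version, this may require adding a
# safety provider or dropping the API from the list.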
logging:
  level: DEBUG  # Set root logger to DEBUG
  category_levels:
    llama_stack: DEBUG  # Enable DEBUG for all llama_stack modules
    llama_stack.providers.remote.inference.vllm: DEBUG
    llama_stack.providers.inline.agents.meta_reference: DEBUG
    llama_stack.providers.inline.agents.meta_reference.agent_instance: DEBUG
    llama_stack.providers.inline.vector_io.faiss: DEBUG
    llama_stack.providers.inline.telemetry.meta_reference: DEBUG
    llama_stack.core: DEBUG
    llama_stack.apis: DEBUG
    uvicorn: DEBUG
    uvicorn.access: INFO  # Keep HTTP requests at INFO to reduce noise
    fastapi: DEBUG
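
# category_levels override the root `level` for matching logger namespaces;
# child loggers inherit from their parents (standard Python logging), so
# entries like llama_stack.providers.remote.inference.vllm are only needed
# when a subtree should differ from the llama_stack default set above.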
providers:
  vector_io:
  - config:
      kvstore:
        db_path: /tmp/faiss_store.db
        type: sqlite
    provider_id: faiss
    provider_type: inline::faiss
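
  # The FAISS provider persists its index metadata through the sqlite kvstore
  # above. /tmp is typically cleared on reboot; for a durable index, point
  # db_path at a persistent location, e.g. (path is an example):
  #   kvstore:
  #     db_path: ~/.llama/distributions/ollama/faiss_store.db
  #     type: sqlite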
  agents:
  - config:
      persistence_store:
        db_path: /tmp/agents_store.db
        namespace: null
        type: sqlite
      responses_store:
        db_path: /tmp/responses_store.db
        type: sqlite
    provider_id: meta-reference
    provider_type: inline::meta-reference
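
  # The agents provider above keeps two stores: persistence_store holds agent
  # sessions and turn state, while responses_store backs the OpenAI-compatible
  # Responses API records. Both live under /tmp here, so agent history does
  # not survive a reboot.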
  inference:
  - provider_id: vllm-inference
    provider_type: remote::vllm
    config:
      url: ${env.VLLM_URL:=http://localhost:8000/v1}
      max_tokens: ${env.VLLM_MAX_TOKENS:=4096}
      api_token: ${env.VLLM_API_TOKEN:=fake}
      tls_verify: ${env.VLLM_TLS_VERIFY:=false}
  - provider_id: google-vertex
    provider_type: remote::vertexai
    config:
      project: ${env.VERTEXAI_PROJECT}
      region: ${env.VERTEXAI_REGION:=us-east5}
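
  # `${env.VAR:=default}` expands the environment variable at startup, falling
  # back to the value after `:=` when unset. VERTEXAI_PROJECT has no default,
  # so it must be set before launch, e.g. (values are placeholders):
  #   export VERTEXAI_PROJECT=my-gcp-project
  #   export VLLM_URL=http://vllm.internal:8000/v1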
  tool_runtime:
  - provider_id: model-context-protocol
    provider_type: remote::model-context-protocol
    config: {}
    module: null
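
  # The MCP provider takes no static config; individual MCP servers are
  # attached at runtime as toolgroups. A sketch using the llama-stack client
  # SDK (toolgroup id and endpoint URI are placeholders):
  #   client.toolgroups.register(
  #       toolgroup_id="mcp::my-tools",
  #       provider_id="model-context-protocol",
  #       mcp_endpoint={"uri": "http://localhost:3000/sse"},
  #   )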
  telemetry:
  - config:
      service_name: 'llama-stack'
      sinks: console,sqlite
      sqlite_db_path: /tmp/trace_store.db
    provider_id: meta-reference
    provider_type: inline::meta-reference
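
  # `sinks: console,sqlite` emits spans both to stdout and to the sqlite file
  # above; the sqlite sink is what makes traces queryable after the fact.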
metadata_store:
  type: sqlite
  db_path: /tmp/registry.db
  namespace: null
inference_store:
  type: sqlite
  db_path: /tmp/inference_store.db
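
# A typical way to launch the stack with this file (the port is an example;
# 8321 is the usual llama-stack default):
#   llama stack run ./run-llama-stack.yaml --port 8321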