forked from ace-step/ACE-Step-1.5
-
Notifications
You must be signed in to change notification settings - Fork 1
Expand file tree
/
Copy path.env.example
More file actions
51 lines (44 loc) · 1.9 KB
/
.env.example
File metadata and controls
51 lines (44 loc) · 1.9 KB
# ACE-Step Environment Configuration
# Copy this file to .env and modify as needed
# ==================== Model Settings ====================
# DiT model path
ACESTEP_CONFIG_PATH=acestep-v15-turbo
# LM model path (used when LLM is enabled)
# Available: acestep-5Hz-lm-0.6B, acestep-5Hz-lm-1.7B, acestep-5Hz-lm-4B
ACESTEP_LM_MODEL_PATH=acestep-5Hz-lm-1.7B
# Device selection: auto, cuda, cpu, xpu
ACESTEP_DEVICE=auto
# LM backend: vllm (faster) or pt (PyTorch native)
ACESTEP_LM_BACKEND=vllm
# ==================== LLM Initialization ====================
# Controls whether to initialize the Language Model (LLM/5Hz LM)
#
# Flow: GPU detection (always runs in full) → ACESTEP_INIT_LLM override → Model loading
# GPU optimizations (offload, quantization, batch limits) are ALWAYS applied.
# ACESTEP_INIT_LLM only overrides the "should we try to load LLM" decision.
#
# Values:
# auto (or empty) = Use GPU auto-detection result (recommended)
#   true/1/yes       = Force enable LLM after GPU detection (may cause out-of-memory on low-VRAM GPUs)
# false/0/no = Force disable LLM (pure DiT mode, faster)
#
# Examples:
# ACESTEP_INIT_LLM=auto # Let GPU detection decide (recommended)
# ACESTEP_INIT_LLM= # Same as auto
# ACESTEP_INIT_LLM=true # Force enable even on low VRAM GPU
# ACESTEP_INIT_LLM=false # Force disable for pure DiT mode
#
# When LLM is disabled, these features are unavailable:
# - Thinking mode (thinking=true)
# - Chain-of-Thought caption/language detection
# - Sample mode (generate from description)
# - Format mode (LLM-enhanced input)
#
# Default: auto (based on GPU VRAM detection)
ACESTEP_INIT_LLM=auto
# ==================== Download Settings ====================
# Preferred download source: auto, huggingface, modelscope
# ACESTEP_DOWNLOAD_SOURCE=auto
# ==================== API Server Settings ====================
# API key for authentication (optional)
# ACESTEP_API_KEY=sk-your-secret-key