-
Notifications
You must be signed in to change notification settings - Fork 0
Expand file tree
/
Copy path env.example
More file actions
105 lines (77 loc) · 3.44 KB
/
env.example
File metadata and controls
105 lines (77 loc) · 3.44 KB
1
2
3
4
5
6
7
8
9
10
11
12
13
14
15
16
17
18
19
20
21
22
23
24
25
26
27
28
29
30
31
32
33
34
35
36
37
38
39
40
41
42
43
44
45
46
47
48
49
50
51
52
53
54
55
56
57
58
59
60
61
62
63
64
65
66
67
68
69
70
71
72
73
74
75
76
77
78
79
80
81
82
83
84
85
86
87
88
89
90
91
92
93
94
95
96
97
98
99
100
101
102
103
104
105
# Migration Accelerator Environment Configuration
# Copy this file to .env and fill in your actual credentials
# ==============================================================================
# SNOWFLAKE CONNECTION
# ==============================================================================
# Account identifier (e.g., xy12345.us-east-1 or xy12345)
SNOWFLAKE_ACCOUNT=your_account_identifier
# User credentials
SNOWFLAKE_USER=your_username
SNOWFLAKE_PASSWORD=your_password
# Database and schema context (REQUIRED - no defaults)
SNOWFLAKE_DATABASE=LVDMS
SNOWFLAKE_SCHEMA=LVDMS
# Warehouse (optional - defaults to COMPUTE_WH)
SNOWFLAKE_WAREHOUSE=COMPUTE_WH
# Role (optional - defaults to SYSADMIN)
SNOWFLAKE_ROLE=SYSADMIN
# Region (optional - only if your account requires an explicit region)
# SNOWFLAKE_REGION=us-east-1
# ==============================================================================
# DATABRICKS CONNECTION (OAuth M2M - Service Principal)
# ==============================================================================
# Databricks workspace URL
DATABRICKS_HOST=https://your-workspace.cloud.databricks.com
# OAuth Machine-to-Machine authentication
DATABRICKS_CLIENT_ID=your_client_id
DATABRICKS_CLIENT_SECRET=your_client_secret
# ==============================================================================
# UNITY CATALOG CONFIGURATION (REQUIRED - no defaults in code)
# ==============================================================================
# Unity Catalog name
UC_CATALOG=qubika_partner_solutions
# Unity Catalog schema
UC_SCHEMA=migration_accelerator
# Unity Catalog volume for raw artifacts
UC_RAW_VOLUME=snowflake_artifacts_raw
# ==============================================================================
# SECRETS CONFIGURATION
# ==============================================================================
# Databricks secrets scope name
SECRETS_SCOPE=migration-accelerator
# ==============================================================================
# LLM CONFIGURATION
# ==============================================================================
# Databricks Model Serving endpoint for translation
DBX_ENDPOINT=databricks-llama-4-maverick
# LLM parameters (optional)
# DDL_TEMPERATURE=0.1
# DDL_MAX_TOKENS=2000
# ==============================================================================
# PROCESSING CONFIGURATION
# ==============================================================================
# Batch size for artifact processing
DDL_BATCH_SIZE=8
# Output format: sql, json, or combined
DDL_OUTPUT_FORMAT=sql
# Output directory (REQUIRED - set to your Volume path)
DDL_OUTPUT_DIR=/Volumes/qubika_partner_solutions/migration_accelerator/outputs
# DBFS mount path used during local development
LOCAL_DBFS_MOUNT=./ddl_output
# ==============================================================================
# OBSERVABILITY (optional)
# ==============================================================================
# LangSmith tracing
LANGSMITH_TRACING=true
LANGSMITH_PROJECT=databricks-migration-accelerator
# LANGSMITH_ENDPOINT=https://api.smith.langchain.com
# LANGSMITH_API_KEY=your_langsmith_api_key
# Logging level
LOG_LEVEL=INFO
DDL_VERBOSE_LOGGING=true
DDL_DEBUG=false
# ==============================================================================
# DATABRICKS JOB EXECUTOR (optional)
# ==============================================================================
# Job ID to execute via the Job Executor UI
# DATABRICKS_JOB_ID=123456