-
Notifications
You must be signed in to change notification settings - Fork 0
Expand file tree
/
Copy pathconfig.py
More file actions
69 lines (57 loc) · 1.62 KB
/
config.py
File metadata and controls
69 lines (57 loc) · 1.62 KB
1
2
3
4
5
6
7
8
9
10
11
12
13
14
15
16
17
18
19
20
21
22
23
24
25
26
27
28
29
30
31
32
33
34
35
36
37
38
39
40
41
42
43
44
45
46
47
48
49
50
51
52
53
54
55
56
57
58
59
60
61
62
63
64
65
66
67
68
69
"""
Configuration for DSPy language models.
"""
import os
import dspy
def configure_lm(
    provider: str = "openai",
    model: str = "gpt-4o-mini",
    api_key: str = None,
    **kwargs
):
    """
    Configure the DSPy language model and install it as the global default.

    Args:
        provider: LLM provider (e.g., 'openai', 'anthropic', 'together')
        model: Model name (e.g., 'gpt-4o-mini', 'claude-3-5-sonnet-20241022')
        api_key: Optional API key. If omitted, the underlying client falls
            back to the provider's environment variable (e.g. OPENAI_API_KEY).
        **kwargs: Additional arguments forwarded to dspy.LM
            (e.g. num_retries, timeout).

    Returns:
        Configured DSPy LM instance (also set process-wide via dspy.configure).
    """
    # The model may already be fully qualified as "provider/model";
    # otherwise, prefix it with the provider name.
    if "/" in model:
        model_string = model
    else:
        model_string = f"{provider}/{model}"

    # Bug fix: api_key was previously accepted but never used, so an
    # explicitly passed key was silently ignored. Forward it to the LM
    # client; an api_key already present in kwargs takes precedence.
    if api_key is not None:
        kwargs.setdefault("api_key", api_key)

    # Create the LM and register it as DSPy's default.
    lm = dspy.LM(model_string, **kwargs)
    dspy.configure(lm=lm)
    return lm
def get_default_lm():
    """
    Build and install the project's default language model.

    Uses OpenAI's gpt-5-mini with retry logic for rate limits.
    The OPENAI_API_KEY environment variable must be set before calling.
    """
    # Up to 5 retries smooths over transient rate-limit errors; each
    # individual request is capped at 60 seconds.
    return configure_lm(
        provider="openai",
        model="gpt-5-mini",
        num_retries=5,
        timeout=60.0,
    )
# Example per-provider configurations: the default model name and the
# environment variable expected to hold that provider's API key.
PROVIDER_CONFIGS = {
    "openai": {"model": "gpt-5-mini", "env_var": "OPENAI_API_KEY"},
    "anthropic": {"model": "claude-3-5-sonnet-20241022", "env_var": "ANTHROPIC_API_KEY"},
    "together": {"model": "meta-llama/Llama-3-70b-chat-hf", "env_var": "TOGETHER_API_KEY"},
}