
Commit 47f10c8

refactor: extract magic numbers to constants in AI providers
- Extract all magic numbers (URLs, versions, defaults, keys) to class constants
- Improve code maintainability and readability
- Make configuration values easier to update
- Applied to all AI providers: Anthropic, Microsoft, OpenAI, Google, Ollama
1 parent 6622aec commit 47f10c8
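The change is mechanical and repeats the same pattern in every provider: each inline literal (endpoint URL, default model, temperature, token limit, header name, request/response array key) becomes a typed private class constant, and call sites reference it via self::. Below is a minimal standalone sketch of that before/after shape; the class, endpoint, and model names are hypothetical and not taken from this repository, and typed class constants (private const string ...) assume PHP 8.3 or newer, which this commit's diff also relies on.

<?php

declare(strict_types=1);

// Hypothetical illustration of the refactoring pattern, not code from this repository.
// Typed class constants ("const string", "const float") require PHP >= 8.3.
final class ExampleProvider
{
    // Each value that would otherwise be a literal scattered across the class
    // is named exactly once here.
    private const string API_URL = 'https://api.example.com/v1/chat'; // hypothetical endpoint
    private const string DEFAULT_MODEL = 'example-model-1';           // hypothetical default
    private const float DEFAULT_TEMPERATURE = 0.7;
    private const string HEADER_API_KEY = 'x-api-key';

    public function __construct(
        private readonly string $apiKey,
        private readonly string $model = self::DEFAULT_MODEL,
        private readonly float $temperature = self::DEFAULT_TEMPERATURE,
    ) {
    }

    /** Builds the request pieces from the constants instead of inline literals. */
    public function buildRequest(string $prompt): array
    {
        return [
            'url' => self::API_URL,
            'headers' => [self::HEADER_API_KEY => $this->apiKey],
            'body' => [
                'model' => $this->model,
                'temperature' => $this->temperature,
                'prompt' => $prompt,
            ],
        ];
    }
}

var_dump((new ExampleProvider('secret'))->buildRequest('Explain this query plan.'));

The payoff of the pattern is that updating an endpoint, default model, or header name becomes a single-line change at the top of the class rather than a search through call sites.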

File tree

src/ai/providers/AnthropicProvider.php
src/ai/providers/GoogleProvider.php
src/ai/providers/MicrosoftProvider.php
src/ai/providers/OllamaProvider.php
src/ai/providers/OpenAiProvider.php

5 files changed: +116 −54 lines changed


src/ai/providers/AnthropicProvider.php

Lines changed: 20 additions & 9 deletions
@@ -12,13 +12,24 @@
  */
 class AnthropicProvider extends BaseAiProvider
 {
-    protected string $apiUrl = 'https://api.anthropic.com/v1/messages';
+    private const string API_URL = 'https://api.anthropic.com/v1/messages';
+    private const string DEFAULT_MODEL = 'claude-3-5-sonnet-20241022';
+    private const float DEFAULT_TEMPERATURE = 0.7;
+    private const int DEFAULT_MAX_TOKENS = 2000;
+    private const string API_VERSION = '2023-06-01';
+    private const string HEADER_API_KEY = 'x-api-key';
+    private const string HEADER_VERSION = 'anthropic-version';
+    private const string MESSAGE_ROLE_USER = 'user';
+    private const string RESPONSE_KEY_CONTENT = 'content';
+    private const string RESPONSE_KEY_TEXT = 'text';
+
+    protected string $apiUrl = self::API_URL;
 
     protected function initializeDefaults(): void
     {
-        $this->model = $this->config->getProviderSetting('anthropic', 'model', 'claude-3-5-sonnet-20241022');
-        $this->temperature = (float)$this->config->getProviderSetting('anthropic', 'temperature', 0.7);
-        $this->maxTokens = (int)$this->config->getProviderSetting('anthropic', 'max_tokens', 2000);
+        $this->model = $this->config->getProviderSetting('anthropic', 'model', self::DEFAULT_MODEL);
+        $this->temperature = (float)$this->config->getProviderSetting('anthropic', 'temperature', self::DEFAULT_TEMPERATURE);
+        $this->maxTokens = (int)$this->config->getProviderSetting('anthropic', 'max_tokens', self::DEFAULT_MAX_TOKENS);
     }
 
     public function getProviderName(): string
@@ -77,28 +88,28 @@ protected function callApi(string $systemPrompt, string $userPrompt): string
             'system' => $systemPrompt,
             'messages' => [
                 [
-                    'role' => 'user',
+                    'role' => self::MESSAGE_ROLE_USER,
                     'content' => $userPrompt,
                 ],
             ],
             'temperature' => $this->temperature,
         ];
 
         $headers = [
-            'x-api-key' => $apiKey,
-            'anthropic-version' => '2023-06-01',
+            self::HEADER_API_KEY => $apiKey,
+            self::HEADER_VERSION => self::API_VERSION,
         ];
 
         $response = $this->makeRequest($this->apiUrl, $data, $headers);
 
-        if (!isset($response['content'][0]['text'])) {
+        if (!isset($response[self::RESPONSE_KEY_CONTENT][0][self::RESPONSE_KEY_TEXT])) {
             throw new QueryException(
                 'Invalid response format from Anthropic API',
                 0
             );
         }
 
-        return (string)$response['content'][0]['text'];
+        return (string)$response[self::RESPONSE_KEY_CONTENT][0][self::RESPONSE_KEY_TEXT];
     }
 
     protected function buildSystemPrompt(string $type): string

src/ai/providers/GoogleProvider.php

Lines changed: 29 additions & 15 deletions
@@ -12,13 +12,27 @@
  */
 class GoogleProvider extends BaseAiProvider
 {
-    protected string $apiUrl = 'https://generativelanguage.googleapis.com/v1beta/models/%s:generateContent'; // v1beta is the latest supported version
+    private const string API_URL = 'https://generativelanguage.googleapis.com/v1beta/models/%s:generateContent'; // v1beta is the latest supported version
+    private const string DEFAULT_MODEL = 'gemini-pro';
+    private const float DEFAULT_TEMPERATURE = 0.7;
+    private const int DEFAULT_MAX_TOKENS = 2000;
+    private const string URL_PARAM_KEY = '?key=';
+    private const string REQUEST_KEY_CONTENTS = 'contents';
+    private const string REQUEST_KEY_PARTS = 'parts';
+    private const string REQUEST_KEY_TEXT = 'text';
+    private const string REQUEST_KEY_SYSTEM_INSTRUCTION = 'systemInstruction';
+    private const string REQUEST_KEY_GENERATION_CONFIG = 'generationConfig';
+    private const string REQUEST_KEY_MAX_OUTPUT_TOKENS = 'maxOutputTokens';
+    private const string RESPONSE_KEY_CANDIDATES = 'candidates';
+    private const string RESPONSE_KEY_CONTENT = 'content';
+
+    protected string $apiUrl = self::API_URL;
 
     protected function initializeDefaults(): void
     {
-        $this->model = $this->config->getProviderSetting('google', 'model', 'gemini-pro');
-        $this->temperature = (float)$this->config->getProviderSetting('google', 'temperature', 0.7);
-        $this->maxTokens = (int)$this->config->getProviderSetting('google', 'max_tokens', 2000);
+        $this->model = $this->config->getProviderSetting('google', 'model', self::DEFAULT_MODEL);
+        $this->temperature = (float)$this->config->getProviderSetting('google', 'temperature', self::DEFAULT_TEMPERATURE);
+        $this->maxTokens = (int)$this->config->getProviderSetting('google', 'max_tokens', self::DEFAULT_MAX_TOKENS);
     }
 
     public function getProviderName(): string
@@ -71,41 +85,41 @@ protected function callApi(string $prompt, string $systemInstruction): string
             throw new QueryException('Google API key not configured', 0);
         }
 
-        $url = sprintf($this->apiUrl, $this->model) . '?key=' . urlencode($apiKey);
+        $url = sprintf($this->apiUrl, $this->model) . self::URL_PARAM_KEY . urlencode($apiKey);
 
         $data = [
-            'contents' => [
+            self::REQUEST_KEY_CONTENTS => [
                 [
-                    'parts' => [
+                    self::REQUEST_KEY_PARTS => [
                         [
-                            'text' => $prompt,
+                            self::REQUEST_KEY_TEXT => $prompt,
                         ],
                     ],
                 ],
            ],
-            'systemInstruction' => [
-                'parts' => [
+            self::REQUEST_KEY_SYSTEM_INSTRUCTION => [
+                self::REQUEST_KEY_PARTS => [
                     [
-                        'text' => $systemInstruction,
+                        self::REQUEST_KEY_TEXT => $systemInstruction,
                     ],
                 ],
            ],
-            'generationConfig' => [
+            self::REQUEST_KEY_GENERATION_CONFIG => [
                 'temperature' => $this->temperature,
-                'maxOutputTokens' => $this->maxTokens,
+                self::REQUEST_KEY_MAX_OUTPUT_TOKENS => $this->maxTokens,
             ],
         ];
 
         $response = $this->makeRequest($url, $data);
 
-        if (!isset($response['candidates'][0]['content']['parts'][0]['text'])) {
+        if (!isset($response[self::RESPONSE_KEY_CANDIDATES][0][self::RESPONSE_KEY_CONTENT][self::REQUEST_KEY_PARTS][0][self::REQUEST_KEY_TEXT])) {
             throw new QueryException(
                 'Invalid response format from Google API',
                 0
             );
         }
 
-        return (string)$response['candidates'][0]['content']['parts'][0]['text'];
+        return (string)$response[self::RESPONSE_KEY_CANDIDATES][0][self::RESPONSE_KEY_CONTENT][self::REQUEST_KEY_PARTS][0][self::REQUEST_KEY_TEXT];
     }
 
     protected function buildSystemPrompt(string $type): string

src/ai/providers/MicrosoftProvider.php

Lines changed: 24 additions & 10 deletions
@@ -12,21 +12,35 @@
  */
 class MicrosoftProvider extends BaseAiProvider
 {
+    private const string API_VERSION = '2024-10-21'; // Latest stable version
+    private const string DEFAULT_DEPLOYMENT = 'gpt-4';
+    private const float DEFAULT_TEMPERATURE = 0.7;
+    private const int DEFAULT_MAX_TOKENS = 2000;
+    private const string URL_PATH_DEPLOYMENTS = '/openai/deployments/';
+    private const string URL_PATH_CHAT_COMPLETIONS = '/chat/completions';
+    private const string URL_PARAM_API_VERSION = 'api-version=';
+    private const string HEADER_API_KEY = 'api-key';
+    private const string MESSAGE_ROLE_SYSTEM = 'system';
+    private const string MESSAGE_ROLE_USER = 'user';
+    private const string RESPONSE_KEY_CHOICES = 'choices';
+    private const string RESPONSE_KEY_MESSAGE = 'message';
+    private const string RESPONSE_KEY_CONTENT = 'content';
+
     protected string $apiUrl = '';
-    protected string $apiVersion = '2024-10-21'; // Latest stable version
+    protected string $apiVersion = self::API_VERSION;
 
     protected function initializeDefaults(): void
     {
         $endpoint = $this->config->getProviderSetting('microsoft', 'endpoint', '');
-        $deployment = $this->config->getProviderSetting('microsoft', 'deployment', 'gpt-4');
+        $deployment = $this->config->getProviderSetting('microsoft', 'deployment', self::DEFAULT_DEPLOYMENT);
         $this->model = $deployment;
 
         if ($endpoint !== '') {
-            $this->apiUrl = rtrim($endpoint, '/') . '/openai/deployments/' . urlencode($deployment) . '/chat/completions?api-version=' . $this->apiVersion;
+            $this->apiUrl = rtrim($endpoint, '/') . self::URL_PATH_DEPLOYMENTS . urlencode($deployment) . self::URL_PATH_CHAT_COMPLETIONS . '?' . self::URL_PARAM_API_VERSION . $this->apiVersion;
         }
 
-        $this->temperature = (float)$this->config->getProviderSetting('microsoft', 'temperature', 0.7);
-        $this->maxTokens = (int)$this->config->getProviderSetting('microsoft', 'max_tokens', 2000);
+        $this->temperature = (float)$this->config->getProviderSetting('microsoft', 'temperature', self::DEFAULT_TEMPERATURE);
+        $this->maxTokens = (int)$this->config->getProviderSetting('microsoft', 'max_tokens', self::DEFAULT_MAX_TOKENS);
     }
 
     public function getProviderName(): string
@@ -86,11 +100,11 @@ protected function callApi(string $systemPrompt, string $userPrompt): string
         $data = [
             'messages' => [
                 [
-                    'role' => 'system',
+                    'role' => self::MESSAGE_ROLE_SYSTEM,
                     'content' => $systemPrompt,
                 ],
                 [
-                    'role' => 'user',
+                    'role' => self::MESSAGE_ROLE_USER,
                     'content' => $userPrompt,
                 ],
             ],
@@ -99,19 +113,19 @@ protected function callApi(string $systemPrompt, string $userPrompt): string
         ];
 
         $headers = [
-            'api-key' => $apiKey,
+            self::HEADER_API_KEY => $apiKey,
         ];
 
         $response = $this->makeRequest($this->apiUrl, $data, $headers);
 
-        if (!isset($response['choices'][0]['message']['content'])) {
+        if (!isset($response[self::RESPONSE_KEY_CHOICES][0][self::RESPONSE_KEY_MESSAGE][self::RESPONSE_KEY_CONTENT])) {
             throw new QueryException(
                 'Invalid response format from Microsoft API',
                 0
             );
         }
 
-        return (string)$response['choices'][0]['message']['content'];
+        return (string)$response[self::RESPONSE_KEY_CHOICES][0][self::RESPONSE_KEY_MESSAGE][self::RESPONSE_KEY_CONTENT];
     }
 
     protected function buildSystemPrompt(string $type): string
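Of the five files, MicrosoftProvider is the only one that assembles its URL from several of the new constants. The snippet below is a standalone sketch of what that concatenation produces, using a hypothetical Azure endpoint and deployment name; plain global constants are used here only to keep it self-contained, whereas the class above uses private class constants.

<?php

declare(strict_types=1);

// Constants mirroring the ones introduced in the diff above.
const URL_PATH_DEPLOYMENTS = '/openai/deployments/';
const URL_PATH_CHAT_COMPLETIONS = '/chat/completions';
const URL_PARAM_API_VERSION = 'api-version=';
const API_VERSION = '2024-10-21';

$endpoint = 'https://my-resource.openai.azure.com/'; // hypothetical Azure OpenAI endpoint
$deployment = 'gpt-4';                                // hypothetical deployment name

$url = rtrim($endpoint, '/') . URL_PATH_DEPLOYMENTS . urlencode($deployment)
    . URL_PATH_CHAT_COMPLETIONS . '?' . URL_PARAM_API_VERSION . API_VERSION;

// Prints:
// https://my-resource.openai.azure.com/openai/deployments/gpt-4/chat/completions?api-version=2024-10-21
echo $url, PHP_EOL;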

src/ai/providers/OllamaProvider.php

Lines changed: 22 additions & 11 deletions
@@ -12,16 +12,27 @@
  */
 class OllamaProvider extends BaseAiProvider
 {
+    private const string DEFAULT_MODEL = 'deepseek-coder:6.7b';
+    private const float DEFAULT_TEMPERATURE = 0.7;
+    private const int DEFAULT_MAX_TOKENS = 2000;
+    private const string API_PATH_GENERATE = '/api/generate';
+    private const string REQUEST_KEY_MODEL = 'model';
+    private const string REQUEST_KEY_PROMPT = 'prompt';
+    private const string REQUEST_KEY_STREAM = 'stream';
+    private const string REQUEST_KEY_OPTIONS = 'options';
+    private const string REQUEST_KEY_NUM_PREDICT = 'num_predict';
+    private const string RESPONSE_KEY_RESPONSE = 'response';
+
     protected string $apiUrl = '';
 
     protected function initializeDefaults(): void
     {
-        $this->model = $this->config->getProviderSetting('ollama', 'model', 'deepseek-coder:6.7b');
-        $this->temperature = (float)$this->config->getProviderSetting('ollama', 'temperature', 0.7);
-        $this->maxTokens = (int)$this->config->getProviderSetting('ollama', 'max_tokens', 2000);
+        $this->model = $this->config->getProviderSetting('ollama', 'model', self::DEFAULT_MODEL);
+        $this->temperature = (float)$this->config->getProviderSetting('ollama', 'temperature', self::DEFAULT_TEMPERATURE);
+        $this->maxTokens = (int)$this->config->getProviderSetting('ollama', 'max_tokens', self::DEFAULT_MAX_TOKENS);
 
         $baseUrl = $this->config->getOllamaUrl();
-        $this->apiUrl = rtrim($baseUrl, '/') . '/api/generate';
+        $this->apiUrl = rtrim($baseUrl, '/') . self::API_PATH_GENERATE;
     }
 
     public function getProviderName(): string
@@ -68,25 +79,25 @@ public function suggestOptimizations(array $analysis, array $context = []): string
     protected function callApi(string $prompt): string
     {
         $data = [
-            'model' => $this->model,
-            'prompt' => $prompt,
-            'stream' => false,
-            'options' => [
+            self::REQUEST_KEY_MODEL => $this->model,
+            self::REQUEST_KEY_PROMPT => $prompt,
+            self::REQUEST_KEY_STREAM => false,
+            self::REQUEST_KEY_OPTIONS => [
                 'temperature' => $this->temperature,
-                'num_predict' => $this->maxTokens,
+                self::REQUEST_KEY_NUM_PREDICT => $this->maxTokens,
             ],
         ];
 
         $response = $this->makeRequest($this->apiUrl, $data);
 
-        if (!isset($response['response'])) {
+        if (!isset($response[self::RESPONSE_KEY_RESPONSE])) {
             throw new QueryException(
                 'Invalid response format from Ollama API',
                 0
             );
         }
 
-        return (string)$response['response'];
+        return (string)$response[self::RESPONSE_KEY_RESPONSE];
     }
 
     protected function buildSystemPrompt(string $type): string

src/ai/providers/OpenAiProvider.php

Lines changed: 21 additions & 9 deletions
@@ -12,13 +12,25 @@
  */
 class OpenAiProvider extends BaseAiProvider
 {
-    protected string $apiUrl = 'https://api.openai.com/v1/chat/completions';
+    private const string API_URL = 'https://api.openai.com/v1/chat/completions';
+    private const string DEFAULT_MODEL = 'gpt-4o-mini';
+    private const float DEFAULT_TEMPERATURE = 0.7;
+    private const int DEFAULT_MAX_TOKENS = 2000;
+    private const string HEADER_AUTHORIZATION = 'Authorization';
+    private const string HEADER_BEARER_PREFIX = 'Bearer ';
+    private const string MESSAGE_ROLE_SYSTEM = 'system';
+    private const string MESSAGE_ROLE_USER = 'user';
+    private const string RESPONSE_KEY_CHOICES = 'choices';
+    private const string RESPONSE_KEY_MESSAGE = 'message';
+    private const string RESPONSE_KEY_CONTENT = 'content';
+
+    protected string $apiUrl = self::API_URL;
 
     protected function initializeDefaults(): void
     {
-        $this->model = $this->config->getProviderSetting('openai', 'model', 'gpt-4o-mini');
-        $this->temperature = (float)$this->config->getProviderSetting('openai', 'temperature', 0.7);
-        $this->maxTokens = (int)$this->config->getProviderSetting('openai', 'max_tokens', 2000);
+        $this->model = $this->config->getProviderSetting('openai', 'model', self::DEFAULT_MODEL);
+        $this->temperature = (float)$this->config->getProviderSetting('openai', 'temperature', self::DEFAULT_TEMPERATURE);
+        $this->maxTokens = (int)$this->config->getProviderSetting('openai', 'max_tokens', self::DEFAULT_MAX_TOKENS);
    }
 
     public function getProviderName(): string
@@ -80,11 +92,11 @@ protected function callApi(string $systemPrompt, string $userPrompt): string
             'model' => $this->model,
             'messages' => [
                 [
-                    'role' => 'system',
+                    'role' => self::MESSAGE_ROLE_SYSTEM,
                     'content' => $systemPrompt,
                 ],
                 [
-                    'role' => 'user',
+                    'role' => self::MESSAGE_ROLE_USER,
                     'content' => $userPrompt,
                 ],
             ],
@@ -93,19 +105,19 @@ protected function callApi(string $systemPrompt, string $userPrompt): string
         ];
 
         $headers = [
-            'Authorization' => "Bearer {$apiKey}",
+            self::HEADER_AUTHORIZATION => self::HEADER_BEARER_PREFIX . $apiKey,
         ];
 
         $response = $this->makeRequest($this->apiUrl, $data, $headers);
 
-        if (!isset($response['choices'][0]['message']['content'])) {
+        if (!isset($response[self::RESPONSE_KEY_CHOICES][0][self::RESPONSE_KEY_MESSAGE][self::RESPONSE_KEY_CONTENT])) {
             throw new QueryException(
                 'Invalid response format from OpenAI API',
                 0
             );
         }
 
-        return (string)$response['choices'][0]['message']['content'];
+        return (string)$response[self::RESPONSE_KEY_CHOICES][0][self::RESPONSE_KEY_MESSAGE][self::RESPONSE_KEY_CONTENT];
     }
 
     /**
