Skip to content

Commit 34d13f3

Browse files
committed
Add SUMMARY_MAX_TOKENS configuration and update max_tokens usage in API calls
1 parent 0d601b4 commit 34d13f3

File tree

3 files changed

+17
-3
lines changed

3 files changed

+17
-3
lines changed

README.md

Lines changed: 5 additions & 0 deletions
Original file line numberDiff line numberDiff line change
@@ -197,6 +197,11 @@ SUMMARY_SYSTEM_PROMPT="As GLaDOS, provide a concise and analytical summary of th
197197
# Default value: 0.4
198198
SUMMARY_TEMPERATURE=0.4
199199

200+
# Summary max tokens
201+
# Sets the maximum number of tokens the LLM may use when generating a summary.
202+
# Default value: 1000
203+
SUMMARY_MAX_TOKENS=1000
204+
200205
# Redis host
201206
# Redis instance IP address or domain used for API and webpage caching which will override the default filesystem caching method
202207
# Default value: (empty)

classes/webpage-analyzer.php

Lines changed: 2 additions & 2 deletions
Original file line numberDiff line numberDiff line change
@@ -575,7 +575,7 @@ public function getSummary() {
575575
$curl_options = array(
576576
CURLOPT_CUSTOMREQUEST => 'POST',
577577
CURLOPT_POSTFIELDS => json_encode(array(
578-
'max_tokens' => 1000,
578+
'max_tokens' => SUMMARY_MAX_TOKENS,
579579
'temperature' => SUMMARY_TEMPERATURE,
580580
'model' => $gemini_model,
581581
'messages' => [
@@ -629,7 +629,7 @@ public function getSummary() {
629629
$curl_options = array(
630630
CURLOPT_CUSTOMREQUEST => 'POST',
631631
CURLOPT_POSTFIELDS => json_encode(array(
632-
'max_tokens' => 150,
632+
'max_tokens' => SUMMARY_MAX_TOKENS,
633633
'temperature' => SUMMARY_TEMPERATURE,
634634
'model' => $openai_model,
635635
'messages' => [

config.php

Lines changed: 10 additions & 1 deletion
Original file line numberDiff line numberDiff line change
@@ -11,7 +11,7 @@
1111

1212

1313
// Set defaults
14-
const UPVOTE_RSS_VERSION = '1.2.5';
14+
const UPVOTE_RSS_VERSION = '1.3.0';
1515
const DEFAULT_PLATFORM = 'lemmy';
1616
const DEFAULT_HACKER_NEWS_INSTANCE = 'news.ycombinator.com';
1717
const DEFAULT_HACKER_NEWS_COMMUNITY = 'beststories';
@@ -309,6 +309,15 @@
309309
define('SUMMARY_TEMPERATURE', $summary_temperature);
310310

311311

312+
// Summary max tokens
313+
$summary_max_tokens = $_SERVER["SUMMARY_MAX_TOKENS"] ?? $_ENV["SUMMARY_MAX_TOKENS"] ?? 1000;
314+
$summary_max_tokens = intval($summary_max_tokens);
315+
if ($summary_max_tokens < 0) {
316+
$summary_max_tokens = 1000;
317+
}
318+
define('SUMMARY_MAX_TOKENS', $summary_max_tokens);
319+
320+
312321
// Comments
313322
$include_comments = false;
314323
$comments = 0;

0 commit comments

Comments (0)