Skip to content

Commit f6ea4fc

Browse files
committed
v2.2.6
1 parent 7a6cbf2 commit f6ea4fc

File tree

3 files changed

+20
-11
lines changed

3 files changed

+20
-11
lines changed

deno.json

Lines changed: 2 additions & 2 deletions
Original file line numberDiff line numberDiff line change
@@ -1,6 +1,6 @@
11
{
22
"name": "@nshiab/journalism",
3-
"version": "2.2.5",
3+
"version": "2.2.6",
44
"exports": {
55
".": "./src/index.ts",
66
"./web": "./src/web.ts"
@@ -19,7 +19,7 @@
1919
},
2020
"nodeModulesDir": "auto",
2121
"imports": {
22-
"@nshiab/journalism-ai": "jsr:@nshiab/journalism-ai@^1.2.6",
22+
"@nshiab/journalism-ai": "jsr:@nshiab/journalism-ai@^1.2.11",
2323
"@nshiab/journalism-climate": "jsr:@nshiab/journalism-climate@^1.0.4",
2424
"@nshiab/journalism-dataviz": "jsr:@nshiab/journalism-dataviz@^1.0.3",
2525
"@nshiab/journalism-extras": "jsr:@nshiab/journalism-extras@^1.0.2",

deno.lock

Lines changed: 8 additions & 9 deletions
Some generated files are not rendered by default. Learn more about customizing how changed files appear on GitHub.

llm.md

Lines changed: 10 additions & 0 deletions
Original file line numberDiff line numberDiff line change
@@ -658,6 +658,7 @@ async function askAI(
658658
thinkingBudget?: number;
659659
thinkingLevel?: "minimal" | "low" | "medium" | "high";
660660
includeThoughts?: boolean;
661+
temperature?: number;
661662
detailedResponse: true;
662663
geminiParameters?: Partial<GenerateContentParameters>;
663664
ollamaParameters?: Partial<ChatRequest>;
@@ -754,6 +755,10 @@ async function askAI(
754755
- **`options.includeThoughts`**: - If `true`, includes the AI's reasoning
755756
thoughts in the output when using a thinking budget or thinking level.
756757
Defaults to `false`.
758+
- **`options.temperature`**: - Sets the temperature for response generation,
759+
controlling the randomness of the output. A value of 0 (default) makes the
760+
output more deterministic, while higher values (e.g., 0.7) increase creativity
761+
and variability.
757762
- **`options.detailedResponse`**: - If `true`, returns an object containing both
758763
the response and metadata (tokens, cost, duration, etc.). Defaults to `false`.
759764
- **`options.geminiParameters`**: - Additional parameters to pass to the Gemini
@@ -1079,6 +1084,7 @@ async function askAI(
10791084
thinkingBudget?: number;
10801085
thinkingLevel?: "minimal" | "low" | "medium" | "high";
10811086
includeThoughts?: boolean;
1087+
temperature?: number;
10821088
detailedResponse?: false;
10831089
geminiParameters?: Partial<GenerateContentParameters>;
10841090
ollamaParameters?: Partial<ChatRequest>;
@@ -1159,6 +1165,10 @@ async function askAI(
11591165
- **`options.includeThoughts`**: - If `true`, includes the AI's reasoning
11601166
thoughts in the output when using a thinking budget or thinking level.
11611167
Defaults to `false`.
1168+
- **`options.temperature`**: - Sets the temperature for response generation,
1169+
controlling the randomness of the output. A value of 0 (default) makes the
1170+
output more deterministic, while higher values (e.g., 0.7) increase creativity
1171+
and variability.
11621172
- **`options.detailedResponse`**: - If `true`, returns an object containing both
11631173
the response and metadata (tokens, cost, duration, etc.). Defaults to `false`.
11641174
- **`options.geminiParameters`**: - Additional parameters to pass to the Gemini

0 commit comments

Comments
 (0)