Skip to content

Commit b3070e0

Browse files
committed
Migrate image generation to the unified `gen` module and add Veo video generation
- **index.mjs**: Replaced the `image` module with the new `gen` module, added Gemini generation initialization (via `googleApiKey` or `googleCredentials` + `googleProjectId`), and switched Vertex Anthropic to reuse the Google credentials/project options. - **lib/hal.mjs**: Extracted a `getKey` helper and a generic `ctx.replyWith`, added `ctx.video` and a media-group `ctx.media` reply method. - **package.json**: Bumped `@google/genai` to ^0.12.0 and `utilitas` to ^1999.1.60. - **skills/40_dream.mjs**: Added the `/fantasy` command for video generation with Google `Veo` and switched output delivery to media groups. Together these changes move image generation onto the unified `gen` interface and add video generation support.
1 parent ec65870 commit b3070e0

File tree

4 files changed

+53
-31
lines changed

4 files changed

+53
-31
lines changed

index.mjs

Lines changed: 14 additions & 7 deletions
Original file line numberDiff line numberDiff line change
@@ -1,4 +1,4 @@
1-
import { alan, bot, image, web, speech, utilitas } from 'utilitas';
1+
import { alan, bot, gen, web, speech, utilitas } from 'utilitas';
22
import * as hal from './lib/hal.mjs';
33

44
await utilitas.locate(utilitas.__(import.meta.url, 'package.json'));
@@ -25,7 +25,7 @@ const init = async (options = {}) => {
2525
...apiKey, model: options.openaiModel || '*',
2626
priority: options.openaiPriority, ...options
2727
});
28-
await image.init(apiKey);
28+
await gen.init(apiKey);
2929
await speech.init({ ...apiKey, ...speechOptions });
3030
_speech.tts = speech.tts;
3131
}
@@ -37,7 +37,6 @@ const init = async (options = {}) => {
3737
...apiKey, provider: 'GEMINI', model: options.geminiModel || '*',
3838
priority: options.geminiPriority, ...options
3939
});
40-
await image.init({ ...apiKey, provider: 'GEMINI' });
4140
if (!_speech.tts) {
4241
await speech.init({ ...apiKey, ...speechOptions });
4342
_speech.tts = speech.tts;
@@ -46,18 +45,25 @@ const init = async (options = {}) => {
4645
...apiKey, cx: options.googleCx,
4746
});
4847
}
48+
const geminiGenReady = options.googleApiKey
49+
|| (options.googleCredentials && options.googleProjectId);
50+
geminiGenReady && await gen.init({
51+
apiKey: options.googleApiKey, provider: 'GEMINI',
52+
credentials: options.googleCredentials,
53+
projectId: options.googleProjectId,
54+
});
4955
if (options.anthropicApiKey) {
5056
await alan.init({
5157
provider: 'ANTHROPIC', model: options.anthropicModel || '*',
5258
apiKey: options.anthropicApiKey,
5359
priority: options.anthropicPriority, ...options
5460
});
5561
}
56-
if (options.anthropicCredentials && options.anthropicProjectId) {
62+
if (options.googleCredentials && options.googleProjectId) {
5763
await alan.init({
5864
provider: 'VERTEX ANTHROPIC', model: options.anthropicModel || '*',
59-
credentials: options.anthropicCredentials,
60-
projectId: options.anthropicProjectId,
65+
credentials: options.googleCredentials,
66+
projectId: options.googleProjectId,
6167
priority: options.anthropicPriority, ...options
6268
});
6369
}
@@ -130,7 +136,8 @@ const init = async (options = {}) => {
130136
speech: _speech, vision,
131137
});
132138
_hal._.lang = options?.lang || 'English';
133-
_hal._.image = options?.openaiApiKey && image;
139+
_hal._.gen = options?.gen
140+
|| (options?.openaiApiKey || geminiGenReady ? gen : null);
134141
return _hal;
135142
};
136143

lib/hal.mjs

Lines changed: 18 additions & 9 deletions
Original file line numberDiff line numberDiff line change
@@ -20,6 +20,7 @@ const isMarkdownError = e => e?.description?.includes?.("can't parse entities");
2020
const getFile = async (id, op) => (await web.get(await getFileUrl(id), op)).content;
2121
const compact = (str, op) => utilitas.ensureString(str, { ...op || {}, compact: true });
2222
const compactLimit = (str, op) => compact(str, { ...op || {}, limit: 140 });
23+
const getKey = s => s?.toLowerCase?.()?.startsWith?.('http') ? 'url' : 'source';
2324
const SEARCH_LIMIT = 10;
2425

2526
const [ // https://limits.tginfo.me/en
@@ -362,18 +363,26 @@ const subconscious = [{
362363
ctx.complete = async (options) => await ctx.ok('☑️', options);
363364
ctx.json = async (obj, options) => await ctx.ok(json(obj), options);
364365
ctx.list = async (list, options) => await ctx.ok(uList(list), options);
365-
ctx.media = async (fnc, src, options) => ctx.done.push(await ctx[fnc]({
366-
[src?.toLowerCase?.()?.startsWith?.('http') ? 'url' : 'source']: src
367-
}, getExtra(ctx, options)));
368-
ctx.audio = async (sr, op) => await ctx.media('replyWithAudio', sr, op);
369-
ctx.image = async (sr, op) => await ctx.media('replyWithPhoto', sr, op);
370-
ctx.sendConfig = async (obj, options, _ctx) => await ctx.ok(utilitas.prettyJson(
371-
obj, { code: true, md: true }
372-
), options);
366+
ctx.replyWith = async (func, src, options) => ctx.done.push(
367+
await ctx[func]({ [getKey(src)]: src }, getExtra(ctx, options))
368+
);
369+
ctx.audio = async (s, o) => await ctx.replyWith('replyWithAudio', s, o);
370+
ctx.image = async (s, o) => await ctx.replyWith('replyWithPhoto', s, o);
371+
ctx.video = async (s, o) => await ctx.replyWith('replyWithVideo', s, o);
372+
ctx.media = async (srs, options) => await ctx.done.push(
373+
await ctx.replyWithMediaGroup(srs.map(x => ({
374+
type: x.type || 'photo', media: { [getKey(x.src)]: x.src },
375+
})), getExtra(ctx, options))
376+
);
377+
ctx.sendConfig = async (obj, options, _ctx) => await ctx.ok(
378+
utilitas.prettyJson(obj, { code: true, md: true }), options
379+
);
373380
ctx.speech = async (cnt, options) => {
374381
let file;
375382
if (Buffer.isBuffer(cnt)) {
376-
file = await storage.convert(cnt, { input: storage.BUFFER, expected: storage.FILE });
383+
file = await storage.convert(cnt, {
384+
input: storage.BUFFER, expected: storage.FILE,
385+
});
377386
} else if (cnt.length <= speech.OPENAI_TTS_MAX_LENGTH) {
378387
file = await utilitas.ignoreErrFunc(async () => await ctx._.speech.tts(
379388
cnt, { expected: 'file' }

package.json

Lines changed: 2 additions & 2 deletions
Original file line numberDiff line numberDiff line change
@@ -36,7 +36,7 @@
3636
"@google-cloud/speech": "^7.0.1",
3737
"@google-cloud/text-to-speech": "^6.0.1",
3838
"@google-cloud/vision": "^5.1.0",
39-
"@google/genai": "^0.10.0",
39+
"@google/genai": "^0.12.0",
4040
"@mozilla/readability": "^0.6.0",
4141
"fluent-ffmpeg": "^2.1.3",
4242
"ioredis": "^5.6.1",
@@ -51,7 +51,7 @@
5151
"pgvector": "^0.2.0",
5252
"telegraf": "^4.16.3",
5353
"tesseract.js": "^6.0.1",
54-
"utilitas": "^1999.1.54",
54+
"utilitas": "^1999.1.60",
5555
"youtube-transcript": "^1.2.1"
5656
}
5757
}

skills/40_dream.mjs

Lines changed: 19 additions & 13 deletions
Original file line numberDiff line numberDiff line change
@@ -1,29 +1,32 @@
11
import { bot } from '../index.mjs';
22

3+
const GEMINI = 'GEMINI';
4+
const types = { image: 'photo', video: 'video' };
5+
36
const action = async (ctx, next) => {
4-
let provider = '';
7+
let [provider, func] = [GEMINI, 'image'];
58
switch (ctx.cmd.cmd) {
69
case 'gptimage': provider = 'OPENAI'; break;
7-
case 'dream': case 'imagen': default: provider = 'GEMINI';
10+
case 'fantasy': func = 'video';
811
}
912
if (!ctx.cmd.args) {
1013
return await ctx.ok('Please input your prompt.');
1114
}
12-
let [objMsg, images] = [(await ctx.ok('💭'))[0], null]; //tts = null
15+
let [objMsg, output] = [(await ctx.ok('💭'))[0], null]; //tts = null
1316
try {
14-
images = await ctx._.image.generate(ctx.cmd.args, {
17+
output = (await ctx._.gen[func](ctx.cmd.args, {
1518
provider, expected: 'FILE'
16-
});
19+
})) || [];
1720
} catch (err) {
18-
return await ctx.er(err.message || 'Error generating image.',
21+
return await ctx.er(err.message || `Error generating ${func}.`,
1922
{ lastMessageId: objMsg.message_id });
2023
}
2124
await ctx.deleteMessage(objMsg.message_id);
22-
for (let image of images || []) {
23-
// tts = image.tts || '';
24-
await ctx.image(image.data, { caption: image.caption || '' });
25-
await ctx.timeout();
26-
}
25+
await ctx.media(
26+
output.map(x => ({ type: types[func], src: x.data })),
27+
{ caption: output[0]?.caption || '' }
28+
);
29+
// tts = output.tts || '';
2730
// await ctx.shouldSpeech(tts);
2831
};
2932

@@ -35,13 +38,16 @@ export const { name, run, priority, func, cmds, help } = {
3538
help: bot.lines([
3639
'¶ Use Google `Imagen` (default) or OpenAI `GPT Image` to generate images.',
3740
'Example 1: /dream a cat in a rocket',
41+
'¶ Use Google `Veo` to generate videos.',
42+
'Example 2: /fantasy two cats are kissing each other',
3843
'¶ Use `Imagen` to generate images.',
39-
'Example 2: /imagen a cat in a car',
44+
'Example 3: /imagen a cat in a car',
4045
'¶ Use `GPT Image` to generate images.',
41-
'Example: /gptimage a cat on a bike',
46+
'Example 4: /gptimage a cat on a bike',
4247
]),
4348
cmds: {
4449
dream: 'Generate images with default model: /dream `PROMPT`',
50+
fantasy: 'Generate videos with `Veo`: /fantasy `PROMPT`',
4551
imagen: 'Generate images with `Imagen`: /imagen `PROMPT`',
4652
gptimage: 'Generate images with `GPT Image`: /gptimage `PROMPT`',
4753
},

0 commit comments

Comments
 (0)