|
6292 | 6292 | } |
6293 | 6293 | </wpm-descriptor></code-fragment><code-fragment data-type="text/markdown" name="README"># AI Helpers |
6294 | 6294 |
|
6295 | | -The AI Helpers API provides a set of utilities to interact with the OpenAI API. It provides functions for recording audio, transcribing audio to text, and generating text responses using the OpenAI API. |
| 6295 | +The AI Helpers API provides a set of utilities to interact with the OpenAI API. It provides functions for recording audio, transcribing audio to text, and generating text responses using the OpenAI API or a LiteLLM proxy server. By default the OpenAI API is used. To switch to the LiteLLM helper, remove the `default` class from the OpenAI helper and add it to the LiteLLM helper.
6296 | 6296 |
|
6297 | 6297 |
|
6298 | 6298 | ## Import |
|
6390 | 6390 | return gptData; |
6391 | 6391 | }; |
6392 | 6392 |
|
/**
 * Extracts the assistant's message text from a chat-completion response.
 * @param {Object} gptData - Parsed JSON response from /v1/chat/completions.
 * @returns {string} Content of the first choice's message.
 */
export const getGPTContent = (gptData) => {
  const [firstChoice] = gptData.choices;
  return firstChoice.message.content;
};
| 6394 | +</code-fragment><code-fragment data-type="text/javascript+babel" name="AI Helpers LiteLLM">const LITE_LLM_BASE_URL = ''; |
| 6395 | +const API_KEY_COOKIE_NAME = 'LITE_LLM_API_KEY_PAGE_SESSION'; |
| 6396 | + |
/**
 * Returns the LiteLLM API key for this page, asking the user once via
 * prompt() and caching the answer in a path-scoped session cookie.
 * @returns {string} The API key.
 * @throws {Error} When the user dismisses the prompt without entering a key.
 */
const retrieveApiKey = () => {
  // Read a cookie by name; returns null when absent.
  const readCookie = (name) => {
    const prefix = `${name}=`;
    for (const raw of document.cookie.split(';')) {
      const entry = raw.trimStart();
      if (entry.startsWith(prefix)) {
        return decodeURIComponent(entry.slice(prefix.length));
      }
    }
    return null;
  };

  // Session cookie scoped to the current page path only.
  const writeCookie = (name, value) => {
    document.cookie = `${name}=${encodeURIComponent(value)}; path=${window.location.pathname}; SameSite=Lax`;
  };

  const cached = readCookie(API_KEY_COOKIE_NAME);
  if (cached) {
    console.log('API key loaded from cookie.');
    return cached;
  }

  const entered = prompt('Enter your API key for LiteLLM:');
  if (!entered) {
    throw new Error('API key not provided');
  }
  writeCookie(API_KEY_COOKIE_NAME, entered);
  console.log('API key saved to cookie for this page session.');
  return entered;
};
| 6431 | + |
| 6432 | + |
/**
 * Records audio from the user's microphone for a fixed duration.
 * @param {number} duration - Recording length in milliseconds.
 * @returns {Promise<Blob>} Resolves with the recorded audio as a WAV-typed Blob;
 *   rejects when microphone access is denied or unavailable.
 */
export const recordAudio = (duration) => {
  return new Promise((resolve, reject) => {
    navigator.mediaDevices.getUserMedia({ audio: true }).then((stream) => {
      const chunks = [];
      const mediaRecorder = new MediaRecorder(stream);

      mediaRecorder.ondataavailable = (event) => {
        chunks.push(event.data);
      };

      mediaRecorder.onstop = () => {
        // Release the microphone; without this the capture stays live and
        // the browser's recording indicator never turns off.
        stream.getTracks().forEach((track) => track.stop());
        resolve(new Blob(chunks, { type: 'audio/wav' }));
      };

      mediaRecorder.start();

      // Stop automatically once the requested duration has elapsed.
      setTimeout(() => {
        mediaRecorder.stop();
      }, duration);
    }).catch((error) => {
      reject(error);
    });
  });
};
| 6459 | + |
/**
 * Transcribes audio to text via the LiteLLM proxy's Whisper endpoint.
 * Records a new clip when no blob is supplied.
 * @param {number} duration - Recording length in ms (used only when recording).
 * @param {Blob|false} [audioBlob=false] - Pre-recorded audio to transcribe instead of recording.
 * @param {Function|false} [recordingEndedCallback=false] - Invoked once recording has finished, before upload.
 * @returns {Promise<string>} The transcribed text.
 * @throws {Error} When the transcription request fails (non-2xx response) or no API key is provided.
 */
export const transcribeAudio = async (duration, audioBlob = false, recordingEndedCallback = false) => {
  const apiKey = retrieveApiKey();

  let audioBlobToTranscribe = audioBlob;
  if (!audioBlobToTranscribe) {
    audioBlobToTranscribe = await recordAudio(duration);
  }

  if (recordingEndedCallback) recordingEndedCallback();

  const formData = new FormData();
  formData.append('file', audioBlobToTranscribe, 'recording.wav');
  formData.append('model', 'whisper-1');

  const whisperResponse = await fetch(`${LITE_LLM_BASE_URL}/v1/audio/transcriptions`, {
    method: 'POST',
    mode: 'cors',
    headers: { 'Authorization': `Bearer ${apiKey}` },
    body: formData,
  });
  // Surface HTTP errors instead of returning `undefined` from an error payload.
  if (!whisperResponse.ok) {
    throw new Error(`Transcription request failed: ${whisperResponse.status} ${whisperResponse.statusText}`);
  }
  const whisperData = await whisperResponse.json();

  return whisperData.text;
};
| 6484 | + |
/**
 * Sends a chat-completion request to the LiteLLM proxy.
 * @param {Object} body - Request body for /v1/chat/completions (model, messages, ...).
 * @returns {Promise<Object>} The parsed chat-completion response.
 * @throws {Error} When no API key is provided or the request fails (non-2xx response).
 */
export const sendGPTPrompt = async (body) => {
  // Use the same cookie-backed key as transcribeAudio; the previous
  // window.API_KEY prompt was copied from the OpenAI helper by mistake.
  const apiKey = retrieveApiKey();

  const options = {
    method: 'POST',
    mode: 'cors',
    headers: {
      'Accept': 'application/json',
      'Content-Type': 'application/json',
      'Authorization': `Bearer ${apiKey}`
    },
    body: JSON.stringify(body)
  };
  // Log with the bearer token redacted so the API key never reaches the console.
  console.log('LiteLLM chat completion call options:', {
    ...options,
    headers: { ...options.headers, 'Authorization': 'Bearer ***' }
  });

  const gptResponse = await fetch(`${LITE_LLM_BASE_URL}/v1/chat/completions`, options);
  // Surface HTTP errors instead of handing callers an error payload.
  if (!gptResponse.ok) {
    throw new Error(`Chat completion request failed: ${gptResponse.status} ${gptResponse.statusText}`);
  }
  const gptData = await gptResponse.json();
  return gptData;
};
| 6510 | + |
/**
 * Extracts the assistant's reply text from a chat-completion response object.
 * @param {Object} gptData - Parsed /v1/chat/completions response.
 * @returns {string} Message content of the first choice.
 */
export const getGPTContent = (gptData) => {
  const { choices: [{ message }] } = gptData;
  return message.content;
};
6394 | 6512 | </code-fragment></wpm-package></code-folder><code-folder name="Basic"><code-fragment data-type="text/markdown" name="README"># Basic |
6395 | 6513 |
|
|
18008 | 18126 | <ModelUploader /> |
18009 | 18127 | </Varv>, 1200); |
18010 | 18128 | } |
| 18129 | +</code-fragment><code-fragment auto data-type="text/javascript" name="Init Manager Concept">const initConcept = async () => { |
| 18130 | + try { |
| 18131 | + const managers = await VarvEngine.lookupInstances('ModelManager'); |
| 18132 | + if (managers.length === 0) { |
| 18133 | + await VarvEngine.getConceptFromType('ModelManager').create(null, {}); |
| 18134 | + } |
| 18135 | + } catch (e) { |
| 18136 | + // Ignore |
| 18137 | + } |
| 18138 | +}; |
| 18139 | + |
| 18140 | +VarvEngine.registerEventCallback('engineReloaded', () => { |
| 18141 | + initConcept(); |
| 18142 | +}); |
18011 | 18143 | </code-fragment></wpm-package><wpm-package id="ModelShelf" name="ModelShelf"><code-fragment data-type="wpm/descriptor"><wpm-descriptor>{ |
18012 | 18144 | "description": "A model gallery shelf for all uploaded models.", |
18013 | 18145 | "dependencies": [ |
|
0 commit comments