'use strict';

const {assert} = require('chai');
const {describe, it} = require('mocha');
// proxyquire lets us substitute the '@google/genai' module seen by the
// sample under test, so no real network/GenAI credentials are needed.
const proxyquire = require('proxyquire');

const {delay} = require('./util');

describe('live-ground-ragengine-with-txt', () => {
  it('should return text from mocked RAG session', async function () {
    // Minimal stand-in for the live session object the sample interacts
    // with: it only needs to accept content and be closable.
    const fakeSession = {
      sendClientContent: async () => {},
      close: async () => {},
    };

    // Mock GenAI client: `live.connect` delivers two asynchronous
    // server messages via the caller-supplied `onmessage` callback —
    // an intermediate chunk, then a final chunk flagged with
    // `turnComplete: true` so the sample knows the turn has ended.
    const mockClient = {
      live: {
        connect: async (opts = {}) => {
          // setImmediate defers delivery until after connect() returns,
          // mirroring the real SDK's asynchronous message flow.
          setImmediate(() =>
            opts.callbacks.onmessage({
              text: 'In December 2023, Google launched Gemini...',
              serverContent: {turnComplete: false},
            })
          );
          setImmediate(() =>
            opts.callbacks.onmessage({
              text: 'Mock final message.',
              serverContent: {turnComplete: true},
            })
          );

          return fakeSession;
        },
      },
    };

    // Load the sample with '@google/genai' replaced by our mock so the
    // constructor yields `mockClient` instead of a real client.
    const sample = proxyquire('../live/live-ground-ragengine-with-txt', {
      '@google/genai': {
        GoogleGenAI: function () {
          return mockClient;
        },
        Modality: {TEXT: 'TEXT'},
      },
    });

    // Everything is mocked, so a short timeout is sufficient; retries
    // and delay() follow the shared test-harness conventions.
    this.timeout(10000);
    this.retries(4);
    await delay(this.test);
    const output = await sample.generateLiveRagTextResponse();
    console.log('Generated output:', output);
    assert(output.length > 0);
  });
});