import {combineStreams, waitForEvent, readStreamAsString} from '../stream.js';
import * as assert from 'node:assert/strict';
import {test} from 'node:test';
import {EventEmitter} from 'node:events';
import {Readable} from 'node:stream';

test('waitForEvent', async (t) => {
  await t.test('waits for event to fire', async () => {
    const emitter = new EventEmitter();
    const waiter = waitForEvent(emitter, 'foo');
    emitter.emit('foo');
    // The waiter should resolve once 'foo' has been emitted.
    await waiter;
  });
});

test('readStreamAsString', async (t) => {
  await t.test('rejects on error', async () => {
    const streamError = new Error('fudge');
    // Destroy the stream with an error on the first read so that consuming it fails.
    const stream = new Readable({
      read() {
        this.destroy(streamError);
      }
    });
    try {
      await readStreamAsString(stream);
      assert.fail('expected to throw');
    } catch (err) {
      assert.equal(err, streamError);
    }
  });

  await t.test('resolves to concatenated data', async () => {
    const stream = Readable.from(['foo', 'bar']);
    const result = await readStreamAsString(stream);
    assert.equal(result, 'foobar');
  });

  await t.test('handles buffer data', async () => {
    const stream = new Readable({
      read() {
        this.push(Buffer.from('foo'));
        this.push(Buffer.from('bar'));
        this.push(null);
      }
    });
    const result = await readStreamAsString(stream);
    assert.equal(result, 'foobar');
  });
});

test('combineStreams', async (t) => {
  await t.test('works with a single stream', async () => {
    const stream = Readable.from(['foo', 'bar']);
    const combined = combineStreams([stream]);
    const chunks: string[] = [];
    combined.on('data', (chunk: Buffer) => {
      chunks.push(chunk.toString());
    });
    await waitForEvent(combined, 'end');
    assert.deepEqual(chunks, ['foo', 'bar']);
  });

  await t.test('works with multiple streams', async () => {
    const stream0 = Readable.from(['foo']);
    const stream1 = Readable.from(['bar', 'baz']);
    const combined = combineStreams([stream0, stream1]);
    const chunks: string[] = [];
    combined.on('data', (chunk: Buffer) => {
      chunks.push(chunk.toString());
    });
    await waitForEvent(combined, 'end');
    // Chunks from each input stream should arrive in order.
    assert.deepEqual(chunks, ['foo', 'bar', 'baz']);
  });
});