Skip to content
Merged
Show file tree
Hide file tree
Changes from 3 commits
Commits
File filter

Filter by extension

Filter by extension


Conversations
Failed to load comments.
Loading
Jump to
Jump to file
Failed to load files.
Loading
Diff view
Diff view
6 changes: 5 additions & 1 deletion packages/react-on-rails-pro/package.json
Original file line number Diff line number Diff line change
Expand Up @@ -75,5 +75,9 @@
"bugs": {
"url": "https://github.com/shakacode/react_on_rails/issues"
},
"homepage": "https://github.com/shakacode/react_on_rails#readme"
"homepage": "https://github.com/shakacode/react_on_rails#readme",
"devDependencies": {
"@types/mock-fs": "^4.13.4",
"mock-fs": "^5.5.0"
}
}
59 changes: 59 additions & 0 deletions packages/react-on-rails-pro/tests/AsyncQueue.ts
Original file line number Diff line number Diff line change
@@ -0,0 +1,59 @@
import * as EventEmitter from 'node:events';

class AsyncQueue<T> {
private eventEmitter = new EventEmitter<{ data: any, end: any }>();

Check failure on line 4 in packages/react-on-rails-pro/tests/AsyncQueue.ts

View workflow job for this annotation

GitHub Actions / build

Unexpected any. Specify a different type

Check failure on line 4 in packages/react-on-rails-pro/tests/AsyncQueue.ts

View workflow job for this annotation

GitHub Actions / build

Replace `,` with `;`

Check failure on line 4 in packages/react-on-rails-pro/tests/AsyncQueue.ts

View workflow job for this annotation

GitHub Actions / build

Unexpected any. Specify a different type
private buffer: T[] = [];

Check failure on line 5 in packages/react-on-rails-pro/tests/AsyncQueue.ts

View workflow job for this annotation

GitHub Actions / build

Expected blank line between class members
private isEnded = false;

Check failure on line 6 in packages/react-on-rails-pro/tests/AsyncQueue.ts

View workflow job for this annotation

GitHub Actions / build

Expected blank line between class members

enqueue(value: T) {
if (this.isEnded) {
throw new Error("Queue Ended");

Check failure on line 10 in packages/react-on-rails-pro/tests/AsyncQueue.ts

View workflow job for this annotation

GitHub Actions / build

Replace `"Queue·Ended"` with `'Queue·Ended'`
}

if (this.eventEmitter.listenerCount('data') > 0) {
this.eventEmitter.emit('data', value);
} else {
this.buffer.push(value);
}
}

end() {
this.isEnded = true;
this.eventEmitter.emit('end');
}

dequeue() {
return new Promise<T>((resolve, reject) => {
const bufferValueIfExist = this.buffer.shift();
if (bufferValueIfExist) {
resolve(bufferValueIfExist);
} else if (this.isEnded) {
reject(new Error("Queue Ended"));

Check failure on line 31 in packages/react-on-rails-pro/tests/AsyncQueue.ts

View workflow job for this annotation

GitHub Actions / build

Replace `"Queue·Ended"` with `'Queue·Ended'`
} else {
let teardown = () => {}

Check failure on line 33 in packages/react-on-rails-pro/tests/AsyncQueue.ts

View workflow job for this annotation

GitHub Actions / build

Insert `;`
const onData = (value: T) => {
resolve(value);
teardown();
}

Check failure on line 37 in packages/react-on-rails-pro/tests/AsyncQueue.ts

View workflow job for this annotation

GitHub Actions / build

Insert `;`

Check failure on line 38 in packages/react-on-rails-pro/tests/AsyncQueue.ts

View workflow job for this annotation

GitHub Actions / build

Delete `········`
const onEnd = () => {
reject(new Error("Queue Ended"));
teardown();
}

this.eventEmitter.on('data', onData);
this.eventEmitter.on('end', onEnd);
teardown = () => {
this.eventEmitter.off('data', onData);
this.eventEmitter.off('end', onEnd);
}
}
})
}
Copy link
Contributor

Choose a reason for hiding this comment

The reason will be displayed to describe this comment to others. Learn more.

⚠️ Potential issue | 🟠 Major

Dequeue must honor falsy buffered values.
buffer.shift() can legitimately return '', 0, or false, but the current truthiness check falls through to the “queue ended” branch and rejects even though data is buffered. Compare against undefined instead so every enqueued value is delivered.

-      const bufferValueIfExist = this.buffer.shift();
-      if (bufferValueIfExist) {
-        resolve(bufferValueIfExist);
+      const bufferValue = this.buffer.shift();
+      if (bufferValue !== undefined) {
+        resolve(bufferValue);
       } else if (this.isEnded) {
         reject(new Error("Queue Ended"));
       } else {
📝 Committable suggestion

‼️ IMPORTANT
Carefully review the code before committing. Ensure that it accurately replaces the highlighted code, contains no missing lines, and has no issues with indentation. Thoroughly test & benchmark the code to ensure it meets the requirements.

Suggested change
const bufferValueIfExist = this.buffer.shift();
if (bufferValueIfExist) {
resolve(bufferValueIfExist);
} else if (this.isEnded) {
reject(new Error("Queue Ended"));
} else {
let teardown = () => {}
const onData = (value: T) => {
resolve(value);
teardown();
}
const onEnd = () => {
reject(new Error("Queue Ended"));
teardown();
}
this.eventEmitter.on('data', onData);
this.eventEmitter.on('end', onEnd);
teardown = () => {
this.eventEmitter.off('data', onData);
this.eventEmitter.off('end', onEnd);
}
}
})
}
const bufferValue = this.buffer.shift();
if (bufferValue !== undefined) {
resolve(bufferValue);
} else if (this.isEnded) {
reject(new Error("Queue Ended"));
} else {
let teardown = () => {}
const onData = (value: T) => {
resolve(value);
teardown();
}
const onEnd = () => {
reject(new Error("Queue Ended"));
teardown();
}
this.eventEmitter.on('data', onData);
this.eventEmitter.on('end', onEnd);
teardown = () => {
this.eventEmitter.off('data', onData);
this.eventEmitter.off('end', onEnd);
}
}
})
}
🤖 Prompt for AI Agents
In packages/react-on-rails-pro/tests/AsyncQueue.ts around lines 27 to 52, the
current truthiness check on the shifted buffer value causes legitimate falsy
values (0, '', false) to be treated as missing and triggers the "Queue Ended"
rejection; change the check to compare explicitly against undefined (e.g. const
bufferValue = this.buffer.shift(); if (bufferValue !== undefined) {
resolve(bufferValue); }) so that all queued values, including falsy ones, are
delivered while preserving the existing end/emit logic.


toString() {
return ""
}
}

export default AsyncQueue;
33 changes: 33 additions & 0 deletions packages/react-on-rails-pro/tests/StreamReader.ts
Original file line number Diff line number Diff line change
@@ -0,0 +1,33 @@
import { PassThrough, Readable } from 'node:stream';
import AsyncQueue from './AsyncQueue';

class StreamReader {
private asyncQueue: AsyncQueue<string>;

constructor(pipeableStream: Pick<Readable, 'pipe'>) {
this.asyncQueue = new AsyncQueue();
const decoder = new TextDecoder();

const readableStream = new PassThrough();
pipeableStream.pipe(readableStream);

readableStream.on('data', (chunk) => {
const decodedChunk = decoder.decode(chunk);
this.asyncQueue.enqueue(decodedChunk);
});

if (readableStream.closed) {
this.asyncQueue.end();
} else {
readableStream.on('end', () => {
this.asyncQueue.end();
});
}
Copy link
Contributor

Choose a reason for hiding this comment

The reason will be displayed to describe this comment to others. Learn more.

⚠️ Potential issue | 🟠 Major

Use streaming TextDecoder decoding and flush at end.
Decoding each chunk with decoder.decode(chunk) resets the decoder state, so any multi-byte character split across chunks will be corrupted or replaced. We also drop the final buffered bytes because we never flush at end. Decode with { stream: true }, flush once on end, and only then close the queue; this keeps UTF‑8 intact and preserves the tail chunk.

-    readableStream.on('data', (chunk) => {
-      const decodedChunk = decoder.decode(chunk);
-      this.asyncQueue.enqueue(decodedChunk);
-    });
-
-    if (readableStream.closed) {
-      this.asyncQueue.end();
-    } else {
-      readableStream.on('end', () => {
-        this.asyncQueue.end();
-      });
-    }
+    readableStream.on('data', (chunk) => {
+      const decodedChunk = decoder.decode(chunk, { stream: true });
+      if (decodedChunk.length > 0) {
+        this.asyncQueue.enqueue(decodedChunk);
+      }
+    });
+
+    readableStream.on('end', () => {
+      const flushed = decoder.decode();
+      if (flushed.length > 0) {
+        this.asyncQueue.enqueue(flushed);
+      }
+      this.asyncQueue.end();
+    });
📝 Committable suggestion

‼️ IMPORTANT
Carefully review the code before committing. Ensure that it accurately replaces the highlighted code, contains no missing lines, and has no issues with indentation. Thoroughly test & benchmark the code to ensure it meets the requirements.

Suggested change
readableStream.on('data', (chunk) => {
const decodedChunk = decoder.decode(chunk);
this.asyncQueue.enqueue(decodedChunk);
});
if (readableStream.closed) {
this.asyncQueue.end();
} else {
readableStream.on('end', () => {
this.asyncQueue.end();
});
}
readableStream.on('data', (chunk) => {
const decodedChunk = decoder.decode(chunk, { stream: true });
if (decodedChunk.length > 0) {
this.asyncQueue.enqueue(decodedChunk);
}
});
readableStream.on('end', () => {
const flushed = decoder.decode();
if (flushed.length > 0) {
this.asyncQueue.enqueue(flushed);
}
this.asyncQueue.end();
});
🤖 Prompt for AI Agents
In packages/react-on-rails-pro/tests/StreamReader.ts around lines 14–25, the
code decodes each chunk with decoder.decode(chunk) which resets decoder state
and breaks multi-byte characters; change to decoder.decode(chunk, { stream: true
}) for each 'data' chunk, and on both the immediate closed path and the 'end'
handler call decoder.decode() (or decoder.decode(undefined)) once to flush any
buffered bytes, enqueue the flushed string if non-empty, then call
this.asyncQueue.end().

}

nextChunk() {
return this.asyncQueue.dequeue();
}
}

export default StreamReader;
Original file line number Diff line number Diff line change
@@ -0,0 +1,141 @@
/**
* @jest-environment node
*/
/// <reference types="react/experimental" />

import * as React from 'react';
import { PassThrough, Readable, Transform } from 'node:stream';
import { text } from 'node:stream/consumers';
import { Suspense, PropsWithChildren } from 'react';

import * as path from 'path';
import * as mock from 'mock-fs';

import ReactOnRails, { RailsContextWithServerStreamingCapabilities } from '../src/ReactOnRailsRSC';
import AsyncQueue from './AsyncQueue';
import StreamReader from './StreamReader';
import removeRSCChunkStack from './utils/removeRSCChunkStack';

// Location where ReactOnRailsRSC looks up the client manifest at render time.
const manifestFileDirectory = path.resolve(__dirname, '../src')
const clientManifestPath = path.join(manifestFileDirectory, 'react-client-manifest.json');

// Back the manifest path with an in-memory file (mock-fs) holding an empty
// manifest, so no real file needs to exist on disk during the test run.
mock({
  [clientManifestPath]: JSON.stringify({
    filePathToModuleMetadata: {},
    moduleLoading: { prefix: '', crossOrigin: null },
  }),
});

// Restore the real file system once every test in this file has finished.
afterAll(() => mock.restore());

// Async server component that suspends until the test pushes the next value
// into the queue, then renders it followed by its (also suspended) children.
const AsyncQueueItem = async ({ asyncQueue, children }: PropsWithChildren<{ asyncQueue: AsyncQueue<string> }>) => {
  const dequeuedValue = await asyncQueue.dequeue();

  return (
    <>
      <p>Data: {dequeuedValue}</p>
      {children}
    </>
  );
};

const AsyncQueueContainer = ({ asyncQueue }: { asyncQueue: AsyncQueue<string> }) => {
return (
<div>
<h1>Async Queue</h1>
<Suspense fallback={<p>Loading Item1</p>}>
<AsyncQueueItem asyncQueue={asyncQueue}>
<Suspense fallback={<p>Loading Item2</p>}>
<AsyncQueueItem asyncQueue={asyncQueue}>
<Suspense fallback={<p>Loading Item3</p>}>
<AsyncQueueItem asyncQueue={asyncQueue} />
</Suspense>
</AsyncQueueItem>
</Suspense>
</AsyncQueueItem>
</Suspense>
</div>
)
}

ReactOnRails.register({ AsyncQueueContainer });

// Kicks off an RSC render of the registered AsyncQueueContainer and returns
// the resulting pipeable stream.
// NOTE(review): the railsContext below is an incomplete mock — the
// `as unknown as` cast hides that it lacks the streaming capabilities its
// type promises (e.g. getRSCPayloadStream / addPostSSRHook); any code path
// that touches those would fail at runtime. TODO: replace with a fully
// typed mock and drop the unsafe cast.
const renderComponent = (props: Record<string, unknown>) => {
  return ReactOnRails.serverRenderRSCReactComponent({
    railsContext: {
      reactClientManifestFileName: 'react-client-manifest.json',
      reactServerClientManifestFileName: 'react-server-client-manifest.json',
    } as unknown as RailsContextWithServerStreamingCapabilities,
    name: 'AsyncQueueContainer',
    renderingReturnsPromises: true,
    throwJsErrors: true,
    domNodeId: 'dom-id',
    props,
  });
}
Copy link
Contributor

Choose a reason for hiding this comment

The reason will be displayed to describe this comment to others. Learn more.

⚠️ Potential issue | 🔴 Critical

Provide a real RailsContextWithServerStreamingCapabilities implementation.
This helper reuses the same incomplete railsContext mock: no getRSCPayloadStream, no addPostSSRHook, and it still references the removed reactClientManifestFileName. The unsafe as unknown as masks the type error, but the first production call to either missing method will explode. Extract a properly typed mock (shared with the other test) that actually implements the streaming hooks—using buildServerRenderer/createReactOutput to supply a Readable payload and a no-op addPostSSRHook—and drop the deprecated manifest field. Then pass that typed object directly so the compiler enforces the contract.

🤖 Prompt for AI Agents
In packages/react-on-rails-pro/tests/concurrentRSCPayloadGeneration.rsc.test.tsx
around lines 63–74, the test is using an unsafe incomplete railsContext mock
(casting with as unknown as) that lacks getRSCPayloadStream and addPostSSRHook
and still contains the removed reactClientManifestFileName; replace this with a
real RailsContextWithServerStreamingCapabilities object: extract a properly
typed mock shared with the other test, remove the deprecated
reactClientManifestFileName field, implement getRSCPayloadStream to return a
Readable payload (use the existing buildServerRenderer/createReactOutput helpers
to produce the stream) and implement addPostSSRHook as a no-op, then pass that
typed object directly (no unsafe cast) so the compiler enforces the contract.


const createParallelRenders = (size: number) => {
const asyncQueues = new Array(size).fill(null).map(() => new AsyncQueue<string>());
const streams = asyncQueues.map((asyncQueue) => {
return renderComponent({ asyncQueue });
});
const readers = streams.map(stream => new StreamReader(stream));

const enqueue = (value: string) => asyncQueues.forEach(asyncQueues => asyncQueues.enqueue(value));

const expectNextChunk = (nextChunk: string) => Promise.all(
readers.map(async (reader) => {
const chunk = await reader.nextChunk();
expect(removeRSCChunkStack(chunk)).toEqual(removeRSCChunkStack(nextChunk));
})
);

const expectEndOfStream = () => Promise.all(
readers.map(reader => expect(reader.nextChunk()).rejects.toThrow(/Queue Ended/))
);

return { enqueue, expectNextChunk, expectEndOfStream };
}

// End-to-end check: a single RSC render streams one chunk per enqueued value,
// then 50 concurrent renders must each reproduce the exact same chunk
// sequence. Assertion budget: 8 from the single render below + 250 from the
// parallel phase (50 readers x (4 chunk checks + 1 end check)) = 258.
test('Renders concurrent rsc streams as single rsc stream', async () => {
  expect.assertions(258);
  const asyncQueue = new AsyncQueue<string>();
  const stream = renderComponent({ asyncQueue });
  const reader = new StreamReader(stream);

  const chunks: string[] = [];
  // First chunk arrives before any data is enqueued: the shell with fallbacks.
  let chunk = await reader.nextChunk()
  chunks.push(chunk);
  expect(chunk).toContain("Async Queue");
  expect(chunk).toContain("Loading Item2");
  expect(chunk).not.toContain("Random Value");

  // Each enqueued value resolves one nested boundary -> exactly one new chunk.
  asyncQueue.enqueue("Random Value1");
  chunk = await reader.nextChunk();
  chunks.push(chunk);
  expect(chunk).toContain("Random Value1");

  asyncQueue.enqueue("Random Value2");
  chunk = await reader.nextChunk();
  chunks.push(chunk);
  expect(chunk).toContain("Random Value2");

  asyncQueue.enqueue("Random Value3");
  chunk = await reader.nextChunk();
  chunks.push(chunk);
  expect(chunk).toContain("Random Value3");

  // After the third value the tree is fully revealed and the stream ends.
  await expect(reader.nextChunk()).rejects.toThrow(/Queue Ended/);

  // 50 concurrent renders, driven in lockstep, must emit the chunks captured
  // above (modulo stack traces) in the same order.
  const { enqueue, expectNextChunk, expectEndOfStream } = createParallelRenders(50);

  expect(chunks).toHaveLength(4);
  await expectNextChunk(chunks[0]!);
  enqueue("Random Value1");
  await expectNextChunk(chunks[1]!);
  enqueue("Random Value2");
  await expectNextChunk(chunks[2]!);
  enqueue("Random Value3");
  await expectNextChunk(chunks[3]!);
  await expectEndOfStream();
});
Loading
Loading