Merged
Changes from 6 commits
9 changes: 9 additions & 0 deletions lib/react_on_rails/helper.rb
@@ -587,6 +587,15 @@ def server_rendered_react_component(render_options)
# It doesn't make any transformation, it listens and raises error if a chunk has errors
chunk_json_result
end

result.rescue do |err|
# This error came from the renderer
raise ReactOnRails::PrerenderError.new(component_name: react_component_name,
# Sanitize as this might be browser logged
props: sanitized_props_string(props),
err: err,
js_code: js_code)
end
Comment on lines 591 to 598
🛠️ Refactor suggestion | 🟠 Major

Consider extracting duplicate error-raising logic.

The error-raising code in this rescue block (lines 591-598) duplicates the rescue block at lines 573-580. Both construct identical ReactOnRails::PrerenderError objects with the same parameters.

Extract the error construction into a helper method:

def build_prerender_error(react_component_name, props, err, js_code)
  ReactOnRails::PrerenderError.new(
    component_name: react_component_name,
    # Sanitize as this might be browser logged
    props: sanitized_props_string(props),
    err: err,
    js_code: js_code
  )
end

Then use it in both places:

     rescue StandardError => err
       # This error came from the renderer
-      raise ReactOnRails::PrerenderError.new(component_name: react_component_name,
-                                             # Sanitize as this might be browser logged
-                                             props: sanitized_props_string(props),
-                                             err: err,
-                                             js_code: js_code)
+      raise build_prerender_error(react_component_name, props, err, js_code)
     end

     if render_options.streaming?
       # ... transform block ...
       
       result.rescue do |err|
         # This error came from the renderer
-        raise ReactOnRails::PrerenderError.new(component_name: react_component_name,
-                                                 # Sanitize as this might be browser logged
-                                               props: sanitized_props_string(props),
-                                               err: err,
-                                               js_code: js_code)
+        raise build_prerender_error(react_component_name, props, err, js_code)
       end
🤖 Prompt for AI Agents
In lib/react_on_rails/helper.rb around lines 573-580 and 591-598, the rescue
blocks duplicate construction of a ReactOnRails::PrerenderError object; extract
that construction into a private helper method (e.g. build_prerender_error) that
accepts react_component_name, props, err, and js_code, returns the new
ReactOnRails::PrerenderError with sanitized props, and update both rescue
blocks to raise the error via this helper instead of inlining the object
creation.

elsif result["hasErrors"] && render_options.raise_on_prerender_error
raise_prerender_error(result, react_component_name, props, js_code)
end
57 changes: 32 additions & 25 deletions react_on_rails_pro/lib/react_on_rails_pro/request.rb
@@ -9,9 +9,7 @@ class Request # rubocop:disable Metrics/ClassLength
class << self
def reset_connection
@connection&.close
@connection_without_retries&.close
@connection = create_connection
@connection_without_retries = create_connection(enable_retries: false)
end

def render_code(path, js_code, send_bundle)
@@ -84,29 +82,17 @@ def asset_exists_on_vm_renderer?(filename)

private

# NOTE: We maintain two separate HTTP connection pools to handle streaming vs non-streaming requests.
# This doubles the memory footprint (e.g., if renderer_http_pool_size is 10, we use 20 total connections).
# This tradeoff is acceptable to prevent body duplication in streaming responses.

def connection
@connection ||= create_connection
end

def connection_without_retries
@connection_without_retries ||= create_connection(enable_retries: false)
end

def perform_request(path, **post_options) # rubocop:disable Metrics/AbcSize,Metrics/CyclomaticComplexity,Metrics/PerceivedComplexity
# For streaming requests, use connection without retries to prevent body duplication
# The StreamRequest class handles retries properly by starting fresh requests
conn = post_options[:stream] ? connection_without_retries : connection

def perform_request(path, **post_options) # rubocop:disable Metrics/AbcSize,Metrics/CyclomaticComplexity
available_retries = ReactOnRailsPro.configuration.renderer_request_retry_limit
retry_request = true
while retry_request
begin
start_time = Time.now
response = conn.post(path, **post_options)
response = connection.post(path, **post_options)
raise response.error if response.is_a?(HTTPX::ErrorResponse)

request_time = Time.now - start_time
@@ -231,20 +217,41 @@ def common_form_data
ReactOnRailsPro::Utils.common_form_data
end

def create_connection(enable_retries: true)
def create_connection
url = ReactOnRailsPro.configuration.renderer_url
Rails.logger.info do
"[ReactOnRailsPro] Setting up Node Renderer connection to #{url}"
end

http_client = HTTPX
# For persistent connections we want retries,
# so the requests don't just fail if the other side closes the connection
# https://honeyryderchuck.gitlab.io/httpx/wiki/Persistent
# However, for streaming requests, retries cause body duplication
# See https://github.com/shakacode/react_on_rails/issues/1895
http_client = http_client.plugin(:retries, max_retries: 1, retry_change_requests: true) if enable_retries
http_client
HTTPX
# For persistent connections we want retries,
# so the requests don't just fail if the other side closes the connection
# https://honeyryderchuck.gitlab.io/httpx/wiki/Persistent
.plugin(
:retries, max_retries: 1,
retry_change_requests: true,
# The official HTTPX docs say to use the retry_on option to decide whether a request should be retried.
# However, HTTPX assumes that connection errors such as timeouts should be retried by default, and it ignores the retry_on block entirely in that case.
# So we use the following trick to avoid retries when a timeout error happens while streaming a component.
# If the streamed component has already returned any chunks, the request must not be retried on errors, as that would duplicate content:
# the SSR-generated HTML would be written to the page twice.
retry_after: ->(request, response) do
if request.stream.instance_variable_get(:@react_on_rails_received_first_chunk)
e = response.error
raise ReactOnRailsPro::Error, "An error happened during server side render streaming of a component.\n" \
"Original error:\n#{e}\n#{e.backtrace}"
end

Rails.logger.info do
"[ReactOnRailsPro] An error happneding while making a request to the Node Renderer.\n" \
"Error: #{response.error}.\n" \
"Retrying by HTTPX \"retries\" plugin..."
end
# The retry_after block expects to return a delay to wait before retrying the request
# nil means no waiting delay
nil
end
)
.plugin(:stream)
# See https://www.rubydoc.info/gems/httpx/1.3.3/HTTPX%2FOptions:initialize for the available options
.with(
34 changes: 27 additions & 7 deletions react_on_rails_pro/lib/react_on_rails_pro/stream_request.rb
@@ -10,6 +10,7 @@ def initialize(component)
# @param position [Symbol] The position of the chunk in the stream (:first, :middle, or :last)
# The position parameter is used by actions that add content to the beginning or end of the stream
@actions = [] # List to store all actions
@rescue_blocks = []
end

# Add a prepend action
@@ -39,27 +40,45 @@ def append
self # Return self to allow chaining
end

def rescue(&block)
@rescue_blocks << block
self # Return self to allow chaining
end

def handle_chunk(chunk, position)
@actions.reduce(chunk) do |acc, action|
action.call(acc, position)
end
end

def each_chunk
return enum_for(:each_chunk) unless block_given?
def each_chunk(&block)
return enum_for(:each_chunk) unless block

first_chunk = true
@component.each_chunk do |chunk|
position = first_chunk ? :first : :middle
modified_chunk = handle_chunk(chunk, position)
yield modified_chunk
block.call(modified_chunk)
first_chunk = false
end

# The last chunk contains the append content after the transformation
# All transformations are applied to the append content
last_chunk = handle_chunk("", :last)
yield last_chunk unless last_chunk.empty?
block.call(last_chunk) unless last_chunk.empty?
rescue StandardError => err
current_error = err
rescue_block_index = 0
while current_error.present? && (rescue_block_index < @rescue_blocks.size)
begin
@rescue_blocks[rescue_block_index].call(current_error, &block)
current_error = nil
rescue StandardError => inner_error
current_error = inner_error
end
rescue_block_index += 1
end
raise current_error if current_error.present?
end
end

@@ -75,9 +94,6 @@ def each_chunk(&block)

send_bundle = false
error_body = +""
# Retry logic for streaming requests is handled here by starting fresh requests.
# The HTTPx connection used for streaming has retries disabled (see Request#connection_without_retries)
# to prevent body duplication when partial chunks are already sent to the client.
loop do
stream_response = @request_executor.call(send_bundle)

Expand All @@ -89,6 +105,9 @@ def each_chunk(&block)
break
rescue HTTPX::HTTPError => e
send_bundle = handle_http_error(e, error_body, send_bundle)
rescue HTTPX::ReadTimeoutError => e
raise ReactOnRailsPro::Error, "Time out error while server side render streaming a component.\n" \
"Original error:\n#{e}\n#{e.backtrace}"
end
end

@@ -135,6 +154,7 @@ def loop_response_lines(response)
line = "".b

response.each do |chunk|
response.instance_variable_set(:@react_on_rails_received_first_chunk, true)
line << chunk

while (idx = line.index("\n"))
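For orientation, here is a condensed, self-contained sketch of the guard described in the connection comments above: the streaming loop in StreamRequest#loop_response_lines marks the stream object once the first chunk arrives, and the retry_after callback configured in Request.create_connection then refuses to retry, since a retry would re-send content already streamed to the client. The Fake* classes below are illustrative stand-ins, not HTTPX's actual request/stream/response objects.

# Illustrative stand-ins for HTTPX's request/stream/response objects (not the real API).
FakeStream   = Struct.new(:chunks)
FakeRequest  = Struct.new(:stream)
FakeResponse = Struct.new(:error)

# Mirrors the guard configured in Request.create_connection: once any chunk has
# been streamed to the client, retrying would duplicate the SSR output, so raise.
retry_guard = lambda do |request, response|
  if request.stream.instance_variable_get(:@react_on_rails_received_first_chunk)
    raise "Refusing to retry mid-stream: #{response.error}"
  end

  nil # returning nil means "retry, with no extra delay"
end

stream  = FakeStream.new(["<div>chunk 1</div>"])
request = FakeRequest.new(stream)
failure = FakeResponse.new("socket closed")

# Before any chunk has arrived, the guard allows the retry.
p retry_guard.call(request, failure) # => nil

# Mirrors StreamRequest#loop_response_lines: mark the stream on the first chunk.
stream.chunks.each do |chunk|
  stream.instance_variable_set(:@react_on_rails_received_first_chunk, true)
  puts chunk
end

# After a chunk has been sent, the guard raises instead of retrying.
begin
  retry_guard.call(request, failure)
rescue RuntimeError => e
  puts e.message
end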

This file was deleted.

@@ -330,7 +330,6 @@ def response; end
def mock_request_and_response(mock_chunks = chunks, count: 1)
# Reset connection instance variables to ensure clean state for tests
ReactOnRailsPro::Request.instance_variable_set(:@connection, nil)
ReactOnRailsPro::Request.instance_variable_set(:@connection_without_retries, nil)
original_httpx_plugin = HTTPX.method(:plugin)
allow(HTTPX).to receive(:plugin) do |*args|
original_httpx_plugin.call(:mock_stream).plugin(*args)
27 changes: 0 additions & 27 deletions react_on_rails_pro/spec/react_on_rails_pro/request_spec.rb
@@ -194,32 +194,5 @@
expect(mocked_block).not_to have_received(:call)
end
end

it "does not use HTTPx retries plugin for streaming requests to prevent body duplication" do
# This test verifies the fix for https://github.com/shakacode/react_on_rails/issues/1895
# When streaming requests encounter connection errors mid-transmission, HTTPx retries
# would cause body duplication because partial chunks are already sent to the client.
# The StreamRequest class handles retries properly by starting fresh requests.

# Reset connections to ensure we're using a fresh connection
described_class.reset_connection

# Trigger a streaming request
mock_streaming_response(render_full_url, 200) do |yielder|
yielder.call("Test chunk\n")
end

stream = described_class.render_code_as_stream("/render", "console.log('test');", is_rsc_payload: false)
chunks = []
stream.each_chunk { |chunk| chunks << chunk }

# Verify that the streaming request completed successfully
expect(chunks).to eq(["Test chunk"])

# Verify that the connection_without_retries was created
# by checking that a connection was created with retries disabled
connection_without_retries = described_class.send(:connection_without_retries)
expect(connection_without_retries).to be_a(HTTPX::Session)
end
end
end
@@ -62,4 +62,93 @@
expect(chunks.last).to end_with("-end")
end
end

describe "#rescue" do
it "catches the error happens inside the component" do
allow(mock_component).to receive(:each_chunk).and_raise(StandardError.new "Fake Error")
mocked_block = mock_block

stream_decorator.rescue(&mocked_block.block)
chunks = []
expect { stream_decorator.each_chunk { |chunk| chunks << chunk } }.not_to raise_error

expect(mocked_block).to have_received(:call) do |error|
expect(error).to be_a(StandardError)
expect(error.message).to eq("Fake Error")
end
expect(chunks).to eq([])
end

it "catches the error happens inside subsequent component calls" do
allow(mock_component).to receive(:each_chunk).and_yield("Chunk1").and_raise(ArgumentError.new "Fake Error")
mocked_block = mock_block

stream_decorator.rescue(&mocked_block.block)
chunks = []
expect { stream_decorator.each_chunk { |chunk| chunks << chunk } }.not_to raise_error

expect(mocked_block).to have_received(:call) do |error|
expect(chunks).to eq(["Chunk1"])
expect(error).to be_a(ArgumentError)
expect(error.message).to eq("Fake Error")
end
expect(chunks).to eq(["Chunk1"])
end

it "can yield values to the stream" do
allow(mock_component).to receive(:each_chunk).and_yield("Chunk1").and_raise(ArgumentError.new "Fake Error")
mocked_block = mock_block

stream_decorator.rescue(&mocked_block.block)
chunks = []
expect { stream_decorator.each_chunk { |chunk| chunks << chunk } }.not_to raise_error

expect(mocked_block).to have_received(:call) do |error, &inner_block|
expect(chunks).to eq(["Chunk1"])
expect(error).to be_a(ArgumentError)
expect(error.message).to eq("Fake Error")

inner_block.call "Chunk from rescue block"
inner_block.call "Chunk2 from rescue block"
end
expect(chunks).to eq(["Chunk1", "Chunk from rescue block", "Chunk2 from rescue block"])
end

it "can convert the error into another error" do
allow(mock_component).to receive(:each_chunk).and_raise(StandardError.new "Fake Error")
mocked_block = mock_block do |error|
expect(error).to be_a(StandardError)
expect(error.message).to eq("Fake Error")
raise ArgumentError.new "Another Error"
end

stream_decorator.rescue(&mocked_block.block)
chunks = []
expect { stream_decorator.each_chunk { |chunk| chunks << chunk } }.to raise_error(ArgumentError, "Another Error")
expect(chunks).to eq([])
end

it "chains multiple rescue blocks" do
allow(mock_component).to receive(:each_chunk).and_yield("Chunk1").and_raise(StandardError.new "Fake Error")
first_rescue_block = mock_block do |error, &block|
expect(error).to be_a(StandardError)
expect(error.message).to eq("Fake Error")
block.call "Chunk from first rescue block"
raise ArgumentError.new "Another Error"
end

second_rescue_block = mock_block do |error, &block|
expect(error).to be_a(ArgumentError)
expect(error.message).to eq("Another Error")
block.call "Chunk from second rescue block"
end

stream_decorator.rescue(&first_rescue_block.block)
stream_decorator.rescue(&second_rescue_block.block)
chunks = []
expect { stream_decorator.each_chunk { |chunk| chunks << chunk } }.not_to raise_error

expect(chunks).to eq(["Chunk1", "Chunk from first rescue block", "Chunk from second rescue block"])
end
end
end
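Read together, these specs pin down the rescue contract: each registered block receives the current error, may emit replacement chunks through the block it is given, converts the error by raising, and swallows it by returning normally. The condensed stand-in below (TinyStream is illustrative, not the real decorator class) shows that behavior end to end.

# TinyStream is an illustrative stand-in mirroring the decorator's rescue/each_chunk chaining.
class TinyStream
  def initialize(&source)
    @source = source
    @rescue_blocks = []
  end

  def rescue(&block)
    @rescue_blocks << block
    self # allow chaining, as in the real decorator
  end

  def each_chunk(&block)
    @source.call(&block)
  rescue StandardError => err
    current_error = err
    @rescue_blocks.each do |rescue_block|
      break if current_error.nil?

      begin
        rescue_block.call(current_error, &block)
        current_error = nil
      rescue StandardError => inner_error
        current_error = inner_error
      end
    end
    raise current_error if current_error
  end
end

# A source that emits one chunk, then fails mid-stream.
stream = TinyStream.new do |&emit|
  emit.call "<p>partial output</p>"
  raise "renderer crashed"
end

# First block: emit fallback content, then convert the error by raising.
stream.rescue do |err, &emit|
  emit.call "<p>fallback content</p>"
  raise ArgumentError, "stream failed after partial output: #{err.message}"
end

# Second block: receives the converted ArgumentError; returning normally swallows it.
stream.rescue { |err| warn "streaming failed: #{err.message}" }

chunks = []
stream.each_chunk { |chunk| chunks << chunk }
p chunks # => ["<p>partial output</p>", "<p>fallback content</p>"]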
@@ -9,9 +9,11 @@ module MockBlockHelper
# mocked_block = mock_block
# testing_method_taking_block(&mocked_block.block)
# expect(mocked_block).to have_received(:call).with(1, 2, 3)
def mock_block(return_value: nil)
def mock_block(&block)
double("BlockMock").tap do |mock| # rubocop:disable RSpec/VerifiedDoubles
allow(mock).to receive(:call) { return_value }
allow(mock).to receive(:call) do |*args, &inner_block|
block.call(*args, &inner_block) if block
end
def mock.block
method(:call).to_proc
end
Expand Down