diff --git a/CHANGELOG.md b/CHANGELOG.md
index 78f397647..e50c7449f 100644
--- a/CHANGELOG.md
+++ b/CHANGELOG.md
@@ -46,6 +46,7 @@
 ### Internal

 - Unify Logs and Metrics implementations ([#2826](https://github.com/getsentry/sentry-ruby/pull/2826))
+- Unify LogEventBuffer and MetricEventBuffer logic ([#2830](https://github.com/getsentry/sentry-ruby/pull/2830))

 ## 6.2.0
diff --git a/sentry-ruby/lib/sentry/client.rb b/sentry-ruby/lib/sentry/client.rb
index 57c2f74e9..e196b8a04 100644
--- a/sentry-ruby/lib/sentry/client.rb
+++ b/sentry-ruby/lib/sentry/client.rb
@@ -107,7 +107,7 @@ def capture_event(event, scope, hint = {})
     # @return [LogEvent]
     def buffer_log_event(event, scope)
       return unless event.is_a?(LogEvent)
-      @log_event_buffer.add_event(scope.apply_to_telemetry(event))
+      @log_event_buffer.add_item(scope.apply_to_telemetry(event))
       event
     end

@@ -117,7 +117,7 @@ def buffer_log_event(event, scope)
     def buffer_metric_event(event, scope)
       return unless event.is_a?(MetricEvent)
       event = scope.apply_to_telemetry(event)
-      @metric_event_buffer.add_metric(event)
+      @metric_event_buffer.add_item(event)
       event
     end

@@ -295,104 +295,6 @@ def send_event(event, hint = nil)
       raise
     end

-    # Send an envelope with batched logs
-    # @param log_events [Array] the log events to be sent
-    # @api private
-    # @return [void]
-    def send_logs(log_events)
-      envelope = Envelope.new(
-        event_id: Sentry::Utils.uuid,
-        sent_at: Sentry.utc_now.iso8601,
-        dsn: configuration.dsn,
-        sdk: Sentry.sdk_meta
-      )
-
-      discarded_count = 0
-      envelope_items = []
-
-      if configuration.before_send_log
-        log_events.each do |log_event|
-          processed_log_event = configuration.before_send_log.call(log_event)
-
-          if processed_log_event
-            envelope_items << processed_log_event.to_h
-          else
-            discarded_count += 1
-          end
-        end
-
-        envelope_items
-      else
-        envelope_items = log_events.map(&:to_h)
-      end
-
-      envelope.add_item(
-        {
-          type: "log",
-          item_count: envelope_items.size,
-          content_type: "application/vnd.sentry.items.log+json"
-        },
-        { items: envelope_items }
-      )
-
-      send_envelope(envelope)
-
-      unless discarded_count.zero?
-        transport.record_lost_event(:before_send, "log_item", num: discarded_count)
-      end
-    end
-
-    # Send an envelope with batched metrics
-    # @param metrics [Array] the metrics to send
-    # @api private
-    # @return [void]
-    def send_metrics(metrics)
-      return if metrics.nil? || metrics.empty?
-
-      envelope = Envelope.new(
-        event_id: Sentry::Utils.uuid,
-        sent_at: Sentry.utc_now.iso8601,
-        dsn: configuration.dsn,
-        sdk: Sentry.sdk_meta
-      )
-
-      discarded_count = 0
-      envelope_items = []
-
-      if configuration.before_send_metric
-        metrics.each do |metric|
-          processed_metric = configuration.before_send_metric.call(metric)
-
-          if processed_metric
-            envelope_items << processed_metric.to_h
-          else
-            discarded_count += 1
-          end
-        end
-
-        envelope_items
-      else
-        envelope_items = metrics.map(&:to_h)
-      end
-
-      return if envelope_items.empty?
-
-      envelope.add_item(
-        {
-          type: "trace_metric",
-          item_count: envelope_items.size,
-          content_type: "application/vnd.sentry.items.trace-metric+json"
-        },
-        { items: envelope_items }
-      )
-
-      send_envelope(envelope)
-
-      unless discarded_count.zero?
-        transport.record_lost_event(:before_send, "metric", num: discarded_count)
-      end
-    end
-
     # Send an envelope directly to Sentry.
     # @param envelope [Envelope] the envelope to be sent.
     # @return [void]
diff --git a/sentry-ruby/lib/sentry/envelope/item.rb b/sentry-ruby/lib/sentry/envelope/item.rb
index 9e9f73834..3f6d5f9bb 100644
--- a/sentry-ruby/lib/sentry/envelope/item.rb
+++ b/sentry-ruby/lib/sentry/envelope/item.rb
@@ -15,7 +15,8 @@ class Envelope::Item
     # rate limits and client reports use the data_category rather than envelope item type
     def self.data_category(type)
       case type
-      when "session", "attachment", "transaction", "profile", "span", "log", "trace_metric" then type
+      when "session", "attachment", "transaction", "profile", "span", "trace_metric" then type
+      when "log" then "log_item"
       when "sessions" then "session"
       when "check_in" then "monitor"
       when "event" then "error"
diff --git a/sentry-ruby/lib/sentry/log_event_buffer.rb b/sentry-ruby/lib/sentry/log_event_buffer.rb
index 670fb4e2f..fbd3e0f81 100644
--- a/sentry-ruby/lib/sentry/log_event_buffer.rb
+++ b/sentry-ruby/lib/sentry/log_event_buffer.rb
@@ -1,6 +1,6 @@
 # frozen_string_literal: true

-require "sentry/threaded_periodic_worker"
+require "sentry/telemetry_event_buffer"

 module Sentry
   # LogEventBuffer buffers log events and sends them to Sentry in a single envelope.
@@ -8,75 +8,19 @@ module Sentry
   # This is used internally by the `Sentry::Client`.
   #
   # @!visibility private
-  class LogEventBuffer < ThreadedPeriodicWorker
-    FLUSH_INTERVAL = 5 # seconds
+  class LogEventBuffer < TelemetryEventBuffer
     DEFAULT_MAX_EVENTS = 100

-    # @!visibility private
-    attr_reader :pending_events
-
     def initialize(configuration, client)
-      super(configuration.sdk_logger, FLUSH_INTERVAL)
-
-      @client = client
-      @pending_events = []
-      @max_events = configuration.max_log_events || DEFAULT_MAX_EVENTS
-      @mutex = Mutex.new
-
-      log_debug("[Logging] Initialized buffer with max_events=#{@max_events}, flush_interval=#{FLUSH_INTERVAL}s")
-    end
-
-    def start
-      ensure_thread
-      self
-    end
-
-    def flush
-      @mutex.synchronize do
-        return if empty?
-
-        log_debug("[LogEventBuffer] flushing #{size} log events")
-
-        send_events
-      end
-
-      log_debug("[LogEventBuffer] flushed #{size} log events")
-
-      self
-    end
-    alias_method :run, :flush
-
-    def add_event(event)
-      raise ArgumentError, "expected a LogEvent, got #{event.class}" unless event.is_a?(LogEvent)
-
-      @mutex.synchronize do
-        @pending_events << event
-        send_events if size >= @max_events
-      end
-
-      self
-    end
-
-    def empty?
-      @pending_events.empty?
-    end
-
-    def size
-      @pending_events.size
-    end
-
-    def clear!
-      @pending_events.clear
-    end
-
-    private
-
-    def send_events
-      @client.send_logs(@pending_events)
-    rescue => e
-      log_debug("[LogEventBuffer] Failed to send logs: #{e.message}")
-    ensure
-      clear!
+      super(
+        configuration,
+        client,
+        event_class: LogEvent,
+        max_items: configuration.max_log_events || DEFAULT_MAX_EVENTS,
+        envelope_type: "log",
+        envelope_content_type: "application/vnd.sentry.items.log+json",
+        before_send: configuration.before_send_log
+      )
     end
   end
 end
diff --git a/sentry-ruby/lib/sentry/metric_event_buffer.rb b/sentry-ruby/lib/sentry/metric_event_buffer.rb
index 64eb42156..b3b0a4c70 100644
--- a/sentry-ruby/lib/sentry/metric_event_buffer.rb
+++ b/sentry-ruby/lib/sentry/metric_event_buffer.rb
@@ -1,6 +1,6 @@
 # frozen_string_literal: true

-require "sentry/threaded_periodic_worker"
+require "sentry/telemetry_event_buffer"

 module Sentry
   # MetricEventBuffer buffers metric events and sends them to Sentry in a single envelope.
@@ -8,71 +8,19 @@ module Sentry
   # This is used internally by the `Sentry::Client`.
   #
   # @!visibility private
-  class MetricEventBuffer < ThreadedPeriodicWorker
-    FLUSH_INTERVAL = 5 # seconds
+  class MetricEventBuffer < TelemetryEventBuffer
     DEFAULT_MAX_METRICS = 100

-    # @!visibility private
-    attr_reader :pending_metrics
-
     def initialize(configuration, client)
-      super(configuration.sdk_logger, FLUSH_INTERVAL)
-
-      @client = client
-      @pending_metrics = []
-      @max_metrics = configuration.max_metric_events || DEFAULT_MAX_METRICS
-      @mutex = Mutex.new
-
-      log_debug("[Metrics] Initialized buffer with max_metrics=#{@max_metrics}, flush_interval=#{FLUSH_INTERVAL}s")
-    end
-
-    def start
-      ensure_thread
-      self
-    end
-
-    def flush
-      @mutex.synchronize do
-        return if empty?
-
-        log_debug("[MetricEventBuffer] flushing #{size} metrics")
-
-        send_metrics
-      end
-    end
-    alias_method :run, :flush
-
-    def add_metric(metric)
-      raise ArgumentError, "expected a MetricEvent, got #{metric.class}" unless metric.is_a?(MetricEvent)
-
-      @mutex.synchronize do
-        @pending_metrics << metric
-        send_metrics if size >= @max_metrics
-      end
-
-      self
-    end
-
-    def empty?
-      @pending_metrics.empty?
-    end
-
-    def size
-      @pending_metrics.size
-    end
-
-    def clear!
-      @pending_metrics.clear
-    end
-
-    private
-
-    def send_metrics
-      @client.send_metrics(@pending_metrics)
-    rescue => e
-      log_debug("[MetricEventBuffer] Failed to send metrics: #{e.message}")
-    ensure
-      clear!
+      super(
+        configuration,
+        client,
+        event_class: MetricEvent,
+        max_items: configuration.max_metric_events || DEFAULT_MAX_METRICS,
+        envelope_type: "trace_metric",
+        envelope_content_type: "application/vnd.sentry.items.trace-metric+json",
+        before_send: configuration.before_send_metric
+      )
     end
   end
 end
diff --git a/sentry-ruby/lib/sentry/telemetry_event_buffer.rb b/sentry-ruby/lib/sentry/telemetry_event_buffer.rb
new file mode 100644
index 000000000..d2f7c5702
--- /dev/null
+++ b/sentry-ruby/lib/sentry/telemetry_event_buffer.rb
@@ -0,0 +1,128 @@
+# frozen_string_literal: true
+
+require "sentry/threaded_periodic_worker"
+require "sentry/envelope"
+
+module Sentry
+  # TelemetryEventBuffer is a base class for buffering telemetry events (logs, metrics, etc.)
+  # and sending them to Sentry in a single envelope.
+  #
+  # This is used internally by the `Sentry::Client`.
+  #
+  # @!visibility private
+  class TelemetryEventBuffer < ThreadedPeriodicWorker
+    FLUSH_INTERVAL = 5 # seconds
+
+    # @!visibility private
+    attr_reader :pending_items
+
+    def initialize(configuration, client, event_class:, max_items:, envelope_type:, envelope_content_type:, before_send:)
+      super(configuration.sdk_logger, FLUSH_INTERVAL)
+
+      @client = client
+      @dsn = configuration.dsn
+      @debug = configuration.debug
+      @event_class = event_class
+      @max_items = max_items
+      @envelope_type = envelope_type
+      @envelope_content_type = envelope_content_type
+      @before_send = before_send
+
+      @pending_items = []
+      @mutex = Mutex.new
+
+      log_debug("[#{self.class}] Initialized buffer with max_items=#{@max_items}, flush_interval=#{FLUSH_INTERVAL}s")
+    end
+
+    def start
+      ensure_thread
+      self
+    end
+
+    def flush
+      @mutex.synchronize do
+        return if empty?
+
+        log_debug("[#{self.class}] flushing #{size} #{@event_class}")
+
+        send_items
+      end
+
+      self
+    end
+    alias_method :run, :flush
+
+    def add_item(item)
+      raise ArgumentError, "expected a #{@event_class}, got #{item.class}" unless item.is_a?(@event_class)
+
+      @mutex.synchronize do
+        @pending_items << item
+        send_items if size >= @max_items
+      end
+
+      self
+    end
+
+    def empty?
+      @pending_items.empty?
+    end
+
+    def size
+      @pending_items.size
+    end
+
+    def clear!
+      @pending_items.clear
+    end
+
+    private
+
+    def send_items
+      envelope = Envelope.new(
+        event_id: Sentry::Utils.uuid,
+        sent_at: Sentry.utc_now.iso8601,
+        dsn: @dsn,
+        sdk: Sentry.sdk_meta
+      )
+
+      discarded_count = 0
+      envelope_items = []
+
+      if @before_send
+        @pending_items.each do |item|
+          processed_item = @before_send.call(item)
+
+          if processed_item
+            envelope_items << processed_item.to_h
+          else
+            discarded_count += 1
+          end
+        end
+      else
+        envelope_items = @pending_items.map(&:to_h)
+      end
+
+      unless discarded_count.zero?
+        @data_category = Sentry::Envelope::Item.data_category(@envelope_type)
+        @client.transport.record_lost_event(:before_send, @data_category, num: discarded_count)
+      end
+
+      return if envelope_items.empty?
+
+      envelope.add_item(
+        {
+          type: @envelope_type,
+          item_count: envelope_items.size,
+          content_type: @envelope_content_type
+        },
+        { items: envelope_items }
+      )
+
+      @client.send_envelope(envelope)
+    rescue => e
+      log_error("[#{self.class}] Failed to send #{@event_class}", e, debug: @debug)
+    ensure
+      clear!
+    end
+  end
+end
diff --git a/sentry-ruby/spec/sentry/client_spec.rb b/sentry-ruby/spec/sentry/client_spec.rb
index 88b73c7ec..354716d74 100644
--- a/sentry-ruby/spec/sentry/client_spec.rb
+++ b/sentry-ruby/spec/sentry/client_spec.rb
@@ -116,7 +116,7 @@
       expect(log_item).not_to be_nil
       expect(log_item.payload[:level]).to eq('info')
       expect(log_item.payload[:message]).to eq('test4')
-      expect(log_item.data_category).to eq('log')
+      expect(log_item.data_category).to eq('log_item')
     end

     it 'sends envelope with spotlight transport if enabled' do
@@ -152,7 +152,7 @@

       expect(subject.transport).to have_recorded_lost_event(:network_error, 'error')
       expect(subject.transport).to have_recorded_lost_event(:network_error, 'transaction')
-      expect(subject.transport).to have_recorded_lost_event(:network_error, 'log')
+      expect(subject.transport).to have_recorded_lost_event(:network_error, 'log_item')
     end
   end
 end
diff --git a/sentry-ruby/spec/sentry/envelope/item_spec.rb b/sentry-ruby/spec/sentry/envelope/item_spec.rb
index a4ed3c6ec..9a46d8d33 100644
--- a/sentry-ruby/spec/sentry/envelope/item_spec.rb
+++ b/sentry-ruby/spec/sentry/envelope/item_spec.rb
@@ -9,7 +9,7 @@
       ['transaction', 'transaction'],
       ['span', 'span'],
       ['profile', 'profile'],
-      ['log', 'log'],
+      ['log', 'log_item'],
       ['trace_metric', 'trace_metric'],
       ['check_in', 'monitor'],
       ['event', 'error'],
diff --git a/sentry-ruby/spec/sentry/hub_spec.rb b/sentry-ruby/spec/sentry/hub_spec.rb
index 2538f8fbc..8c41f983b 100644
--- a/sentry-ruby/spec/sentry/hub_spec.rb
+++ b/sentry-ruby/spec/sentry/hub_spec.rb
@@ -673,7 +673,7 @@
         expect(subject.current_client).to receive(:buffer_metric_event).and_call_original
         expect do
           subject.capture_metric(name: "test", type: :counter, value: 1)
-        end.to change { subject.current_client.metric_event_buffer.pending_metrics.count }.by(1)
+        end.to change { subject.current_client.metric_event_buffer.size }.by(1)
       end
     end
   end
diff --git a/sentry-ruby/spec/sentry/log_event_buffer_spec.rb b/sentry-ruby/spec/sentry/log_event_buffer_spec.rb
index 053d0d3b8..3e1e9d9e5 100644
--- a/sentry-ruby/spec/sentry/log_event_buffer_spec.rb
+++ b/sentry-ruby/spec/sentry/log_event_buffer_spec.rb
@@ -1,133 +1,17 @@
 # frozen_string_literal: true

-RSpec.describe Sentry::LogEventBuffer do
-  subject(:log_event_buffer) { described_class.new(Sentry.configuration, client) }
-
-  let(:string_io) { StringIO.new }
-  let(:logger) { ::Logger.new(string_io) }
-  let(:client) { double(Sentry::Client) }
-  let(:log_event) do
-    Sentry::LogEvent.new(
-      level: :info,
-      body: "Test message"
-    )
-  end
-
-  before do
-    perform_basic_setup do |config|
-      config.sdk_logger = logger
-      config.background_worker_threads = 0
-      config.max_log_events = max_log_events
-      config.enable_logs = true
-    end
-
-    Sentry.background_worker = Sentry::BackgroundWorker.new(Sentry.configuration)
-  end
-
-  after do
-    Sentry.background_worker = Class.new { def shutdown; end; }.new
-  end
-
-  describe "#add_event" do
-    let(:max_log_events) { 3 }
-
-    it "does nothing when there are no pending events" do
-      expect(client).not_to receive(:capture_envelope)
-
-      log_event_buffer.flush
-
-      expect(sentry_envelopes.size).to be(0)
-    end
-
-    it "does nothing when the number of events is less than max_events " do
-      expect(client).to_not receive(:send_logs)
-
-      2.times { log_event_buffer.add_event(log_event) }
-    end
-
-    it "auto-flushes pending events to the client when the number of events reaches max_events" do
-      expect(client).to receive(:send_logs)
-
-      3.times { log_event_buffer.add_event(log_event) }
-
-      expect(log_event_buffer).to be_empty
-    end
-  end
-
-  describe "multi-threaded access" do
-    let(:max_log_events) { 30 }
-
-    it "thread-safely handles concurrent access" do
-      expect(client).to receive(:send_logs).exactly(3).times
-
-      threads = 3.times.map do
-        Thread.new do
-          (20..30).to_a.sample.times { log_event_buffer.add_event(log_event) }
-        end
-      end
-
-      threads.each(&:join)
+require "support/shared_examples_for_telemetry_event_buffers"

-      log_event_buffer.flush
-
-      expect(log_event_buffer).to be_empty
-    end
-  end
-
-  describe "error handling" do
-    let(:max_log_events) { 3 }
-
-    let(:error) { Errno::ECONNREFUSED.new("Connection refused") }
-
-    context "when send_logs raises an exception" do
-      before do
-        allow(client).to receive(:send_logs).and_raise(error)
-      end
-
-      it "does not propagate exception from add_event when buffer is full" do
-        expect {
-          3.times { log_event_buffer.add_event(log_event) }
-        }.not_to raise_error
-      end
-
-      it "does not propagate exception from flush" do
-        2.times { log_event_buffer.add_event(log_event) }
-
-        expect {
-          log_event_buffer.flush
-        }.not_to raise_error
-      end
-
-      it "logs the error to sdk_logger" do
-        3.times { log_event_buffer.add_event(log_event) }
-
-        expect(string_io.string).to include("Failed to send logs")
-      end
-
-      it "clears the buffer after a failed send to avoid memory buildup" do
-        3.times { log_event_buffer.add_event(log_event) }
-
-        expect(log_event_buffer).to be_empty
-      end
-    end
-
-    context "when background thread encounters an error" do
-      let(:max_log_events) { 100 }
-
-      before do
-        allow(client).to receive(:send_logs).and_raise(error)
-      end
-
-      it "keeps the background thread alive after an error" do
-        log_event_buffer.add_event(log_event)
-        log_event_buffer.start
-
-        thread = log_event_buffer.instance_variable_get(:@thread)
-
-        expect(thread).to be_alive
-        expect { log_event_buffer.flush }.not_to raise_error
-        expect(thread).to be_alive
-      end
-    end
-  end
+RSpec.describe Sentry::LogEventBuffer do
+  subject { described_class.new(Sentry.configuration, client) }
+
+  include_examples "telemetry event buffer",
+    event_factory: -> {
+      Sentry::LogEvent.new(
+        level: :info,
+        body: "Test message"
+      )
+    },
+    max_items_config: :max_log_events,
+    enable_config: :enable_logs
 end
diff --git a/sentry-ruby/spec/sentry/metric_event_buffer_spec.rb b/sentry-ruby/spec/sentry/metric_event_buffer_spec.rb
index b0a089586..c3982c2c4 100644
--- a/sentry-ruby/spec/sentry/metric_event_buffer_spec.rb
+++ b/sentry-ruby/spec/sentry/metric_event_buffer_spec.rb
@@ -1,134 +1,18 @@
 # frozen_string_literal: true

-RSpec.describe Sentry::MetricEventBuffer do
-  subject(:metric_event_buffer) { described_class.new(Sentry.configuration, client) }
-
-  let(:string_io) { StringIO.new }
-  let(:logger) { ::Logger.new(string_io) }
-  let(:client) { double(Sentry::Client) }
-  let(:metric_event) do
-    Sentry::MetricEvent.new(
-      name: "test.metric",
-      type: :counter,
-      value: 1
-    )
-  end
-
-  before do
-    perform_basic_setup do |config|
-      config.sdk_logger = logger
-      config.background_worker_threads = 0
-      config.max_metric_events = max_metric_events
-      config.enable_metrics = true
-    end
-
-    Sentry.background_worker = Sentry::BackgroundWorker.new(Sentry.configuration)
-  end
-
-  after do
-    Sentry.background_worker = Class.new { def shutdown; end; }.new
-  end
-
-  describe "#add_metric" do
-    let(:max_metric_events) { 3 }
-
-    it "does nothing when there are no pending metrics" do
-      expect(client).not_to receive(:capture_envelope)
-
-      metric_event_buffer.flush
-
-      expect(sentry_envelopes.size).to be(0)
-    end
-
-    it "does nothing when the number of metrics is less than max_metrics" do
-      expect(client).to_not receive(:send_metrics)
-
-      2.times { metric_event_buffer.add_metric(metric_event) }
-    end
-
-    it "auto-flushes pending metrics to the client when the number of metrics reaches max_metrics" do
-      expect(client).to receive(:send_metrics)
-
-      3.times { metric_event_buffer.add_metric(metric_event) }
-
-      expect(metric_event_buffer).to be_empty
-    end
-  end
-
-  describe "multi-threaded access" do
-    let(:max_metric_events) { 30 }
-
-    it "thread-safely handles concurrent access" do
-      expect(client).to receive(:send_metrics).exactly(3).times
-
-      threads = 3.times.map do
-        Thread.new do
-          (20..30).to_a.sample.times { metric_event_buffer.add_metric(metric_event) }
-        end
-      end
-
-      threads.each(&:join)
+require "support/shared_examples_for_telemetry_event_buffers"

-      metric_event_buffer.flush
-
-      expect(metric_event_buffer).to be_empty
-    end
-  end
-
-  describe "error handling" do
-    let(:max_metric_events) { 3 }
-
-    let(:error) { Errno::ECONNREFUSED.new("Connection refused") }
-
-    context "when send_metrics raises an exception" do
-      before do
-        allow(client).to receive(:send_metrics).and_raise(error)
-      end
-
-      it "does not propagate exception from add_metric when buffer is full" do
-        expect {
-          3.times { metric_event_buffer.add_metric(metric_event) }
-        }.not_to raise_error
-      end
-
-      it "does not propagate exception from flush" do
-        2.times { metric_event_buffer.add_metric(metric_event) }
-
-        expect {
-          metric_event_buffer.flush
-        }.not_to raise_error
-      end
-
-      it "logs the error to sdk_logger" do
-        3.times { metric_event_buffer.add_metric(metric_event) }
-
-        expect(string_io.string).to include("Failed to send metrics")
-      end
-
-      it "clears the buffer after a failed send to avoid memory buildup" do
-        3.times { metric_event_buffer.add_metric(metric_event) }
-
-        expect(metric_event_buffer).to be_empty
-      end
-    end
-
-    context "when background thread encounters an error" do
-      let(:max_metric_events) { 100 }
-
-      before do
-        allow(client).to receive(:send_metrics).and_raise(error)
-      end
-
-      it "keeps the background thread alive after an error" do
-        metric_event_buffer.add_metric(metric_event)
-        metric_event_buffer.start
-
-        thread = metric_event_buffer.instance_variable_get(:@thread)
-
-        expect(thread).to be_alive
-        expect { metric_event_buffer.flush }.not_to raise_error
-        expect(thread).to be_alive
-      end
-    end
-  end
+RSpec.describe Sentry::MetricEventBuffer do
+  subject { described_class.new(Sentry.configuration, client) }
+
+  include_examples "telemetry event buffer",
+    event_factory: -> {
+      Sentry::MetricEvent.new(
+        name: "test.metric",
+        type: :counter,
+        value: 1
+      )
+    },
+    max_items_config: :max_metric_events,
+    enable_config: :enable_metrics
 end
diff --git a/sentry-ruby/spec/sentry/metrics_spec.rb b/sentry-ruby/spec/sentry/metrics_spec.rb
index 248fd628d..7dce798fd 100644
--- a/sentry-ruby/spec/sentry/metrics_spec.rb
+++ b/sentry-ruby/spec/sentry/metrics_spec.rb
@@ -293,9 +293,12 @@
   context "with before_send_metric callback" do
     it "receives MetricEvent" do
-      Sentry.configuration.before_send_metric = lambda do |metric|
-        expect(metric).to be_a(Sentry::MetricEvent)
-        metric
+      perform_basic_setup do |config|
+        config.enable_metrics = true
+        config.before_send_metric = lambda do |metric|
+          expect(metric).to be_a(Sentry::MetricEvent)
+          metric
+        end
       end

       Sentry.metrics.gauge("test.gauge", 42.5, unit: "seconds", attributes: { "foo" => "bar" })
@@ -303,9 +306,12 @@
     end

     it "allows modifying metrics before sending" do
-      Sentry.configuration.before_send_metric = lambda do |metric|
-        metric.attributes["modified"] = true
-        metric
+      perform_basic_setup do |config|
+        config.enable_metrics = true
+        config.before_send_metric = lambda do |metric|
+          metric.attributes["modified"] = true
+          metric
+        end
       end

       Sentry.metrics.count("test.counter")
@@ -317,8 +323,11 @@
     end

     it "filters out metrics when callback returns nil" do
-      Sentry.configuration.before_send_metric = lambda do |metric|
-        metric.name == "test.filtered" ? nil : metric
+      perform_basic_setup do |config|
+        config.enable_metrics = true
+        config.before_send_metric = lambda do |metric|
+          metric.name == "test.filtered" ? nil : metric
+        end
       end

       Sentry.metrics.count("test.filtered")
@@ -329,7 +338,7 @@

       expect(sentry_metrics.count).to eq(1)
       expect(sentry_metrics.first[:name]).to eq("test.allowed")
-      expect(Sentry.get_current_client.transport).to have_recorded_lost_event(:before_send, 'metric', num: 2)
+      expect(Sentry.get_current_client.transport).to have_recorded_lost_event(:before_send, 'trace_metric', num: 2)
     end
   end
 end
diff --git a/sentry-ruby/spec/support/shared_examples_for_telemetry_event_buffers.rb b/sentry-ruby/spec/support/shared_examples_for_telemetry_event_buffers.rb
new file mode 100644
index 000000000..618577ff3
--- /dev/null
+++ b/sentry-ruby/spec/support/shared_examples_for_telemetry_event_buffers.rb
@@ -0,0 +1,126 @@
+# frozen_string_literal: true
+
+RSpec.shared_examples "telemetry event buffer" do |event_factory:, max_items_config:, enable_config:|
+  let(:string_io) { StringIO.new }
+  let(:logger) { ::Logger.new(string_io) }
+  let(:client) { double(Sentry::Client) }
+  let(:event) { event_factory.call }
+
+  before do
+    perform_basic_setup do |config|
+      config.sdk_logger = logger
+      config.background_worker_threads = 0
+      config.public_send(:"#{max_items_config}=", max_items)
+      config.public_send(:"#{enable_config}=", true)
+    end
+
+    Sentry.background_worker = Sentry::BackgroundWorker.new(Sentry.configuration)
+  end
+
+  after do
+    Sentry.background_worker = Class.new { def shutdown; end; }.new
+  end
+
+  describe "#add_item" do
+    let(:max_items) { 3 }
+
+    it "does nothing when there are no pending items" do
+      expect(client).not_to receive(:capture_envelope)
+
+      subject.flush
+
+      expect(sentry_envelopes.size).to be(0)
+    end
+
+    it "does nothing when the number of items is less than max_items" do
+      expect(client).to_not receive(:send_envelope)
+
+      2.times { subject.add_item(event) }
+    end
+
+    it "auto-flushes pending items to the client when the number of items reaches max_items" do
+      expect(client).to receive(:send_envelope)
+
+      3.times { subject.add_item(event) }
+
+      expect(subject).to be_empty
+    end
+  end
+
+  describe "multi-threaded access" do
+    let(:max_items) { 30 }
+
+    it "thread-safely handles concurrent access" do
+      expect(client).to receive(:send_envelope).exactly(3).times
+
+      threads = 3.times.map do
+        Thread.new do
+          (20..30).to_a.sample.times { subject.add_item(event) }
+        end
+      end
+
+      threads.each(&:join)
+
+      subject.flush
+
+      expect(subject).to be_empty
+    end
+  end
+
+  describe "error handling" do
+    let(:max_items) { 3 }
+
+    let(:error) { Errno::ECONNREFUSED.new("Connection refused") }
+
+    context "when send_envelope raises an exception" do
+      before do
+        allow(client).to receive(:send_envelope).and_raise(error)
+      end
+
+      it "does not propagate exception from add_item when buffer is full" do
+        expect {
+          3.times { subject.add_item(event) }
+        }.not_to raise_error
+      end
+
+      it "does not propagate exception from flush" do
+        2.times { subject.add_item(event) }
+
+        expect {
+          subject.flush
+        }.not_to raise_error
+      end
+
+      it "logs the error to sdk_logger" do
+        3.times { subject.add_item(event) }
+
+        expect(string_io.string).to include("Failed to send #{event.class}")
+      end
+
+      it "clears the buffer after a failed send to avoid memory buildup" do
+        3.times { subject.add_item(event) }
+
+        expect(subject).to be_empty
+      end
+    end
+
+    context "when background thread encounters an error" do
+      let(:max_items) { 100 }
+
+      before do
+        allow(client).to receive(:send_envelope).and_raise(error)
+      end
+
+      it "keeps the background thread alive after an error" do
+        subject.add_item(event)
+        subject.start
+
+        thread = subject.instance_variable_get(:@thread)
+
+        expect(thread).to be_alive
+        expect { subject.flush }.not_to raise_error
+        expect(thread).to be_alive
+      end
+    end
+  end
+end
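
Usage note (not part of the patch): a minimal sketch of the unified buffer lifecycle the diff introduces, using Sentry::LogEventBuffer as the concrete subclass. It assumes an SDK already initialized via Sentry.init; the constructor, start, add_item, and flush calls mirror what the diff above defines.

# Illustration only, not code from this patch.
buffer = Sentry::LogEventBuffer.new(Sentry.configuration, Sentry.get_current_client)
buffer.start # spins up the ThreadedPeriodicWorker that flushes every FLUSH_INTERVAL (5s)

event = Sentry::LogEvent.new(level: :info, body: "hello from the buffer")
buffer.add_item(event) # raises ArgumentError unless the item is a LogEvent; auto-flushes at max_items

buffer.flush # packs pending items into one "log" envelope and hands it to Client#send_envelope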