Skip to content
Merged
Show file tree
Hide file tree
Changes from all commits
Commits
File filter

Filter by extension

Filter by extension

Conversations
Failed to load comments.
Loading
Jump to
Jump to file
Failed to load files.
Loading
Diff view
Diff view
29 changes: 27 additions & 2 deletions .simplecov
Original file line number Diff line number Diff line change
Expand Up @@ -7,12 +7,30 @@ SimpleCov.configure do
enable_coverage :branch
primary_coverage :branch

# Enable nocov comments to exclude lines from coverage
nocov_token "nocov"

# Use database name in command_name for proper merging across database runs
command_name "#{ENV.fetch('TARGET_DB', 'sqlite')}-tests"

# Focus on the gem's code, not the test dummy app or other non-gem files
add_filter "/test/"
add_filter "/gemfiles/"
add_filter "/.github/"
add_filter "/bin/"

# Ignore gem scaffolding base classes (inherited by users, contain no logic)
add_filter %r{app/mailers/acidic_job/application_mailer\.rb}
add_filter %r{app/jobs/acidic_job/application_job\.rb}
add_filter %r{app/controllers/acidic_job/application_controller\.rb}

# Ignore trivial files
add_filter "version.rb"
add_filter "Rakefile"

# Ignore test utility module (testing test utilities is meta)
add_filter "testing.rb"

# Group the gem's code
add_group "Library", "lib/"
add_group "App", "app/"
Expand All @@ -31,8 +49,15 @@ SimpleCov.configure do

# Minimum coverage thresholds - fail CI if coverage drops below these
# Only enforce when running the full test suite (not during db:prepare, etc.)
#
# Rationale for thresholds:
# - 95% line / 80% branch overall: High bar to catch regressions while allowing
# some leeway for legitimately untestable code (marked with :nocov:)
# - 80% line per-file: Ensures no single file is significantly under-tested
# - 0% branch per-file: Branch coverage varies widely by file complexity;
# enforcing at the global level is sufficient
if ENV["COVERAGE_CHECK"]
minimum_coverage line: 70, branch: 40
minimum_coverage_by_file line: 0, branch: 0
minimum_coverage line: 95, branch: 80
minimum_coverage_by_file line: 80, branch: 0
end
end
2 changes: 2 additions & 0 deletions lib/acidic_job/context.rb
Original file line number Diff line number Diff line change
Expand Up @@ -21,6 +21,7 @@ def set(hash)
AcidicJob::Value.upsert_all(records, unique_by: [ :execution_id, :key ])
when :mysql2, :mysql, :trilogy
AcidicJob::Value.upsert_all(records)
# :nocov:
else
# Fallback for other adapters - try with unique_by first, fall back without
begin
Expand All @@ -32,6 +33,7 @@ def set(hash)
raise
end
end
# :nocov:
end
end
end
Expand Down
4 changes: 4 additions & 0 deletions lib/acidic_job/engine.rb
Original file line number Diff line number Diff line change
Expand Up @@ -6,11 +6,13 @@ class Engine < ::Rails::Engine

config.acidic_job = ActiveSupport::OrderedOptions.new

# :nocov:
initializer "acidic_job.config" do
config.acidic_job.each do |name, value|
AcidicJob.public_send("#{name}=", value)
end
end
# :nocov:

initializer "acidic_job.logger" do
ActiveSupport.on_load :acidic_job do
Expand All @@ -33,11 +35,13 @@ class Engine < ::Rails::Engine
Serializers::JobSerializer.instance
]

# :nocov:
# Rails 7.1+ includes a RangeSerializer, so only add ours for older versions
unless defined?(ActiveJob::Serializers::RangeSerializer)
require_relative "serializers/range_serializer"
serializers << Serializers::RangeSerializer.instance
end
# :nocov:

ActiveJob::Serializers.add_serializers(*serializers)
end
Expand Down
65 changes: 65 additions & 0 deletions test/acidic_job/arguments_test.rb
Original file line number Diff line number Diff line change
@@ -0,0 +1,65 @@
# frozen_string_literal: true

require "test_helper"

class AcidicJob::ArgumentsTest < ActiveSupport::TestCase
  # Hash key under which ActiveJob stores a serialized GlobalID string.
  GLOBALID_KEY = "_aj_globalid"

  # ============================================
  # deserialize_global_id
  # ============================================

  test "deserialize_global_id locates existing record" do
    record = Thing.create!
    payload = { GLOBALID_KEY => record.to_global_id.to_s }

    # A valid GlobalID for a persisted record resolves back to that record.
    assert_equal record, AcidicJob::Arguments.deserialize_global_id(payload)
  end

  test "deserialize_global_id returns nil for deleted record" do
    record = Thing.create!
    payload = { GLOBALID_KEY => record.to_global_id.to_s }
    record.destroy!

    # The GlobalID string is still well-formed, but the row is gone.
    assert_nil AcidicJob::Arguments.deserialize_global_id(payload)
  end

  test "deserialize_global_id returns nil for non-existent record ID" do
    # A GlobalID pointing at an ID that was never persisted.
    payload = { GLOBALID_KEY => "gid://dummy/Thing/999999" }

    assert_nil AcidicJob::Arguments.deserialize_global_id(payload)
  end

  # ============================================
  # convert_to_global_id_hash
  # ============================================

  test "convert_to_global_id_hash returns GlobalID hash for persisted record" do
    record = Thing.create!

    serialized = AcidicJob::Arguments.convert_to_global_id_hash(record)

    assert_kind_of Hash, serialized
    assert serialized.key?(GLOBALID_KEY)
    # The GlobalID URI should reference this record's class and ID.
    assert_match(/gid:\/\/.*\/Thing\/#{record.id}/, serialized[GLOBALID_KEY])
  end

  test "convert_to_global_id_hash falls back to ActiveJob serializer for new record" do
    unsaved = Thing.new # no ID yet, so a GlobalID cannot be built

    serialized = AcidicJob::Arguments.convert_to_global_id_hash(unsaved)

    # Falls through to ActiveJob::Serializers.serialize (NewRecordSerializer).
    assert_kind_of Hash, serialized
    # The exact key depends on which serializer handled the object.
    assert serialized.key?("_aj_serialized") || serialized.key?(GLOBALID_KEY)
  end
end
216 changes: 216 additions & 0 deletions test/acidic_job/context_test.rb
Original file line number Diff line number Diff line change
@@ -0,0 +1,216 @@
# frozen_string_literal: true

require "test_helper"

class AcidicJob::ContextTest < ActiveSupport::TestCase
  # Builds and persists a minimal AcidicJob::Execution to back a Context.
  # The serialized_job and single-step definition are the smallest shapes
  # that satisfy the model's validations; the step itself is never run here.
  def create_execution
    serialized_job = {
      "job_class" => "TestJob",
      "job_id" => SecureRandom.uuid,
      "arguments" => []
    }
    definition = {
      "meta" => { "version" => AcidicJob::VERSION },
      "steps" => {
        "step_1" => { "does" => "step_1", "then" => AcidicJob::FINISHED_RECOVERY_POINT }
      }
    }
    AcidicJob::Execution.create!(
      idempotency_key: SecureRandom.hex(32),
      serialized_job: serialized_job,
      definition: definition,
      recover_to: "step_1"
    )
  end

  # ============================================
  # set
  # ============================================

  test "set stores a single key-value pair" do
    execution = create_execution
    context = AcidicJob::Context.new(execution)

    context.set(foo: "bar")

    # Keys are persisted as strings on the associated Value records.
    assert_equal 1, execution.values.count
    assert_equal "bar", execution.values.find_by(key: "foo").value
  end

  test "set stores multiple key-value pairs" do
    execution = create_execution
    context = AcidicJob::Context.new(execution)

    # Values round-trip with their Ruby types (String, Integer, Array).
    context.set(foo: "bar", baz: 123, qux: [ 1, 2, 3 ])

    assert_equal 3, execution.values.count
    assert_equal "bar", execution.values.find_by(key: "foo").value
    assert_equal 123, execution.values.find_by(key: "baz").value
    assert_equal [ 1, 2, 3 ], execution.values.find_by(key: "qux").value
  end

  test "set upserts existing keys" do
    execution = create_execution
    context = AcidicJob::Context.new(execution)

    # Writing the same key twice must update in place, not create a
    # second row (Context#set upserts on [execution_id, key]).
    context.set(foo: "original")
    context.set(foo: "updated")

    assert_equal 1, execution.values.count
    assert_equal "updated", execution.values.find_by(key: "foo").value
  end

  # ============================================
  # get
  # ============================================

  test "get retrieves a single value" do
    execution = create_execution
    context = AcidicJob::Context.new(execution)
    context.set(foo: "bar")

    result = context.get(:foo)

    # get always returns an Array, even for a single key.
    assert_equal [ "bar" ], result
  end

  test "get retrieves multiple values" do
    execution = create_execution
    context = AcidicJob::Context.new(execution)
    context.set(foo: "bar", baz: 123)

    result = context.get(:foo, :baz)

    # Order is not guaranteed, so check both values are present
    assert_equal 2, result.size
    assert_includes result, "bar"
    assert_includes result, 123
  end

  test "get returns empty array for non-existent key" do
    execution = create_execution
    context = AcidicJob::Context.new(execution)

    result = context.get(:nonexistent)

    assert_equal [], result
  end

  # ============================================
  # fetch
  # ============================================

  test "fetch returns existing value" do
    execution = create_execution
    context = AcidicJob::Context.new(execution)
    context.set(foo: "existing")

    # When the key exists, the stored value wins over the default.
    result = context.fetch(:foo, "default")

    assert_equal "existing", result
  end

  test "fetch uses default when key does not exist" do
    execution = create_execution
    context = AcidicJob::Context.new(execution)

    result = context.fetch(:foo, "default")

    assert_equal "default", result
    # Should also store the default
    assert_equal "default", execution.values.find_by(key: "foo").value
  end

  test "fetch uses block when key does not exist and no default" do
    execution = create_execution
    context = AcidicJob::Context.new(execution)

    # The block receives the missing key and its result is both
    # returned and persisted, like Hash#fetch with memoization.
    result = context.fetch(:foo) { |key| "computed_#{key}" }

    assert_equal "computed_foo", result
    assert_equal "computed_foo", execution.values.find_by(key: "foo").value
  end

  # ============================================
  # []= and []
  # ============================================

  test "[]= sets a value" do
    execution = create_execution
    context = AcidicJob::Context.new(execution)

    context[:foo] = "bar"

    assert_equal "bar", execution.values.find_by(key: "foo").value
  end

  test "[] gets a value" do
    execution = create_execution
    context = AcidicJob::Context.new(execution)
    context.set(foo: "bar")

    result = context[:foo]

    # Unlike get, [] unwraps the single value rather than returning an Array.
    assert_equal "bar", result
  end

  test "[] returns nil for non-existent key" do
    execution = create_execution
    context = AcidicJob::Context.new(execution)

    result = context[:nonexistent]

    assert_nil result
  end

  # ============================================
  # Integration with workflow
  # ============================================

  # This test verifies that workflow context values persist across job retries.
  #
  # How it works:
  # 1. First execution (executions=1): set_context stores attempt=1, then raises DefaultsError
  # 2. retry_on triggers a retry, incrementing the job's `executions` counter to 2
  # 3. Second execution (executions=2): set_context stores attempt=2 (overwriting), completes successfully
  # 4. read_context runs and logs the final context values
  #
  # The assertion checks that attempt=2 because set_context ran twice (once per execution),
  # each time storing the current `executions` value. The nested data persists unchanged
  # since it was set identically in both executions.
  test "context persists across job retries" do
    # NOTE: `class` inside this block defines the job under the test class's
    # lexical scope (AcidicJob::ContextTest::ContextRetryJob), shared across runs.
    class ContextRetryJob < ActiveJob::Base
      include AcidicJob::Workflow

      retry_on DefaultsError

      def perform
        # unique_by: job_id ties both attempts to the same Execution record,
        # so the retry resumes the same workflow context.
        execute_workflow(unique_by: job_id) do |w|
          w.step :set_context
          w.step :read_context
        end
      end

      def set_context
        ctx[:attempt] = executions
        ctx[:data] = { nested: "value" }
        # Fail only on the first attempt to force exactly one retry.
        raise DefaultsError if executions == 1
      end

      def read_context
        ChaoticJob.log_to_journal!({
          "attempt" => ctx[:attempt],
          "data" => ctx[:data]
        })
      end
    end

    ContextRetryJob.perform_later
    perform_all_jobs

    entry = ChaoticJob.top_journal_entry
    # After retry, attempt=2 because set_context ran twice, storing executions each time
    assert_equal 2, entry["attempt"]
    # NOTE(review): this expects the nested hash to come back with a Symbol
    # key (:nested) — assumes the journal/context round-trip preserves symbol
    # keys rather than stringifying them; confirm against serialization.
    assert_equal "value", entry["data"][:nested]
  end
end
Loading