diff --git a/README.md b/README.md index a77eac57..b2bb05eb 100644 --- a/README.md +++ b/README.md @@ -118,6 +118,7 @@ may have unexpected consequences if applied to other TIMDEX UI apps. - `REQUEST_PERIOD` - time in minutes used along with `REQUESTS_PER_PERIOD` - `REDIRECT_REQUESTS_PER_PERIOD`- number of requests that can be made that the query string starts with our legacy redirect parameter to throttle per `REQUEST_PERIOD` - `REDIRECT_REQUEST_PERIOD`- time in minutes used along with `REDIRECT_REQUEST_PERIOD` +- `RESULTS_PER_PAGE`: The number of results to display per page. Use an even number, because the combined 'All' tab splits the page size evenly between the Primo and TIMDEX sources (each is queried for half a page). Defaults to 20 if unset. - `SENTRY_DSN`: Client key for Sentry exception logging. - `SENTRY_ENV`: Sentry environment for the application. Defaults to 'unknown' if unset. - `TACOS_SOURCE`: If set, this value is sent to TACOS (as the `sourceSystem` value) to distinguish which application diff --git a/app/controllers/search_controller.rb b/app/controllers/search_controller.rb index 47e50fcc..7df5893a 100644 --- a/app/controllers/search_controller.rb +++ b/app/controllers/search_controller.rb @@ -65,7 +65,8 @@ def load_geodata_results @errors = extract_errors(response) return unless @errors.nil? 
- @pagination = Analyzer.new(@enhanced_query, response, :timdex).pagination + hits = response.dig(:data, 'search', 'hits') || 0 + @pagination = Analyzer.new(@enhanced_query, hits, :timdex).pagination raw_results = extract_results(response) @results = NormalizeTimdexResults.new(raw_results, @enhanced_query[:q]).normalize @filters = extract_filters(response) @@ -87,43 +88,58 @@ def load_timdex_results end def load_all_results - # Parallel fetching from both APIs - primo_thread = Thread.new { fetch_primo_data } - timdex_thread = Thread.new { fetch_timdex_data } + # Fetch results from both APIs in parallel + primo_data, timdex_data = fetch_all_data - # Wait for both threads to complete - primo_data = primo_thread.value - timdex_data = timdex_thread.value - - # Collect any errors from either API - all_errors = [] - all_errors.concat(primo_data[:errors]) if primo_data[:errors] - all_errors.concat(timdex_data[:errors]) if timdex_data[:errors] - @errors = all_errors.any? ? all_errors : nil + # Combine errors from both APIs + @errors = combine_errors(primo_data[:errors], timdex_data[:errors]) # Zipper merge results from both APIs - primo_results = primo_data[:results] || [] - timdex_results = timdex_data[:results] || [] - @results = primo_results.zip(timdex_results).flatten.compact + @results = merge_results(primo_data[:results], timdex_data[:results]) - # For now, just use primo pagination as a placeholder - @pagination = primo_data[:pagination] || {} + # Use Analyzer for combined pagination calculation + @pagination = Analyzer.new(@enhanced_query, timdex_data[:hits], :all, + primo_data[:hits]).pagination # Handle primo continuation for high page numbers @show_primo_continuation = primo_data[:show_continuation] || false end + def fetch_all_data + # Parallel fetching from both APIs + primo_thread = Thread.new { fetch_primo_data } + timdex_thread = Thread.new { fetch_timdex_data } + + [primo_thread.value, timdex_thread.value] + end + + def combine_errors(*error_arrays) + 
all_errors = error_arrays.compact.flatten + all_errors.any? ? all_errors : nil + end + + def merge_results(primo_results, timdex_results) + (primo_results || []).zip(timdex_results || []).flatten.compact + end + def fetch_primo_data current_page = @enhanced_query[:page] || 1 - per_page = @enhanced_query[:per_page] || 20 + per_page = if @active_tab == 'all' + ENV.fetch('RESULTS_PER_PAGE', '20').to_i / 2 + else + ENV.fetch('RESULTS_PER_PAGE', '20').to_i + end offset = (current_page - 1) * per_page # Check if we're beyond Primo API limits before making the request. - return { results: [], pagination: {}, errors: nil, show_continuation: true } if offset >= Analyzer::PRIMO_MAX_OFFSET + if offset >= Analyzer::PRIMO_MAX_OFFSET + return { results: [], pagination: {}, errors: nil, show_continuation: true, hits: 0 } + end - primo_response = query_primo + primo_response = query_primo(per_page, offset) + hits = primo_response.dig('info', 'total') || 0 results = NormalizePrimoResults.new(primo_response, @enhanced_query[:q]).normalize - pagination = Analyzer.new(@enhanced_query, hits, :primo).pagination # Handle empty results from Primo API. Sometimes Primo will return no results at a given offset, # despite claiming in the initial query that more are available. 
This happens randomly and @@ -142,23 +158,37 @@ def fetch_primo_data end end - { results: results, pagination: pagination, errors: errors, show_continuation: show_continuation } + { results: results, pagination: pagination, errors: errors, show_continuation: show_continuation, + hits: hits } rescue StandardError => e - { results: [], pagination: {}, errors: handle_primo_errors(e), show_continuation: false } + { results: [], pagination: {}, errors: handle_primo_errors(e), show_continuation: false, hits: 0 } end def fetch_timdex_data - query = QueryBuilder.new(@enhanced_query).query + # For all tab, modify query to use half page size + if @active_tab == 'all' + per_page = ENV.fetch('RESULTS_PER_PAGE', '20').to_i / 2 + page = @enhanced_query[:page] || 1 + from_offset = ((page - 1) * per_page).to_s + + query_builder = QueryBuilder.new(@enhanced_query) + query = query_builder.query + query['from'] = from_offset + else + query = QueryBuilder.new(@enhanced_query).query + end + response = query_timdex(query) errors = extract_errors(response) if errors.nil? - pagination = Analyzer.new(@enhanced_query, response, :timdex).pagination + hits = response.dig(:data, 'search', 'hits') || 0 + pagination = Analyzer.new(@enhanced_query, hits, :timdex).pagination raw_results = extract_results(response) results = NormalizeTimdexResults.new(raw_results, @enhanced_query[:q]).normalize - { results: results, pagination: pagination, errors: nil } + { results: results, pagination: pagination, errors: nil, hits: hits } else - { results: [], pagination: {}, errors: errors } + { results: [], pagination: {}, errors: errors, hits: 0 } end end @@ -191,16 +221,12 @@ def query_timdex(query) end end - def query_primo + def query_primo(per_page, offset) # We generate unique cache keys to avoid naming collisions. 
cache_key = generate_cache_key(@enhanced_query) Rails.cache.fetch("#{cache_key}/primo", expires_in: 12.hours) do primo_search = PrimoSearch.new - per_page = @enhanced_query[:per_page] || 20 - current_page = @enhanced_query[:page] || 1 - offset = (current_page - 1) * per_page - primo_search.search(@enhanced_query[:q], per_page, offset) end end diff --git a/app/models/analyzer.rb b/app/models/analyzer.rb index f5d2a774..758bc0bd 100644 --- a/app/models/analyzer.rb +++ b/app/models/analyzer.rb @@ -1,54 +1,58 @@ class Analyzer attr_accessor :pagination - RESULTS_PER_PAGE = 20 - # Primo API theoretical maximum recommended offset is 2000 records (per Ex Libris documentation) # but in practice, the API often can't deliver results beyond ~960 records for large result sets, # likely due to performance constraints. PRIMO_MAX_OFFSET = 960 - def initialize(enhanced_query, response, source) + # Initializes pagination analysis for search results. + # + # @param enhanced_query [Hash] Query parameters including :page (current page number) + # @param hits [Integer] Number of hits from primary source (TIMDEX for :all, source-specific otherwise) + # @param source [Symbol] Source tab (:primo, :timdex, or :all) + # @param secondary_hits [Integer, nil] Optional hit count from secondary source (Primo hits for :all) + def initialize(enhanced_query, hits, source, secondary_hits = nil) @source = source + @enhanced_query = enhanced_query @pagination = {} - @pagination[:hits] = hits(response) - @pagination[:start] = ((enhanced_query[:page] - 1) * RESULTS_PER_PAGE) + 1 - @pagination[:end] = [enhanced_query[:page] * RESULTS_PER_PAGE, @pagination[:hits]].min - @pagination[:prev] = enhanced_query[:page] - 1 if enhanced_query[:page] > 1 - @pagination[:next] = next_page(enhanced_query[:page], @pagination[:hits]) if next_page( - enhanced_query[:page], @pagination[:hits] - ) - @pagination[:per_page] = RESULTS_PER_PAGE + set_pagination(hits, secondary_hits) end private - def hits(response) - return 0 
if response.nil? - - if @source == :primo - primo_hits(response) - elsif @source == :timdex - timdex_hits(response) + # Sets the pagination hash with hit counts and per_page values. + # + # @param hits [Integer] Hit count from primary source + # @param secondary_hits [Integer, nil] Optional hit count from secondary source + def set_pagination(hits, secondary_hits = nil) + if @source == :all + @pagination[:hits] = (secondary_hits || 0) + (hits || 0) + @pagination[:per_page] = ENV.fetch('RESULTS_PER_PAGE', '20').to_i + calculate_pagination_values else - 0 + @pagination[:hits] = hits || 0 + @pagination[:per_page] = ENV.fetch('RESULTS_PER_PAGE', '20').to_i + calculate_pagination_values end end - def primo_hits(response) - return 0 unless response.is_a?(Hash) - - response.dig('info', 'total') || 0 - end - - def timdex_hits(response) - return 0 unless response.is_a?(Hash) && response.key?(:data) && response[:data].key?('search') - return 0 unless response[:data]['search'].is_a?(Hash) && response[:data]['search'].key?('hits') - - response[:data]['search']['hits'] + # Calculates and sets pagination navigation values (start, end, prev, next). + # Uses the already-set @pagination[:hits] and @pagination[:per_page] values. + def calculate_pagination_values + page = @enhanced_query[:page] || 1 + @pagination[:start] = ((page - 1) * @pagination[:per_page]) + 1 + @pagination[:end] = [page * @pagination[:per_page], @pagination[:hits]].min + @pagination[:prev] = page - 1 if page > 1 + @pagination[:next] = next_page(page, @pagination[:hits]) if next_page(page, @pagination[:hits]) end + # Calculates the next page number if more results are available. 
+ # + # @param page [Integer] Current page number + # @param hits [Integer] Total number of results available + # @return [Integer, nil] Next page number or nil if no more pages def next_page(page, hits) - page + 1 if page * RESULTS_PER_PAGE < hits + page + 1 if page * @pagination[:per_page] < hits end end diff --git a/app/models/query_builder.rb b/app/models/query_builder.rb index 4031feb2..91638973 100644 --- a/app/models/query_builder.rb +++ b/app/models/query_builder.rb @@ -1,7 +1,6 @@ class QueryBuilder attr_reader :query - RESULTS_PER_PAGE = 20 QUERY_PARAMS = %w[q citation contributors fundingInformation identifiers locations subjects title booleanType].freeze FILTER_PARAMS = %i[accessToFilesFilter contentTypeFilter contributorsFilter formatFilter languagesFilter literaryFormFilter placesFilter sourceFilter subjectsFilter].freeze @@ -10,7 +9,8 @@ class QueryBuilder def initialize(enhanced_query) @query = {} - @query['from'] = calculate_from(enhanced_query[:page]) + @per_page = ENV.fetch('RESULTS_PER_PAGE', '20').to_i + @query['from'] = calculate_from(enhanced_query[:page], @per_page) if Feature.enabled?(:geodata) @query['geobox'] = 'true' if enhanced_query[:geobox] == 'true' @@ -27,10 +27,10 @@ def initialize(enhanced_query) private - def calculate_from(page = 1) + def calculate_from(page = 1, per_page = ENV.fetch('RESULTS_PER_PAGE', '20').to_i) # This needs to return a string because Timdex needs $from to be a String page = 1 if page.to_i.zero? 
- ((page - 1) * RESULTS_PER_PAGE).to_s + ((page - 1) * per_page).to_s end def extract_query(enhanced_query) diff --git a/test/controllers/search_controller_test.rb b/test/controllers/search_controller_test.rb index 7a33dd13..47036ae1 100644 --- a/test/controllers/search_controller_test.rb +++ b/test/controllers/search_controller_test.rb @@ -16,7 +16,7 @@ def mock_primo_search_success } mock_primo = mock('primo_search') - mock_primo.expects(:search).returns({ 'docs' => [sample_doc], 'total' => 1 }) + mock_primo.expects(:search).returns({ 'docs' => [sample_doc], 'info' => { 'total' => 1 } }) PrimoSearch.expects(:new).returns(mock_primo) mock_normalizer = mock('normalizer') @@ -24,6 +24,29 @@ def mock_primo_search_success NormalizePrimoResults.expects(:new).returns(mock_normalizer) end + def mock_primo_search_with_hits(total_hits) + sample_docs = (1..10).map do |i| + { + title: "Sample Primo Document Title #{i}", + format: 'Article', + year: '2025', + creators: [{ value: "Author #{i}", link: nil }], + links: [{ 'kind' => 'full record', 'url' => "https://example.com/record#{i}" }] + } + end + + mock_primo = mock('primo_search') + mock_primo.expects(:search).returns({ + 'docs' => sample_docs, + 'info' => { 'total' => total_hits } + }) + PrimoSearch.expects(:new).returns(mock_primo) + + mock_normalizer = mock('normalizer') + mock_normalizer.expects(:normalize).returns(sample_docs) + NormalizePrimoResults.expects(:new).returns(mock_normalizer) + end + def mock_timdex_search_success # Mock the TIMDEX GraphQL client to avoid external API calls sample_result = { @@ -68,6 +91,50 @@ def mock_timdex_search_success TimdexBase::Client.expects(:query).returns(mock_response) end + def mock_timdex_search_with_hits(total_hits) + sample_results = (1..10).map do |i| + { + 'title' => "Sample TIMDEX Document Title #{i}", + 'timdexRecordId' => "sample-record-#{i}", + 'contentType' => [{ 'value' => 'Article' }], + 'dates' => [{ 'kind' => 'Publication date', 'value' => '2023' }], + 
'contributors' => [{ 'value' => "Creator #{i}", 'kind' => 'Creator' }], + 'sourceLink' => "https://example.com/record#{i}" + } + end + + mock_response = mock('timdex_response') + mock_errors = mock('timdex_errors') + mock_errors.stubs(:details).returns({}) + mock_errors.stubs(:to_h).returns({}) + mock_response.stubs(:errors).returns(mock_errors) + + mock_data = mock('timdex_data') + mock_search = mock('timdex_search') + mock_search.stubs(:to_h).returns({ + 'hits' => total_hits, + 'aggregations' => {}, + 'records' => sample_results + }) + mock_data.stubs(:search).returns(mock_search) + mock_data.stubs(:to_h).returns({ + 'search' => { + 'hits' => total_hits, + 'aggregations' => {}, + 'records' => sample_results + } + }) + mock_response.stubs(:data).returns(mock_data) + + TimdexBase::Client.expects(:query).returns(mock_response) + + # Mock the results normalization + normalized_results = sample_results.map { |result| result.merge({ source: 'TIMDEX' }) } + mock_normalizer = mock('normalizer') + mock_normalizer.expects(:normalize).returns(normalized_results) + NormalizeTimdexResults.expects(:new).returns(mock_normalizer) + end + test 'index shows basic search form by default' do get '/' assert_response :success @@ -637,11 +704,11 @@ def source_filter_count(controller) end test 'results uses simplified search summary for USE app' do - mock_primo_search_success + mock_primo_search_with_hits(10) get '/results?q=test&tab=primo' assert_response :success - assert_select '.results-context', text: /0 results/ + assert_select '.results-context', text: /10 results/ assert_select '.results-context-description', count: 1 assert_select '.results-context-description', text: /From all MIT Libraries sources/ end @@ -782,6 +849,15 @@ def source_filter_count(controller) test 'all tab shows primo continuation when page exceeds API offset limit' do mock_timdex_search_success + # Mock Primo API to return empty results for high page number (beyond offset limit) + mock_primo = 
mock('primo_search') + mock_primo.expects(:search).returns({ 'docs' => [], 'info' => { 'total' => 1000 } }) + PrimoSearch.expects(:new).returns(mock_primo) + + mock_normalizer = mock('normalizer') + mock_normalizer.expects(:normalize).returns([]) + NormalizePrimoResults.expects(:new).returns(mock_normalizer) + get '/results?q=test&tab=all&page=49' assert_response :success @@ -790,4 +866,41 @@ def source_filter_count(controller) assert_select '.primo-continuation h2', text: /Continue your search in Search Our Collections/ assert_select '.primo-continuation a[href*="primo.exlibrisgroup.com"]', count: 1 end + + test 'all tab pagination displays combined hit counts' do + mock_primo_search_with_hits(500) + mock_timdex_search_with_hits(300) + + get '/results?q=test&tab=all' + assert_response :success + + # Should show pagination with combined hit counts (500 + 300 = 800) + assert_select '.pagination-container' + assert_select '.pagination-container .current', text: /1 - 20 of 800/ + end + + test 'all tab pagination includes next page link when more results available' do + mock_primo_search_with_hits(500) + mock_timdex_search_with_hits(300) + + get '/results?q=test&tab=all' + assert_response :success + + # Should show next page link when there are more than 20 total results + assert_select '.pagination-container .next a[href*="page=2"]' + end + + test 'all tab pagination on page 2 includes previous page link' do + mock_primo_search_with_hits(500) + mock_timdex_search_with_hits(300) + + get '/results?q=test&tab=all&page=2' + assert_response :success + + # Should show previous page link + assert_select '.pagination-container .previous a[href*="page=1"]' + + # Should show current range (21-40 for page 2) + assert_select '.pagination-container .current', text: /21 - 40 of 800/ + end end diff --git a/test/models/analyzer_test.rb b/test/models/analyzer_test.rb index a4f46cea..74140ab5 100644 --- a/test/models/analyzer_test.rb +++ b/test/models/analyzer_test.rb @@ -3,15 +3,12 @@ 
class AnalyzerTest < ActiveSupport::TestCase test 'analyzer pagination does not include previous page value on first page of results' do hit_count = 95 - Analyzer.any_instance.stubs(:hits).returns(hit_count) - mocking_hits_so_this_is_empty = {} - eq = { q: 'data', page: 1 } - pagination = Analyzer.new(eq, mocking_hits_so_this_is_empty, :timdex).pagination + pagination = Analyzer.new(eq, hit_count, :timdex).pagination assert pagination.key?(:hits) assert pagination.key?(:start) @@ -26,15 +23,12 @@ class AnalyzerTest < ActiveSupport::TestCase test 'analyzer pagination includes all values when not on first or last page of results' do hit_count = 95 - Analyzer.any_instance.stubs(:hits).returns(hit_count) - mocking_hits_so_this_is_empty = {} - eq = { q: 'data', page: 2 } - pagination = Analyzer.new(eq, mocking_hits_so_this_is_empty, :timdex).pagination + pagination = Analyzer.new(eq, hit_count, :timdex).pagination assert pagination.key?(:hits) assert pagination.key?(:start) @@ -48,15 +42,13 @@ class AnalyzerTest < ActiveSupport::TestCase test 'analyzer pagination does not include last page value on last page of results' do hit_count = 95 - Analyzer.any_instance.stubs(:hits).returns(hit_count) - mocking_hits_so_this_is_empty = {} eq = { q: 'data', page: 5 } - pagination = Analyzer.new(eq, mocking_hits_so_this_is_empty, :timdex).pagination + pagination = Analyzer.new(eq, hit_count, :timdex).pagination assert pagination.key?(:hits) assert pagination.key?(:start) @@ -70,18 +62,12 @@ class AnalyzerTest < ActiveSupport::TestCase end test 'analyzer works with primo response format' do - primo_response = { - 'info' => { - 'total' => 45 - } - } - eq = { q: 'data', page: 1 } - pagination = Analyzer.new(eq, primo_response, :primo).pagination + pagination = Analyzer.new(eq, 45, :primo).pagination assert_equal 45, pagination[:hits] assert_equal 1, pagination[:start] @@ -91,20 +77,12 @@ class AnalyzerTest < ActiveSupport::TestCase end test 'analyzer works with timdex response format' 
do - timdex_response = { - data: { - 'search' => { - 'hits' => 75 - } - } - } - eq = { q: 'data', page: 2 } - pagination = Analyzer.new(eq, timdex_response, :timdex).pagination + pagination = Analyzer.new(eq, 75, :timdex).pagination assert_equal 75, pagination[:hits] assert_equal 21, pagination[:start] @@ -114,16 +92,12 @@ class AnalyzerTest < ActiveSupport::TestCase end test 'analyzer handles missing primo total gracefully' do - primo_response = { - 'info' => {} - } - eq = { q: 'data', page: 1 } - pagination = Analyzer.new(eq, primo_response, :primo).pagination + pagination = Analyzer.new(eq, 0, :primo).pagination assert_equal 0, pagination[:hits] assert_equal 1, pagination[:start] @@ -133,18 +107,12 @@ class AnalyzerTest < ActiveSupport::TestCase end test 'analyzer extracts large hit counts from primo responses' do - primo_response = { - 'info' => { - 'total' => 68_644_281 # Real-world example - } - } - eq = { q: 'data', page: 1 } - pagination = Analyzer.new(eq, primo_response, :primo).pagination + pagination = Analyzer.new(eq, 68_644_281, :primo).pagination # Should show the actual hit count from the API response assert_equal 68_644_281, pagination[:hits] @@ -155,20 +123,12 @@ class AnalyzerTest < ActiveSupport::TestCase end test 'analyzer extracts large hit counts from timdex responses' do - timdex_response = { - data: { - 'search' => { - 'hits' => 68_644_281 # Same large number as Primo example - } - } - } - eq = { q: 'data', page: 1 } - pagination = Analyzer.new(eq, timdex_response, :timdex).pagination + pagination = Analyzer.new(eq, 68_644_281, :timdex).pagination # Should show the actual hit count from the API response assert_equal 68_644_281, pagination[:hits] @@ -179,14 +139,12 @@ class AnalyzerTest < ActiveSupport::TestCase end test 'analyzer handles unknown source types gracefully' do - response = { 'some' => 'data' } - eq = { q: 'data', page: 1 } - pagination = Analyzer.new(eq, response, :unknown_source).pagination + pagination = Analyzer.new(eq, 0, 
:unknown_source).pagination # Should default to 0 hits for unknown source types assert_equal 0, pagination[:hits] @@ -195,4 +153,169 @@ class AnalyzerTest < ActiveSupport::TestCase refute pagination.key?(:next) refute pagination.key?(:prev) end + + test 'analyzer combines hit counts for all tab with both API responses' do + eq = { + q: 'data', + page: 1 + } + + pagination = Analyzer.new(eq, 250, :all, 150).pagination + + # Should combine hits from both APIs: 150 + 250 = 400 + assert_equal 400, pagination[:hits] + assert_equal 1, pagination[:start] + assert_equal 20, pagination[:end] + assert_equal 2, pagination[:next] + refute pagination.key?(:prev) + end + + test 'analyzer handles nil responses for all tab' do + eq = { + q: 'data', + page: 1 + } + + pagination = Analyzer.new(eq, nil, :all, nil).pagination + + assert_equal 0, pagination[:hits] + assert_equal 1, pagination[:start] + assert_equal 0, pagination[:end] + refute pagination.key?(:next) + refute pagination.key?(:prev) + end + + test 'analyzer calculates pagination correctly for all tab on page 2' do + eq = { + q: 'data', + page: 2 + } + + pagination = Analyzer.new(eq, 500, :all, 300).pagination + + # Should combine hits: 300 + 500 = 800 + # Page 2 should show results 21-40 of 800 + assert_equal 800, pagination[:hits] + assert_equal 21, pagination[:start] + assert_equal 40, pagination[:end] + assert_equal 1, pagination[:prev] + assert_equal 3, pagination[:next] + end + + test 'analyzer handles unbalanced API results for all tab' do + eq = { + q: 'data', + page: 1 + } + + pagination = Analyzer.new(eq, 5, :all, 10_000).pagination + + # Should still combine hits and calculate pagination as expected + assert_equal 10_005, pagination[:hits] + assert_equal 1, pagination[:start] + assert_equal 20, pagination[:end] + assert_equal 2, pagination[:next] + refute pagination.key?(:prev) + end + + test 'analyzer handles first API returning zero results for all tab' do + eq = { + q: 'data', + page: 1 + } + + pagination = 
Analyzer.new(eq, 0, :all, 150).pagination + + assert_equal 150, pagination[:hits] + assert_equal 1, pagination[:start] + assert_equal 20, pagination[:end] + assert_equal 2, pagination[:next] + refute pagination.key?(:prev) + end + + test 'analyzer handles second API returning zero results for all tab' do + eq = { + q: 'data', + page: 1 + } + + pagination = Analyzer.new(eq, 150, :all, 0).pagination + + assert_equal 150, pagination[:hits] + assert_equal 1, pagination[:start] + assert_equal 20, pagination[:end] + assert_equal 2, pagination[:next] + refute pagination.key?(:prev) + end + + test 'analyzer handles missing second API response for all tab' do + eq = { + q: 'data', + page: 1 + } + + pagination = Analyzer.new(eq, 100, :all).pagination + + assert_equal 100, pagination[:hits] + assert_equal 1, pagination[:start] + assert_equal 20, pagination[:end] + assert_equal 2, pagination[:next] + refute pagination.key?(:prev) + end + + test 'analyzer handles both APIs returning zero results for all tab' do + eq = { + q: 'data', + page: 1 + } + + pagination = Analyzer.new(eq, 0, :all, 0).pagination + + assert_equal 0, pagination[:hits] + assert_equal 1, pagination[:start] + assert_equal 0, pagination[:end] + refute pagination.key?(:next) + refute pagination.key?(:prev) + end + + test 'analyzer handles very large combined hit counts for all tab' do + eq = { + q: 'data', + page: 500 + } + + pagination = Analyzer.new(eq, 75_000_000, :all, 25_000_000).pagination + + assert_equal 100_000_000, pagination[:hits] + assert_equal 9981, pagination[:start] # (500-1) * 20 + 1 + assert_equal 10_000, pagination[:end] # 500 * 20 + assert_equal 499, pagination[:prev] + assert_equal 501, pagination[:next] + end + + test 'analyzer respects RESULTS_PER_PAGE environment variable' do + eq = { + q: 'data', + page: 2 + } + + pagination = Analyzer.new(eq, 100, :timdex).pagination + assert_equal 20, pagination[:per_page] + assert_equal 21, pagination[:start] # (2-1) * 20 + 1 + assert_equal 40, 
pagination[:end] # 2 * 20 + + ClimateControl.modify RESULTS_PER_PAGE: '10' do + pagination = Analyzer.new(eq, 100, :timdex).pagination + assert_equal 10, pagination[:per_page] + assert_equal 11, pagination[:start] # (2-1) * 10 + 1 + assert_equal 20, pagination[:end] # 2 * 10 + end + + ClimateControl.modify RESULTS_PER_PAGE: '50' do + pagination = Analyzer.new(eq, 200, :timdex).pagination + assert_equal 50, pagination[:per_page] + assert_equal 51, pagination[:start] # (2-1) * 50 + 1 + assert_equal 100, pagination[:end] # 2 * 50 + end + end end