@@ -65,7 +65,8 @@ def load_geodata_results
6565 @errors = extract_errors ( response )
6666 return unless @errors . nil?
6767
68- @pagination = Analyzer . new ( @enhanced_query , response , :timdex ) . pagination
68+ hits = response . dig ( :data , 'search' , 'hits' ) || 0
69+ @pagination = Analyzer . new ( @enhanced_query , hits , :timdex ) . pagination
6970 raw_results = extract_results ( response )
7071 @results = NormalizeTimdexResults . new ( raw_results , @enhanced_query [ :q ] ) . normalize
7172 @filters = extract_filters ( response )
@@ -87,43 +88,58 @@ def load_timdex_results
8788 end
8889
def load_all_results
  # Populate @errors, @results, @pagination and @show_primo_continuation
  # for the combined "all" tab by querying Primo and TIMDEX concurrently.
  primo_data, timdex_data = fetch_all_data

  # nil when both backends succeeded; otherwise the merged error list.
  @errors = combine_errors(primo_data[:errors], timdex_data[:errors])

  # Interleave the two result sets (zipper merge).
  @results = merge_results(primo_data[:results], timdex_data[:results])

  # Combined pagination is computed from both backends' hit counts.
  @pagination = Analyzer.new(@enhanced_query, timdex_data[:hits], :all,
                             primo_data[:hits]).pagination

  # Primo limits how deep it will paginate; past that limit we show a
  # continuation link instead of more results.
  @show_primo_continuation = primo_data[:show_continuation] || false
end
115107
def fetch_all_data
  # Query both backends concurrently. Thread#value joins the thread and
  # re-raises any exception it raised, so failures surface to the caller.
  # Returns [primo_data, timdex_data] in that fixed order.
  threads = [Thread.new { fetch_primo_data }, Thread.new { fetch_timdex_data }]
  threads.map(&:value)
end
115+
def combine_errors(*error_arrays)
  # Merge per-backend error lists into one, ignoring backends that
  # reported none (nil). Returns nil when there is nothing to display,
  # which is the sentinel the view/controller code checks for.
  merged = error_arrays.compact.flatten
  return nil unless merged.any?

  merged
end
120+
def merge_results(primo_results, timdex_results)
  # Zipper-merge the two result lists, alternating primo/timdex at each
  # rank. Either argument may be nil (treated as empty).
  #
  # Bug fix: Array#zip truncates at the receiver's length, so the previous
  # `primo.zip(timdex)` silently dropped trailing TIMDEX results whenever
  # Primo returned fewer hits than TIMDEX. Iterate to the longer length
  # instead so no results are lost; nil padding from the shorter list is
  # removed by #compact.
  primo = primo_results || []
  timdex = timdex_results || []
  longest = [primo.length, timdex.length].max
  (0...longest).flat_map { |i| [primo[i], timdex[i]] }.compact
end
124+
116125 def fetch_primo_data
117126 current_page = @enhanced_query [ :page ] || 1
118- per_page = @enhanced_query [ :per_page ] || 20
127+ per_page = if @active_tab == 'all'
128+ ENV . fetch ( 'RESULTS_PER_PAGE' , '20' ) . to_i / 2
129+ else
130+ ENV . fetch ( 'RESULTS_PER_PAGE' , '20' ) . to_i
131+ end
119132 offset = ( current_page - 1 ) * per_page
120133
121134 # Check if we're beyond Primo API limits before making the request.
122- return { results : [ ] , pagination : { } , errors : nil , show_continuation : true } if offset >= Analyzer ::PRIMO_MAX_OFFSET
135+ if offset >= Analyzer ::PRIMO_MAX_OFFSET
136+ return { results : [ ] , pagination : { } , errors : nil , show_continuation : true , hits : 0 }
137+ end
123138
124- primo_response = query_primo
139+ primo_response = query_primo ( per_page , offset )
140+ hits = primo_response . dig ( 'info' , 'total' ) || 0
125141 results = NormalizePrimoResults . new ( primo_response , @enhanced_query [ :q ] ) . normalize
126- pagination = Analyzer . new ( @enhanced_query , primo_response , :primo ) . pagination
142+ pagination = Analyzer . new ( @enhanced_query , hits , :primo ) . pagination
127143
128144 # Handle empty results from Primo API. Sometimes Primo will return no results at a given offset,
129145 # despite claiming in the initial query that more are available. This happens randomly and
@@ -142,23 +158,37 @@ def fetch_primo_data
142158 end
143159 end
144160
145- { results : results , pagination : pagination , errors : errors , show_continuation : show_continuation }
161+ { results : results , pagination : pagination , errors : errors , show_continuation : show_continuation ,
162+ hits : hits }
146163 rescue StandardError => e
147- { results : [ ] , pagination : { } , errors : handle_primo_errors ( e ) , show_continuation : false }
164+ { results : [ ] , pagination : { } , errors : handle_primo_errors ( e ) , show_continuation : false , hits : 0 }
148165 end
149166
def fetch_timdex_data
  # Fetch and normalize TIMDEX results for the current query. Returns a
  # hash with :results, :pagination, :errors and :hits so the caller can
  # merge it with the Primo side.
  #
  # The query is built once; the original built it separately in each
  # branch of the tab check, duplicating the QueryBuilder call.
  query = QueryBuilder.new(@enhanced_query).query

  # On the combined "all" tab each backend contributes half a page, so the
  # TIMDEX offset ('from') must be recomputed from the halved page size.
  # NOTE(review): only the offset is adjusted here — the query's page size
  # is whatever QueryBuilder produced; confirm it already accounts for the
  # halved RESULTS_PER_PAGE on this tab.
  if @active_tab == 'all'
    per_page = ENV.fetch('RESULTS_PER_PAGE', '20').to_i / 2
    page = @enhanced_query[:page] || 1
    query['from'] = ((page - 1) * per_page).to_s
  end

  response = query_timdex(query)
  errors = extract_errors(response)

  # An errored response contributes no results and zero hits.
  return { results: [], pagination: {}, errors: errors, hits: 0 } unless errors.nil?

  hits = response.dig(:data, 'search', 'hits') || 0
  pagination = Analyzer.new(@enhanced_query, hits, :timdex).pagination
  raw_results = extract_results(response)
  results = NormalizeTimdexResults.new(raw_results, @enhanced_query[:q]).normalize
  { results: results, pagination: pagination, errors: nil, hits: hits }
end
164194
@@ -191,16 +221,12 @@ def query_timdex(query)
191221 end
192222 end
193223
def query_primo(per_page, offset)
  # Execute (or serve from cache) a Primo search for the current query.
  #
  # We generate unique cache keys to avoid naming collisions.
  #
  # Bug fix: per_page and offset are now caller-supplied and vary for the
  # same @enhanced_query (the "all" tab requests half a page; pagination
  # varies the offset), but the cache key was derived only from
  # @enhanced_query. That let a half-page "all" response be served to a
  # full-page Primo request (and vice versa). Fold both parameters into
  # the key so each (query, per_page, offset) combination caches
  # independently.
  cache_key = generate_cache_key(@enhanced_query)

  Rails.cache.fetch("#{cache_key}/primo/#{per_page}/#{offset}", expires_in: 12.hours) do
    primo_search = PrimoSearch.new
    primo_search.search(@enhanced_query[:q], per_page, offset)
  end
end
0 commit comments