@@ -17,15 +17,21 @@ def results

     @enhanced_query = Enhancer.new(params).enhanced_query

+    # Load GeoData results if applicable
+    if Feature.enabled?(:geodata)
+      load_geodata_results
+      render 'results_geo'
+      return
+    end
+
     # Route to appropriate search based on active tab
     case @active_tab
     when 'primo'
       load_primo_results
     when 'timdex'
       load_timdex_results
-    when 'geodata'
-      load_gdt_results
-      render 'results_geo'
+    when 'all'
+      load_all_results
     end
   end

@@ -50,7 +56,7 @@ def sleep_if_too_fast
     sleep(1 - duration)
   end

-  def load_gdt_results
+  def load_geodata_results
     query = QueryBuilder.new(@enhanced_query).query

     response = query_timdex(query)
@@ -66,49 +72,96 @@ def load_gdt_results
   end

   def load_primo_results
+    data = fetch_primo_data
+    @results = data[:results]
+    @pagination = data[:pagination]
+    @errors = data[:errors]
+    @show_primo_continuation = data[:show_continuation]
+  end
+
+  def load_timdex_results
+    data = fetch_timdex_data
+    @results = data[:results]
+    @pagination = data[:pagination]
+    @errors = data[:errors]
+  end
+
+  def load_all_results
+    # Parallel fetching from both APIs
+    primo_thread = Thread.new { fetch_primo_data }
+    timdex_thread = Thread.new { fetch_timdex_data }
+
+    # Wait for both threads to complete
+    primo_data = primo_thread.value
+    timdex_data = timdex_thread.value
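+    # Thread#value joins each thread and re-raises any exception raised inside it,
+    # so a failure in either fetch surfaces here rather than being silently dropped.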
+
+    # Collect any errors from either API
+    all_errors = []
+    all_errors.concat(primo_data[:errors]) if primo_data[:errors]
+    all_errors.concat(timdex_data[:errors]) if timdex_data[:errors]
+    @errors = all_errors.any? ? all_errors : nil
+
+    # Zipper merge results from both APIs
+    primo_results = primo_data[:results] || []
+    timdex_results = timdex_data[:results] || []
+    @results = primo_results.zip(timdex_results).flatten.compact
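+    # Note: Array#zip pairs by the receiver's length, so nils padded in for a
+    # shorter TIMDEX list are removed by compact, while TIMDEX results beyond
+    # the Primo count are not included in the merged list.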
+
+    # For now, just use primo pagination as a placeholder
+    @pagination = primo_data[:pagination] || {}
+
+    # Handle primo continuation for high page numbers
+    @show_primo_continuation = primo_data[:show_continuation] || false
+  end
+
+  def fetch_primo_data
     current_page = @enhanced_query[:page] || 1
     per_page = @enhanced_query[:per_page] || 20
     offset = (current_page - 1) * per_page

     # Check if we're beyond Primo API limits before making the request.
     if offset >= Analyzer::PRIMO_MAX_OFFSET
-      @show_primo_continuation = true
-      return
+      return { results: [], pagination: {}, errors: nil, show_continuation: true }
     end

     primo_response = query_primo
-    @results = NormalizePrimoResults.new(primo_response, @enhanced_query[:q]).normalize
+    results = NormalizePrimoResults.new(primo_response, @enhanced_query[:q]).normalize
+    pagination = Analyzer.new(@enhanced_query, primo_response, :primo).pagination

     # Handle empty results from Primo API. Sometimes Primo will return no results at a given offset,
     # despite claiming in the initial query that more are available. This happens randomly and
     # seemingly for no reason (well below the recommended offset of 2,000). While the bug also
     # exists in Primo UI, sending users there seems like the best we can do.
-    if @results.empty?
+    show_continuation = false
+    errors = nil
+
+    if results.empty?
       docs = primo_response['docs'] if primo_response.is_a?(Hash)
       if docs.nil? || docs.empty?
         # Only show continuation for pagination scenarios (page > 1), not for searches with no results
-        @show_primo_continuation = true if current_page > 1
+        show_continuation = true if current_page > 1
       else
-        @errors = [{ 'message' => 'No more results available at this page number.' }]
+        errors = [{ 'message' => 'No more results available at this page number.' }]
       end
     end

-    # Use Analyzer for consistent pagination across all search types
-    @pagination = Analyzer.new(@enhanced_query, primo_response, :primo).pagination
+    { results: results, pagination: pagination, errors: errors, show_continuation: show_continuation }
   rescue StandardError => e
-    @errors = handle_primo_errors(e)
+    { results: [], pagination: {}, errors: handle_primo_errors(e), show_continuation: false }
   end

-  def load_timdex_results
+  def fetch_timdex_data
     query = QueryBuilder.new(@enhanced_query).query
     response = query_timdex(query)
-
-    @errors = extract_errors(response)
-    return unless @errors.nil?
-
-    @pagination = Analyzer.new(@enhanced_query, response, :timdex).pagination
-    raw_results = extract_results(response)
-    @results = NormalizeTimdexResults.new(raw_results, @enhanced_query[:q]).normalize
+    errors = extract_errors(response)
+
+    if errors.nil?
+      pagination = Analyzer.new(@enhanced_query, response, :timdex).pagination
+      raw_results = extract_results(response)
+      results = NormalizeTimdexResults.new(raw_results, @enhanced_query[:q]).normalize
+      { results: results, pagination: pagination, errors: nil }
+    else
+      { results: [], pagination: {}, errors: errors }
+    end
   end

   def active_filters
@@ -121,9 +174,9 @@ def query_timdex(query)

     # Builder hands off to wrapper which returns raw results here.
     Rails.cache.fetch("#{cache_key}/#{@active_tab}", expires_in: 12.hours) do
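+      # The active tab is part of the cache key, so 'timdex' and 'all' responses are cached separately.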
-      raw = if @active_tab == 'geodata'
+      raw = if Feature.enabled?(:geodata)
               execute_geospatial_query(query)
-            elsif @active_tab == 'timdex'
+            elsif @active_tab == 'timdex' || @active_tab == 'all'
               TimdexBase::Client.query(TimdexSearch::BaseQuery, variables: query)
             end
