@@ -65,6 +65,10 @@ pub async fn root_search(
         .map_err(|err| {
             SearchError::InternalError(format!("Failed to build doc mapper. Cause: {}", err))
         })?;
+
+    // try to build query against current schema
+    let _query = doc_mapper.query(doc_mapper.schema(), search_request)?;
+
     let doc_mapper_str = serde_json::to_string(&doc_mapper).map_err(|err| {
         SearchError::InternalError(format!("Failed to serialize doc mapper: Cause {}", err))
     })?;
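
Note on the hunk above: the query is now built once at the root, right after the doc mapper, so a malformed query or an unknown field is rejected with a single SearchError before any leaf request is sent. Below is a minimal, self-contained sketch of that fail-fast idea; it is not Quickwit's actual API, and `Schema`, `validate_query`, and the `field:value` parsing are hypothetical stand-ins for what `doc_mapper.query(..)` does internally.

    // Hypothetical stand-ins for illustration only, not Quickwit types.
    #[derive(Debug)]
    struct SearchError(String);

    struct Schema {
        fields: Vec<String>,
    }

    impl Schema {
        fn has_field(&self, name: &str) -> bool {
            self.fields.iter().any(|f| f.as_str() == name)
        }
    }

    // Reject `field:value` queries whose field is not declared in the schema.
    fn validate_query(schema: &Schema, query: &str) -> Result<(), SearchError> {
        if let Some((field, _value)) = query.split_once(':') {
            if !schema.has_field(field) {
                return Err(SearchError(format!("Field does not exist: `{}`", field)));
            }
        }
        Ok(())
    }

    fn main() {
        let schema = Schema {
            fields: vec!["body".to_string(), "title".to_string()],
        };
        assert!(validate_query(&schema, "body:test").is_ok());
        assert!(validate_query(&schema, "invalid_body:test").is_err());
    }
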
@@ -149,9 +153,8 @@ pub async fn root_search(

     // Merge the fetched docs.
     let hits = fetch_docs_responses
-        .iter()
-        .map(|response| response.hits.clone())
-        .flatten()
+        .into_iter()
+        .flat_map(|response| response.hits)
         .sorted_by(|hit1, hit2| {
             let value1 = if let Some(partial_hit) = &hit1.partial_hit {
                 partial_hit.sorting_field_value
@@ -216,9 +219,8 @@ fn jobs_to_leaf_request(
         search_request: Some(request_with_offset_0),
         split_metadata: jobs
             .iter()
-            .map(|job| {
-                extract_split_and_footer_offsets(split_metadata_map.get(&job.split_id).unwrap())
-            })
+            .map(|job| split_metadata_map.get(&job.split_id).expect(&job.split_id))
+            .map(extract_split_and_footer_offsets)
             .collect(),
         doc_mapper: doc_mapper_str.to_string(),
         index_uri: index_uri.to_string(),
@@ -234,12 +236,11 @@ fn jobs_to_fetch_docs_request(
 ) -> FetchDocsRequest {
     let partial_hits = jobs
         .iter()
-        .map(|job| partial_hits_map.remove(&job.split_id).unwrap())
-        .flatten()
+        .flat_map(|job| partial_hits_map.remove(&job.split_id).expect(&job.split_id))
         .collect_vec();
     let splits_footer_and_offsets = jobs
         .iter()
-        .map(|job| split_metadata_map.get(&job.split_id).unwrap())
+        .map(|job| split_metadata_map.get(&job.split_id).expect(&job.split_id))
         .map(extract_split_and_footer_offsets)
         .collect_vec();

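
The two hunks above also replace `.unwrap()` with `.expect(&job.split_id)`, so that if a split id is ever missing from the map, the panic message names the offending split instead of the generic unwrap message. A tiny illustrative snippet of that difference (the map contents and value type are made up for the example):

    use std::collections::HashMap;

    fn main() {
        let split_metadata_map: HashMap<String, u64> = HashMap::new();
        let split_id = "split-42".to_string();
        // With an empty map, `get` returns None and this panics; the panic message is
        // the split id itself, which is easier to chase down than an anonymous unwrap.
        let _footer_offsets = split_metadata_map.get(&split_id).expect(&split_id);
    }
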
@@ -1024,4 +1025,64 @@ mod tests {
         assert_eq!(search_response.hits.len(), 1);
         Ok(())
     }
+
+    #[tokio::test]
+    async fn test_root_search_invalid_queries() -> anyhow::Result<()> {
+        let mut metastore = MockMetastore::new();
+        metastore
+            .expect_index_metadata()
+            .returning(|_index_id: &str| {
+                Ok(IndexMetadata::for_test(
+                    "test-idx",
+                    "file:///path/to/index/test-idx",
+                ))
+            });
+        metastore.expect_list_splits().returning(
+            |_index_id: &str, _split_state: SplitState, _time_range: Option<Range<i64>>, _tags| {
+                Ok(vec![mock_split("split")])
+            },
+        );
+
+        let client_pool =
+            Arc::new(SearchClientPool::from_mocks(vec![Arc::new(MockSearchService::new())]).await?);
+        let cluster_client = ClusterClient::new(client_pool.clone());
+
+        assert!(root_search(
+            &quickwit_proto::SearchRequest {
+                index_id: "test-idx".to_string(),
+                query: r#"invalid_body:"test""#.to_string(),
+                search_fields: vec!["body".to_string()],
+                start_timestamp: None,
+                end_timestamp: None,
+                max_hits: 10,
+                start_offset: 0,
+                ..Default::default()
+            },
+            &metastore,
+            &cluster_client,
+            &client_pool,
+        )
+        .await
+        .is_err());
+
+        assert!(root_search(
+            &quickwit_proto::SearchRequest {
+                index_id: "test-idx".to_string(),
+                query: "test".to_string(),
+                search_fields: vec!["invalid_body".to_string()],
+                start_timestamp: None,
+                end_timestamp: None,
+                max_hits: 10,
+                start_offset: 0,
+                ..Default::default()
+            },
+            &metastore,
+            &cluster_client,
+            &client_pool,
+        )
+        .await
+        .is_err());
+
+        Ok(())
+    }
 }