
Commit c525772: Use multi search queries
Parent: 50af053

5 files changed: 87 additions, 61 deletions


gleam.toml

Lines changed: 1 addition & 0 deletions
@@ -12,6 +12,7 @@ gleam_stdlib = ">= 0.54.0 and < 1.0.0"
 grille_pain = ">= 1.1.0 and < 2.0.0"
 lustre = ">= 5.0.0 and < 6.0.0"
 modem = ">= 2.1.1 and < 3.0.0"
+gleam_json = ">= 3.0.2 and < 4.0.0"
 
 [dev-dependencies]
 gleeunit = ">= 1.0.0 and < 2.0.0"

manifest.toml

Lines changed: 1 addition & 0 deletions
@@ -52,6 +52,7 @@ gleam_fetch = { version = ">= 1.2.0 and < 2.0.0" }
 gleam_hexpm = { version = ">= 3.0.0 and < 4.0.0" }
 gleam_http = { version = ">= 4.0.0 and < 5.0.0" }
 gleam_javascript = { version = ">= 1.0.0 and < 2.0.0" }
+gleam_json = { version = ">= 3.0.2 and < 4.0.0" }
 gleam_regexp = { version = ">= 1.1.0 and < 2.0.0" }
 gleam_stdlib = { version = ">= 0.54.0 and < 1.0.0" }
 gleeunit = { version = ">= 1.0.0 and < 2.0.0" }

src/hexdocs.gleam

Lines changed: 11 additions & 9 deletions
@@ -156,15 +156,17 @@ fn api_returned_packages(
 }
 
 fn api_returned_typesense_search(model: Model, response: Loss(decode.Dynamic)) {
-  response
-  |> result.try(fn(search_result) {
-    search_result
-    |> decode.run(hexdocs.typesense_decoder())
-    |> result.map_error(loss.DecodeError)
-  })
-  |> result.map(model.set_search_results(model, _))
-  |> result.map(pair.new(_, effect.none()))
-  |> result.unwrap(#(model, effect.none()))
+  let response =
+    response
+    |> result.try(fn(search_result) {
+      search_result
+      |> decode.run(hexdocs.typesense_decoder())
+      |> result.map_error(loss.DecodeError)
+    })
+  case response {
+    Error(_) -> #(model, toast.error("Server error. Retry later."))
+    Ok(results) -> #(model.set_search_results(model, results), effect.none())
+  }
 }
 
 fn api_returned_initial_latest_packages(

src/hexdocs/endpoints.gleam

Lines changed: 1 addition & 1 deletion
@@ -2,7 +2,7 @@ import gleam/uri.{type Uri}
 import hexdocs/config
 
 pub fn search() -> Uri {
-  let assert Ok(uri) = uri.parse(config.search_url())
+  let assert Ok(uri) = uri.parse(config.search_url() <> "/multi_search")
   uri
 }
 
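For orientation, a minimal usage sketch of the changed endpoint. The base URL below is a hypothetical placeholder; the real value comes from config.search_url(). Searches now target Typesense's multi_search route and are sent as a POST with a JSON body (see src/hexdocs/services/hexdocs.gleam below) instead of URL query parameters.

import gleam/uri

// Sketch only: "https://search.example.org" stands in for config.search_url().
pub fn example_search_endpoint() -> uri.Uri {
  // The parsed endpoint now points at <search_url>/multi_search.
  let assert Ok(endpoint) = uri.parse("https://search.example.org" <> "/multi_search")
  endpoint
}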

src/hexdocs/services/hexdocs.gleam

Lines changed: 73 additions & 51 deletions
@@ -1,14 +1,14 @@
 import gleam/bool
 import gleam/dynamic/decode
 import gleam/fetch
+import gleam/http
 import gleam/http/request
 import gleam/int
 import gleam/javascript/promise
+import gleam/json
 import gleam/list
-import gleam/option.{Some}
 import gleam/result
 import gleam/string
-import gleam/uri
 import hexdocs/config
 import hexdocs/data/model/version
 import hexdocs/endpoints
@@ -44,45 +44,63 @@ pub fn typesense_search(
   packages: List(version.Package),
   page: Int,
 ) {
-  let query = new_search_query_params(query, packages, page)
-  let endpoint = uri.Uri(..endpoints.search(), query: Some(query))
-  let assert Ok(request) = request.from_uri(endpoint)
+  let body = new_search_body(query, packages, page)
+  let endpoint = endpoints.search()
+  let assert Ok(req) = request.from_uri(endpoint)
+  let request =
+    req
+    |> request.set_method(http.Post)
+    |> request.set_header("content-type", "application/json")
+    |> request.set_body(body)
   fetch.send(request)
   |> promise.try_await(fetch.read_json_body)
   |> promise.map(result.map_error(_, loss.FetchError))
 }
 
 pub fn typesense_decoder() {
-  use found <- decode.field("found", decode.int)
-  use hits <- decode.field("hits", {
+  use results <- decode.field(
+    "results",
     decode.list({
-      use document <- decode.field("document", {
-        use doc <- decode.field("doc", decode.string)
-        use id <- decode.field("id", decode.string)
-        use package <- decode.field("package", decode.string)
-        use proglang <- decode.field("proglang", decode.string)
-        use ref <- decode.field("ref", decode.string)
-        use title <- decode.field("title", decode.string)
-        use type_ <- decode.field("type", decode.string)
-        Document(
-          doc:,
-          id:,
-          package:,
-          proglang:,
-          ref:,
-          title:,
-          type_:,
-          headers: [],
-        )
-        |> decode.success
+      use found <- decode.field("found", decode.int)
+      use hits <- decode.field("hits", {
+        decode.list({
+          use document <- decode.field("document", {
+            use doc <- decode.field("doc", decode.string)
+            use id <- decode.field("id", decode.string)
+            use package <- decode.field("package", decode.string)
+            use proglang <- decode.field("proglang", decode.string)
+            use ref <- decode.field("ref", decode.string)
+            use title <- decode.field("title", decode.string)
+            use type_ <- decode.field("type", decode.string)
+            Document(
+              doc:,
+              id:,
+              package:,
+              proglang:,
+              ref:,
+              title:,
+              type_:,
+              headers: [],
+            )
+            |> decode.success
+          })
+          decode.success(document)
+        })
       })
-      decode.success(document)
-    })
-  })
-  let grouped_results = group_headers(hits)
-  let removed_count = list.length(hits) - list.length(grouped_results)
-  let max_results = list.take(grouped_results, config.per_page())
-  decode.success(#(found - removed_count, max_results))
+      decode.success(#(found, hits))
+    }),
+  )
+
+  // Extract first result (we only send one search)
+  case results {
+    [#(found, hits), ..] -> {
+      let grouped_results = group_headers(hits)
+      let removed_count = list.length(hits) - list.length(grouped_results)
+      let max_results = list.take(grouped_results, config.per_page())
+      decode.success(#(found - removed_count, max_results))
+    }
+    [] -> decode.success(#(0, []))
+  }
 }
 
 fn group_headers(documents: List(Document)) -> List(Document) {
@@ -151,28 +169,33 @@ fn group_headers(documents: List(Document)) -> List(Document) {
   })
 }
 
-fn new_search_query_params(
+fn new_search_body(
   query: String,
   packages: List(version.Package),
   page: Int,
-) {
-  list.new()
-  |> list.key_set("q", query)
-  |> list.key_set("query_by", "title,doc")
-  |> list.key_set("query_by_weights", "3,1")
-  |> list.key_set("page", int.to_string(page))
-  // We multiply per 2 because we group results
-  |> list.key_set("per_page", int.to_string(config.per_page() * 2))
-  |> list.key_set("highlight_fields", "none")
-  |> add_filter_by_packages_param(packages)
-  |> uri.query_to_string
+) -> String {
+  let filter_by = case get_filter_by_packages(packages) {
+    "" -> []
+    filter -> [#("filter_by", json.string(filter))]
+  }
+
+  let search_params =
+    json.object([
+      #("q", json.string(query)),
+      #("query_by", json.string("title,doc")),
+      #("query_by_weights", json.string("3,1")),
+      #("page", json.int(page)),
+      #("per_page", json.int(config.per_page() * 2)),
+      #("highlight_fields", json.string("none")),
+      ..filter_by
+    ])
+
+  json.object([#("searches", json.array([search_params], fn(x) { x }))])
+  |> json.to_string
 }
 
-fn add_filter_by_packages_param(
-  query: List(#(String, String)),
-  packages: List(version.Package),
-) -> List(#(String, String)) {
-  use <- bool.guard(when: list.is_empty(packages), return: query)
+fn get_filter_by_packages(packages: List(version.Package)) -> String {
+  use <- bool.guard(when: list.is_empty(packages), return: "")
   packages
   |> list.filter_map(fn(p) {
     case p.status {
@@ -182,7 +205,6 @@ fn add_filter_by_packages_param(
   })
   |> list.map(string.append("package:=", _))
  |> string.join("||")
-  |> list.key_set(query, "filter_by", _)
 }
 
 pub fn snippet(doc: String, search_input: String) -> String {
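For reference, a sketch of the payload shapes this change introduces. The query, packages, and counts below are hypothetical placeholders; the field names come from new_search_body and typesense_decoder above. The request body built by new_search_body wraps a single search in Typesense's multi_search envelope:

{
  "searches": [
    {
      "q": "decoder",
      "query_by": "title,doc",
      "query_by_weights": "3,1",
      "page": 1,
      "per_page": 20,
      "highlight_fields": "none",
      "filter_by": "package:=lustre||package:=gleam_json"
    }
  ]
}

The matching response is read by typesense_decoder, which now expects the previous single-search shape nested under a results array and only uses the first entry:

{
  "results": [
    {
      "found": 42,
      "hits": [
        {
          "document": {
            "doc": "...",
            "id": "...",
            "package": "...",
            "proglang": "...",
            "ref": "...",
            "title": "...",
            "type": "..."
          }
        }
      ]
    }
  ]
}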
