Commit fef4cff

remove .includes and add a set to make it O(n) (#2875)
1 parent 550adee commit fef4cff

File tree

3 files changed: +12 -5 lines changed

CHANGELOG.md
assets/html/js/search.js
test/search/wrapper.js
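
The change is a small data-structure swap in the search tokenizer: duplicates were previously filtered with `tokens.includes(token)`, which rescans the whole tokens array for every candidate, so collecting n tokens costs O(n^2) comparisons. The commit keeps a Set next to the array so each "seen before?" check is an average O(1) lookup, while the array still preserves the order in which tokens were found. A minimal sketch of the before/after pattern (my own illustration, not the actual search.js code):

// Before: Array.prototype.includes scans the whole array on every check,
// so deduplicating n tokens costs O(n^2) comparisons.
function dedupeQuadratic(rawTokens) {
  const tokens = [];
  for (const token of rawTokens) {
    if (token && !tokens.includes(token)) {
      tokens.push(token);
    }
  }
  return tokens;
}

// After: a Set answers the membership question in O(1) on average, so the
// whole pass is O(n); the array is kept because the tokenizer must still
// return an ordered list of tokens.
function dedupeLinear(rawTokens) {
  const tokens = [];
  const tokenSet = new Set();
  for (const token of rawTokens) {
    if (token && !tokenSet.has(token)) {
      tokens.push(token);
      tokenSet.add(token);
    }
  }
  return tokens;
}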

CHANGELOG.md

Lines changed: 1 addition & 0 deletions

@@ -11,6 +11,7 @@ and this project adheres to [Semantic Versioning](https://semver.org/spec/v2.0.0
 
 ### Changed
 
+* reduced time complexity from O(n^2) to O(n) to improve the initial load time for search ([#2875])
 * Git no longer displays a message about the default branch name when calling `deploydocs()`. ([#2854])
 * Don't escape characters (such as ~) in URLs when writing LaTeX. ([#2210])
 
assets/html/js/search.js

Lines changed: 5 additions & 2 deletions

@@ -206,6 +206,7 @@ function worker_function(documenterSearchIndex, documenterBaseURL, filters) {
     // find anything if searching for "add!", only for the entire qualification
     tokenize: (string) => {
       const tokens = [];
+      const tokenSet = new Set();
       let remaining = string;
 
       // julia specific patterns
@@ -232,8 +233,9 @@ function worker_function(documenterSearchIndex, documenterBaseURL, filters) {
         let match;
         while ((match = pattern.exec(remaining)) != null) {
           const token = match[0].trim();
-          if (token && !tokens.includes(token)) {
+          if (token && !tokenSet.has(token)) {
             tokens.push(token);
+            tokenSet.add(token);
           }
         }
       }
@@ -243,8 +245,9 @@ function worker_function(documenterSearchIndex, documenterBaseURL, filters) {
         .split(/[\s\-,;()[\]{}]+/)
         .filter((t) => t.trim());
       for (const token of basicTokens) {
-        if (token && !tokens.includes(token)) {
+        if (token && !tokenSet.has(token)) {
          tokens.push(token);
+          tokenSet.add(token);
        }
      }
 
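
Both hunks above sit inside the custom tokenize callback of Documenter's client-side search: the first loop collects Julia-specific patterns, the second splits whatever text remains on whitespace and punctuation, and both now consult tokenSet instead of rescanning tokens. A rough micro-benchmark sketch (my own illustration, not part of the commit) shows why the array scan hurts once a page produces many distinct tokens:

// Mostly unique tokens, as a long documentation page would produce.
const rawTokens = Array.from({ length: 20000 }, (_, i) => `token${i}`);

console.time("includes (O(n^2))");
{
  const tokens = [];
  for (const t of rawTokens) {
    if (t && !tokens.includes(t)) tokens.push(t);
  }
}
console.timeEnd("includes (O(n^2))");

console.time("Set (O(n))");
{
  const tokens = [];
  const tokenSet = new Set();
  for (const t of rawTokens) {
    if (t && !tokenSet.has(t)) {
      tokens.push(t);
      tokenSet.add(t);
    }
  }
}
console.timeEnd("Set (O(n))");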
test/search/wrapper.js

Lines changed: 6 additions & 3 deletions

@@ -33,6 +33,7 @@ const index = new MiniSearch({
   },
   tokenize: (string) => {
     const tokens = [];
+    const tokenSet = new Set();
     let remaining = string;
 
     // julia specific patterns
@@ -59,8 +60,9 @@ const index = new MiniSearch({
       let match;
       while ((match = pattern.exec(remaining)) != null) {
         const token = match[0].trim();
-        if (token && !tokens.includes(token)) {
+        if (token && !tokenSet.has(token)) {
           tokens.push(token);
+          tokenSet.add(token);
         }
       }
     }
@@ -70,8 +72,9 @@ const index = new MiniSearch({
       .split(/[\s\-,;()[\]{}]+/)
       .filter((t) => t.trim());
     for (const token of basicTokens) {
-      if (token && !tokens.includes(token)) {
+      if (token && !tokenSet.has(token)) {
        tokens.push(token);
+        tokenSet.add(token);
      }
    }
 
@@ -89,4 +92,4 @@ const results = index.search(__QUERY__, {
 
 // Extract unique page names from results (same logic as search.js)
 const pages = [...new Set(results.map(r => r.title))];
-console.log(JSON.stringify(pages.slice(0,5)));
+console.log(JSON.stringify(pages.slice(0,5)));
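
test/search/wrapper.js mirrors the same tokenizer so the Node-side search tests index documents exactly as the browser worker does, which is why the Set appears in both files. For orientation, here is a hedged stand-alone sketch of how a custom tokenize option plugs into MiniSearch; the document and field names are made up for illustration and are not Documenter's real search-index schema:

// Assumed setup: `npm install minisearch`; the schema below is illustrative only.
const MiniSearch = require("minisearch");

// Same dedup pattern as the commit: a Set for O(1) membership checks,
// an array to preserve token order.
const dedupingTokenize = (text) => {
  const tokens = [];
  const tokenSet = new Set();
  for (const raw of text.split(/[\s\-,;()[\]{}]+/)) {
    const token = raw.trim();
    if (token && !tokenSet.has(token)) {
      tokens.push(token);
      tokenSet.add(token);
    }
  }
  return tokens;
};

const index = new MiniSearch({
  fields: ["title", "text"],   // fields to index (illustrative)
  storeFields: ["title"],      // fields copied into each search result
  tokenize: dedupingTokenize,  // used for both indexing and querying
});

index.add({ id: 1, title: "deploydocs", text: "deploydocs() deploys the generated docs" });
console.log(index.search("deploydocs").map((r) => r.title)); // => [ 'deploydocs' ]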
