Skip to content
Merged
Show file tree
Hide file tree
Changes from all commits
Commits
File filter

Filter by extension

Filter by extension

Conversations
Failed to load comments.
Loading
Jump to
Jump to file
Failed to load files.
Loading
Diff view
Diff view
1 change: 1 addition & 0 deletions CHANGELOG.md
Original file line number Diff line number Diff line change
Expand Up @@ -11,6 +11,7 @@ and this project adheres to [Semantic Versioning](https://semver.org/spec/v2.0.0

### Changed

* Reduced the time complexity of search tokenization from O(n^2) to O(n) to improve the initial load time for search. ([#2875])
* Git no longer displays a message about the default branch name when calling `deploydocs()`. ([#2854])
* Don't escape characters (such as ~) in URLs when writing LaTeX. ([#2210])

Expand Down
7 changes: 5 additions & 2 deletions assets/html/js/search.js
Original file line number Diff line number Diff line change
Expand Up @@ -206,6 +206,7 @@ function worker_function(documenterSearchIndex, documenterBaseURL, filters) {
// find anything if searching for "add!", only for the entire qualification
tokenize: (string) => {
const tokens = [];
const tokenSet = new Set();
let remaining = string;

// julia specific patterns
Expand All @@ -232,8 +233,9 @@ function worker_function(documenterSearchIndex, documenterBaseURL, filters) {
let match;
while ((match = pattern.exec(remaining)) != null) {
const token = match[0].trim();
if (token && !tokens.includes(token)) {
if (token && !tokenSet.has(token)) {
tokens.push(token);
tokenSet.add(token);
}
}
}
Expand All @@ -243,8 +245,9 @@ function worker_function(documenterSearchIndex, documenterBaseURL, filters) {
.split(/[\s\-,;()[\]{}]+/)
.filter((t) => t.trim());
for (const token of basicTokens) {
if (token && !tokens.includes(token)) {
if (token && !tokenSet.has(token)) {
tokens.push(token);
tokenSet.add(token);
}
}

Expand Down
9 changes: 6 additions & 3 deletions test/search/wrapper.js
Original file line number Diff line number Diff line change
Expand Up @@ -33,6 +33,7 @@ const index = new MiniSearch({
},
tokenize: (string) => {
const tokens = [];
const tokenSet = new Set();
let remaining = string;

// julia specific patterns
Expand All @@ -59,8 +60,9 @@ const index = new MiniSearch({
let match;
while ((match = pattern.exec(remaining)) != null) {
const token = match[0].trim();
if (token && !tokens.includes(token)) {
if (token && !tokenSet.has(token)) {
tokens.push(token);
tokenSet.add(token);
}
}
}
Expand All @@ -70,8 +72,9 @@ const index = new MiniSearch({
.split(/[\s\-,;()[\]{}]+/)
.filter((t) => t.trim());
for (const token of basicTokens) {
if (token && !tokens.includes(token)) {
if (token && !tokenSet.has(token)) {
tokens.push(token);
tokenSet.add(token);
}
}

Expand All @@ -89,4 +92,4 @@ const results = index.search(__QUERY__, {

// Extract unique page names from results (same logic as search.js)
const pages = [...new Set(results.map(r => r.title))];
console.log(JSON.stringify(pages.slice(0,5)));
console.log(JSON.stringify(pages.slice(0,5)));
Loading