Skip to content

Commit 0d35096

Browse files
cmdcolin and claude committed
update typescript to latest
Co-Authored-By: Claude Sonnet 4.6 <noreply@anthropic.com>
1 parent ffca6f3 commit 0d35096

File tree

12 files changed

+1837
-435
lines changed

12 files changed

+1837
-435
lines changed

eslint.config.mjs

Lines changed: 8 additions & 6 deletions
Original file line numberDiff line numberDiff line change
@@ -9,10 +9,10 @@ export default defineConfig(
99
ignores: [
1010
'esm/**/*',
1111
'dist/**/*',
12-
'*.js',
13-
'*.mjs',
12+
'**/*.js',
13+
'**/*.mjs',
14+
'**/*.d.ts',
1415
'example/*',
15-
'test/browser.test.ts',
1616
],
1717
},
1818
{
@@ -45,9 +45,11 @@ export default defineConfig(
4545
'unicorn/prefer-spread': 'off',
4646
'unicorn/expiring-todo-comments': 'off',
4747

48-
'@typescript-eslint/no-explicit-any': 'off',
49-
'@typescript-eslint/explicit-module-boundary-types': 'off',
50-
'@typescript-eslint/ban-ts-comment': 'off',
48+
'@typescript-eslint/no-explicit-any': 'warn',
49+
'@typescript-eslint/ban-ts-comment': [
50+
'error',
51+
{ 'ts-expect-error': 'allow-with-description', 'ts-ignore': true },
52+
],
5153

5254
'import/no-unresolved': 'off',
5355
'import/extensions': ['error', 'ignorePackages'],

jest.config.js

Lines changed: 0 additions & 5 deletions
This file was deleted.

package.json

Lines changed: 9 additions & 9 deletions
Original file line numberDiff line numberDiff line change
@@ -41,19 +41,19 @@
4141
"author": "Matt Morgan",
4242
"repository": "GMOD/trix-js",
4343
"devDependencies": {
44-
"@types/node": "^25.0.10",
45-
"@typescript-eslint/eslint-plugin": "^8.53.1",
46-
"@typescript-eslint/parser": "^8.53.1",
44+
"@types/node": "^25.5.0",
45+
"@typescript-eslint/eslint-plugin": "^8.57.2",
46+
"@typescript-eslint/parser": "^8.57.2",
4747
"eslint": "^9.0.0",
4848
"eslint-plugin-import": "^2.31.0",
49-
"eslint-plugin-unicorn": "^62.0.0",
49+
"eslint-plugin-unicorn": "^63.0.0",
5050
"generic-filehandle2": "^2.0.18",
5151
"prettier": "^3.8.1",
52-
"puppeteer": "^24.0.0",
53-
"rimraf": "^6.0.1",
54-
"typescript": "^5.7.0",
55-
"typescript-eslint": "^8.53.1",
56-
"vitest": "^4.0.18"
52+
"puppeteer": "^24.40.0",
53+
"rimraf": "^6.1.3",
54+
"typescript": "^6.0.2",
55+
"typescript-eslint": "^8.57.2",
56+
"vitest": "^4.1.1"
5757
},
5858
"publishConfig": {
5959
"access": "public"

src/index.ts

Lines changed: 91 additions & 56 deletions
Original file line numberDiff line numberDiff line change
@@ -35,72 +35,84 @@ export default class Trix {
3535
}
3636

3737
async search(searchString: string, opts?: { signal?: AbortSignal }) {
38-
let resultArr = [] as [string, string][]
39-
const searchWords = searchString.split(' ')
38+
const searchWords = searchString.split(/\s+/)
4039
const firstWord = searchWords[0]
4140

4241
// validate that we have a non-empty search term
43-
if (firstWord) {
44-
const searchWord = firstWord.toLowerCase()
45-
const res = await this.getBuffer(searchWord, opts)
46-
47-
let { end, buffer } = res
48-
const { fileSize } = res
49-
let done = false
50-
// eslint-disable-next-line @typescript-eslint/no-unnecessary-condition
51-
while (!done) {
52-
const str = this.decoder.decode(buffer)
53-
54-
// slice to lastIndexOf('\n') to make sure we get complete records
55-
// since the buffer fetch could get halfway into a record
42+
if (!firstWord) {
43+
return []
44+
}
45+
46+
const searchWord = firstWord.toLowerCase()
47+
const res = await this.getBuffer(searchWord, opts)
48+
49+
let { end, buffer } = res
50+
const { fileSize } = res
51+
let resultArr = [] as [string, string][]
52+
let done = false
53+
54+
// eslint-disable-next-line @typescript-eslint/no-unnecessary-condition
55+
while (!done) {
56+
const str = this.decoder.decode(buffer, { stream: true })
57+
58+
// slice to lastIndexOf('\n') to make sure we get complete records
59+
// since the buffer fetch could get halfway into a record
60+
const lastNewline = str.lastIndexOf('\n')
61+
if (lastNewline === -1) {
62+
// if no newline, we need more data unless we're at EOF
63+
if (fileSize !== undefined && end >= fileSize) {
64+
done = true
65+
}
66+
} else {
5667
const lines = str
57-
.slice(0, str.lastIndexOf('\n'))
68+
.slice(0, lastNewline)
5869
.split('\n')
5970
.filter(Boolean)
6071

61-
const hits2 = [] as string[]
6272
for (const line of lines) {
6373
const word = line.split(' ')[0]
6474

6575
if (word.startsWith(searchWord)) {
66-
hits2.push(line)
76+
const [term, ...parts] = line.split(' ')
77+
const hits = parts
78+
.filter(Boolean)
79+
.map(elt => [term, elt.split(',')[0]] as [string, string])
80+
resultArr = resultArr.concat(hits)
6781
} else if (word > searchWord) {
6882
// we are done scanning if we are lexicographically greater than
6983
// the search string
7084
done = true
85+
break
7186
}
7287
}
73-
const hits = hits2.flatMap(line => {
74-
const [term, ...parts] = line.split(' ')
75-
return parts
76-
.filter(Boolean)
77-
.map(elt => [term, elt.split(',')[0]] as [string, string])
78-
})
79-
80-
resultArr = resultArr.concat(hits)
81-
82-
// if we are done or have filled up maxResults, break
83-
if (done || resultArr.length >= this.maxResults) {
84-
break
85-
}
88+
}
8689

87-
// avoid reading past end of file
88-
if (fileSize !== undefined && end >= fileSize) {
89-
break
90-
}
90+
// if we are done or have filled up maxResults, break
91+
if (done || resultArr.length >= this.maxResults) {
92+
break
93+
}
9194

92-
// fetch more data, clamping to file size if known
93-
let bytesToRead = CHUNK_SIZE
94-
if (fileSize !== undefined) {
95-
bytesToRead = Math.min(CHUNK_SIZE, fileSize - end)
96-
}
97-
const res2 = await this.ixFile.read(bytesToRead, end, opts)
98-
if (res2.length === 0) {
99-
break
100-
}
101-
buffer = concatUint8Array([buffer, res2])
102-
end += res2.length
95+
// avoid reading past end of file
96+
if (fileSize !== undefined && end >= fileSize) {
97+
break
98+
}
99+
100+
// fetch more data, clamping to file size if known
101+
let bytesToRead = CHUNK_SIZE
102+
if (fileSize !== undefined) {
103+
bytesToRead = Math.min(CHUNK_SIZE, fileSize - end)
104+
}
105+
106+
if (bytesToRead <= 0) {
107+
break
108+
}
109+
110+
const res2 = await this.ixFile.read(bytesToRead, end, opts)
111+
if (res2.length === 0) {
112+
break
103113
}
114+
buffer = concatUint8Array([buffer, res2])
115+
end += res2.length
104116
}
105117

106118
// de-duplicate results based on the detail column (resultArr[1])
@@ -130,23 +142,46 @@ export default class Trix {
130142
}
131143

132144
private async getBuffer(searchWord: string, opts?: { signal?: AbortSignal }) {
145+
const indexes = await this.getIndex(opts)
146+
147+
// Binary search for the largest key <= searchWord
148+
let low = 0
149+
let high = indexes.length - 1
150+
let bestIndex = -1
151+
152+
while (low <= high) {
153+
const mid = Math.floor((low + high) / 2)
154+
if (indexes[mid][0] <= searchWord) {
155+
bestIndex = mid
156+
low = mid + 1
157+
} else {
158+
high = mid - 1
159+
}
160+
}
161+
133162
let start = 0
134163
let end = CHUNK_SIZE
135-
const indexes = await this.getIndex(opts)
136-
for (const [key, value] of indexes) {
137-
const trimmedKey = key.slice(0, searchWord.length)
138-
if (trimmedKey < searchWord) {
139-
start = value
140-
end = value + CHUNK_SIZE
164+
165+
if (bestIndex !== -1) {
166+
start = indexes[bestIndex][1]
167+
// The end should be the start of the NEXT index entry to cover the full range
168+
// where the word could exist. If it's the last index, read until EOF or start+CHUNK_SIZE.
169+
if (bestIndex + 1 < indexes.length) {
170+
end = indexes[bestIndex + 1][1]
171+
} else {
172+
const fileSize = await this.getIxFileSize(opts)
173+
end = fileSize ?? start + CHUNK_SIZE
141174
}
142175
}
143176

144-
const fileSize = await this.getIxFileSize(opts)
145-
if (fileSize !== undefined) {
146-
end = Math.min(end, fileSize)
177+
// Ensure we read at least one CHUNK_SIZE to handle cases where index entries are very close
178+
// or to ensure we have enough data to start with.
179+
if (end - start < CHUNK_SIZE) {
180+
const fileSize = await this.getIxFileSize(opts)
181+
end = fileSize === undefined ? start + CHUNK_SIZE : Math.min(start + CHUNK_SIZE, fileSize)
147182
}
148183

149184
const buffer = await this.ixFile.read(end - start, start, opts)
150-
return { buffer, end, fileSize }
185+
return { buffer, end, fileSize: await this.getIxFileSize(opts) }
151186
}
152187
}
Lines changed: 35 additions & 0 deletions
Original file line numberDiff line numberDiff line change
@@ -0,0 +1,35 @@
1+
// Vitest Snapshot v1, https://vitest.dev/guide/snapshot.html
2+
3+
exports[`Browser tests with Puppeteer > handles EOF correctly with CORS enabled server 1`] = `
4+
[
5+
[
6+
"this",
7+
"id1",
8+
],
9+
[
10+
"this",
11+
"id2",
12+
],
13+
[
14+
"this",
15+
"id3",
16+
],
17+
]
18+
`;
19+
20+
exports[`Browser tests with Puppeteer > searches via HTTP with CORS enabled server 1`] = `
21+
[
22+
[
23+
"for",
24+
"id1",
25+
],
26+
[
27+
"for",
28+
"id2",
29+
],
30+
[
31+
"for",
32+
"id3",
33+
],
34+
]
35+
`;
Lines changed: 18 additions & 0 deletions
Original file line numberDiff line numberDiff line change
@@ -0,0 +1,18 @@
1+
// Vitest Snapshot v1, https://vitest.dev/guide/snapshot.html
2+
3+
exports[`Edge case handling > handles search term with trailing spaces 1`] = `
4+
[
5+
[
6+
"this",
7+
"id1",
8+
],
9+
[
10+
"this",
11+
"id2",
12+
],
13+
[
14+
"this",
15+
"id3",
16+
],
17+
]
18+
`;
Lines changed: 86 additions & 0 deletions
Original file line numberDiff line numberDiff line change
@@ -0,0 +1,86 @@
1+
// Vitest Snapshot v1, https://vitest.dev/guide/snapshot.html
2+
3+
exports[`can find pneumobase features 1`] = `
4+
[
5+
[
6+
"spv_0500",
7+
"["D39V%3A505863..508558"|"D39V_annotation_coding_features_sorted.gff"|"SPV_0500"|"SPD_0500"|"SPV_0500"|"spr0500"|"SP_0574%20%20SP_0575"|"SPV_RS11315"]",
8+
],
9+
[
10+
"spv_0501",
11+
"["D39V%3A508859..509698"|"D39V_annotation_coding_features_sorted.gff"|"bglG"|"SPD_0501"|"SPV_0501"|"spr0504"|"SP_0576"|"SPV_RS02690"|"licT"]",
12+
],
13+
[
14+
"spv_0502",
15+
"["D39V%3A509716..511554"|"D39V_annotation_coding_features_sorted.gff"|"bglF"|"SPD_0502"|"SPV_0502"|"spr0505"|"SP_0577"|"SPV_RS02695"]",
16+
],
17+
[
18+
"spv_0503",
19+
"["D39V%3A511567..512982"|"D39V_annotation_coding_features_sorted.gff"|"bglA-2"|"SPD_0503"|"SPV_0503"|"spr0506"|"SP_0578"|"SPV_RS02700"|"bglB%20%20bglH"]",
20+
],
21+
[
22+
"spv_0504",
23+
"["D39V%3A513574..514620"|"D39V_annotation_coding_features_sorted.gff"|"pheS"|"SPD_0504"|"SPV_0504"|"spr0507"|"SP_0579"|"SPV_RS02705"]",
24+
],
25+
[
26+
"spv_0505",
27+
"["D39V%3A514620..515129"|"D39V_annotation_coding_features_sorted.gff"|"paiA"|"SPD_0505"|"SPV_0505"|"spr0508"|"SP_0580"|"SPV_RS02710"]",
28+
],
29+
[
30+
"spv_0506",
31+
"["D39V%3A515206..517608"|"D39V_annotation_coding_features_sorted.gff"|"pheT"|"SPD_0506"|"SPV_0506"|"spr0509"|"SP_0581"|"SPV_RS02715"]",
32+
],
33+
[
34+
"spv_0507",
35+
"["D39V%3A517676..518677"|"D39V_annotation_coding_features_sorted.gff"|"SPV_0507"|"SPD_0507"|"SPV_0507"|"spr0510"|"SP_0582"|"SPV_RS02720"]",
36+
],
37+
[
38+
"spv_0508",
39+
"["D39V%3A518833..519322"|"D39V_annotation_coding_features_sorted.gff"|"SPV_0508"|"SPD_0508"|"SPV_0508"|"spr0511"|"SP_0583"|"SPV_RS02725"]",
40+
],
41+
[
42+
"spv_0509",
43+
"["D39V%3A519406..519672"|"D39V_annotation_coding_features_sorted.gff"|"higA"|"SPD_0509"|"SPV_0509"|"spr0513"|"SP_0584"|"SPV_RS02730"]",
44+
],
45+
[
46+
"spv_0510",
47+
"["D39V%3A519933..522182"|"D39V_annotation_coding_features_sorted.gff"|"metE"|"SPD_0510"|"SPV_0510"|"spr0514"|"SP_0585"|"SPV_RS02735"]",
48+
],
49+
[
50+
"spv_0511",
51+
"["D39V%3A522246..523112"|"D39V_annotation_coding_features_sorted.gff"|"metF"|"SPD_0511"|"SPV_0511"|"spr0515"|"SP_0586"|"SPV_RS02740"]",
52+
],
53+
[
54+
"spv_0512",
55+
"["D39V%3A523797..526010"|"D39V_annotation_coding_features_sorted.gff"|"pnp"|"SPD_0512"|"SPV_0512"|"spr0516"|"SP_0588"|"SPV_RS02750"|"pnpA"]",
56+
],
57+
[
58+
"spv_0513",
59+
"["D39V%3A526026..526643"|"D39V_annotation_coding_features_sorted.gff"|"cysE"|"SPD_0513"|"SPV_0513"|"spr0517"|"SP_0589"|"SPV_RS02755"]",
60+
],
61+
[
62+
"spv_0514",
63+
"["D39V%3A526655..527539"|"D39V_annotation_coding_features_sorted.gff"|"SPV_0514"|"SPD_0514"|"SPV_0514"|"spr0518"|"SP_0590"|"SPV_RS02760"]",
64+
],
65+
[
66+
"spv_0515",
67+
"["D39V%3A527621..528964"|"D39V_annotation_coding_features_sorted.gff"|"cysS"|"SPD_0515"|"SPV_0515"|"spr0519"|"SP_0591"|"SPV_RS02770"]",
68+
],
69+
[
70+
"spv_0516",
71+
"["D39V%3A528957..529343"|"D39V_annotation_coding_features_sorted.gff"|"mrnC"|"SPD_0516"|"SPV_0516"|"spr0520"|"SP_0592"|"SPV_RS02775"]",
72+
],
73+
[
74+
"spv_0517",
75+
"["D39V%3A529347..530231"|"D39V_annotation_coding_features_sorted.gff"|"SPV_0517"|"SPD_0517"|"SPV_0517"|"spr0521"|"SP_0593"|"SPV_RS02780"]",
76+
],
77+
[
78+
"spv_0521",
79+
"["D39V%3A531637..532914"|"D39V_annotation_coding_features_sorted.gff"|"vex1"|"SPD_0521"|"SPV_0521"|"spr0524"|"SP_0599"|"SPV_RS02790"]",
80+
],
81+
[
82+
"spv_0522",
83+
"["D39V%3A532927..533574"|"D39V_annotation_coding_features_sorted.gff"|"vex2"|"SPD_0522"|"SPV_0522"|"spr0525"|"SP_0600"|"SPV_RS02795"]",
84+
],
85+
]
86+
`;

0 commit comments

Comments (0)