1 | 1 | import assert from 'assert'; |
2 | | -import { envHasExec, findAdaMain, getSymbols, which } from '../../src/helpers'; |
3 | | -import { DocumentSymbol, SymbolKind, Uri, commands, workspace } from 'vscode'; |
| 2 | +import { |
| 3 | + envHasExec, |
| 4 | + findAdaMain, |
| 5 | + getSymbols, |
| 6 | + parallelize, |
| 7 | + staggerProgress, |
| 8 | + which, |
| 9 | +} from '../../src/helpers'; |
| 10 | +import { |
| 11 | + CancellationError, |
| 12 | + DocumentSymbol, |
| 13 | + ProgressLocation, |
| 14 | + SymbolKind, |
| 15 | + Uri, |
| 16 | + commands, |
| 17 | + window, |
| 18 | + workspace, |
| 19 | +} from 'vscode'; |
4 | 20 | import { rangeToStr } from '../utils'; |
5 | 21 | import { adaExtState } from '../../src/extension'; |
6 | 22 |
|
@@ -134,3 +150,197 @@ suite('getSymbols', function () { |
134 | 150 | }; |
135 | 151 | } |
136 | 152 | }); |
| 153 | + |
| 154 | +suite('parallelize', function () { |
| 155 | + const cases = [ |
| 156 | + { |
| 157 | + size: 4, |
| 158 | + threads: 1, |
| 159 | + workDuration: 200, |
| 160 | + }, |
| 161 | + { |
| 162 | + size: 4, |
| 163 | + threads: 2, |
| 164 | + workDuration: 200, |
| 165 | + }, |
| 166 | + { |
| 167 | + size: 4, |
| 168 | + threads: 3, |
| 169 | + workDuration: 200, |
| 170 | + }, |
| 171 | + { |
| 172 | + size: 4, |
| 173 | + threads: 4, |
| 174 | + workDuration: 200, |
| 175 | + }, |
| 176 | + { |
| 177 | + size: 4, |
| 178 | + threads: 5, |
| 179 | + workDuration: 200, |
| 180 | + }, |
| 181 | + { |
| 182 | + size: 2 ** 10, |
| 183 | + threads: 3, |
| 184 | + workDuration: 0, |
| 185 | + }, |
| 186 | + { |
| 187 | + size: 2 ** 20, |
| 188 | + threads: 3, |
| 189 | + workDuration: 0, |
| 190 | + }, |
| 191 | + ]; |
| 192 | + |
| 193 | + for (const tc of cases) { |
| 194 | + testCase(tc.size, tc.threads, tc.workDuration); |
| 195 | + } |
| 196 | + |
| 197 | + function testCase(dataSize: number, threads: number, singleWorkDurationMs: number) { |
| 198 | + test(`Process ${dataSize} elements with ${threads} threads`, async function () { |
| 199 | + const data: number[] = Array.from({ length: dataSize }, (_, i) => i); |
| 200 | + let done = 0; |
| 201 | + let lastProgressDone = 0; |
| 202 | + const start = Date.now(); |
| 203 | + const result = await window.withProgress( |
| 204 | + { |
| 205 | + location: ProgressLocation.Notification, |
| 206 | + cancellable: true, |
| 207 | + title: this.currentTest?.fullTitle(), |
| 208 | + }, |
| 209 | + async (progress, token) => { |
| 210 | + return await parallelize( |
| 211 | + data, |
| 212 | + threads, |
| 213 | + async (i) => { |
| 214 | + if (token.isCancellationRequested) { |
| 215 | + throw new CancellationError(); |
| 216 | + } |
| 217 | + |
| 218 | + /** |
| 219 | + * Do we need a lock to increment this counter given |
| 220 | + * that we are processing with threads? |
| 221 | + * |
| 222 | + * The answer is no. |
| 223 | + * |
| 224 | + * In JavaScript, each function runs to completion |
| 225 | + * uninterrupted. 'async' doesn't mean that functions |
| 226 | + * run concurrently; it means that some work is |
| 227 | + * scheduled to run later. |
| 228 | + * |
| 229 | + * When a function encounters 'await', it yields control |
| 230 | + * until the awaited async operation completes. |
| 231 | + * |
| 232 | + * So at any one time, only one function is executing |
| 233 | + * and accessing the counter. It is safe to increment it |
| 234 | + * without locking. |
| 235 | + */ |
| 236 | + ++done; |
| 237 | + |
| 238 | + const reportEveryProgress = false; |
| 239 | + if (reportEveryProgress) { |
| 240 | + progress.report({ |
| 241 | + message: `${done} / ${data.length}`, |
| 242 | + increment: (1 * 100) / data.length, |
| 243 | + }); |
| 244 | + } else { |
| 245 | + lastProgressDone = staggerProgress( |
| 246 | + done, |
| 247 | + data.length, |
| 248 | + lastProgressDone, |
| 249 | + (increment) => { |
| 250 | + progress.report({ |
| 251 | + message: `${done} / ${data.length}`, |
| 252 | + increment: increment, |
| 253 | + }); |
| 254 | + }, |
| 255 | + ); |
| 256 | + } |
| 257 | + |
| 258 | + /** |
| 259 | + * Short-circuit the timing emulation if the test case doesn't want it. |
| 260 | + */ |
| 261 | + if (singleWorkDurationMs > 0) { |
| 262 | + /** |
| 263 | + * Create artificial delay in the processing of each item. |
| 264 | + */ |
| 265 | + await new Promise((resolve) => { |
| 266 | + setTimeout(resolve, singleWorkDurationMs); |
| 267 | + }); |
| 268 | + } |
| 269 | + |
| 270 | + return i + 1; |
| 271 | + }, |
| 272 | + token, |
| 273 | + ); |
| 274 | + }, |
| 275 | + ); |
| 276 | + const duration = Date.now() - start; |
| 277 | + |
| 278 | + assert.equal(result.length, data.length); |
| 279 | + |
| 280 | + /** |
| 281 | + * Don't check timing when the test case doesn't use timing emulation. |
| 282 | + */ |
| 283 | + if (singleWorkDurationMs > 0) { |
| 284 | + /** |
| 285 | + * Given the number of threads, the overall duration should |
| 286 | + * not go beyond a certain limit. For example, if the data |
| 287 | + * size is 4, and we requested 4 threads to do the |
| 288 | + * operation, then all the data is processed in one go. So |
| 289 | + * overall the computation shouldn't take more than the |
| 290 | + * time needed to process one data item (with a margin). |
| 291 | + */ |
| 292 | + const margin = singleWorkDurationMs * 0.2; |
| 293 | + /** |
| 294 | + * When processing 4 items with 3 threads, the first parallel |
| 295 | + * batch processes 3 items and a second batch processes 1 item. |
| 296 | + * So there are 2 sequential iterations, hence the use of |
| 297 | + * Math.ceil. |
| 298 | + */ |
| 299 | + const sequentialChunks = Math.ceil(dataSize / threads); |
| 300 | + const expectedTotalWorkDuration = sequentialChunks * singleWorkDurationMs + margin; |
| 301 | + |
| 302 | + assert.ok( |
| 303 | + duration < expectedTotalWorkDuration, |
| 304 | + `The computation took ${duration}ms when we expected` + |
| 305 | + ` no more than ${expectedTotalWorkDuration}ms`, |
| 306 | + ); |
| 307 | + } |
| 308 | + }); |
| 309 | + } |
| 310 | +}); |
| 311 | + |
| 312 | +suite('staggerProgress', function () { |
| 313 | + test('staggerProgress', function () { |
| 314 | + const total = 1000; |
| 315 | + let lastProgress = 0; |
| 316 | + |
| 317 | + lastProgress = staggerProgress(5, total, lastProgress, () => { |
| 318 | + /** |
| 319 | + * Progress is less than 1%, so this shouldn't be called. |
| 320 | + */ |
| 321 | + assert.fail('Progress reporting was called unexpectedly'); |
| 322 | + }); |
| 323 | + assert.equal(lastProgress, 0); |
| 324 | + |
| 325 | + let called = false; |
| 326 | + lastProgress = staggerProgress(10, total, lastProgress, (increment) => { |
| 327 | + /** |
| 328 | + * Progress is 1%. Reporting should be called. |
| 329 | + */ |
| 330 | + called = true; |
| 331 | + assert.equal(increment, 1); |
| 332 | + }); |
| 333 | + assert.ok(called, 'Progress reporting function was unexpectedly not called'); |
| 334 | + assert.equal(lastProgress, 10); |
| 335 | + called = false; |
| 336 | + lastProgress = staggerProgress(50, total, lastProgress, (increment) => { |
| 337 | + /** |
| 338 | + * Progress is 5%, 4% above the last report. Reporting should be called. |
| 339 | + */ |
| 340 | + called = true; |
| 341 | + assert.equal(increment, 4); |
| 342 | + }); |
| 343 | + assert.ok(called, 'Progress reporting function was unexpectedly not called'); |
| 344 | + assert.equal(lastProgress, 50); |
| 345 | + }); |
| 346 | +}); |
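
The timing assertions in the parallelize suite above assume that the input is processed in sequential batches of `threads` concurrently-running items, which is where the Math.ceil(dataSize / threads) factor in the expected duration comes from. The sketch below only illustrates that assumed behavior: parallelizeSketch is a hypothetical name, and the real parallelize lives in src/helpers and is not part of this diff.

import { CancellationError, CancellationToken } from 'vscode';

// Sketch only: process the input in sequential batches of `threads` items,
// run the items within a batch concurrently, and abort between batches when
// cancellation is requested. The real implementation may differ.
async function parallelizeSketch<T, R>(
    data: T[],
    threads: number,
    worker: (item: T) => Promise<R>,
    token?: CancellationToken,
): Promise<R[]> {
    const results: R[] = [];
    for (let offset = 0; offset < data.length; offset += threads) {
        if (token?.isCancellationRequested) {
            throw new CancellationError();
        }
        const batch = data.slice(offset, offset + threads);
        // Each batch completes before the next one starts.
        results.push(...(await Promise.all(batch.map((item) => worker(item)))));
    }
    return results;
}

With 4 items and 3 threads, this runs two sequential batches (3 items, then 1 item), matching the sequential-chunks reasoning in the test comments.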
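
Similarly, the staggerProgress expectations (no report below 1%, an increment of 1 at 10/1000, an increment of 4 at 50/1000 after the report at 10/1000) are consistent with a whole-percent throttle along these lines. This is a hedged sketch under those assumptions, not the actual helper from src/helpers; staggerProgressSketch is a made-up name.

// Sketch only: call `report` when the completed count has crossed at least
// one whole percent since the last report, passing the percent increment,
// and return the count at which progress was last reported.
function staggerProgressSketch(
    done: number,
    total: number,
    lastReportedDone: number,
    report: (increment: number) => void,
): number {
    const increment =
        Math.floor((done / total) * 100) - Math.floor((lastReportedDone / total) * 100);
    if (increment >= 1) {
        report(increment);
        return done;
    }
    return lastReportedDone;
}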