import { performance } from 'node:perf_hooks';
import { beforeEach, describe, expect, it } from 'vitest';
import type { ActionTrackEntryPayload } from '../user-timing-extensibility-api.type.js';
import { Profiler } from './profiler.js';

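// Integration tests: these exercise the Profiler against the real Node.js
// performance timeline (node:perf_hooks) rather than mocks, asserting on
// the marks and measures the profiler actually emits.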
describe('Profiler Integration', () => {
  let profiler: Profiler<Record<string, ActionTrackEntryPayload>>;

  beforeEach(() => {
    // Clear all performance entries before each test
    performance.clearMarks();
    performance.clearMeasures();

    profiler = new Profiler({
      prefix: 'test',
      track: 'integration-tests',
      color: 'primary',
      tracks: {
        async: { track: 'async-ops', color: 'secondary' },
        sync: { track: 'sync-ops', color: 'tertiary' },
      },
      enabled: true, // Explicitly enable for integration tests
    });
  });

  it('should create complete performance timeline for sync operation', () => {
    const result = profiler.measure('sync-test', () =>
      Array.from({ length: 1000 }, (_, i) => i).reduce(
        (sum, num) => sum + num,
        0,
      ),
    );

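    // Sum of 0..999 is 999 * 1000 / 2 = 499,500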
    expect(result).toBe(499_500);

    // Verify performance entries were created
    const marks = performance.getEntriesByType('mark');
    const measures = performance.getEntriesByType('measure');

    expect(marks).toEqual(
      expect.arrayContaining([
        expect.objectContaining({ name: 'test:sync-test:start' }),
        expect.objectContaining({ name: 'test:sync-test:end' }),
      ]),
    );

    expect(measures).toEqual(
      expect.arrayContaining([
        expect.objectContaining({
          name: 'test:sync-test',
          duration: expect.any(Number),
        }),
      ]),
    );
  });

  it('should create complete performance timeline for async operation', async () => {
    const result = await profiler.measureAsync('async-test', async () => {
      // Simulate async work
      await new Promise(resolve => setTimeout(resolve, 10));
      return 'async-result';
    });

    expect(result).toBe('async-result');

    // Verify performance entries were created
    const marks = performance.getEntriesByType('mark');
    const measures = performance.getEntriesByType('measure');

    expect(marks).toEqual(
      expect.arrayContaining([
        expect.objectContaining({ name: 'test:async-test:start' }),
        expect.objectContaining({ name: 'test:async-test:end' }),
      ]),
    );

    expect(measures).toEqual(
      expect.arrayContaining([
        expect.objectContaining({
          name: 'test:async-test',
          duration: expect.any(Number),
        }),
      ]),
    );
  });

  it('should handle nested measurements correctly', () => {
    profiler.measure('outer', () => {
      profiler.measure('inner', () => 'inner-result');
      return 'outer-result';
    });

    const marks = performance.getEntriesByType('mark');
    const measures = performance.getEntriesByType('measure');

    expect(marks).toHaveLength(4); // 2 for outer + 2 for inner
    expect(measures).toHaveLength(2); // 1 for outer + 1 for inner

    // Check all marks exist
    const markNames = marks.map(m => m.name);
    expect(markNames).toStrictEqual(
      expect.arrayContaining([
        'test:outer:start',
        'test:outer:end',
        'test:inner:start',
        'test:inner:end',
      ]),
    );

    // Check all measures exist
    const measureNames = measures.map(m => m.name);
    expect(measureNames).toStrictEqual(
      expect.arrayContaining(['test:outer', 'test:inner']),
    );
  });

  it('should create markers with proper metadata', () => {
    profiler.marker('test-marker', {
      color: 'warning',
      tooltipText: 'Test marker tooltip',
      properties: [
        ['event', 'test-event'],
        ['timestamp', Date.now()],
      ],
    });

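    // Note: the marker is expected under its raw name ('test-marker'),
    // without the 'test:' prefix that measure names receive.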
    const marks = performance.getEntriesByType('mark');
    expect(marks).toEqual(
      expect.arrayContaining([
        expect.objectContaining({
          name: 'test-marker',
          detail: {
            devtools: expect.objectContaining({
              dataType: 'marker',
              color: 'warning',
              tooltipText: 'Test marker tooltip',
              properties: [
                ['event', 'test-event'],
                ['timestamp', expect.any(Number)],
              ],
            }),
          },
        }),
      ]),
    );
  });

  it('should create proper DevTools payloads for tracks', () => {
    profiler.measure('track-test', () => 'result', {
      success: result => ({
        properties: [['result', result]],
        tooltipText: 'Track test completed',
      }),
    });

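    // The success callback receives the measured callback's return value
    // ('result') and its output is folded into the DevTools track-entry payload.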
    const measures = performance.getEntriesByType('measure');
    expect(measures).toEqual(
      expect.arrayContaining([
        expect.objectContaining({
          detail: {
            devtools: expect.objectContaining({
              dataType: 'track-entry',
              track: 'integration-tests',
              color: 'primary',
              properties: [['result', 'result']],
              tooltipText: 'Track test completed',
            }),
          },
        }),
      ]),
    );
  });

  it('should merge track defaults with measurement options', () => {
    // No per-call track override here, so the profiler's top-level
    // track and color defaults should be merged with these options
    profiler.measure('sync-op', () => 'sync-result', {
      success: result => ({
        properties: [
          ['operation', 'sync'],
          ['result', result],
        ],
      }),
    });

    const measures = performance.getEntriesByType('measure');
    expect(measures).toEqual(
      expect.arrayContaining([
        expect.objectContaining({
          detail: {
            devtools: expect.objectContaining({
              dataType: 'track-entry',
              track: 'integration-tests', // default track
              color: 'primary', // default color
              properties: [
                ['operation', 'sync'],
                ['result', 'sync-result'],
              ],
            }),
          },
        }),
      ]),
    );
  });

  it('should mark errors with the error color in DevTools', () => {
    const error = new Error('Test error');

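    // The original error should propagate to the caller, while the
    // failed measurement is still recorded on the timeline.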
    expect(() => {
      profiler.measure('error-test', () => {
        throw error;
      });
    }).toThrow(error);

    const measures = performance.getEntriesByType('measure');
    expect(measures).toEqual(
      expect.arrayContaining([
        expect.objectContaining({
          detail: {
            devtools: expect.objectContaining({
              color: 'error',
              properties: expect.arrayContaining([
                ['Error Type', 'Error'],
                ['Error Message', 'Test error'],
              ]),
            }),
          },
        }),
      ]),
    );
  });

  it('should include error metadata in DevTools properties', () => {
    const customError = new TypeError('Custom type error');

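    // 'Error Type' should reflect the error subclass name ('TypeError' here).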
    expect(() => {
      profiler.measure('custom-error-test', () => {
        throw customError;
      });
    }).toThrow(customError);

    const measures = performance.getEntriesByType('measure');
    expect(measures).toEqual(
      expect.arrayContaining([
        expect.objectContaining({
          detail: {
            devtools: expect.objectContaining({
              properties: expect.arrayContaining([
                ['Error Type', 'TypeError'],
                ['Error Message', 'Custom type error'],
              ]),
            }),
          },
        }),
      ]),
    );
  });

  it('should not create performance entries when disabled', async () => {
    const disabledProfiler = new Profiler({
      prefix: 'disabled',
      track: 'disabled-tests',
      color: 'primary',
      tracks: {},
      enabled: false,
    });

    // Test sync measurement
    const syncResult = disabledProfiler.measure('disabled-sync', () => 'sync');
    expect(syncResult).toBe('sync');

    // Test async measurement
    const asyncResult = disabledProfiler.measureAsync(
      'disabled-async',
      async () => 'async',
    );
    await expect(asyncResult).resolves.toBe('async');

    // Test marker
    disabledProfiler.marker('disabled-marker');

    // Verify no performance entries were created
    expect(performance.getEntriesByType('mark')).toHaveLength(0);
    expect(performance.getEntriesByType('measure')).toHaveLength(0);
  });
});