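// Tests for the specs crawler. Crawls run against a local mock server
// (forceLocalFetch) and results are compared with recorded fixtures.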
import { describe, it, after } from 'node:test';
import { crawlSpecs } from "../src/lib/specs-crawler.js";
import mockServer from '../src/lib/mock-server.js';
import fs from "node:fs";
import path from "node:path";
import os from "node:os";
import assert from "node:assert";
import process from 'node:process';
import { fileURLToPath } from 'node:url';
import { loadJSON } from "../src/lib/util.js";

import packageConfig from '../package.json' with { type: 'json' };
const reffyVersion = packageConfig.version;

const scriptPath = path.dirname(fileURLToPath(import.meta.url));

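// Sample of specs crawled by the tests below. Crawl results for these specs
// are compared against the crawl-test.json fixture.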
const specs = [
  {url: "https://www.w3.org/TR/WOFF2/", nightly: {url: "https://w3c.github.io/woff/woff2/", pages: ["https://w3c.github.io/woff/woff2/page.html"]}},
  {url: "https://www.w3.org/TR/audio-output/", nightly: {url: "https://w3c.github.io/mediacapture-output/"}},
  {url: "https://www.w3.org/TR/accelerometer/", nightly: {url: "https://w3c.github.io/accelerometer/"}}
];

async function crawl() {
  const results = await crawlSpecs(specs, { forceLocalFetch: true });
  // Drop the date to avoid reporting a bogus diff when only the spec's date changed
  results.forEach(s => delete s.date);
  return results;
}

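// Run `fn` while the crawl data file at `path` is annotated with the current
// reffy version (making the recorded data look like it was produced by this
// version of the crawler), then restore the original file content.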
async function runWithAnnotatedCrawlData(path, fn) {
  const rawCrawlData = fs.readFileSync(path);
  const crawlData = JSON.parse(rawCrawlData);
  crawlData.crawler = `reffy-${reffyVersion}`;
  fs.writeFileSync(path, JSON.stringify(crawlData));
  try {
    return await fn();
  }
  finally {
    // Restore the original file even if the crawl failed
    fs.writeFileSync(path, rawCrawlData);
  }
}
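
// To refresh the fixture, run this file directly with the flag below, e.g.
// `node tests/crawl.js --update-fixture` (adjust to this file's actual path).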
if (process.argv.includes('--update-fixture')) {
  // When called with --update-fixture, we update the fixture file used for comparison
  (async function () {
    const results = await crawl();
    fs.writeFileSync(scriptPath + "/crawl-test.json", JSON.stringify(results, null, 2), "utf-8");
  })().catch(err => {
    console.error(err);
    process.exit(2);
  });
}

describe("The crawler", {timeout: 60000}, function () {
  it("runs without errors on a small sample of specs", async () => {
    const refResults = JSON.parse(fs.readFileSync(scriptPath + "/crawl-test.json", "utf-8"));
    const results = await crawl();
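    // Ignore ReSpec-generated fragment IDs, which are not stable across crawls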
    for (const result of results) {
      if (result?.ids?.length) {
        result.ids = result.ids.filter(id => !id.match(/#respec-/));
      }
    }
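    // Compare property by property first so that a failure points at the
    // specific extract that differs, then compare whole objects to catch
    // unexpected extra properties.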
    for (let i in refResults) {
      for (let prop in refResults[i]) {
        assert.deepEqual(results[i][prop], refResults[i][prop],
          `Unexpected crawl results for ${refResults[i].url} (property "${prop}")`);
      }
      assert.deepEqual(results[i], refResults[i],
        `Unexpected properties found in crawl result for ${refResults[i].url}`);
    }
  });

  it("supports 'file' URLs", async () => {
    const fileurl = (new URL('crawl-spec.html', `file://${scriptPath}/`)).href;
    const results = await crawlSpecs([{
      url: fileurl,
      nightly: { url: fileurl }
    }], { forceLocalFetch: true });
    assert.equal(results[0].title, 'A test spec');
  });

  it("matches spec shortnames", async () => {
    const output = fs.mkdtempSync(path.join(os.tmpdir(), 'reffy-'));
    const refResults = JSON.parse(fs.readFileSync(scriptPath + "/crawl-test.json", "utf-8"))
      .find(res => res.url === 'https://www.w3.org/TR/accelerometer/');
    await crawlSpecs({
      specs: ['accelerometer'],
      output: output,
      forceLocalFetch: true
    });
    const results = await loadJSON(path.resolve(output, 'index.json'));
    // Check the crawl result against the fixture entry for the same spec
    assert.equal(results.results[0].title, refResults.title);
  });

  it("matches spec series shortnames", async () => {
    const output = fs.mkdtempSync(path.join(os.tmpdir(), 'reffy-'));
    await crawlSpecs({
      specs: ['pointerlock'],
      output: output,
      forceLocalFetch: true
    });
    const results = await loadJSON(path.resolve(output, 'index.json'));
    assert.equal(results.results[0].url, 'https://www.w3.org/TR/pointerlock-2/');
  });

  it("interprets filenames relative to the current folder", async () => {
    const output = fs.mkdtempSync(path.join(os.tmpdir(), 'reffy-'));
    await crawlSpecs({
      specs: [path.join(path.relative(process.cwd(), scriptPath), 'crawl-spec.html')],
      output: output,
      forceLocalFetch: true
    });
    const results = await loadJSON(path.resolve(output, 'index.json'));
    assert.equal(results.results[0].title, 'A test spec');
  });

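  // The crawl-cache.json fixture records cache info for the spec;
  // runWithAnnotatedCrawlData stamps it with the current reffy version so the
  // recorded data looks like it was produced by this version of the crawler.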
  it("skips processing and reuses fallback data when spec cache info indicates it has not changed", async () => {
    const url = "https://www.w3.org/TR/ididnotchange/";
    const fallback = path.resolve(scriptPath, 'crawl-cache.json');
    const results = await runWithAnnotatedCrawlData(fallback, async () => crawlSpecs(
      [{ url, nightly: { url } }],
      {
        forceLocalFetch: true,
        fallback
      }));
    assert.equal(results[0].title, "Change is the only constant");
    assert.ifError(results[0].error);
    assert.equal(results[0].refs, "A useful list of refs");
  });

  it("does not return cache info when a redirection took place", async () => {
    const url = "https://www.w3.org/TR/iredirect/";
    const results = await crawlSpecs(
      [{ url, nightly: { url } }],
      { forceLocalFetch: true });
    assert.equal(results[0].title, "Recently updated");
    assert.equal(results[0].crawlCacheInfo, undefined);
  });

  it("reports HTTP error statuses", async () => {
    const url = "https://www.w3.org/TR/idontexist/";
    const results = await crawlSpecs(
      [{ url, nightly: { url } }],
      { forceLocalFetch: true });
    assert.equal(results[0].title, "[Could not be determined, see error]");
    assert(results[0].error.includes("Loading https://www.w3.org/TR/idontexist/ triggered HTTP status 404"));
  });

  it("reports errors and returns fallback data when possible", async () => {
    const url = "https://www.w3.org/TR/idontexist/";
    const fallback = path.resolve(scriptPath, 'crawl-fallback.json');
    const results = await crawlSpecs(
      [{ url, nightly: { url } }],
      {
        forceLocalFetch: true,
        fallback
      });
    assert.equal(results[0].title, "On the Internet, nobody knows you don't exist");
    assert(results[0].error.includes("Loading https://www.w3.org/TR/idontexist/ triggered HTTP status 404"));
    assert.equal(results[0].refs, "A useful list of refs");
  });

  it("saves fallback extracts in target folder", async () => {
    const output = fs.mkdtempSync(path.join(os.tmpdir(), "reffy-"));
    const url = "https://www.w3.org/TR/idontexist/";
    await crawlSpecs({
      specs: [{ url, nightly: { url } }],
      output: output,
      forceLocalFetch: true,
      fallback: path.resolve(scriptPath, "crawl-fallback.json")
    });
    const results = await loadJSON(path.resolve(output, "index.json"));
    assert.equal(results.results[0].url, "https://www.w3.org/TR/idontexist/");
    assert(results.results[0].error.includes("Loading https://www.w3.org/TR/idontexist/ triggered HTTP status 404"));
    assert.equal(results.results[0].refs, "refs/idontexist.json");
    const refs = await loadJSON(path.resolve(output, "refs", "idontexist.json"));
    assert.equal(refs.refs, "A useful list of refs");
  });

  it("reports draft CSS server issues", async () => {
    const url = "https://drafts.csswg.org/server-hiccup/";
    const results = await crawlSpecs(
      [{ url, nightly: { url } }],
      { forceLocalFetch: true });
    assert.equal(results[0].title, "[Could not be determined, see error]");
    assert(results[0].error.includes("CSS server issue detected"));
  });

  it("crawls the published spec when `--release` is set", async () => {
    const url = "https://www.w3.org/TR/remote-playback/";
    const results = await crawlSpecs(
      [{ url, nightly: { url: 'https://w3c.github.io/idontexist' }, release: { url } }],
      { publishedVersion: true, forceLocalFetch: true });
    assert.equal(results[0].title, "Published version");
  });

  it("skips non-published specs when `--release` is set", async () => {
    const url = "https://w3c.github.io/non-published/";
    const results = await crawlSpecs(
      [{ url, nightly: { url } }],
      { publishedVersion: true });
    assert.equal(results.length, 0);
  });

  it("does not attempt to crawl specs without a nightly URL", async () => {
    const url = "https://www.iso.org/standard/85253.html";
    const results = await crawlSpecs(
      [{ url }],
      { forceLocalFetch: true });
    assert.deepStrictEqual(results[0], { url, versions: [url] });
  });

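  // Fail if some of the requests that the mock server was set up to answer
  // were never made by the tests above.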
  after(() => {
    if (mockServer.pendingInterceptors().length > 0) {
      throw new Error("Additional network requests expected on:\n- " +
        mockServer.pendingInterceptors().map(miss => miss.origin + miss.path).join('\n- '));
    }
  });
});