
Commit 5b19225

fix(docs): fix umd import, update readme, bump deps
PATCH
1 parent 50c94b3 commit 5b19225

3 files changed: +616 -379 lines changed

README.md

Lines changed: 31 additions & 10 deletions
@@ -14,6 +14,8 @@ The reports include trend history support, allowing you to track performance imp
 
 Examples of usage with Playwright + Lighthouse (and Storybook).
 
+`npm i -D lighthouse playwright-lighthouse lighthouse-reporting`
+
 ### In your frontend or testing framework
 An example of using [playwright](https://github.com/microsoft/playwright) and [playwright-lighthouse](https://github.com/abhinaba-ghosh/playwright-lighthouse) together.

@@ -38,9 +40,13 @@ const lighthousePages = [
 ]
 
 lighthousePages.forEach(({ name, po, thresholds, swimlanes }) => {
-  playwrightLighthouseTest(name, async ({ port, baseURL }) => {
+  playwrightLighthouseTest(name, async ({ context, port, baseURL }) => {
     const onlyCategories = ['accessibility', 'seo', 'performance']
 
+    // make sure to access context or page at least once
+    // to let playwright initialize the context!
+    context // this is enough to make the test work
+
     const result: LighthouseResult = await playAudit({
       url: baseURL + po.getPath('123'),
       port,
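The test above relies on a `playwrightLighthouseTest` fixture that exposes a worker-scoped `port`. The diff does not show how that fixture is defined (it may well be exported by `lighthouse-reporting`); a minimal sketch of one common way to provide it, following the pattern documented by playwright-lighthouse and assuming the `get-port` package as an extra dev dependency, could look like this:

```ts
// hypothetical sketch only — not necessarily the fixture the README actually uses
import getPort from 'get-port'
import { chromium, test as base } from '@playwright/test'

export const playwrightLighthouseTest = base.extend<{}, { port: number }>({
  // each worker gets its own remote-debugging port
  port: [
    async ({}, use) => {
      await use(await getPort())
    },
    { scope: 'worker' },
  ],
  // launch Chromium with that port open so Lighthouse can attach to it
  browser: [
    async ({ port }, use) => {
      const browser = await chromium.launch({
        args: [`--remote-debugging-port=${port}`],
      })
      await use(browser)
      await browser.close()
    },
    { scope: 'worker' },
  ],
})
```

With a setup like this, touching `context` once (as the added comment in the diff suggests) forces Playwright to actually create the browser and context carrying the debugging port, which is what lets `playAudit` connect.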
@@ -97,8 +103,9 @@ import {
 
 playwrightLighthouseTest.setTimeout(60000)
 const lhScoresDir = path.join(process.cwd(), process.env.LH_SCORES_DIR || 'lh-scores')
-const reportDir = path.join(process.cwd(), process.env.LH_REPORT_DIR || 'lighthouse')
-const htmlFilePath = path.join(reportDir, 'index.html')
+const csvReportDir = path.join(process.cwd(), process.env.LH_CSV_REPORT_DIR || 'lighthouse')
+const htmlReportDir = path.join(process.cwd(), process.env.LH_REPORT_DIR || 'lighthouse')
+const htmlFilePath = path.join(htmlReportDir, 'index.html')
 
 // use stories.json instead of index.json for storybook v6
 // make sure to set buildStoriesJson to true in storybook main.js feature section
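The `buildStoriesJson` comment refers to Storybook's main config. A sketch of only the relevant fragment, assuming Storybook v6.4+ where the flag lives under `features` (the rest of the file is project-specific):

```js
// .storybook/main.js — only the fragment the comment above refers to
module.exports = {
  stories: ['../src/**/*.stories.@(js|jsx|ts|tsx)'],
  features: {
    buildStoriesJson: true, // emit stories.json into the static build
  },
}
```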
@@ -126,19 +133,24 @@ const runLighthouse = async (story: StorybookIndexStory, context: BrowserContext
   const thresholds = { accessibility: 100 }
   const name = story.id
 
-  const page = context.pages()[0]
-  await page.goto(`/iframe.html?id=${story.id}`)
+  // make sure to access context or page at least once
+  // to let playwright initialize the context!
+  context // this is enough to make the test work
+  // or manually open the page
+  // const page = context.pages()[0]
+  // await page.goto(`/iframe.html?id=${story.id}`)
 
   const result: LighthouseResult = await playAudit({
     url: baseURL + `/iframe.html?id=${story.id}`,
+    // page, // alternatively, pass the page instead of the `url`
     port,
     thresholds,
     reports: {
       formats: {
         html: true,
       },
       name,
-      directory: reportDir,
+      directory: htmlReportDir,
     },
     opts: {
       onlyCategories,
@@ -149,7 +161,7 @@ const runLighthouse = async (story: StorybookIndexStory, context: BrowserContext
   })
 
   const scores = getScores(result)
-  await writeCsvResult(reportDir, name, scores, thresholds)
+  await writeCsvResult(csvReportDir, name, scores, thresholds)
   await writeHtmlListEntryWithRetry(htmlFilePath, name, scores, thresholds, result.comparisonError)
   // write score results in JSON, allows generating the Average csv report
   await writeScoresToJson(lhScoresDir, name, scores, result)
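The diff never shows how the story list itself is read. A hypothetical helper matching the stories.json / index.json comment above might look like the following, where the `storybook-static` location and the `StorybookIndexStory` shape are assumptions rather than part of the README:

```ts
import fs from 'fs'
import path from 'path'

// stand-in type; the real README defines its own StorybookIndexStory
type StorybookIndexStory = { id: string }

export const loadStorybookStories = (): StorybookIndexStory[] => {
  // Storybook v7+ writes index.json with an `entries` map;
  // v6 with buildStoriesJson writes stories.json with a `stories` map
  const indexPath = path.join(process.cwd(), 'storybook-static', 'index.json')
  const index = JSON.parse(fs.readFileSync(indexPath, 'utf-8'))
  return Object.values(index.entries ?? index.stories ?? {})
}
```

Reading the file synchronously keeps it usable at test-collection time, when Playwright registers one test per story.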
@@ -167,7 +179,8 @@ const runLighthouse = async (story: StorybookIndexStory, context: BrowserContext
 import { PlaywrightTestConfig } from '@playwright/test'
 
 const baseURL = 'http://127.0.0.1:6009'
-// process.env.LH_REPORT_DIR = 'lighthouse-storybook' // adjust lighthouse output folder if required
+// process.env.LH_REPORT_DIR = 'lighthouse-html' // adjust lighthouse output folder if required
+// process.env.LH_CSV_REPORT_DIR = 'lighthouse-csv' // adjust lighthouse csv report folder if required
 // process.env.LH_SCORES_DIR = 'lh-scores' // to write and store scores in json format or write average report
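Only part of the Playwright config is visible in this diff. A sketch of how the remaining pieces could be wired to the global-setup and global-teardown files shown below, where the file paths and the `webServer` command serving the static Storybook build on port 6009 are assumptions:

```ts
import { PlaywrightTestConfig } from '@playwright/test'

const baseURL = 'http://127.0.0.1:6009'

const config: PlaywrightTestConfig = {
  globalSetup: require.resolve('./global-setup'),
  globalTeardown: require.resolve('./global-teardown'),
  use: { baseURL },
  webServer: {
    // assumption: the prebuilt Storybook is served with http-server
    command: 'npx http-server storybook-static -p 6009',
    url: baseURL,
    reuseExistingServer: true,
  },
}

export default config
```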

@@ -217,9 +230,16 @@ export default config
 <summary>global-setup.ts</summary>
 
 ```ts
+import path from 'path'
+import fs from 'fs/promises'
 import { lighthouseSetup } from 'lighthouse-reporting'
 
+const lhScoresDir = path.join(process.cwd(), process.env.LH_SCORES_DIR || 'lh-scores')
+const csvReportDir = path.join(process.cwd(), process.env.LH_CSV_REPORT_DIR || 'lighthouse')
+
 async function globalSetup() {
+  await fs.mkdir(lhScoresDir, { recursive: true })
+  await fs.mkdir(csvReportDir, { recursive: true })
   await lighthouseSetup()
 }
@@ -232,14 +252,15 @@ export default globalSetup
 <summary>global-teardown.ts</summary>
 
 ```ts
+import path from 'path'
 import { lighthousePlaywrightTeardown, buildAverageCsv } from 'lighthouse-reporting'
 
 const lhScoresDir = path.join(process.cwd(), process.env.LH_SCORES_DIR || 'lh-scores')
-const reportDir = path.join(process.cwd(), 'lighthouse')
+const csvReportDir = path.join(process.cwd(), process.env.LH_CSV_REPORT_DIR || 'lighthouse')
 
 async function globalTeardown() {
   await lighthousePlaywrightTeardown()
-  await buildAverageCsv(lhScoresDir, reportDir)
+  await buildAverageCsv(lhScoresDir, csvReportDir)
 }
 
 export default globalTeardown
