
Commit c14eccd (parent 618677f)

chore: Run biome fix unsafe

11 files changed: +64 -72 lines changed

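
The fixes below are the mechanical rewrites Biome applies when unsafe fixes are enabled: node: prefixes on built-in imports, template literals instead of string concatenation, dot access instead of bracket access, strict equality, and removal of redundant else branches after a return. The rule names (for example useNodejsImportProtocol, useTemplate, useLiteralKeys, noDoubleEquals, noUselessElse) are an assumption inferred from the diffs, since the commit does not record the command or configuration used. A minimal TypeScript sketch of the recurring before/after patterns, with hypothetical names:

// Hypothetical module illustrating the recurring patterns in this commit;
// the names cacheNameFor, cachePathFor, and DEBUG_EXAMPLE are illustrative,
// not taken from the repository.

// was: import { resolve } from "path";  (useNodejsImportProtocol)
import { resolve } from "node:path";

// was: process.env["DEBUG_EXAMPLE"] == "true"  (useLiteralKeys, noDoubleEquals)
const debug = process.env.DEBUG_EXAMPLE === "true";

export function cacheNameFor(url: string | undefined): string {
  if (!url) {
    return "default.json";
  }
  // was: } else { return ... }  (noUselessElse: drop the else after an early return)
  // was: url.replace(...).replace(...) + ".json"  (useTemplate)
  return `${url.replace(/https?:\/\//, "").replace(/\//g, "_")}.json`;
}

export function cachePathFor(url: string | undefined): string {
  const name = cacheNameFor(url);
  if (debug) {
    console.log(name);
  }
  return resolve(__dirname, "__fixtures__", name);
}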

__mocks__/axios.ts

Lines changed: 4 additions & 5 deletions
@@ -1,4 +1,4 @@
-import { resolve } from "path";
+import { resolve } from "node:path";
 import _axios from "axios";
 /* eslint-env jest */
 import * as fs from "fs-extra-promise";
@@ -7,8 +7,7 @@ export default async function axios(options: any) {
   const path = resolve(
     __dirname,
     "../__fixtures__",
-    (options.url || "").replace(/https?:\/\//, "").replace(/\//g, "_") +
-      ".json",
+    `${(options.url || "").replace(/https?:\/\//, "").replace(/\//g, "_")}.json`,
   );
   let content;
   if (
@@ -18,7 +17,7 @@ export default async function axios(options: any) {
    try {
      content = JSON.parse((await fs.readFileAsync(path)).toString());
    } catch (error) {
-      if (process.env["DEBUG_MOCKS"]) console.log(error);
+      if (process.env.DEBUG_MOCKS) console.log(error);
    }
  }

@@ -29,7 +28,7 @@ export default async function axios(options: any) {
    content = {
      data: response.data.toString(),
      status: response.status,
-      headers: response.headers || { ["content-type"]: contentType },
+      headers: response.headers || { "content-type": contentType },
    };
    if (
      !options.url.includes("localhost:") &&

src/__tests__/parser-compat.tests.ts

Lines changed: 7 additions & 10 deletions
@@ -18,15 +18,12 @@ describe("Same query should give same output for different parsers", () => {
    "title feed_url home_page_url items { id title url date_published author tags }";
  const feedUrl = "https://rolflekang.com/feed.xml";

-  const query =
-    "query TestQuery {" +
-    parserKeys
-      .map(
-        (key) =>
-          `${key}: feed(url: "${feedUrl}", parser: ${key}) { ${fields} }`,
-      )
-      .join("\n") +
-    " }";
+  const query = `query TestQuery {${parserKeys
+    .map(
+      (key) =>
+        `${key}: feed(url: "${feedUrl}", parser: ${key}) { ${fields} }`,
+    )
+    .join("\n")} }`;

  try {
    response = (
@@ -62,7 +59,7 @@ describe("Same query should give same output for different parsers", () => {

  for (let i = 0; i < parserKeys.length - 1; i++) {
    for (let j = 1; j < parserKeys.length; j++) {
-      if (parserKeys[i] != parserKeys[j]) {
+      if (parserKeys[i] !== parserKeys[j]) {
        test(`${parserKeys[i]} == ${parserKeys[j]}`, () => {
          try {
            expect(response.data[parserKeys[i] as ParserKey]).toEqual(

src/__tests__/utils.ts

Lines changed: 2 additions & 2 deletions
@@ -1,4 +1,4 @@
-import type { Server } from "http";
+import type { Server } from "node:http";
 /* eslint-env jest */
 import micro from "micro";

@@ -44,7 +44,7 @@ export function testGraphqlApi(
        "User-Agent": "graphql-test",
        "Content-Type": "application/json",
      },
-      data: JSON.stringify({ query: "query TestQuery { " + query + " }" }),
+      data: JSON.stringify({ query: `query TestQuery { ${query} }` }),
    })
  ).data;
} catch (error: any) {

src/errors.ts

Lines changed: 3 additions & 3 deletions
@@ -4,9 +4,9 @@ import type { ParserKey } from "./types";

 const debug = _debug("graphql-rss-parser:errors");
 const development =
-  !process.env["NODE_ENV"] ||
-  process.env["NODE_ENV"] === "development" ||
-  process.env["NODE_ENV"] === "test";
+  !process.env.NODE_ENV ||
+  process.env.NODE_ENV === "development" ||
+  process.env.NODE_ENV === "test";

 export class BaseError extends Error {
   code: string;

src/handlers/feed.ts

Lines changed: 13 additions & 14 deletions
@@ -19,23 +19,22 @@ export async function parseFromString({
 }): Promise<ParserResponse> {
   if (parser) {
     return parse(parser, content);
-  } else {
-    for (let i = 0; i < parserKeys.length; i++) {
-      try {
-        const parserKey: ParserKey | undefined = parserKeys[i];
-        if (!parserKey) {
-          continue;
-        }
-        return await parse(parserKey, content);
-      } catch (error) {
-        if (i < parserKeys.length - 1) {
-          continue;
-        }
-        throw error;
+  }
+  for (let i = 0; i < parserKeys.length; i++) {
+    try {
+      const parserKey: ParserKey | undefined = parserKeys[i];
+      if (!parserKey) {
+        continue;
+      }
+      return await parse(parserKey, content);
+    } catch (error) {
+      if (i < parserKeys.length - 1) {
+        continue;
       }
+      throw error;
     }
-    throw new BaseError("No parsers worked", "no-parser");
   }
+  throw new BaseError("No parsers worked", "no-parser");
 }

 export async function parseFromQuery({
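
With the else gone, the fallback reads straight down: try each configured parser in order, skip failures until the last index, rethrow that final error, and fall through to the "No parsers worked" BaseError only when the loop exits without returning or throwing. A sketch of the resulting flow, assembled from the added lines of this hunk:

// Sketch reconstructed from the "+" lines above. parse, parserKeys, ParserKey,
// ParserResponse, and BaseError come from the surrounding module; the
// abbreviated parameter typing here is an assumption, since the destructuring
// sits above the hunk and is not shown in the diff.
export async function parseFromString({
  parser,
  content,
}: { parser?: ParserKey; content: string }): Promise<ParserResponse> {
  if (parser) {
    return parse(parser, content);
  }
  for (let i = 0; i < parserKeys.length; i++) {
    try {
      const parserKey: ParserKey | undefined = parserKeys[i];
      if (!parserKey) {
        continue; // skip holes in the key list
      }
      return await parse(parserKey, content);
    } catch (error) {
      if (i < parserKeys.length - 1) {
        continue; // a later parser may still succeed
      }
      throw error; // the last parser failed; surface its error
    }
  }
  // reached only when the loop exits without returning or throwing
  throw new BaseError("No parsers worked", "no-parser");
}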

src/handlers/findFeed.ts

Lines changed: 13 additions & 16 deletions
@@ -1,6 +1,6 @@
 import * as cheerio from "cheerio";
-import { CheerioAPI } from "cheerio";
-import type { Node, Element } from "domhandler";
+import type { CheerioAPI } from "cheerio";
+import type { Element, Node } from "domhandler";
 import normalizeUrl from "normalize-url";

 import { BaseError } from "../errors";
@@ -26,10 +26,7 @@ export function normalizeFeedLink(baseUrl: string, link: string | undefined) {

 function mapLinkTagToUrl(normalizedUrl: string) {
   return (linkTag: Node | Element) => {
-    return normalizeFeedLink(
-      normalizedUrl,
-      (linkTag as Element).attribs["href"],
-    );
+    return normalizeFeedLink(normalizedUrl, (linkTag as Element).attribs.href);
   };
 }

@@ -52,8 +49,8 @@ async function findJsonFeedsInDom(
      return { title, link: url };
    } catch (error) {
      if (
-        process.env["NODE_ENV"] !== "production" &&
-        process.env["NODE_ENV"] !== "test"
+        process.env.NODE_ENV !== "production" &&
+        process.env.NODE_ENV !== "test"
      ) {
        console.log(error);
      }
@@ -83,8 +80,8 @@ async function findRssFeedsInDom(
      return { title, link: url };
    } catch (error) {
      if (
-        process.env["NODE_ENV"] !== "production" &&
-        process.env["NODE_ENV"] !== "test"
+        process.env.NODE_ENV !== "production" &&
+        process.env.NODE_ENV !== "test"
      ) {
        console.log(error);
      }
@@ -120,8 +117,8 @@ export async function findFeed({
    return [{ title, link: normalizedUrl }];
  } catch (error) {
    if (
-      process.env["NODE_ENV"] !== "production" &&
-      process.env["NODE_ENV"] !== "test"
+      process.env.NODE_ENV !== "production" &&
+      process.env.NODE_ENV !== "test"
    ) {
      console.log(error);
    }
@@ -139,8 +136,8 @@ export async function findFeed({
    return [{ title, link: normalizedUrl }];
  } catch (error) {
    if (
-      process.env["NODE_ENV"] !== "production" &&
-      process.env["NODE_ENV"] !== "test"
+      process.env.NODE_ENV !== "production" &&
+      process.env.NODE_ENV !== "test"
    ) {
      console.log(error);
    }
@@ -154,8 +151,8 @@ export async function findFeed({
    return [{ title, link: normalizedUrl }];
  } catch (error) {
    if (
-      process.env["NODE_ENV"] !== "production" &&
-      process.env["NODE_ENV"] !== "test"
+      process.env.NODE_ENV !== "production" &&
+      process.env.NODE_ENV !== "test"
    ) {
      console.log(error);
    }

src/index.ts

Lines changed: 2 additions & 2 deletions
@@ -16,13 +16,13 @@ export default async function createHandler(options: Options) {
    Sentry.init({
      dsn: options.sentryDsn,
      release: `graphql-rss-parser@${options.version}`,
-      environment: process.env["NODE_ENV"],
+      environment: process.env.NODE_ENV,
      ignoreErrors: sentryIgnoreErrors,
      onFatalError(error: Error) {
        // @ts-ignore error does not have response
        console.error(error, error.response);
      },
-      debug: process.env["DEBUG_SENTRY"] == "true",
+      debug: process.env.DEBUG_SENTRY === "true",
    });
  }


src/parsers/feedme.ts

Lines changed: 16 additions & 16 deletions
@@ -1,4 +1,4 @@
-import { Readable } from "stream";
+import { Readable } from "node:stream";
 import _debug from "debug";
 import FeedMe from "feedme";
 import type { FeedObject } from "feedme/dist/parser";
@@ -12,11 +12,11 @@ const findHtmlLink = (array: FeedObject[]): string | undefined => {
   const link = array.find(
     (item) =>
       typeof item === "object" &&
-      item["rel"] === "alternate" &&
-      item["type"] === "text/html",
+      item.rel === "alternate" &&
+      item.type === "text/html",
   );
-  if (typeof link === "object" && typeof link?.["href"] === "string") {
-    return link?.["href"] || undefined;
+  if (typeof link === "object" && typeof link?.href === "string") {
+    return link?.href || undefined;
   }
   return undefined;
 };
@@ -107,23 +107,23 @@ export function parse(feed: string): Promise<ParserResponse> {
    try {
      resolve({
        parser: "FEEDME",
-        title: unpack(parsed["title"], "text", true, "title"),
-        description: unpack(parsed["description"], "text"),
-        home_page_url: evaluateLink(parsed["link"]),
+        title: unpack(parsed.title, "text", true, "title"),
+        description: unpack(parsed.description, "text"),
+        home_page_url: evaluateLink(parsed.link),
        feed_url: undefined,
        items: parsed.items.map((item): Item => {
-          const pubDate = unpack(item["pubdate"], "text");
+          const pubDate = unpack(item.pubdate, "text");
          return {
-            title: unpack(item["title"], "text"),
-            url: evaluateLink(item["link"]),
-            id: unpack(item["id"] || item["guid"], "text", true, "id"),
-            content_html: unpack(item["description"], "text"),
-            tags: unpackArray(item["category"], "text"),
+            title: unpack(item.title, "text"),
+            url: evaluateLink(item.link),
+            id: unpack(item.id || item.guid, "text", true, "id"),
+            content_html: unpack(item.description, "text"),
+            tags: unpackArray(item.category, "text"),
            date_published: pubDate
              ? new Date(pubDate).toISOString()
              : pubDate,
-            authors: unpack(item["author"], "name")
-              ? [{ name: unpack(item["author"], "name") }]
+            authors: unpack(item.author, "name")
+              ? [{ name: unpack(item.author, "name") }]
              : [],
          };
        }),

src/parsers/feedparser.ts

Lines changed: 2 additions & 1 deletion
@@ -1,4 +1,4 @@
-import { Readable } from "stream";
+import { Readable } from "node:stream";
 import _debug from "debug";
 import FeedParser from "feedparser";

@@ -23,6 +23,7 @@ export function parse(feed: string): Promise<ParserResponse> {
    meta = meta || (this.meta as FeedParser.Meta);

    let item;
+    // biome-ignore lint/suspicious/noAssignInExpressions: ...
    while ((item = this.read())) {
      items.push({
        title: item.title,
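
The added line is a Biome suppression comment; its general form is // biome-ignore lint/<group>/<rule>: <reason>, and it silences the named rule for the line that follows (the reason is left as "..." in this commit). A minimal sketch of the pattern being suppressed, with hypothetical names:

import { Readable } from "node:stream";

// Hypothetical drain loop; the assignment inside the while condition is the
// pattern that lint/suspicious/noAssignInExpressions would otherwise flag.
export function drain(readable: Readable): unknown[] {
  const items: unknown[] = [];
  let item: unknown;
  // biome-ignore lint/suspicious/noAssignInExpressions: read until the stream is drained
  while ((item = readable.read()) !== null) {
    items.push(item);
  }
  return items;
}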

src/parsers/jsonfeed-v1.ts

Lines changed: 1 addition & 2 deletions
@@ -26,9 +26,8 @@ export async function parse(input: string): Promise<ParserResponse> {
        const author = (item as JsonFeedV1.Item).author;
        delete (item as JsonFeedV1.Item).author;
        return { ...item, authors: author ? [author] : [] };
-      } else {
-        return item;
      }
+      return item;
    }) || [],
  };
  debug("done transform");
