# Accepted status codes - aligned with linkspector config
accept = [200, 201, 204, 304, 403, 429]

# Cache results to speed up subsequent runs
cache = true

# Maximum number of retries per link
max_retries = 3

# Timeout per request in seconds
timeout = 20

# Maximum number of concurrent network requests
max_concurrency = 16

# Exclude localhost and local URLs
exclude = [
  # Localhost URLs from examples
  "http://localhost.*",
  "https://localhost.*",

  # Relative paths (checked by build process)
  "^/.*",

  # MySQL documentation (returns 403 for automated requests)
  "https://dev.mysql.com/.*",
  "https://www.mysql.com/.*",

  # NPM registry (returns 403 for automated requests)
30+ " https://www.npmjs.com/.*" ,
31+
32+ # OpenAI platform (returns 403 for automated requests)
33+ " https://openai.com/.*" ,
34+ " https://platform.openai.com/.*" ,
35+
36+ # Cloudflare (returns 403 for automated requests)
37+ " https://dash.cloudflare.com/.*" ,
38+ " https://playground.ai.cloudflare.com/.*" ,
39+
40+ # CockroachDB blog (certificate issues in CI)
41+ " https://www.cockroachlabs.com/blog/.*" ,
42+ ]
43+
44+ # Exclude email addresses
45+ exclude_mail = true
46+
47+ # Include files with these extensions
48+ include = [" **/*.md" , " **/*.mdx" , " **/*.html" ]
49+
50+ # Use custom headers to avoid bot detection
51+ [headers ]
52+ "User-Agent" = " Mozilla/5.0 (compatible; Link Checker/1.0)"