@@ -34,7 +34,7 @@ const src = import.meta.resolve('./src/')
 const randomNumber = Math.floor(Math.random() * 1000000)
 
 // global config
-const website = Deno.env.get('WEBSITE')
+const WEBSITE = Deno.env.get('WEBSITE')
 const author = Deno.env.get('AUTHOR')
 const port = Deno.env.get('PORT')
 const header = await Deno.readTextFile(new URL('./util/header.html', src))
@@ -166,7 +166,7 @@ async function Others() {
   // Handle the RSS
   const rss = new URL('./feed.xml', dist)
   const itemsRss = metaData.reduce((acc, { date, title, summary }) => {
-    const url = `${website}posts/${handleUTC(date)}/`
+    const url = `${WEBSITE}posts/${handleUTC(date)}/`
     return acc + `<item>
       <title>${title}</title>
       <link>${url}</link>
@@ -180,22 +180,22 @@ async function Others() {
   const itemsSitemap = `<?xml version="1.0" encoding="UTF-8"?>
 <urlset xmlns="http://www.sitemaps.org/schemas/sitemap/0.9">
 ${metaData.reduce((acc, { date }) =>
-  `${acc}<url><loc>${website}posts/${handleUTC(date)}/</loc></url>`, '')
+  `${acc}<url><loc>${WEBSITE}posts/${handleUTC(date)}/</loc></url>`, '')
 }
 </urlset>`
 
   // robots
   const robots = new URL('./robots.txt', dist)
   const robotsContent = `User-agent: *
 Allow: /
-Sitemap: ${website}sitemap.xml`
+Sitemap: ${WEBSITE}sitemap.xml`
 
   const files = [
-    [rss, getRss(author, website, itemsRss)],
+    [rss, getRss(author, WEBSITE, itemsRss)],
     [sitemap, itemsSitemap],
     [robots, robotsContent],
   ]
-  const g = generateSingleFile(cname, 'www.fwqaq.us')
+  const g = generateSingleFile(cname, WEBSITE.slice(8, -1))
   g.next()
   files.forEach((file) => g.next(file))
 }
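A note on the last change: the CNAME host is now derived from the WEBSITE env var instead of being hard-coded. Assuming WEBSITE holds a fully qualified https:// URL with a trailing slash, e.g. 'https://www.fwqaq.us/' (the actual env value is not shown in this diff), slice(8, -1) drops the 8-character 'https://' prefix and the trailing '/':

const WEBSITE = 'https://www.fwqaq.us/' // hypothetical value, matching the old hard-coded host
WEBSITE.slice(8, -1)                    // 'www.fwqaq.us'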
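The bare g.next() before the forEach primes the generator, advancing it to its first yield so that each later g.next(file) actually delivers a [url, content] pair. generateSingleFile itself is not part of this diff; a minimal sketch of a generator matching this calling convention, under that assumption:

// Hypothetical sketch only — the real generateSingleFile is defined elsewhere.
// The priming next() runs the body up to the first yield, writing `content`
// to `cname`; each later next([url, text]) resumes the loop and writes one file.
function* generateSingleFile(cname, content) {
  Deno.writeTextFileSync(cname, content)
  while (true) {
    const [url, text] = yield // value passed to the next g.next([url, text])
    Deno.writeTextFileSync(url, text)
  }
}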