#!/usr/bin/env node
+import { createReadStream } from 'fs';
import { promises as fs } from 'fs';
import path from 'path';
import process from 'process';
+import crypto from 'crypto';
+import YAML from 'yaml';

function parseArgs(argv) {
  const args = {};
@@ -162,6 +165,21 @@ async function normaliseReleaseAsset(filePath) {
  return { filePath: targetPath, fileName: normalisedName };
}

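+// Stream the asset through SHA-512 and resolve with a base64 digest, the
+// encoding the auto-update YAML metadata below stores in its sha512 fields.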
+async function computeSha512(filePath) {
+  return new Promise((resolve, reject) => {
+    const hash = crypto.createHash('sha512');
+    const stream = createReadStream(filePath);
+    stream.on('data', (chunk) => hash.update(chunk));
+    stream.on('error', reject);
+    stream.on('end', () => resolve(hash.digest('base64')));
+  });
+}
+
+async function getFileSize(filePath) {
+  const stats = await fs.stat(filePath);
+  return stats.size;
+}
+
async function collectAssets(artifactRoot) {
  const releaseAssets = [];
  const updateSupportFiles = [];
@@ -191,12 +209,20 @@ async function collectAssets(artifactRoot) {
      if (!isReleaseAsset(fileName)) {
        continue;
      }
+
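+      // Hash and stat each release asset up front; both values are stored on
+      // the asset record and reused when the auto-update metadata is patched.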
+      const [sha512, size] = await Promise.all([
+        computeSha512(filePath),
+        getFileSize(filePath),
+      ]);
+
      releaseAssets.push({
        platform,
        arch,
        fileName,
        filePath,
        format: detectFormat(fileName),
+        sha512,
+        size,
      });
    }
  }
@@ -218,6 +244,117 @@ async function collectAssets(artifactRoot) {
  return { releaseAssets, updateSupportFiles };
}

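+// Rewrite each auto-update metadata YAML (presumably electron-builder's
+// latest*.yml files) so its file names, sizes, and sha512 checksums match the
+// normalised release assets.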
+async function updateMetadataFiles(metadataFiles, releaseAssets) {
+  if (metadataFiles.length === 0) {
+    return;
+  }
+
+  const assetByName = new Map(releaseAssets.map((asset) => [asset.fileName, asset]));
+
+  const ensureEntryMatchesAsset = (entry) => {
+    if (!entry) {
+      return false;
+    }
+    const key = entry.url || entry.path;
+    if (!key) {
+      return false;
+    }
+    const asset = assetByName.get(key);
+    if (!asset) {
+      return false;
+    }
+    let changed = false;
+    if (entry.url && entry.url !== asset.fileName) {
+      entry.url = asset.fileName;
+      changed = true;
+    }
+    if (entry.path && entry.path !== asset.fileName) {
+      entry.path = asset.fileName;
+      changed = true;
+    }
+    if (typeof asset.size === 'number' && entry.size !== asset.size) {
+      entry.size = asset.size;
+      changed = true;
+    }
+    if (asset.sha512 && entry.sha512 !== asset.sha512) {
+      entry.sha512 = asset.sha512;
+      changed = true;
+    }
+    return changed;
+  };
+
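+  // Parse each metadata file, patch it in memory, and only rewrite it on disk
+  // when at least one field actually changed.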
+  for (const metadataPath of metadataFiles) {
+    let parsed;
+    try {
+      const source = await fs.readFile(metadataPath, 'utf8');
+      parsed = YAML.parse(source);
+    } catch (error) {
+      console.warn(`Failed to parse auto-update metadata at ${metadataPath}:`, error);
+      continue;
+    }
+
+    if (!parsed || typeof parsed !== 'object') {
+      continue;
+    }
+
+    let changed = false;
+
+    if (Array.isArray(parsed.files)) {
+      for (const entry of parsed.files) {
+        if (ensureEntryMatchesAsset(entry)) {
+          changed = true;
+        }
+      }
+    }
+
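+    // Besides the files[] array, the metadata may carry top-level
+    // path/sha512/size fields; keep those aligned with the referenced asset too.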
+    const primaryKey =
+      (typeof parsed.path === 'string' && parsed.path) ||
+      (Array.isArray(parsed.files) && parsed.files[0] && (parsed.files[0].path || parsed.files[0].url));
+
+    if (primaryKey) {
+      const asset = assetByName.get(primaryKey);
+      if (asset) {
+        if (parsed.path !== asset.fileName) {
+          parsed.path = asset.fileName;
+          changed = true;
+        }
+        if (asset.sha512 && parsed.sha512 !== asset.sha512) {
+          parsed.sha512 = asset.sha512;
+          changed = true;
+        }
+        if (typeof asset.size === 'number' && parsed.size !== undefined && parsed.size !== asset.size) {
+          parsed.size = asset.size;
+          changed = true;
+        }
+      }
+    }
+
+    if (!parsed.sha512 && parsed.path && assetByName.has(parsed.path)) {
+      parsed.sha512 = assetByName.get(parsed.path).sha512;
+      changed = true;
+    }
+
+    if (!Array.isArray(parsed.files) && parsed.path && assetByName.has(parsed.path)) {
+      parsed.files = [
+        {
+          url: parsed.path,
+          sha512: parsed.sha512,
+          size: assetByName.get(parsed.path).size,
+        },
+      ];
+      changed = true;
+    }
+
+    if (!changed) {
+      continue;
+    }
+
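+    // lineWidth: 0 is intended to disable line wrapping in the yaml package,
+    // keeping long base64 sha512 digests on a single line.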
+    const serialised = YAML.stringify(parsed, { lineWidth: 0 }).trimEnd();
+    await fs.writeFile(metadataPath, `${serialised}\n`, 'utf8');
+    console.log(`Updated auto-update metadata checksums in ${metadataPath}`);
+  }
+}
+
function buildDownloadTable(entries, repo, tag) {
  const header = ['| Platform | Architecture | Format | Download |', '| --- | --- | --- | --- |'];
  const rows = entries.map((entry) => {
@@ -244,6 +381,7 @@ async function main() {
  const body = entryLines.join('\n').trim();

  const { releaseAssets, updateSupportFiles } = await collectAssets(artifactRoot);
+  await updateMetadataFiles(updateSupportFiles, releaseAssets);
  const table = buildDownloadTable(releaseAssets, repository, tag);

  const sections = [`# DocForge v${version}`];