@@ -1,7 +1,10 @@
 #!/usr/bin/env node
+import { createReadStream } from 'fs';
 import { promises as fs } from 'fs';
 import path from 'path';
 import process from 'process';
+import crypto from 'crypto';
+import YAML from 'yaml';
 
 function parseArgs(argv) {
   const args = {};
@@ -115,6 +118,9 @@ function isReleaseAsset(filename) {
 }
 
 function isAutoUpdateSupportFile(filename) {
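+  // electron-builder's builder-debug.yml is a local debug artifact, not an updater asset.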
+  if (filename === 'builder-debug.yml') {
+    return false;
+  }
   if (filename.endsWith('.blockmap')) {
     return true;
   }
@@ -153,13 +159,28 @@ async function normaliseReleaseAsset(filePath) {
   const fileName = path.basename(filePath);
   const normalisedName = normaliseInstallerFileName(fileName);
   if (normalisedName === fileName) {
-    return { filePath, fileName };
+    return { filePath, fileName, originalFileName: fileName };
   }
 
   const targetPath = path.join(path.dirname(filePath), normalisedName);
   await fs.rename(filePath, targetPath);
   console.log(`Normalised release asset name: ${fileName} -> ${normalisedName}`);
-  return { filePath: targetPath, fileName: normalisedName };
+  return { filePath: targetPath, fileName: normalisedName, originalFileName: fileName };
+}
+
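+// Hash a file with a streaming SHA-512 digest, returned base64-encoded; this is the
+// value written into the sha512 fields of the auto-update metadata below.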
+async function computeSha512(filePath) {
+  return new Promise((resolve, reject) => {
+    const hash = crypto.createHash('sha512');
+    const stream = createReadStream(filePath);
+    stream.on('data', (chunk) => hash.update(chunk));
+    stream.on('error', reject);
+    stream.on('end', () => resolve(hash.digest('base64')));
+  });
+}
+
+async function getFileSize(filePath) {
+  const stats = await fs.stat(filePath);
+  return stats.size;
 }
 
 async function collectAssets(artifactRoot) {
@@ -180,7 +201,7 @@ async function collectAssets(artifactRoot) {
     const arch = normaliseArch(parts.slice(1).join('-') || parts[0]);
     const files = await walkFiles(path.join(artifactRoot, dirName));
     for (const file of files) {
-      const { filePath: normalisedPath, fileName: normalisedName } = await normaliseReleaseAsset(file);
+      const { filePath: normalisedPath, fileName: normalisedName, originalFileName } = await normaliseReleaseAsset(file);
       const fileName = normalisedName;
       const filePath = normalisedPath;
       if (isAutoUpdateSupportFile(fileName)) {
@@ -191,12 +212,21 @@ async function collectAssets(artifactRoot) {
       if (!isReleaseAsset(fileName)) {
         continue;
       }
+
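+      // Compute checksum and size once per asset; updateMetadataFiles reuses them below.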
+      const [sha512, size] = await Promise.all([
+        computeSha512(filePath),
+        getFileSize(filePath),
+      ]);
+
       releaseAssets.push({
         platform,
         arch,
         fileName,
         filePath,
+        originalFileName,
         format: detectFormat(fileName),
+        sha512,
+        size,
       });
     }
   }
@@ -218,6 +248,130 @@ async function collectAssets(artifactRoot) {
   return { releaseAssets, updateSupportFiles };
 }
 
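+// Rewrite auto-update metadata files so each entry references the normalised asset
+// name together with a matching sha512 checksum and byte size.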
+async function updateMetadataFiles(metadataFiles, releaseAssets) {
+  if (metadataFiles.length === 0) {
+    return;
+  }
+
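+  // Index assets by normalised and original file names so references written before renaming still resolve.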
+  const assetByName = new Map();
+  for (const asset of releaseAssets) {
+    assetByName.set(asset.fileName, asset);
+    const originalName = asset.originalFileName;
+    if (originalName && originalName !== asset.fileName && !assetByName.has(originalName)) {
+      assetByName.set(originalName, asset);
+    }
+  }
+
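+  // Point a single files[] entry at its collected asset (normalised url/path, refreshed
+  // size and sha512); returns true when the entry was modified.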
+  const ensureEntryMatchesAsset = (entry) => {
+    if (!entry) {
+      return false;
+    }
+    const key = entry.url || entry.path;
+    if (!key) {
+      return false;
+    }
+    let asset = assetByName.get(key);
+    if (!asset) {
+      const normalisedKey = normaliseInstallerFileName(key);
+      if (normalisedKey !== key && assetByName.has(normalisedKey)) {
+        asset = assetByName.get(normalisedKey);
+      }
+    }
+    if (!asset) {
+      return false;
+    }
+    let changed = false;
+    if (entry.url && entry.url !== asset.fileName) {
+      entry.url = asset.fileName;
+      changed = true;
+    }
+    if (entry.path && entry.path !== asset.fileName) {
+      entry.path = asset.fileName;
+      changed = true;
+    }
+    if (typeof asset.size === 'number' && entry.size !== asset.size) {
+      entry.size = asset.size;
+      changed = true;
+    }
+    if (asset.sha512 && entry.sha512 !== asset.sha512) {
+      entry.sha512 = asset.sha512;
+      changed = true;
+    }
+    return changed;
+  };
+
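+  // Parse each metadata file, sync its entries, and rewrite it only when something changed.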
+  for (const metadataPath of metadataFiles) {
+    let parsed;
+    try {
+      const source = await fs.readFile(metadataPath, 'utf8');
+      parsed = YAML.parse(source);
+    } catch (error) {
+      console.warn(`Failed to parse auto-update metadata at ${metadataPath}:`, error);
+      continue;
+    }
+
+    if (!parsed || typeof parsed !== 'object') {
+      continue;
+    }
+
+    let changed = false;
+
+    if (Array.isArray(parsed.files)) {
+      for (const entry of parsed.files) {
+        if (ensureEntryMatchesAsset(entry)) {
+          changed = true;
+        }
+      }
+    }
+
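+    // The top-level path/sha512/size fields describe the primary installer; keep them in sync too.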
+    const primaryKey =
+      (typeof parsed.path === 'string' && parsed.path) ||
+      (Array.isArray(parsed.files) && parsed.files[0] && (parsed.files[0].path || parsed.files[0].url));
+
+    if (primaryKey) {
+      const asset = assetByName.get(primaryKey);
+      if (asset) {
+        if (parsed.path !== asset.fileName) {
+          parsed.path = asset.fileName;
+          changed = true;
+        }
+        if (asset.sha512 && parsed.sha512 !== asset.sha512) {
+          parsed.sha512 = asset.sha512;
+          changed = true;
+        }
+        if (typeof asset.size === 'number' && parsed.size !== undefined && parsed.size !== asset.size) {
+          parsed.size = asset.size;
+          changed = true;
+        }
+      }
+    }
+
+    if (!parsed.sha512 && parsed.path && assetByName.has(parsed.path)) {
+      parsed.sha512 = assetByName.get(parsed.path).sha512;
+      changed = true;
+    }
+
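+    // Metadata without a files[] array gets one synthesised from the top-level fields.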
+    if (!Array.isArray(parsed.files) && parsed.path && assetByName.has(parsed.path)) {
+      parsed.files = [
+        {
+          url: parsed.path,
+          sha512: parsed.sha512,
+          size: assetByName.get(parsed.path).size,
+        },
+      ];
+      changed = true;
+    }
+
+    if (!changed) {
+      continue;
+    }
+
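+    // lineWidth: 0 disables wrapping so the long base64 sha512 values stay on one line.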
+    const serialised = YAML.stringify(parsed, { lineWidth: 0 }).trimEnd();
+    await fs.writeFile(metadataPath, `${serialised}\n`, 'utf8');
+    console.log(`Updated auto-update metadata checksums in ${metadataPath}`);
+  }
+}
+
 function buildDownloadTable(entries, repo, tag) {
   const header = ['| Platform | Architecture | Format | Download |', '| --- | --- | --- | --- |'];
   const rows = entries.map((entry) => {
@@ -244,6 +398,7 @@ async function main() {
   const body = entryLines.join('\n').trim();
 
   const { releaseAssets, updateSupportFiles } = await collectAssets(artifactRoot);
+  await updateMetadataFiles(updateSupportFiles, releaseAssets);
   const table = buildDownloadTable(releaseAssets, repository, tag);
 
   const sections = [`# DocForge v${version}`];