1
- #!/usr/bin/env node
2
-
3
- 'use strict'
4
-
5
- const fs = require ( 'fs' )
6
- const util = require ( 'util' )
7
- const path = require ( 'path' )
8
- const semver = require ( 'semver' )
9
- const ethUtil = require ( 'ethereumjs-util' )
10
- const ipfsImporter = require ( 'ipfs-unixfs-importer' )
11
- const IPLD = require ( 'ipld' )
12
- const inMemory = require ( 'ipld-in-memory' )
13
- const swarmhash = require ( 'swarmhash' )
14
-
15
1
// This script updates the index files list.js and list.txt in the directories containing binaries,
16
2
// as well as the 'latest' and 'nightly' symlinks/files.
17
3
4
+ import { fileURLToPath } from 'url'
5
+ import { dirname , join } from 'path'
6
+
7
+ import {
8
+ readlinkSync ,
9
+ unlinkSync ,
10
+ symlinkSync ,
11
+ readFileSync ,
12
+ writeFile ,
13
+ readdir ,
14
+ stat ,
15
+ lstat
16
+ } from 'fs'
17
+
18
+ import semver from 'semver'
19
+ import swarmhash from 'swarmhash'
20
+ import { readFile as readFileAsync } from 'node:fs/promises'
21
+ import { keccak , sha256 } from 'ethereumjs-util'
22
+ import { importer } from 'ipfs-unixfs-importer'
23
+ import { MemoryBlockstore } from 'blockstore-core/memory'
24
+
25
+ const __filename = fileURLToPath ( import . meta. url )
26
+ const __dirname = dirname ( __filename )
27
+
18
28
// Computes the IPFS CID of `content` without persisting any blocks
// (the importer runs in hash-only mode against a throwaway in-memory blockstore).
// Returns the CID as a string; throws if the importer produces no entries.
const ipfsHash = async (content) => {
  const chunks = importer([{ content }], new MemoryBlockstore(), { onlyHash: true })

  const first = await chunks.next()
  if (first.done) {
    throw new Error('Failed to calculate an IPFS hash.')
  }

  // Close the generator early — we only need the first (root) entry.
  await chunks.return()
  return first.value.cid.toString()
}
@@ -40,14 +49,14 @@ if (typeof(module) !== 'undefined')
40
49
}
41
50
42
51
function updateSymlinkSync ( linkPathRelativeToRoot , targetRelativeToLink ) {
43
- const absoluteLinkPath = path . join ( __dirname , linkPathRelativeToRoot )
52
+ const absoluteLinkPath = join ( __dirname , linkPathRelativeToRoot )
44
53
let linkString
45
54
46
55
try {
47
- linkString = fs . readlinkSync ( absoluteLinkPath )
56
+ linkString = readlinkSync ( absoluteLinkPath )
48
57
49
58
if ( targetRelativeToLink !== linkString ) {
50
- fs . unlinkSync ( absoluteLinkPath )
59
+ unlinkSync ( absoluteLinkPath )
51
60
console . log ( 'Removed link ' + linkPathRelativeToRoot + ' -> ' + linkString )
52
61
}
53
62
} catch ( err ) {
@@ -57,29 +66,29 @@ function updateSymlinkSync (linkPathRelativeToRoot, targetRelativeToLink) {
57
66
}
58
67
59
68
if ( targetRelativeToLink !== linkString ) {
60
- fs . symlinkSync ( targetRelativeToLink , absoluteLinkPath , 'file' )
69
+ symlinkSync ( targetRelativeToLink , absoluteLinkPath , 'file' )
61
70
console . log ( 'Created link ' + linkPathRelativeToRoot + ' -> ' + targetRelativeToLink )
62
71
}
63
72
}
64
73
65
74
function updateCopy ( srcRelativeToRoot , destRelativeToRoot ) {
66
- fs . readFile ( path . join ( __dirname , srcRelativeToRoot ) , function ( err , data ) {
75
+ readFileSync ( join ( __dirname , srcRelativeToRoot ) , function ( err , data ) {
67
76
if ( err ) {
68
77
throw err
69
78
}
70
79
71
- const absoluteDest = path . join ( __dirname , destRelativeToRoot )
72
- fs . stat ( absoluteDest , function ( err , stats ) {
80
+ const absoluteDest = join ( __dirname , destRelativeToRoot )
81
+ stat ( absoluteDest , function ( err , stats ) {
73
82
if ( err && err . code !== 'ENOENT' ) {
74
83
throw err
75
84
}
76
85
77
86
// If the target is a symlink, we want to replace it with a copy rather than overwrite the file it links to
78
87
if ( ! err && stats . isSymbolicLink ( ) ) {
79
- fs . unlinkSync ( absoluteDest )
88
+ unlinkSync ( absoluteDest )
80
89
}
81
90
82
- fs . writeFile ( absoluteDest , data , function ( err ) {
91
+ writeFile ( absoluteDest , data , function ( err ) {
83
92
if ( err ) {
84
93
throw err
85
94
}
@@ -90,16 +99,16 @@ function updateCopy (srcRelativeToRoot, destRelativeToRoot) {
90
99
}
91
100
92
101
// Deletes the file at `filePathRelativeToRoot` (relative to this script's
// directory) if it exists, logging the deletion. A missing file is a no-op;
// any other lstat/unlink failure is thrown.
// NOTE(review): lstat (not stat) is used, presumably so dangling symlinks are
// also removed — confirm against callers.
function deleteIfExists (filePathRelativeToRoot) {
  const absoluteFilePath = join(__dirname, filePathRelativeToRoot)

  lstat(absoluteFilePath, function (err) {
    if (err && err.code !== 'ENOENT') {
      throw err
    }

    if (!err) {
      // Delete first, then report — previously the log line ran before
      // unlinkSync, so a failed unlink would still claim 'Deleted'.
      unlinkSync(absoluteFilePath)
      console.log('Deleted ' + filePathRelativeToRoot)
    }
  })
}
@@ -116,8 +125,8 @@ function buildVersion (build) {
116
125
}
117
126
118
127
async function makeEntry ( dir , parsedFileName , oldList ) {
119
- const pathRelativeToRoot = path . join ( dir , parsedFileName [ 0 ] )
120
- const absolutePath = path . join ( __dirname , pathRelativeToRoot )
128
+ const pathRelativeToRoot = join ( dir , parsedFileName [ 0 ] )
129
+ const absolutePath = join ( __dirname , pathRelativeToRoot )
121
130
122
131
const build = {
123
132
path : parsedFileName [ 0 ] ,
@@ -141,11 +150,10 @@ async function makeEntry (dir, parsedFileName, oldList) {
141
150
}
142
151
143
152
if ( ! build . sha256 || ! build . keccak256 || ! build . urls || build . urls . length !== 2 ) {
144
- const readFile = util . promisify ( fs . readFile )
145
- const fileContent = await readFile ( absolutePath )
146
- build . keccak256 = '0x' + ethUtil . keccak ( fileContent ) . toString ( 'hex' )
153
+ const fileContent = await readFileAsync ( absolutePath )
154
+ build . keccak256 = '0x' + keccak ( fileContent ) . toString ( 'hex' )
147
155
console . log ( "Computing hashes of '" + pathRelativeToRoot + "'" )
148
- build . sha256 = '0x' + ethUtil . sha256 ( fileContent ) . toString ( 'hex' )
156
+ build . sha256 = '0x' + sha256 ( fileContent ) . toString ( 'hex' )
149
157
build . urls = [
150
158
'bzzr://' + swarmhash ( fileContent ) . toString ( 'hex' ) ,
151
159
'dweb:/ipfs/' + await ipfsHash ( fileContent )
@@ -168,15 +176,15 @@ async function batchedAsyncMap (values, batchSize, asyncMapFunction) {
168
176
}
169
177
170
178
function processDir ( dir , options , listCallback ) {
171
- fs . readdir ( path . join ( __dirname , dir ) , { withFileTypes : true } , async function ( err , files ) {
179
+ readdir ( join ( __dirname , dir ) , { withFileTypes : true } , async function ( err , files ) {
172
180
if ( err ) {
173
181
throw err
174
182
}
175
183
176
184
let oldList
177
185
if ( options . reuseHashes ) {
178
186
try {
179
- oldList = JSON . parse ( fs . readFileSync ( path . join ( __dirname , dir , '/list.json' ) ) )
187
+ oldList = JSON . parse ( readFileSync ( join ( __dirname , dir , '/list.json' ) ) )
180
188
} catch ( err ) {
181
189
// Not being able to read the existing list is not a critical error.
182
190
// We'll just recreate it from scratch.
@@ -269,7 +277,7 @@ function processDir (dir, options, listCallback) {
269
277
270
278
// Write list.txt
271
279
// A descending list of file names.
272
- fs . writeFile ( path . join ( __dirname , dir , '/list.txt' ) , buildNames . join ( '\n' ) , function ( err ) {
280
+ writeFile ( join ( __dirname , dir , '/list.txt' ) , buildNames . join ( '\n' ) , function ( err ) {
273
281
if ( err ) {
274
282
throw err
275
283
}
@@ -278,7 +286,7 @@ function processDir (dir, options, listCallback) {
278
286
279
287
// Write bin/list.json
280
288
// Ascending list of builds and descending map of releases.
281
- fs . writeFile ( path . join ( __dirname , dir , '/list.json' ) , JSON . stringify ( { builds : parsedList , releases : releases , latestRelease : latestRelease } , null , 2 ) , function ( err ) {
289
+ writeFile ( join ( __dirname , dir , '/list.json' ) , JSON . stringify ( { builds : parsedList , releases : releases , latestRelease : latestRelease } , null , 2 ) , function ( err ) {
282
290
if ( err ) {
283
291
throw err
284
292
}
@@ -287,7 +295,7 @@ function processDir (dir, options, listCallback) {
287
295
288
296
// Write bin/list.js
289
297
// Descending list of build filenames and descending map of releases.
290
- fs . writeFile ( path . join ( __dirname , dir , '/list.js' ) , generateLegacyListJS ( buildNames , releases ) , function ( err ) {
298
+ writeFile ( join ( __dirname , dir , '/list.js' ) , generateLegacyListJS ( buildNames , releases ) , function ( err ) {
291
299
if ( err ) {
292
300
throw err
293
301
}
@@ -302,14 +310,14 @@ function processDir (dir, options, listCallback) {
302
310
303
311
binaryExtensions . forEach ( function ( extension ) {
304
312
if ( extension !== releaseExtension ) {
305
- deleteIfExists ( path . join ( dir , binaryPrefix + '-latest' + extension ) )
313
+ deleteIfExists ( join ( dir , binaryPrefix + '-latest' + extension ) )
306
314
}
307
315
} )
308
316
309
317
if ( dir === '/bin' ) {
310
- updateCopy ( path . join ( dir , latestReleaseFile ) , path . join ( dir , binaryPrefix + '-latest' + releaseExtension ) )
318
+ updateCopy ( join ( dir , latestReleaseFile ) , join ( dir , binaryPrefix + '-latest' + releaseExtension ) )
311
319
} else {
312
- updateSymlinkSync ( path . join ( dir , binaryPrefix + '-latest' + releaseExtension ) , latestReleaseFile )
320
+ updateSymlinkSync ( join ( dir , binaryPrefix + '-latest' + releaseExtension ) , latestReleaseFile )
313
321
}
314
322
}
315
323
@@ -319,11 +327,11 @@ function processDir (dir, options, listCallback) {
319
327
320
328
binaryExtensions . forEach ( function ( extension ) {
321
329
if ( extension !== nightlyExtension ) {
322
- deleteIfExists ( path . join ( dir , binaryPrefix + '-latest' + extension ) )
330
+ deleteIfExists ( join ( dir , binaryPrefix + '-latest' + extension ) )
323
331
}
324
332
} )
325
333
326
- updateSymlinkSync ( path . join ( dir , binaryPrefix + '-nightly' + nightlyExtension ) , latestBuildFile )
334
+ updateSymlinkSync ( join ( dir , binaryPrefix + '-nightly' + nightlyExtension ) , latestBuildFile )
327
335
}
328
336
} )
329
337
}
@@ -387,13 +395,13 @@ DIRS.forEach(function (dir) {
387
395
// Starting with 0.6.2 we no longer build asm.js releases and the new builds added to bin/ are all wasm.
388
396
if ( semver . gt ( release . version , '0.6.1' ) ) {
389
397
updateSymlinkSync (
390
- path . join ( '/wasm' , release . path ) ,
391
- path . join ( '..' , 'bin' , release . path )
398
+ join ( '/wasm' , release . path ) ,
399
+ join ( '..' , 'bin' , release . path )
392
400
)
393
401
} else {
394
402
updateSymlinkSync (
395
- path . join ( '/emscripten-asmjs' , 'solc-emscripten-asmjs-v' + release . longVersion + '.js' ) ,
396
- path . join ( '..' , 'bin' , release . path )
403
+ join ( '/emscripten-asmjs' , 'solc-emscripten-asmjs-v' + release . longVersion + '.js' ) ,
404
+ join ( '..' , 'bin' , release . path )
397
405
)
398
406
}
399
407
}
@@ -405,8 +413,8 @@ DIRS.forEach(function (dir) {
405
413
parsedList . forEach ( function ( release ) {
406
414
if ( release . prerelease === undefined ) {
407
415
updateSymlinkSync (
408
- path . join ( '/emscripten-wasm32' , 'solc-emscripten-wasm32-v' + release . longVersion + '.js' ) ,
409
- path . join ( '..' , 'wasm' , release . path )
416
+ join ( '/emscripten-wasm32' , 'solc-emscripten-wasm32-v' + release . longVersion + '.js' ) ,
417
+ join ( '..' , 'wasm' , release . path )
410
418
)
411
419
}
412
420
} )
0 commit comments