 #!/usr/bin/env node
 
-import { readdir, readFile, writeFile, mkdir, stat } from 'fs/promises';
+import { readdir, readFile, writeFile, stat } from 'fs/promises';
+import { createTar } from 'nanotar';
 
 async function packModules(root, urlRoot) {
     const files = await readdir(root, { withFileTypes: true });
@@ -16,42 +17,21 @@ async function packModules(root, urlRoot) {
     return packedData;
 }
 
-async function packDirectory(root, urlRoot, genRoot, dirPath = '', indent = 0) {
-    const files = await readdir(`${root}/${dirPath}`, { withFileTypes: true });
-    const packedData = [`{\n`];
+async function collectDirectory(root, dirPath = '', packedData = []) {
+    const files = await readdir(`${root}/${dirPath}`, { withFileTypes: true });
     for (const file of files) {
-        packedData.push(`${'    '.repeat(indent + 1)}${JSON.stringify(file.name)}: `);
-        const filePath = `${dirPath}/${file.name}`;
+        const filePath = dirPath === '' ? file.name : `${dirPath}/${file.name}`;
         const fileStats = await stat(`${root}/${filePath}`);
         if (fileStats.isDirectory()) {
-            packedData.push(await packDirectory(root, urlRoot, genRoot, filePath, indent + 1));
+            packedData.push({ name: filePath });
+            await collectDirectory(root, filePath, packedData);
         } else if (fileStats.isFile()) {
-            const fileData = await readFile(`${root}/${filePath}`);
-            let emittedAsText = false;
-            if (fileData.length < 131072) { // emit as a separate file if >128K
-                try {
-                    const textData = new TextDecoder('utf-8', { fatal: true }).decode(fileData);
-                    packedData.push(JSON.stringify(textData));
-                    emittedAsText = true;
-                } catch (e) {
-                    if (e instanceof TypeError) {
-                        emittedAsText = false;
-                    } else {
-                        throw e;
-                    }
-                }
-            }
-            if (!emittedAsText) {
-                await mkdir(`${genRoot}/${urlRoot}/${dirPath}`, { recursive: true });
-                await writeFile(`${genRoot}/${urlRoot}/${filePath}`, fileData);
-                packedData.push(`new URL(${JSON.stringify(urlRoot + filePath)}, import.meta.url)`);
-            }
+            packedData.push({ name: filePath, data: await readFile(`${root}/${filePath}`) });
         } else {
-            packedData.push('null');
+            console.error(`Unsupported '${filePath}'!`);
+            process.exit(2);
         }
-        packedData.push(`,\n`);
     }
-    packedData.push(`${'    '.repeat(indent)}}`);
     return packedData;
 }
 
@@ -61,22 +41,50 @@ if (!(args.length >= 2 && args.length <= 4)) {
     process.exit(1);
 }
 
-const resourceFileName = args[0];
+const resourceFilePath = args[0];
 const genDirectory = args[1];
 const shareDirectory = args[2];
 const shareRoot = args[3] || 'share';
 
-let output = `\
-export const modules = ${(await packModules(genDirectory, './')).flat(Infinity).join('')};
-`;
-if (shareDirectory)
-    output += `\
-export const filesystem = {
-    ${shareRoot}: ${(await packDirectory(shareDirectory, `./${shareRoot}`, genDirectory, '', 1)).flat(Infinity).join('')}
-};
-`;
-else
-    output += `\
-export const filesystem = {};
+let output = `\
+import { parseTar } from 'nanotar';
+
+function unpackResources(url) {
+    function defaultFetchFn(url) {
+        return fetch(url).then((resp) => resp.arrayBuffer());
+    }
+
+    return async (fetchFn = defaultFetchFn) => {
+        const root = {};
+        for (const tarEntry of parseTar(await fetchFn(url))) {
+            const nameParts = tarEntry.name.split('/');
+            const dirNames = nameParts.slice(0, -1);
+            const fileName = nameParts[nameParts.length - 1];
+            let dir = root;
+            for (const dirName of dirNames)
+                dir = dir[dirName];
+            if (tarEntry.type === 'directory') {
+                dir[fileName] = {};
+            } else {
+                dir[fileName] = tarEntry.data;
+            }
+        }
+        return root;
+    };
+}
+
 `;
-await writeFile(resourceFileName, output);
+const moduleObject = (await packModules(genDirectory, './')).flat(Infinity).join('');
+output += `export const modules = ${moduleObject};\n\n`;
+if (shareDirectory) {
+    const tarFilePath = resourceFilePath.replace(/\.js$/, '.tar');
+    await writeFile(tarFilePath, createTar(await collectDirectory(shareDirectory)));
+    const tarFileName = tarFilePath.replace(/^.+\//, '');
+    const resourceObject = `unpackResources(new URL('./${tarFileName}', import.meta.url))`;
+    output += `export const filesystem = {\n`;
+    output += `    ${shareRoot}: ${resourceObject},\n`;
+    output += `};\n`;
+} else {
+    output += `export const filesystem = {};\n`;
+}
+await writeFile(resourceFilePath, output);
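
For reference, a minimal sketch of how the generated module might be consumed after this change. It assumes the generator was invoked with an output path of resources.js and the default 'share' root; the file name, the `share` key, and the consumer code are illustrative, not part of this commit.

// Hypothetical consumer of the generated resources.js (names are illustrative).
import { filesystem } from './resources.js';

// Each filesystem entry is the thunk produced by unpackResources(): calling it
// fetches the .tar written next to resources.js and unpacks it into a nested
// plain object where directories become objects and files hold the contents
// returned by parseTar.
const shareTree = await filesystem.share();

// A custom fetchFn that resolves to an ArrayBuffer can be supplied instead of
// the default fetch()-based one, e.g. when no global fetch() is available:
// const shareTree = await filesystem.share(myFetchFn);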