@@ -5,6 +5,7 @@ import { v4 as uuidv4 } from "uuid";
55import _ from "lodash" ;
66import axios from "axios" ;
77import jsonpath from "jsonpath" ;
8+ // import pLimit from 'p-limit';
89
910
1011import { CHUNK_SIZE , MIGRATION_DATA_CONFIG } from "../constants/index.js" ;
@@ -24,6 +25,7 @@ const {
2425 RTE_REFERENCES_DIR_NAME ,
2526 ENTRIES_DIR_NAME ,
2627 ASSETS_DIR_NAME ,
28+ GLOBAL_FIELDS_DIR_NAME ,
2729 // FILE
2830 LOCALE_MASTER_LOCALE ,
2931 LOCALE_FILE_NAME ,
@@ -38,6 +40,7 @@ const {
3840 ENTRIES_MASTER_FILE ,
3941 WEBHOOKS_FILE_NAME ,
4042 RTE_REFERENCES_FILE_NAME ,
43+ GLOBAL_FIELDS_FILE_NAME ,
4144
4245} = MIGRATION_DATA_CONFIG ;
4346
@@ -152,8 +155,8 @@ const processField = (
152155 if ( lang_value . sys ) {
153156 const { linkType, id } = lang_value . sys ;
154157
155- if ( linkType === "Entry" && id in entryId ) return [ entryId [ id ] ] ;
156- if ( linkType === "Asset" && id in assetId ) return assetId [ id ] ;
158+ if ( linkType === "Entry" && id in entryId ) return [ entryId ?. [ id ] ] ;
159+ if ( linkType === "Asset" && id in assetId ) return assetId ?. [ id ] ;
157160 }
158161
159162 // Handle arrays and nested objects
@@ -181,7 +184,7 @@ const processArrayFields = (array: any, entryId: any, assetId: any) => {
181184 if ( id in entryId ) {
182185 array . splice ( i , 1 , entryId [ id ] ) ;
183186 } else if ( id in assetId ) {
184- array . splice ( i , 1 , assetId [ id ] ) ;
187+ array . splice ( i , 1 , assetId ?. [ id ] ) ;
185188 }
186189 } ) ;
187190 // Clean up empty objects
@@ -342,10 +345,10 @@ const saveAsset = async (
342345 await fs . promises . mkdir ( assetPath , { recursive : true } ) ;
343346 // Write file as binary
344347 await fs . promises . writeFile ( path . join ( assetPath , fileName ) , Buffer . from ( response . data ) , "binary" ) ;
345- await customLogger ( projectId , destination_stack_id , 'info' , message ) ;
346348 await writeFile ( assetPath , `_contentstack_${ assets . sys . id } .json` , assetData [ assets . sys . id ] ) ;
347349 metadata . push ( { uid : assets . sys . id , url : fileUrl , filename : fileName } ) ;
348350 delete failedJSON [ assets . sys . id ] ;
351+ await customLogger ( projectId , destination_stack_id , 'info' , message ) ;
349352 } catch ( err : any ) {
350353 if ( retryCount === 1 ) {
351354 failedJSON [ assets . sys . id ] = {
@@ -401,31 +404,41 @@ const createAssets = async (packagePath: any, destination_stack_id:string, proje
401404 const failedJSON : any = { } ;
402405 const assetData : any = { } ;
403406 const metadata : AssetMetaData [ ] = [ ] ;
404-
407+ const fileMeta = { "1" : ASSETS_SCHEMA_FILE } ;
405408 const assets = JSON . parse ( data ) ?. assets ;
406409
407410 if ( assets && assets . length > 0 ) {
408411 const tasks = assets . map (
409- async ( asset : any ) =>
410- await saveAsset ( asset , failedJSON , assetData , metadata , projectId , destination_stack_id , 0 )
412+ async ( asset : any , index : any ) =>
413+ saveAsset ( asset , failedJSON , assetData , metadata , projectId , destination_stack_id , 0 )
411414 ) ;
415+
416+ // Concurrency-limited alternative using p-limit (max 10 parallel saveAsset calls).
417+ // Intentionally kept commented out while the unlimited Promise.all path is under test.
418+
419+ // const limit = pLimit(10); // Limit concurrent operations to 10
420+ // const tasks = assets.map((asset: any) =>
421+ // limit(() => saveAsset(asset, failedJSON, assetData, metadata, projectId, destination_stack_id, 0))
422+ // );
423+ // await Promise.all(tasks);
424+
412425 await Promise . all ( tasks ) ;
413426 const assetMasterFolderPath = path . join ( assetsSave , ASSETS_FAILED_FILE ) ;
414427
415428 await writeOneFile ( path . join ( assetsSave , ASSETS_SCHEMA_FILE ) , assetData ) ;
416- const chunks : { [ key : string ] : any } = makeChunks ( assetData ) ;
417- const refs : any = { } ;
418-
419- for ( const [ index , chunkId ] of Object . keys ( chunks ) . entries ( ) ) {
420- refs [ index + 1 ] = `${ chunkId } -${ ASSETS_FILE_NAME } ` ;
421- await writeOneFile (
422- path . join ( assetsSave , `${ chunkId } -${ ASSETS_FILE_NAME } ` ) ,
423- chunks [ chunkId ]
424- ) ;
425- }
426-
427- await writeOneFile ( path . join ( assetsSave , ASSETS_FILE_NAME ) , refs ) ;
428- await writeOneFile ( path . join ( assetsSave , ASSETS_METADATA_FILE ) , metadata ) ;
429+ // const chunks: { [key: string]: any } = makeChunks(assetData);
430+ // const refs: any = {};
431+
432+ // for (const [index, chunkId] of Object.keys(chunks).entries()) {
433+ // refs[index + 1] = `${chunkId}-${ASSETS_FILE_NAME}`;
434+ // await writeOneFile(
435+ // path.join(assetsSave, `${chunkId}-${ASSETS_FILE_NAME}`),
436+ // chunks[chunkId]
437+ // );
438+ // }
439+
440+ await writeOneFile ( path . join ( assetsSave , ASSETS_FILE_NAME ) , fileMeta ) ;
441+ // await writeOneFile(path.join(assetsSave, ASSETS_METADATA_FILE), metadata);
429442 failedJSON && await writeFile ( assetMasterFolderPath , ASSETS_FAILED_FILE , failedJSON ) ;
430443 } else {
431444 const message = getLogMessage (
@@ -720,6 +733,7 @@ const createEntry = async (packagePath: any, destination_stack_id:string, projec
720733const createLocale = async ( packagePath : string , destination_stack_id :string , projectId :string ) => {
721734 const srcFunc = 'createLocale' ;
722735 const localeSave = path . join ( DATA , destination_stack_id , LOCALE_DIR_NAME ) ;
736+ const globalFieldSave = path . join ( DATA , destination_stack_id , GLOBAL_FIELDS_DIR_NAME ) ;
723737
724738 try {
725739 const msLocale : Record < string , Locale > = { } ;
@@ -773,6 +787,7 @@ const createLocale = async (packagePath: string, destination_stack_id:string, pr
773787 await writeFile ( localeSave , LOCALE_FILE_NAME , allLocales )
774788 await writeFile ( localeSave , LOCALE_MASTER_LOCALE , msLocale )
775789 await writeFile ( localeSave , LOCALE_CF_LANGUAGE , localeList )
790+ await writeFile ( globalFieldSave , GLOBAL_FIELDS_FILE_NAME , [ ] )
776791 const message = getLogMessage (
777792 srcFunc ,
778793 `locales have been successfully transformed.` ,
0 commit comments