@@ -10,30 +10,12 @@ import type { Deployment, deploymentUpdateSchema } from '@onlook/db';
 import { addBuiltWithScript, injectBuiltWithScript } from '@onlook/growth';
 import { DeploymentStatus } from '@onlook/models';
 import { addNextBuildConfig } from '@onlook/parser';
-import {
-    convertToBase64,
-    isBinaryFile,
-    isEmptyString,
-    isNullOrUndefined,
-    LogTimer,
-    updateGitignore,
-    type FileOperations,
-} from '@onlook/utility';
-import { type FreestyleFile } from 'freestyle-sandboxes';
+import { isEmptyString, isNullOrUndefined, updateGitignore, type FileOperations } from '@onlook/utility';
 import { addDeploymentLog } from './helpers/logs';
 import { uploadBufferAndGetSignedUrl } from '@/server/utils/supabase/admin-storage';
-import { STORAGE_BUCKETS } from '@onlook/constants';
 import type { z } from 'zod';
 
-type ChunkProcessor = (
-    chunk: Record<string, FreestyleFile>,
-    chunkInfo: {
-        index: number;
-        total: number;
-        filesProcessed: number;
-        totalFiles: number;
-    },
-) => Promise<void>;
+//
 
 export class PublishManager {
     constructor(private readonly provider: Provider) { }
@@ -99,74 +81,7 @@ export class PublishManager {
         };
     }
 
-    async publish({
-        buildScript,
-        buildFlags,
-        skipBadge,
-        updateDeployment,
-    }: {
-        buildScript: string;
-        buildFlags: string;
-        skipBadge: boolean;
-        updateDeployment: (
-            deployment: z.infer<typeof deploymentUpdateSchema>,
-        ) => Promise<Deployment | null>;
-    }): Promise<Record<string, FreestyleFile>> {
-        await this.prepareProject();
-        await updateDeployment({
-            status: DeploymentStatus.IN_PROGRESS,
-            message: 'Preparing deployment...',
-            progress: 30,
-        });
 
-        if (!skipBadge) {
-            await updateDeployment({
-                status: DeploymentStatus.IN_PROGRESS,
-                message: 'Adding "Built with Onlook" badge...',
-                progress: 35,
-            });
-            await this.addBadge('./');
-        }
-
-        await updateDeployment({
-            status: DeploymentStatus.IN_PROGRESS,
-            message: 'Building project...',
-            progress: 40,
-        });
-
-        await this.runBuildStep(buildScript, buildFlags);
-
-        await updateDeployment({
-            status: DeploymentStatus.IN_PROGRESS,
-            message: 'Postprocessing project...',
-            progress: 50,
-        });
-
-        const { success: postprocessSuccess, error: postprocessError } =
-            await this.postprocessBuild();
-
-        if (!postprocessSuccess) {
-            throw new Error(`Failed to postprocess project for deployment: ${postprocessError}`);
-        }
-
-        await updateDeployment({
-            status: DeploymentStatus.IN_PROGRESS,
-            message: 'Preparing files for publish...',
-            progress: 60,
-        });
-
-        const NEXT_BUILD_OUTPUT_PATH = `${CUSTOM_OUTPUT_DIR}/standalone`;
-        return await this.serializeFiles(NEXT_BUILD_OUTPUT_PATH, {
-            onProgress: async (processed, total) => {
-                const progress = Math.floor(60 + (processed / total) * 35);
-                await updateDeployment({
-                    status: DeploymentStatus.IN_PROGRESS,
-                    message: `Processing files... ${processed}/${total}`,
-                    progress,
-                });
-            },
-        });
-    }
 
     private async addBadge(folderPath: string) {
         await injectBuiltWithScript(folderPath, this.fileOperations);
@@ -257,112 +172,50 @@ export class PublishManager {
         };
     }
 
-    private async serializeFiles(
-        currentDir: string,
-        options: {
-            chunkSize?: number;
-            batchSize?: number;
-            onProgress?: (processed: number, total: number) => Promise<void>;
-        } = {},
-    ): Promise<Record<string, FreestyleFile>> {
-        const { chunkSize = 100, batchSize = 10, onProgress } = options;
-
-        const timer = new LogTimer('File Serialization');
-        const allFiles: Record<string, FreestyleFile> = {};
-
-        try {
-            const allFilePaths = await this.collectAllFilePaths(currentDir);
-            timer.log(`File discovery completed - ${allFilePaths.length} files found`);
-
-            const filteredPaths = allFilePaths.filter((filePath) => !this.shouldSkipFile(filePath));
-            timer.log(`Filtered to ${filteredPaths.length} files after exclusions`);
+    // serializeFiles removed in favor of artifact-based deployment
 
-            const { binaryFiles, textFiles } = this.categorizeFiles(filteredPaths);
-            timer.log(
-                `Categorized: ${textFiles.length} text files, ${binaryFiles.length} binary files`,
-            );
+    // processFilesInChunks removed
 
-            let totalProcessed = 0;
-            const totalFiles = filteredPaths.length;
-
-            const handleAndMergeChunk: ChunkProcessor = async (chunk) => {
-                Object.assign(allFiles, chunk);
-            };
-
-            if (textFiles.length > 0) {
-                timer.log(`Processing ${textFiles.length} text files`);
-                await this.processFilesInChunks(
-                    textFiles,
-                    currentDir,
-                    chunkSize,
-                    batchSize,
-                    'text',
-                    async (chunk, chunkInfo) => {
-                        await handleAndMergeChunk(chunk, chunkInfo);
-                        totalProcessed += Object.keys(chunk).length;
-                        if (onProgress) {
-                            await onProgress(totalProcessed, totalFiles);
-                        }
-                    },
-                );
-                timer.log('Text files processing completed');
-            }
+
 
-            if (binaryFiles.length > 0) {
-                timer.log(`Processing ${binaryFiles.length} binary files`);
-                await this.processFilesInChunks(
-                    binaryFiles,
-                    currentDir,
-                    chunkSize,
-                    batchSize,
-                    'binary',
-                    async (chunk, chunkInfo) => {
-                        await handleAndMergeChunk(chunk, chunkInfo);
-                        totalProcessed += Object.keys(chunk).length;
-                        if (onProgress) {
-                            await onProgress(totalProcessed, totalFiles);
-                        }
-                    },
-                );
-                timer.log('Binary files processing completed');
-            }
+
 
-            timer.log(
-                `Serialization completed - ${filteredPaths.length} files processed in ${timer.getElapsed()}ms`,
-            );
+    private getProcessMemoryUsageSnapshot(): {
+        rssMB: number;
+        heapTotalMB: number;
+        heapUsedMB: number;
+        externalMB: number;
+        arrayBuffersMB: number;
+    } {
+        const mem = typeof process !== 'undefined' && process.memoryUsage ? process.memoryUsage() : ({} as NodeJS.MemoryUsage);
+        const toMB = (bytes: number | undefined) => (typeof bytes === 'number' ? +(bytes / (1024 * 1024)).toFixed(2) : 0);
+        return {
+            rssMB: toMB(mem.rss),
+            heapTotalMB: toMB(mem.heapTotal),
+            heapUsedMB: toMB(mem.heapUsed),
+            externalMB: toMB(mem.external),
+            arrayBuffersMB: toMB((mem as unknown as { arrayBuffers?: number }).arrayBuffers),
+        };
+    }
 
-            return allFiles;
-        } catch (error) {
-            console.error('Error during serialization:', error);
-            throw error;
+    private logMemoryUsage(label: string, deploymentId?: string): void {
+        const mem = this.getProcessMemoryUsageSnapshot();
+        const message = `${label} – Memory (MB): rss=${mem.rssMB}, heapUsed=${mem.heapUsedMB}, heapTotal=${mem.heapTotalMB}, external=${mem.externalMB}, arrayBuffers=${mem.arrayBuffersMB}`;
+        console.log(message);
+        if (deploymentId) {
+            addDeploymentLog(deploymentId, message, 'debug');
         }
     }
 
-    private async processFilesInChunks(
-        filePaths: string[],
-        currentDir: string,
-        chunkSize: number,
-        batchSize: number,
-        fileType: 'text' | 'binary',
-        onChunkComplete: ChunkProcessor,
-    ): Promise<void> {
-        const chunks = this.createChunks(filePaths, chunkSize);
-        const timer = new LogTimer('Chunking');
+
 
-        console.log(`Starting processing of ${filePaths.length} files in chunks of ${chunkSize}`);
+
 
-        for (let chunkIndex = 0; chunkIndex < chunks.length; chunkIndex++) {
-            const chunk = chunks[chunkIndex];
-            if (!chunk) {
-                continue;
-            }
+
 
-            console.log(
-                `Processing chunk ${chunkIndex + 1}/${chunks.length} (${chunk.length} files)`,
-            );
+
 
-            let chunkData: Record<string, FreestyleFile> | null =
-                await this.processChunkWithBatching(chunk, currentDir, batchSize, fileType);
+
 
             await onChunkComplete(chunkData, {
                 index: chunkIndex,
@@ -593,6 +446,7 @@ export class PublishManager {
     }): Promise<string> {
         await this.prepareProject();
         addDeploymentLog(deploymentId, 'Project prepared for deployment', 'debug');
+        this.logMemoryUsage('After prepareProject', deploymentId);
         await updateDeployment({
             status: DeploymentStatus.IN_PROGRESS,
             message: 'Preparing deployment...',
@@ -607,6 +461,7 @@ export class PublishManager {
             });
             await this.addBadge('./');
             addDeploymentLog(deploymentId, 'Badge injected', 'debug');
+            this.logMemoryUsage('After badge injection', deploymentId);
         }
 
         await updateDeployment({
@@ -615,8 +470,10 @@ export class PublishManager {
             progress: 40,
         });
         addDeploymentLog(deploymentId, `Running build: ${buildScript} ${buildFlags ?? ''}`.trim(), 'info');
+        this.logMemoryUsage('Before build', deploymentId);
         await this.runBuildStep(buildScript, buildFlags);
         addDeploymentLog(deploymentId, 'Build step completed', 'success');
+        this.logMemoryUsage('After build', deploymentId);
 
         await updateDeployment({
             status: DeploymentStatus.IN_PROGRESS,
@@ -631,6 +488,7 @@ export class PublishManager {
             throw new Error(`Failed to postprocess project for deployment: ${postprocessError}`);
         }
         addDeploymentLog(deploymentId, 'Postprocess completed', 'success');
+        this.logMemoryUsage('After postprocess', deploymentId);
 
         const NEXT_BUILD_OUTPUT_PATH = `${CUSTOM_OUTPUT_DIR}/standalone`;
 
@@ -645,6 +503,7 @@ export class PublishManager {
         const tarCommand = `tar -czf ${artifactLocalPath} -C ${NEXT_BUILD_OUTPUT_PATH} .`;
         addDeploymentLog(deploymentId, 'Creating tar.gz artifact', 'debug');
         await this.session.commands.run(tarCommand);
+        this.logMemoryUsage('After tar artifact creation', deploymentId);
 
         await updateDeployment({
             status: DeploymentStatus.IN_PROGRESS,
@@ -653,12 +512,14 @@ export class PublishManager {
         });
 
         // Read artifact bytes and upload to storage
+        this.logMemoryUsage('Before reading artifact bytes', deploymentId);
         const artifactBytes = await this.session.fs.readFile(artifactLocalPath);
         if (!artifactBytes) {
             addDeploymentLog(deploymentId, 'Failed to read build artifact', 'error');
             throw new Error('Failed to read build artifact');
         }
         const bytes: Uint8Array = artifactBytes as unknown as Uint8Array;
+        this.logMemoryUsage('After reading artifact bytes', deploymentId);
 
         const objectPath = `deployments/${deploymentId}/build.tar.gz`;
         addDeploymentLog(deploymentId, 'Uploading artifact to storage', 'info');
@@ -673,6 +534,21 @@ export class PublishManager {
                 expiresInSeconds: 60 * 60, // 1 hour is enough for Freestyle to fetch
             },
         );
+        this.logMemoryUsage('After upload to storage', deploymentId);
+
+        // Remove local artifact from sandbox to free disk space
+        try {
+            await this.session.fs.remove(artifactLocalPath, false);
+            addDeploymentLog(deploymentId, 'Local artifact deleted from sandbox', 'debug');
+            this.logMemoryUsage('After deleting local artifact', deploymentId);
+        } catch (cleanupError) {
+            console.warn('Failed to delete local artifact:', cleanupError);
+            addDeploymentLog(
+                deploymentId,
+                `Warning: failed to delete local artifact (${String(cleanupError)})`,
+                'debug',
+            );
+        }
 
         await updateDeployment({
             status: DeploymentStatus.IN_PROGRESS,