
Commit 65e21ce

remove unused code
1 parent 1f7969b commit 65e21ce

File tree

1 file changed (+57, -181 lines)
  • apps/web/client/src/server/api/routers/publish


apps/web/client/src/server/api/routers/publish/manager.ts

Lines changed: 57 additions & 181 deletions
@@ -10,30 +10,12 @@ import type { Deployment, deploymentUpdateSchema } from '@onlook/db';
 import { addBuiltWithScript, injectBuiltWithScript } from '@onlook/growth';
 import { DeploymentStatus } from '@onlook/models';
 import { addNextBuildConfig } from '@onlook/parser';
-import {
-    convertToBase64,
-    isBinaryFile,
-    isEmptyString,
-    isNullOrUndefined,
-    LogTimer,
-    updateGitignore,
-    type FileOperations,
-} from '@onlook/utility';
-import { type FreestyleFile } from 'freestyle-sandboxes';
+import { isEmptyString, isNullOrUndefined, updateGitignore, type FileOperations } from '@onlook/utility';
 import { addDeploymentLog } from './helpers/logs';
 import { uploadBufferAndGetSignedUrl } from '@/server/utils/supabase/admin-storage';
-import { STORAGE_BUCKETS } from '@onlook/constants';
 import type { z } from 'zod';
 
-type ChunkProcessor = (
-    chunk: Record<string, FreestyleFile>,
-    chunkInfo: {
-        index: number;
-        total: number;
-        filesProcessed: number;
-        totalFiles: number;
-    },
-) => Promise<void>;
+//
 
 export class PublishManager {
     constructor(private readonly provider: Provider) { }
@@ -99,74 +81,7 @@ export class PublishManager {
         };
     }
 
-    async publish({
-        buildScript,
-        buildFlags,
-        skipBadge,
-        updateDeployment,
-    }: {
-        buildScript: string;
-        buildFlags: string;
-        skipBadge: boolean;
-        updateDeployment: (
-            deployment: z.infer<typeof deploymentUpdateSchema>,
-        ) => Promise<Deployment | null>;
-    }): Promise<Record<string, FreestyleFile>> {
-        await this.prepareProject();
-        await updateDeployment({
-            status: DeploymentStatus.IN_PROGRESS,
-            message: 'Preparing deployment...',
-            progress: 30,
-        });
 
-        if (!skipBadge) {
-            await updateDeployment({
-                status: DeploymentStatus.IN_PROGRESS,
-                message: 'Adding "Built with Onlook" badge...',
-                progress: 35,
-            });
-            await this.addBadge('./');
-        }
-
-        await updateDeployment({
-            status: DeploymentStatus.IN_PROGRESS,
-            message: 'Building project...',
-            progress: 40,
-        });
-
-        await this.runBuildStep(buildScript, buildFlags);
-
-        await updateDeployment({
-            status: DeploymentStatus.IN_PROGRESS,
-            message: 'Postprocessing project...',
-            progress: 50,
-        });
-
-        const { success: postprocessSuccess, error: postprocessError } =
-            await this.postprocessBuild();
-
-        if (!postprocessSuccess) {
-            throw new Error(`Failed to postprocess project for deployment: ${postprocessError}`);
-        }
-
-        await updateDeployment({
-            status: DeploymentStatus.IN_PROGRESS,
-            message: 'Preparing files for publish...',
-            progress: 60,
-        });
-
-        const NEXT_BUILD_OUTPUT_PATH = `${CUSTOM_OUTPUT_DIR}/standalone`;
-        return await this.serializeFiles(NEXT_BUILD_OUTPUT_PATH, {
-            onProgress: async (processed, total) => {
-                const progress = Math.floor(60 + (processed / total) * 35);
-                await updateDeployment({
-                    status: DeploymentStatus.IN_PROGRESS,
-                    message: `Processing files... ${processed}/${total}`,
-                    progress,
-                });
-            },
-        });
-    }
 
     private async addBadge(folderPath: string) {
         await injectBuiltWithScript(folderPath, this.fileOperations);
@@ -257,112 +172,50 @@ export class PublishManager {
         };
     }
 
-    private async serializeFiles(
-        currentDir: string,
-        options: {
-            chunkSize?: number;
-            batchSize?: number;
-            onProgress?: (processed: number, total: number) => Promise<void>;
-        } = {},
-    ): Promise<Record<string, FreestyleFile>> {
-        const { chunkSize = 100, batchSize = 10, onProgress } = options;
-
-        const timer = new LogTimer('File Serialization');
-        const allFiles: Record<string, FreestyleFile> = {};
-
-        try {
-            const allFilePaths = await this.collectAllFilePaths(currentDir);
-            timer.log(`File discovery completed - ${allFilePaths.length} files found`);
-
-            const filteredPaths = allFilePaths.filter((filePath) => !this.shouldSkipFile(filePath));
-            timer.log(`Filtered to ${filteredPaths.length} files after exclusions`);
+    // serializeFiles removed in favor of artifact-based deployment
 
-            const { binaryFiles, textFiles } = this.categorizeFiles(filteredPaths);
-            timer.log(
-                `Categorized: ${textFiles.length} text files, ${binaryFiles.length} binary files`,
-            );
+    // processFilesInChunks removed
 
-            let totalProcessed = 0;
-            const totalFiles = filteredPaths.length;
-
-            const handleAndMergeChunk: ChunkProcessor = async (chunk) => {
-                Object.assign(allFiles, chunk);
-            };
-
-            if (textFiles.length > 0) {
-                timer.log(`Processing ${textFiles.length} text files`);
-                await this.processFilesInChunks(
-                    textFiles,
-                    currentDir,
-                    chunkSize,
-                    batchSize,
-                    'text',
-                    async (chunk, chunkInfo) => {
-                        await handleAndMergeChunk(chunk, chunkInfo);
-                        totalProcessed += Object.keys(chunk).length;
-                        if (onProgress) {
-                            await onProgress(totalProcessed, totalFiles);
-                        }
-                    },
-                );
-                timer.log('Text files processing completed');
-            }
+
 
-            if (binaryFiles.length > 0) {
-                timer.log(`Processing ${binaryFiles.length} binary files`);
-                await this.processFilesInChunks(
-                    binaryFiles,
-                    currentDir,
-                    chunkSize,
-                    batchSize,
-                    'binary',
-                    async (chunk, chunkInfo) => {
-                        await handleAndMergeChunk(chunk, chunkInfo);
-                        totalProcessed += Object.keys(chunk).length;
-                        if (onProgress) {
-                            await onProgress(totalProcessed, totalFiles);
-                        }
-                    },
-                );
-                timer.log('Binary files processing completed');
-            }
+
 
-            timer.log(
-                `Serialization completed - ${filteredPaths.length} files processed in ${timer.getElapsed()}ms`,
-            );
+    private getProcessMemoryUsageSnapshot(): {
+        rssMB: number;
+        heapTotalMB: number;
+        heapUsedMB: number;
+        externalMB: number;
+        arrayBuffersMB: number;
+    } {
+        const mem = typeof process !== 'undefined' && process.memoryUsage ? process.memoryUsage() : ({} as NodeJS.MemoryUsage);
+        const toMB = (bytes: number | undefined) => (typeof bytes === 'number' ? +(bytes / (1024 * 1024)).toFixed(2) : 0);
+        return {
+            rssMB: toMB(mem.rss),
+            heapTotalMB: toMB(mem.heapTotal),
+            heapUsedMB: toMB(mem.heapUsed),
+            externalMB: toMB(mem.external),
+            arrayBuffersMB: toMB((mem as unknown as { arrayBuffers?: number }).arrayBuffers),
+        };
+    }
 
-            return allFiles;
-        } catch (error) {
-            console.error('Error during serialization:', error);
-            throw error;
+    private logMemoryUsage(label: string, deploymentId?: string): void {
+        const mem = this.getProcessMemoryUsageSnapshot();
+        const message = `${label} – Memory (MB): rss=${mem.rssMB}, heapUsed=${mem.heapUsedMB}, heapTotal=${mem.heapTotalMB}, external=${mem.externalMB}, arrayBuffers=${mem.arrayBuffersMB}`;
+        console.log(message);
+        if (deploymentId) {
+            addDeploymentLog(deploymentId, message, 'debug');
         }
     }
 
-    private async processFilesInChunks(
-        filePaths: string[],
-        currentDir: string,
-        chunkSize: number,
-        batchSize: number,
-        fileType: 'text' | 'binary',
-        onChunkComplete: ChunkProcessor,
-    ): Promise<void> {
-        const chunks = this.createChunks(filePaths, chunkSize);
-        const timer = new LogTimer('Chunking');
+
 
-        console.log(`Starting processing of ${filePaths.length} files in chunks of ${chunkSize}`);
+
 
-        for (let chunkIndex = 0; chunkIndex < chunks.length; chunkIndex++) {
-            const chunk = chunks[chunkIndex];
-            if (!chunk) {
-                continue;
-            }
+
 
-            console.log(
-                `Processing chunk ${chunkIndex + 1}/${chunks.length} (${chunk.length} files)`,
-            );
+
 
-            let chunkData: Record<string, FreestyleFile> | null =
-                await this.processChunkWithBatching(chunk, currentDir, batchSize, fileType);
+
 
             await onChunkComplete(chunkData, {
                 index: chunkIndex,
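
The two methods added above wrap Node's process.memoryUsage() and convert each field from bytes to megabytes, rounded to two decimals. A minimal standalone sketch of the same idea outside the PublishManager class (the snapshotMemoryMB helper name is hypothetical, not part of this commit):

// Standalone sketch (assumed names): snapshot current process memory in MB.
const toMB = (bytes?: number): number =>
    typeof bytes === 'number' ? +(bytes / (1024 * 1024)).toFixed(2) : 0;

function snapshotMemoryMB(): Record<string, number> {
    // process.memoryUsage() is a standard Node.js API; guard for non-Node runtimes.
    const mem =
        typeof process !== 'undefined' && process.memoryUsage
            ? process.memoryUsage()
            : ({} as NodeJS.MemoryUsage);
    return {
        rssMB: toMB(mem.rss),
        heapUsedMB: toMB(mem.heapUsed),
        heapTotalMB: toMB(mem.heapTotal),
        externalMB: toMB(mem.external),
        // Cast like the committed code does, in case @types/node lacks arrayBuffers.
        arrayBuffersMB: toMB((mem as unknown as { arrayBuffers?: number }).arrayBuffers),
    };
}

// Example checkpoint log, mirroring the format used by logMemoryUsage above.
const m = snapshotMemoryMB();
console.log(`After build - Memory (MB): rss=${m.rssMB}, heapUsed=${m.heapUsedMB}`);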
@@ -593,6 +446,7 @@ export class PublishManager {
     }): Promise<string> {
         await this.prepareProject();
         addDeploymentLog(deploymentId, 'Project prepared for deployment', 'debug');
+        this.logMemoryUsage('After prepareProject', deploymentId);
         await updateDeployment({
             status: DeploymentStatus.IN_PROGRESS,
             message: 'Preparing deployment...',
@@ -607,6 +461,7 @@ export class PublishManager {
             });
             await this.addBadge('./');
             addDeploymentLog(deploymentId, 'Badge injected', 'debug');
+            this.logMemoryUsage('After badge injection', deploymentId);
         }
 
         await updateDeployment({
@@ -615,8 +470,10 @@ export class PublishManager {
             progress: 40,
         });
         addDeploymentLog(deploymentId, `Running build: ${buildScript} ${buildFlags ?? ''}`.trim(), 'info');
+        this.logMemoryUsage('Before build', deploymentId);
         await this.runBuildStep(buildScript, buildFlags);
         addDeploymentLog(deploymentId, 'Build step completed', 'success');
+        this.logMemoryUsage('After build', deploymentId);
 
         await updateDeployment({
             status: DeploymentStatus.IN_PROGRESS,
@@ -631,6 +488,7 @@ export class PublishManager {
             throw new Error(`Failed to postprocess project for deployment: ${postprocessError}`);
         }
         addDeploymentLog(deploymentId, 'Postprocess completed', 'success');
+        this.logMemoryUsage('After postprocess', deploymentId);
 
         const NEXT_BUILD_OUTPUT_PATH = `${CUSTOM_OUTPUT_DIR}/standalone`;
 
@@ -645,6 +503,7 @@ export class PublishManager {
         const tarCommand = `tar -czf ${artifactLocalPath} -C ${NEXT_BUILD_OUTPUT_PATH} .`;
         addDeploymentLog(deploymentId, 'Creating tar.gz artifact', 'debug');
         await this.session.commands.run(tarCommand);
+        this.logMemoryUsage('After tar artifact creation', deploymentId);
 
         await updateDeployment({
             status: DeploymentStatus.IN_PROGRESS,
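
The artifact is produced with a plain tar invocation: -c create, -z gzip-compress, -f write to the given file, and -C <dir> . archives the contents of the standalone output directory with paths relative to it. A hedged sketch of the same step with the sandbox-specific pieces factored out (createArtifact and runInSandbox are assumed names; runInSandbox stands in for this.session.commands.run):

// Sketch (assumed helper names): create a gzipped tarball of a build output directory.
async function createArtifact(
    outputDir: string,
    artifactPath: string,
    runInSandbox: (command: string) => Promise<unknown>,
): Promise<void> {
    // -C outputDir makes the archived paths relative to the build output,
    // so the tarball can be extracted anywhere by the deploy target.
    const tarCommand = `tar -czf ${artifactPath} -C ${outputDir} .`;
    await runInSandbox(tarCommand);
}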
@@ -653,12 +512,14 @@ export class PublishManager {
         });
 
         // Read artifact bytes and upload to storage
+        this.logMemoryUsage('Before reading artifact bytes', deploymentId);
         const artifactBytes = await this.session.fs.readFile(artifactLocalPath);
         if (!artifactBytes) {
             addDeploymentLog(deploymentId, 'Failed to read build artifact', 'error');
             throw new Error('Failed to read build artifact');
         }
         const bytes: Uint8Array = artifactBytes as unknown as Uint8Array;
+        this.logMemoryUsage('After reading artifact bytes', deploymentId);
 
         const objectPath = `deployments/${deploymentId}/build.tar.gz`;
         addDeploymentLog(deploymentId, 'Uploading artifact to storage', 'info');
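
The bytes are then handed to uploadBufferAndGetSignedUrl from '@/server/utils/supabase/admin-storage'. That helper's implementation is not shown in this diff; as an illustration only, something similar could be built on supabase-js v2 along these lines (the uploadAndSign name, bucket argument, and env variable names are assumptions, not the project's actual code):

// Hedged sketch of a buffer upload + signed URL helper using supabase-js v2.
import { createClient } from '@supabase/supabase-js';

const supabase = createClient(process.env.SUPABASE_URL!, process.env.SUPABASE_SERVICE_ROLE_KEY!);

async function uploadAndSign(
    bucket: string,
    objectPath: string,
    bytes: Uint8Array,
    expiresInSeconds: number,
): Promise<string> {
    // Upload the tarball; upsert so a retried deployment can overwrite the object.
    const { error: uploadError } = await supabase.storage
        .from(bucket)
        .upload(objectPath, bytes, { contentType: 'application/gzip', upsert: true });
    if (uploadError) throw uploadError;

    // Create a short-lived signed URL the deploy target can fetch the artifact from.
    const { data, error: signError } = await supabase.storage
        .from(bucket)
        .createSignedUrl(objectPath, expiresInSeconds);
    if (signError || !data) throw signError ?? new Error('Failed to sign URL');
    return data.signedUrl;
}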
@@ -673,6 +534,21 @@ export class PublishManager {
                 expiresInSeconds: 60 * 60, // 1 hour is enough for Freestyle to fetch
             },
         );
+        this.logMemoryUsage('After upload to storage', deploymentId);
+
+        // Remove local artifact from sandbox to free disk space
+        try {
+            await this.session.fs.remove(artifactLocalPath, false);
+            addDeploymentLog(deploymentId, 'Local artifact deleted from sandbox', 'debug');
+            this.logMemoryUsage('After deleting local artifact', deploymentId);
+        } catch (cleanupError) {
+            console.warn('Failed to delete local artifact:', cleanupError);
+            addDeploymentLog(
+                deploymentId,
+                `Warning: failed to delete local artifact (${String(cleanupError)})`,
+                'debug',
+            );
+        }
 
         await updateDeployment({
             status: DeploymentStatus.IN_PROGRESS,
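
The cleanup block added above is deliberately best-effort: failing to delete the tarball is logged at debug level and never fails the deployment. The same pattern in isolation, with removeFile and logDebug as stand-ins for this.session.fs.remove and addDeploymentLog (a sketch, not the project's code):

// Sketch (assumed parameter names): best-effort cleanup that never throws.
async function cleanupArtifact(
    artifactPath: string,
    removeFile: (path: string) => Promise<void>,
    logDebug: (message: string) => void,
): Promise<void> {
    try {
        await removeFile(artifactPath);
        logDebug('Local artifact deleted from sandbox');
    } catch (cleanupError) {
        // Disk cleanup is not critical to the publish result, so downgrade to a warning.
        logDebug(`Warning: failed to delete local artifact (${String(cleanupError)})`);
    }
}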
