Commit 54df509

update clean up function

1 parent 6e0cf73 · commit 54df509

File tree

1 file changed (+27, -31 lines)

  • apps/web/client/src/server/api/routers/publish/manager.ts

apps/web/client/src/server/api/routers/publish/manager.ts

Lines changed: 27 additions & 31 deletions
@@ -210,6 +210,7 @@ export class PublishManager {
         } = options;
 
         const timer = new LogTimer('File Serialization');
+        const allFiles: Record<string, FreestyleFile> = {};
 
         try {
             const allFilePaths = await this.collectAllFilePaths(currentDir);
@@ -224,6 +225,11 @@
             let totalProcessed = 0;
             const totalFiles = filteredPaths.length;
 
+            const handleAndMergeChunk: ChunkProcessor = async (chunk, chunkInfo) => {
+                Object.assign(allFiles, chunk);
+                await onChunkComplete(chunk, chunkInfo);
+            };
+
             if (textFiles.length > 0) {
                 timer.log(`Processing ${textFiles.length} text files`);
                 await this.processFilesInChunks(
@@ -233,7 +239,7 @@
                     batchSize,
                     'text',
                     async (chunk, chunkInfo) => {
-                        await onChunkComplete(chunk, chunkInfo);
+                        await handleAndMergeChunk(chunk, chunkInfo);
                         totalProcessed += Object.keys(chunk).length;
                         if (onProgress) {
                             await onProgress(totalProcessed, totalFiles);
@@ -252,7 +258,7 @@
                     batchSize,
                     'binary',
                     async (chunk, chunkInfo) => {
-                        await onChunkComplete(chunk, chunkInfo);
+                        await handleAndMergeChunk(chunk, chunkInfo);
                         totalProcessed += Object.keys(chunk).length;
                         if (onProgress) {
                             await onProgress(totalProcessed, totalFiles);
@@ -263,8 +269,8 @@
             }
 
             timer.log(`Serialization completed - ${filteredPaths.length} files processed in ${timer.getElapsed()}ms`);
-
-            return this.getFinalSummary();
+
+            return allFiles;
 
         } catch (error) {
             console.error('Error during serialization:', error);
@@ -293,7 +299,7 @@
 
             console.log(`Processing chunk ${chunkIndex + 1}/${chunks.length} (${chunk.length} files)`);
 
-            const chunkData = await this.processChunkWithBatching(
+            let chunkData: Record<string, FreestyleFile> | null = await this.processChunkWithBatching(
                 chunk,
                 currentDir,
                 batchSize,
@@ -309,7 +315,7 @@
 
             console.log(`Completed chunk ${chunkIndex + 1}/${chunks.length}. Total processed: ${(chunkIndex + 1) * chunkSize}/${filePaths.length} (${timer.getElapsed()}ms elapsed)`);
 
-            this.clearChunkData(chunkData, chunk);
+            chunkData = null;
             await this.yieldForGarbageCollection();
         }
 
@@ -344,7 +350,6 @@
             console.log(`Batch ${batchIndex}/${totalBatches} completed (${batch.length} files) in ${endTime - startTime}ms`);
 
             Object.assign(chunkData, batchResults);
-            Object.keys(batchResults).forEach(key => delete batchResults[key]);
         }
 
         return chunkData;
@@ -355,40 +360,31 @@
         chunkInfo: { index: number; total: number; filesProcessed: number; totalFiles: number }
     ): Promise<void> {
         console.log(`Processing chunk ${chunkInfo.index + 1}/${chunkInfo.total} with ${Object.keys(chunk).length} files`);
-
-        const chunkSize = JSON.stringify(chunk).length;
-        console.log(`Chunk ${chunkInfo.index + 1} size: ${(chunkSize / 1024 / 1024).toFixed(2)}MB`);
+        const chunkSizeBytes = this.computeChunkSizeBytes(chunk);
+        console.log(`Chunk ${chunkInfo.index + 1} size: ${(chunkSizeBytes / 1024 / 1024).toFixed(2)}MB`);
     }
 
-    private clearChunkData(chunkData: Record<string, FreestyleFile>, filePaths: string[]): void {
-        Object.keys(chunkData).forEach(key => {
-            delete chunkData[key];
-        });
-        filePaths.length = 0;
-
-        if (global.gc && process.env.NODE_ENV === 'development') {
-            global.gc();
-        }
-    }
 
     private async yieldForGarbageCollection(): Promise<void> {
         await new Promise(resolve => setImmediate(resolve));
         await new Promise(resolve => setTimeout(resolve, 0));
     }
 
-    private getFinalSummary(): Record<string, FreestyleFile> {
-        return {
-            '__summary__': {
-                content: JSON.stringify({
-                    message: 'Files processed in chunks and sent to server',
-                    timestamp: new Date().toISOString(),
-                    processedAt: Date.now()
-                }),
-                encoding: 'utf-8' as const
+    private computeChunkSizeBytes(chunk: Record<string, FreestyleFile>): number {
+        let total = 0;
+        const textEncoder = new TextEncoder();
+        for (const file of Object.values(chunk)) {
+            const content = file.content;
+            if (file.encoding === 'base64') {
+                const len = content.length;
+                const padding = content.endsWith('==') ? 2 : content.endsWith('=') ? 1 : 0;
+                total += Math.max(0, Math.floor((len * 3) / 4) - padding);
+            } else {
+                total += textEncoder.encode(content).length;
             }
-        };
+        }
+        return total;
     }
-
     private createChunks<T>(array: T[], chunkSize: number): T[][] {
         const chunks: T[][] = [];
         for (let i = 0; i < array.length; i += chunkSize) {
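
Note on the size accounting introduced above: the new computeChunkSizeBytes replaces the old JSON.stringify(chunk).length measurement with a per-file byte count, approximating base64 payloads as 3/4 of the encoded length minus '=' padding and measuring text content with TextEncoder. The following is a minimal standalone sketch of that arithmetic, not code from this repository; the SketchFile type and estimateDecodedBytes name are hypothetical, reduced to the two fields the calculation reads.

// Illustrative sketch only; mirrors the padding arithmetic from the commit above.
type SketchFile = { content: string; encoding: 'utf-8' | 'base64' };

function estimateDecodedBytes(file: SketchFile): number {
    if (file.encoding === 'base64') {
        // Every 4 base64 characters encode 3 bytes; trailing '=' marks unused bytes.
        const padding = file.content.endsWith('==') ? 2 : file.content.endsWith('=') ? 1 : 0;
        return Math.max(0, Math.floor((file.content.length * 3) / 4) - padding);
    }
    // UTF-8 text: TextEncoder returns the encoded byte length, not the string length.
    return new TextEncoder().encode(file.content).length;
}

// Example: 'aGVsbG8=' is base64 for 'hello' (5 bytes); '€' encodes to 3 UTF-8 bytes.
console.log(estimateDecodedBytes({ content: 'aGVsbG8=', encoding: 'base64' })); // 5
console.log(estimateDecodedBytes({ content: '€', encoding: 'utf-8' }));         // 3

Using TextEncoder rather than content.length matters for non-ASCII text, where the UTF-8 byte count exceeds the JavaScript string length.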

0 commit comments