diff --git a/apps/api/src/app/auto-import-jobs-schedular/usecase/auto-import-jobs-schedular.ts b/apps/api/src/app/auto-import-jobs-schedular/usecase/auto-import-jobs-schedular.ts
index 16796c47e..c2ecd8dd0 100644
--- a/apps/api/src/app/auto-import-jobs-schedular/usecase/auto-import-jobs-schedular.ts
+++ b/apps/api/src/app/auto-import-jobs-schedular/usecase/auto-import-jobs-schedular.ts
@@ -17,27 +17,94 @@ export class AutoImportJobsSchedular {
@Cron(CronExpression.EVERY_MINUTE)
async handleCronSchedular() {
- console.log('Cron Running');
- await this.fetchAndExecuteScheduledJobs();
+ const startTime = new Date();
+ const memUsageStart = process.memoryUsage();
+ const cpuUsageStart = process.cpuUsage();
+
+ console.log('========================================');
+ console.log(`[AUTO-IMPORT-JOBS-SCHEDULER] Cron Started at ${startTime.toISOString()}`);
+ console.log(`[AUTO-IMPORT-JOBS-SCHEDULER] Memory Usage (Start): RSS=${(memUsageStart.rss / 1024 / 1024).toFixed(2)}MB, Heap=${(memUsageStart.heapUsed / 1024 / 1024).toFixed(2)}MB`);
+ console.log('========================================');
+
+ try {
+ await this.fetchAndExecuteScheduledJobs();
+
+ const endTime = new Date();
+ const duration = endTime.getTime() - startTime.getTime();
+ const memUsageEnd = process.memoryUsage();
+ const cpuUsageEnd = process.cpuUsage(cpuUsageStart);
+
+ console.log('========================================');
+ console.log(`[AUTO-IMPORT-JOBS-SCHEDULER] Cron Completed at ${endTime.toISOString()}`);
+ console.log(`[AUTO-IMPORT-JOBS-SCHEDULER] Duration: ${duration}ms`);
+ console.log(`[AUTO-IMPORT-JOBS-SCHEDULER] Memory Usage (End): RSS=${(memUsageEnd.rss / 1024 / 1024).toFixed(2)}MB, Heap=${(memUsageEnd.heapUsed / 1024 / 1024).toFixed(2)}MB`);
+ console.log(`[AUTO-IMPORT-JOBS-SCHEDULER] Memory Delta: RSS=${((memUsageEnd.rss - memUsageStart.rss) / 1024 / 1024).toFixed(2)}MB, Heap=${((memUsageEnd.heapUsed - memUsageStart.heapUsed) / 1024 / 1024).toFixed(2)}MB`);
+ console.log(`[AUTO-IMPORT-JOBS-SCHEDULER] CPU Usage: User=${(cpuUsageEnd.user / 1000).toFixed(2)}ms, System=${(cpuUsageEnd.system / 1000).toFixed(2)}ms`);
+
+ if (duration > 5000) {
+ console.warn(`[AUTO-IMPORT-JOBS-SCHEDULER] ⚠️ WARNING: Cron execution took ${duration}ms (>5s threshold)`);
+ }
+ console.log('========================================');
+ } catch (error) {
+ const endTime = new Date();
+ const duration = endTime.getTime() - startTime.getTime();
+
+ console.error('========================================');
+ console.error(`[AUTO-IMPORT-JOBS-SCHEDULER] ❌ ERROR at ${endTime.toISOString()}`);
+ console.error(`[AUTO-IMPORT-JOBS-SCHEDULER] Duration before error: ${duration}ms`);
+ console.error('[AUTO-IMPORT-JOBS-SCHEDULER] Error details:', error);
+ console.error('========================================');
+ }
}
private async fetchAndExecuteScheduledJobs() {
const now = dayjs();
const userJobs = await this.userJobRepository.find({});
+
+ console.log(`[AUTO-IMPORT-JOBS-SCHEDULER] Total jobs found: ${userJobs.length}`);
+
+ let jobsProcessed = 0;
+ let jobsSkipped = 0;
+ let jobsExecuted = 0;
for (const userJob of userJobs) {
- if (await this.shouldCroneRun({ userJob })) {
- try {
+ const jobStartTime = Date.now();
+ try {
+ if (await this.shouldCroneRun({ userJob })) {
if (this.isJobDueNow(userJob.nextRun, now)) {
+ console.log(`[AUTO-IMPORT-JOBS-SCHEDULER] Job is due now - JobID: ${userJob._id}, Time: ${new Date().toISOString()}`);
+
const nextScheduledTime = this.calculateNextRun(userJob.cron, userJob.nextRun);
-
await this.scheduleUpdateNextRun(userJob._id, nextScheduledTime, dayjs(userJob.endsOn));
-
+
+ const executeStartTime = Date.now();
await this.userJobTriggerService.execute(userJob._id);
+ const executeDuration = Date.now() - executeStartTime;
+
+ jobsExecuted++;
+ console.log(`[AUTO-IMPORT-JOBS-SCHEDULER] Job executed - JobID: ${userJob._id}, Duration: ${executeDuration}ms`);
+
+ if (executeDuration > 5000) {
+ console.warn(`[AUTO-IMPORT-JOBS-SCHEDULER] ⚠️ WARNING: Job execution took ${executeDuration}ms (>5s) - JobID: ${userJob._id}`);
+ }
+ } else {
+ jobsSkipped++;
}
- } catch (error) {}
+ } else {
+ jobsSkipped++;
+ }
+
+ jobsProcessed++;
+ const jobDuration = Date.now() - jobStartTime;
+ if (jobDuration > 1000) {
+ console.log(`[AUTO-IMPORT-JOBS-SCHEDULER] Job processing took ${jobDuration}ms - JobID: ${userJob._id}`);
+ }
+ } catch (error) {
+ console.error(`[AUTO-IMPORT-JOBS-SCHEDULER] ❌ Error processing job ${userJob._id} at ${new Date().toISOString()}`, error);
}
}
+
+ console.log(`[AUTO-IMPORT-JOBS-SCHEDULER] Summary - Processed: ${jobsProcessed}, Executed: ${jobsExecuted}, Skipped: ${jobsSkipped}`);
}
calculateNextRun(cronExpression: string, currentNextRun: Date): dayjs.Dayjs {
diff --git a/apps/api/src/app/failed-webhook-request-retry/usecase/failed-webhook-request-retry.usecase.ts b/apps/api/src/app/failed-webhook-request-retry/usecase/failed-webhook-request-retry.usecase.ts
index b14459628..fad3e99f9 100644
--- a/apps/api/src/app/failed-webhook-request-retry/usecase/failed-webhook-request-retry.usecase.ts
+++ b/apps/api/src/app/failed-webhook-request-retry/usecase/failed-webhook-request-retry.usecase.ts
@@ -14,25 +14,82 @@ export class FailedWebhookRetry {
@Cron(CronExpression.EVERY_5_MINUTES)
async processWebhookRetries() {
+ const startTime = new Date();
+ const memUsageStart = process.memoryUsage();
+ const cpuUsageStart = process.cpuUsage();
+
+ console.log('========================================');
+ console.log(`[FAILED-WEBHOOK-RETRY] Cron Started at ${startTime.toISOString()}`);
+ console.log(`[FAILED-WEBHOOK-RETRY] Memory Usage (Start): RSS=${(memUsageStart.rss / 1024 / 1024).toFixed(2)}MB, Heap=${(memUsageStart.heapUsed / 1024 / 1024).toFixed(2)}MB`);
+ console.log('========================================');
+
try {
const failedWebhooks: FailedWebhookRetryRequestsEntity[] = await this.failedWebhookRetryRequestsRepository.find({
nextRequestTime: { $lt: new Date() },
});
+ console.log(`[FAILED-WEBHOOK-RETRY] Found ${failedWebhooks.length} failed webhooks to retry`);
+
if (!failedWebhooks.length) {
+ console.log(`[FAILED-WEBHOOK-RETRY] No webhooks to process, exiting`);
return;
}
- await Promise.allSettled(failedWebhooks.map((wbh) => this.processWebhook(wbh)));
+ const processStartTime = Date.now();
+ const results = await Promise.allSettled(failedWebhooks.map((wbh) => this.processWebhook(wbh)));
+ const processDuration = Date.now() - processStartTime;
+
+ const successful = results.filter(r => r.status === 'fulfilled').length;
+ const failed = results.filter(r => r.status === 'rejected').length;
+
+ const endTime = new Date();
+ const duration = endTime.getTime() - startTime.getTime();
+ const memUsageEnd = process.memoryUsage();
+ const cpuUsageEnd = process.cpuUsage(cpuUsageStart);
+
+ console.log('========================================');
+ console.log(`[FAILED-WEBHOOK-RETRY] Cron Completed at ${endTime.toISOString()}`);
+ console.log(`[FAILED-WEBHOOK-RETRY] Results - Successful: ${successful}, Failed: ${failed}, Total: ${failedWebhooks.length}`);
+ console.log(`[FAILED-WEBHOOK-RETRY] Processing Duration: ${processDuration}ms`);
+ console.log(`[FAILED-WEBHOOK-RETRY] Total Duration: ${duration}ms`);
+ console.log(`[FAILED-WEBHOOK-RETRY] Memory Usage (End): RSS=${(memUsageEnd.rss / 1024 / 1024).toFixed(2)}MB, Heap=${(memUsageEnd.heapUsed / 1024 / 1024).toFixed(2)}MB`);
+ console.log(`[FAILED-WEBHOOK-RETRY] Memory Delta: RSS=${((memUsageEnd.rss - memUsageStart.rss) / 1024 / 1024).toFixed(2)}MB, Heap=${((memUsageEnd.heapUsed - memUsageStart.heapUsed) / 1024 / 1024).toFixed(2)}MB`);
+ console.log(`[FAILED-WEBHOOK-RETRY] CPU Usage: User=${(cpuUsageEnd.user / 1000).toFixed(2)}ms, System=${(cpuUsageEnd.system / 1000).toFixed(2)}ms`);
+
+ if (duration > 5000) {
+ console.warn(`[FAILED-WEBHOOK-RETRY] ⚠️ WARNING: Cron execution took ${duration}ms (>5s threshold)`);
+ }
+
+ if (failedWebhooks.length > 100) {
+ console.warn(`[FAILED-WEBHOOK-RETRY] ⚠️ WARNING: Processing large batch of ${failedWebhooks.length} webhooks`);
+ }
+
+ console.log('========================================');
} catch (error) {
+ const endTime = new Date();
+ const duration = endTime.getTime() - startTime.getTime();
+
+ console.error('========================================');
+ console.error(`[FAILED-WEBHOOK-RETRY] ❌ ERROR at ${endTime.toISOString()}`);
+ console.error(`[FAILED-WEBHOOK-RETRY] Duration before error: ${duration}ms`);
+ console.error('[FAILED-WEBHOOK-RETRY] Error details:', error);
+ console.error('========================================');
throw error;
}
}
private async processWebhook(webhook: FailedWebhookRetryRequestsEntity) {
+ const webhookStartTime = Date.now();
try {
+ console.log(`[FAILED-WEBHOOK-RETRY] Processing webhook - ID: ${webhook._id}, Time: ${new Date().toISOString()}`);
+
this.queueService.publishToQueue(QueuesEnum.SEND_FAILED_WEBHOOK_DATA, webhook._id as string);
+
+ const webhookDuration = Date.now() - webhookStartTime;
+ console.log(`[FAILED-WEBHOOK-RETRY] Webhook queued - ID: ${webhook._id}, Duration: ${webhookDuration}ms`);
} catch (error) {
+ const webhookDuration = Date.now() - webhookStartTime;
+ console.error(`[FAILED-WEBHOOK-RETRY] ❌ Error processing webhook - ID: ${webhook._id}, Duration: ${webhookDuration}ms, Time: ${new Date().toISOString()}`, error);
throw error;
}
}
diff --git a/apps/api/src/app/upload/usecases/uploadcleanup-scheduler/uploadcleanup-scheduler.service.ts b/apps/api/src/app/upload/usecases/uploadcleanup-scheduler/uploadcleanup-scheduler.service.ts
index 933c29b7d..c46bcf52d 100644
--- a/apps/api/src/app/upload/usecases/uploadcleanup-scheduler/uploadcleanup-scheduler.service.ts
+++ b/apps/api/src/app/upload/usecases/uploadcleanup-scheduler/uploadcleanup-scheduler.service.ts
@@ -16,23 +16,59 @@ export class UploadCleanupSchedulerService {
@Cron(CRON_SCHEDULE.UPLOAD_CLEANUP_DEFAULT_CRON_TIME)
async handleCleanupCronSchedular(cleanupDays: number = CRON_SCHEDULE.UPLOAD_CLEANUP_DAYS) {
+ const startTime = new Date();
+ const memUsageStart = process.memoryUsage();
+ const cpuUsageStart = process.cpuUsage();
+
+ console.log('========================================');
+ console.log(`[UPLOAD-CLEANUP-SCHEDULER] Cron Started at ${startTime.toISOString()}`);
+ console.log(`[UPLOAD-CLEANUP-SCHEDULER] Cleanup days: ${cleanupDays}`);
+ console.log(`[UPLOAD-CLEANUP-SCHEDULER] Memory Usage (Start): RSS=${(memUsageStart.rss / 1024 / 1024).toFixed(2)}MB, Heap=${(memUsageStart.heapUsed / 1024 / 1024).toFixed(2)}MB`);
+ console.log('========================================');
+
const cleanupDaysAgo = dayjs().subtract(cleanupDays, 'day').toDate();
+ console.log(`[UPLOAD-CLEANUP-SCHEDULER] Finding uploads older than: ${cleanupDaysAgo.toISOString()}`);
const uploads = await Upload.find({
uploadedDate: { $lt: cleanupDaysAgo },
_uploadedFileId: { $exists: true, $ne: '' },
});
+ console.log(`[UPLOAD-CLEANUP-SCHEDULER] Found ${uploads.length} uploads to clean up`);
+
if (uploads.length === 0) {
+ console.log(`[UPLOAD-CLEANUP-SCHEDULER] No uploads to clean up, exiting`);
return;
}
+
+ if (uploads.length > 100) {
+ console.warn(`[UPLOAD-CLEANUP-SCHEDULER] ⚠️ WARNING: Processing large batch of ${uploads.length} uploads - potential CPU intensive operation`);
+ }
+
+ let uploadsProcessed = 0;
+ let uploadsSucceeded = 0;
+ let uploadsFailed = 0;
+ let totalFilesDeleted = 0;
+ let totalCollectionsDropped = 0;
for (const upload of uploads) {
+ const uploadStartTime = Date.now();
try {
+ console.log(`[UPLOAD-CLEANUP-SCHEDULER] Processing upload - ID: ${upload.id}, UploadedFileID: ${upload._uploadedFileId}`);
+
const files = await this.fileRepository.find({ _id: upload._uploadedFileId });
+ console.log(`[UPLOAD-CLEANUP-SCHEDULER] Found ${files.length} files for upload ${upload.id}`);
+
+ const storageDeleteStart = Date.now();
await this.storageService.deleteFolder(upload.id);
+ const storageDeleteDuration = Date.now() - storageDeleteStart;
+ console.log(`[UPLOAD-CLEANUP-SCHEDULER] Storage folder deleted - Upload: ${upload.id}, Duration: ${storageDeleteDuration}ms`);
+
+ if (storageDeleteDuration > 2000) {
+ console.warn(`[UPLOAD-CLEANUP-SCHEDULER] ⚠️ WARNING: Storage deletion took ${storageDeleteDuration}ms (>2s) - Upload: ${upload.id}`);
+ }
- await Promise.allSettled(
+ const fileResults = await Promise.allSettled(
files.map(async (file) => {
try {
await Upload.updateOne({ _uploadedFileId: file._id }, { $set: { _uploadedFileId: '' } });
@@ -40,7 +76,11 @@ export class UploadCleanupSchedulerService {
// Delete file from storage and db
try {
await this.fileRepository.delete({ _id: file._id });
- } catch (error) {}
+ totalFilesDeleted++;
+ console.log(`[UPLOAD-CLEANUP-SCHEDULER] File deleted - FileID: ${file._id}`);
+ } catch (error) {
+ console.error(`[UPLOAD-CLEANUP-SCHEDULER] ❌ Error deleting file - FileID: ${file._id}`, error);
+ }
const collectionName = `${upload._id}-records`;
try {
@@ -51,12 +91,59 @@ export class UploadCleanupSchedulerService {
if (collections.length > 0) {
const collection = this.dalService.connection.collection(collectionName);
await collection.drop();
+ totalCollectionsDropped++;
+ console.log(`[UPLOAD-CLEANUP-SCHEDULER] Collection dropped - Name: ${collectionName}`);
}
- } catch (error) {}
- } catch (error) {}
+ } catch (error) {
+ console.error(`[UPLOAD-CLEANUP-SCHEDULER] ❌ Error dropping collection - Name: ${collectionName}`, error);
+ }
+ } catch (error) {
+ console.error(`[UPLOAD-CLEANUP-SCHEDULER] ❌ Error processing file - FileID: ${file._id}`, error);
+ throw error;
+ }
})
);
- } catch (error) {}
+
+ const fileSuccesses = fileResults.filter(r => r.status === 'fulfilled').length;
+ const fileFailures = fileResults.filter(r => r.status === 'rejected').length;
+
+ const uploadDuration = Date.now() - uploadStartTime;
+ console.log(`[UPLOAD-CLEANUP-SCHEDULER] Upload processed - ID: ${upload.id}, Files: ${files.length}, Success: ${fileSuccesses}, Failed: ${fileFailures}, Duration: ${uploadDuration}ms`);
+
+ if (uploadDuration > 5000) {
+ console.warn(`[UPLOAD-CLEANUP-SCHEDULER] ⚠️ WARNING: Upload processing took ${uploadDuration}ms (>5s) - Upload: ${upload.id}`);
+ }
+
+ uploadsSucceeded++;
+ } catch (error) {
+ const uploadDuration = Date.now() - uploadStartTime;
+ console.error(`[UPLOAD-CLEANUP-SCHEDULER] ❌ Error processing upload - ID: ${upload.id}, Duration: ${uploadDuration}ms`, error);
+ uploadsFailed++;
+ } finally {
+ uploadsProcessed++;
+ }
+ }
+
+ const endTime = new Date();
+ const duration = endTime.getTime() - startTime.getTime();
+ const memUsageEnd = process.memoryUsage();
+ const cpuUsageEnd = process.cpuUsage(cpuUsageStart);
+
+ console.log('========================================');
+ console.log(`[UPLOAD-CLEANUP-SCHEDULER] Cron Completed at ${endTime.toISOString()}`);
+ console.log(`[UPLOAD-CLEANUP-SCHEDULER] Summary - Total: ${uploadsProcessed}, Success: ${uploadsSucceeded}, Failed: ${uploadsFailed}`);
+ console.log(`[UPLOAD-CLEANUP-SCHEDULER] Files Deleted: ${totalFilesDeleted}, Collections Dropped: ${totalCollectionsDropped}`);
+ console.log(`[UPLOAD-CLEANUP-SCHEDULER] Total Duration: ${duration}ms`);
+ console.log(`[UPLOAD-CLEANUP-SCHEDULER] Memory Usage (End): RSS=${(memUsageEnd.rss / 1024 / 1024).toFixed(2)}MB, Heap=${(memUsageEnd.heapUsed / 1024 / 1024).toFixed(2)}MB`);
+ console.log(`[UPLOAD-CLEANUP-SCHEDULER] Memory Delta: RSS=${((memUsageEnd.rss - memUsageStart.rss) / 1024 / 1024).toFixed(2)}MB, Heap=${((memUsageEnd.heapUsed - memUsageStart.heapUsed) / 1024 / 1024).toFixed(2)}MB`);
+ console.log(`[UPLOAD-CLEANUP-SCHEDULER] CPU Usage: User=${(cpuUsageEnd.user / 1000).toFixed(2)}ms, System=${(cpuUsageEnd.system / 1000).toFixed(2)}ms`);
+
+ if (duration > 30000) {
+ console.error(`[UPLOAD-CLEANUP-SCHEDULER] 🚨 CRITICAL: Cron execution took ${duration}ms (>30s threshold) - HIGH CPU RISK!`);
+ } else if (duration > 10000) {
+ console.warn(`[UPLOAD-CLEANUP-SCHEDULER] ⚠️ WARNING: Cron execution took ${duration}ms (>10s threshold)`);
}
+
+ console.log('========================================');
}
}
diff --git a/apps/web/assets/images/companies/nirvana.svg b/apps/web/assets/images/companies/nirvana.svg
deleted file mode 100644
index 6d2bd08c8..000000000
--- a/apps/web/assets/images/companies/nirvana.svg
+++ /dev/null
@@ -1 +0,0 @@
-
\ No newline at end of file
diff --git a/apps/web/config/constants.config.ts b/apps/web/config/constants.config.ts
index 9b944f385..b6b5622ca 100644
--- a/apps/web/config/constants.config.ts
+++ b/apps/web/config/constants.config.ts
@@ -821,7 +821,6 @@ export const companyLogos = [
{ id: 'aklamio', src: AklamioLogo, alt: 'Aklamio' },
{ id: 'artha', src: ArthaLogo, alt: 'Artha' },
{ id: 'nasscom', src: NasscomLogo, alt: 'Nasscom' },
- { id: 'nirvana', src: NirvanaLogo, alt: 'Nirvana' },
{ id: 'omniva', src: OmnivaLogo, alt: 'Omniva' },
{ id: 'orbit', src: OrbitLogo, alt: 'Orbit' },
{ id: 'ubico', src: UbicoLogo, alt: 'Ubico' },
diff --git a/apps/web/layouts/OnboardLayout/LeftSideContent.tsx b/apps/web/layouts/OnboardLayout/LeftSideContent.tsx
index 51cea2d95..dd99a8b8c 100644
--- a/apps/web/layouts/OnboardLayout/LeftSideContent.tsx
+++ b/apps/web/layouts/OnboardLayout/LeftSideContent.tsx
@@ -114,18 +114,18 @@ export function LeftSideContent() {
/>
- {/* First row: 6 logos */}
+ {/* First row: 4 logos */}
- {companyLogos.slice(0, 6).map((company: any) => (
+ {companyLogos.slice(0, 4).map((company: any) => (
))}
- {/* Second row: 2 logos (centered) */}
+ {/* Second row: 3 logos (centered) */}
- {companyLogos.slice(6, 8).map((company: any) => (
+ {companyLogos.slice(4, 7).map((company: any) => (