diff --git a/ai-data-processor/pom.xml b/ai-data-processor/pom.xml
index 28b404c41..2f377276c 100644
--- a/ai-data-processor/pom.xml
+++ b/ai-data-processor/pom.xml
@@ -144,6 +144,11 @@
junit-jupiter-api
test
+
+ com.knowhow.retro
+ ai-gateway-client
+ 1.0.0
+
diff --git a/ai-data-processor/src/main/java/com/publicissapient/kpidashboard/AiDataProcessorApplication.java b/ai-data-processor/src/main/java/com/publicissapient/kpidashboard/AiDataProcessorApplication.java
index 7e2ddcdb4..26a356ef5 100644
--- a/ai-data-processor/src/main/java/com/publicissapient/kpidashboard/AiDataProcessorApplication.java
+++ b/ai-data-processor/src/main/java/com/publicissapient/kpidashboard/AiDataProcessorApplication.java
@@ -9,8 +9,9 @@
import org.springframework.scheduling.annotation.EnableScheduling;
@SpringBootApplication
-@ComponentScan(basePackages = {"com.publicissapient", "com.knowhow.retro.notifications"})
-@EnableMongoRepositories(basePackages = {"com.publicissapient.**.repository"})
+@ComponentScan(basePackages = { "com.publicissapient", "com.knowhow.retro.notifications",
+ "com.knowhow.retro.aigatewayclient" })
+@EnableMongoRepositories(basePackages = { "com.publicissapient.**.repository" })
@EnableBatchProcessing
@EnableAsync
@EnableScheduling
diff --git a/ai-data-processor/src/main/java/com/publicissapient/kpidashboard/job/aiusagestatisticscollector/listener/AIUsageStatisticsJobCompletionListener.java b/ai-data-processor/src/main/java/com/publicissapient/kpidashboard/job/aiusagestatisticscollector/listener/AIUsageStatisticsJobCompletionListener.java
index e17165258..c9c125c04 100644
--- a/ai-data-processor/src/main/java/com/publicissapient/kpidashboard/job/aiusagestatisticscollector/listener/AIUsageStatisticsJobCompletionListener.java
+++ b/ai-data-processor/src/main/java/com/publicissapient/kpidashboard/job/aiusagestatisticscollector/listener/AIUsageStatisticsJobCompletionListener.java
@@ -16,9 +16,9 @@
package com.publicissapient.kpidashboard.job.aiusagestatisticscollector.listener;
-import com.publicissapient.kpidashboard.common.model.ProcessorExecutionTraceLog;
+import com.publicissapient.kpidashboard.common.model.tracelog.JobExecutionTraceLog;
import com.publicissapient.kpidashboard.common.model.application.ErrorDetail;
-import com.publicissapient.kpidashboard.common.service.ProcessorExecutionTraceLogServiceImpl;
+import com.publicissapient.kpidashboard.common.service.JobExecutionTraceLogService;
import com.publicissapient.kpidashboard.job.aiusagestatisticscollector.service.AccountBatchService;
import lombok.AllArgsConstructor;
import lombok.extern.slf4j.Slf4j;
@@ -37,7 +37,7 @@
@AllArgsConstructor
public class AIUsageStatisticsJobCompletionListener implements JobExecutionListener {
private final AccountBatchService accountBatchService;
- private final ProcessorExecutionTraceLogServiceImpl processorExecutionTraceLogServiceImpl;
+ private final JobExecutionTraceLogService jobExecutionTraceLogService;
@Override
public void afterJob(@NonNull JobExecution jobExecution) {
@@ -50,12 +50,12 @@ private void storeJobExecutionStatus(JobExecution jobExecution) {
String jobName = jobParameters.getString("jobName");
ObjectId executionId = (ObjectId) Objects.requireNonNull(jobParameters.getParameter("executionId")).getValue();
- Optional processorExecutionTraceLogOptional = this.processorExecutionTraceLogServiceImpl
+ Optional executionTraceLogOptional = this.jobExecutionTraceLogService
.findById(executionId);
- if (processorExecutionTraceLogOptional.isPresent()) {
- ProcessorExecutionTraceLog executionTraceLog = processorExecutionTraceLogOptional.get();
+ if (executionTraceLogOptional.isPresent()) {
+ JobExecutionTraceLog executionTraceLog = executionTraceLogOptional.get();
executionTraceLog.setExecutionOngoing(false);
- executionTraceLog.setExecutionEndedAt(Instant.now().toEpochMilli());
+ executionTraceLog.setExecutionEndedAt(Instant.now());
executionTraceLog.setExecutionSuccess(jobExecution.getStatus() == BatchStatus.COMPLETED);
executionTraceLog
.setErrorDetailList(jobExecution.getAllFailureExceptions().stream().map(failureException -> {
@@ -63,7 +63,7 @@ private void storeJobExecutionStatus(JobExecution jobExecution) {
errorDetail.setError(failureException.getMessage());
return errorDetail;
}).toList());
- this.processorExecutionTraceLogServiceImpl.saveAiDataProcessorExecutions(executionTraceLog);
+ this.jobExecutionTraceLogService.updateJobExecution(executionTraceLog);
} else {
log.error("Could not store job execution ending status for job with name {} and execution id {}. Job "
+ "execution could not be found", jobName, executionId);
diff --git a/ai-data-processor/src/main/java/com/publicissapient/kpidashboard/job/aiusagestatisticscollector/processor/AccountItemProcessor.java b/ai-data-processor/src/main/java/com/publicissapient/kpidashboard/job/aiusagestatisticscollector/processor/AccountItemProcessor.java
index 199764325..1120d8fe1 100644
--- a/ai-data-processor/src/main/java/com/publicissapient/kpidashboard/job/aiusagestatisticscollector/processor/AccountItemProcessor.java
+++ b/ai-data-processor/src/main/java/com/publicissapient/kpidashboard/job/aiusagestatisticscollector/processor/AccountItemProcessor.java
@@ -21,6 +21,7 @@
import com.publicissapient.kpidashboard.job.aiusagestatisticscollector.dto.AIUsagePerOrgLevel;
import com.publicissapient.kpidashboard.job.aiusagestatisticscollector.model.AIUsageStatistics;
import com.publicissapient.kpidashboard.job.aiusagestatisticscollector.service.AIUsageStatisticsService;
+import com.publicissapient.kpidashboard.job.constant.JobConstants;
import jakarta.annotation.Nonnull;
import lombok.AllArgsConstructor;
@@ -33,7 +34,7 @@ public class AccountItemProcessor implements ItemProcessor {
@Override
public void write(@NonNull Chunk extends AIUsageStatistics> chunk) {
- log.info("[ai-usage-statistics-collector job] Received chunk items for inserting into database with size: {}", chunk.size());
+ log.info("{} Received chunk items for inserting into database with size: {}", JobConstants.LOG_PREFIX_AI_USAGE_STATISTICS, chunk.size());
aiUsageStatisticsService.saveAll((List.copyOf(chunk.getItems())));
}
}
diff --git a/ai-data-processor/src/main/java/com/publicissapient/kpidashboard/job/constant/JobConstants.java b/ai-data-processor/src/main/java/com/publicissapient/kpidashboard/job/constant/JobConstants.java
new file mode 100644
index 000000000..6fb954521
--- /dev/null
+++ b/ai-data-processor/src/main/java/com/publicissapient/kpidashboard/job/constant/JobConstants.java
@@ -0,0 +1,37 @@
+/*
+ * Copyright 2024 Sapient Corporation
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and limitations under the
+ * License.
+ */
+
+package com.publicissapient.kpidashboard.job.constant;
+
+import lombok.experimental.UtilityClass;
+
+/**
+ * Constants used across AI Data Processor jobs.
+ */
+@UtilityClass
+public final class JobConstants {
+
+ public static final String JOB_PRODUCTIVITY_CALCULATION = "productivity-calculation";
+ public static final String JOB_KPI_MATURITY_CALCULATION = "kpi-maturity-calculation";
+ public static final String JOB_AI_USAGE_STATISTICS_COLLECTOR = "ai-usage-statistics-collector";
+ public static final String JOB_RECOMMENDATION_CALCULATION = "recommendation-calculation";
+
+ public static final String LOG_PREFIX_RECOMMENDATION = "[" + JOB_RECOMMENDATION_CALCULATION + " job]";
+ public static final String LOG_PREFIX_PRODUCTIVITY = "[" + JOB_PRODUCTIVITY_CALCULATION + " job]";
+ public static final String LOG_PREFIX_KPI_MATURITY = "[" + JOB_KPI_MATURITY_CALCULATION + " job]";
+ public static final String LOG_PREFIX_AI_USAGE_STATISTICS = "[" + JOB_AI_USAGE_STATISTICS_COLLECTOR + " job]";
+
+}
diff --git a/ai-data-processor/src/main/java/com/publicissapient/kpidashboard/job/kpimaturitycalculation/listener/KpiMaturityCalculationJobExecutionListener.java b/ai-data-processor/src/main/java/com/publicissapient/kpidashboard/job/kpimaturitycalculation/listener/KpiMaturityCalculationJobExecutionListener.java
index da6c2ba23..7f7f5b8db 100644
--- a/ai-data-processor/src/main/java/com/publicissapient/kpidashboard/job/kpimaturitycalculation/listener/KpiMaturityCalculationJobExecutionListener.java
+++ b/ai-data-processor/src/main/java/com/publicissapient/kpidashboard/job/kpimaturitycalculation/listener/KpiMaturityCalculationJobExecutionListener.java
@@ -27,9 +27,9 @@
import org.springframework.batch.core.JobParameters;
import org.springframework.lang.NonNull;
-import com.publicissapient.kpidashboard.common.model.ProcessorExecutionTraceLog;
+import com.publicissapient.kpidashboard.common.model.tracelog.JobExecutionTraceLog;
import com.publicissapient.kpidashboard.common.model.application.ErrorDetail;
-import com.publicissapient.kpidashboard.common.service.ProcessorExecutionTraceLogServiceImpl;
+import com.publicissapient.kpidashboard.common.service.JobExecutionTraceLogService;
import com.publicissapient.kpidashboard.job.productivitycalculation.service.ProjectBatchService;
import lombok.RequiredArgsConstructor;
@@ -39,7 +39,7 @@
@RequiredArgsConstructor
public class KpiMaturityCalculationJobExecutionListener implements JobExecutionListener {
private final ProjectBatchService projectBatchService;
- private final ProcessorExecutionTraceLogServiceImpl processorExecutionTraceLogServiceImpl;
+ private final JobExecutionTraceLogService jobExecutionTraceLogService;
@Override
public void afterJob(@NonNull JobExecution jobExecution) {
@@ -52,12 +52,12 @@ private void storeJobExecutionStatus(JobExecution jobExecution) {
String jobName = jobParameters.getString("jobName");
ObjectId executionId = (ObjectId) Objects.requireNonNull(jobParameters.getParameter("executionId")).getValue();
- Optional processorExecutionTraceLogOptional = this.processorExecutionTraceLogServiceImpl
+ Optional executionTraceLogOptional = this.jobExecutionTraceLogService
.findById(executionId);
- if (processorExecutionTraceLogOptional.isPresent()) {
- ProcessorExecutionTraceLog executionTraceLog = processorExecutionTraceLogOptional.get();
+ if (executionTraceLogOptional.isPresent()) {
+ JobExecutionTraceLog executionTraceLog = executionTraceLogOptional.get();
executionTraceLog.setExecutionOngoing(false);
- executionTraceLog.setExecutionEndedAt(Instant.now().toEpochMilli());
+ executionTraceLog.setExecutionEndedAt(Instant.now());
executionTraceLog.setExecutionSuccess(jobExecution.getStatus() == BatchStatus.COMPLETED);
executionTraceLog
.setErrorDetailList(jobExecution.getAllFailureExceptions().stream().map(failureException -> {
@@ -65,7 +65,7 @@ private void storeJobExecutionStatus(JobExecution jobExecution) {
errorDetail.setError(failureException.getMessage());
return errorDetail;
}).toList());
- this.processorExecutionTraceLogServiceImpl.saveAiDataProcessorExecutions(executionTraceLog);
+ this.jobExecutionTraceLogService.updateJobExecution(executionTraceLog);
} else {
log.error("Could not store job execution ending status for job with name {} and execution id {}. Job "
+ "execution could not be found", jobName, executionId);
diff --git a/ai-data-processor/src/main/java/com/publicissapient/kpidashboard/job/kpimaturitycalculation/processor/ProjectItemProcessor.java b/ai-data-processor/src/main/java/com/publicissapient/kpidashboard/job/kpimaturitycalculation/processor/ProjectItemProcessor.java
index 3a9289952..58d98f699 100644
--- a/ai-data-processor/src/main/java/com/publicissapient/kpidashboard/job/kpimaturitycalculation/processor/ProjectItemProcessor.java
+++ b/ai-data-processor/src/main/java/com/publicissapient/kpidashboard/job/kpimaturitycalculation/processor/ProjectItemProcessor.java
@@ -16,6 +16,7 @@
package com.publicissapient.kpidashboard.job.kpimaturitycalculation.processor;
+import com.publicissapient.kpidashboard.job.constant.JobConstants;
import org.springframework.batch.item.ItemProcessor;
import com.publicissapient.kpidashboard.common.model.kpimaturity.organization.KpiMaturity;
@@ -34,7 +35,7 @@ public class ProjectItemProcessor implements ItemProcessor {
public ProjectInputDTO read() {
ProjectInputDTO projectInputDTO = projectBatchService.getNextProjectInputData();
- log.info("[kpi-maturity-calculation job] Received project input dto {}", projectInputDTO);
+ log.info("{} Received project input dto {}", JobConstants.LOG_PREFIX_KPI_MATURITY, projectInputDTO);
return projectInputDTO;
}
diff --git a/ai-data-processor/src/main/java/com/publicissapient/kpidashboard/job/kpimaturitycalculation/strategy/KpiMaturityCalculationJobStrategy.java b/ai-data-processor/src/main/java/com/publicissapient/kpidashboard/job/kpimaturitycalculation/strategy/KpiMaturityCalculationJobStrategy.java
index c18794836..1d6be6fd0 100644
--- a/ai-data-processor/src/main/java/com/publicissapient/kpidashboard/job/kpimaturitycalculation/strategy/KpiMaturityCalculationJobStrategy.java
+++ b/ai-data-processor/src/main/java/com/publicissapient/kpidashboard/job/kpimaturitycalculation/strategy/KpiMaturityCalculationJobStrategy.java
@@ -31,7 +31,8 @@
import org.springframework.transaction.PlatformTransactionManager;
import com.publicissapient.kpidashboard.common.model.kpimaturity.organization.KpiMaturity;
-import com.publicissapient.kpidashboard.common.service.ProcessorExecutionTraceLogServiceImpl;
+import com.publicissapient.kpidashboard.common.service.JobExecutionTraceLogService;
+import com.publicissapient.kpidashboard.common.service.ProcessorExecutionTraceLogService;
import com.publicissapient.kpidashboard.job.config.base.SchedulingConfig;
import com.publicissapient.kpidashboard.job.kpimaturitycalculation.config.KpiMaturityCalculationConfig;
import com.publicissapient.kpidashboard.job.kpimaturitycalculation.listener.KpiMaturityCalculationJobExecutionListener;
@@ -59,7 +60,8 @@ public class KpiMaturityCalculationJobStrategy implements JobStrategy {
private final ProjectBatchService projectBatchService;
private final KpiMaturityCalculationService kpiMaturityCalculationService;
- private final ProcessorExecutionTraceLogServiceImpl processorExecutionTraceLogServiceImpl;
+ private final JobExecutionTraceLogService jobExecutionTraceLogService;
+ private final ProcessorExecutionTraceLogService processorExecutionTraceLogService;
@Override
public String getJobName() {
@@ -71,7 +73,7 @@ public Job getJob() {
return new JobBuilder(this.kpiMaturityCalculationConfig.getName(), this.jobRepository)
.start(chunkProcessProjects())
.listener(new KpiMaturityCalculationJobExecutionListener(this.projectBatchService,
- this.processorExecutionTraceLogServiceImpl))
+ this.jobExecutionTraceLogService))
.build();
}
@@ -98,7 +100,7 @@ private AsyncItemProcessor asyncProjectProcessor()
private AsyncItemWriter asyncItemWriter() {
AsyncItemWriter writer = new AsyncItemWriter<>();
- writer.setDelegate(new ProjectItemWriter(this.kpiMaturityCalculationService));
+ writer.setDelegate(new ProjectItemWriter(this.kpiMaturityCalculationService, this.processorExecutionTraceLogService));
return writer;
}
}
diff --git a/ai-data-processor/src/main/java/com/publicissapient/kpidashboard/job/kpimaturitycalculation/writer/ProjectItemWriter.java b/ai-data-processor/src/main/java/com/publicissapient/kpidashboard/job/kpimaturitycalculation/writer/ProjectItemWriter.java
index 9bd6bfbd0..f7aaea231 100644
--- a/ai-data-processor/src/main/java/com/publicissapient/kpidashboard/job/kpimaturitycalculation/writer/ProjectItemWriter.java
+++ b/ai-data-processor/src/main/java/com/publicissapient/kpidashboard/job/kpimaturitycalculation/writer/ProjectItemWriter.java
@@ -18,6 +18,8 @@
import java.util.List;
+import com.publicissapient.kpidashboard.common.service.ProcessorExecutionTraceLogService;
+import com.publicissapient.kpidashboard.job.constant.JobConstants;
import org.springframework.batch.item.Chunk;
import org.springframework.batch.item.ItemWriter;
import org.springframework.lang.NonNull;
@@ -32,11 +34,13 @@
@RequiredArgsConstructor
public class ProjectItemWriter implements ItemWriter {
- private final KpiMaturityCalculationService kpiMaturityCalculationService;
+ private final KpiMaturityCalculationService kpiMaturityCalculationService;
+ private final ProcessorExecutionTraceLogService processorExecutionTraceLogService;
- @Override
- public void write(@NonNull Chunk extends KpiMaturity> chunk) {
- log.info("[kpi-maturity-calculation job] Received chunk items for inserting into database with size: {}", chunk.size());
- kpiMaturityCalculationService.saveAll((List) chunk.getItems());
- }
+ @Override
+ public void write(@NonNull Chunk extends KpiMaturity> chunk) {
+ log.info("{} Received chunk items for inserting into database with size: {}",
+ JobConstants.LOG_PREFIX_KPI_MATURITY, chunk.size());
+ kpiMaturityCalculationService.saveAll((List) chunk.getItems());
+ }
}
diff --git a/ai-data-processor/src/main/java/com/publicissapient/kpidashboard/job/orchestrator/JobOrchestrator.java b/ai-data-processor/src/main/java/com/publicissapient/kpidashboard/job/orchestrator/JobOrchestrator.java
index f836d9b58..200cdc74a 100644
--- a/ai-data-processor/src/main/java/com/publicissapient/kpidashboard/job/orchestrator/JobOrchestrator.java
+++ b/ai-data-processor/src/main/java/com/publicissapient/kpidashboard/job/orchestrator/JobOrchestrator.java
@@ -21,7 +21,6 @@
import java.util.Set;
import java.util.stream.Collectors;
-import org.apache.commons.collections4.CollectionUtils;
import org.bson.types.ObjectId;
import org.springframework.batch.core.JobParameters;
import org.springframework.batch.core.JobParametersBuilder;
@@ -29,9 +28,10 @@
import org.springframework.stereotype.Service;
import com.publicissapient.kpidashboard.common.constant.ProcessorType;
-import com.publicissapient.kpidashboard.common.model.ProcessorExecutionTraceLog;
+import com.publicissapient.kpidashboard.common.model.tracelog.JobExecutionTraceLog;
import com.publicissapient.kpidashboard.common.model.application.ErrorDetail;
-import com.publicissapient.kpidashboard.common.service.ProcessorExecutionTraceLogServiceImpl;
+import com.publicissapient.kpidashboard.common.service.JobExecutionTraceLogService;
+import com.publicissapient.kpidashboard.common.constant.ProcessorConstants;
import com.publicissapient.kpidashboard.exception.ConcurrentJobExecutionException;
import com.publicissapient.kpidashboard.exception.InternalServerErrorException;
import com.publicissapient.kpidashboard.exception.JobNotEnabledException;
@@ -58,7 +58,7 @@ public class JobOrchestrator {
private final AiDataProcessorRepository aiDataProcessorRepository;
- private final ProcessorExecutionTraceLogServiceImpl processorExecutionTraceLogServiceImpl;
+ private final JobExecutionTraceLogService jobExecutionTraceLogService;
@PostConstruct
private void loadAllRegisteredJobs() {
@@ -105,22 +105,22 @@ public JobResponseRecord enableJob(String jobName) {
public JobExecutionResponseRecord runJob(String jobName) {
validateJobCanBeRun(jobName);
AiDataProcessor aiDataProcessor = aiDataProcessorRepository.findByProcessorName(jobName);
- ProcessorExecutionTraceLog executionTraceLog = this.processorExecutionTraceLogServiceImpl
- .createNewProcessorJobExecution(jobName);
+ JobExecutionTraceLog executionTraceLog = this.jobExecutionTraceLogService
+ .createProcessorJobExecution(ProcessorConstants.AI_DATA, jobName);
try {
JobParameters jobParameters = new JobParametersBuilder().addJobParameter("jobName", jobName, String.class)
.addJobParameter("executionId", executionTraceLog.getId(), ObjectId.class).toJobParameters();
this.jobLauncher.run(aiDataJobRegistry.getJobStrategy(jobName).getJob(), jobParameters);
- return JobExecutionResponseRecord.builder().isRunning(true)
- .startedAt(Instant.ofEpochMilli(executionTraceLog.getExecutionStartedAt())).jobName(jobName)
+ return JobExecutionResponseRecord.builder().isRunning(true)
+ .startedAt(executionTraceLog.getExecutionStartedAt()).jobName(jobName)
.jobId(aiDataProcessor.getId()).executionId(aiDataProcessor.getId())
.executionId(executionTraceLog.getId()).build();
} catch (Exception e) {
String errorMessage = String.format("Could not run job '%s' -> '%s", jobName, e.getMessage());
- executionTraceLog.setExecutionEndedAt(Instant.now().toEpochMilli());
+ executionTraceLog.setExecutionEndedAt(Instant.now());
executionTraceLog.setExecutionSuccess(false);
executionTraceLog.setErrorDetailList(List.of(ErrorDetail.builder().error(errorMessage).build()));
- this.processorExecutionTraceLogServiceImpl.saveAiDataProcessorExecutions(executionTraceLog);
+ this.jobExecutionTraceLogService.updateJobExecution(executionTraceLog);
log.error(errorMessage);
throw new InternalServerErrorException(
String.format("Encountered unexpected error while trying to run job with name '%s'", jobName));
@@ -128,11 +128,8 @@ public JobExecutionResponseRecord runJob(String jobName) {
}
public boolean jobIsCurrentlyRunning(String jobName) {
- List processorExecutionTraceLogs = processorExecutionTraceLogServiceImpl
- .findLastExecutionTraceLogsByProcessorName(jobName, 1);
-
- return CollectionUtils.isNotEmpty(processorExecutionTraceLogs)
- && processorExecutionTraceLogs.get(0).isExecutionOngoing();
+ return this.jobExecutionTraceLogService.isJobCurrentlyRunning(ProcessorConstants.AI_DATA,
+ jobName);
}
private void validateJobCanBeRun(String jobName) {
diff --git a/ai-data-processor/src/main/java/com/publicissapient/kpidashboard/job/productivitycalculation/listener/ProductivityCalculationJobExecutionListener.java b/ai-data-processor/src/main/java/com/publicissapient/kpidashboard/job/productivitycalculation/listener/ProductivityCalculationJobExecutionListener.java
index 8bb6f319b..ff8963cd6 100644
--- a/ai-data-processor/src/main/java/com/publicissapient/kpidashboard/job/productivitycalculation/listener/ProductivityCalculationJobExecutionListener.java
+++ b/ai-data-processor/src/main/java/com/publicissapient/kpidashboard/job/productivitycalculation/listener/ProductivityCalculationJobExecutionListener.java
@@ -27,9 +27,9 @@
import org.springframework.batch.core.JobParameters;
import org.springframework.lang.NonNull;
-import com.publicissapient.kpidashboard.common.model.ProcessorExecutionTraceLog;
+import com.publicissapient.kpidashboard.common.model.tracelog.JobExecutionTraceLog;
import com.publicissapient.kpidashboard.common.model.application.ErrorDetail;
-import com.publicissapient.kpidashboard.common.service.ProcessorExecutionTraceLogServiceImpl;
+import com.publicissapient.kpidashboard.common.service.JobExecutionTraceLogService;
import com.publicissapient.kpidashboard.job.productivitycalculation.service.ProjectBatchService;
import lombok.RequiredArgsConstructor;
@@ -40,7 +40,7 @@
public class ProductivityCalculationJobExecutionListener implements JobExecutionListener {
private final ProjectBatchService projectBatchService;
- private final ProcessorExecutionTraceLogServiceImpl processorExecutionTraceLogServiceImpl;
+ private final JobExecutionTraceLogService jobExecutionTraceLogService;
@Override
public void afterJob(@NonNull JobExecution jobExecution) {
@@ -53,12 +53,12 @@ private void storeJobExecutionStatus(JobExecution jobExecution) {
String jobName = jobParameters.getString("jobName");
ObjectId executionId = (ObjectId) Objects.requireNonNull(jobParameters.getParameter("executionId")).getValue();
- Optional processorExecutionTraceLogOptional = this.processorExecutionTraceLogServiceImpl
+ Optional executionTraceLogOptional = this.jobExecutionTraceLogService
.findById(executionId);
- if (processorExecutionTraceLogOptional.isPresent()) {
- ProcessorExecutionTraceLog executionTraceLog = processorExecutionTraceLogOptional.get();
+ if (executionTraceLogOptional.isPresent()) {
+ JobExecutionTraceLog executionTraceLog = executionTraceLogOptional.get();
executionTraceLog.setExecutionOngoing(false);
- executionTraceLog.setExecutionEndedAt(Instant.now().toEpochMilli());
+ executionTraceLog.setExecutionEndedAt(Instant.now());
executionTraceLog.setExecutionSuccess(jobExecution.getStatus() == BatchStatus.COMPLETED);
executionTraceLog
.setErrorDetailList(jobExecution.getAllFailureExceptions().stream().map(failureException -> {
@@ -66,7 +66,7 @@ private void storeJobExecutionStatus(JobExecution jobExecution) {
errorDetail.setError(failureException.getMessage());
return errorDetail;
}).toList());
- this.processorExecutionTraceLogServiceImpl.saveAiDataProcessorExecutions(executionTraceLog);
+ this.jobExecutionTraceLogService.updateJobExecution(executionTraceLog);
} else {
log.error("Could not store job execution ending status for job with name {} and execution id {}. Job "
+ "execution could not be found", jobName, executionId);
diff --git a/ai-data-processor/src/main/java/com/publicissapient/kpidashboard/job/productivitycalculation/processor/ProjectItemProcessor.java b/ai-data-processor/src/main/java/com/publicissapient/kpidashboard/job/productivitycalculation/processor/ProjectItemProcessor.java
index e5d9f1ca4..13083dfd4 100644
--- a/ai-data-processor/src/main/java/com/publicissapient/kpidashboard/job/productivitycalculation/processor/ProjectItemProcessor.java
+++ b/ai-data-processor/src/main/java/com/publicissapient/kpidashboard/job/productivitycalculation/processor/ProjectItemProcessor.java
@@ -19,6 +19,7 @@
import org.springframework.batch.item.ItemProcessor;
import com.publicissapient.kpidashboard.common.model.productivity.calculation.Productivity;
+import com.publicissapient.kpidashboard.job.constant.JobConstants;
import com.publicissapient.kpidashboard.job.productivitycalculation.service.ProductivityCalculationService;
import com.publicissapient.kpidashboard.job.shared.dto.ProjectInputDTO;
@@ -34,7 +35,7 @@ public class ProjectItemProcessor implements ItemProcessor {
public ProjectInputDTO read() {
ProjectInputDTO projectInputDTO = projectBatchService.getNextProjectInputData();
- log.info("[productivity-calculation job]Received project input dto {}", projectInputDTO);
+ log.info("{} Received project input dto {}", JobConstants.LOG_PREFIX_PRODUCTIVITY, projectInputDTO);
return projectInputDTO;
}
diff --git a/ai-data-processor/src/main/java/com/publicissapient/kpidashboard/job/productivitycalculation/strategy/ProductivityCalculationJobStrategy.java b/ai-data-processor/src/main/java/com/publicissapient/kpidashboard/job/productivitycalculation/strategy/ProductivityCalculationJobStrategy.java
index 42c6782a7..d89dacce5 100644
--- a/ai-data-processor/src/main/java/com/publicissapient/kpidashboard/job/productivitycalculation/strategy/ProductivityCalculationJobStrategy.java
+++ b/ai-data-processor/src/main/java/com/publicissapient/kpidashboard/job/productivitycalculation/strategy/ProductivityCalculationJobStrategy.java
@@ -31,7 +31,8 @@
import org.springframework.transaction.PlatformTransactionManager;
import com.publicissapient.kpidashboard.common.model.productivity.calculation.Productivity;
-import com.publicissapient.kpidashboard.common.service.ProcessorExecutionTraceLogServiceImpl;
+import com.publicissapient.kpidashboard.common.service.JobExecutionTraceLogService;
+import com.publicissapient.kpidashboard.common.service.ProcessorExecutionTraceLogService;
import com.publicissapient.kpidashboard.job.config.base.SchedulingConfig;
import com.publicissapient.kpidashboard.job.productivitycalculation.config.ProductivityCalculationConfig;
import com.publicissapient.kpidashboard.job.productivitycalculation.listener.ProductivityCalculationJobExecutionListener;
@@ -58,7 +59,8 @@ public class ProductivityCalculationJobStrategy implements JobStrategy {
private final ProjectBatchService projectBatchService;
private final ProductivityCalculationService productivityCalculationService;
- private final ProcessorExecutionTraceLogServiceImpl processorExecutionTraceLogServiceImpl;
+ private final JobExecutionTraceLogService jobExecutionTraceLogService;
+ private final ProcessorExecutionTraceLogService processorExecutionTraceLogService;
@Override
public String getJobName() {
@@ -74,7 +76,7 @@ public Optional getSchedulingConfig() {
public Job getJob() {
return new JobBuilder(productivityCalculationJobConfig.getName(), jobRepository).start(chunkProcessProjects())
.listener(new ProductivityCalculationJobExecutionListener(this.projectBatchService,
- this.processorExecutionTraceLogServiceImpl))
+ this.jobExecutionTraceLogService))
.build();
}
@@ -95,7 +97,7 @@ private AsyncItemProcessor asyncProjectProcessor(
private AsyncItemWriter asyncItemWriter() {
AsyncItemWriter writer = new AsyncItemWriter<>();
- writer.setDelegate(new ProjectItemWriter(this.productivityCalculationService));
+ writer.setDelegate(new ProjectItemWriter(this.productivityCalculationService, this.processorExecutionTraceLogService));
return writer;
}
}
diff --git a/ai-data-processor/src/main/java/com/publicissapient/kpidashboard/job/productivitycalculation/writer/ProjectItemWriter.java b/ai-data-processor/src/main/java/com/publicissapient/kpidashboard/job/productivitycalculation/writer/ProjectItemWriter.java
index a56da4d92..8fb34fe04 100644
--- a/ai-data-processor/src/main/java/com/publicissapient/kpidashboard/job/productivitycalculation/writer/ProjectItemWriter.java
+++ b/ai-data-processor/src/main/java/com/publicissapient/kpidashboard/job/productivitycalculation/writer/ProjectItemWriter.java
@@ -18,6 +18,8 @@
import java.util.List;
+import com.publicissapient.kpidashboard.common.service.ProcessorExecutionTraceLogService;
+import com.publicissapient.kpidashboard.job.constant.JobConstants;
import org.springframework.batch.item.Chunk;
import org.springframework.batch.item.ItemWriter;
import org.springframework.lang.NonNull;
@@ -33,10 +35,12 @@
public class ProjectItemWriter implements ItemWriter {
private final ProductivityCalculationService productivityCalculationService;
+ private final ProcessorExecutionTraceLogService processorExecutionTraceLogService;
@Override
public void write(@NonNull Chunk extends Productivity> chunk) {
- log.info("[productivity-calculation job] Received chunk items for inserting into database with size: {}", chunk.size());
- productivityCalculationService.saveAll((List) chunk.getItems());
+ log.info("{} Received chunk items for inserting into database with size: {}",
+ JobConstants.LOG_PREFIX_PRODUCTIVITY, chunk.size());
+ productivityCalculationService.saveAll((List) chunk.getItems());
}
}
diff --git a/ai-data-processor/src/main/java/com/publicissapient/kpidashboard/job/recommendationcalculation/config/CalculationConfig.java b/ai-data-processor/src/main/java/com/publicissapient/kpidashboard/job/recommendationcalculation/config/CalculationConfig.java
new file mode 100644
index 000000000..d0d557d4e
--- /dev/null
+++ b/ai-data-processor/src/main/java/com/publicissapient/kpidashboard/job/recommendationcalculation/config/CalculationConfig.java
@@ -0,0 +1,57 @@
+/*
+ * Copyright 2014 CapitalOne, LLC.
+ * Further development Copyright 2022 Sapient Corporation.
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+package com.publicissapient.kpidashboard.job.recommendationcalculation.config;
+
+import java.util.Collections;
+import java.util.HashSet;
+import java.util.List;
+import java.util.Set;
+
+import org.apache.commons.collections4.CollectionUtils;
+
+import com.publicissapient.kpidashboard.common.model.recommendation.batch.Persona;
+import com.publicissapient.kpidashboard.job.config.validator.ConfigValidator;
+
+import lombok.Data;
+
+/**
+ * Configuration class for recommendation calculation job.
+ */
+@Data
+public class CalculationConfig implements ConfigValidator {
+
+ private Set<String> configValidationErrors = new HashSet<>();
+
+ private Persona enabledPersona;
+ private List<String> kpiList;
+
+ @Override
+ public void validateConfiguration() {
+ if (enabledPersona == null) {
+ configValidationErrors.add("No enabled persona configured for recommendation calculation");
+ }
+ if (CollectionUtils.isEmpty(kpiList)) {
+ configValidationErrors.add("No KPI list configured for recommendation calculation");
+ }
+ }
+
+ @Override
+ public Set<String> getConfigValidationErrors() {
+ return Collections.unmodifiableSet(configValidationErrors);
+ }
+}
diff --git a/ai-data-processor/src/main/java/com/publicissapient/kpidashboard/job/recommendationcalculation/config/RecommendationCalculationConfig.java b/ai-data-processor/src/main/java/com/publicissapient/kpidashboard/job/recommendationcalculation/config/RecommendationCalculationConfig.java
new file mode 100644
index 000000000..5ff694581
--- /dev/null
+++ b/ai-data-processor/src/main/java/com/publicissapient/kpidashboard/job/recommendationcalculation/config/RecommendationCalculationConfig.java
@@ -0,0 +1,108 @@
+/*
+ * Copyright 2014 CapitalOne, LLC.
+ * Further development Copyright 2022 Sapient Corporation.
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+package com.publicissapient.kpidashboard.job.recommendationcalculation.config;
+
+import java.util.Collections;
+import java.util.HashSet;
+import java.util.Set;
+
+import org.apache.commons.lang3.StringUtils;
+import org.springframework.beans.factory.annotation.Autowired;
+import org.springframework.boot.context.properties.ConfigurationProperties;
+import org.springframework.stereotype.Component;
+
+import com.knowhow.retro.aigatewayclient.client.config.AiGatewayConfig;
+import com.knowhow.retro.aigatewayclient.m2mauth.config.M2MAuthConfig;
+import com.publicissapient.kpidashboard.job.config.base.BatchConfig;
+import com.publicissapient.kpidashboard.job.config.base.SchedulingConfig;
+import com.publicissapient.kpidashboard.job.config.validator.ConfigValidator;
+
+import jakarta.annotation.PostConstruct;
+import lombok.Data;
+
+/**
+ * Main configuration class for recommendation calculation job.
+ */
+@Data
+@Component
+@ConfigurationProperties(prefix = "jobs.recommendation-calculation")
+public class RecommendationCalculationConfig implements ConfigValidator {
+
+ private final M2MAuthConfig m2MAuthConfig;
+ private final AiGatewayConfig aiGatewayConfig;
+ private String name;
+ private BatchConfig batching;
+ private SchedulingConfig scheduling;
+ private CalculationConfig calculationConfig;
+ private Set<String> configValidationErrors = new HashSet<>();
+
+ @Autowired
+ public RecommendationCalculationConfig(M2MAuthConfig m2MAuthConfig, AiGatewayConfig aiGatewayConfig) {
+ this.m2MAuthConfig = m2MAuthConfig;
+ this.aiGatewayConfig = aiGatewayConfig;
+ }
+
+ @Override
+ public void validateConfiguration() {
+ if (StringUtils.isEmpty(this.name)) {
+ configValidationErrors.add("The job 'name' parameter is required");
+ }
+
+ // Validate M2M Auth configuration
+ if (m2MAuthConfig == null) {
+ configValidationErrors.add("M2M authentication configuration is required for AI Gateway access");
+ } else {
+ if (StringUtils.isEmpty(m2MAuthConfig.getIssuerServiceId())) {
+ configValidationErrors.add("M2M auth 'issuerServiceId' is required");
+ }
+ if (StringUtils.isEmpty(m2MAuthConfig.getSecret())) {
+ configValidationErrors.add("M2M auth 'secret' is required");
+ }
+ }
+
+ // Validate AI Gateway configuration
+ if (aiGatewayConfig == null) {
+ configValidationErrors.add("AI Gateway configuration is required for recommendation calculation");
+ } else {
+ if (StringUtils.isEmpty(aiGatewayConfig.getBaseUrl())) {
+ configValidationErrors.add("AI Gateway 'baseUrl' is required");
+ }
+ if (StringUtils.isEmpty(aiGatewayConfig.getAudience())) {
+ configValidationErrors.add("AI Gateway 'audience' is required");
+ }
+ }
+ }
+
+ @Override
+ public Set<String> getConfigValidationErrors() {
+ return Collections.unmodifiableSet(this.configValidationErrors);
+ }
+
+ @PostConstruct
+ private void retrieveJobConfigValidationErrors() {
+ this.validateConfiguration();
+
+ this.calculationConfig.validateConfiguration();
+ this.batching.validateConfiguration();
+ this.scheduling.validateConfiguration();
+
+ this.configValidationErrors.addAll(this.calculationConfig.getConfigValidationErrors());
+ this.configValidationErrors.addAll(this.batching.getConfigValidationErrors());
+ this.configValidationErrors.addAll(this.scheduling.getConfigValidationErrors());
+ }
+}
diff --git a/ai-data-processor/src/main/java/com/publicissapient/kpidashboard/job/recommendationcalculation/listener/RecommendationCalculationJobExecutionListener.java b/ai-data-processor/src/main/java/com/publicissapient/kpidashboard/job/recommendationcalculation/listener/RecommendationCalculationJobExecutionListener.java
new file mode 100644
index 000000000..36e12e734
--- /dev/null
+++ b/ai-data-processor/src/main/java/com/publicissapient/kpidashboard/job/recommendationcalculation/listener/RecommendationCalculationJobExecutionListener.java
@@ -0,0 +1,109 @@
+/*
+ * Copyright 2014 CapitalOne, LLC.
+ * Further development Copyright 2022 Sapient Corporation.
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+package com.publicissapient.kpidashboard.job.recommendationcalculation.listener;
+
+import java.time.Instant;
+import java.util.Objects;
+import java.util.Optional;
+
+import com.knowhow.retro.aigatewayclient.client.response.aiproviders.AiProvidersResponseDTO;
+import org.bson.types.ObjectId;
+import org.springframework.batch.core.BatchStatus;
+import org.springframework.batch.core.JobExecution;
+import org.springframework.batch.core.JobExecutionListener;
+import org.springframework.batch.core.JobParameters;
+import org.springframework.lang.NonNull;
+import org.springframework.stereotype.Component;
+
+import com.knowhow.retro.aigatewayclient.client.AiGatewayClient;
+import com.knowhow.retro.aigatewayclient.exception.AiGatewayInitializationException;
+import com.publicissapient.kpidashboard.common.model.application.ErrorDetail;
+import com.publicissapient.kpidashboard.common.model.tracelog.JobExecutionTraceLog;
+import com.publicissapient.kpidashboard.common.service.JobExecutionTraceLogService;
+import com.publicissapient.kpidashboard.job.constant.JobConstants;
+import com.publicissapient.kpidashboard.job.recommendationcalculation.service.RecommendationProjectBatchService;
+
+import lombok.RequiredArgsConstructor;
+import lombok.extern.slf4j.Slf4j;
+
+/**
+ * Job execution listener for recommendation calculation job.
+ */
+@Slf4j
+@Component
+@RequiredArgsConstructor
+public class RecommendationCalculationJobExecutionListener implements JobExecutionListener {
+
+ private final RecommendationProjectBatchService projectBatchService;
+ private final JobExecutionTraceLogService jobExecutionTraceLogService;
+ private final AiGatewayClient aiGatewayClient;
+
+ /**
+ * Validates AI Gateway configuration before job execution starts.
+ *
+ * @param jobExecution
+ * the job execution context
+ * @throws AiGatewayInitializationException
+ * if AI Gateway configuration is invalid
+ */
+ @Override
+ public void beforeJob(@NonNull JobExecution jobExecution) {
+ log.info("{} Validating AI Gateway configuration before job execution", JobConstants.LOG_PREFIX_RECOMMENDATION);
+
+ // Validate AI Gateway configuration using the client's built-in validator
+ AiProvidersResponseDTO aiProviders = aiGatewayClient.getProviders();
+
+ log.info("{} AI Gateway configuration validated successfully. Available providers: {}",
+ JobConstants.LOG_PREFIX_RECOMMENDATION, aiProviders);
+ }
+
+ @Override
+ public void afterJob(@NonNull JobExecution jobExecution) {
+ log.info("{} Job completed with status: {}", JobConstants.LOG_PREFIX_RECOMMENDATION,
+ jobExecution.getStatus());
+ projectBatchService.initializeBatchProcessingParametersForTheNextProcess();
+ storeJobExecutionStatus(jobExecution);
+ }
+
+ private void storeJobExecutionStatus(JobExecution jobExecution) {
+ JobParameters jobParameters = jobExecution.getJobParameters();
+ String jobName = jobParameters.getString("jobName");
+ ObjectId executionId = (ObjectId) Objects.requireNonNull(jobParameters.getParameter("executionId")).getValue();
+
+ Optional<JobExecutionTraceLog> executionTraceLogOptional = this.jobExecutionTraceLogService
+ .findById(executionId);
+ if (executionTraceLogOptional.isPresent()) {
+ JobExecutionTraceLog executionTraceLog = executionTraceLogOptional.get();
+ executionTraceLog.setExecutionOngoing(false);
+ executionTraceLog.setExecutionEndedAt(Instant.now());
+ executionTraceLog.setExecutionSuccess(jobExecution.getStatus() == BatchStatus.COMPLETED);
+ executionTraceLog
+ .setErrorDetailList(jobExecution.getAllFailureExceptions().stream().map(failureException -> {
+ ErrorDetail errorDetail = new ErrorDetail();
+ errorDetail.setError(failureException.getMessage());
+ return errorDetail;
+ }).toList());
+ this.jobExecutionTraceLogService.updateJobExecution(executionTraceLog);
+ } else {
+ log.error(
+ "{} Could not store job execution ending status for job with name {} and execution id {}. Job "
+ + "execution could not be found",
+ JobConstants.LOG_PREFIX_RECOMMENDATION, jobName, executionId);
+ }
+ }
+}
diff --git a/ai-data-processor/src/main/java/com/publicissapient/kpidashboard/job/recommendationcalculation/parser/BatchRecommendationResponseParser.java b/ai-data-processor/src/main/java/com/publicissapient/kpidashboard/job/recommendationcalculation/parser/BatchRecommendationResponseParser.java
new file mode 100644
index 000000000..f8b3c85d6
--- /dev/null
+++ b/ai-data-processor/src/main/java/com/publicissapient/kpidashboard/job/recommendationcalculation/parser/BatchRecommendationResponseParser.java
@@ -0,0 +1,225 @@
+/*
+ * Copyright 2014 CapitalOne, LLC.
+ * Further development Copyright 2022 Sapient Corporation.
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+package com.publicissapient.kpidashboard.job.recommendationcalculation.parser;
+
+import java.util.ArrayList;
+import java.util.List;
+import java.util.Optional;
+
+import org.apache.commons.lang3.StringUtils;
+import org.springframework.stereotype.Component;
+
+import com.fasterxml.jackson.databind.JsonNode;
+import com.fasterxml.jackson.databind.ObjectMapper;
+import com.knowhow.retro.aigatewayclient.client.response.chat.ChatGenerationResponseDTO;
+import com.publicissapient.kpidashboard.common.model.recommendation.batch.ActionPlan;
+import com.publicissapient.kpidashboard.common.model.recommendation.batch.Recommendation;
+import com.publicissapient.kpidashboard.common.model.recommendation.batch.Severity;
+import com.publicissapient.kpidashboard.job.constant.JobConstants;
+
+import lombok.RequiredArgsConstructor;
+import lombok.extern.slf4j.Slf4j;
+
+/**
+ * Parser for batch processor AI Gateway responses. Converts AI-generated JSON
+ * responses into structured Recommendation objects.
+ */
+@Slf4j
+@Component
+@RequiredArgsConstructor
+public class BatchRecommendationResponseParser {
+
+ public static final String TITLE = "title";
+ public static final String DESCRIPTION = "description";
+ public static final String RECOMMENDATIONS = "recommendations";
+ public static final String SEVERITY = "severity";
+ public static final String ACTION_PLANS = "actionPlans";
+ public static final String TIME_TO_VALUE = "timeToValue";
+ private static final String MARKDOWN_CODE_FENCE = "```";
+ private static final char JSON_START_CHAR = '{';
+ private static final String EMPTY_JSON_OBJECT = "{}";
+ private final ObjectMapper objectMapper;
+
+ /**
+ * Parses AI response into a Recommendation object. Validates response content
+ * and structure.
+ *
+ * @param response
+ * ChatGenerationResponseDTO from AI Gateway
+ * @return Optional containing parsed Recommendation, or empty if parsing fails
+ * @throws IllegalArgumentException
+ * if response is null
+ */
+ public Optional<Recommendation> parseRecommendation(ChatGenerationResponseDTO response) {
+ if (response == null) {
+ throw new IllegalArgumentException("AI Gateway response cannot be null");
+ }
+
+ // Validate response content is not null or empty
+ String aiResponse = response.content();
+ if (aiResponse == null || aiResponse.trim().isEmpty()) {
+ log.error("{} AI Gateway returned null or empty response content",
+ JobConstants.LOG_PREFIX_RECOMMENDATION);
+ return Optional.empty();
+ }
+
+ return parseRecommendationContent(aiResponse);
+ }
+
+ /**
+ * Parses AI response content into a Recommendation object.
+ *
+ * @param aiResponse
+ * JSON string from AI Gateway
+ * @return Optional containing parsed Recommendation, or empty if parsing fails
+ */
+ private Optional<Recommendation> parseRecommendationContent(String aiResponse) {
+ if (StringUtils.isBlank(aiResponse)) {
+ log.error("{} AI response is empty, cannot parse recommendation",
+ JobConstants.LOG_PREFIX_RECOMMENDATION);
+ return Optional.empty();
+ }
+
+ try {
+ String jsonContent = extractJsonContent(aiResponse);
+
+ if (StringUtils.isBlank(jsonContent) || EMPTY_JSON_OBJECT.equals(jsonContent)) {
+ log.error("{} Extracted JSON content is empty or invalid from AI response",
+ JobConstants.LOG_PREFIX_RECOMMENDATION);
+ return Optional.empty();
+ }
+ JsonNode rootNode = objectMapper.readTree(jsonContent);
+
+ // Check for direct recommendation object with required non-empty fields
+ if (hasValidTextField(rootNode, TITLE) && hasValidTextField(rootNode, DESCRIPTION)) {
+ return Optional.of(parseRecommendationNode(rootNode));
+ }
+
+ // Check for recommendations array
+ return Optional.ofNullable(rootNode.get(RECOMMENDATIONS)).filter(JsonNode::isArray)
+ .filter(node -> !node.isEmpty()).map(node -> parseRecommendationNode(node.get(0)));
+
+ } catch (Exception e) {
+ String preview = StringUtils.abbreviate(aiResponse, 100);
+ log.error("{} Error parsing AI response JSON: {} - Response preview: {}",
+ JobConstants.LOG_PREFIX_RECOMMENDATION, e.getMessage(), preview, e);
+ return Optional.empty();
+ }
+ }
+
+ /**
+ * Extracts JSON content from AI response by removing markdown code blocks.
+ * Handles responses wrapped in ```json``` markdown blocks.
+ *
+ * @param aiResponse
+ * the raw AI response string
+ * @return extracted JSON content, or empty JSON object if extraction fails
+ */
+ private String extractJsonContent(String aiResponse) {
+ String content = StringUtils.defaultIfBlank(aiResponse, EMPTY_JSON_OBJECT).trim();
+
+ // Remove markdown code blocks if present
+ if (content.startsWith(MARKDOWN_CODE_FENCE)) {
+ content = StringUtils.substringBetween(content, "\n", MARKDOWN_CODE_FENCE);
+ if (content == null) {
+ return EMPTY_JSON_OBJECT;
+ }
+ }
+
+ // Find and extract JSON object starting from first {
+ int jsonStart = content.indexOf(JSON_START_CHAR);
+ return jsonStart >= 0 ? content.substring(jsonStart) : content;
+ }
+
+ /**
+ * Parses a JSON node into a Recommendation object. Extracts all fields directly
+ * from AI response.
+ *
+ * @param node
+ * the JSON node containing recommendation data
+ * @return parsed Recommendation object with values exactly as provided by AI
+ */
+ private Recommendation parseRecommendationNode(JsonNode node) {
+ // Parse severity directly from AI response
+ Severity severity = Optional.ofNullable(getTextValue(node, SEVERITY)).map(String::toUpperCase)
+ .flatMap(this::parseSeverity).orElse(null);
+
+ // Parse action plans
+ List<ActionPlan> actionPlans = Optional.ofNullable(node.get(ACTION_PLANS)).filter(JsonNode::isArray)
+ .map(this::parseActionPlans).orElse(null);
+
+ // Build recommendation using builder
+ return Recommendation.builder().title(getTextValue(node, TITLE)).description(getTextValue(node, DESCRIPTION))
+ .severity(severity).timeToValue(getTextValue(node, TIME_TO_VALUE)).actionPlans(actionPlans).build();
+ }
+
+ /**
+ * Safely parses severity enum value.
+ */
+ private Optional<Severity> parseSeverity(String severityStr) {
+ try {
+ return Optional.of(Severity.valueOf(severityStr));
+ } catch (IllegalArgumentException e) {
+ log.warn("{} Invalid severity value from AI response: {}. Saving as null.",
+ JobConstants.LOG_PREFIX_RECOMMENDATION, severityStr);
+ return Optional.empty();
+ }
+ }
+
+ /**
+ * Parses action plans from JSON array node.
+ */
+ private List<ActionPlan> parseActionPlans(JsonNode actionPlansNode) {
+ List<ActionPlan> actionPlans = new ArrayList<>();
+ actionPlansNode.forEach(actionNode -> {
+ ActionPlan action = ActionPlan.builder().title(getTextValue(actionNode, TITLE))
+ .description(getTextValue(actionNode, DESCRIPTION)).build();
+ actionPlans.add(action);
+ });
+ return actionPlans;
+ }
+
+ /**
+ * Checks if JSON node has a valid non-empty text field.
+ *
+ * @param node
+ * the JSON node to check
+ * @param fieldName
+ * the field name to check
+ * @return true if field exists and has non-blank text
+ */
+ private boolean hasValidTextField(JsonNode node, String fieldName) {
+ return Optional.ofNullable(node.get(fieldName)).map(JsonNode::asText).filter(StringUtils::isNotBlank)
+ .isPresent();
+ }
+
+ /**
+ * Safely extracts text value from JSON node. Returns null if field doesn't
+ * exist or is null.
+ *
+ * @param node
+ * the JSON node to extract from
+ * @param fieldName
+ * the field name to extract
+ * @return extracted text value, or null if not present
+ */
+ private String getTextValue(JsonNode node, String fieldName) {
+ return Optional.ofNullable(node.get(fieldName)).map(JsonNode::asText).filter(StringUtils::isNotBlank)
+ .orElse(null);
+ }
+}
diff --git a/ai-data-processor/src/main/java/com/publicissapient/kpidashboard/job/recommendationcalculation/processor/ProjectItemProcessor.java b/ai-data-processor/src/main/java/com/publicissapient/kpidashboard/job/recommendationcalculation/processor/ProjectItemProcessor.java
new file mode 100644
index 000000000..b9b4fd385
--- /dev/null
+++ b/ai-data-processor/src/main/java/com/publicissapient/kpidashboard/job/recommendationcalculation/processor/ProjectItemProcessor.java
@@ -0,0 +1,81 @@
+/*
+ * Copyright 2024 Sapient Corporation
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and limitations under the
+ * License.
+ */
+
+package com.publicissapient.kpidashboard.job.recommendationcalculation.processor;
+
+import org.apache.commons.lang3.exception.ExceptionUtils;
+import org.springframework.batch.item.ItemProcessor;
+
+import com.publicissapient.kpidashboard.common.model.recommendation.batch.RecommendationsActionPlan;
+import com.publicissapient.kpidashboard.common.service.ProcessorExecutionTraceLogService;
+import com.publicissapient.kpidashboard.job.constant.JobConstants;
+import com.publicissapient.kpidashboard.job.recommendationcalculation.service.RecommendationCalculationService;
+import com.publicissapient.kpidashboard.job.shared.dto.ProjectInputDTO;
+
+import jakarta.annotation.Nonnull;
+import lombok.RequiredArgsConstructor;
+import lombok.extern.slf4j.Slf4j;
+
+/**
+ * Spring Batch ItemProcessor for processing project recommendations.
+ */
+@Slf4j
+@RequiredArgsConstructor
+public class ProjectItemProcessor implements ItemProcessor<ProjectInputDTO, RecommendationsActionPlan> {
+
+ private final RecommendationCalculationService recommendationCalculationService;
+ private final ProcessorExecutionTraceLogService processorExecutionTraceLogService;
+
+ /**
+ * Processes a single project to generate AI recommendations. Handles errors
+ * gracefully by logging and saving failure trace.
+ *
+ * @param projectInputDTO
+ * the project input data (must not be null)
+ * @return RecommendationsActionPlan if successful, null if processing fails
+ * @throws Exception
+ * if fatal error occurs (Spring Batch will handle retry/skip logic)
+ */
+ @Override
+ public RecommendationsActionPlan process(@Nonnull ProjectInputDTO projectInputDTO) throws Exception {
+ try {
+ log.debug("{} Starting recommendation calculation for project with nodeId: {}",
+ JobConstants.LOG_PREFIX_RECOMMENDATION, projectInputDTO.nodeId());
+
+ RecommendationsActionPlan recommendation = recommendationCalculationService
+ .calculateRecommendationsForProject(projectInputDTO);
+
+ log.debug("{} Generated recommendation plan for project: {} with persona: {}",
+ JobConstants.LOG_PREFIX_RECOMMENDATION, projectInputDTO.name(),
+ recommendation.getMetadata().getPersona());
+ return recommendation;
+ } catch (Exception e) {
+ log.error("{} Failed to process project: {} (nodeId: {})",
+ JobConstants.LOG_PREFIX_RECOMMENDATION, projectInputDTO.name(),
+ projectInputDTO.nodeId(), e);
+
+ // Save detailed failure trace log with more context
+ String errorMessage = String.format("Processing failed for project %s: %s - %s. Root cause: %s",
+ projectInputDTO.name(), e.getClass().getSimpleName(), e.getMessage(),
+ ExceptionUtils.getRootCauseMessage(e));
+ processorExecutionTraceLogService.upsertTraceLog(JobConstants.JOB_RECOMMENDATION_CALCULATION,
+ projectInputDTO.nodeId(), false, errorMessage);
+
+ // Return null to skip this projectInputDTO
+ return null;
+ }
+ }
+}
diff --git a/ai-data-processor/src/main/java/com/publicissapient/kpidashboard/job/recommendationcalculation/reader/ProjectItemReader.java b/ai-data-processor/src/main/java/com/publicissapient/kpidashboard/job/recommendationcalculation/reader/ProjectItemReader.java
new file mode 100644
index 000000000..79475cf5c
--- /dev/null
+++ b/ai-data-processor/src/main/java/com/publicissapient/kpidashboard/job/recommendationcalculation/reader/ProjectItemReader.java
@@ -0,0 +1,47 @@
+/*
+ * Copyright 2014 CapitalOne, LLC.
+ * Further development Copyright 2022 Sapient Corporation.
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+package com.publicissapient.kpidashboard.job.recommendationcalculation.reader;
+
+import org.springframework.batch.item.ItemReader;
+
+import com.publicissapient.kpidashboard.job.constant.JobConstants;
+import com.publicissapient.kpidashboard.job.recommendationcalculation.service.RecommendationProjectBatchService;
+import com.publicissapient.kpidashboard.job.shared.dto.ProjectInputDTO;
+
+import lombok.RequiredArgsConstructor;
+import lombok.extern.slf4j.Slf4j;
+
+/**
+ * Spring Batch ItemReader for reading project input data.
+ */
+@Slf4j
+@RequiredArgsConstructor
+public class ProjectItemReader implements ItemReader<ProjectInputDTO> {
+
+ private final RecommendationProjectBatchService projectBatchService;
+
+ @Override
+ public ProjectInputDTO read() {
+ ProjectInputDTO projectInputDTO = projectBatchService.getNextProjectInputData();
+
+ log.info("{} Received project input dto {}", JobConstants.LOG_PREFIX_RECOMMENDATION,
+ projectInputDTO);
+
+ return projectInputDTO;
+ }
+}
diff --git a/ai-data-processor/src/main/java/com/publicissapient/kpidashboard/job/recommendationcalculation/service/KpiDataExtractionService.java b/ai-data-processor/src/main/java/com/publicissapient/kpidashboard/job/recommendationcalculation/service/KpiDataExtractionService.java
new file mode 100644
index 000000000..71dc9d7c7
--- /dev/null
+++ b/ai-data-processor/src/main/java/com/publicissapient/kpidashboard/job/recommendationcalculation/service/KpiDataExtractionService.java
@@ -0,0 +1,180 @@
+/*
+ * Copyright 2014 CapitalOne, LLC.
+ * Further development Copyright 2022 Sapient Corporation.
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+package com.publicissapient.kpidashboard.job.recommendationcalculation.service;
+
+import java.util.ArrayList;
+import java.util.Arrays;
+import java.util.HashMap;
+import java.util.List;
+import java.util.Map;
+
+import org.apache.commons.collections4.CollectionUtils;
+import org.springframework.stereotype.Service;
+
+import com.publicissapient.kpidashboard.client.customapi.KnowHOWClient;
+import com.publicissapient.kpidashboard.client.customapi.dto.KpiElement;
+import com.publicissapient.kpidashboard.client.customapi.dto.KpiRequest;
+import com.publicissapient.kpidashboard.common.constant.CommonConstant;
+import com.publicissapient.kpidashboard.common.model.application.DataCount;
+import com.publicissapient.kpidashboard.common.model.application.DataCountGroup;
+import com.publicissapient.kpidashboard.common.model.application.KpiDataPrompt;
+import com.publicissapient.kpidashboard.job.constant.JobConstants;
+import com.publicissapient.kpidashboard.job.recommendationcalculation.config.RecommendationCalculationConfig;
+import com.publicissapient.kpidashboard.job.shared.dto.ProjectInputDTO;
+
+import lombok.RequiredArgsConstructor;
+import lombok.extern.slf4j.Slf4j;
+
+/**
+ * Service responsible for extracting and transforming KPI data from KnowHOW
+ * API.
+ */
+@Slf4j
+@Service
+@RequiredArgsConstructor
+public class KpiDataExtractionService {
+
+ private static final List<String> FILTER_LIST = Arrays.asList("Final Scope (Story Points)", "Average Coverage",
+ "Story Points", "Overall");
+ private final KnowHOWClient knowHOWClient;
+ private final RecommendationCalculationConfig recommendationCalculationConfig;
+
+ /**
+ * Fetches and extracts KPI data for the given project.
+ *
+ * @param projectInput
+ * the project input containing hierarchy information
+ * @return map of KPI name to formatted KPI data prompts
+ */
+ public Map<String, Object> fetchKpiDataForProject(ProjectInputDTO projectInput) {
+ try {
+ log.debug("{} Fetching KPI data for project: {}", JobConstants.LOG_PREFIX_RECOMMENDATION,
+ projectInput.nodeId());
+
+ // Construct KPI requests
+ List<KpiRequest> kpiRequests = constructKpiRequests(projectInput);
+
+ // Fetch from KnowHOW API
+ List<KpiElement> kpiElements = knowHOWClient.getKpiIntegrationValues(kpiRequests);
+
+ // Validate KPI elements were received
+ if (CollectionUtils.isEmpty(kpiElements)) {
+ log.error(
+ "{} No KPI elements received from KnowHOW API for project: {}. Failing recommendation calculation.",
+ JobConstants.LOG_PREFIX_RECOMMENDATION, projectInput.nodeId());
+ throw new IllegalStateException(
+ "No KPI data received from KnowHOW API for project: " + projectInput.nodeId());
+ }
+
+ // Extract and format KPI data
+ Map<String, Object> kpiData = extractKpiData(kpiElements);
+
+ // Validate that extracted KPI data has meaningful content
+ boolean hasData = kpiData.values().stream()
+ .anyMatch(value -> value instanceof List && !((List<?>) value).isEmpty());
+
+ if (!hasData) {
+ log.error(
+ "{} KPI data extraction resulted in empty values for all KPIs for project: {}. Failing recommendation calculation.",
+ JobConstants.LOG_PREFIX_RECOMMENDATION, projectInput.nodeId());
+ throw new IllegalStateException(
+ "No meaningful KPI data available for project: " + projectInput.nodeId());
+ }
+
+ log.debug("{} Successfully fetched {} KPIs for project: {}", JobConstants.LOG_PREFIX_RECOMMENDATION,
+ kpiData.size(), projectInput.nodeId());
+ return kpiData;
+
+ } catch (Exception e) {
+ log.error("{} Error fetching KPI data for project {}: {}", JobConstants.LOG_PREFIX_RECOMMENDATION,
+ projectInput.nodeId(), e.getMessage(), e);
+ throw e;
+ }
+ }
+
+ /**
+ * Constructs KPI requests for the given project.
+ *
+ * @param projectInput
+ * the project input containing hierarchy information
+ * @return list of KPI requests ready for API calls
+ */
+ private List<KpiRequest> constructKpiRequests(ProjectInputDTO projectInput) {
+ KpiRequest kpiRequest = KpiRequest.builder()
+ .kpiIdList(recommendationCalculationConfig.getCalculationConfig().getKpiList())
+ .selectedMap(Map.of(CommonConstant.HIERARCHY_LEVEL_ID_PROJECT, List.of(projectInput.nodeId())))
+ .ids(new String[] { projectInput.nodeId() }).level(projectInput.hierarchyLevel())
+ .label(projectInput.hierarchyLevelId()).build();
+
+ return List.of(kpiRequest);
+ }
+
+ /**
+ * Extracts and formats KPI data from KPI elements.
+ *
+ * @param kpiElements
+ * the list of KPI elements from KnowHOW API
+ * @return map where key is KPI name and value is list of formatted data prompts
+ */
+ @SuppressWarnings("unchecked")
+ private Map<String, Object> extractKpiData(List<KpiElement> kpiElements) {
+ Map<String, Object> kpiDataMap = new HashMap<>();
+
+ kpiElements.forEach(kpiElement -> {
+ List<String> kpiDataPromptList = new ArrayList<>();
+ Object trendValueObj = kpiElement.getTrendValueList();
+
+ // Handle both List and non-List types
+ if (trendValueObj instanceof List<?> trendValueList && CollectionUtils.isNotEmpty(trendValueList)) {
+ DataCount dataCount = trendValueList.get(0) instanceof DataCountGroup
+ ? ((List<DataCountGroup>) trendValueList).stream().filter(this::matchesFilterCriteria)
+ .map(DataCountGroup::getValue).flatMap(List::stream).findFirst().orElse(null)
+ : ((List<DataCount>) trendValueList).get(0);
+
+ if (dataCount != null && dataCount.getValue() instanceof List) {
+ ((List<DataCount>) dataCount.getValue()).forEach(dataCountItem -> {
+ KpiDataPrompt kpiDataPrompt = new KpiDataPrompt();
+ kpiDataPrompt.setData(dataCountItem.getData());
+ kpiDataPrompt.setSProjectName(dataCountItem.getSProjectName());
+ kpiDataPrompt.setSSprintName(dataCountItem.getsSprintName());
+ kpiDataPrompt.setDate(dataCountItem.getDate());
+ kpiDataPromptList.add(kpiDataPrompt.toString());
+ });
+ }
+ } else if (trendValueObj != null) {
+ log.debug("{} Skipping non-list trendValueList for KPI {}: {} (type: {})",
+ JobConstants.LOG_PREFIX_RECOMMENDATION, kpiElement.getKpiId(),
+ kpiElement.getKpiName(), trendValueObj.getClass().getSimpleName());
+ }
+ kpiDataMap.put(kpiElement.getKpiName(), kpiDataPromptList);
+ });
+
+ return kpiDataMap;
+ } /**
+ * Checks if DataCountGroup matches filter criteria. Matches if either the main
+ * filter is in FILTER_LIST, or both filter1 and filter2 are in FILTER_LIST.
+ *
+ * @param trend
+ * the DataCountGroup to check
+ * @return true if trend matches filter criteria
+ */
+ private boolean matchesFilterCriteria(DataCountGroup trend) {
+ return FILTER_LIST.contains(trend.getFilter())
+ || (FILTER_LIST.contains(trend.getFilter1()) && FILTER_LIST.contains(trend.getFilter2()));
+ }
+}
diff --git a/ai-data-processor/src/main/java/com/publicissapient/kpidashboard/job/recommendationcalculation/service/RecommendationCalculationService.java b/ai-data-processor/src/main/java/com/publicissapient/kpidashboard/job/recommendationcalculation/service/RecommendationCalculationService.java
new file mode 100644
index 000000000..a4fdf0f82
--- /dev/null
+++ b/ai-data-processor/src/main/java/com/publicissapient/kpidashboard/job/recommendationcalculation/service/RecommendationCalculationService.java
@@ -0,0 +1,163 @@
+/*
+ * Copyright 2014 CapitalOne, LLC.
+ * Further development Copyright 2022 Sapient Corporation.
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+package com.publicissapient.kpidashboard.job.recommendationcalculation.service;
+
+import java.time.Instant;
+import java.util.Map;
+
+import org.apache.commons.collections4.CollectionUtils;
+import org.springframework.lang.NonNull;
+import org.springframework.stereotype.Service;
+
+import com.knowhow.retro.aigatewayclient.client.AiGatewayClient;
+import com.knowhow.retro.aigatewayclient.client.request.chat.ChatGenerationRequest;
+import com.knowhow.retro.aigatewayclient.client.response.chat.ChatGenerationResponseDTO;
+import com.publicissapient.kpidashboard.common.constant.CommonConstant;
+import com.publicissapient.kpidashboard.common.model.recommendation.batch.Persona;
+import com.publicissapient.kpidashboard.common.model.recommendation.batch.Recommendation;
+import com.publicissapient.kpidashboard.common.model.recommendation.batch.RecommendationLevel;
+import com.publicissapient.kpidashboard.common.model.recommendation.batch.RecommendationMetadata;
+import com.publicissapient.kpidashboard.common.model.recommendation.batch.RecommendationsActionPlan;
+import com.publicissapient.kpidashboard.common.service.recommendation.PromptService;
+import com.publicissapient.kpidashboard.config.mongo.TTLIndexConfigProperties;
+import com.publicissapient.kpidashboard.job.constant.JobConstants;
+import com.publicissapient.kpidashboard.job.recommendationcalculation.config.RecommendationCalculationConfig;
+import com.publicissapient.kpidashboard.job.recommendationcalculation.parser.BatchRecommendationResponseParser;
+import com.publicissapient.kpidashboard.job.shared.dto.ProjectInputDTO;
+
+import lombok.RequiredArgsConstructor;
+import lombok.extern.slf4j.Slf4j;
+
+/**
+ * Service responsible for orchestrating AI-based recommendation generation.
+ */
+@Slf4j
+@Service
+@RequiredArgsConstructor
+public class RecommendationCalculationService {
+ public static final String RECOMMENDATION_CALCULATION = "recommendation-calculation";
+ private final AiGatewayClient aiGatewayClient;
+ private final KpiDataExtractionService kpiDataExtractionService;
+ private final PromptService promptService;
+ private final BatchRecommendationResponseParser recommendationResponseParser;
+ private final RecommendationCalculationConfig recommendationCalculationConfig;
+ private final TTLIndexConfigProperties ttlIndexConfigProperties;
+
+ /**
+ * Calculates AI-generated recommendations for a given project. Orchestrates KPI
+ * data extraction, prompt building, AI generation, and validation.
+ *
+ * @param projectInput
+ * the project input containing hierarchy and sprint information
+ * (must not be null)
+ * @return recommendation action plan with validated AI recommendations
+ * @throws IllegalStateException
+ * if AI response parsing or validation fails or if configuration is
+ * invalid
+ */
+ public RecommendationsActionPlan calculateRecommendationsForProject(@NonNull ProjectInputDTO projectInput) {
+ if (CollectionUtils.isNotEmpty(recommendationCalculationConfig.getConfigValidationErrors())) {
+ throw new IllegalStateException(String.format("The following config validation errors occurred: %s",
+ String.join(CommonConstant.COMMA, recommendationCalculationConfig.getConfigValidationErrors())));
+ }
+
+ Persona persona = recommendationCalculationConfig.getCalculationConfig().getEnabledPersona();
+
+ log.info("{} Calculating recommendations for project: {} ({}) - Persona: {}",
+ JobConstants.LOG_PREFIX_RECOMMENDATION, projectInput.name(), projectInput.nodeId(),
+ persona.getDisplayName());
+
+ // Delegate KPI data extraction to specialized service
+ Map<String, List<String>> kpiData = kpiDataExtractionService.fetchKpiDataForProject(projectInput);
+
+ // Build prompt using PromptService with actual KPI data
+ String prompt = promptService.getKpiRecommendationPrompt(kpiData, persona);
+
+ // Validate prompt was generated successfully
+ if (prompt == null || prompt.trim().isEmpty()) {
+ throw new IllegalStateException("Failed to generate valid prompt for project: " + projectInput.nodeId());
+ }
+
+ ChatGenerationRequest request = ChatGenerationRequest.builder().prompt(prompt).build();
+
+ ChatGenerationResponseDTO response = aiGatewayClient.generate(request);
+
+ // Validate AI Gateway returned a response
+ if (response == null) {
+ throw new IllegalStateException("AI Gateway returned null response for project: " + projectInput.nodeId());
+ }
+
+ return buildRecommendationsActionPlan(projectInput, persona, response);
+ }
+
+ /**
+ * Builds recommendation action plan from AI response and project metadata.
+ * Parses AI response, validates using RecommendationValidator, and constructs
+ * complete plan.
+ *
+ * @param projectInput
+ * the project input data
+ * @param persona
+ * the persona used for recommendations
+ * @param response
+ * the AI response DTO
+ * @return complete recommendation action plan with metadata
+ * @throws IllegalStateException
+ * if parsing or validation fails
+ */
+ private RecommendationsActionPlan buildRecommendationsActionPlan(ProjectInputDTO projectInput, Persona persona,
+ ChatGenerationResponseDTO response) {
+
+ Instant now = Instant.now();
+
+ // Parse and validate AI response
+ Recommendation recommendation = recommendationResponseParser.parseRecommendation(response)
+ .orElseThrow(() -> new IllegalStateException(
+ "Failed to parse AI recommendation for project: " + projectInput.nodeId())); // Build metadata
+ RecommendationMetadata metadata = RecommendationMetadata.builder()
+ .requestedKpis(recommendationCalculationConfig.getCalculationConfig().getKpiList()).persona(persona)
+ .build();
+
+ // Build plan using builder
+ return RecommendationsActionPlan.builder().basicProjectConfigId(projectInput.basicProjectConfigId())
+ .projectName(projectInput.name()).persona(persona).level(RecommendationLevel.PROJECT_LEVEL)
+ .createdAt(now).expiresOn(now.plusSeconds(getTtlExpirationSeconds())).recommendations(recommendation)
+ .metadata(metadata).build();
+ }
+
+ /**
+ * Calculates TTL expiration duration in seconds. Reads from
+ * mongo.ttl-index.configs.recommendation-calculation configuration.
+ *
+ * @return TTL expiration time in seconds
+ * @throws IllegalStateException
+ * if TTL configuration not found
+ */
+ private long getTtlExpirationSeconds() {
+ TTLIndexConfigProperties.TTLIndexConfig ttlConfig = ttlIndexConfigProperties.getConfigs()
+ .get(RECOMMENDATION_CALCULATION);
+
+ if (ttlConfig == null) {
+ log.error("{} TTL configuration 'recommendation-calculation' not found in mongo.ttl-index.configs",
+ JobConstants.LOG_PREFIX_RECOMMENDATION);
+ throw new IllegalStateException("TTL configuration for recommendation-calculation is not configured");
+ }
+
+ return ttlConfig.getTimeUnit().toSeconds(ttlConfig.getExpiration());
+ }
+}
diff --git a/ai-data-processor/src/main/java/com/publicissapient/kpidashboard/job/recommendationcalculation/service/RecommendationProjectBatchService.java b/ai-data-processor/src/main/java/com/publicissapient/kpidashboard/job/recommendationcalculation/service/RecommendationProjectBatchService.java
new file mode 100644
index 000000000..b0627dbea
--- /dev/null
+++ b/ai-data-processor/src/main/java/com/publicissapient/kpidashboard/job/recommendationcalculation/service/RecommendationProjectBatchService.java
@@ -0,0 +1,159 @@
+/*
+ * Copyright 2014 CapitalOne, LLC.
+ * Further development Copyright 2022 Sapient Corporation.
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+package com.publicissapient.kpidashboard.job.recommendationcalculation.service;
+
+import java.util.Collections;
+import java.util.List;
+
+import org.springframework.batch.core.configuration.annotation.JobScope;
+import org.springframework.data.domain.Page;
+import org.springframework.data.domain.PageRequest;
+import org.springframework.stereotype.Component;
+import org.springframework.util.CollectionUtils;
+
+import com.publicissapient.kpidashboard.common.model.application.HierarchyLevel;
+import com.publicissapient.kpidashboard.common.model.application.ProjectBasicConfig;
+import com.publicissapient.kpidashboard.common.repository.application.ProjectBasicConfigRepository;
+import com.publicissapient.kpidashboard.common.service.HierarchyLevelServiceImpl;
+import com.publicissapient.kpidashboard.job.constant.JobConstants;
+import com.publicissapient.kpidashboard.job.recommendationcalculation.config.RecommendationCalculationConfig;
+import com.publicissapient.kpidashboard.job.shared.dto.ProjectInputDTO;
+
+import jakarta.annotation.PostConstruct;
+import lombok.Builder;
+import lombok.RequiredArgsConstructor;
+import lombok.extern.slf4j.Slf4j;
+
+/**
+ * Service for batching projects during recommendation calculation.
+ */
+@Slf4j
+@Component
+@JobScope
+@RequiredArgsConstructor
+public class RecommendationProjectBatchService {
+
+ private final RecommendationCalculationConfig recommendationCalculationConfig;
+ private final ProjectBasicConfigRepository projectBasicConfigRepository;
+ private final HierarchyLevelServiceImpl hierarchyLevelServiceImpl;
+
+ private ProjectBatchProcessingParameters processingParameters;
+
+ @Builder
+ private static class ProjectBatchProcessingParameters {
+ private int currentPageNumber;
+ private int currentIndex;
+ private int numberOfPages;
+ private boolean repositoryHasMoreData;
+ private boolean shouldStartANewBatchProcess;
+ private List<ProjectInputDTO> currentProjectBatch;
+ }
+
+ /**
+ * Retrieves the next project input data for processing.
+ */
+ public ProjectInputDTO getNextProjectInputData() {
+ if (this.processingParameters.shouldStartANewBatchProcess) {
+ initializeANewBatchProcess();
+
+ if (batchContainsNoItems()) {
+ log.info("{} No elements found after initializing new batch process",
+ JobConstants.LOG_PREFIX_RECOMMENDATION);
+ return null;
+ }
+ }
+
+ if (currentProjectBatchIsProcessed()) {
+ setNextProjectInputBatchData();
+
+ if (batchContainsNoItems()) {
+ log.info("{} Finished reading all project items", JobConstants.LOG_PREFIX_RECOMMENDATION);
+ return null;
+ }
+ }
+
+ ProjectInputDTO nextProjectInputDTO = this.processingParameters.currentProjectBatch
+ .get(this.processingParameters.currentIndex);
+ this.processingParameters.currentIndex++;
+ return nextProjectInputDTO;
+ }
+
+ /**
+ * Resets batch processing parameters for the next job execution.
+ */
+ public void initializeBatchProcessingParametersForTheNextProcess() {
+ this.processingParameters = ProjectBatchProcessingParameters.builder().currentPageNumber(0).currentIndex(0)
+ .numberOfPages(0).repositoryHasMoreData(false).shouldStartANewBatchProcess(true).build();
+ }
+
+ @PostConstruct
+ private void initializeBatchProcessingParameters() {
+ initializeBatchProcessingParametersForTheNextProcess();
+ }
+
+ private boolean batchContainsNoItems() {
+ return CollectionUtils.isEmpty(this.processingParameters.currentProjectBatch);
+ }
+
+ private boolean currentProjectBatchIsProcessed() {
+ return this.processingParameters.currentIndex == this.processingParameters.currentProjectBatch.size();
+ }
+
+ private void initializeANewBatchProcess() {
+ Page<ProjectBasicConfig> projectPage = getNextProjectPage();
+ HierarchyLevel projectHierarchyLevel = hierarchyLevelServiceImpl.getProjectHierarchyLevel();
+
+ this.processingParameters = ProjectBatchProcessingParameters.builder().currentPageNumber(0).currentIndex(0)
+ .numberOfPages(projectPage.getTotalPages()).repositoryHasMoreData(projectPage.hasNext())
+ .shouldStartANewBatchProcess(false)
+ .currentProjectBatch(constructProjectInputDTOList(projectPage, projectHierarchyLevel)).build();
+ }
+
+ private void setNextProjectInputBatchData() {
+ if (this.processingParameters.repositoryHasMoreData) {
+ this.processingParameters.currentPageNumber++;
+
+ Page<ProjectBasicConfig> projectPage = getNextProjectPage();
+ HierarchyLevel projectHierarchyLevel = hierarchyLevelServiceImpl.getProjectHierarchyLevel();
+
+ this.processingParameters.currentProjectBatch = constructProjectInputDTOList(projectPage,
+ projectHierarchyLevel);
+ this.processingParameters.repositoryHasMoreData = projectPage.hasNext();
+ this.processingParameters.currentIndex = 0;
+ } else {
+ this.processingParameters.currentProjectBatch = Collections.emptyList();
+ }
+ }
+
+ private Page<ProjectBasicConfig> getNextProjectPage() {
+ return projectBasicConfigRepository.findByKanbanAndProjectOnHold(false, false,
+ PageRequest.of(this.processingParameters.currentPageNumber,
+ recommendationCalculationConfig.getBatching().getChunkSize()));
+ }
+
+ private List<ProjectInputDTO> constructProjectInputDTOList(Page<ProjectBasicConfig> projectPage,
+ HierarchyLevel projectHierarchyLevel) {
+ return projectPage.stream().filter(project -> project.getId() != null && project.getProjectNodeId() != null)
+ .map(project -> ProjectInputDTO.builder().name(project.getProjectDisplayName())
+ .nodeId(project.getProjectNodeId()).basicProjectConfigId(String.valueOf(project.getId()))
+ .hierarchyLevel(projectHierarchyLevel.getLevel())
+ .hierarchyLevelId(projectHierarchyLevel.getHierarchyLevelId()).sprints(Collections.emptyList())
+ .build())
+ .toList();
+ }
+}
diff --git a/ai-data-processor/src/main/java/com/publicissapient/kpidashboard/job/recommendationcalculation/strategy/RecommendationCalculationJobStrategy.java b/ai-data-processor/src/main/java/com/publicissapient/kpidashboard/job/recommendationcalculation/strategy/RecommendationCalculationJobStrategy.java
new file mode 100644
index 000000000..e73bc8f7e
--- /dev/null
+++ b/ai-data-processor/src/main/java/com/publicissapient/kpidashboard/job/recommendationcalculation/strategy/RecommendationCalculationJobStrategy.java
@@ -0,0 +1,113 @@
+/*
+ * Copyright 2014 CapitalOne, LLC.
+ * Further development Copyright 2022 Sapient Corporation.
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+package com.publicissapient.kpidashboard.job.recommendationcalculation.strategy;
+
+import java.util.Optional;
+import java.util.concurrent.Future;
+
+import org.springframework.batch.core.Job;
+import org.springframework.batch.core.Step;
+import org.springframework.batch.core.job.builder.JobBuilder;
+import org.springframework.batch.core.repository.JobRepository;
+import org.springframework.batch.core.step.builder.StepBuilder;
+import org.springframework.batch.integration.async.AsyncItemProcessor;
+import org.springframework.batch.integration.async.AsyncItemWriter;
+import org.springframework.core.task.TaskExecutor;
+import org.springframework.stereotype.Component;
+import org.springframework.transaction.PlatformTransactionManager;
+
+import com.knowhow.retro.aigatewayclient.client.AiGatewayClient;
+import com.publicissapient.kpidashboard.common.model.recommendation.batch.RecommendationsActionPlan;
+import com.publicissapient.kpidashboard.common.repository.recommendation.RecommendationRepository;
+import com.publicissapient.kpidashboard.common.service.JobExecutionTraceLogService;
+import com.publicissapient.kpidashboard.common.service.ProcessorExecutionTraceLogService;
+import com.publicissapient.kpidashboard.job.config.base.SchedulingConfig;
+import com.publicissapient.kpidashboard.job.recommendationcalculation.config.RecommendationCalculationConfig;
+import com.publicissapient.kpidashboard.job.recommendationcalculation.listener.RecommendationCalculationJobExecutionListener;
+import com.publicissapient.kpidashboard.job.recommendationcalculation.processor.ProjectItemProcessor;
+import com.publicissapient.kpidashboard.job.recommendationcalculation.reader.ProjectItemReader;
+import com.publicissapient.kpidashboard.job.recommendationcalculation.service.RecommendationCalculationService;
+import com.publicissapient.kpidashboard.job.recommendationcalculation.service.RecommendationProjectBatchService;
+import com.publicissapient.kpidashboard.job.recommendationcalculation.writer.ProjectItemWriter;
+import com.publicissapient.kpidashboard.job.shared.dto.ProjectInputDTO;
+import com.publicissapient.kpidashboard.job.strategy.JobStrategy;
+
+import lombok.RequiredArgsConstructor;
+
+/**
+ * Job strategy for recommendation calculation batch job.
+ *
+ */
+@Component
+@RequiredArgsConstructor
+public class RecommendationCalculationJobStrategy implements JobStrategy {
+
+ private final JobRepository jobRepository;
+ private final TaskExecutor taskExecutor;
+ private final PlatformTransactionManager platformTransactionManager;
+ private final RecommendationCalculationConfig recommendationCalculationConfig;
+ private final RecommendationProjectBatchService projectBatchService;
+ private final RecommendationCalculationService recommendationCalculationService;
+ private final JobExecutionTraceLogService jobExecutionTraceLogService;
+ private final ProcessorExecutionTraceLogService processorExecutionTraceLogService;
+ private final RecommendationRepository recommendationRepository;
+ private final AiGatewayClient aiGatewayClient;
+
+ @Override
+ public String getJobName() {
+ return recommendationCalculationConfig.getName();
+ }
+
+ @Override
+ public Optional<SchedulingConfig> getSchedulingConfig() {
+ return Optional.of(recommendationCalculationConfig.getScheduling());
+ }
+
+ @Override
+ public Job getJob() {
+ return new JobBuilder(recommendationCalculationConfig.getName(), jobRepository).start(chunkProcessProjects())
+ .listener(new RecommendationCalculationJobExecutionListener(this.projectBatchService,
+ this.jobExecutionTraceLogService, this.aiGatewayClient))
+ .build();
+ }
+
+ private Step chunkProcessProjects() {
+ return new StepBuilder(String.format("%s-chunk-process", recommendationCalculationConfig.getName()),
+ jobRepository)
+ .<ProjectInputDTO, Future<RecommendationsActionPlan>>chunk(
+ recommendationCalculationConfig.getBatching().getChunkSize(), platformTransactionManager)
+ .reader(new ProjectItemReader(this.projectBatchService)).processor(asyncProjectProcessor())
+ .writer(asyncItemWriter()).build();
+ }
+
+ private AsyncItemProcessor<ProjectInputDTO, RecommendationsActionPlan> asyncProjectProcessor() {
+ AsyncItemProcessor<ProjectInputDTO, RecommendationsActionPlan> asyncItemProcessor = new AsyncItemProcessor<>();
+ asyncItemProcessor.setDelegate(new ProjectItemProcessor(this.recommendationCalculationService,
+ this.processorExecutionTraceLogService));
+ asyncItemProcessor.setTaskExecutor(taskExecutor);
+ return asyncItemProcessor;
+ }
+
+ private AsyncItemWriter<RecommendationsActionPlan> asyncItemWriter() {
+ AsyncItemWriter<RecommendationsActionPlan> writer = new AsyncItemWriter<>();
+ writer.setDelegate(
+ new ProjectItemWriter(this.recommendationRepository, this.processorExecutionTraceLogService));
+ return writer;
+ }
+
+}
diff --git a/ai-data-processor/src/main/java/com/publicissapient/kpidashboard/job/recommendationcalculation/writer/ProjectItemWriter.java b/ai-data-processor/src/main/java/com/publicissapient/kpidashboard/job/recommendationcalculation/writer/ProjectItemWriter.java
new file mode 100644
index 000000000..32a9f44f7
--- /dev/null
+++ b/ai-data-processor/src/main/java/com/publicissapient/kpidashboard/job/recommendationcalculation/writer/ProjectItemWriter.java
@@ -0,0 +1,85 @@
+/*
+ * Copyright 2024 Sapient Corporation
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and limitations under the
+ * License.
+ */
+
+package com.publicissapient.kpidashboard.job.recommendationcalculation.writer;
+
+import java.util.List;
+import java.util.Objects;
+import java.util.stream.Collectors;
+
+import org.springframework.batch.item.Chunk;
+import org.springframework.batch.item.ItemWriter;
+import org.springframework.lang.NonNull;
+
+import com.publicissapient.kpidashboard.common.model.recommendation.batch.RecommendationsActionPlan;
+import com.publicissapient.kpidashboard.common.repository.recommendation.RecommendationRepository;
+import com.publicissapient.kpidashboard.common.service.ProcessorExecutionTraceLogService;
+import com.publicissapient.kpidashboard.job.constant.JobConstants;
+
+import lombok.RequiredArgsConstructor;
+import lombok.extern.slf4j.Slf4j;
+
+/**
+ * Spring Batch ItemWriter for persisting recommendation documents.
+ */
+@Slf4j
+@RequiredArgsConstructor
+ public class ProjectItemWriter implements ItemWriter<RecommendationsActionPlan> {
+
+ private final RecommendationRepository recommendationRepository;
+ private final ProcessorExecutionTraceLogService processorExecutionTraceLogService;
+
+ /**
+ * Writes a chunk of recommendations to the database. Filters out null items,
+ * saves recommendations, and updates execution trace logs.
+ *
+ * @param chunk
+ * the chunk of recommendations to persist (must not be null)
+ * @throws IllegalArgumentException
+ * if chunk is null
+ */
+ @Override
+ public void write(@NonNull Chunk<? extends RecommendationsActionPlan> chunk) {
+ // Filter out nulls
+ List<RecommendationsActionPlan> itemsToSave = chunk.getItems().stream().filter(Objects::nonNull)
+ .collect(Collectors.toList());
+
+ log.info("{} Received chunk items for inserting into database with size: {} recommendations from {} projects",
+ JobConstants.LOG_PREFIX_RECOMMENDATION, itemsToSave.size(), chunk.size());
+
+ if (!itemsToSave.isEmpty()) {
+ // Save recommendations
+ recommendationRepository.saveAll(itemsToSave);
+ log.info("{} Successfully saved {} recommendation documents",
+ JobConstants.LOG_PREFIX_RECOMMENDATION, itemsToSave.size());
+
+ // Save execution trace logs per project
+ itemsToSave.forEach(this::saveProjectExecutionTraceLog);
+ }
+ }
+
+ /**
+ * Creates or updates execution trace log for a project.
+ *
+ * @param recommendation
+ * The recommendation containing project metadata
+ */
+ private void saveProjectExecutionTraceLog(RecommendationsActionPlan recommendation) {
+ String projectId = recommendation.getBasicProjectConfigId();
+ processorExecutionTraceLogService.upsertTraceLog(JobConstants.JOB_RECOMMENDATION_CALCULATION, projectId, true,
+ null);
+ }
+}
diff --git a/ai-data-processor/src/main/java/com/publicissapient/kpidashboard/job/shared/dto/ProjectInputDTO.java b/ai-data-processor/src/main/java/com/publicissapient/kpidashboard/job/shared/dto/ProjectInputDTO.java
index 19f764385..c7a02bac9 100644
--- a/ai-data-processor/src/main/java/com/publicissapient/kpidashboard/job/shared/dto/ProjectInputDTO.java
+++ b/ai-data-processor/src/main/java/com/publicissapient/kpidashboard/job/shared/dto/ProjectInputDTO.java
@@ -24,5 +24,5 @@
@Builder
public record ProjectInputDTO(int hierarchyLevel, String hierarchyLevelId, String name, String nodeId,
- ProjectDeliveryMethodology deliveryMethodology, List sprints) {
+ String basicProjectConfigId, ProjectDeliveryMethodology deliveryMethodology, List sprints) {
}
diff --git a/ai-data-processor/src/main/resources/application-local.yml b/ai-data-processor/src/main/resources/application-local.yml
index 1ad36acd3..ab7e1bf89 100644
--- a/ai-data-processor/src/main/resources/application-local.yml
+++ b/ai-data-processor/src/main/resources/application-local.yml
@@ -9,6 +9,16 @@ spring:
max-size: 8
queue-capacity: 100
-custom-api-config:
+knowhow-api-config:
base-url:
- api-key:
\ No newline at end of file
+ api-key:
+
+ai-gateway-config:
+ base-url:
+ audience:
+ default-ai-provider:
+
+m2mauth:
+ secret:
+ duration:
+ issuer-service-id:
diff --git a/ai-data-processor/src/main/resources/application.yml b/ai-data-processor/src/main/resources/application.yml
index 7bd9cb381..9f0b21ee4 100644
--- a/ai-data-processor/src/main/resources/application.yml
+++ b/ai-data-processor/src/main/resources/application.yml
@@ -127,6 +127,41 @@ jobs:
cron: 0 0 0 ? * FRI
batching:
chunk-size: 10
+ recommendation-calculation:
+ name: recommendation-calculation
+ batching:
+ chunk-size: 50
+ scheduling:
+ cron: ${RECOMMENDATION_CALC_CRON:0 0 2 * * SAT}
+ calculation-config:
+ enabled-persona: PROJECT_ADMIN
+ kpi-list:
+ - kpi39
+ - kpi46
+ - kpi70
+ - kpi172
+ - kpi17
+ - kpi8
+ - kpi27
+ - kpi156
+ - kpi14
+ - kpi37
+ - kpi34
+ - kpi111
+ - kpi16
+ - kpi42
+ - kpi5
+ - kpi82
+ - kpi149
+ - kpi113
+ - kpi168
+ - kpi73
+ - kpi40
+ - kpi164
+ - kpi126
+ - kpi35
+ - kpi72
+ - kpi38
mongo:
ttl-index:
@@ -148,4 +183,28 @@ mongo:
ttl-field: calculationDate
expiration: 180
time-unit: DAYS
- sort-direction: ASC
\ No newline at end of file
+ sort-direction: ASC
+ recommendation-calculation:
+ collection-name: recommendations_action_plan
+ ttl-field: expiresOn
+ expiration: 180
+ time-unit: DAYS
+ sort-direction: ASC
+ job-execution-trace:
+ collection-name: job_execution_trace_log
+ ttl-field: executionStartedAt
+ expiration: 180
+ time-unit: DAYS
+ sort-direction: ASC
+
+# M2M Authentication for AI Gateway Client
+m2mauth:
+ secret: ${AUTH_SECRET}
+ duration: 7200
+ issuer-service-id: ${AUTH_ISSUER_SERVICE_ID}
+
+# AI Gateway Configuration
+ai-gateway-config:
+ audience: ${AI_GATEWAY_AUDIENCE}
+ base-url: ${AI_GATEWAY_BASE_URL}
+ default-ai-provider: ${AI_GATEWAY_DEFAULT_PROVIDER:openai}
\ No newline at end of file
diff --git a/ai-data-processor/src/test/java/com/publicissapient/kpidashboard/job/orchestrator/JobOrchestratorTest.java b/ai-data-processor/src/test/java/com/publicissapient/kpidashboard/job/orchestrator/JobOrchestratorTest.java
index 68d724730..245fdce0d 100644
--- a/ai-data-processor/src/test/java/com/publicissapient/kpidashboard/job/orchestrator/JobOrchestratorTest.java
+++ b/ai-data-processor/src/test/java/com/publicissapient/kpidashboard/job/orchestrator/JobOrchestratorTest.java
@@ -39,6 +39,8 @@
import java.util.Map;
import java.util.Set;
+import com.publicissapient.kpidashboard.common.model.tracelog.JobExecutionTraceLog;
+import com.publicissapient.kpidashboard.common.service.JobExecutionTraceLogService;
import org.bson.types.ObjectId;
import org.junit.jupiter.api.Test;
import org.junit.jupiter.api.extension.ExtendWith;
@@ -56,9 +58,8 @@
import org.springframework.test.util.ReflectionTestUtils;
import com.publicissapient.kpidashboard.common.constant.ProcessorType;
-import com.publicissapient.kpidashboard.common.model.ProcessorExecutionTraceLog;
import com.publicissapient.kpidashboard.common.model.generic.Processor;
-import com.publicissapient.kpidashboard.common.service.ProcessorExecutionTraceLogServiceImpl;
+import com.publicissapient.kpidashboard.common.constant.ProcessorConstants;
import com.publicissapient.kpidashboard.exception.ConcurrentJobExecutionException;
import com.publicissapient.kpidashboard.exception.InternalServerErrorException;
import com.publicissapient.kpidashboard.exception.JobNotEnabledException;
@@ -83,7 +84,7 @@ class JobOrchestratorTest {
private AiDataProcessorRepository aiDataProcessorRepository;
@Mock
- private ProcessorExecutionTraceLogServiceImpl processorExecutionTraceLogServiceImpl;
+ private JobExecutionTraceLogService jobExecutionTraceLogService;
@InjectMocks
private JobOrchestrator jobOrchestrator;
@@ -546,7 +547,7 @@ void when_RunJobWithValidRegisteredEnabledJob_Then_ExecutesJobAndReturnsExecutio
// Arrange
String jobName = "testJob";
AiDataProcessor processor = createAiDataProcessor(jobName, true);
- ProcessorExecutionTraceLog traceLog = createProcessorExecutionTraceLog(jobName);
+ JobExecutionTraceLog traceLog = createProcessorExecutionTraceLog(ProcessorConstants.AI_DATA,jobName);
JobStrategy mockJobStrategy = mock(JobStrategy.class);
Job mockJob = mock(Job.class);
@@ -556,9 +557,9 @@ void when_RunJobWithValidRegisteredEnabledJob_Then_ExecutesJobAndReturnsExecutio
when(aiDataJobRegistry.getJobStrategyMap()).thenReturn(jobStrategyMap);
when(aiDataProcessorRepository.findByProcessorName(jobName)).thenReturn(processor);
- when(processorExecutionTraceLogServiceImpl.createNewProcessorJobExecution(jobName)).thenReturn(traceLog);
- when(processorExecutionTraceLogServiceImpl.findLastExecutionTraceLogsByProcessorName(jobName, 1))
- .thenReturn(Collections.emptyList());
+ when(jobExecutionTraceLogService.createProcessorJobExecution(ProcessorConstants.AI_DATA, jobName)).thenReturn(traceLog);
+ when(jobExecutionTraceLogService.isJobCurrentlyRunning(ProcessorConstants.AI_DATA, jobName))
+ .thenReturn(false);
when(aiDataJobRegistry.getJobStrategy(jobName)).thenReturn(mockJobStrategy);
when(mockJobStrategy.getJob()).thenReturn(mockJob);
@@ -574,7 +575,7 @@ void when_RunJobWithValidRegisteredEnabledJob_Then_ExecutesJobAndReturnsExecutio
assertNotNull(result.startedAt());
verify(jobLauncher).run(eq(mockJob), any(JobParameters.class));
- verify(processorExecutionTraceLogServiceImpl).createNewProcessorJobExecution(jobName);
+ verify(jobExecutionTraceLogService).createProcessorJobExecution(ProcessorConstants.AI_DATA, jobName);
}
@Test
@@ -590,7 +591,7 @@ void when_RunJobWithUnregisteredJob_Then_ThrowsResourceNotFoundException() throw
assertTrue(exception.getMessage().contains("Job 'unregisteredJob' is not registered"));
verify(jobLauncher, never()).run(any(Job.class), any(JobParameters.class));
- verify(processorExecutionTraceLogServiceImpl, never()).createNewProcessorJobExecution(anyString());
+ verify(jobExecutionTraceLogService, never()).createProcessorJobExecution(anyString(),anyString());
}
@Test
@@ -612,7 +613,7 @@ void when_RunJobWithDisabledJob_Then_ThrowsJobNotEnabledException() throws JobIn
assertTrue(exception.getMessage().contains("Job 'disabledJob' did not run because is disabled"));
verify(jobLauncher, never()).run(any(Job.class), any(JobParameters.class));
- verify(processorExecutionTraceLogServiceImpl, never()).createNewProcessorJobExecution(anyString());
+ verify(jobExecutionTraceLogService, never()).createProcessorJobExecution(anyString(), anyString());
}
@Test
@@ -620,8 +621,8 @@ void when_RunJobWithAlreadyRunningJob_Then_ThrowsJobIsAlreadyRunningException()
// Arrange
String jobName = "runningJob";
AiDataProcessor processor = createAiDataProcessor(jobName, true);
- ProcessorExecutionTraceLog runningTraceLog = createProcessorExecutionTraceLog(jobName);
- runningTraceLog.setExecutionEndedAt(0L); // Indicates ongoing execution
+ JobExecutionTraceLog runningTraceLog = createProcessorExecutionTraceLog(ProcessorConstants.AI_DATA,jobName);
+ runningTraceLog.setExecutionEndedAt(Instant.EPOCH);
runningTraceLog.setExecutionOngoing(true);
JobStrategy mockJobStrategy = mock(JobStrategy.class);
@@ -631,15 +632,15 @@ void when_RunJobWithAlreadyRunningJob_Then_ThrowsJobIsAlreadyRunningException()
when(aiDataJobRegistry.getJobStrategyMap()).thenReturn(jobStrategyMap);
when(aiDataProcessorRepository.findByProcessorName(jobName)).thenReturn(processor);
- when(processorExecutionTraceLogServiceImpl.findLastExecutionTraceLogsByProcessorName(jobName, 1))
- .thenReturn(List.of(runningTraceLog));
+ when(jobExecutionTraceLogService.isJobCurrentlyRunning(ProcessorConstants.AI_DATA, jobName))
+ .thenReturn(true);
// Act & Assert
ConcurrentJobExecutionException exception = assertThrows(ConcurrentJobExecutionException.class, () -> jobOrchestrator.runJob(jobName));
assertTrue(exception.getMessage().contains("Job 'runningJob' is already running"));
verify(jobLauncher, never()).run(any(Job.class), any(JobParameters.class));
- verify(processorExecutionTraceLogServiceImpl, never()).createNewProcessorJobExecution(anyString());
+ verify(jobExecutionTraceLogService, never()).createProcessorJobExecution(anyString(), anyString());
}
@Test
@@ -647,7 +648,7 @@ void when_RunJobAndJobLauncherThrowsException_Then_UpdatesTraceLogAndThrowsInter
// Arrange
String jobName = "failingJob";
AiDataProcessor processor = createAiDataProcessor(jobName, true);
- ProcessorExecutionTraceLog traceLog = createProcessorExecutionTraceLog(jobName);
+ JobExecutionTraceLog traceLog = createProcessorExecutionTraceLog(ProcessorConstants.AI_DATA, jobName);
RuntimeException jobLauncherException = new RuntimeException("Job execution failed");
JobStrategy mockJobStrategy = mock(JobStrategy.class);
@@ -658,9 +659,9 @@ void when_RunJobAndJobLauncherThrowsException_Then_UpdatesTraceLogAndThrowsInter
when(aiDataJobRegistry.getJobStrategyMap()).thenReturn(jobStrategyMap);
when(aiDataProcessorRepository.findByProcessorName(jobName)).thenReturn(processor);
- when(processorExecutionTraceLogServiceImpl.createNewProcessorJobExecution(jobName)).thenReturn(traceLog);
- when(processorExecutionTraceLogServiceImpl.findLastExecutionTraceLogsByProcessorName(jobName, 1))
- .thenReturn(Collections.emptyList());
+ when(jobExecutionTraceLogService.createProcessorJobExecution(ProcessorConstants.AI_DATA, jobName)).thenReturn(traceLog);
+ when(jobExecutionTraceLogService.isJobCurrentlyRunning(ProcessorConstants.AI_DATA, jobName))
+ .thenReturn(false);
when(aiDataJobRegistry.getJobStrategy(jobName)).thenReturn(mockJobStrategy);
when(mockJobStrategy.getJob()).thenReturn(mockJob);
when(jobLauncher.run(any(Job.class), any(JobParameters.class))).thenThrow(jobLauncherException);
@@ -671,10 +672,10 @@ void when_RunJobAndJobLauncherThrowsException_Then_UpdatesTraceLogAndThrowsInter
assertTrue(exception.getMessage().contains("Encountered unexpected error while trying to run job with name 'failingJob'"));
// Verify trace log was updated with error details
- ArgumentCaptor traceLogCaptor = ArgumentCaptor.forClass(ProcessorExecutionTraceLog.class);
- verify(processorExecutionTraceLogServiceImpl).saveAiDataProcessorExecutions(traceLogCaptor.capture());
+ ArgumentCaptor traceLogCaptor = ArgumentCaptor.forClass(JobExecutionTraceLog.class);
+ verify(jobExecutionTraceLogService).updateJobExecution(traceLogCaptor.capture());
- ProcessorExecutionTraceLog savedTraceLog = traceLogCaptor.getValue();
+ JobExecutionTraceLog savedTraceLog = traceLogCaptor.getValue();
assertFalse(savedTraceLog.isExecutionSuccess());
assertNotNull(savedTraceLog.getErrorDetailList());
assertFalse(savedTraceLog.getErrorDetailList().isEmpty());
@@ -686,7 +687,7 @@ void when_RunJobWithValidJobParameters_Then_PassesCorrectParametersToJobLauncher
// Arrange
String jobName = "parameterTestJob";
AiDataProcessor processor = createAiDataProcessor(jobName, true);
- ProcessorExecutionTraceLog traceLog = createProcessorExecutionTraceLog(jobName);
+ JobExecutionTraceLog traceLog = createProcessorExecutionTraceLog(ProcessorConstants.AI_DATA, jobName);
JobStrategy mockJobStrategy = mock(JobStrategy.class);
Job mockJob = mock(Job.class);
@@ -696,9 +697,9 @@ void when_RunJobWithValidJobParameters_Then_PassesCorrectParametersToJobLauncher
when(aiDataJobRegistry.getJobStrategyMap()).thenReturn(jobStrategyMap);
when(aiDataProcessorRepository.findByProcessorName(jobName)).thenReturn(processor);
- when(processorExecutionTraceLogServiceImpl.createNewProcessorJobExecution(jobName)).thenReturn(traceLog);
- when(processorExecutionTraceLogServiceImpl.findLastExecutionTraceLogsByProcessorName(jobName, 1))
- .thenReturn(Collections.emptyList());
+ when(jobExecutionTraceLogService.createProcessorJobExecution(ProcessorConstants.AI_DATA, jobName)).thenReturn(traceLog);
+ when(jobExecutionTraceLogService.isJobCurrentlyRunning(ProcessorConstants.AI_DATA, jobName))
+ .thenReturn(false);
when(aiDataJobRegistry.getJobStrategy(jobName)).thenReturn(mockJobStrategy);
when(mockJobStrategy.getJob()).thenReturn(mockJob);
@@ -768,9 +769,9 @@ void when_RunJobSuccessfully_Then_ReturnsCorrectExecutionResponseFields() {
AiDataProcessor processor = createAiDataProcessor(jobName, true);
processor.setId(processorId);
- ProcessorExecutionTraceLog traceLog = createProcessorExecutionTraceLog(jobName);
+ JobExecutionTraceLog traceLog = createProcessorExecutionTraceLog(ProcessorConstants.AI_DATA, jobName);
traceLog.setId(executionId);
- traceLog.setExecutionStartedAt(executionStartTime);
+ traceLog.setExecutionStartedAt(Instant.ofEpochMilli(executionStartTime));
JobStrategy mockJobStrategy = mock(JobStrategy.class);
Job mockJob = mock(Job.class);
@@ -780,9 +781,9 @@ void when_RunJobSuccessfully_Then_ReturnsCorrectExecutionResponseFields() {
when(aiDataJobRegistry.getJobStrategyMap()).thenReturn(jobStrategyMap);
when(aiDataProcessorRepository.findByProcessorName(jobName)).thenReturn(processor);
- when(processorExecutionTraceLogServiceImpl.createNewProcessorJobExecution(jobName)).thenReturn(traceLog);
- when(processorExecutionTraceLogServiceImpl.findLastExecutionTraceLogsByProcessorName(jobName, 1))
- .thenReturn(Collections.emptyList());
+ when(jobExecutionTraceLogService.createProcessorJobExecution(ProcessorConstants.AI_DATA, jobName)).thenReturn(traceLog);
+ when(jobExecutionTraceLogService.isJobCurrentlyRunning(ProcessorConstants.AI_DATA, jobName))
+ .thenReturn(false);
when(aiDataJobRegistry.getJobStrategy(jobName)).thenReturn(mockJobStrategy);
when(mockJobStrategy.getJob()).thenReturn(mockJob);
@@ -802,7 +803,7 @@ void when_RunJobAndJobLauncherThrowsCheckedException_Then_HandlesExceptionCorrec
// Arrange
String jobName = "checkedExceptionJob";
AiDataProcessor processor = createAiDataProcessor(jobName, true);
- ProcessorExecutionTraceLog traceLog = createProcessorExecutionTraceLog(jobName);
+ JobExecutionTraceLog traceLog = createProcessorExecutionTraceLog(ProcessorConstants.AI_DATA, jobName);
RuntimeException runtimeException = new RuntimeException("Runtime exception occurred");
JobStrategy mockJobStrategy = mock(JobStrategy.class);
@@ -813,9 +814,9 @@ void when_RunJobAndJobLauncherThrowsCheckedException_Then_HandlesExceptionCorrec
when(aiDataJobRegistry.getJobStrategyMap()).thenReturn(jobStrategyMap);
when(aiDataProcessorRepository.findByProcessorName(jobName)).thenReturn(processor);
- when(processorExecutionTraceLogServiceImpl.createNewProcessorJobExecution(jobName)).thenReturn(traceLog);
- when(processorExecutionTraceLogServiceImpl.findLastExecutionTraceLogsByProcessorName(jobName, 1))
- .thenReturn(Collections.emptyList());
+ when(jobExecutionTraceLogService.createProcessorJobExecution(ProcessorConstants.AI_DATA, jobName)).thenReturn(traceLog);
+ when(jobExecutionTraceLogService.isJobCurrentlyRunning(ProcessorConstants.AI_DATA, jobName))
+ .thenReturn(false);
when(aiDataJobRegistry.getJobStrategy(jobName)).thenReturn(mockJobStrategy);
when(mockJobStrategy.getJob()).thenReturn(mockJob);
when(jobLauncher.run(any(Job.class), any(JobParameters.class))).thenThrow(runtimeException);
@@ -826,10 +827,10 @@ void when_RunJobAndJobLauncherThrowsCheckedException_Then_HandlesExceptionCorrec
assertTrue(exception.getMessage().contains("Encountered unexpected error while trying to run job with name 'checkedExceptionJob'"));
// Verify error details contain the original exception message
- ArgumentCaptor traceLogCaptor = ArgumentCaptor.forClass(ProcessorExecutionTraceLog.class);
- verify(processorExecutionTraceLogServiceImpl).saveAiDataProcessorExecutions(traceLogCaptor.capture());
+ ArgumentCaptor traceLogCaptor = ArgumentCaptor.forClass(JobExecutionTraceLog.class);
+ verify(jobExecutionTraceLogService).updateJobExecution(traceLogCaptor.capture());
- ProcessorExecutionTraceLog savedTraceLog = traceLogCaptor.getValue();
+ JobExecutionTraceLog savedTraceLog = traceLogCaptor.getValue();
assertTrue(savedTraceLog.getErrorDetailList().get(0).getError().contains("Runtime exception occurred"));
}
@@ -843,11 +844,12 @@ private AiDataProcessor createAiDataProcessor(String processorName, boolean isAc
}
// Helper method
- private ProcessorExecutionTraceLog createProcessorExecutionTraceLog(String processorName) {
- ProcessorExecutionTraceLog traceLog = new ProcessorExecutionTraceLog();
+ private JobExecutionTraceLog createProcessorExecutionTraceLog(String processorName, String jobName) {
+ JobExecutionTraceLog traceLog = new JobExecutionTraceLog();
traceLog.setId(new ObjectId());
traceLog.setProcessorName(processorName);
- traceLog.setExecutionStartedAt(Instant.now().toEpochMilli());
+ traceLog.setJobName(jobName);
+ traceLog.setExecutionStartedAt(Instant.now());
traceLog.setExecutionSuccess(true);
return traceLog;
}
diff --git a/ai-data-processor/src/test/java/com/publicissapient/kpidashboard/job/recommendationcalculation/config/CalculationConfigTest.java b/ai-data-processor/src/test/java/com/publicissapient/kpidashboard/job/recommendationcalculation/config/CalculationConfigTest.java
new file mode 100644
index 000000000..ea2e55568
--- /dev/null
+++ b/ai-data-processor/src/test/java/com/publicissapient/kpidashboard/job/recommendationcalculation/config/CalculationConfigTest.java
@@ -0,0 +1,328 @@
+/*
+ * Copyright 2014 CapitalOne, LLC.
+ * Further development Copyright 2022 Sapient Corporation.
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+package com.publicissapient.kpidashboard.job.recommendationcalculation.config;
+
+import static org.junit.jupiter.api.Assertions.assertEquals;
+import static org.junit.jupiter.api.Assertions.assertFalse;
+import static org.junit.jupiter.api.Assertions.assertNotNull;
+import static org.junit.jupiter.api.Assertions.assertNull;
+import static org.junit.jupiter.api.Assertions.assertThrows;
+import static org.junit.jupiter.api.Assertions.assertTrue;
+
+import java.util.ArrayList;
+import java.util.Collections;
+import java.util.List;
+import java.util.Set;
+
+import org.junit.jupiter.api.BeforeEach;
+import org.junit.jupiter.api.Test;
+
+import com.publicissapient.kpidashboard.common.model.recommendation.batch.Persona;
+
+class CalculationConfigTest {
+
+ private CalculationConfig calculationConfig;
+
+ @BeforeEach
+ void setUp() {
+ calculationConfig = new CalculationConfig();
+ }
+
+ @Test
+ void when_NoEnabledPersonaConfigured_Then_ValidationErrorAdded() {
+ // Arrange
+ calculationConfig.setEnabledPersona(null);
+ calculationConfig.setKpiList(List.of("kpi14", "kpi82"));
+
+ // Act
+ calculationConfig.validateConfiguration();
+
+ // Assert
+ Set errors = calculationConfig.getConfigValidationErrors();
+ assertFalse(errors.isEmpty());
+ assertTrue(errors.contains("No enabled persona configured for recommendation calculation"));
+ }
+
+ @Test
+ void when_NoKpiListConfigured_Then_ValidationErrorAdded() {
+ // Arrange
+ calculationConfig.setEnabledPersona(Persona.ENGINEERING_LEAD);
+ calculationConfig.setKpiList(null);
+
+ // Act
+ calculationConfig.validateConfiguration();
+
+ // Assert
+ Set errors = calculationConfig.getConfigValidationErrors();
+ assertFalse(errors.isEmpty());
+ assertTrue(errors.contains("No KPI list configured for recommendation calculation"));
+ }
+
+ @Test
+ void when_EmptyKpiListConfigured_Then_ValidationErrorAdded() {
+ // Arrange
+ calculationConfig.setEnabledPersona(Persona.ENGINEERING_LEAD);
+ calculationConfig.setKpiList(Collections.emptyList());
+
+ // Act
+ calculationConfig.validateConfiguration();
+
+ // Assert
+ Set errors = calculationConfig.getConfigValidationErrors();
+ assertFalse(errors.isEmpty());
+ assertTrue(errors.contains("No KPI list configured for recommendation calculation"));
+ }
+
+ @Test
+ void when_BothPersonaAndKpiListMissing_Then_BothValidationErrorsAdded() {
+ // Arrange
+ calculationConfig.setEnabledPersona(null);
+ calculationConfig.setKpiList(null);
+
+ // Act
+ calculationConfig.validateConfiguration();
+
+ // Assert
+ Set errors = calculationConfig.getConfigValidationErrors();
+ assertEquals(2, errors.size());
+ assertTrue(errors.contains("No enabled persona configured for recommendation calculation"));
+ assertTrue(errors.contains("No KPI list configured for recommendation calculation"));
+ }
+
+ @Test
+ void when_ValidPersonaAndKpiListConfigured_Then_NoValidationErrors() {
+ // Arrange
+ calculationConfig.setEnabledPersona(Persona.ENGINEERING_LEAD);
+ calculationConfig.setKpiList(List.of("kpi14", "kpi82", "kpi111"));
+
+ // Act
+ calculationConfig.validateConfiguration();
+
+ // Assert
+ assertTrue(calculationConfig.getConfigValidationErrors().isEmpty());
+ }
+
+ @Test
+ void when_ExecutiveSponsorPersonaConfigured_Then_NoValidationErrors() {
+ // Arrange
+ calculationConfig.setEnabledPersona(Persona.EXECUTIVE_SPONSOR);
+ calculationConfig.setKpiList(List.of("kpi14", "kpi82"));
+
+ // Act
+ calculationConfig.validateConfiguration();
+
+ // Assert
+ assertTrue(calculationConfig.getConfigValidationErrors().isEmpty());
+ }
+
+ @Test
+ void when_ScrumMasterPersonaConfigured_Then_NoValidationErrors() {
+ // Arrange
+ calculationConfig.setEnabledPersona(Persona.SCRUM_MASTER);
+ calculationConfig.setKpiList(List.of("kpi14", "kpi82"));
+
+ // Act
+ calculationConfig.validateConfiguration();
+
+ // Assert
+ assertTrue(calculationConfig.getConfigValidationErrors().isEmpty());
+ }
+
+ @Test
+ void when_Complete26KpiListConfigured_Then_NoValidationErrors() {
+ // Arrange
+ calculationConfig.setEnabledPersona(Persona.ENGINEERING_LEAD);
+ calculationConfig.setKpiList(List.of(
+ "kpi14", "kpi82", "kpi111", "kpi35", "kpi34",
+ "kpi37", "kpi28", "kpi36", "kpi126", "kpi42",
+ "kpi16", "kpi17", "kpi38", "kpi27", "kpi72",
+ "kpi84", "kpi11", "kpi62", "kpi64", "kpi67",
+ "kpi65", "kpi157", "kpi158", "kpi116", "kpi118",
+ "kpi997"
+ ));
+
+ // Act
+ calculationConfig.validateConfiguration();
+
+ // Assert
+ assertTrue(calculationConfig.getConfigValidationErrors().isEmpty());
+ }
+
+ @Test
+ void when_SingleKpiInList_Then_NoValidationErrors() {
+ // Arrange
+ calculationConfig.setEnabledPersona(Persona.ENGINEERING_LEAD);
+ calculationConfig.setKpiList(List.of("kpi14"));
+
+ // Act
+ calculationConfig.validateConfiguration();
+
+ // Assert
+ assertTrue(calculationConfig.getConfigValidationErrors().isEmpty());
+ }
+
+ @Test
+ void when_ValidationErrorsExist_Then_ReturnsUnmodifiableSet() {
+ // Arrange
+ calculationConfig.setEnabledPersona(null);
+ calculationConfig.setKpiList(null);
+ calculationConfig.validateConfiguration();
+
+ // Act
+ Set errors = calculationConfig.getConfigValidationErrors();
+
+ // Assert
+ assertThrows(UnsupportedOperationException.class, () -> {
+ errors.add("Should not be able to modify");
+ });
+ }
+
+ @Test
+ void when_GetEnabledPersona_Then_ReturnsConfiguredPersona() {
+ // Arrange
+ calculationConfig.setEnabledPersona(Persona.ENGINEERING_LEAD);
+
+ // Act
+ Persona result = calculationConfig.getEnabledPersona();
+
+ // Assert
+ assertNotNull(result);
+ assertEquals(Persona.ENGINEERING_LEAD, result);
+ }
+
+ @Test
+ void when_GetKpiList_Then_ReturnsConfiguredList() {
+ // Arrange
+ List kpiList = List.of("kpi14", "kpi82", "kpi111");
+ calculationConfig.setKpiList(kpiList);
+
+ // Act
+ List result = calculationConfig.getKpiList();
+
+ // Assert
+ assertNotNull(result);
+ assertEquals(3, result.size());
+ assertTrue(result.contains("kpi14"));
+ assertTrue(result.contains("kpi82"));
+ assertTrue(result.contains("kpi111"));
+ }
+
+ @Test
+ void when_NoPersonaSet_Then_ReturnsNull() {
+ // Arrange - Don't set persona
+
+ // Act
+ Persona result = calculationConfig.getEnabledPersona();
+
+ // Assert
+ assertNull(result);
+ }
+
+ @Test
+ void when_NoKpiListSet_Then_ReturnsNull() {
+ // Arrange - Don't set KPI list
+
+ // Act
+ List result = calculationConfig.getKpiList();
+
+ // Assert
+ assertNull(result);
+ }
+
+ @Test
+ void when_MultipleValidationCallsWithSameErrors_Then_ErrorsNotDuplicated() {
+ // Arrange
+ calculationConfig.setEnabledPersona(null);
+ calculationConfig.setKpiList(null);
+
+ // Act
+ calculationConfig.validateConfiguration();
+ calculationConfig.validateConfiguration();
+
+ // Assert
+ Set errors = calculationConfig.getConfigValidationErrors();
+ // Errors should still be only 2 (persona and kpi list), not 4
+ assertEquals(2, errors.size());
+ }
+
+ @Test
+ void when_ConfigurationFixedAfterValidation_Then_ValidationPassesOnRetry() {
+ // Arrange
+ calculationConfig.setEnabledPersona(null);
+ calculationConfig.setKpiList(null);
+ calculationConfig.validateConfiguration();
+ assertEquals(2, calculationConfig.getConfigValidationErrors().size());
+
+ // Act - Fix configuration
+ calculationConfig = new CalculationConfig(); // Reset to clear errors
+ calculationConfig.setEnabledPersona(Persona.ENGINEERING_LEAD);
+ calculationConfig.setKpiList(List.of("kpi14"));
+ calculationConfig.validateConfiguration();
+
+ // Assert
+ assertTrue(calculationConfig.getConfigValidationErrors().isEmpty());
+ }
+
+ @Test
+ void when_AllPersonaEnumValuesUsed_Then_AllValidate() {
+ // Test all available persona values
+ List kpiList = List.of("kpi14", "kpi82");
+
+ for (Persona persona : Persona.values()) {
+ // Arrange
+ CalculationConfig config = new CalculationConfig();
+ config.setEnabledPersona(persona);
+ config.setKpiList(kpiList);
+
+ // Act
+ config.validateConfiguration();
+
+ // Assert
+ assertTrue(config.getConfigValidationErrors().isEmpty(),
+ "Persona " + persona + " should validate successfully");
+ }
+ }
+
+ @Test
+ void when_DuplicateKpisInList_Then_NoValidationErrors() {
+ // Arrange - List with duplicates (though not recommended)
+ calculationConfig.setEnabledPersona(Persona.ENGINEERING_LEAD);
+ calculationConfig.setKpiList(List.of("kpi14", "kpi14", "kpi82"));
+
+ // Act
+ calculationConfig.validateConfiguration();
+
+ // Assert
+ assertTrue(calculationConfig.getConfigValidationErrors().isEmpty());
+ }
+
+ @Test
+ void when_ConfigValidatorInterfaceImplemented_Then_MethodsAccessible() {
+ // Arrange
+ calculationConfig.setEnabledPersona(Persona.ENGINEERING_LEAD);
+ calculationConfig.setKpiList(List.of("kpi14"));
+
+ // Act
+ calculationConfig.validateConfiguration();
+ Set errors = calculationConfig.getConfigValidationErrors();
+
+ // Assert - Methods from ConfigValidator interface should be callable
+ assertNotNull(errors);
+ assertTrue(errors.isEmpty());
+ }
+}
diff --git a/ai-data-processor/src/test/java/com/publicissapient/kpidashboard/job/recommendationcalculation/parser/BatchRecommendationResponseParserTest.java b/ai-data-processor/src/test/java/com/publicissapient/kpidashboard/job/recommendationcalculation/parser/BatchRecommendationResponseParserTest.java
new file mode 100644
index 000000000..122209c1d
--- /dev/null
+++ b/ai-data-processor/src/test/java/com/publicissapient/kpidashboard/job/recommendationcalculation/parser/BatchRecommendationResponseParserTest.java
@@ -0,0 +1,495 @@
+/*
+ * Copyright 2014 CapitalOne, LLC.
+ * Further development Copyright 2022 Sapient Corporation.
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+package com.publicissapient.kpidashboard.job.recommendationcalculation.parser;
+
+import static org.junit.jupiter.api.Assertions.assertEquals;
+import static org.junit.jupiter.api.Assertions.assertFalse;
+import static org.junit.jupiter.api.Assertions.assertNotNull;
+import static org.junit.jupiter.api.Assertions.assertNull;
+import static org.junit.jupiter.api.Assertions.assertTrue;
+
+import java.util.Optional;
+
+import org.junit.jupiter.api.BeforeEach;
+import org.junit.jupiter.api.DisplayName;
+import org.junit.jupiter.api.Nested;
+import org.junit.jupiter.api.Test;
+
+import com.fasterxml.jackson.databind.ObjectMapper;
+import com.knowhow.retro.aigatewayclient.client.response.chat.ChatGenerationResponseDTO;
+import com.publicissapient.kpidashboard.common.model.recommendation.batch.Recommendation;
+import com.publicissapient.kpidashboard.common.model.recommendation.batch.Severity;
+
+@DisplayName("BatchRecommendationResponseParser Tests")
+class BatchRecommendationResponseParserTest {
+
+ private BatchRecommendationResponseParser parser;
+ private ObjectMapper objectMapper;
+
+ @BeforeEach
+ void setUp() {
+ objectMapper = new ObjectMapper();
+ parser = new BatchRecommendationResponseParser(objectMapper);
+ }
+
+ @Nested
+ @DisplayName("Valid Response Parsing")
+ class ValidResponseParsing {
+
+ @Test
+ @DisplayName("Should parse valid JSON response with all fields")
+ void parseRecommendation_ValidCompleteJson_Success() {
+ // Arrange
+ String jsonResponse = """
+ {
+ "title": "Improve Code Quality",
+ "description": "Code quality metrics show declining trend",
+ "severity": "HIGH",
+ "timeToValue": "2-3 sprints",
+ "actionPlans": [
+ {
+ "title": "Implement Code Reviews",
+ "description": "Enforce peer code reviews"
+ },
+ {
+ "title": "Add Unit Tests",
+ "description": "Increase test coverage to 80%"
+ }
+ ]
+ }
+ """;
+ ChatGenerationResponseDTO response = new ChatGenerationResponseDTO(jsonResponse);
+
+ // Act
+ Optional result = parser.parseRecommendation(response);
+
+ // Assert
+ assertTrue(result.isPresent());
+ Recommendation recommendation = result.get();
+ assertEquals("Improve Code Quality", recommendation.getTitle());
+ assertEquals("Code quality metrics show declining trend", recommendation.getDescription());
+ assertEquals(Severity.HIGH, recommendation.getSeverity());
+ assertEquals("2-3 sprints", recommendation.getTimeToValue());
+ assertNotNull(recommendation.getActionPlans());
+ assertEquals(2, recommendation.getActionPlans().size());
+ assertEquals("Implement Code Reviews", recommendation.getActionPlans().get(0).getTitle());
+ }
+
+ @Test
+ @DisplayName("Should parse JSON with markdown code fence")
+ void parseRecommendation_WithMarkdownFence_Success() {
+ // Arrange
+ String jsonResponse = """
+ ```json
+ {
+ "title": "Test Title",
+ "description": "Test Description"
+ }
+ ```
+ """;
+ ChatGenerationResponseDTO response = new ChatGenerationResponseDTO(jsonResponse);
+
+ // Act
+ Optional result = parser.parseRecommendation(response);
+
+ // Assert
+ assertTrue(result.isPresent());
+ assertEquals("Test Title", result.get().getTitle());
+ assertEquals("Test Description", result.get().getDescription());
+ }
+
+ @Test
+ @DisplayName("Should parse JSON without code fence")
+ void parseRecommendation_WithoutMarkdownFence_Success() {
+ // Arrange
+ String jsonResponse = """
+ {
+ "title": "Test Title",
+ "description": "Test Description"
+ }
+ """;
+ ChatGenerationResponseDTO response = new ChatGenerationResponseDTO(jsonResponse);
+
+ // Act
+ Optional result = parser.parseRecommendation(response);
+
+ // Assert
+ assertTrue(result.isPresent());
+ assertEquals("Test Title", result.get().getTitle());
+ }
+
+ @Test
+ @DisplayName("Should parse JSON from recommendations array")
+ void parseRecommendation_FromRecommendationsArray_Success() {
+ // Arrange
+ String jsonResponse = """
+ {
+ "recommendations": [
+ {
+ "title": "First Recommendation",
+ "description": "First Description"
+ }
+ ]
+ }
+ """;
+ ChatGenerationResponseDTO response = new ChatGenerationResponseDTO(jsonResponse);
+
+ // Act
+ Optional result = parser.parseRecommendation(response);
+
+ // Assert
+ assertTrue(result.isPresent());
+ assertEquals("First Recommendation", result.get().getTitle());
+ assertEquals("First Description", result.get().getDescription());
+ }
+
+ @Test
+ @DisplayName("Should parse minimal JSON with only required fields")
+ void parseRecommendation_MinimalJson_Success() {
+ // Arrange
+ String jsonResponse = """
+ {
+ "title": "Minimal Title",
+ "description": "Minimal Description"
+ }
+ """;
+ ChatGenerationResponseDTO response = new ChatGenerationResponseDTO(jsonResponse);
+
+ // Act
+ Optional result = parser.parseRecommendation(response);
+
+ // Assert
+ assertTrue(result.isPresent());
+ assertEquals("Minimal Title", result.get().getTitle());
+ assertEquals("Minimal Description", result.get().getDescription());
+ assertNull(result.get().getSeverity());
+ assertNull(result.get().getTimeToValue());
+ assertNull(result.get().getActionPlans());
+ }
+
+ @Test
+ @DisplayName("Should handle invalid severity gracefully")
+ void parseRecommendation_InvalidSeverity_SavesAsNull() {
+ // Arrange
+ String jsonResponse = """
+ {
+ "title": "Test Title",
+ "description": "Test Description",
+ "severity": "INVALID_SEVERITY"
+ }
+ """;
+ ChatGenerationResponseDTO response = new ChatGenerationResponseDTO(jsonResponse);
+
+ // Act
+ Optional result = parser.parseRecommendation(response);
+
+ // Assert
+ assertTrue(result.isPresent());
+ assertNull(result.get().getSeverity());
+ }
+
+ @Test
+ @DisplayName("Should parse JSON with text before opening brace")
+ void parseRecommendation_WithPrefixText_Success() {
+ // Arrange
+ String jsonResponse = """
+ Here is the recommendation:
+ {
+ "title": "Test Title",
+ "description": "Test Description"
+ }
+ """;
+ ChatGenerationResponseDTO response = new ChatGenerationResponseDTO(jsonResponse);
+
+ // Act
+ Optional result = parser.parseRecommendation(response);
+
+ // Assert
+ assertTrue(result.isPresent());
+ assertEquals("Test Title", result.get().getTitle());
+ }
+ }
+
+ @Nested
+ @DisplayName("Invalid Response Handling")
+ class InvalidResponseHandling {
+
+ @Test
+ @DisplayName("Should return empty for null response content")
+ void parseRecommendation_NullContent_ReturnsEmpty() {
+ // Arrange
+ ChatGenerationResponseDTO response = new ChatGenerationResponseDTO(null);
+
+ // Act
+ Optional result = parser.parseRecommendation(response);
+
+ // Assert
+ assertFalse(result.isPresent());
+ }
+
+ @Test
+ @DisplayName("Should return empty for empty string response")
+ void parseRecommendation_EmptyString_ReturnsEmpty() {
+ // Arrange
+ ChatGenerationResponseDTO response = new ChatGenerationResponseDTO("");
+
+ // Act
+ Optional result = parser.parseRecommendation(response);
+
+ // Assert
+ assertFalse(result.isPresent());
+ }
+
+ @Test
+ @DisplayName("Should return empty for whitespace-only response")
+ void parseRecommendation_WhitespaceOnly_ReturnsEmpty() {
+ // Arrange
+ ChatGenerationResponseDTO response = new ChatGenerationResponseDTO(" \n\t ");
+
+ // Act
+ Optional result = parser.parseRecommendation(response);
+
+ // Assert
+ assertFalse(result.isPresent());
+ }
+
+ @Test
+ @DisplayName("Should return empty for empty JSON object")
+ void parseRecommendation_EmptyJsonObject_ReturnsEmpty() {
+ // Arrange
+ ChatGenerationResponseDTO response = new ChatGenerationResponseDTO("{}");
+
+ // Act
+ Optional result = parser.parseRecommendation(response);
+
+ // Assert
+ assertFalse(result.isPresent());
+ }
+
+ @Test
+ @DisplayName("Should return empty for JSON missing required fields")
+ void parseRecommendation_MissingRequiredFields_ReturnsEmpty() {
+ // Arrange
+ String jsonResponse = """
+ {
+ "title": "Only Title"
+ }
+ """;
+ ChatGenerationResponseDTO response = new ChatGenerationResponseDTO(jsonResponse);
+
+ // Act
+ Optional result = parser.parseRecommendation(response);
+
+ // Assert
+ assertFalse(result.isPresent());
+ }
+
+ @Test
+ @DisplayName("Should return empty for malformed JSON")
+ void parseRecommendation_MalformedJson_ReturnsEmpty() {
+ // Arrange
+ ChatGenerationResponseDTO response = new ChatGenerationResponseDTO("{ invalid json");
+
+ // Act
+ Optional result = parser.parseRecommendation(response);
+
+ // Assert
+ assertFalse(result.isPresent());
+ }
+
+ @Test
+ @DisplayName("Should return empty for empty recommendations array")
+ void parseRecommendation_EmptyRecommendationsArray_ReturnsEmpty() {
+ // Arrange
+ String jsonResponse = """
+ {
+ "recommendations": []
+ }
+ """;
+ ChatGenerationResponseDTO response = new ChatGenerationResponseDTO(jsonResponse);
+
+ // Act
+ Optional result = parser.parseRecommendation(response);
+
+ // Assert
+ assertFalse(result.isPresent());
+ }
+
+ @Test
+ @DisplayName("Should return empty when title is empty string")
+ void parseRecommendation_EmptyTitle_ReturnsEmpty() {
+ // Arrange
+ String jsonResponse = """
+ {
+ "title": "",
+ "description": "Test Description"
+ }
+ """;
+ ChatGenerationResponseDTO response = new ChatGenerationResponseDTO(jsonResponse);
+
+ // Act
+ Optional result = parser.parseRecommendation(response);
+
+ // Assert
+ assertFalse(result.isPresent());
+ }
+
+ @Test
+ @DisplayName("Should return empty when description is empty string")
+ void parseRecommendation_EmptyDescription_ReturnsEmpty() {
+ // Arrange
+ String jsonResponse = """
+ {
+ "title": "Test Title",
+ "description": ""
+ }
+ """;
+ ChatGenerationResponseDTO response = new ChatGenerationResponseDTO(jsonResponse);
+
+ // Act
+ Optional result = parser.parseRecommendation(response);
+
+ // Assert
+ assertFalse(result.isPresent());
+ }
+ }
+
+ @Nested
+ @DisplayName("Action Plans Parsing")
+ class ActionPlansParsing {
+
+ @Test
+ @DisplayName("Should parse multiple action plans")
+ void parseRecommendation_MultipleActionPlans_Success() {
+ // Arrange
+ String jsonResponse = """
+ {
+ "title": "Test Title",
+ "description": "Test Description",
+ "actionPlans": [
+ {"title": "Action 1", "description": "Description 1"},
+ {"title": "Action 2", "description": "Description 2"},
+ {"title": "Action 3", "description": "Description 3"}
+ ]
+ }
+ """;
+ ChatGenerationResponseDTO response = new ChatGenerationResponseDTO(jsonResponse);
+
+ // Act
+ Optional result = parser.parseRecommendation(response);
+
+ // Assert
+ assertTrue(result.isPresent());
+ assertEquals(3, result.get().getActionPlans().size());
+ assertEquals("Action 1", result.get().getActionPlans().get(0).getTitle());
+ assertEquals("Description 1", result.get().getActionPlans().get(0).getDescription());
+ assertEquals("Action 3", result.get().getActionPlans().get(2).getTitle());
+ }
+
+ @Test
+ @DisplayName("Should handle missing actionPlans field")
+ void parseRecommendation_NoActionPlans_ReturnsNull() {
+ // Arrange
+ String jsonResponse = """
+ {
+ "title": "Test Title",
+ "description": "Test Description"
+ }
+ """;
+ ChatGenerationResponseDTO response = new ChatGenerationResponseDTO(jsonResponse);
+
+ // Act
+ Optional result = parser.parseRecommendation(response);
+
+ // Assert
+ assertTrue(result.isPresent());
+ assertNull(result.get().getActionPlans());
+ }
+
+ @Test
+ @DisplayName("Should handle empty actionPlans array")
+ void parseRecommendation_EmptyActionPlansArray_ReturnsEmptyList() {
+ // Arrange
+ String jsonResponse = """
+ {
+ "title": "Test Title",
+ "description": "Test Description",
+ "actionPlans": []
+ }
+ """;
+ ChatGenerationResponseDTO response = new ChatGenerationResponseDTO(jsonResponse);
+
+ // Act
+ Optional result = parser.parseRecommendation(response);
+
+ // Assert
+ assertTrue(result.isPresent());
+ assertNotNull(result.get().getActionPlans());
+ assertTrue(result.get().getActionPlans().isEmpty());
+ }
+ }
+
+ @Nested
+ @DisplayName("Severity Parsing")
+ class SeverityParsing {
+
+ @Test
+ @DisplayName("Should parse all valid severity levels")
+ void parseRecommendation_AllSeverityLevels_Success() {
+ // Test all severity levels
+ String[] severities = { "HIGH", "MEDIUM", "LOW" };
+
+ for (String severity : severities) {
+ String jsonResponse = String.format("""
+ {
+ "title": "Test",
+ "description": "Test",
+ "severity": "%s"
+ }
+ """, severity);
+
+ ChatGenerationResponseDTO response = new ChatGenerationResponseDTO(jsonResponse);
+ Optional result = parser.parseRecommendation(response);
+
+ assertTrue(result.isPresent());
+ assertEquals(Severity.valueOf(severity), result.get().getSeverity());
+ }
+ }
+
+ @Test
+ @DisplayName("Should handle lowercase severity")
+ void parseRecommendation_LowercaseSeverity_ParsesCorrectly() {
+ // Arrange
+ String jsonResponse = """
+ {
+ "title": "Test",
+ "description": "Test",
+ "severity": "high"
+ }
+ """;
+ ChatGenerationResponseDTO response = new ChatGenerationResponseDTO(jsonResponse);
+
+ // Act
+ Optional result = parser.parseRecommendation(response);
+
+ // Assert
+ assertTrue(result.isPresent());
+ assertEquals(Severity.HIGH, result.get().getSeverity());
+ }
+ }
+}
diff --git a/ai-data-processor/src/test/java/com/publicissapient/kpidashboard/job/recommendationcalculation/processor/ProjectItemProcessorTest.java b/ai-data-processor/src/test/java/com/publicissapient/kpidashboard/job/recommendationcalculation/processor/ProjectItemProcessorTest.java
new file mode 100644
index 000000000..768200048
--- /dev/null
+++ b/ai-data-processor/src/test/java/com/publicissapient/kpidashboard/job/recommendationcalculation/processor/ProjectItemProcessorTest.java
@@ -0,0 +1,382 @@
+/*
+ * Copyright 2014 CapitalOne, LLC.
+ * Further development Copyright 2022 Sapient Corporation.
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+package com.publicissapient.kpidashboard.job.recommendationcalculation.processor;
+
+import static org.junit.jupiter.api.Assertions.assertEquals;
+import static org.junit.jupiter.api.Assertions.assertNotNull;
+import static org.junit.jupiter.api.Assertions.assertNull;
+import static org.junit.jupiter.api.Assertions.assertTrue;
+import static org.mockito.ArgumentMatchers.anyBoolean;
+import static org.mockito.ArgumentMatchers.anyString;
+import static org.mockito.ArgumentMatchers.eq;
+import static org.mockito.Mockito.never;
+import static org.mockito.Mockito.times;
+import static org.mockito.Mockito.verify;
+import static org.mockito.Mockito.when;
+
+import java.util.Collections;
+
+import org.junit.jupiter.api.BeforeEach;
+import org.junit.jupiter.api.DisplayName;
+import org.junit.jupiter.api.Nested;
+import org.junit.jupiter.api.Test;
+import org.junit.jupiter.api.extension.ExtendWith;
+import org.mockito.ArgumentCaptor;
+import org.mockito.Mock;
+import org.mockito.junit.jupiter.MockitoExtension;
+
+import com.publicissapient.kpidashboard.common.model.recommendation.batch.Persona;
+import com.publicissapient.kpidashboard.common.model.recommendation.batch.Recommendation;
+import com.publicissapient.kpidashboard.common.model.recommendation.batch.RecommendationMetadata;
+import com.publicissapient.kpidashboard.common.model.recommendation.batch.RecommendationsActionPlan;
+import com.publicissapient.kpidashboard.common.service.ProcessorExecutionTraceLogService;
+import com.publicissapient.kpidashboard.job.recommendationcalculation.service.RecommendationCalculationService;
+import com.publicissapient.kpidashboard.job.shared.dto.ProjectInputDTO;
+
+/**
+ * Unit tests for {@code ProjectItemProcessor}: verifies delegation to the
+ * recommendation calculation service, exception handling (processor returns
+ * null on failure), and failure trace logging behaviour.
+ */
+@ExtendWith(MockitoExtension.class)
+@DisplayName("ProjectItemProcessor Tests")
+class ProjectItemProcessorTest {
+
+ @Mock
+ private RecommendationCalculationService recommendationCalculationService;
+
+ @Mock
+ private ProcessorExecutionTraceLogService processorExecutionTraceLogService;
+
+ private ProjectItemProcessor processor;
+
+ private ProjectInputDTO projectInput;
+ private RecommendationsActionPlan recommendation;
+
+ @BeforeEach
+ void setUp() {
+ processor = new ProjectItemProcessor(recommendationCalculationService, processorExecutionTraceLogService);
+
+ // Create test project input
+ projectInput = ProjectInputDTO.builder().nodeId("project-1").name("Test Project").hierarchyLevel(5)
+ .hierarchyLevelId("project").sprints(Collections.emptyList()).build();
+
+ // Create test recommendation
+ recommendation = new RecommendationsActionPlan();
+ recommendation.setBasicProjectConfigId("project-1");
+ RecommendationMetadata metadata = new RecommendationMetadata();
+ metadata.setPersona(Persona.SCRUM_MASTER);
+ recommendation.setMetadata(metadata);
+
+ // Create a proper Recommendation object
+ Recommendation rec = new Recommendation();
+ rec.setTitle("Test Recommendation");
+ rec.setDescription("Test Description");
+ rec.setActionPlans(Collections.emptyList());
+ recommendation.setRecommendations(rec);
+ }
+
+ @Nested
+ @DisplayName("Successful Processing")
+ class SuccessfulProcessing {
+
+ @Test
+ @DisplayName("Should process project successfully")
+ void process_ValidProject_ReturnsRecommendation() throws Exception {
+ // Arrange
+ when(recommendationCalculationService.calculateRecommendationsForProject(projectInput))
+ .thenReturn(recommendation);
+
+ // Act
+ RecommendationsActionPlan result = processor.process(projectInput);
+
+ // Assert
+ assertNotNull(result);
+ assertEquals("project-1", result.getBasicProjectConfigId());
+ assertEquals(Persona.SCRUM_MASTER, result.getMetadata().getPersona());
+ verify(recommendationCalculationService, times(1)).calculateRecommendationsForProject(projectInput);
+ verify(processorExecutionTraceLogService, never()).upsertTraceLog(anyString(), anyString(), anyBoolean(),
+ anyString());
+ }
+
+ @Test
+ @DisplayName("Should process project with different persona")
+ void process_DifferentPersona_ReturnsCorrectRecommendation() throws Exception {
+ // Arrange
+ RecommendationMetadata metadata = new RecommendationMetadata();
+ metadata.setPersona(Persona.PRODUCT_OWNER);
+ recommendation.setMetadata(metadata);
+
+ when(recommendationCalculationService.calculateRecommendationsForProject(projectInput))
+ .thenReturn(recommendation);
+
+ // Act
+ RecommendationsActionPlan result = processor.process(projectInput);
+
+ // Assert
+ assertNotNull(result);
+ assertEquals(Persona.PRODUCT_OWNER, result.getMetadata().getPersona());
+ }
+
+ @Test
+ @DisplayName("Should process multiple projects sequentially")
+ void process_MultipleProjects_AllProcessedSuccessfully() throws Exception {
+ // Arrange
+ ProjectInputDTO project1 = ProjectInputDTO.builder().nodeId("project-1").name("Project 1").hierarchyLevel(5)
+ .hierarchyLevelId("project").sprints(Collections.emptyList()).build();
+ ProjectInputDTO project2 = ProjectInputDTO.builder().nodeId("project-2").name("Project 2").hierarchyLevel(5)
+ .hierarchyLevelId("project").sprints(Collections.emptyList()).build();
+
+ RecommendationsActionPlan rec1 = new RecommendationsActionPlan();
+ rec1.setBasicProjectConfigId("project-1");
+ rec1.setMetadata(new RecommendationMetadata());
+
+ RecommendationsActionPlan rec2 = new RecommendationsActionPlan();
+ rec2.setBasicProjectConfigId("project-2");
+ rec2.setMetadata(new RecommendationMetadata());
+
+ when(recommendationCalculationService.calculateRecommendationsForProject(project1)).thenReturn(rec1);
+ when(recommendationCalculationService.calculateRecommendationsForProject(project2)).thenReturn(rec2);
+
+ // Act
+ RecommendationsActionPlan result1 = processor.process(project1);
+ RecommendationsActionPlan result2 = processor.process(project2);
+
+ // Assert
+ assertNotNull(result1);
+ assertNotNull(result2);
+ assertEquals("project-1", result1.getBasicProjectConfigId());
+ assertEquals("project-2", result2.getBasicProjectConfigId());
+ }
+ }
+
+ @Nested
+ @DisplayName("Exception Handling")
+ class ExceptionHandling {
+
+ @Test
+ @DisplayName("Should return null and log trace when service throws exception")
+ void process_ServiceException_ReturnsNullAndLogsTrace() throws Exception {
+ // Arrange
+ RuntimeException exception = new RuntimeException("AI Gateway unavailable");
+ when(recommendationCalculationService.calculateRecommendationsForProject(projectInput))
+ .thenThrow(exception);
+
+ // Act
+ RecommendationsActionPlan result = processor.process(projectInput);
+
+ // Assert
+ assertNull(result);
+ verify(processorExecutionTraceLogService, times(1)).upsertTraceLog(eq("recommendation-calculation"), eq("project-1"),
+ eq(false), anyString());
+ }
+
+ @Test
+ @DisplayName("Should capture detailed error message in trace log")
+ void process_Exception_CapturesDetailedErrorMessage() throws Exception {
+ // Arrange
+ RuntimeException exception = new RuntimeException("Parsing failed");
+ when(recommendationCalculationService.calculateRecommendationsForProject(projectInput))
+ .thenThrow(exception);
+
+ // Act
+ processor.process(projectInput);
+
+ // Assert
+ // Typed captor: a raw ArgumentCaptor would make getValue() return Object below.
+ ArgumentCaptor<String> errorMessageCaptor = ArgumentCaptor.forClass(String.class);
+ verify(processorExecutionTraceLogService).upsertTraceLog(eq("recommendation-calculation"), eq("project-1"), eq(false),
+ errorMessageCaptor.capture());
+
+ String errorMessage = errorMessageCaptor.getValue();
+ assertNotNull(errorMessage);
+ assertTrue(errorMessage.contains("Test Project"));
+ assertTrue(errorMessage.contains("RuntimeException"));
+ assertTrue(errorMessage.contains("Parsing failed"));
+ }
+
+ @Test
+ @DisplayName("Should handle NullPointerException gracefully")
+ void process_NullPointerException_ReturnsNull() throws Exception {
+ // Arrange
+ when(recommendationCalculationService.calculateRecommendationsForProject(projectInput))
+ .thenThrow(new NullPointerException("Required field is null"));
+
+ // Act
+ RecommendationsActionPlan result = processor.process(projectInput);
+
+ // Assert
+ assertNull(result);
+ verify(processorExecutionTraceLogService, times(1)).upsertTraceLog(anyString(), anyString(), eq(false),
+ anyString());
+ }
+
+ @Test
+ @DisplayName("Should handle IllegalArgumentException gracefully")
+ void process_IllegalArgumentException_ReturnsNull() throws Exception {
+ // Arrange
+ when(recommendationCalculationService.calculateRecommendationsForProject(projectInput))
+ .thenThrow(new IllegalArgumentException("Invalid project configuration"));
+
+ // Act
+ RecommendationsActionPlan result = processor.process(projectInput);
+
+ // Assert
+ assertNull(result);
+ }
+
+ @Test
+ @DisplayName("Should include root cause in error message")
+ void process_NestedExceptions_IncludesRootCause() throws Exception {
+ // Arrange
+ Exception rootCause = new IllegalStateException("Connection timeout");
+ RuntimeException wrappedException = new RuntimeException("Service call failed", rootCause);
+ when(recommendationCalculationService.calculateRecommendationsForProject(projectInput))
+ .thenThrow(wrappedException);
+
+ // Act
+ processor.process(projectInput);
+
+ // Assert
+ // Typed captor: a raw ArgumentCaptor would make getValue() return Object below.
+ ArgumentCaptor<String> errorMessageCaptor = ArgumentCaptor.forClass(String.class);
+ verify(processorExecutionTraceLogService).upsertTraceLog(anyString(), anyString(), eq(false),
+ errorMessageCaptor.capture());
+
+ String errorMessage = errorMessageCaptor.getValue();
+ assertTrue(errorMessage.contains("Root cause"));
+ }
+ }
+
+ @Nested
+ @DisplayName("Edge Cases")
+ class EdgeCases {
+
+ @Test
+ @DisplayName("Should process project with minimal data")
+ void process_MinimalProjectData_Success() throws Exception {
+ // Arrange
+ ProjectInputDTO minimalProject = ProjectInputDTO.builder().nodeId("id").name("name").hierarchyLevel(5)
+ .hierarchyLevelId("project").sprints(Collections.emptyList()).build();
+ RecommendationsActionPlan minimalRec = new RecommendationsActionPlan();
+ minimalRec.setBasicProjectConfigId("id");
+ minimalRec.setMetadata(new RecommendationMetadata());
+
+ when(recommendationCalculationService.calculateRecommendationsForProject(minimalProject))
+ .thenReturn(minimalRec);
+
+ // Act
+ RecommendationsActionPlan result = processor.process(minimalProject);
+
+ // Assert
+ assertNotNull(result);
+ assertEquals("id", result.getBasicProjectConfigId());
+ }
+
+ @Test
+ @DisplayName("Should handle project with special characters in name")
+ void process_SpecialCharactersInName_Success() throws Exception {
+ // Arrange
+ ProjectInputDTO specialProject = ProjectInputDTO.builder().nodeId("project-1")
+ .name("Test & \"Name\"").hierarchyLevel(5).hierarchyLevelId("project")
+ .sprints(Collections.emptyList()).build();
+ when(recommendationCalculationService.calculateRecommendationsForProject(specialProject))
+ .thenReturn(recommendation);
+
+ // Act
+ RecommendationsActionPlan result = processor.process(specialProject);
+
+ // Assert
+ assertNotNull(result);
+ }
+
+ @Test
+ @DisplayName("Should process project with very long name")
+ void process_VeryLongProjectName_Success() throws Exception {
+ // Arrange
+ String longName = "A".repeat(500);
+ ProjectInputDTO longNameProject = ProjectInputDTO.builder().nodeId("project-1").name(longName)
+ .hierarchyLevel(5).hierarchyLevelId("project").sprints(Collections.emptyList()).build();
+ when(recommendationCalculationService.calculateRecommendationsForProject(longNameProject))
+ .thenReturn(recommendation);
+
+ // Act
+ RecommendationsActionPlan result = processor.process(longNameProject);
+
+ // Assert
+ assertNotNull(result);
+ }
+ }
+
+ @Nested
+ @DisplayName("Trace Logging Behavior")
+ class TraceLoggingBehavior {
+
+ @Test
+ @DisplayName("Should not log trace on success")
+ void process_SuccessfulProcessing_NoTraceLog() throws Exception {
+ // Arrange
+ when(recommendationCalculationService.calculateRecommendationsForProject(projectInput))
+ .thenReturn(recommendation);
+
+ // Act
+ processor.process(projectInput);
+
+ // Assert
+ verify(processorExecutionTraceLogService, never()).upsertTraceLog(anyString(), anyString(), anyBoolean(),
+ anyString());
+ }
+
+ @Test
+ @DisplayName("Should log trace with correct job name on failure")
+ void process_Failure_LogsWithCorrectJobName() throws Exception {
+ // Arrange
+ when(recommendationCalculationService.calculateRecommendationsForProject(projectInput))
+ .thenThrow(new RuntimeException("Error"));
+
+ // Act
+ processor.process(projectInput);
+
+ // Assert
+ verify(processorExecutionTraceLogService).upsertTraceLog(eq("recommendation-calculation"), anyString(), eq(false),
+ anyString());
+ }
+
+ @Test
+ @DisplayName("Should log trace with correct project ID on failure")
+ void process_Failure_LogsWithCorrectProjectId() throws Exception {
+ // Arrange
+ when(recommendationCalculationService.calculateRecommendationsForProject(projectInput))
+ .thenThrow(new RuntimeException("Error"));
+
+ // Act
+ processor.process(projectInput);
+
+ // Assert
+ verify(processorExecutionTraceLogService).upsertTraceLog(anyString(), eq("project-1"), eq(false),
+ anyString());
+ }
+
+ @Test
+ @DisplayName("Should log trace with success=false on failure")
+ void process_Failure_LogsWithSuccessFalse() throws Exception {
+ // Arrange
+ when(recommendationCalculationService.calculateRecommendationsForProject(projectInput))
+ .thenThrow(new RuntimeException("Error"));
+
+ // Act
+ processor.process(projectInput);
+
+ // Assert
+ verify(processorExecutionTraceLogService).upsertTraceLog(anyString(), anyString(), eq(false), anyString());
+ }
+ }
+}
diff --git a/ai-data-processor/src/test/java/com/publicissapient/kpidashboard/job/recommendationcalculation/reader/ProjectItemReaderTest.java b/ai-data-processor/src/test/java/com/publicissapient/kpidashboard/job/recommendationcalculation/reader/ProjectItemReaderTest.java
new file mode 100644
index 000000000..2fdad8d6f
--- /dev/null
+++ b/ai-data-processor/src/test/java/com/publicissapient/kpidashboard/job/recommendationcalculation/reader/ProjectItemReaderTest.java
@@ -0,0 +1,280 @@
+/*
+ * Copyright 2014 CapitalOne, LLC.
+ * Further development Copyright 2022 Sapient Corporation.
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+package com.publicissapient.kpidashboard.job.recommendationcalculation.reader;
+
+import static org.junit.jupiter.api.Assertions.assertEquals;
+import static org.junit.jupiter.api.Assertions.assertNotNull;
+import static org.junit.jupiter.api.Assertions.assertNull;
+import static org.mockito.Mockito.when;
+
+import java.util.Collections;
+
+import org.junit.jupiter.api.BeforeEach;
+import org.junit.jupiter.api.DisplayName;
+import org.junit.jupiter.api.Nested;
+import org.junit.jupiter.api.Test;
+import org.junit.jupiter.api.extension.ExtendWith;
+import org.mockito.Mock;
+import org.mockito.junit.jupiter.MockitoExtension;
+
+import com.publicissapient.kpidashboard.job.recommendationcalculation.service.RecommendationProjectBatchService;
+import com.publicissapient.kpidashboard.job.shared.dto.ProjectInputDTO;
+
+/**
+ * Unit tests for {@code ProjectItemReader}: verifies field mapping of
+ * projects supplied by the mocked batch service and reader lifecycle
+ * behaviour across re-initialization.
+ */
+@ExtendWith(MockitoExtension.class)
+@DisplayName("ProjectItemReader Tests")
+class ProjectItemReaderTest {
+
+ @Mock
+ private RecommendationProjectBatchService projectBatchService;
+
+ private ProjectItemReader reader;
+
+ private ProjectInputDTO project1;
+ private ProjectInputDTO project2;
+
+ @BeforeEach
+ void setUp() {
+ // Create test projects
+ project1 = ProjectInputDTO.builder().nodeId("project-1").name("Project Alpha").hierarchyLevel(5)
+ .hierarchyLevelId("project").sprints(Collections.emptyList()).build();
+
+ project2 = ProjectInputDTO.builder().nodeId("project-2").name("Project Beta").hierarchyLevel(5)
+ .hierarchyLevelId("project").sprints(Collections.emptyList()).build();
+
+ // Initialize reader
+ reader = new ProjectItemReader(projectBatchService);
+ }
+
+ @Nested
+ @DisplayName("Edge Cases")
+ class EdgeCases {
+
+ @Test
+ @DisplayName("Should handle project with null ID")
+ void read_ProjectWithNullId_MapsCorrectly() throws Exception {
+ // Arrange
+ ProjectInputDTO projectWithNullId = ProjectInputDTO.builder().nodeId(null).name("Project with null ID")
+ .hierarchyLevel(5).hierarchyLevelId("project").sprints(Collections.emptyList()).build();
+
+ when(projectBatchService.getNextProjectInputData()).thenReturn(projectWithNullId);
+
+ // Act
+ ProjectInputDTO result = reader.read();
+
+ // Assert
+ assertNotNull(result);
+ assertNull(result.nodeId());
+ assertEquals("Project with null ID", result.name());
+ }
+
+ @Test
+ @DisplayName("Should handle project with null name")
+ void read_ProjectWithNullName_MapsCorrectly() throws Exception {
+ // Arrange
+ ProjectInputDTO projectWithNullName = ProjectInputDTO.builder().nodeId("project-1").name(null)
+ .hierarchyLevel(5).hierarchyLevelId("project").sprints(Collections.emptyList()).build();
+
+ when(projectBatchService.getNextProjectInputData()).thenReturn(projectWithNullName);
+
+ // Act
+ ProjectInputDTO result = reader.read();
+
+ // Assert
+ assertNotNull(result);
+ assertEquals("project-1", result.nodeId());
+ assertNull(result.name());
+ }
+
+ @Test
+ @DisplayName("Should handle project with empty name")
+ void read_ProjectWithEmptyName_MapsCorrectly() throws Exception {
+ // Arrange
+ ProjectInputDTO projectWithEmptyName = ProjectInputDTO.builder().nodeId("project-1").name("")
+ .hierarchyLevel(5).hierarchyLevelId("project").sprints(Collections.emptyList()).build();
+
+ when(projectBatchService.getNextProjectInputData()).thenReturn(projectWithEmptyName);
+
+ // Act
+ ProjectInputDTO result = reader.read();
+
+ // Assert
+ assertNotNull(result);
+ assertEquals("project-1", result.nodeId());
+ assertEquals("", result.name());
+ }
+
+ @Test
+ @DisplayName("Should handle project with special characters in name")
+ void read_ProjectWithSpecialCharacters_MapsCorrectly() throws Exception {
+ // Arrange
+ ProjectInputDTO projectWithSpecialChars = ProjectInputDTO.builder().nodeId("project-1")
+ .name("Project & \"Quotes\" 'Single' !@#$%").hierarchyLevel(5).hierarchyLevelId("project")
+ .sprints(Collections.emptyList()).build();
+
+ when(projectBatchService.getNextProjectInputData()).thenReturn(projectWithSpecialChars);
+
+ // Act
+ ProjectInputDTO result = reader.read();
+
+ // Assert
+ assertNotNull(result);
+ assertEquals("Project & \"Quotes\" 'Single' !@#$%", result.name());
+ }
+ }
+
+ @Nested
+ @DisplayName("Reader Lifecycle")
+ class ReaderLifecycle {
+
+ @Test
+ @DisplayName("Should support multiple read cycles after reset")
+ void read_MultipleReadCycles_Success() throws Exception {
+ // Arrange
+ when(projectBatchService.getNextProjectInputData()).thenReturn(project1, project2, null) // First cycle
+ .thenReturn(project1, project2, null); // Second cycle after reset
+
+ // First read cycle
+ ProjectInputDTO first1 = reader.read();
+ ProjectInputDTO second1 = reader.read();
+ ProjectInputDTO third1 = reader.read();
+
+ // Re-initialize reader for second cycle
+ reader = new ProjectItemReader(projectBatchService);
+
+ // Second read cycle
+ ProjectInputDTO first2 = reader.read();
+ ProjectInputDTO second2 = reader.read();
+ ProjectInputDTO third2 = reader.read();
+
+ // Assert
+ assertNotNull(first1);
+ assertNotNull(second1);
+ assertNull(third1);
+
+ assertNotNull(first2);
+ assertNotNull(second2);
+ assertNull(third2);
+
+ assertEquals(first1.nodeId(), first2.nodeId());
+ assertEquals(second1.nodeId(), second2.nodeId());
+ }
+ }
+}
diff --git a/ai-data-processor/src/test/java/com/publicissapient/kpidashboard/job/recommendationcalculation/service/KpiDataExtractionServiceTest.java b/ai-data-processor/src/test/java/com/publicissapient/kpidashboard/job/recommendationcalculation/service/KpiDataExtractionServiceTest.java
new file mode 100644
index 000000000..fa09dbb76
--- /dev/null
+++ b/ai-data-processor/src/test/java/com/publicissapient/kpidashboard/job/recommendationcalculation/service/KpiDataExtractionServiceTest.java
@@ -0,0 +1,539 @@
+/*
+ * Copyright 2014 CapitalOne, LLC.
+ * Further development Copyright 2022 Sapient Corporation.
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+package com.publicissapient.kpidashboard.job.recommendationcalculation.service;
+
+import static org.junit.jupiter.api.Assertions.assertEquals;
+import static org.junit.jupiter.api.Assertions.assertFalse;
+import static org.junit.jupiter.api.Assertions.assertNotNull;
+import static org.junit.jupiter.api.Assertions.assertThrows;
+import static org.junit.jupiter.api.Assertions.assertTrue;
+import static org.mockito.ArgumentMatchers.anyList;
+import static org.mockito.Mockito.times;
+import static org.mockito.Mockito.verify;
+import static org.mockito.Mockito.when;
+
+import java.util.ArrayList;
+import java.util.Arrays;
+import java.util.Collections;
+import java.util.List;
+import java.util.Map;
+
+import org.junit.jupiter.api.BeforeEach;
+import org.junit.jupiter.api.DisplayName;
+import org.junit.jupiter.api.Nested;
+import org.junit.jupiter.api.Test;
+import org.junit.jupiter.api.extension.ExtendWith;
+import org.mockito.ArgumentCaptor;
+import org.mockito.Mock;
+import org.mockito.junit.jupiter.MockitoExtension;
+
+import com.publicissapient.kpidashboard.client.customapi.KnowHOWClient;
+import com.publicissapient.kpidashboard.client.customapi.dto.KpiElement;
+import com.publicissapient.kpidashboard.client.customapi.dto.KpiRequest;
+import com.publicissapient.kpidashboard.common.model.application.DataCount;
+import com.publicissapient.kpidashboard.common.model.application.DataCountGroup;
+import com.publicissapient.kpidashboard.job.recommendationcalculation.config.CalculationConfig;
+import com.publicissapient.kpidashboard.job.recommendationcalculation.config.RecommendationCalculationConfig;
+import com.publicissapient.kpidashboard.job.shared.dto.ProjectInputDTO;
+
+@ExtendWith(MockitoExtension.class)
+@DisplayName("KpiDataExtractionService Tests")
+class KpiDataExtractionServiceTest {
+
+ @Mock
+ private KnowHOWClient knowHOWClient;
+
+ @Mock
+ private RecommendationCalculationConfig recommendationCalculationConfig;
+
+ @Mock
+ private CalculationConfig calculationConfig;
+
+ private KpiDataExtractionService service;
+
+ private ProjectInputDTO projectInput;
+ // KPI ids requested from KnowHOW; typed (raw List loses element type for getKpiList stubbing)
+ private List<String> kpiIdList;
+
+ @BeforeEach
+ void setUp() {
+ service = new KpiDataExtractionService(knowHOWClient, recommendationCalculationConfig);
+
+ projectInput = ProjectInputDTO.builder().nodeId("project-1").name("Test Project").hierarchyLevel(5)
+ .hierarchyLevelId("project").sprints(Collections.emptyList()).build();
+
+ kpiIdList = Arrays.asList("kpi14", "kpi82", "kpi111");
+
+ when(recommendationCalculationConfig.getCalculationConfig()).thenReturn(calculationConfig);
+ when(calculationConfig.getKpiList()).thenReturn(kpiIdList);
+ }
+
+ // Helper methods
+
+ /** Builds two KPI elements with simple single-value trend data. */
+ private List<KpiElement> createKpiElementsWithData() {
+ List<KpiElement> elements = new ArrayList<>();
+
+ elements.add(createKpiElementWithSimpleData("Code Quality", "85.5"));
+ elements.add(createKpiElementWithSimpleData("Velocity", "40"));
+
+ return elements;
+ }
+
+ /** Builds one KPI element whose trend list wraps a single inner DataCount. */
+ private KpiElement createKpiElementWithSimpleData(String kpiName, String dataValue) {
+ KpiElement kpiElement = new KpiElement();
+ kpiElement.setKpiName(kpiName);
+
+ DataCount innerDataCount = new DataCount();
+ innerDataCount.setData(dataValue);
+ innerDataCount.setSProjectName("Test Project");
+ innerDataCount.setSSprintName("Sprint 1");
+ innerDataCount.setDate("2024-01-01");
+
+ DataCount outerDataCount = new DataCount();
+ outerDataCount.setValue(Collections.singletonList(innerDataCount));
+
+ kpiElement.setTrendValueList(Collections.singletonList(outerDataCount));
+
+ return kpiElement;
+ }
+
+ /** Builds KPI elements covering the three trend shapes: DataCount, filter group, filter1/filter2 group. */
+ private List<KpiElement> createDiverseKpiElements() {
+ List<KpiElement> elements = new ArrayList<>();
+
+ // Simple DataCount
+ elements.add(createKpiElementWithSimpleData("KPI 1", "100"));
+
+ // DataCountGroup with filter
+ KpiElement kpi2 = new KpiElement();
+ kpi2.setKpiName("KPI 2");
+ DataCount inner2 = new DataCount();
+ inner2.setData("75");
+ DataCountGroup group2 = new DataCountGroup();
+ group2.setFilter("Average Coverage");
+ group2.setValue(Collections.singletonList(inner2));
+ kpi2.setTrendValueList(Collections.singletonList(group2));
+ elements.add(kpi2);
+
+ // DataCountGroup with filter1 and filter2
+ KpiElement kpi3 = new KpiElement();
+ kpi3.setKpiName("KPI 3");
+ DataCount inner3 = new DataCount();
+ inner3.setData("50");
+ DataCountGroup group3 = new DataCountGroup();
+ group3.setFilter1("Story Points");
+ group3.setFilter2("Overall");
+ group3.setValue(Collections.singletonList(inner3));
+ kpi3.setTrendValueList(Collections.singletonList(group3));
+ elements.add(kpi3);
+
+ return elements;
+ }
+
+ @Nested
+ @DisplayName("Successful Data Extraction")
+ class SuccessfulDataExtraction {
+
+ @Test
+ @DisplayName("Should fetch and extract KPI data successfully")
+ void fetchKpiDataForProject_ValidData_Success() {
+ // Arrange
+ List<KpiElement> kpiElements = createKpiElementsWithData();
+ when(knowHOWClient.getKpiIntegrationValues(anyList())).thenReturn(kpiElements);
+
+ // Act
+ // NOTE(review): value type assumed Object — confirm fetchKpiDataForProject signature
+ Map<String, Object> result = service.fetchKpiDataForProject(projectInput);
+
+ // Assert
+ assertNotNull(result);
+ assertFalse(result.isEmpty());
+ assertEquals(2, result.size());
+ assertTrue(result.containsKey("Code Quality"));
+ assertTrue(result.containsKey("Velocity"));
+ }
+
+ @Test
+ @DisplayName("Should construct correct KPI request")
+ void fetchKpiDataForProject_ConstructsCorrectRequest() {
+ // Arrange
+ List<KpiElement> kpiElements = createKpiElementsWithData();
+ when(knowHOWClient.getKpiIntegrationValues(anyList())).thenReturn(kpiElements);
+
+ // Act
+ service.fetchKpiDataForProject(projectInput);
+
+ // Assert
+ // Typed captor via Mockito's <U, S extends U> forClass inference (unchecked warning only).
+ ArgumentCaptor<List<KpiRequest>> captor = ArgumentCaptor.forClass(List.class);
+ verify(knowHOWClient, times(1)).getKpiIntegrationValues(captor.capture());
+
+ List<KpiRequest> requests = captor.getValue();
+ assertNotNull(requests);
+ assertEquals(1, requests.size());
+
+ KpiRequest request = requests.get(0);
+ assertEquals(kpiIdList, request.getKpiIdList());
+ assertTrue(request.getSelectedMap().containsKey("project"));
+ assertTrue(request.getSelectedMap().get("project").contains("project-1"));
+ }
+
+ @Test
+ @DisplayName("Should extract data from simple DataCount list")
+ void fetchKpiDataForProject_SimpleDataCount_ExtractsCorrectly() {
+ // Arrange
+ KpiElement kpiElement = new KpiElement();
+ kpiElement.setKpiName("Test KPI");
+
+ DataCount innerDataCount = new DataCount();
+ innerDataCount.setData("100");
+ innerDataCount.setSProjectName("Test Project");
+ innerDataCount.setSSprintName("Sprint 1");
+ innerDataCount.setDate("2024-01-01");
+
+ DataCount outerDataCount = new DataCount();
+ outerDataCount.setValue(Collections.singletonList(innerDataCount));
+
+ kpiElement.setTrendValueList(Collections.singletonList(outerDataCount));
+
+ when(knowHOWClient.getKpiIntegrationValues(anyList())).thenReturn(Collections.singletonList(kpiElement));
+
+ // Act
+ // NOTE(review): value type assumed Object — confirm fetchKpiDataForProject signature
+ Map<String, Object> result = service.fetchKpiDataForProject(projectInput);
+
+ // Assert
+ assertNotNull(result);
+ assertTrue(result.containsKey("Test KPI"));
+ // Unchecked cast; element type must be String for the contains("100") assertion below.
+ List<String> kpiData = (List<String>) result.get("Test KPI");
+ assertFalse(kpiData.isEmpty());
+ assertTrue(kpiData.get(0).contains("100"));
+ }
+
+ @Test
+ @DisplayName("Should extract data from DataCountGroup with filter match")
+ void fetchKpiDataForProject_DataCountGroup_ExtractsCorrectly() {
+ // Arrange
+ KpiElement kpiElement = new KpiElement();
+ kpiElement.setKpiName("Coverage KPI");
+
+ // Inner DataCount with actual data
+ DataCount actualDataItem = new DataCount();
+ actualDataItem.setData("85.5");
+ actualDataItem.setSProjectName("Test Project");
+ actualDataItem.setSSprintName("Sprint 1");
+ actualDataItem.setDate("2024-01-01");
+
+ // Outer DataCount that contains list of actual data items
+ DataCount outerDataCount = new DataCount();
+ outerDataCount.setValue(Collections.singletonList(actualDataItem));
+
+ // DataCountGroup with matching filter
+ DataCountGroup dataCountGroup = new DataCountGroup();
+ dataCountGroup.setFilter("Average Coverage");
+ dataCountGroup.setValue(Collections.singletonList(outerDataCount));
+
+ kpiElement.setTrendValueList(Collections.singletonList(dataCountGroup));
+
+ when(knowHOWClient.getKpiIntegrationValues(anyList())).thenReturn(Collections.singletonList(kpiElement));
+
+ // Act
+ Map result = service.fetchKpiDataForProject(projectInput);
+
+ // Assert
+ assertNotNull(result);
+ assertTrue(result.containsKey("Coverage KPI"));
+ List kpiData = (List) result.get("Coverage KPI");
+ assertFalse(kpiData.isEmpty());
+ assertTrue(kpiData.get(0).contains("85.5"));
+ }
+
+ @Test
+ @DisplayName("Should handle multiple KPIs with different data structures")
+ void fetchKpiDataForProject_MultipleKpis_ExtractsAll() {
+ // Arrange
+ List kpiElements = createDiverseKpiElements();
+ when(knowHOWClient.getKpiIntegrationValues(anyList())).thenReturn(kpiElements);
+
+ // Act
+ Map result = service.fetchKpiDataForProject(projectInput);
+
+ // Assert
+ assertNotNull(result);
+ assertEquals(3, result.size());
+ assertTrue(result.containsKey("KPI 1"));
+ assertTrue(result.containsKey("KPI 2"));
+ assertTrue(result.containsKey("KPI 3"));
+ }
+
+ @Test
+ @DisplayName("Should filter DataCountGroup by filter1 and filter2")
+ void fetchKpiDataForProject_DataCountGroupWithFilter1And2_ExtractsCorrectly() {
+ // Arrange
+ KpiElement kpiElement = new KpiElement();
+ kpiElement.setKpiName("Scope KPI");
+
+ // Inner DataCount with actual data
+ DataCount actualDataItem = new DataCount();
+ actualDataItem.setData("50");
+ actualDataItem.setSProjectName("Test Project");
+ actualDataItem.setSSprintName("Sprint 1");
+ actualDataItem.setDate("2024-01-01");
+
+ // Outer DataCount that contains list of actual data items
+ DataCount outerDataCount = new DataCount();
+ outerDataCount.setValue(Collections.singletonList(actualDataItem));
+
+ // DataCountGroup with filter1 and filter2
+ DataCountGroup dataCountGroup = new DataCountGroup();
+ dataCountGroup.setFilter1("Story Points");
+ dataCountGroup.setFilter2("Overall");
+ dataCountGroup.setValue(Collections.singletonList(outerDataCount));
+
+ kpiElement.setTrendValueList(Collections.singletonList(dataCountGroup));
+
+ when(knowHOWClient.getKpiIntegrationValues(anyList())).thenReturn(Collections.singletonList(kpiElement));
+
+ // Act
+ Map result = service.fetchKpiDataForProject(projectInput);
+
+ // Assert
+ assertNotNull(result);
+ assertTrue(result.containsKey("Scope KPI"));
+ List kpiData = (List) result.get("Scope KPI");
+ assertFalse(kpiData.isEmpty());
+ assertTrue(kpiData.get(0).contains("50"));
+ }
+ }
+
+ @Nested
+ @DisplayName("Exception Handling")
+ class ExceptionHandling {
+
+ @Test
+ @DisplayName("Should throw exception when no KPI elements received")
+ void fetchKpiDataForProject_NoKpiElements_ThrowsException() {
+ // Arrange
+ when(knowHOWClient.getKpiIntegrationValues(anyList())).thenReturn(Collections.emptyList());
+
+ // Act & Assert
+ IllegalStateException exception = assertThrows(IllegalStateException.class,
+ () -> service.fetchKpiDataForProject(projectInput));
+
+ assertTrue(exception.getMessage().contains("No KPI data received"));
+ assertTrue(exception.getMessage().contains("project-1"));
+ }
+
+ @Test
+ @DisplayName("Should throw exception when KPI elements are null")
+ void fetchKpiDataForProject_NullKpiElements_ThrowsException() {
+ // Arrange
+ when(knowHOWClient.getKpiIntegrationValues(anyList())).thenReturn(null);
+
+ // Act & Assert
+ assertThrows(IllegalStateException.class, () -> service.fetchKpiDataForProject(projectInput));
+ }
+
+ @Test
+ @DisplayName("Should throw exception when all KPI data is empty")
+ void fetchKpiDataForProject_AllEmptyKpiData_ThrowsException() {
+ // Arrange
+ KpiElement emptyKpi1 = new KpiElement();
+ emptyKpi1.setKpiName("Empty KPI 1");
+ emptyKpi1.setTrendValueList(Collections.emptyList());
+
+ KpiElement emptyKpi2 = new KpiElement();
+ emptyKpi2.setKpiName("Empty KPI 2");
+ emptyKpi2.setTrendValueList(Collections.emptyList());
+
+ when(knowHOWClient.getKpiIntegrationValues(anyList())).thenReturn(Arrays.asList(emptyKpi1, emptyKpi2));
+
+ // Act & Assert
+ IllegalStateException exception = assertThrows(IllegalStateException.class,
+ () -> service.fetchKpiDataForProject(projectInput));
+
+ assertTrue(exception.getMessage().contains("No meaningful KPI data available"));
+ }
+
+ @Test
+ @DisplayName("Should propagate exception from KnowHOW client")
+ void fetchKpiDataForProject_ClientException_PropagatesException() {
+ // Arrange
+ when(knowHOWClient.getKpiIntegrationValues(anyList()))
+ .thenThrow(new RuntimeException("API connection failed"));
+
+ // Act & Assert
+ RuntimeException exception = assertThrows(RuntimeException.class,
+ () -> service.fetchKpiDataForProject(projectInput));
+
+ assertEquals("API connection failed", exception.getMessage());
+ }
+ }
+
+ @Nested
+ @DisplayName("Edge Cases")
+ class EdgeCases {
+
+ @Test
+ @DisplayName("Should handle KPI with null trend value list")
+ void fetchKpiDataForProject_NullTrendValueList_HandlesGracefully() {
+ // Arrange
+ KpiElement kpiWithNullTrend = new KpiElement();
+ kpiWithNullTrend.setKpiName("Null Trend KPI");
+ kpiWithNullTrend.setTrendValueList(null);
+
+ KpiElement kpiWithData = createKpiElementWithSimpleData("Valid KPI", "50");
+
+ when(knowHOWClient.getKpiIntegrationValues(anyList()))
+ .thenReturn(Arrays.asList(kpiWithNullTrend, kpiWithData));
+
+ // Act
+ Map result = service.fetchKpiDataForProject(projectInput);
+
+ // Assert
+ assertNotNull(result);
+ assertEquals(2, result.size());
+ assertTrue(result.containsKey("Null Trend KPI"));
+ List nullKpiData = (List) result.get("Null Trend KPI");
+ assertTrue(nullKpiData.isEmpty());
+ }
+
+ @Test
+ @DisplayName("Should handle DataCount with null value")
+ void fetchKpiDataForProject_NullDataCountValue_HandlesGracefully() {
+ // Arrange
+ KpiElement kpiElement = new KpiElement();
+ kpiElement.setKpiName("Null Value KPI");
+
+ DataCount dataCount = new DataCount();
+ dataCount.setValue(null);
+
+ kpiElement.setTrendValueList(Collections.singletonList(dataCount));
+
+ when(knowHOWClient.getKpiIntegrationValues(anyList())).thenReturn(Collections.singletonList(kpiElement));
+
+ // Act & Assert
+ assertThrows(IllegalStateException.class, () -> service.fetchKpiDataForProject(projectInput));
+ }
+
+ @Test
+ @DisplayName("Should handle DataCountGroup not matching filter criteria")
+ void fetchKpiDataForProject_NonMatchingFilter_SkipsDataCountGroup() {
+ // Arrange
+ KpiElement kpiElement = new KpiElement();
+ kpiElement.setKpiName("Filtered KPI");
+
+ // Inner DataCount with actual data
+ DataCount actualDataItem = new DataCount();
+ actualDataItem.setData("100");
+ actualDataItem.setSProjectName("Test Project");
+ actualDataItem.setSSprintName("Sprint 1");
+ actualDataItem.setDate("2024-01-01");
+
+ // Outer DataCount that contains list of actual data items
+ DataCount outerDataCount = new DataCount();
+ outerDataCount.setValue(Collections.singletonList(actualDataItem));
+
+ // Non-matching DataCountGroup
+ DataCountGroup nonMatchingGroup = new DataCountGroup();
+ nonMatchingGroup.setFilter("Non-Matching Filter");
+ nonMatchingGroup.setValue(Collections.singletonList(outerDataCount));
+
+ // Matching DataCountGroup
+ DataCountGroup matchingGroup = new DataCountGroup();
+ matchingGroup.setFilter("Overall");
+ matchingGroup.setValue(Collections.singletonList(outerDataCount));
+
+ kpiElement.setTrendValueList(Arrays.asList(nonMatchingGroup, matchingGroup));
+
+ when(knowHOWClient.getKpiIntegrationValues(anyList())).thenReturn(Collections.singletonList(kpiElement));
+
+ // Act
+ Map result = service.fetchKpiDataForProject(projectInput);
+
+ // Assert
+ assertNotNull(result);
+ assertTrue(result.containsKey("Filtered KPI"));
+ List kpiData = (List) result.get("Filtered KPI");
+ assertFalse(kpiData.isEmpty()); // Should extract from matching group
+ assertTrue(kpiData.get(0).contains("100"));
+ }
+
+ @Test
+ @DisplayName("Should handle null DataCount items in value list")
+ void fetchKpiDataForProject_NullDataCountItems_SkipsNulls() {
+ // Arrange - Implementation doesn't currently handle nulls in list, will throw NPE
+ // This test verifies expected behavior if implementation is enhanced
+ KpiElement kpiElement = new KpiElement();
+ kpiElement.setKpiName("Partial Null KPI");
+
+ DataCount validDataCount = new DataCount();
+ validDataCount.setData("50");
+ validDataCount.setSProjectName("Project");
+
+ // Current implementation doesn't filter nulls, so just use valid items
+ List validList = new ArrayList<>();
+ validList.add(validDataCount);
+
+ DataCount outerDataCount = new DataCount();
+ outerDataCount.setValue(validList);
+
+ kpiElement.setTrendValueList(Collections.singletonList(outerDataCount));
+
+ when(knowHOWClient.getKpiIntegrationValues(anyList())).thenReturn(Collections.singletonList(kpiElement));
+
+ // Act
+ Map result = service.fetchKpiDataForProject(projectInput);
+
+ // Assert
+ assertNotNull(result);
+ List kpiData = (List) result.get("Partial Null KPI");
+ assertEquals(1, kpiData.size());
+ assertTrue(kpiData.get(0).contains("50"));
+ }
+
+ @Test
+ @DisplayName("Should handle KPI with empty data count list")
+ void fetchKpiDataForProject_EmptyDataCountList_CreatesEmptyList() {
+ // Arrange
+ KpiElement kpiElement = new KpiElement();
+ kpiElement.setKpiName("Empty Data KPI");
+
+ DataCount dataCount = new DataCount();
+ dataCount.setValue(Collections.emptyList());
+
+ kpiElement.setTrendValueList(Collections.singletonList(dataCount));
+
+ when(knowHOWClient.getKpiIntegrationValues(anyList())).thenReturn(Collections.singletonList(kpiElement));
+
+ // Act & Assert
+ assertThrows(IllegalStateException.class, () -> service.fetchKpiDataForProject(projectInput));
+ }
+
+ @Test
+ @DisplayName("Should handle special characters in KPI data")
+ void fetchKpiDataForProject_SpecialCharacters_HandlesCorrectly() {
+ // Arrange
+ KpiElement kpiElement = createKpiElementWithSimpleData("Special KPI", "<>&\"'");
+
+ when(knowHOWClient.getKpiIntegrationValues(anyList())).thenReturn(Collections.singletonList(kpiElement));
+
+ // Act
+ Map result = service.fetchKpiDataForProject(projectInput);
+
+ // Assert
+ assertNotNull(result);
+ List kpiData = (List) result.get("Special KPI");
+ assertFalse(kpiData.isEmpty());
+ }
+ }
+}
diff --git a/ai-data-processor/src/test/java/com/publicissapient/kpidashboard/job/recommendationcalculation/service/ProjectBatchServiceTest.java b/ai-data-processor/src/test/java/com/publicissapient/kpidashboard/job/recommendationcalculation/service/ProjectBatchServiceTest.java
new file mode 100644
index 000000000..4374d2fba
--- /dev/null
+++ b/ai-data-processor/src/test/java/com/publicissapient/kpidashboard/job/recommendationcalculation/service/ProjectBatchServiceTest.java
@@ -0,0 +1,503 @@
+/*
+ * Copyright 2014 CapitalOne, LLC.
+ * Further development Copyright 2022 Sapient Corporation.
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+package com.publicissapient.kpidashboard.job.recommendationcalculation.service;
+
+import static org.junit.jupiter.api.Assertions.assertDoesNotThrow;
+import static org.junit.jupiter.api.Assertions.assertEquals;
+import static org.junit.jupiter.api.Assertions.assertFalse;
+import static org.junit.jupiter.api.Assertions.assertNotNull;
+import static org.junit.jupiter.api.Assertions.assertNotSame;
+import static org.junit.jupiter.api.Assertions.assertNull;
+import static org.junit.jupiter.api.Assertions.assertThrows;
+import static org.junit.jupiter.api.Assertions.assertTrue;
+import static org.mockito.ArgumentMatchers.any;
+import static org.mockito.ArgumentMatchers.eq;
+import static org.mockito.Mockito.times;
+import static org.mockito.Mockito.verify;
+import static org.mockito.Mockito.verifyNoInteractions;
+import static org.mockito.Mockito.when;
+
+import java.util.ArrayList;
+import java.util.Collections;
+import java.util.List;
+
+import org.bson.types.ObjectId;
+import org.junit.jupiter.api.BeforeEach;
+import org.junit.jupiter.api.Test;
+import org.junit.jupiter.api.extension.ExtendWith;
+import org.mockito.InjectMocks;
+import org.mockito.Mock;
+import org.mockito.junit.jupiter.MockitoExtension;
+import org.springframework.data.domain.Page;
+import org.springframework.data.domain.PageImpl;
+import org.springframework.data.domain.PageRequest;
+import org.springframework.test.util.ReflectionTestUtils;
+
+import com.publicissapient.kpidashboard.common.model.application.HierarchyLevel;
+import com.publicissapient.kpidashboard.common.model.application.ProjectBasicConfig;
+import com.publicissapient.kpidashboard.common.repository.application.ProjectBasicConfigRepository;
+import com.publicissapient.kpidashboard.common.service.HierarchyLevelServiceImpl;
+import com.publicissapient.kpidashboard.job.config.base.BatchConfig;
+import com.publicissapient.kpidashboard.job.recommendationcalculation.config.RecommendationCalculationConfig;
+import com.publicissapient.kpidashboard.job.shared.dto.ProjectInputDTO;
+
+import lombok.extern.slf4j.Slf4j;
+
+@Slf4j
+@ExtendWith(MockitoExtension.class)
+class ProjectBatchServiceTest {
+
+ @Mock
+ private RecommendationCalculationConfig recommendationCalculationConfig;
+
+ @Mock
+ private ProjectBasicConfigRepository projectBasicConfigRepository;
+
+ @Mock
+ private HierarchyLevelServiceImpl hierarchyLevelServiceImpl;
+
+ @Mock
+ private BatchConfig batching;
+
+ @InjectMocks
+ private RecommendationProjectBatchService projectBatchService;
+
+ @BeforeEach
+ void setUp() {
+ // Reset any state that might have been set by previous tests
+ ReflectionTestUtils.setField(projectBatchService, "processingParameters", null);
+ }
+
+ @Test
+ void when_InitializeBatchProcessingParametersForTheNextProcess_Then_SetsCorrectDefaultValues() {
+ // Act
+ projectBatchService.initializeBatchProcessingParametersForTheNextProcess();
+
+ // Assert
+ Object processingParameters = ReflectionTestUtils.getField(projectBatchService, "processingParameters");
+ assertNotNull(processingParameters, "processingParameters should not be null after initialization");
+
+ // Verify all fields are set to expected default values
+ assertEquals(0, ReflectionTestUtils.getField(processingParameters, "currentPageNumber"));
+ assertEquals(0, ReflectionTestUtils.getField(processingParameters, "currentIndex"));
+ assertEquals(0, ReflectionTestUtils.getField(processingParameters, "numberOfPages"));
+
+ Object repositoryHasMoreData = ReflectionTestUtils.getField(processingParameters, "repositoryHasMoreData");
+ assertNotNull(repositoryHasMoreData);
+ assertFalse((Boolean) repositoryHasMoreData);
+
+ Object shouldStartANewBatchProcess = ReflectionTestUtils.getField(processingParameters,
+ "shouldStartANewBatchProcess");
+ assertNotNull(shouldStartANewBatchProcess);
+ assertTrue((Boolean) shouldStartANewBatchProcess);
+
+ assertNull(ReflectionTestUtils.getField(processingParameters, "currentProjectBatch"));
+ }
+
+ @Test
+ void when_InitializeBatchProcessingParametersCalledMultipleTimes_Then_ReplacesExistingParameters() {
+ // Arrange - First initialization
+ projectBatchService.initializeBatchProcessingParametersForTheNextProcess();
+ Object firstParameters = ReflectionTestUtils.getField(projectBatchService, "processingParameters");
+
+ // Act - Second initialization
+ projectBatchService.initializeBatchProcessingParametersForTheNextProcess();
+ Object secondParameters = ReflectionTestUtils.getField(projectBatchService, "processingParameters");
+
+ // Assert
+ assertNotNull(firstParameters);
+ assertNotNull(secondParameters);
+ assertNotSame(firstParameters, secondParameters, "Second call should create a new instance");
+
+ // Verify second instance has correct default values
+ assertEquals(0, ReflectionTestUtils.getField(secondParameters, "currentPageNumber"));
+ assertEquals(0, ReflectionTestUtils.getField(secondParameters, "currentIndex"));
+ assertTrue((Boolean) ReflectionTestUtils.getField(secondParameters, "shouldStartANewBatchProcess"));
+ }
+
+ @Test
+ void when_InitializeBatchProcessingParameters_Then_DoesNotInteractWithDependencies() {
+ // Act
+ projectBatchService.initializeBatchProcessingParametersForTheNextProcess();
+
+ // Assert - Verify no interactions with mocked dependencies
+ verifyNoInteractions(recommendationCalculationConfig);
+ verifyNoInteractions(projectBasicConfigRepository);
+ verifyNoInteractions(hierarchyLevelServiceImpl);
+ }
+
+ @Test
+ void when_GetNextProjectInputDataWithShouldStartNewBatchProcess_Then_InitializesNewBatchAndReturnsFirstItem() {
+ initializeBatchProcessingParameters();
+ // Arrange
+ List projects = createMockProjects(2);
+ Page projectPage = new PageImpl<>(projects, PageRequest.of(0, 2), 2);
+
+ when(projectBasicConfigRepository.findByKanbanAndProjectOnHold(eq(false), eq(false), any(PageRequest.class))).thenReturn(projectPage);
+
+ // Act
+ ProjectInputDTO result = projectBatchService.getNextProjectInputData();
+
+ // Assert
+ assertNotNull(result);
+ assertEquals("Project1", result.name());
+ assertEquals("507f1f77bcf86cd799439011", result.basicProjectConfigId());
+ assertTrue(result.sprints().isEmpty()); // Recommendation calculation doesn't use sprints
+
+ // Verify state changes
+ Object parameters = ReflectionTestUtils.getField(projectBatchService, "processingParameters");
+ assertNotNull(parameters);
+ assertEquals(1, ReflectionTestUtils.getField(parameters, "currentIndex"));
+
+ Object shouldStartANewBatchProcess = ReflectionTestUtils.getField(parameters, "shouldStartANewBatchProcess");
+ assertNotNull(shouldStartANewBatchProcess);
+ assertFalse((Boolean) shouldStartANewBatchProcess);
+
+ verify(projectBasicConfigRepository).findByKanbanAndProjectOnHold(eq(false), eq(false), any(PageRequest.class));
+ }
+
+ @Test
+ void when_GetNextProjectInputDataWithEmptyBatchAfterInitialization_Then_ReturnsNull() {
+ initializeBatchProcessingParameters();
+ // Arrange
+ Page emptyProjectPage = new PageImpl<>(Collections.emptyList(), PageRequest.of(0, 2), 0);
+
+ when(projectBasicConfigRepository.findByKanbanAndProjectOnHold(eq(false), eq(false), any(PageRequest.class))).thenReturn(emptyProjectPage);
+
+ // Act
+ ProjectInputDTO result = projectBatchService.getNextProjectInputData();
+
+ // Assert
+ assertNull(result);
+
+ // Verify state
+ Object parameters = ReflectionTestUtils.getField(projectBatchService, "processingParameters");
+ assertNotNull(parameters);
+ assertEquals(0, ReflectionTestUtils.getField(parameters, "currentIndex"));
+
+ Object shouldStartANewBatchProcess = ReflectionTestUtils.getField(parameters, "shouldStartANewBatchProcess");
+ assertNotNull(shouldStartANewBatchProcess);
+ assertFalse((Boolean) shouldStartANewBatchProcess);
+ }
+
+ @Test
+ void when_GetNextProjectInputDataWithCurrentBatchProcessed_Then_LoadsNextBatchAndReturnsFirstItem() {
+ initializeBatchProcessingParameters();
+ // Arrange - Setup initial batch
+ List firstBatch = createMockProjects(2);
+ List secondBatch = createMockProjects(1, 2); // Start from index 2
+
+ Page firstPage = new PageImpl<>(firstBatch, PageRequest.of(0, 2), 3);
+ Page secondPage = new PageImpl<>(secondBatch, PageRequest.of(1, 2), 3);
+
+ when(projectBasicConfigRepository.findByKanbanAndProjectOnHold(eq(false), eq(false), eq(PageRequest.of(0, 2)))).thenReturn(firstPage);
+ when(projectBasicConfigRepository.findByKanbanAndProjectOnHold(eq(false), eq(false), eq(PageRequest.of(1, 2)))).thenReturn(secondPage);
+
+ // Process first batch completely
+ ProjectInputDTO first = projectBatchService.getNextProjectInputData();
+ ProjectInputDTO second = projectBatchService.getNextProjectInputData();
+
+ // Act - Get next item which should trigger loading second batch
+ ProjectInputDTO third = projectBatchService.getNextProjectInputData();
+
+ // Assert
+ assertNotNull(first);
+ assertNotNull(second);
+ assertNotNull(third);
+ assertEquals("Project1", first.name());
+ assertEquals("Project2", second.name());
+ assertEquals("Project3", third.name());
+
+ // Verify repository calls
+ verify(projectBasicConfigRepository).findByKanbanAndProjectOnHold(eq(false), eq(false), eq(PageRequest.of(0, 2)));
+ verify(projectBasicConfigRepository).findByKanbanAndProjectOnHold(eq(false), eq(false), eq(PageRequest.of(1, 2)));
+ }
+
+ @Test
+ void when_GetNextProjectInputDataWithNoMoreDataInRepository_Then_ReturnsNull() {
+ initializeBatchProcessingParameters();
+ // Arrange - Setup single batch with no more data
+ List projects = createMockProjects(1);
+ Page projectPage = new PageImpl<>(projects, PageRequest.of(0, 2), 1);
+
+ when(projectBasicConfigRepository.findByKanbanAndProjectOnHold(eq(false), eq(false), any(PageRequest.class))).thenReturn(projectPage);
+
+ // Process the only item
+ ProjectInputDTO first = projectBatchService.getNextProjectInputData();
+
+ // Act - Try to get next item when no more data exists
+ ProjectInputDTO second = projectBatchService.getNextProjectInputData();
+
+ // Assert
+ assertNotNull(first);
+ assertNull(second);
+
+ // Verify state
+ Object parameters = ReflectionTestUtils.getField(projectBatchService, "processingParameters");
+ assertNotNull(parameters);
+
+ Object repositoryHasMoreData = ReflectionTestUtils.getField(parameters, "repositoryHasMoreData");
+ assertNotNull(repositoryHasMoreData);
+ assertFalse((Boolean) repositoryHasMoreData);
+ }
+
+ @Test
+ void when_GetNextProjectInputDataWithMultipleCalls_Then_IncrementsIndexCorrectly() {
+ initializeBatchProcessingParameters();
+ // Arrange
+ List projects = createMockProjects(3);
+ Page projectPage = new PageImpl<>(projects, PageRequest.of(0, 3), 3);
+
+ when(projectBasicConfigRepository.findByKanbanAndProjectOnHold(eq(false), eq(false), any(PageRequest.class))).thenReturn(projectPage);
+
+ // Act & Assert - Process items and verify index increments
+ ProjectInputDTO first = projectBatchService.getNextProjectInputData();
+ Object parameters = ReflectionTestUtils.getField(projectBatchService, "processingParameters");
+ assertNotNull(parameters);
+ assertEquals(1, ReflectionTestUtils.getField(parameters, "currentIndex"));
+
+ ProjectInputDTO second = projectBatchService.getNextProjectInputData();
+ assertEquals(2, ReflectionTestUtils.getField(parameters, "currentIndex"));
+
+ ProjectInputDTO third = projectBatchService.getNextProjectInputData();
+ assertEquals(3, ReflectionTestUtils.getField(parameters, "currentIndex"));
+
+ assertNotNull(first);
+ assertNotNull(second);
+ assertNotNull(third);
+ assertEquals("Project1", first.name());
+ assertEquals("Project2", second.name());
+ assertEquals("Project3", third.name());
+ }
+
+ @Test
+ void when_GetNextProjectInputDataAfterBatchReset_Then_StartsNewBatchProcess() {
+ initializeBatchProcessingParameters();
+ // Arrange
+ List projects = createMockProjects(1);
+ Page projectPage = new PageImpl<>(projects, PageRequest.of(0, 2), 1);
+
+ when(projectBasicConfigRepository.findByKanbanAndProjectOnHold(eq(false), eq(false), any(PageRequest.class))).thenReturn(projectPage);
+
+ // Process first batch completely
+ ProjectInputDTO first = projectBatchService.getNextProjectInputData();
+ ProjectInputDTO second = projectBatchService.getNextProjectInputData(); // Should return null
+
+ // Reset for next process
+ projectBatchService.initializeBatchProcessingParametersForTheNextProcess();
+
+ // Act - Get next item after reset
+ ProjectInputDTO afterReset = projectBatchService.getNextProjectInputData();
+
+ // Assert
+ assertNotNull(first);
+ assertNull(second);
+ assertNotNull(afterReset);
+ assertEquals("Project1", first.name());
+ assertEquals("Project1", afterReset.name());
+
+ // Verify repository was called again after reset
+ verify(projectBasicConfigRepository, times(2)).findByKanbanAndProjectOnHold(eq(false), eq(false), any(PageRequest.class));
+ }
+
+ @Test
+ void when_GetNextProjectInputDataWithNullProjectId_Then_FiltersOutNullIdProjects() {
+ initializeBatchProcessingParameters();
+ // Arrange
+ List projects = new ArrayList<>();
+
+ ProjectBasicConfig validProject = new ProjectBasicConfig();
+ validProject.setId(new ObjectId());
+ validProject.setProjectName("ValidProject");
+ validProject.setProjectDisplayName("ValidProject");
+ validProject.setProjectNodeId("valid-node");
+ validProject.setKanban(false);
+ validProject.setProjectOnHold(false);
+
+ ProjectBasicConfig nullIdProject = new ProjectBasicConfig();
+ nullIdProject.setId(null);
+ nullIdProject.setProjectName("NullIdProject");
+ nullIdProject.setProjectNodeId("null-node");
+ nullIdProject.setKanban(false);
+ nullIdProject.setProjectOnHold(false);
+
+ projects.add(validProject);
+ projects.add(nullIdProject);
+
+ Page projectPage = new PageImpl<>(projects, PageRequest.of(0, 2), 2);
+
+ when(projectBasicConfigRepository.findByKanbanAndProjectOnHold(eq(false), eq(false), any(PageRequest.class))).thenReturn(projectPage);
+
+ // Act
+ ProjectInputDTO first = projectBatchService.getNextProjectInputData();
+ ProjectInputDTO second = projectBatchService.getNextProjectInputData();
+
+ // Assert
+ assertNotNull(first);
+ assertNull(second); // Only valid project should be processed
+ assertEquals("ValidProject", first.name());
+ }
+
+ @Test
+ void when_GetNextProjectInputDataWithRepositoryException_Then_PropagatesException() {
+ // Setup configuration mocks
+ when(recommendationCalculationConfig.getBatching()).thenReturn(batching);
+ when(batching.getChunkSize()).thenReturn(2);
+ projectBatchService.initializeBatchProcessingParametersForTheNextProcess();
+
+ // Arrange
+ when(projectBasicConfigRepository.findByKanbanAndProjectOnHold(eq(false), eq(false), any(PageRequest.class)))
+ .thenThrow(new RuntimeException("Database connection failed"));
+
+ // Act & Assert
+ RuntimeException exception = assertThrows(RuntimeException.class,
+ () -> projectBatchService.getNextProjectInputData());
+
+ assertEquals("Database connection failed", exception.getMessage());
+ }
+
+ @Test
+ void when_GetNextProjectInputDataWithComplexPagination_Then_HandlesMultiplePageTransitions() {
+ initializeBatchProcessingParameters();
+
+ // Arrange - Setup 3 pages with 2 items each
+ List page1Projects = createMockProjects(2, 0);
+ List page2Projects = createMockProjects(2, 2);
+ List page3Projects = createMockProjects(1, 4);
+
+ Page page1 = new PageImpl<>(page1Projects, PageRequest.of(0, 2), 5);
+ Page page2 = new PageImpl<>(page2Projects, PageRequest.of(1, 2), 5);
+ Page page3 = new PageImpl<>(page3Projects, PageRequest.of(2, 2), 5);
+
+ when(projectBasicConfigRepository.findByKanbanAndProjectOnHold(eq(false), eq(false), eq(PageRequest.of(0, 2)))).thenReturn(page1);
+ when(projectBasicConfigRepository.findByKanbanAndProjectOnHold(eq(false), eq(false), eq(PageRequest.of(1, 2)))).thenReturn(page2);
+ when(projectBasicConfigRepository.findByKanbanAndProjectOnHold(eq(false), eq(false), eq(PageRequest.of(2, 2)))).thenReturn(page3);
+
+ // Act - Process all items across multiple pages
+ List results = new ArrayList<>();
+ ProjectInputDTO item;
+ while ((item = projectBatchService.getNextProjectInputData()) != null) {
+ results.add(item);
+ }
+
+ // Assert
+ assertEquals(5, results.size());
+ assertEquals("Project1", results.get(0).name());
+ assertEquals("Project2", results.get(1).name());
+ assertEquals("Project3", results.get(2).name());
+ assertEquals("Project4", results.get(3).name());
+ assertEquals("Project5", results.get(4).name());
+
+ // Verify all pages were loaded
+ verify(projectBasicConfigRepository).findByKanbanAndProjectOnHold(eq(false), eq(false), eq(PageRequest.of(0, 2)));
+ verify(projectBasicConfigRepository).findByKanbanAndProjectOnHold(eq(false), eq(false), eq(PageRequest.of(1, 2)));
+ verify(projectBasicConfigRepository).findByKanbanAndProjectOnHold(eq(false), eq(false), eq(PageRequest.of(2, 2)));
+ }
+
+ @Test
+ void when_ProjectInputDTOCreated_Then_ContainsEmptySprintsList() {
+ initializeBatchProcessingParameters();
+ // Arrange
+ List projects = createMockProjects(1);
+ Page projectPage = new PageImpl<>(projects, PageRequest.of(0, 2), 1);
+
+ when(projectBasicConfigRepository.findByKanbanAndProjectOnHold(eq(false), eq(false), any(PageRequest.class))).thenReturn(projectPage);
+
+ // Act
+ ProjectInputDTO result = projectBatchService.getNextProjectInputData();
+
+ // Assert
+ assertNotNull(result);
+ assertNotNull(result.sprints());
+ assertTrue(result.sprints().isEmpty(), "Recommendation calculation should not include sprints");
+ }
+
+ @Test
+ void when_InitializeBatchProcessingParametersAfterServiceInstantiation_Then_ParametersAreCorrectlyInitialized() {
+ // This test simulates the @PostConstruct behavior
+ // Arrange - Create a fresh service instance
+ RecommendationProjectBatchService freshService = new RecommendationProjectBatchService(recommendationCalculationConfig,
+ projectBasicConfigRepository, hierarchyLevelServiceImpl);
+
+ // Act - Simulate @PostConstruct call
+ ReflectionTestUtils.invokeMethod(freshService, "initializeBatchProcessingParameters");
+
+ // Assert
+ Object parameters = ReflectionTestUtils.getField(freshService, "processingParameters");
+ assertNotNull(parameters);
+
+ // Verify the parameters object has the correct structure and values
+ assertEquals(0, ReflectionTestUtils.getField(parameters, "currentPageNumber"));
+ assertEquals(0, ReflectionTestUtils.getField(parameters, "currentIndex"));
+ assertTrue((Boolean) ReflectionTestUtils.getField(parameters, "shouldStartANewBatchProcess"));
+ }
+
+ @Test
+ void when_InitializeBatchProcessingParametersInConcurrentEnvironment_Then_HandlesMultipleCallsCorrectly() {
+ // This test ensures thread safety of the initialization method
+ // Act - Multiple rapid calls to simulate concurrent access
+ projectBatchService.initializeBatchProcessingParametersForTheNextProcess();
+ projectBatchService.initializeBatchProcessingParametersForTheNextProcess();
+ projectBatchService.initializeBatchProcessingParametersForTheNextProcess();
+
+ // Assert - Final state should be consistent
+ Object parameters = ReflectionTestUtils.getField(projectBatchService, "processingParameters");
+ assertNotNull(parameters);
+
+ // Verify final state has correct default values regardless of multiple calls
+ assertEquals(0, ReflectionTestUtils.getField(parameters, "currentPageNumber"));
+ assertEquals(0, ReflectionTestUtils.getField(parameters, "currentIndex"));
+ assertTrue((Boolean) ReflectionTestUtils.getField(parameters, "shouldStartANewBatchProcess"));
+ }
+
+ // Helper methods
+ private List createMockProjects(int count) {
+ return createMockProjects(count, 0);
+ }
+
+ private List createMockProjects(int count, int startIndex) {
+ List projects = new ArrayList<>();
+ for (int i = 0; i < count; i++) {
+ ProjectBasicConfig project = new ProjectBasicConfig();
+ project.setId(new ObjectId("507f1f77bcf86cd799439011")); // Fixed ObjectId for testing
+ project.setProjectName("Project" + (startIndex + i + 1));
+ project.setProjectDisplayName("Project" + (startIndex + i + 1));
+ project.setProjectNodeId("project" + (startIndex + i + 1) + "-node");
+ project.setKanban(false);
+ project.setProjectOnHold(false);
+ projects.add(project);
+ }
+ return projects;
+ }
+
+ private void initializeBatchProcessingParameters() {
+ HierarchyLevel mockProjectHierarchyLevel = new HierarchyLevel();
+ mockProjectHierarchyLevel.setLevel(5);
+ mockProjectHierarchyLevel.setHierarchyLevelId("project");
+
+ // Setup configuration mocks
+ when(recommendationCalculationConfig.getBatching()).thenReturn(batching);
+ when(batching.getChunkSize()).thenReturn(2);
+
+ // Setup hierarchy level mocks
+ when(hierarchyLevelServiceImpl.getProjectHierarchyLevel()).thenReturn(mockProjectHierarchyLevel);
+
+ // Initialize batch processing parameters
+ projectBatchService.initializeBatchProcessingParametersForTheNextProcess();
+ }
+}
diff --git a/ai-data-processor/src/test/java/com/publicissapient/kpidashboard/job/recommendationcalculation/service/RecommendationCalculationServiceTest.java b/ai-data-processor/src/test/java/com/publicissapient/kpidashboard/job/recommendationcalculation/service/RecommendationCalculationServiceTest.java
new file mode 100644
index 000000000..6dfb7eb80
--- /dev/null
+++ b/ai-data-processor/src/test/java/com/publicissapient/kpidashboard/job/recommendationcalculation/service/RecommendationCalculationServiceTest.java
@@ -0,0 +1,341 @@
+/*
+ * Copyright 2014 CapitalOne, LLC.
+ * Further development Copyright 2022 Sapient Corporation.
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+package com.publicissapient.kpidashboard.job.recommendationcalculation.service;
+
+import static org.junit.jupiter.api.Assertions.assertEquals;
+import static org.junit.jupiter.api.Assertions.assertFalse;
+import static org.junit.jupiter.api.Assertions.assertNotNull;
+import static org.junit.jupiter.api.Assertions.assertThrows;
+import static org.junit.jupiter.api.Assertions.assertTrue;
+import static org.mockito.ArgumentMatchers.any;
+import static org.mockito.ArgumentMatchers.anyMap;
+import static org.mockito.ArgumentMatchers.eq;
+import static org.mockito.Mockito.never;
+import static org.mockito.Mockito.verify;
+import static org.mockito.Mockito.when;
+
+import java.time.Instant;
+import java.util.HashMap;
+import java.util.List;
+import java.util.Map;
+import java.util.Optional;
+import java.util.concurrent.TimeUnit;
+
+import org.junit.jupiter.api.BeforeEach;
+import org.junit.jupiter.api.DisplayName;
+import org.junit.jupiter.api.Nested;
+import org.junit.jupiter.api.Test;
+import org.junit.jupiter.api.extension.ExtendWith;
+import org.mockito.InjectMocks;
+import org.mockito.Mock;
+import org.mockito.junit.jupiter.MockitoExtension;
+
+import com.knowhow.retro.aigatewayclient.client.AiGatewayClient;
+import com.knowhow.retro.aigatewayclient.client.request.chat.ChatGenerationRequest;
+import com.knowhow.retro.aigatewayclient.client.response.chat.ChatGenerationResponseDTO;
+import com.publicissapient.kpidashboard.common.model.recommendation.batch.Persona;
+import com.publicissapient.kpidashboard.common.model.recommendation.batch.Recommendation;
+import com.publicissapient.kpidashboard.common.model.recommendation.batch.RecommendationLevel;
+import com.publicissapient.kpidashboard.common.model.recommendation.batch.RecommendationsActionPlan;
+import com.publicissapient.kpidashboard.common.service.recommendation.PromptService;
+import com.publicissapient.kpidashboard.config.mongo.TTLIndexConfigProperties;
+import com.publicissapient.kpidashboard.job.recommendationcalculation.config.CalculationConfig;
+import com.publicissapient.kpidashboard.job.recommendationcalculation.config.RecommendationCalculationConfig;
+import com.publicissapient.kpidashboard.job.recommendationcalculation.parser.BatchRecommendationResponseParser;
+import com.publicissapient.kpidashboard.job.shared.dto.ProjectInputDTO;
+
+@ExtendWith(MockitoExtension.class)
+@DisplayName("RecommendationCalculationService Tests")
+class RecommendationCalculationServiceTest {
+
+ @Mock
+ private AiGatewayClient aiGatewayClient;
+
+ @Mock
+ private KpiDataExtractionService kpiDataExtractionService;
+
+ @Mock
+ private PromptService promptService;
+
+ @Mock
+ private BatchRecommendationResponseParser recommendationResponseParser;
+
+ @Mock
+ private RecommendationCalculationConfig recommendationCalculationConfig;
+
+ @Mock
+ private TTLIndexConfigProperties ttlIndexConfigProperties;
+
+ @InjectMocks
+ private RecommendationCalculationService recommendationCalculationService;
+
+ private ProjectInputDTO testProjectInput;
+ private Map<String, List<String>> testKpiData;
+ private Recommendation testRecommendation;
+ private ChatGenerationResponseDTO testAiResponse;
+ private CalculationConfig testCalculationConfig;
+ private TTLIndexConfigProperties.TTLIndexConfig ttlConfig;
+
+ @BeforeEach
+ void setUp() {
+ testProjectInput = ProjectInputDTO.builder().nodeId("test-project-id").name("Test Project").hierarchyLevel(5)
+ .hierarchyLevelId("project").build();
+
+ testKpiData = new HashMap<>();
+ testKpiData.put("Velocity", List.of("Sprint 1: 45 SP", "Sprint 2: 50 SP"));
+
+ testRecommendation = Recommendation.builder().title("Improve Velocity")
+ .description("Test recommendation description").build();
+
+ testAiResponse = new ChatGenerationResponseDTO("Test AI response");
+
+ testCalculationConfig = new CalculationConfig();
+ testCalculationConfig.setEnabledPersona(Persona.ENGINEERING_LEAD);
+ testCalculationConfig.setKpiList(List.of("kpi14", "kpi17"));
+
+ ttlConfig = new TTLIndexConfigProperties.TTLIndexConfig();
+ ttlConfig.setExpiration(30);
+ ttlConfig.setTimeUnit(TimeUnit.DAYS);
+ }
+
+ @Nested
+ @DisplayName("Successful Scenarios")
+ class SuccessfulScenarios {
+
+ @Test
+ @DisplayName("Should successfully calculate recommendations for project")
+ void calculateRecommendationsForProject_Success() {
+ // Arrange
+ when(recommendationCalculationConfig.getCalculationConfig()).thenReturn(testCalculationConfig);
+ when(kpiDataExtractionService.fetchKpiDataForProject(testProjectInput)).thenReturn(testKpiData);
+ when(promptService.getKpiRecommendationPrompt(anyMap(), eq(Persona.ENGINEERING_LEAD)))
+ .thenReturn("Test prompt");
+ when(aiGatewayClient.generate(any(ChatGenerationRequest.class))).thenReturn(testAiResponse);
+ when(recommendationResponseParser.parseRecommendation(testAiResponse))
+ .thenReturn(Optional.of(testRecommendation));
+ when(ttlIndexConfigProperties.getConfigs()).thenReturn(Map.of("recommendation-calculation", ttlConfig));
+
+ // Act
+ RecommendationsActionPlan result = recommendationCalculationService
+ .calculateRecommendationsForProject(testProjectInput);
+
+ // Assert
+ assertNotNull(result);
+ assertEquals(testProjectInput.basicProjectConfigId(), result.getBasicProjectConfigId());
+ assertEquals(testProjectInput.name(), result.getProjectName());
+ assertEquals(Persona.ENGINEERING_LEAD, result.getPersona());
+ assertEquals(RecommendationLevel.PROJECT_LEVEL, result.getLevel());
+ assertNotNull(result.getRecommendations());
+ assertEquals(testRecommendation.getTitle(), result.getRecommendations().getTitle());
+ assertNotNull(result.getMetadata());
+ assertNotNull(result.getCreatedAt());
+ assertNotNull(result.getExpiresOn());
+
+ verify(kpiDataExtractionService).fetchKpiDataForProject(testProjectInput);
+ verify(promptService).getKpiRecommendationPrompt(testKpiData, Persona.ENGINEERING_LEAD);
+ verify(aiGatewayClient).generate(any(ChatGenerationRequest.class));
+ verify(recommendationResponseParser).parseRecommendation(testAiResponse);
+ }
+
+ @Test
+ @DisplayName("Should correctly set TTL expiration from config")
+ void calculateRecommendationsForProject_SetsTTLCorrectly() {
+ // Arrange
+ when(recommendationCalculationConfig.getCalculationConfig()).thenReturn(testCalculationConfig);
+ when(kpiDataExtractionService.fetchKpiDataForProject(testProjectInput)).thenReturn(testKpiData);
+ when(promptService.getKpiRecommendationPrompt(anyMap(), any())).thenReturn("Test prompt");
+ when(aiGatewayClient.generate(any())).thenReturn(testAiResponse);
+ when(recommendationResponseParser.parseRecommendation(testAiResponse))
+ .thenReturn(Optional.of(testRecommendation));
+ when(ttlIndexConfigProperties.getConfigs()).thenReturn(Map.of("recommendation-calculation", ttlConfig));
+
+ Instant beforeCall = Instant.now();
+
+ // Act
+ RecommendationsActionPlan result = recommendationCalculationService
+ .calculateRecommendationsForProject(testProjectInput);
+
+ // Assert
+ assertNotNull(result.getExpiresOn());
+ long expectedTtlSeconds = 30 * 24 * 60 * 60; // 30 days in seconds
+ long actualDiff = result.getExpiresOn().getEpochSecond() - result.getCreatedAt().getEpochSecond();
+ assertEquals(expectedTtlSeconds, actualDiff);
+ }
+
+ @Test
+ @DisplayName("Should build metadata with correct KPI list and persona")
+ void calculateRecommendationsForProject_BuildsCorrectMetadata() {
+ // Arrange
+ when(recommendationCalculationConfig.getCalculationConfig()).thenReturn(testCalculationConfig);
+ when(kpiDataExtractionService.fetchKpiDataForProject(testProjectInput)).thenReturn(testKpiData);
+ when(promptService.getKpiRecommendationPrompt(anyMap(), any())).thenReturn("Test prompt");
+ when(aiGatewayClient.generate(any())).thenReturn(testAiResponse);
+ when(recommendationResponseParser.parseRecommendation(testAiResponse))
+ .thenReturn(Optional.of(testRecommendation));
+ when(ttlIndexConfigProperties.getConfigs()).thenReturn(Map.of("recommendation-calculation", ttlConfig));
+
+ // Act
+ RecommendationsActionPlan result = recommendationCalculationService
+ .calculateRecommendationsForProject(testProjectInput);
+
+ // Assert
+ assertNotNull(result.getMetadata());
+ assertEquals(Persona.ENGINEERING_LEAD, result.getMetadata().getPersona());
+ assertEquals(testCalculationConfig.getKpiList(), result.getMetadata().getRequestedKpis());
+ }
+ }
+
+ @Nested
+ @DisplayName("Exception Scenarios")
+ class ExceptionScenarios {
+
+ @Test
+ @DisplayName("Should throw IllegalStateException when AI response parsing fails")
+ void calculateRecommendationsForProject_ParsingFails_ThrowsIllegalStateException() {
+ // Arrange
+ when(recommendationCalculationConfig.getCalculationConfig()).thenReturn(testCalculationConfig);
+ when(kpiDataExtractionService.fetchKpiDataForProject(testProjectInput)).thenReturn(testKpiData);
+ when(promptService.getKpiRecommendationPrompt(anyMap(), any())).thenReturn("Test prompt");
+ when(aiGatewayClient.generate(any())).thenReturn(testAiResponse);
+ when(recommendationResponseParser.parseRecommendation(testAiResponse)).thenReturn(Optional.empty());
+
+ // Act & Assert
+ IllegalStateException exception = assertThrows(IllegalStateException.class,
+ () -> recommendationCalculationService.calculateRecommendationsForProject(testProjectInput));
+
+ assertTrue(exception.getMessage().contains("Failed to parse AI recommendation"));
+ assertTrue(exception.getMessage().contains(testProjectInput.nodeId()));
+ }
+
+ @Test
+ @DisplayName("Should throw RuntimeException when AI Gateway fails")
+ void calculateRecommendationsForProject_AiGatewayFails_ThrowsRuntimeException() {
+ // Arrange
+ RuntimeException aiException = new RuntimeException("AI Gateway connection failed");
+ when(recommendationCalculationConfig.getCalculationConfig()).thenReturn(testCalculationConfig);
+ when(kpiDataExtractionService.fetchKpiDataForProject(testProjectInput)).thenReturn(testKpiData);
+ when(promptService.getKpiRecommendationPrompt(anyMap(), any())).thenReturn("Test prompt");
+ when(aiGatewayClient.generate(any())).thenThrow(aiException);
+
+ // Act & Assert
+ RuntimeException exception = assertThrows(RuntimeException.class,
+ () -> recommendationCalculationService.calculateRecommendationsForProject(testProjectInput));
+
+ assertEquals("AI Gateway connection failed", exception.getMessage());
+ }
+
+ @Test
+ @DisplayName("Should throw RuntimeException when KPI extraction fails")
+ void calculateRecommendationsForProject_KpiExtractionFails_ThrowsRuntimeException() {
+ // Arrange
+ RuntimeException kpiException = new RuntimeException("KPI data fetch failed");
+ when(recommendationCalculationConfig.getCalculationConfig()).thenReturn(testCalculationConfig);
+ when(kpiDataExtractionService.fetchKpiDataForProject(testProjectInput)).thenThrow(kpiException);
+
+ // Act & Assert
+ RuntimeException exception = assertThrows(RuntimeException.class,
+ () -> recommendationCalculationService.calculateRecommendationsForProject(testProjectInput));
+
+ assertEquals("KPI data fetch failed", exception.getMessage());
+
+ verify(promptService, never()).getKpiRecommendationPrompt(anyMap(), any());
+ verify(aiGatewayClient, never()).generate(any());
+ }
+
+ @Test
+ @DisplayName("Should throw IllegalStateException when TTL config not found")
+ void calculateRecommendationsForProject_MissingTTLConfig_ThrowsIllegalStateException() {
+ // Arrange
+ when(recommendationCalculationConfig.getCalculationConfig()).thenReturn(testCalculationConfig);
+ when(kpiDataExtractionService.fetchKpiDataForProject(testProjectInput)).thenReturn(testKpiData);
+ when(promptService.getKpiRecommendationPrompt(anyMap(), any())).thenReturn("Test prompt");
+ when(aiGatewayClient.generate(any())).thenReturn(testAiResponse);
+ when(recommendationResponseParser.parseRecommendation(testAiResponse))
+ .thenReturn(Optional.of(testRecommendation));
+ when(ttlIndexConfigProperties.getConfigs()).thenReturn(new HashMap<>());
+
+ // Act & Assert
+ IllegalStateException exception = assertThrows(IllegalStateException.class,
+ () -> recommendationCalculationService.calculateRecommendationsForProject(testProjectInput));
+
+ assertTrue(exception.getMessage().contains("TTL configuration"));
+ }
+
+ @Test
+ @DisplayName("Should throw NullPointerException when project input is null")
+ void calculateRecommendationsForProject_NullInput_ThrowsNullPointerException() {
+ // Act & Assert
+ assertThrows(NullPointerException.class,
+ () -> recommendationCalculationService.calculateRecommendationsForProject(null));
+ }
+ }
+
+ @Nested
+ @DisplayName("Integration Scenarios")
+ class IntegrationScenarios {
+
+ @Test
+ @DisplayName("Should pass correct prompt to AI Gateway")
+ void calculateRecommendationsForProject_PassesCorrectPrompt() {
+ // Arrange
+ String expectedPrompt = "Custom AI prompt for ENGINEERING_LEAD";
+ when(recommendationCalculationConfig.getCalculationConfig()).thenReturn(testCalculationConfig);
+ when(kpiDataExtractionService.fetchKpiDataForProject(testProjectInput)).thenReturn(testKpiData);
+ when(promptService.getKpiRecommendationPrompt(testKpiData, Persona.ENGINEERING_LEAD))
+ .thenReturn(expectedPrompt);
+ when(aiGatewayClient.generate(any())).thenReturn(testAiResponse);
+ when(recommendationResponseParser.parseRecommendation(testAiResponse))
+ .thenReturn(Optional.of(testRecommendation));
+ when(ttlIndexConfigProperties.getConfigs()).thenReturn(Map.of("recommendation-calculation", ttlConfig));
+
+ // Act
+ recommendationCalculationService.calculateRecommendationsForProject(testProjectInput);
+
+ // Assert
+ verify(promptService).getKpiRecommendationPrompt(testKpiData, Persona.ENGINEERING_LEAD);
+ verify(aiGatewayClient).generate(any(ChatGenerationRequest.class));
+ }
+
+ @Test
+ @DisplayName("Should use enabled persona from configuration")
+ void calculateRecommendationsForProject_UsesConfiguredPersona() {
+ // Arrange
+ CalculationConfig deliveryLeadConfig = new CalculationConfig();
+ deliveryLeadConfig.setEnabledPersona(Persona.PROJECT_ADMIN);
+ deliveryLeadConfig.setKpiList(List.of("kpi14"));
+
+ when(recommendationCalculationConfig.getCalculationConfig()).thenReturn(deliveryLeadConfig);
+ when(kpiDataExtractionService.fetchKpiDataForProject(testProjectInput)).thenReturn(testKpiData);
+ when(promptService.getKpiRecommendationPrompt(anyMap(), eq(Persona.PROJECT_ADMIN)))
+ .thenReturn("Test prompt");
+ when(aiGatewayClient.generate(any())).thenReturn(testAiResponse);
+ when(recommendationResponseParser.parseRecommendation(testAiResponse))
+ .thenReturn(Optional.of(testRecommendation));
+ when(ttlIndexConfigProperties.getConfigs()).thenReturn(Map.of("recommendation-calculation", ttlConfig));
+
+ // Act
+ RecommendationsActionPlan result = recommendationCalculationService
+ .calculateRecommendationsForProject(testProjectInput);
+
+ // Assert
+ assertEquals(Persona.PROJECT_ADMIN, result.getPersona());
+ assertEquals(Persona.PROJECT_ADMIN, result.getMetadata().getPersona());
+ verify(promptService).getKpiRecommendationPrompt(testKpiData, Persona.PROJECT_ADMIN);
+ }
+ }
+}
diff --git a/ai-data-processor/src/test/java/com/publicissapient/kpidashboard/job/recommendationcalculation/writer/ProjectItemWriterTest.java b/ai-data-processor/src/test/java/com/publicissapient/kpidashboard/job/recommendationcalculation/writer/ProjectItemWriterTest.java
new file mode 100644
index 000000000..67a4a6cd9
--- /dev/null
+++ b/ai-data-processor/src/test/java/com/publicissapient/kpidashboard/job/recommendationcalculation/writer/ProjectItemWriterTest.java
@@ -0,0 +1,394 @@
+/*
+ * Copyright 2014 CapitalOne, LLC.
+ * Further development Copyright 2022 Sapient Corporation.
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+package com.publicissapient.kpidashboard.job.recommendationcalculation.writer;
+
+import static org.mockito.ArgumentMatchers.anyList;
+import static org.mockito.ArgumentMatchers.anyString;
+import static org.mockito.ArgumentMatchers.eq;
+import static org.mockito.Mockito.never;
+import static org.mockito.Mockito.times;
+import static org.mockito.Mockito.verify;
+
+import java.util.Arrays;
+import java.util.Collections;
+import java.util.List;
+
+import org.junit.jupiter.api.BeforeEach;
+import org.junit.jupiter.api.DisplayName;
+import org.junit.jupiter.api.Nested;
+import org.junit.jupiter.api.Test;
+import org.junit.jupiter.api.extension.ExtendWith;
+import org.mockito.ArgumentCaptor;
+import org.mockito.Mock;
+import org.mockito.junit.jupiter.MockitoExtension;
+import org.springframework.batch.item.Chunk;
+
+import com.publicissapient.kpidashboard.common.model.recommendation.batch.Recommendation;
+import com.publicissapient.kpidashboard.common.model.recommendation.batch.RecommendationsActionPlan;
+import com.publicissapient.kpidashboard.common.repository.recommendation.RecommendationRepository;
+import com.publicissapient.kpidashboard.common.service.ProcessorExecutionTraceLogService;
+
+@ExtendWith(MockitoExtension.class)
+@DisplayName("ProjectItemWriter Tests")
+class ProjectItemWriterTest {
+
+ @Mock
+ private RecommendationRepository recommendationRepository;
+
+ @Mock
+ private ProcessorExecutionTraceLogService processorExecutionTraceLogService;
+
+ private ProjectItemWriter writer;
+
+ private RecommendationsActionPlan recommendation1;
+ private RecommendationsActionPlan recommendation2;
+ private RecommendationsActionPlan recommendation3;
+
+ @BeforeEach
+ void setUp() {
+ writer = new ProjectItemWriter(recommendationRepository, processorExecutionTraceLogService);
+
+ // Create test recommendations
+ Recommendation rec1 = new Recommendation();
+ rec1.setTitle("Test Recommendation 1");
+ rec1.setDescription("Test Description 1");
+ rec1.setActionPlans(Collections.emptyList());
+
+ Recommendation rec2 = new Recommendation();
+ rec2.setTitle("Test Recommendation 2");
+ rec2.setDescription("Test Description 2");
+ rec2.setActionPlans(Collections.emptyList());
+
+ Recommendation rec3 = new Recommendation();
+ rec3.setTitle("Test Recommendation 3");
+ rec3.setDescription("Test Description 3");
+ rec3.setActionPlans(Collections.emptyList());
+
+ recommendation1 = new RecommendationsActionPlan();
+ recommendation1.setBasicProjectConfigId("project-1");
+ recommendation1.setRecommendations(rec1);
+
+ recommendation2 = new RecommendationsActionPlan();
+ recommendation2.setBasicProjectConfigId("project-2");
+ recommendation2.setRecommendations(rec2);
+
+ recommendation3 = new RecommendationsActionPlan();
+ recommendation3.setBasicProjectConfigId("project-3");
+ recommendation3.setRecommendations(rec3);
+ }
+
+ @Nested
+ @DisplayName("Writing Recommendations")
+ class WritingRecommendations {
+
+ @Test
+ @DisplayName("Should save all recommendations in chunk")
+ void write_MultipleRecommendations_SavesAll() {
+ // Arrange
+ Chunk<RecommendationsActionPlan> chunk = new Chunk<>(
+ Arrays.asList(recommendation1, recommendation2, recommendation3));
+
+ // Act
+ writer.write(chunk);
+
+ // Assert
+ ArgumentCaptor<List<RecommendationsActionPlan>> captor = ArgumentCaptor.forClass(List.class);
+ verify(recommendationRepository, times(1)).saveAll(captor.capture());
+
+ List<RecommendationsActionPlan> saved = captor.getValue();
+ org.junit.jupiter.api.Assertions.assertEquals(3, saved.size());
+ org.junit.jupiter.api.Assertions.assertTrue(saved.contains(recommendation1));
+ org.junit.jupiter.api.Assertions.assertTrue(saved.contains(recommendation2));
+ org.junit.jupiter.api.Assertions.assertTrue(saved.contains(recommendation3));
+ }
+
+ @Test
+ @DisplayName("Should save single recommendation")
+ void write_SingleRecommendation_SavesSuccessfully() {
+ // Arrange
+ Chunk<RecommendationsActionPlan> chunk = new Chunk<>(Collections.singletonList(recommendation1));
+
+ // Act
+ writer.write(chunk);
+
+ // Assert
+ ArgumentCaptor<List<RecommendationsActionPlan>> captor = ArgumentCaptor.forClass(List.class);
+ verify(recommendationRepository, times(1)).saveAll(captor.capture());
+
+ List<RecommendationsActionPlan> saved = captor.getValue();
+ org.junit.jupiter.api.Assertions.assertEquals(1, saved.size());
+ org.junit.jupiter.api.Assertions.assertEquals("project-1", saved.get(0).getBasicProjectConfigId());
+ }
+
+ @Test
+ @DisplayName("Should not save when chunk is empty")
+ void write_EmptyChunk_NoSave() {
+ // Arrange
+ Chunk<RecommendationsActionPlan> chunk = new Chunk<>(Collections.emptyList());
+
+ // Act
+ writer.write(chunk);
+
+ // Assert
+ verify(recommendationRepository, never()).saveAll(anyList());
+ verify(processorExecutionTraceLogService, never()).upsertTraceLog(anyString(), anyString(), eq(true),
+ eq(null));
+ }
+
+ @Test
+ @DisplayName("Should filter out null recommendations before saving")
+ void write_ChunkWithNulls_FiltersNulls() {
+ // Arrange
+ Chunk<RecommendationsActionPlan> chunk = new Chunk<>(
+ Arrays.asList(recommendation1, recommendation2, recommendation3));
+
+ // Act
+ writer.write(chunk);
+
+ // Assert
+ ArgumentCaptor<List<RecommendationsActionPlan>> captor = ArgumentCaptor.forClass(List.class);
+ verify(recommendationRepository, times(1)).saveAll(captor.capture());
+
+ List<RecommendationsActionPlan> saved = captor.getValue();
+ org.junit.jupiter.api.Assertions.assertEquals(3, saved.size());
+ org.junit.jupiter.api.Assertions.assertFalse(saved.contains(null));
+ }
+
+ @Test
+ @DisplayName("Should not save when all items are null")
+ void write_AllNullItems_NoSave() {
+ // Arrange
+ Chunk<RecommendationsActionPlan> chunk = new Chunk<>(Collections.emptyList());
+
+ // Act
+ writer.write(chunk);
+
+ // Assert
+ verify(recommendationRepository, never()).saveAll(anyList());
+ }
+
+ @Test
+ @DisplayName("Should handle large chunk size")
+ void write_LargeChunk_SavesAll() {
+ // Arrange
+ int chunkSize = 100;
+ List<RecommendationsActionPlan> items = new java.util.ArrayList<>();
+ for (int i = 0; i < chunkSize; i++) {
+ RecommendationsActionPlan rec = new RecommendationsActionPlan();
+ rec.setBasicProjectConfigId("project-" + i);
+ items.add(rec);
+ }
+ Chunk<RecommendationsActionPlan> chunk = new Chunk<>(items);
+
+ // Act
+ writer.write(chunk);
+
+ // Assert
+ ArgumentCaptor<List<RecommendationsActionPlan>> captor = ArgumentCaptor.forClass(List.class);
+ verify(recommendationRepository, times(1)).saveAll(captor.capture());
+
+ List<RecommendationsActionPlan> saved = captor.getValue();
+ org.junit.jupiter.api.Assertions.assertEquals(chunkSize, saved.size());
+ }
+ }
+
+ @Nested
+ @DisplayName("Trace Logging")
+ class TraceLogging {
+
+ @Test
+ @DisplayName("Should log trace for each saved recommendation")
+ void write_MultipleRecommendations_LogsTraceForEach() {
+ // Arrange
+ Chunk<RecommendationsActionPlan> chunk = new Chunk<>(
+ Arrays.asList(recommendation1, recommendation2, recommendation3));
+
+ // Act
+ writer.write(chunk);
+
+ // Assert
+ verify(processorExecutionTraceLogService, times(3)).upsertTraceLog(eq("recommendation-calculation"), anyString(),
+ eq(true), eq(null));
+ }
+
+ @Test
+ @DisplayName("Should log trace with correct project IDs")
+ void write_Recommendations_LogsWithCorrectProjectIds() {
+ // Arrange
+ Chunk<RecommendationsActionPlan> chunk = new Chunk<>(Arrays.asList(recommendation1, recommendation2));
+
+ // Act
+ writer.write(chunk);
+
+ // Assert
+ verify(processorExecutionTraceLogService).upsertTraceLog(eq("recommendation-calculation"), eq("project-1"), eq(true),
+ eq(null));
+ verify(processorExecutionTraceLogService).upsertTraceLog(eq("recommendation-calculation"), eq("project-2"), eq(true),
+ eq(null));
+ }
+
+ @Test
+ @DisplayName("Should not log trace when chunk is empty")
+ void write_EmptyChunk_NoTraceLog() {
+ // Arrange
+ Chunk<RecommendationsActionPlan> chunk = new Chunk<>(Collections.emptyList());
+
+ // Act
+ writer.write(chunk);
+
+ // Assert
+ verify(processorExecutionTraceLogService, never()).upsertTraceLog(anyString(), anyString(), eq(true),
+ eq(null));
+ }
+
+ @Test
+ @DisplayName("Should only log trace for non-null items")
+ void write_ChunkWithNulls_LogsOnlyForNonNulls() {
+ // Arrange
+ Chunk<RecommendationsActionPlan> chunk = new Chunk<>(Arrays.asList(recommendation1, recommendation2));
+
+ // Act
+ writer.write(chunk);
+
+ // Assert
+ verify(processorExecutionTraceLogService, times(2)).upsertTraceLog(eq("recommendation-calculation"), anyString(),
+ eq(true), eq(null));
+ }
+
+ @Test
+ @DisplayName("Should log trace with success=true")
+ void write_Recommendations_LogsWithSuccessTrue() {
+ // Arrange
+ Chunk<RecommendationsActionPlan> chunk = new Chunk<>(Collections.singletonList(recommendation1));
+
+ // Act
+ writer.write(chunk);
+
+ // Assert
+ verify(processorExecutionTraceLogService).upsertTraceLog(anyString(), anyString(), eq(true), eq(null));
+ }
+
+ @Test
+ @DisplayName("Should log trace with null error message")
+ void write_Recommendations_LogsWithNullError() {
+ // Arrange
+ Chunk<RecommendationsActionPlan> chunk = new Chunk<>(Collections.singletonList(recommendation1));
+
+ // Act
+ writer.write(chunk);
+
+ // Assert
+ verify(processorExecutionTraceLogService).upsertTraceLog(anyString(), anyString(), eq(true), eq(null));
+ }
+ }
+
+ @Nested
+ @DisplayName("Edge Cases")
+ class EdgeCases {
+
+ @Test
+ @DisplayName("Should handle recommendation with null project ID gracefully")
+ void write_NullProjectId_HandlesGracefully() {
+ // Arrange
+ RecommendationsActionPlan recWithNullId = new RecommendationsActionPlan();
+ recWithNullId.setBasicProjectConfigId(null);
+ Chunk<RecommendationsActionPlan> chunk = new Chunk<>(Collections.singletonList(recWithNullId));
+
+ // Act
+ writer.write(chunk);
+
+ // Assert
+ verify(recommendationRepository, times(1)).saveAll(anyList());
+ verify(processorExecutionTraceLogService).upsertTraceLog(eq("recommendation-calculation"), eq(null), eq(true),
+ eq(null));
+ }
+
+ @Test
+ @DisplayName("Should handle recommendation with empty project ID")
+ void write_EmptyProjectId_HandlesGracefully() {
+ // Arrange
+ RecommendationsActionPlan recWithEmptyId = new RecommendationsActionPlan();
+ recWithEmptyId.setBasicProjectConfigId("");
+ Chunk<RecommendationsActionPlan> chunk = new Chunk<>(Collections.singletonList(recWithEmptyId));
+
+ // Act
+ writer.write(chunk);
+
+ // Assert
+ verify(recommendationRepository, times(1)).saveAll(anyList());
+ verify(processorExecutionTraceLogService).upsertTraceLog(eq("recommendation-calculation"), eq(""), eq(true), eq(null));
+ }
+
+ @Test
+ @DisplayName("Should handle valid recommendations")
+ void write_MixedNullAndValid_ProcessesValidOnes() {
+ // Arrange
+ Chunk<RecommendationsActionPlan> chunk = new Chunk<>(Arrays.asList(recommendation1, recommendation2));
+
+ // Act
+ writer.write(chunk);
+
+ // Assert
+ ArgumentCaptor<List<RecommendationsActionPlan>> captor = ArgumentCaptor.forClass(List.class);
+ verify(recommendationRepository, times(1)).saveAll(captor.capture());
+
+ List<RecommendationsActionPlan> saved = captor.getValue();
+ org.junit.jupiter.api.Assertions.assertEquals(2, saved.size());
+ verify(processorExecutionTraceLogService, times(2)).upsertTraceLog(anyString(), anyString(), eq(true),
+ eq(null));
+ }
+ }
+
+ @Nested
+ @DisplayName("Integration Behavior")
+ class IntegrationBehavior {
+
+ @Test
+ @DisplayName("Should call saveAll before logging traces")
+ void write_Recommendations_SavesBeforeLogging() {
+ // Arrange
+ Chunk<RecommendationsActionPlan> chunk = new Chunk<>(Collections.singletonList(recommendation1));
+ org.mockito.InOrder inOrder = org.mockito.Mockito.inOrder(recommendationRepository,
+ processorExecutionTraceLogService);
+
+ // Act
+ writer.write(chunk);
+
+ // Assert
+ inOrder.verify(recommendationRepository).saveAll(anyList());
+ inOrder.verify(processorExecutionTraceLogService).upsertTraceLog(anyString(), anyString(), eq(true),
+ eq(null));
+ }
+
+ @Test
+ @DisplayName("Should process all recommendations in sequence")
+ void write_MultipleRecommendations_ProcessesInSequence() {
+ // Arrange
+ Chunk<RecommendationsActionPlan> chunk = new Chunk<>(
+ Arrays.asList(recommendation1, recommendation2, recommendation3));
+
+ // Act
+ writer.write(chunk);
+
+ // Assert
+ verify(recommendationRepository, times(1)).saveAll(anyList());
+ verify(processorExecutionTraceLogService, times(3)).upsertTraceLog(anyString(), anyString(), eq(true),
+ eq(null));
+ }
+ }
+}
diff --git a/pom.xml b/pom.xml
index 92ec6b518..401d89606 100644
--- a/pom.xml
+++ b/pom.xml
@@ -254,6 +254,11 @@
atlassian-public
https://packages.atlassian.com/maven/repository/public
+
+ github
+ GitHub Packages
+ https://maven.pkg.github.com/PublicisSapient/knowhow-ai-gateway-client
+