
Commit 73de977

committed
fix: queryDataDevelopNodeList bug 1
1 parent 97d1d11 commit 73de977

File tree

1 file changed (+10, -5 lines)
  • dss-orchestrator/orchestrators/dss-workflow/dss-workflow-server/src/main/java/com/webank/wedatasphere/dss/workflow/service/impl


dss-orchestrator/orchestrators/dss-workflow/dss-workflow-server/src/main/java/com/webank/wedatasphere/dss/workflow/service/impl/DSSFlowServiceImpl.java

Lines changed: 10 additions & 5 deletions
@@ -1850,23 +1850,28 @@ public List<DataDevelopNodeInfo> dataDevelopNodeResultFilter(DataDevelopNodeRequ

        // Added Spark-related property filtering
        if (!StringUtils.isBlank(request.getSparkDriverMemory()) && flag) {
-           flag = request.getSparkDriverMemory().contains(dataDevelopNodeInfo.getSparkDriverMemory());
+           flag = StringUtils.isNotEmpty(dataDevelopNodeInfo.getSparkDriverMemory())
+                   && dataDevelopNodeInfo.getSparkDriverMemory().contains(request.getSparkDriverMemory());
        }

        if (!StringUtils.isBlank(request.getSparkExecutorMemory()) && flag) {
-           flag = request.getSparkExecutorMemory().contains(dataDevelopNodeInfo.getSparkExecutorMemory());
+           flag = StringUtils.isNotEmpty(dataDevelopNodeInfo.getSparkExecutorMemory())
+                   && dataDevelopNodeInfo.getSparkExecutorMemory().contains(request.getSparkExecutorMemory());
        }

        if (!StringUtils.isBlank(request.getSparkExecutorCore()) && flag) {
-           flag = request.getSparkExecutorCore().contains(dataDevelopNodeInfo.getSparkExecutorCore());
+           flag = StringUtils.isNotEmpty(dataDevelopNodeInfo.getSparkExecutorCore())
+                   && dataDevelopNodeInfo.getSparkExecutorCore().contains(request.getSparkExecutorCore());
        }

        if (!StringUtils.isBlank(request.getSparkConf()) && flag) {
-           flag = request.getSparkConf().contains(dataDevelopNodeInfo.getSparkConf());
+           flag = StringUtils.isNotEmpty(dataDevelopNodeInfo.getSparkConf())
+                   && dataDevelopNodeInfo.getSparkConf().contains(request.getSparkConf());
        }

        if (!StringUtils.isBlank(request.getSparkExecutorInstances()) && flag) {
-           flag = request.getSparkExecutorInstances().contains(dataDevelopNodeInfo.getSparkExecutorInstances());
+           flag = StringUtils.isNotEmpty(dataDevelopNodeInfo.getSparkExecutorInstances())
+                   && dataDevelopNodeInfo.getSparkExecutorInstances().contains(request.getSparkExecutorInstances());
        }

        // Added executeCluster filtering
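
For context on the fix: the old checks asked whether the requested filter string contained the node's value, which inverted the match direction and could throw a NullPointerException when a node had no value set for that Spark property (String.contains rejects null arguments). The new checks guard on the node value being non-empty and test containment the other way round. Below is a minimal, self-contained sketch of that corrected predicate; the SparkPropertyFilterSketch class and its matches helper are illustrative assumptions, not part of DSSFlowServiceImpl.

import org.apache.commons.lang3.StringUtils;

// Standalone sketch only; the class and helper names are hypothetical and not in the patched file.
public class SparkPropertyFilterSketch {

    // A property passes the filter when no filter value was requested, or when the
    // node's value is non-empty and contains the requested substring.
    static boolean matches(String requested, String nodeValue) {
        if (StringUtils.isBlank(requested)) {
            return true; // no filter applied to this property
        }
        return StringUtils.isNotEmpty(nodeValue) && nodeValue.contains(requested);
    }

    public static void main(String[] args) {
        System.out.println(matches("2g", "2g"));  // true  (node value contains the request)
        System.out.println(matches("2g", null));  // false (the old direction would have thrown an NPE here)
        System.out.println(matches(null, null));  // true  (no filter requested)
    }
}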
