Skip to content

Commit 97d1d11

Browse files
committed
queryDataDevelopNodeList 更改 spark参数,精准匹配改为 模糊匹配
1 parent d7d8726 commit 97d1d11

File tree

1 file changed

+5
-5
lines changed
  • dss-orchestrator/orchestrators/dss-workflow/dss-workflow-server/src/main/java/com/webank/wedatasphere/dss/workflow/service/impl

1 file changed

+5
-5
lines changed

dss-orchestrator/orchestrators/dss-workflow/dss-workflow-server/src/main/java/com/webank/wedatasphere/dss/workflow/service/impl/DSSFlowServiceImpl.java

Lines changed: 5 additions & 5 deletions
Original file line numberDiff line numberDiff line change
@@ -1850,23 +1850,23 @@ public List<DataDevelopNodeInfo> dataDevelopNodeResultFilter(DataDevelopNodeRequ
18501850

18511851
// 新增 Spark 相关属性筛选
18521852
if (!StringUtils.isBlank(request.getSparkDriverMemory()) && flag) {
1853-
flag = request.getSparkDriverMemory().equals(dataDevelopNodeInfo.getSparkDriverMemory());
1853+
flag = request.getSparkDriverMemory().contains(dataDevelopNodeInfo.getSparkDriverMemory());
18541854
}
18551855

18561856
if (!StringUtils.isBlank(request.getSparkExecutorMemory()) && flag) {
1857-
flag = request.getSparkExecutorMemory().equals(dataDevelopNodeInfo.getSparkExecutorMemory());
1857+
flag = request.getSparkExecutorMemory().contains(dataDevelopNodeInfo.getSparkExecutorMemory());
18581858
}
18591859

18601860
if (!StringUtils.isBlank(request.getSparkExecutorCore()) && flag) {
1861-
flag = request.getSparkExecutorCore().equals(dataDevelopNodeInfo.getSparkExecutorCore());
1861+
flag = request.getSparkExecutorCore().contains(dataDevelopNodeInfo.getSparkExecutorCore());
18621862
}
18631863

18641864
if (!StringUtils.isBlank(request.getSparkConf()) && flag) {
1865-
flag = request.getSparkConf().equals(dataDevelopNodeInfo.getSparkConf());
1865+
flag = request.getSparkConf().contains(dataDevelopNodeInfo.getSparkConf());
18661866
}
18671867

18681868
if (!StringUtils.isBlank(request.getSparkExecutorInstances()) && flag) {
1869-
flag = request.getSparkExecutorInstances().equals(dataDevelopNodeInfo.getSparkExecutorInstances());
1869+
flag = request.getSparkExecutorInstances().contains(dataDevelopNodeInfo.getSparkExecutorInstances());
18701870
}
18711871

18721872
// 新增 executeCluster 筛选

0 commit comments

Comments (0)