15 | 15 |
16 | 16 | package com.webank.wedatasphere.streamis.jobmanager.restful.api; |
17 | 17 |
| 18 | +import com.fasterxml.jackson.core.JsonProcessingException; |
18 | 19 | import com.github.pagehelper.PageHelper; |
19 | 20 | import com.github.pagehelper.PageInfo; |
20 | 21 | import com.webank.wedatasphere.streamis.jobmanager.exception.JobException; |
21 | 22 | import com.webank.wedatasphere.streamis.jobmanager.exception.JobExceptionManager; |
22 | 23 | import com.webank.wedatasphere.streamis.jobmanager.launcher.job.JobInfo; |
23 | 24 | import com.webank.wedatasphere.streamis.jobmanager.launcher.job.manager.JobLaunchManager; |
24 | 25 | import com.webank.wedatasphere.streamis.jobmanager.launcher.linkis.entity.LogRequestPayload; |
| 26 | +import com.webank.wedatasphere.streamis.jobmanager.launcher.linkis.job.FlinkJobInfo; |
| 27 | +import com.webank.wedatasphere.streamis.jobmanager.manager.conf.JobConf; |
25 | 28 | import com.webank.wedatasphere.streamis.jobmanager.manager.entity.MetaJsonInfo; |
26 | 29 | import com.webank.wedatasphere.streamis.jobmanager.manager.entity.StreamJob; |
27 | 30 | import com.webank.wedatasphere.streamis.jobmanager.manager.entity.StreamJobVersion; |
| 31 | +import com.webank.wedatasphere.streamis.jobmanager.manager.entity.StreamTask; |
28 | 32 | import com.webank.wedatasphere.streamis.jobmanager.manager.entity.vo.*; |
29 | 33 | import com.webank.wedatasphere.streamis.jobmanager.manager.project.service.ProjectPrivilegeService; |
30 | 34 | import com.webank.wedatasphere.streamis.jobmanager.manager.service.StreamJobService; |
31 | 35 | import com.webank.wedatasphere.streamis.jobmanager.manager.service.StreamTaskService; |
32 | 36 | import com.webank.wedatasphere.streamis.jobmanager.manager.transform.entity.StreamisTransformJobContent; |
| 37 | +import com.webank.wedatasphere.streamis.jobmanager.manager.utils.StreamTaskUtils; |
| 38 | +import org.apache.commons.collections.CollectionUtils; |
33 | 39 | import org.apache.commons.lang.exception.ExceptionUtils; |
34 | 40 | import org.apache.commons.lang3.StringUtils; |
| 41 | +import org.apache.linkis.httpclient.dws.DWSHttpClient; |
35 | 42 | import org.apache.linkis.server.Message; |
36 | 43 | import org.apache.linkis.server.security.SecurityFilter; |
37 | 44 | import org.slf4j.Logger; |
43 | 50 | import javax.annotation.Resource; |
44 | 51 | import javax.servlet.http.HttpServletRequest; |
45 | 52 | import java.io.IOException; |
46 | | -import java.util.ArrayList; |
47 | | -import java.util.List; |
48 | | -import java.util.Map; |
49 | | -import java.util.Objects; |
| 53 | +import java.util.*; |
50 | 54 |
51 | 55 | @RequestMapping(path = "/streamis/streamJobManager/job") |
52 | 56 | @RestController |
@@ -210,7 +214,7 @@ public Message detailsJob(HttpServletRequest req, @RequestParam(value = "jobId", |
210 | 214 | realTimeTrafficDTOS.add(realTimeTrafficDTO); |
211 | 215 |
212 | 216 |
213 | | - jobDetailsVO.setLinkisJobInfo(streamTaskService.getTask(jobId,version)); |
| 217 | + jobDetailsVO.setLinkisJobInfo(streamTaskService.getTaskJobInfo(jobId,version)); |
214 | 218 | jobDetailsVO.setDataNumber(dataNumberDTOS); |
215 | 219 | jobDetailsVO.setLoadCondition(loadConditionDTOs); |
216 | 220 | jobDetailsVO.setRealTimeTraffic(realTimeTrafficDTOS); |
@@ -238,6 +242,102 @@ public Message executeHistoryJob(HttpServletRequest req, |
238 | 242 | return Message.ok().data("details", details); |
239 | 243 | } |
240 | 244 |
| 245 | + @RequestMapping(path = "/addTask", method = RequestMethod.GET) |
| 246 | + public Message addTask(HttpServletRequest req, |
| 247 | + @RequestParam(value = "jobName") String jobName, |
| 248 | + @RequestParam(value = "appId") String appId, |
| 249 | + @RequestParam(value = "appUrl") String appUrl) { |
| 250 | + String username = SecurityFilter.getLoginUsername(req); |
| 251 | + LOG.info("User {} is trying to add a new task for Streamis job {} with appId: {}, appUrl: {}.", username, jobName, appId, appUrl); |
| 252 | + List<StreamJob> streamJobs = streamJobService.getJobByName(jobName); |
| 253 | + if(CollectionUtils.isEmpty(streamJobs)) { |
| 254 | + return Message.error("Streamis job " + jobName + " does not exist."); |
| 255 | + } else if(streamJobs.size() > 1) { |
| 256 | + return Message.error("Too many Streamis jobs named " + jobName + ", we cannot distinguish between them."); |
| 257 | + } else if(!"spark.jar".equals(streamJobs.get(0).getJobType())) { |
| 258 | + return Message.error("Only spark.jar jobs support adding new tasks."); |
| 259 | + } |
| 260 | + if (!streamJobService.hasPermission(streamJobs.get(0), username) && |
| 261 | + !this.privilegeService.hasEditPrivilege(req, streamJobs.get(0).getProjectName())) { |
| 262 | + return Message.error("No permission to add a new task for StreamJob [" + jobName + "]."); |
| 263 | + } |
| 264 | + // If a running task exists, stop it first. |
| 265 | + StreamTask streamTask = streamTaskService.getLatestTaskByJobId(streamJobs.get(0).getId()); |
| 266 | + if(streamTask != null && JobConf.isRunning(streamTask.getStatus())) { |
| 267 | + LOG.warn("Streamis Job {} has a running task, updating its status to stopped first.", jobName); |
| 268 | + streamTask.setStatus((Integer) JobConf.FLINK_JOB_STATUS_STOPPED().getValue()); |
| 269 | + streamTaskService.updateTask(streamTask); |
| 270 | + } else { |
| 271 | + // As a shortcut, pick a historical Flink job that this user has permission on in this project, and reuse its jobId and jobInfo for this Spark Streaming job. |
| 272 | + // The yarn information inside that JobInfo is then replaced, so the frontend can display the Spark Streaming job without changing any of its logic. |
| 273 | + PageInfo<QueryJobListVo> jobList = streamJobService.getByProList(streamJobs.get(0).getProjectName(), username, null, 0, null); |
| 274 | + // Pick a job that has already run (status > 0), so that its latest |
| 275 | + // task carries a Linkis jobId and jobInfo which can be reused for |
| 276 | + // this Spark Streaming job; if none is found, the registration is |
| 277 | + // rejected below. |
| 278 | + Optional<QueryJobListVo> copyJob = jobList.getList().stream() |
| 279 | + .filter(job -> job.getStatus() > 0) |
| 280 | + .findFirst(); |
| 281 | + if(!copyJob.isPresent()) { |
| 282 | + return Message.error("No submitted Flink job was found, so this Spark Streaming job cannot be registered to Streamis."); |
| 283 | + } |
| 284 | + StreamTask copyTask = streamTaskService.getLatestTaskByJobId(copyJob.get().getId()); |
| 285 | + LOG.warn("Streamis Job {} will bind the linkisJobInfo from history Flink Job {} with linkisJobId: {}, linkisJobInfo: {}.", |
| 286 | + jobName, copyJob.get().getName(), copyTask.getLinkisJobId(), copyTask.getLinkisJobInfo()); |
| 287 | + streamTask = streamTaskService.createTask(streamJobs.get(0).getId(), (Integer) JobConf.FLINK_JOB_STATUS_RUNNING().getValue(), username); |
| 288 | + streamTask.setLinkisJobId(copyTask.getLinkisJobId()); |
| 289 | + streamTask.setLinkisJobInfo(copyTask.getLinkisJobInfo()); |
| 290 | + } |
| 291 | + streamTask.setStartTime(new Date()); |
| 292 | + streamTask.setLastUpdateTime(new Date()); |
| 293 | + FlinkJobInfo flinkJobInfo; |
| 294 | + try { |
| 295 | + flinkJobInfo = DWSHttpClient.jacksonJson().readValue(streamTask.getLinkisJobInfo(), FlinkJobInfo.class); |
| 296 | + } catch (JsonProcessingException e) { |
| 297 | + LOG.error("Job {} failed to deserialize the jobInfo copied from the history job!", jobName, e); |
| 298 | + return Message.error("Failed to deserialize the jobInfo from the history job!"); |
| 299 | + } |
| 300 | + flinkJobInfo.setApplicationId(appId); |
| 301 | + flinkJobInfo.setApplicationUrl(appUrl); |
| 302 | + flinkJobInfo.setName(jobName); |
| 303 | + flinkJobInfo.setStatus(JobConf.getStatusString((Integer) JobConf.FLINK_JOB_STATUS_RUNNING().getValue())); |
| 304 | + StreamTaskUtils.refreshInfo(streamTask, flinkJobInfo); |
| 305 | + streamTaskService.updateTask(streamTask); |
| 306 | + LOG.info("Streamis Job {} has added a new task successfully.", jobName); |
| 307 | + return Message.ok(); |
| 308 | + } |
| 309 | + |
| 310 | + @RequestMapping(path = "/stopTask", method = RequestMethod.GET) |
| 311 | + public Message stopTask(HttpServletRequest req, |
| 312 | + @RequestParam(value = "jobName") String jobName, |
| 313 | + @RequestParam(value = "appId") String appId, |
| 314 | + @RequestParam(value = "appUrl") String appUrl) { |
| 315 | + String username = SecurityFilter.getLoginUsername(req); |
| 316 | + LOG.info("User {} is trying to stop the task of Streamis job {} with appId: {}, appUrl: {}.", username, jobName, appId, appUrl); |
| 317 | + List<StreamJob> streamJobs = streamJobService.getJobByName(jobName); |
| 318 | + if(CollectionUtils.isEmpty(streamJobs)) { |
| 319 | + return Message.error("Streamis job " + jobName + " does not exist."); |
| 320 | + } else if(streamJobs.size() > 1) { |
| 321 | + return Message.error("Too many Streamis jobs named " + jobName + ", we cannot distinguish between them."); |
| 322 | + } else if(!"spark.jar".equals(streamJobs.get(0).getJobType())) { |
| 323 | + return Message.error("Only spark.jar jobs support stopping tasks."); |
| 324 | + } |
| 325 | + if (!streamJobService.hasPermission(streamJobs.get(0), username) && |
| 326 | + !this.privilegeService.hasEditPrivilege(req, streamJobs.get(0).getProjectName())) { |
| 327 | + return Message.error("No permission to stop the task of StreamJob [" + jobName + "]."); |
| 328 | + } |
| 329 | + // If a running task exists, stop it. |
| 330 | + StreamTask streamTask = streamTaskService.getLatestTaskByJobId(streamJobs.get(0).getId()); |
| 331 | + if(streamTask != null && JobConf.isRunning(streamTask.getStatus())) { |
| 332 | + LOG.warn("Streamis Job {} has a running task, updating its status to stopped.", jobName); |
| 333 | + streamTask.setStatus((Integer) JobConf.FLINK_JOB_STATUS_STOPPED().getValue()); |
| 334 | + streamTaskService.updateTask(streamTask); |
| 335 | + } else { |
| 336 | + LOG.warn("Streamis Job {} has no running task, nothing to stop.", jobName); |
| 337 | + } |
| 338 | + return Message.ok(); |
| 339 | + } |
| 340 | + |
241 | 341 | @RequestMapping(path = "/progress", method = RequestMethod.GET) |
242 | 342 | public Message progressJob(HttpServletRequest req, @RequestParam(value = "jobId", required = false) Long jobId, |
243 | 343 | @RequestParam(value = "version", required = false) String version) throws IOException, JobException { |
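For reference, below is a minimal client-side sketch (not part of this change) of how an external Spark Streaming launcher might call the two new endpoints. The gateway address, the /api/rest_j/v1 prefix and the login cookie are assumptions that depend on the Linkis/Streamis deployment; both endpoints return a standard Linkis Message JSON.

import java.net.URI;
import java.net.URLEncoder;
import java.net.http.HttpClient;
import java.net.http.HttpRequest;
import java.net.http.HttpResponse;
import java.nio.charset.StandardCharsets;

public class StreamisTaskRegistrar {

    // Assumed, deployment-specific values: replace with the real gateway address and login cookie.
    private static final String BASE_URL =
            "http://streamis-gateway:9088/api/rest_j/v1/streamis/streamJobManager/job";
    private static final String LOGIN_COOKIE = "linkis_user_session_ticket_id_v1=<ticket>";

    private static final HttpClient CLIENT = HttpClient.newHttpClient();

    /** Registers a running Spark Streaming application as a new task of the given Streamis job. */
    public static String addTask(String jobName, String appId, String appUrl) throws Exception {
        return call("/addTask", jobName, appId, appUrl);
    }

    /** Marks the running task of the given Streamis job as stopped. */
    public static String stopTask(String jobName, String appId, String appUrl) throws Exception {
        return call("/stopTask", jobName, appId, appUrl);
    }

    private static String call(String path, String jobName, String appId, String appUrl) throws Exception {
        String query = "jobName=" + encode(jobName) + "&appId=" + encode(appId) + "&appUrl=" + encode(appUrl);
        HttpRequest request = HttpRequest.newBuilder(URI.create(BASE_URL + path + "?" + query))
                .header("Cookie", LOGIN_COOKIE)
                .GET() // both endpoints are declared with RequestMethod.GET in the controller
                .build();
        HttpResponse<String> response = CLIENT.send(request, HttpResponse.BodyHandlers.ofString());
        return response.body(); // Linkis Message JSON, e.g. {"status":0,"message":"OK", ...}
    }

    private static String encode(String value) {
        return URLEncoder.encode(value, StandardCharsets.UTF_8);
    }
}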