 import java.net.URI;
 import java.net.URISyntaxException;
 import java.net.UnknownServiceException;
+import java.util.AbstractMap;
 import java.util.AbstractMap.SimpleEntry;
 import java.util.HashMap;
 import java.util.Optional;
@@ -138,7 +139,7 @@ protected void execute(@NotNull ExecutionEnvironment environment, @Nullable Call |
         debugProcessPhaser = new Phaser(1);
 
         Observable.create((Observable.OnSubscribe<String>) ob ->
-                createDebugJobSession(submitModel).subscribe(debugJobClusterPair -> {
+                createDebugJobSession(submitModel, ob).subscribe(debugJobClusterPair -> {
                     final SparkBatchRemoteDebugJob remoteDebugJob = debugJobClusterPair.getKey();
                     final IClusterDetail clusterDetail = debugJobClusterPair.getValue();
                     final CredentialsProvider credentialsProvider = new BasicCredentialsProvider();
@@ -176,16 +177,9 @@ protected void execute(@NotNull ExecutionEnvironment environment, @Nullable Call |
 
                     ob.onNext("Info: Spark Job Driver debugging started.");
 
-                    Subscription livyLogSubscription = submitModel
-                            .jobLogObservable(remoteDebugJob.getBatchId(), clusterDetail)
-                            .subscribeOn(Schedulers.io())
-                            .subscribe();
-
                     // Await for all debug processes finish
                     debugProcessPhaser.arriveAndAwaitAdvance();
                     ob.onCompleted();
-
-                    livyLogSubscription.unsubscribe();
                 }, ob::onError))
                 .subscribe(
                         info -> HDInsightUtil.showInfoOnSubmissionMessageWindow(project, info),
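The manually managed Livy log subscription removed in this hunk is not lost: createDebugJobSession (final hunk below) now forwards the submission log itself. What execute() still relies on here is the Phaser handshake: the emitter is registered as one party at construction (`new Phaser(1)`) and blocks in arriveAndAwaitAdvance() until every debug process has arrived, only then completing the observable. A minimal, self-contained sketch of that coordination pattern follows; the worker threads and counts are hypothetical, not from this commit:

```java
import java.util.concurrent.Phaser;

public class PhaserAwaitSketch {
    public static void main(String[] args) {
        // One registered party: the coordinator itself, mirroring `new Phaser(1)`.
        Phaser debugProcessPhaser = new Phaser(1);

        // Hypothetical debug processes; each registers before it starts.
        for (int i = 0; i < 3; i++) {
            debugProcessPhaser.register();
            int id = i;
            new Thread(() -> {
                System.out.println("debug process " + id + " running");
                // Arrive and drop out; the phase advances once all parties arrive.
                debugProcessPhaser.arriveAndDeregister();
            }).start();
        }

        // Blocks until every registered debug process has arrived,
        // as execute() does before calling ob.onCompleted().
        debugProcessPhaser.arriveAndAwaitAdvance();
        System.out.println("all debug processes finished");
    }
}
```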
@@ -252,7 +246,7 @@ private ExecutionEnvironment buildChildEnvironment(@NotNull ExecutionEnvironment |
      * Create a Debug Spark Job session with building, deploying and submitting
      */
     private Single<SimpleEntry<SparkBatchRemoteDebugJob, IClusterDetail>> createDebugJobSession(
-            @NotNull SparkSubmitModel submitModel) {
+            @NotNull SparkSubmitModel submitModel, Subscriber<? super String> debugSessionSub) {
         SparkSubmissionParameter submissionParameter = submitModel.getSubmissionParameter();
         String selectedClusterName = submissionParameter.getClusterName();
 
@@ -285,11 +279,23 @@ private Single<SimpleEntry<SparkBatchRemoteDebugJob, IClusterDetail>> createDebu |
                                 submitModel.tryToCreateBatchSparkDebugJob(selectedClusterDetail);
                         setDebugJob(remoteDebugJob);
 
+                        return new SimpleEntry<>(remoteDebugJob, selectedClusterDetail);
+                    } catch (Exception e) {
+                        HDInsightUtil.setJobRunningStatus(submitModel.getProject(), false);
+                        throw propagate(e);
+                    }})
+                .flatMap(jobClusterPair -> jobClusterPair.getKey().getSubmissionLog()
+                        .map(AbstractMap.SimpleImmutableEntry::getValue)
+                        .doOnNext(debugSessionSub::onNext)
+                        .doOnError(debugSessionSub::onError)
+                        .last()
+                        .toSingle()
+                        .map(message -> jobClusterPair))
+                .doOnSuccess(jobClusterPair -> {
+                    try {
                         SparkBatchDebugSession session = createSparkBatchDebugSession(
-                                selectedClusterDetail.getConnectionUrl(), submitModel.getAdvancedConfigModel()).open();
+                                jobClusterPair.getValue().getConnectionUrl(), submitModel.getAdvancedConfigModel()).open();
                         setDebugSession(session);
-
-                        return new SimpleEntry<>(remoteDebugJob, selectedClusterDetail);
                     } catch (Exception e) {
                         HDInsightUtil.setJobRunningStatus(submitModel.getProject(), false);
                         throw propagate(e);
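The core of this final hunk: createDebugJobSession now forwards each Livy submission-log line to the outer subscriber (doOnNext/doOnError) and only lets its Single succeed after the log stream completes (last().toSingle()), so the debug session is opened in doOnSuccess once submission logging is done. A minimal RxJava 1.x sketch of that forwarding shape follows; submissionLog() and the job-id value are hypothetical stand-ins, not this project's API:

```java
import rx.Observable;
import rx.Single;
import rx.Subscriber;

public class LogForwardingSketch {
    // Hypothetical stand-in for remoteDebugJob.getSubmissionLog().
    static Observable<String> submissionLog() {
        return Observable.just("Starting Livy session...", "Session ready.");
    }

    // Simplified stand-in for this commit's createDebugJobSession: mirror every
    // log line to the outer subscriber, and emit the job handle downstream only
    // after the log stream completes.
    static Single<String> createDebugJobSession(Subscriber<? super String> debugSessionSub) {
        return Single.just("job-1")
                .flatMap(jobId -> submissionLog()
                        .doOnNext(debugSessionSub::onNext)   // forward log lines outward
                        .doOnError(debugSessionSub::onError) // propagate failures
                        .last()                              // wait for the log to finish
                        .toSingle()
                        .map(lastLine -> jobId));            // then hand the job on
    }

    public static void main(String[] args) {
        // Mirrors the shape of execute(): the outer Observable's subscriber is
        // threaded into the session builder so log lines surface as info messages.
        Observable.create((Observable.OnSubscribe<String>) ob ->
                createDebugJobSession(ob).subscribe(
                        jobId -> {
                            ob.onNext("Info: " + jobId + " debugging started.");
                            ob.onCompleted();
                        },
                        ob::onError))
                .subscribe(System.out::println, Throwable::printStackTrace);
    }
}
```

One consequence of this design: the Single does not fire until the submission log ends, which is what lets the doOnSuccess step open the SparkBatchDebugSession only after the cluster has finished reporting submission progress.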