Skip to content

Commit 8fe1ca1

Browse files
committed
Enable the normal user to link a cluster
Signed-off-by: Wei Zhang <[email protected]>
1 parent 4c13b21 commit 8fe1ca1

File tree

10 files changed

+194
-207
lines changed

10 files changed

+194
-207
lines changed

PluginsAndFeatures/azure-toolkit-for-intellij/src/com/microsoft/azure/hdinsight/serverexplore/ui/AddNewClusterFrom.form

Lines changed: 117 additions & 133 deletions
Large diffs are not rendered by default.

PluginsAndFeatures/azure-toolkit-for-intellij/src/com/microsoft/azure/hdinsight/serverexplore/ui/AddNewClusterFrom.java

Lines changed: 17 additions & 22 deletions
Original file line numberDiff line numberDiff line change
@@ -30,10 +30,12 @@
3030
import com.intellij.openapi.ui.popup.IconButton;
3131
import com.microsoft.azure.hdinsight.common.ClusterManagerEx;
3232
import com.microsoft.azure.hdinsight.sdk.cluster.HDInsightAdditionalClusterDetail;
33+
import com.microsoft.azure.hdinsight.sdk.common.AuthenticationException;
3334
import com.microsoft.azure.hdinsight.sdk.common.HDIException;
3435
import com.microsoft.azure.hdinsight.sdk.storage.HDStorageAccount;
3536
import com.microsoft.azure.hdinsight.serverexplore.AddHDInsightAdditionalClusterImpl;
3637
import com.microsoft.azure.hdinsight.serverexplore.hdinsightnode.HDInsightRootModule;
38+
import com.microsoft.azure.hdinsight.spark.jobs.JobUtils;
3739
import com.microsoft.azuretools.azurecommons.helpers.AzureCmdException;
3840
import com.microsoft.azuretools.azurecommons.helpers.StringHelper;
3941
import com.microsoft.azuretools.telemetry.AppInsightsClient;
@@ -146,15 +148,6 @@ private void refreshContainers(@NotNull ClientStorageAccount storageAccount) {
146148
}
147149
}
148150

149-
// Optional<HDStorageAccount> getBlobStorageAccount() {
150-
// if (StringUtils.isBlank(storageName) || StringUtils.isBlank(storageKey) || StringUtils.isBlank(storageContainer)) {
151-
// return Optional.empty();
152-
// }
153-
//
154-
// return Optional.of(new HDStorageAccount(
155-
// null, ClusterManagerEx.getInstance().getBlobFullName(storageName), storageKey, true, storageContainer));
156-
// }
157-
158151
private class HelpAction extends AbstractAction {
159152
private HelpAction() {
160153
this.putValue("Name", CommonBundle.getHelpButtonText());
@@ -192,6 +185,7 @@ protected void doOKAction() {
192185
clusterNameLabel,
193186
storageNameLabel,
194187
storageKeyLabel,
188+
storageContainerLabel,
195189
userNameLabel,
196190
passwordLabel);
197191

@@ -228,15 +222,24 @@ protected void doOKAction() {
228222

229223
if (isCarryOnNextStep) {
230224
getStorageAccount();
231-
}
232225

233-
if (isCarryOnNextStep) {
234-
if (storageAccount != null) {
226+
if (storageAccount == null) {
227+
isCarryOnNextStep = false;
228+
} else {
235229
HDInsightAdditionalClusterDetail hdInsightAdditionalClusterDetail = new HDInsightAdditionalClusterDetail(clusterName, userName, password, storageAccount);
236-
ClusterManagerEx.getInstance().addHDInsightAdditionalCluster(hdInsightAdditionalClusterDetail);
237-
hdInsightModule.refreshWithoutAsync();
230+
try {
231+
JobUtils.authenticate(hdInsightAdditionalClusterDetail);
232+
233+
ClusterManagerEx.getInstance().addHDInsightAdditionalCluster(hdInsightAdditionalClusterDetail);
234+
hdInsightModule.refreshWithoutAsync();
235+
} catch (Exception ignore) {
236+
isCarryOnNextStep = false;
237+
errorMessage = "Wrong username/password to log in";
238+
}
238239
}
240+
}
239241

242+
if (isCarryOnNextStep) {
240243
super.doOKAction();
241244
} else {
242245
errorMessageField.setText(errorMessage);
@@ -258,14 +261,6 @@ private void getStorageAccount() {
258261
addNewClusterPanel.setCursor(Cursor.getPredefinedCursor(Cursor.WAIT_CURSOR));
259262

260263
ApplicationManager.getApplication().invokeAndWait(() -> {
261-
// try {
262-
// storageAccount = AddHDInsightAdditionalClusterImpl.getStorageAccount(clusterName, storageName, storageKey, userName, password);
263-
// isCarryOnNextStep = true;
264-
// } catch (AzureCmdException | HDIException e) {
265-
// isCarryOnNextStep = false;
266-
// errorMessage = e.getMessage();
267-
// }
268-
269264
storageAccount = new HDStorageAccount(
270265
null, ClusterManagerEx.getInstance().getBlobFullName(storageName), storageKey, false, storageContainer);
271266
isCarryOnNextStep = true;

PluginsAndFeatures/azure-toolkit-for-intellij/src/com/microsoft/azure/hdinsight/spark/common/SparkSubmitModel.java

Lines changed: 0 additions & 6 deletions
Original file line numberDiff line numberDiff line change
@@ -42,8 +42,6 @@
4242
import com.microsoft.azure.hdinsight.sdk.common.HDIException;
4343
import com.microsoft.azure.hdinsight.sdk.common.HttpResponse;
4444
import com.microsoft.azure.hdinsight.sdk.common.NotSupportExecption;
45-
import com.microsoft.azure.hdinsight.sdk.storage.HDStorageAccount;
46-
import com.microsoft.azure.hdinsight.sdk.storage.IHDIStorageAccount;
4745
import com.microsoft.azure.hdinsight.spark.jobs.JobUtils;
4846
import com.microsoft.azure.hdinsight.spark.uihelper.InteractiveTableModel;
4947
import com.microsoft.azuretools.azurecommons.helpers.StringHelper;
@@ -67,7 +65,6 @@
6765
import java.util.stream.Stream;
6866

6967
import static com.microsoft.azure.hdinsight.spark.common.SparkSubmitAdvancedConfigModel.SUBMISSION_CONTENT_SSH_CERT;
70-
import static com.microsoft.azure.hdinsight.spark.jobs.JobUtils.updateSparkJobSubmissionStorageConf;
7168

7269
public class SparkSubmitModel {
7370

@@ -340,8 +337,6 @@ public Optional<String> getArtifactPath(@NotNull String selectedArtifactName) {
340337

341338
private void tryToCreateBatchSparkJob(@NotNull final IClusterDetail selectedClusterDetail) throws HDIException,IOException {
342339
SparkBatchSubmission.getInstance().setCredentialsProvider(selectedClusterDetail.getHttpUserName(), selectedClusterDetail.getHttpPassword());
343-
updateSparkJobSubmissionStorageConf(submissionParameter, selectedClusterDetail);
344-
345340
HttpResponse response = SparkBatchSubmission.getInstance().createBatchSparkJob(JobUtils.getLivyConnectionURL(selectedClusterDetail), submissionParameter);
346341

347342
if (response.getCode() == 201 || response.getCode() == 200) {
@@ -390,7 +385,6 @@ public SparkBatchRemoteDebugJob tryToCreateBatchSparkDebugJob(@NotNull final ICl
390385
SparkBatchSubmission.getInstance().setCredentialsProvider(selectedClusterDetail.getHttpUserName(), selectedClusterDetail.getHttpPassword());
391386

392387
try {
393-
updateSparkJobSubmissionStorageConf(submissionParameter, selectedClusterDetail);
394388
SparkBatchRemoteDebugJob debugJob = SparkBatchRemoteDebugJob.factory(
395389
JobUtils.getLivyConnectionURL(selectedClusterDetail),
396390
submissionParameter,

Utils/hdinsight-node-common/src/com/microsoft/azure/hdinsight/common/ClusterManagerEx.java

Lines changed: 3 additions & 2 deletions
Original file line numberDiff line numberDiff line change
@@ -135,10 +135,11 @@ public Optional<IClusterDetail> getClusterDetailByName(String clusterName) {
135135
.flatMap(cluster -> {
136136
try {
137137
cluster.getConfigurationInfo();
138+
139+
return Optional.of(cluster);
138140
} catch (Exception ignore) {
141+
return Optional.empty();
139142
}
140-
141-
return cluster.isConfigInfoAvailable() ? Optional.of(cluster) : Optional.empty();
142143
});
143144
}
144145

Utils/hdinsight-node-common/src/com/microsoft/azure/hdinsight/common/StreamUtil.java

Lines changed: 5 additions & 5 deletions
Original file line numberDiff line numberDiff line change
@@ -29,14 +29,12 @@
2929
import org.apache.commons.io.FileUtils;
3030
import org.apache.http.HttpEntity;
3131
import org.apache.http.client.methods.CloseableHttpResponse;
32+
import org.apache.http.entity.StringEntity;
3233

3334
import javax.swing.*;
3435
import java.io.*;
3536
import java.net.URL;
36-
import java.util.Date;
37-
import java.util.HashMap;
38-
import java.util.List;
39-
import java.util.UUID;
37+
import java.util.*;
4038

4139
public class StreamUtil {
4240

@@ -56,7 +54,9 @@ public static String getResultFromInputStream(InputStream inputStream) throws IO
5654
public static HttpResponse getResultFromHttpResponse(CloseableHttpResponse response) throws IOException {
5755
int code = response.getStatusLine().getStatusCode();
5856
String reason = response.getStatusLine().getReasonPhrase();
59-
HttpEntity entity = response.getEntity();
57+
// Entity for HEAD is empty
58+
HttpEntity entity = Optional.ofNullable(response.getEntity())
59+
.orElse(new StringEntity(""));
6060
try (InputStream inputStream = entity.getContent()) {
6161
String response_content = getResultFromInputStream(inputStream);
6262
return new HttpResponse(code, response_content, new HashMap<String, List<String>>(), reason);

Utils/hdinsight-node-common/src/com/microsoft/azure/hdinsight/serverexplore/HDInsightRootModuleImpl.java

Lines changed: 7 additions & 0 deletions
Original file line numberDiff line numberDiff line change
@@ -32,6 +32,7 @@
3232
import com.microsoft.azuretools.azurecommons.helpers.AzureCmdException;
3333
import com.microsoft.tooling.msservices.serviceexplorer.Node;
3434
import com.microsoft.tooling.msservices.serviceexplorer.NodeActionEvent;
35+
import com.microsoft.tooling.msservices.serviceexplorer.RefreshableNode;
3536

3637
import java.util.List;
3738

@@ -84,4 +85,10 @@ public void refreshWithoutAsync() {
8485

8586
}
8687

88+
@Override
89+
protected void onNodeClick(NodeActionEvent e) {
90+
if (!initialized) {
91+
this.load(false);
92+
}
93+
}
8794
}

Utils/hdinsight-node-common/src/com/microsoft/azure/hdinsight/serverexplore/hdinsightnode/ClusterNode.java

Lines changed: 2 additions & 0 deletions
Original file line numberDiff line numberDiff line change
@@ -134,6 +134,8 @@ protected void actionPerformed(NodeActionEvent e) {
134134

135135
@Override
136136
protected void refreshItems() {
137+
this.load(false);
138+
137139
if(!clusterDetail.isEmulator()) {
138140
JobViewManager.registerJovViewNode(clusterDetail.getName(), clusterDetail);
139141
JobViewNode jobViewNode = new JobViewNode(this, clusterDetail.getName());

Utils/hdinsight-node-common/src/com/microsoft/azure/hdinsight/spark/common/SparkBatchSubmission.java

Lines changed: 12 additions & 4 deletions
Original file line numberDiff line numberDiff line change
@@ -27,10 +27,7 @@
2727
import org.apache.http.auth.AuthScope;
2828
import org.apache.http.auth.UsernamePasswordCredentials;
2929
import org.apache.http.client.CredentialsProvider;
30-
import org.apache.http.client.methods.CloseableHttpResponse;
31-
import org.apache.http.client.methods.HttpDelete;
32-
import org.apache.http.client.methods.HttpGet;
33-
import org.apache.http.client.methods.HttpPost;
30+
import org.apache.http.client.methods.*;
3431
import org.apache.http.entity.StringEntity;
3532
import org.apache.http.impl.client.BasicCredentialsProvider;
3633
import org.apache.http.impl.client.CloseableHttpClient;
@@ -91,6 +88,17 @@ public HttpResponse getHttpResponseViaGet(String connectUrl) throws IOException
9188
}
9289
}
9390

91+
public HttpResponse getHttpResponseViaHead(String connectUrl) throws IOException {
92+
CloseableHttpClient httpclient = HttpClients.custom().setDefaultCredentialsProvider(credentialsProvider).build();
93+
94+
HttpHead httpHead = new HttpHead(connectUrl);
95+
httpHead.addHeader("Content-Type", "application/json");
96+
httpHead.addHeader("User-Agent", userAgentName);
97+
httpHead.addHeader("X-Requested-By", "ambari");
98+
try(CloseableHttpResponse response = httpclient.execute(httpHead)) {
99+
return StreamUtil.getResultFromHttpResponse(response);
100+
}
101+
}
94102

95103
/**
96104
* get all batches spark jobs

Utils/hdinsight-node-common/src/com/microsoft/azure/hdinsight/spark/common/SparkSubmissionParameter.java

Lines changed: 5 additions & 14 deletions
Original file line numberDiff line numberDiff line change
@@ -26,7 +26,11 @@
2626
import org.json.JSONException;
2727
import org.json.JSONObject;
2828

29-
import java.util.*;
29+
import java.util.ArrayList;
30+
import java.util.Arrays;
31+
import java.util.HashMap;
32+
import java.util.List;
33+
import java.util.Map;
3034

3135

3236
public class SparkSubmissionParameter {
@@ -166,19 +170,6 @@ public void setFilePath(String filePath) {
166170
this.file = filePath;
167171
}
168172

169-
public void setStorageAccount(String storageAccountFullName, String storageAccessKey) {
170-
SparkConfigures sparkConf = Optional.ofNullable(this.getJobConfig().get(Conf))
171-
.map(SparkConfigures::new)
172-
.orElse(new SparkConfigures());
173-
174-
sparkConf.put("spark.hadoop.fs.azure.keyprovider." + storageAccountFullName.split("\\.")[0],
175-
"org.apache.hadoop.fs.azure.SimpleKeyProvider");
176-
sparkConf.put("spark.hadoop.fs.azure.account.key." + storageAccountFullName,
177-
storageAccessKey);
178-
179-
this.getJobConfig().put(Conf, sparkConf);
180-
}
181-
182173
public static List<SparkSubmissionJobConfigCheckResult> checkJobConfigMap(Map<String, Object> jobConfigMap) {
183174

184175
List<SparkSubmissionJobConfigCheckResult> messageList = new ArrayList<>();

Utils/hdinsight-node-common/src/com/microsoft/azure/hdinsight/spark/jobs/JobUtils.java

Lines changed: 26 additions & 21 deletions
Original file line numberDiff line numberDiff line change
@@ -35,11 +35,17 @@
3535
import com.microsoft.azure.hdinsight.common.StreamUtil;
3636
import com.microsoft.azure.hdinsight.sdk.cluster.EmulatorClusterDetail;
3737
import com.microsoft.azure.hdinsight.sdk.cluster.IClusterDetail;
38+
import com.microsoft.azure.hdinsight.sdk.common.AuthenticationException;
3839
import com.microsoft.azure.hdinsight.sdk.common.HDIException;
3940
import com.microsoft.azure.hdinsight.sdk.rest.yarn.rm.App;
4041
import com.microsoft.azure.hdinsight.sdk.rest.yarn.rm.ApplicationMasterLogs;
41-
import com.microsoft.azure.hdinsight.sdk.storage.*;
42-
import com.microsoft.azure.hdinsight.spark.common.*;
42+
import com.microsoft.azure.hdinsight.sdk.storage.HDStorageAccount;
43+
import com.microsoft.azure.hdinsight.sdk.storage.IHDIStorageAccount;
44+
import com.microsoft.azure.hdinsight.sdk.storage.StorageAccountTypeEnum;
45+
import com.microsoft.azure.hdinsight.spark.common.SparkBatchJob;
46+
import com.microsoft.azure.hdinsight.spark.common.SparkBatchSubmission;
47+
import com.microsoft.azure.hdinsight.spark.common.SparkJobException;
48+
import com.microsoft.azure.hdinsight.spark.common.SparkSubmissionParameter;
4349
import com.microsoft.azure.hdinsight.spark.jobs.livy.LivyBatchesInformation;
4450
import com.microsoft.azure.hdinsight.spark.jobs.livy.LivySession;
4551
import com.microsoft.azuretools.azurecommons.helpers.AzureCmdException;
@@ -545,7 +551,6 @@ public static Single<SparkBatchJob> submit(@NotNull IClusterDetail cluster, @Not
545551
return Single.create((SingleSubscriber<? super SparkBatchJob> ob) -> {
546552
try {
547553
SparkBatchSubmission.getInstance().setCredentialsProvider(cluster.getHttpUserName(), cluster.getHttpPassword());
548-
updateSparkJobSubmissionStorageConf(parameter, cluster);
549554

550555
SparkBatchJob sparkJob = new SparkBatchJob(
551556
URI.create(getLivyConnectionURL(cluster)),
@@ -560,24 +565,6 @@ public static Single<SparkBatchJob> submit(@NotNull IClusterDetail cluster, @Not
560565
});
561566
}
562567

563-
public static void updateSparkJobSubmissionStorageConf(@NotNull SparkSubmissionParameter parameter, @NotNull IClusterDetail cluster) {
564-
try {
565-
IHDIStorageAccount storageAccount = cluster.getStorageAccount();
566-
567-
switch (storageAccount.getAccountType()) {
568-
case BLOB:
569-
// Enable blob storage account conf for job uploaded
570-
HDStorageAccount blob = (HDStorageAccount) storageAccount;
571-
parameter.setStorageAccount(blob.getFullStorageBlobName(), blob.getPrimaryKey());
572-
573-
break;
574-
case ADLS:
575-
case UNKNOWN:
576-
default:
577-
}
578-
} catch (HDIException ignored) { }
579-
}
580-
581568
public static Single<SimpleImmutableEntry<IClusterDetail, String>> deployArtifact(@NotNull String artifactLocalPath,
582569
@NotNull String clusterName,
583570
@NotNull Observer<SimpleImmutableEntry<MessageInfoType, String>> logSubject) {
@@ -603,4 +590,22 @@ public static Single<SimpleImmutableEntry<IClusterDetail, String>> deployArtifac
603590
public static Cache getGlobalCache() {
604591
return globalCache;
605592
}
593+
594+
public static AbstractMap.SimpleImmutableEntry<Integer, Map<String, List<String>>>
595+
authenticate(IClusterDetail clusterDetail) throws HDIException, IOException {
596+
SparkBatchSubmission submission = SparkBatchSubmission.getInstance();
597+
submission.setCredentialsProvider(clusterDetail.getHttpUserName(), clusterDetail.getHttpPassword());
598+
String livyUrl = URI.create(ClusterManagerEx.getInstance().getClusterConnectionString(clusterDetail.getName()))
599+
.resolve("/livy/")
600+
.toString();
601+
com.microsoft.azure.hdinsight.sdk.common.HttpResponse response = submission.getHttpResponseViaHead(livyUrl);
602+
603+
int statusCode = response.getCode();
604+
605+
if (statusCode >= 200 && statusCode < 300) {
606+
return new AbstractMap.SimpleImmutableEntry<>(statusCode, response.getHeaders());
607+
}
608+
609+
throw new AuthenticationException("Authentication failed", statusCode);
610+
}
606611
}

0 commit comments

Comments (0)