Skip to content

Commit c93815f

Browse files
committed
Fix IDEA203 regression of Spark failure debug in local
The IDEA 203 API changes introduced a regression in local debugging of the Spark failure context, producing the following error message: `java.nio.file.InvalidPathException: Illegal char <*> at index ...`
1 parent 3e911c9 commit c93815f

File tree

1 file changed

+7
-5
lines changed

1 file changed

+7
-5
lines changed

PluginsAndFeatures/azure-toolkit-for-intellij/src/com/microsoft/azure/hdinsight/spark/run/SparkFailureTaskRunProfileState.kt

Lines changed: 7 additions & 5 deletions
Original file line number | Diff line number | Diff line change
@@ -26,14 +26,15 @@ import com.intellij.execution.DefaultExecutionResult
2626
import com.intellij.execution.ExecutionException
2727
import com.intellij.execution.ExecutionResult
2828
import com.intellij.execution.Executor
29+
import com.intellij.execution.ShortenCommandLine.MANIFEST
2930
import com.intellij.execution.configurations.GeneralCommandLine
3031
import com.intellij.execution.configurations.JavaParameters
3132
import com.intellij.execution.process.KillableColoredProcessHandler
3233
import com.intellij.execution.process.ProcessAdapter
3334
import com.intellij.execution.process.ProcessEvent
3435
import com.intellij.execution.runners.ProgramRunner
3536
import com.intellij.execution.util.JavaParametersUtil
36-
import com.intellij.util.PathUtil
37+
import com.intellij.openapi.util.io.FileUtil
3738
import com.microsoft.azure.hdinsight.spark.common.SparkFailureTaskDebugConfigurableModel
3839
import com.microsoft.azure.hdinsight.spark.ui.SparkJobLogConsoleView
3940
import com.microsoft.azuretools.telemetrywrapper.ErrorType
@@ -118,18 +119,18 @@ open class SparkFailureTaskRunProfileState(val name: String,
118119

119120
JavaParametersUtil.configureConfiguration(params, settingsConfigModel)
120121

122+
// Put failure context runtime at beginning, after JDK6, the classpath support <dir>/*
123+
params.classPath.addAllFiles(File(FileUtil.toCanonicalPath(settingsConfigModel.workingDirectory), "runtime").listFiles())
124+
121125
// The dependent spark-tools.jar is already in the Maven project lib/ directory
122126
JavaParametersUtil.configureProject(project, params, JavaParameters.JDK_AND_CLASSES_AND_TESTS, null)
123127

124128
// Additional VM parameters
125129
additionalVmParameters.forEach { params.vmParametersList.add(it) }
126130

127-
// Put failure context runtime at beginning, after JDK6, the classpath support <dir>/*
128-
params.classPath.addFirst("${PathUtil.getCanonicalPath(settingsConfigModel.workingDirectory)}/runtime/*")
129-
130131
// Prepare log4j.properties file
131132
settingsConfigModel.log4jProperties?.also { log4jProp ->
132-
val log4jPropertiesFile = File("${PathUtil.getCanonicalPath(settingsConfigModel.workingDirectory)}/conf/log4j.properties")
133+
val log4jPropertiesFile = File("${FileUtil.toCanonicalPath(settingsConfigModel.workingDirectory)}/conf/log4j.properties")
133134
.apply {
134135
parentFile.mkdir()
135136
writeText(log4jProp)
@@ -140,6 +141,7 @@ open class SparkFailureTaskRunProfileState(val name: String,
140141

141142
// Helper Main class
142143
params.mainClass = settingsConfigModel.runClass
144+
params.setShortenCommandLine(MANIFEST, null)
143145

144146
return params.toCommandLine()
145147
}

0 commit comments

Comments (0)