Skip to content
Closed
Show file tree
Hide file tree
Changes from 1 commit
Commits
File filter

Filter by extension

Filter by extension

Conversations
Failed to load comments.
Loading
Jump to
Jump to file
Failed to load files.
Loading
Diff view
Diff view
Original file line number Diff line number Diff line change
Expand Up @@ -49,6 +49,7 @@ abstract class AbstractCommandBuilder {
String master;
String remote;
protected String propertiesFile;
protected boolean loadSparkDefaults;
final List<String> appArgs;
final List<String> jars;
final List<String> files;
Expand Down Expand Up @@ -362,21 +363,35 @@ Map<String, String> getEffectiveConfig() throws IOException {
}

/**
* Loads the configuration file for the application, if it exists. This is either the
* user-specified properties file, or the spark-defaults.conf file under the Spark configuration
* directory.
* Load the configuration file(s) for the application - from the user-specified properties
* file, and/or the spark-defaults.conf file under the Spark configuration directory, if exists.
* Configurations from user-specified properties file take precedence over spark-defaults.conf.
*/
private Properties loadPropertiesFile() throws IOException {
Properties defaultsProps = new Properties();
if (propertiesFile == null || loadSparkDefaults) {
defaultsProps = loadPropertiesFile(new File(getConfDir(), DEFAULT_PROPERTIES_FILE));
}

Properties props = new Properties();
File propsFile;
if (propertiesFile != null) {
propsFile = new File(propertiesFile);
File propsFile = new File(propertiesFile);
checkArgument(propsFile.isFile(), "Invalid properties file '%s'.", propertiesFile);
} else {
propsFile = new File(getConfDir(), DEFAULT_PROPERTIES_FILE);
props = loadPropertiesFile(propsFile);
}

for (Map.Entry<Object, Object> entry : props.entrySet()) {
if (!defaultsProps.containsKey(entry.getKey())) {
defaultsProps.put(entry.getKey(), entry.getValue());
Copy link
Member

Choose a reason for hiding this comment

The reason will be displayed to describe this comment to others. Learn more.

Hmm? I think the logic should be that if the user-specified properties file contains any entries that also appear in the defaults, they must overwrite the defaults. But here it looks like user-specified entries are kept only when they are *not* already in the defaults file — so this is a merge where defaults win, instead of the user file overwriting them. Is that correct?

Copy link
Member Author

Choose a reason for hiding this comment

The reason will be displayed to describe this comment to others. Learn more.

Oops, you are right, will fix soon.

Copy link
Member Author

Choose a reason for hiding this comment

The reason will be displayed to describe this comment to others. Learn more.

@viirya I fixed the behavior, also refactored UT to verify the priority of the configs:

  • --conf
  • then the user-specified properties file
  • then spark-defaults.conf if it exists

}
}

if (propsFile.isFile()) {
return defaultsProps;
}

private Properties loadPropertiesFile(File propsFile) throws IOException {
Properties props = new Properties();
if (propsFile != null && propsFile.isFile()) {
try (InputStreamReader isr = new InputStreamReader(
new FileInputStream(propsFile), StandardCharsets.UTF_8)) {
props.load(isr);
Expand Down
Original file line number Diff line number Diff line change
Expand Up @@ -251,6 +251,10 @@ List<String> buildSparkSubmitArgs(boolean includeRemote) {
args.add(propertiesFile);
}

if (loadSparkDefaults) {
args.add(parser.LOAD_SPARK_DEFAULTS);
}

if (isExample) {
jars.addAll(findExamplesJars());
}
Expand Down Expand Up @@ -550,6 +554,7 @@ protected boolean handle(String opt, String value) {
}
case DEPLOY_MODE -> deployMode = value;
case PROPERTIES_FILE -> propertiesFile = value;
case LOAD_SPARK_DEFAULTS -> loadSparkDefaults = true;
case DRIVER_MEMORY -> conf.put(SparkLauncher.DRIVER_MEMORY, value);
case DRIVER_JAVA_OPTIONS -> conf.put(SparkLauncher.DRIVER_EXTRA_JAVA_OPTIONS, value);
case DRIVER_LIBRARY_PATH -> conf.put(SparkLauncher.DRIVER_EXTRA_LIBRARY_PATH, value);
Expand Down
Original file line number Diff line number Diff line change
Expand Up @@ -56,16 +56,18 @@ public static void cleanUp() throws Exception {

@Test
public void testDriverCmdBuilder() throws Exception {
  // Driver (client) mode: verify command building with no properties file, with a
  // user-specified properties file (with and without also loading spark-defaults.conf),
  // and with the connect properties file.
  testCmdBuilder(true, null, false);
  testCmdBuilder(true, dummyPropsFile, false);
  testCmdBuilder(true, dummyPropsFile, true);
  testCmdBuilder(true, connectPropsFile, false);
}

@Test
public void testClusterCmdBuilder() throws Exception {
  // Cluster mode: isDriver must be false here, otherwise this test would just repeat
  // testDriverCmdBuilder and leave the cluster code path unexercised.
  testCmdBuilder(false, null, false);
  testCmdBuilder(false, dummyPropsFile, false);
  testCmdBuilder(false, dummyPropsFile, true);
  testCmdBuilder(false, connectPropsFile, false);
}

@Test
Expand Down Expand Up @@ -317,13 +319,15 @@ public void testIsClientMode() {
assertTrue(builder.isClientMode(userProps));
}

private void testCmdBuilder(boolean isDriver, File propertiesFile) throws Exception {
private void testCmdBuilder(
boolean isDriver, File propertiesFile, boolean loadSparkDefaults) throws Exception {
final String DRIVER_DEFAULT_PARAM = "-Ddriver-default";
final String DRIVER_EXTRA_PARAM = "-Ddriver-extra";
String deployMode = isDriver ? "client" : "cluster";

SparkSubmitCommandBuilder launcher =
newCommandBuilder(Collections.emptyList());
launcher.loadSparkDefaults = loadSparkDefaults;
launcher.childEnv.put(CommandBuilderUtils.ENV_SPARK_HOME,
System.getProperty("spark.test.home"));
launcher.master = "yarn";
Expand All @@ -334,11 +338,10 @@ private void testCmdBuilder(boolean isDriver, File propertiesFile) throws Except
launcher.appArgs.add("foo");
launcher.appArgs.add("bar");
launcher.conf.put("spark.foo", "foo");
// either set the property through "--conf" or through default property file
if (propertiesFile == null) {
launcher.childEnv.put("SPARK_CONF_DIR", System.getProperty("spark.test.home")
+ "/launcher/src/test/resources");
} else {
launcher.childEnv.put("SPARK_CONF_DIR", System.getProperty("spark.test.home")
+ "/launcher/src/test/resources");
// either set the property through "--conf" or through properties file(s)
if (propertiesFile != null) {
launcher.setPropertiesFile(propertiesFile.getAbsolutePath());
launcher.conf.put(SparkLauncher.DRIVER_MEMORY, "1g");
launcher.conf.put(SparkLauncher.DRIVER_EXTRA_CLASSPATH, "/driver");
Expand Down Expand Up @@ -369,8 +372,13 @@ private void testCmdBuilder(boolean isDriver, File propertiesFile) throws Except
String[] cp = findArgValue(cmd, "-cp").split(Pattern.quote(File.pathSeparator));
if (isDriver) {
assertTrue(contains("/driver", cp), "Driver classpath should contain provided entry.");
if (propertiesFile == null || loadSparkDefaults) {
assertTrue(contains("/driver-default", cp),
"Driver classpath should contain provided entry.");
Copy link
Member Author

Choose a reason for hiding this comment

The reason will be displayed to describe this comment to others. Learn more.

this assertion fails without this patch.

}
} else {
assertFalse(contains("/driver", cp), "Driver classpath should not be in command.");
assertFalse(contains("/driver-default", cp), "Driver classpath should not be in command.");
}

String libPath = env.get(CommandBuilderUtils.getLibPathEnvName());
Expand Down
1 change: 1 addition & 0 deletions launcher/src/test/resources/spark-defaults.conf
Original file line number Diff line number Diff line change
Expand Up @@ -16,6 +16,7 @@
#

spark.driver.memory=1g
spark.driver.defaultExtraClassPath=/driver-default
spark.driver.extraClassPath=/driver
spark.driver.defaultJavaOptions=-Ddriver-default
spark.driver.extraJavaOptions=-Ddriver-extra
Expand Down