Skip to content

Commit bdf5cfd

Browse files
pan3793 authored and viirya committed
[SPARK-53176][DEPLOY] Spark launcher should respect --load-spark-defaults
### What changes were proposed in this pull request? SPARK-48392 introduces `--load-spark-defaults`, but does not apply correctly for the Spark launcher process, this mainly affects the driver when Spark runs in local/client mode. let's say we have ``` $ cat > conf/spark-defaults.conf <<EOF spark.driver.memory=4g EOF $ cat > conf/spark-local.conf <<EOF spark.master=local[4] EOF ``` ``` $ bin/spark-shell --properties-file conf/spark-local.conf --load-spark-defaults ... scala> spark.sql("SET spark.driver.memory").show() +-------------------+-----+ | key|value| +-------------------+-----+ |spark.driver.memory| 4g| +-------------------+-----+ ``` even the spark conf reports that driver uses 4GiB heap memory, but if we check the Java process, the config actually does not take effect, the default 1GiB is used instead. ``` $ jinfo <spark-submit-pid> ... VM Arguments: jvm_args: -Dscala.usejavacp=true -Xmx1g ... ``` ### Why are the changes needed? Bug fix. ### Does this PR introduce _any_ user-facing change? Yes, bug fix. ### How was this patch tested? UT is modified to cover the change, plus manual tests for the above cases. ``` $ jinfo <spark-submit-pid> ... VM Arguments: jvm_args: -Dscala.usejavacp=true -Xmx4g ... ``` ### Was this patch authored or co-authored using generative AI tooling? No. Closes #51905 from pan3793/SPARK-53176. Authored-by: Cheng Pan <[email protected]> Signed-off-by: Liang-Chi Hsieh <[email protected]> (cherry picked from commit b82957c) Signed-off-by: Liang-Chi Hsieh <[email protected]>
1 parent 52ab9ba commit bdf5cfd

File tree

3 files changed

+99
-8
lines changed

3 files changed

+99
-8
lines changed

launcher/src/main/java/org/apache/spark/launcher/AbstractCommandBuilder.java

Lines changed: 23 additions & 8 deletions
Original file line numberDiff line numberDiff line change
@@ -49,6 +49,7 @@ abstract class AbstractCommandBuilder {
4949
String master;
5050
String remote;
5151
protected String propertiesFile;
52+
protected boolean loadSparkDefaults;
5253
final List<String> appArgs;
5354
final List<String> jars;
5455
final List<String> files;
@@ -362,21 +363,35 @@ Map<String, String> getEffectiveConfig() throws IOException {
362363
}
363364

364365
/**
365-
* Loads the configuration file for the application, if it exists. This is either the
366-
* user-specified properties file, or the spark-defaults.conf file under the Spark configuration
367-
* directory.
366+
* Load the configuration file(s) for the application - from the user-specified properties
367+
* file, and/or the spark-defaults.conf file under the Spark configuration directory, if exists.
368+
* Configurations from user-specified properties file take precedence over spark-defaults.conf.
368369
*/
369370
private Properties loadPropertiesFile() throws IOException {
370371
Properties props = new Properties();
371-
File propsFile;
372372
if (propertiesFile != null) {
373-
propsFile = new File(propertiesFile);
373+
File propsFile = new File(propertiesFile);
374374
checkArgument(propsFile.isFile(), "Invalid properties file '%s'.", propertiesFile);
375-
} else {
376-
propsFile = new File(getConfDir(), DEFAULT_PROPERTIES_FILE);
375+
props = loadPropertiesFile(propsFile);
377376
}
378377

379-
if (propsFile.isFile()) {
378+
Properties defaultsProps = new Properties();
379+
if (propertiesFile == null || loadSparkDefaults) {
380+
defaultsProps = loadPropertiesFile(new File(getConfDir(), DEFAULT_PROPERTIES_FILE));
381+
}
382+
383+
for (Map.Entry<Object, Object> entry : defaultsProps.entrySet()) {
384+
if (!props.containsKey(entry.getKey())) {
385+
props.put(entry.getKey(), entry.getValue());
386+
}
387+
}
388+
389+
return props;
390+
}
391+
392+
private Properties loadPropertiesFile(File propsFile) throws IOException {
393+
Properties props = new Properties();
394+
if (propsFile != null && propsFile.isFile()) {
380395
try (InputStreamReader isr = new InputStreamReader(
381396
new FileInputStream(propsFile), StandardCharsets.UTF_8)) {
382397
props.load(isr);

launcher/src/main/java/org/apache/spark/launcher/SparkSubmitCommandBuilder.java

Lines changed: 5 additions & 0 deletions
Original file line numberDiff line numberDiff line change
@@ -251,6 +251,10 @@ List<String> buildSparkSubmitArgs(boolean includeRemote) {
251251
args.add(propertiesFile);
252252
}
253253

254+
if (loadSparkDefaults) {
255+
args.add(parser.LOAD_SPARK_DEFAULTS);
256+
}
257+
254258
if (isExample) {
255259
jars.addAll(findExamplesJars());
256260
}
@@ -550,6 +554,7 @@ protected boolean handle(String opt, String value) {
550554
}
551555
case DEPLOY_MODE -> deployMode = value;
552556
case PROPERTIES_FILE -> propertiesFile = value;
557+
case LOAD_SPARK_DEFAULTS -> loadSparkDefaults = true;
553558
case DRIVER_MEMORY -> conf.put(SparkLauncher.DRIVER_MEMORY, value);
554559
case DRIVER_JAVA_OPTIONS -> conf.put(SparkLauncher.DRIVER_EXTRA_JAVA_OPTIONS, value);
555560
case DRIVER_LIBRARY_PATH -> conf.put(SparkLauncher.DRIVER_EXTRA_LIBRARY_PATH, value);

launcher/src/test/java/org/apache/spark/launcher/SparkSubmitCommandBuilderSuite.java

Lines changed: 71 additions & 0 deletions
Original file line numberDiff line numberDiff line change
@@ -38,20 +38,91 @@ public class SparkSubmitCommandBuilderSuite extends BaseSuite {
3838

3939
private static File dummyPropsFile;
4040
private static File connectPropsFile;
41+
private static File driverMemPropsFile;
4142
private static SparkSubmitOptionParser parser;
4243

4344
@BeforeAll
4445
public static void setUp() throws Exception {
4546
dummyPropsFile = File.createTempFile("spark", "properties");
4647
connectPropsFile = File.createTempFile("spark", "properties");
4748
Files.writeString(connectPropsFile.toPath(), "spark.remote=sc://connect-server:15002");
49+
driverMemPropsFile = File.createTempFile("spark", "properties");
50+
Files.writeString(driverMemPropsFile.toPath(),
51+
"spark.driver.memory=4g\nspark.driver.memoryOverhead=768m");
4852
parser = new SparkSubmitOptionParser();
4953
}
5054

5155
@AfterAll
5256
public static void cleanUp() throws Exception {
5357
dummyPropsFile.delete();
5458
connectPropsFile.delete();
59+
driverMemPropsFile.delete();
60+
}
61+
62+
@Test
63+
public void testGetEffectiveConfig() throws Exception {
64+
doTestGetEffectiveConfig(null, true, true);
65+
doTestGetEffectiveConfig(null, true, false);
66+
doTestGetEffectiveConfig(null, false, true);
67+
doTestGetEffectiveConfig(null, false, false);
68+
doTestGetEffectiveConfig(driverMemPropsFile, true, true);
69+
doTestGetEffectiveConfig(driverMemPropsFile, true, false);
70+
doTestGetEffectiveConfig(driverMemPropsFile, false, true);
71+
doTestGetEffectiveConfig(driverMemPropsFile, false, false);
72+
}
73+
74+
private void doTestGetEffectiveConfig(
75+
File propertiesFile, boolean loadSparkDefaults, boolean confDriverMemory) throws Exception {
76+
SparkSubmitCommandBuilder launcher =
77+
newCommandBuilder(Collections.emptyList());
78+
launcher.loadSparkDefaults = loadSparkDefaults;
79+
launcher.conf.put("spark.foo", "bar");
80+
launcher.childEnv.put("SPARK_CONF_DIR", System.getProperty("spark.test.home")
81+
+ "/launcher/src/test/resources");
82+
83+
if (propertiesFile != null) {
84+
launcher.setPropertiesFile(propertiesFile.getAbsolutePath());
85+
}
86+
87+
if (confDriverMemory) {
88+
launcher.conf.put(SparkLauncher.DRIVER_MEMORY, "2g");
89+
}
90+
91+
Map<String, String> effectiveConfig = launcher.getEffectiveConfig();
92+
93+
assertEquals("bar", effectiveConfig.get("spark.foo"));
94+
if (confDriverMemory) {
95+
assertEquals("2g", effectiveConfig.get(SparkLauncher.DRIVER_MEMORY));
96+
} else if (propertiesFile != null) {
97+
try (FileReader reader = new FileReader(propertiesFile, StandardCharsets.UTF_8)) {
98+
Properties props = new Properties();
99+
props.load(reader);
100+
if (props.containsKey(SparkLauncher.DRIVER_MEMORY)) {
101+
assertEquals(props.getProperty(SparkLauncher.DRIVER_MEMORY),
102+
effectiveConfig.get(SparkLauncher.DRIVER_MEMORY));
103+
}
104+
}
105+
} else {
106+
assertEquals("1g", effectiveConfig.get(SparkLauncher.DRIVER_MEMORY));
107+
}
108+
109+
if (propertiesFile != null) {
110+
try (FileReader reader = new FileReader(propertiesFile, StandardCharsets.UTF_8)) {
111+
Properties props = new Properties();
112+
props.load(reader);
113+
if (props.containsKey("spark.driver.memoryOverhead")) {
114+
assertEquals(props.getProperty("spark.driver.memoryOverhead"),
115+
effectiveConfig.get("spark.driver.memoryOverhead"));
116+
}
117+
}
118+
if (loadSparkDefaults) {
119+
assertEquals("/driver", effectiveConfig.get(SparkLauncher.DRIVER_EXTRA_CLASSPATH));
120+
} else {
121+
assertFalse(effectiveConfig.containsKey(SparkLauncher.DRIVER_EXTRA_CLASSPATH));
122+
}
123+
} else {
124+
assertEquals("/driver", effectiveConfig.get(SparkLauncher.DRIVER_EXTRA_CLASSPATH));
125+
}
55126
}
56127

57128
@Test

0 commit comments

Comments (0)