This repository was archived by the owner on Jan 9, 2020. It is now read-only.

Commit c6bc19d

Fix conversion from GB to MiB (#470)
* Fix conversion from GB to MiB

  Previously we applied a value that was in MiB units to Kubernetes _as if_ it were in MB units. Now, place MiB units in variable names and apply them to Kubernetes as the correct MiB units.

* Fix test

* Update tests
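To make the size of the mismatch concrete: Kubernetes parses the decimal suffix "M" as 10^6 bytes, while Spark's `spark.driver.memory` / `spark.executor.memory` settings resolve to mebibytes (2^20 bytes). A standalone sketch of the per-pod shortfall (illustrative, not part of the commit):

```scala
// Illustrative sketch of the per-pod shortfall when a MiB-denominated value
// is submitted to Kubernetes with the decimal "M" suffix.
object UnitMismatchSketch extends App {
  val driverMemoryMiB = 456L
  val bytesAsMi = driverMemoryMiB * 1024L * 1024L // "456Mi" = 478,150,656 bytes
  val bytesAsM  = driverMemoryMiB * 1000L * 1000L // "456M"  = 456,000,000 bytes
  // Requesting "456M" under-allocates the pod by roughly 22 MB.
  println(s"Shortfall per pod: ${bytesAsMi - bytesAsM} bytes")
}
```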
1 parent 6177bf8 commit c6bc19d

File tree

4 files changed: +19 -19 lines changed


resource-managers/kubernetes/core/src/main/scala/org/apache/spark/deploy/kubernetes/constants.scala

Lines changed: 1 addition & 1 deletion
@@ -101,5 +101,5 @@ package object constants {
   private[spark] val DRIVER_CONTAINER_NAME = "spark-kubernetes-driver"
   private[spark] val KUBERNETES_MASTER_INTERNAL_URL = "https://kubernetes.default.svc"
   private[spark] val MEMORY_OVERHEAD_FACTOR = 0.10
-  private[spark] val MEMORY_OVERHEAD_MIN = 384L
+  private[spark] val MEMORY_OVERHEAD_MIN_MIB = 384L
 }
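The renamed constant is the floor of the default memory overhead that the driver and executor code paths below compute. A minimal sketch of that rule, with a hypothetical `memoryMiB` input:

```scala
// Minimal sketch of the default-overhead rule applied in the files below:
// overhead = max(10% of the MiB memory setting, 384 MiB).
val MEMORY_OVERHEAD_FACTOR = 0.10
val MEMORY_OVERHEAD_MIN_MIB = 384L

def memoryWithOverheadMiB(memoryMiB: Long): Long =
  memoryMiB + math.max((MEMORY_OVERHEAD_FACTOR * memoryMiB).toLong, MEMORY_OVERHEAD_MIN_MIB)

// memoryWithOverheadMiB(1024L) == 1408L  (10% is 102 MiB, so the 384 MiB floor applies)
// memoryWithOverheadMiB(8192L) == 9011L  (10% is 819 MiB, which exceeds the floor)
```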

resource-managers/kubernetes/core/src/main/scala/org/apache/spark/deploy/kubernetes/submit/submitsteps/BaseDriverConfigurationStep.scala

Lines changed: 8 additions & 8 deletions
@@ -46,13 +46,13 @@ private[spark] class BaseDriverConfigurationStep(
   private val driverLimitCores = submissionSparkConf.get(KUBERNETES_DRIVER_LIMIT_CORES)
 
   // Memory settings
-  private val driverMemoryMb = submissionSparkConf.get(
+  private val driverMemoryMiB = submissionSparkConf.get(
     org.apache.spark.internal.config.DRIVER_MEMORY)
-  private val memoryOverheadMb = submissionSparkConf
+  private val memoryOverheadMiB = submissionSparkConf
     .get(KUBERNETES_DRIVER_MEMORY_OVERHEAD)
-    .getOrElse(math.max((MEMORY_OVERHEAD_FACTOR * driverMemoryMb).toInt,
-      MEMORY_OVERHEAD_MIN))
-  private val driverContainerMemoryWithOverhead = driverMemoryMb + memoryOverheadMb
+    .getOrElse(math.max((MEMORY_OVERHEAD_FACTOR * driverMemoryMiB).toInt,
+      MEMORY_OVERHEAD_MIN_MIB))
+  private val driverContainerMemoryWithOverheadMiB = driverMemoryMiB + memoryOverheadMiB
   private val driverDockerImage = submissionSparkConf.get(DRIVER_DOCKER_IMAGE)
 
   override def configureDriver(
@@ -86,10 +86,10 @@ private[spark] class BaseDriverConfigurationStep(
       .withAmount(driverCpuCores)
       .build()
     val driverMemoryQuantity = new QuantityBuilder(false)
-      .withAmount(s"${driverMemoryMb}M")
+      .withAmount(s"${driverMemoryMiB}Mi")
       .build()
     val driverMemoryLimitQuantity = new QuantityBuilder(false)
-      .withAmount(s"${driverContainerMemoryWithOverhead}M")
+      .withAmount(s"${driverContainerMemoryWithOverheadMiB}Mi")
       .build()
     val maybeCpuLimitQuantity = driverLimitCores.map { limitCores =>
       ("cpu", new QuantityBuilder(false).withAmount(limitCores).build())
@@ -102,7 +102,7 @@ private[spark] class BaseDriverConfigurationStep(
       .addToEnv(driverExtraClasspathEnv.toSeq: _*)
       .addNewEnv()
         .withName(ENV_DRIVER_MEMORY)
-        .withValue(driverContainerMemoryWithOverhead + "m")
+        .withValue(driverContainerMemoryWithOverheadMiB + "M") // JVM treats the "M" unit as "Mi"
        .endEnv()
      .addNewEnv()
        .withName(ENV_DRIVER_MAIN_CLASS)
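Note the one deliberate exception above: `ENV_DRIVER_MEMORY` keeps a bare "M" suffix because its consumer is the JVM, not Kubernetes, and Java memory arguments treat "M"/"m" as binary mebibytes, i.e. the same quantity Kubernetes calls "Mi". A small illustrative contrast (values hypothetical):

```scala
// Illustrative only: the same MiB figure needs different suffixes per consumer.
val memMiB = 456L
val k8sQuantity = s"${memMiB}Mi"    // Kubernetes: "Mi" = 2^20 bytes ("M" would mean 10^6)
val jvmHeapArg  = s"-Xmx${memMiB}M" // JVM: "M" (or "m") = 2^20 bytes, matching "Mi"
```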

resource-managers/kubernetes/core/src/main/scala/org/apache/spark/scheduler/cluster/kubernetes/KubernetesClusterSchedulerBackend.scala

Lines changed: 7 additions & 7 deletions
@@ -113,16 +113,16 @@ private[spark] class KubernetesClusterSchedulerBackend(
     throw new SparkException("Must specify the driver pod name"))
   private val executorPodNamePrefix = conf.get(KUBERNETES_EXECUTOR_POD_NAME_PREFIX)
 
-  private val executorMemoryMb = conf.get(org.apache.spark.internal.config.EXECUTOR_MEMORY)
+  private val executorMemoryMiB = conf.get(org.apache.spark.internal.config.EXECUTOR_MEMORY)
   private val executorMemoryString = conf.get(
     org.apache.spark.internal.config.EXECUTOR_MEMORY.key,
     org.apache.spark.internal.config.EXECUTOR_MEMORY.defaultValueString)
 
-  private val memoryOverheadMb = conf
+  private val memoryOverheadMiB = conf
     .get(KUBERNETES_EXECUTOR_MEMORY_OVERHEAD)
-    .getOrElse(math.max((MEMORY_OVERHEAD_FACTOR * executorMemoryMb).toInt,
-      MEMORY_OVERHEAD_MIN))
-  private val executorMemoryWithOverhead = executorMemoryMb + memoryOverheadMb
+    .getOrElse(math.max((MEMORY_OVERHEAD_FACTOR * executorMemoryMiB).toInt,
+      MEMORY_OVERHEAD_MIN_MIB))
+  private val executorMemoryWithOverheadMiB = executorMemoryMiB + memoryOverheadMiB
 
   private val executorCores = conf.getDouble("spark.executor.cores", 1d)
   private val executorLimitCores = conf.getOption(KUBERNETES_EXECUTOR_LIMIT_CORES.key)
@@ -441,10 +441,10 @@ private[spark] class KubernetesClusterSchedulerBackend(
       SPARK_ROLE_LABEL -> SPARK_POD_EXECUTOR_ROLE) ++
       executorLabels
     val executorMemoryQuantity = new QuantityBuilder(false)
-      .withAmount(s"${executorMemoryMb}M")
+      .withAmount(s"${executorMemoryMiB}Mi")
       .build()
     val executorMemoryLimitQuantity = new QuantityBuilder(false)
-      .withAmount(s"${executorMemoryWithOverhead}M")
+      .withAmount(s"${executorMemoryWithOverheadMiB}Mi")
       .build()
     val executorCpuQuantity = new QuantityBuilder(false)
       .withAmount(executorCores.toString)
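Both changed call sites in this file follow the same pattern as the driver step. A minimal usage sketch of the fabric8 builder with the corrected suffix (the memory value is a hypothetical example):

```scala
import io.fabric8.kubernetes.api.model.{Quantity, QuantityBuilder}

// Minimal sketch of the corrected pattern: format the MiB-denominated value
// with the binary "Mi" suffix before handing it to the fabric8 client.
val executorMemoryWithOverheadMiB = 1408L // hypothetical value
val executorMemoryLimitQuantity: Quantity = new QuantityBuilder(false)
  .withAmount(s"${executorMemoryWithOverheadMiB}Mi")
  .build()
```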

resource-managers/kubernetes/core/src/test/scala/org/apache/spark/deploy/kubernetes/submit/submitsteps/BaseDriverConfigurationStepSuite.scala

Lines changed: 3 additions & 3 deletions
@@ -81,17 +81,17 @@ private[spark] class BaseDriverConfigurationStepSuite extends SparkFunSuite {
       .toMap
     assert(envs.size === 6)
     assert(envs(ENV_SUBMIT_EXTRA_CLASSPATH) === "/opt/spark/spark-exmaples.jar")
-    assert(envs(ENV_DRIVER_MEMORY) === "456m")
+    assert(envs(ENV_DRIVER_MEMORY) === "456M")
     assert(envs(ENV_DRIVER_MAIN_CLASS) === MAIN_CLASS)
     assert(envs(ENV_DRIVER_ARGS) === "arg1 arg2")
     assert(envs(DRIVER_CUSTOM_ENV_KEY1) === "customDriverEnv1")
     assert(envs(DRIVER_CUSTOM_ENV_KEY2) === "customDriverEnv2")
     val resourceRequirements = preparedDriverSpec.driverContainer.getResources
     val requests = resourceRequirements.getRequests.asScala
     assert(requests("cpu").getAmount === "2")
-    assert(requests("memory").getAmount === "256M")
+    assert(requests("memory").getAmount === "256Mi")
     val limits = resourceRequirements.getLimits.asScala
-    assert(limits("memory").getAmount === "456M")
+    assert(limits("memory").getAmount === "456Mi")
     assert(limits("cpu").getAmount === "4")
     val driverPodMetadata = preparedDriverSpec.driverPod.getMetadata
     assert(driverPodMetadata.getName === "spark-driver-pod")
