Skip to content

Commit fc6106b

Browse files
committed
fix compile error
1 parent 6d5e027 commit fc6106b

File tree

2 files changed

+43
-32
lines changed

2 files changed

+43
-32
lines changed

linkis-computation-governance/linkis-entrance/src/main/scala/org/apache/linkis/entrance/conf/EntranceConfiguration.scala

Lines changed: 24 additions & 24 deletions
Original file line numberDiff line numberDiff line change
@@ -295,44 +295,44 @@ object EntranceConfiguration {
295295
"01002,01003,13005,13006,13012"
296296
).getValue
297297

298-
val AI_SQL_TEST_MODE =
299-
CommonVars("linkis.entrance.aisql.test.mode", true).getValue
298+
val AI_SQL_TEST_MODE: Boolean =
299+
CommonVars[Boolean]("linkis.entrance.aisql.test.mode", true).getValue
300300

301-
val AI_SQL_ENABLED =
302-
CommonVars("linkis.ai.sql.enabled", true).getValue
301+
val AI_SQL_ENABLED: Boolean =
302+
CommonVars[Boolean]("linkis.ai.sql.enabled", true).getValue
303303

304-
val AI_SQL_DEFAULT_SPARK_ENGINE_TYPE =
305-
CommonVars("linkis.ai.sql.default.spark.engine.type", "spark-3.4.4").getValue
304+
val AI_SQL_DEFAULT_SPARK_ENGINE_TYPE: String =
305+
CommonVars[String]("linkis.ai.sql.default.spark.engine.type", "spark-3.4.4").getValue
306306

307-
val AI_SQL_CREATORS =
308-
CommonVars("linkis.ai.sql.support.creators", "IDE").getValue
307+
val AI_SQL_CREATORS: String =
308+
CommonVars[String]("linkis.ai.sql.support.creators", "IDE").getValue
309309

310310
val AI_SQL_KEY = "linkis.ai.sql"
311311

312312
val RETRY_NUM_KEY = "linkis.ai.retry.num"
313313

314-
val SPARK_SHUFFLE_SERVICE_ENABLED =
315-
CommonVars("linkis.spark.shuffle.service.enabled", true).getValue
314+
val SPARK_SHUFFLE_SERVICE_ENABLED: Boolean =
315+
CommonVars[Boolean]("linkis.spark.shuffle.service.enabled", true).getValue
316316

317-
val SPARK_EXECUTOR_CORES =
318-
CommonVars("spark.executor.cores", 7).getValue
317+
val SPARK_EXECUTOR_CORES: Int =
318+
CommonVars[Int]("spark.executor.cores", 7).getValue
319319

320-
val SPARK_EXECUTOR_MEMORY =
321-
CommonVars("spark.executor.memory", "28G").getValue
320+
val SPARK_EXECUTOR_MEMORY: String =
321+
CommonVars[String]("spark.executor.memory", "28G").getValue
322322

323-
val SPARK_EXECUTOR_INSTANCES =
324-
CommonVars("spark.executor.instances", "0").getValue
323+
val SPARK_EXECUTOR_INSTANCES: Int =
324+
CommonVars[Int]("spark.executor.instances", 0).getValue
325325

326-
val SPARK_DYNAMIC_ALLOCATION_ENABLED =
327-
CommonVars("spark.dynamicAllocation.enabled", true).getValue
326+
val SPARK_DYNAMIC_ALLOCATION_ENABLED: Boolean =
327+
CommonVars[Boolean]("spark.dynamicAllocation.enabled", true).getValue
328328

329-
val SPARK_DYNAMIC_ALLOCATION_MIN_EXECUTORS =
330-
CommonVars("spark.dynamicAllocation.minExecutors", 0).getValue
329+
val SPARK_DYNAMIC_ALLOCATION_MIN_EXECUTORS: Int =
330+
CommonVars[Int]("spark.dynamicAllocation.minExecutors", 0).getValue
331331

332-
val SPARK_DYNAMIC_ALLOCATION_MAX_EXECUTORS =
333-
CommonVars("spark.dynamicAllocation.maxExecutors", Integer.MAX_VALUE).getValue
332+
val SPARK_DYNAMIC_ALLOCATION_MAX_EXECUTORS: Int =
333+
CommonVars[Int]("spark.dynamicAllocation.maxExecutors", Integer.MAX_VALUE).getValue
334334

335-
val SPARK_DYNAMIC_ALLOCATION_ADDITIONAL_CONFS =
336-
CommonVars("spark.dynamicAllocation.additional.confs", "").getValue
335+
val SPARK_DYNAMIC_ALLOCATION_ADDITIONAL_CONFS: String =
336+
CommonVars[String]("spark.dynamicAllocation.additional.confs", "").getValue
337337

338338
}

linkis-computation-governance/linkis-entrance/src/main/scala/org/apache/linkis/entrance/interceptor/impl/AISQLTransformInterceptor.scala

Lines changed: 19 additions & 8 deletions
Original file line numberDiff line numberDiff line change
@@ -21,7 +21,6 @@ import org.apache.linkis.common.utils.{Logging, Utils}
2121
import org.apache.linkis.common.utils.CodeAndRunTypeUtils.LANGUAGE_TYPE_AI_SQL
2222
import org.apache.linkis.entrance.conf.EntranceConfiguration
2323
import org.apache.linkis.entrance.conf.EntranceConfiguration._
24-
2524
import org.apache.linkis.entrance.interceptor.EntranceInterceptor
2625
import org.apache.linkis.governance.common.entity.job.{JobAiRequest, JobRequest}
2726
import org.apache.linkis.governance.common.protocol.job.JobAiReqInsert
@@ -77,13 +76,25 @@ class AISQLTransformInterceptor extends EntranceInterceptor with Logging {
7776
logger.info("spark3 add dynamic resource.")
7877

7978
// add spark dynamic resource planning
80-
startMap.put("spark.shuffle.service.enabled", SPARK_SHUFFLE_SERVICE_ENABLED)
81-
startMap.put("spark.dynamicAllocation.enabled", SPARK_DYNAMIC_ALLOCATION_ENABLED)
82-
startMap.put("spark.dynamicAllocation.minExecutors", SPARK_DYNAMIC_ALLOCATION_MIN_EXECUTORS)
83-
startMap.put("spark.dynamicAllocation.maxExecutors", SPARK_DYNAMIC_ALLOCATION_MAX_EXECUTORS)
84-
startMap.put("spark.executor.cores", SPARK_EXECUTOR_CORES)
85-
startMap.put("spark.executor.memory", SPARK_EXECUTOR_MEMORY)
86-
startMap.put("spark.executor.instances", SPARK_EXECUTOR_INSTANCES)
79+
startMap.put(
80+
"spark.shuffle.service.enabled",
81+
SPARK_SHUFFLE_SERVICE_ENABLED.asInstanceOf[AnyRef]
82+
)
83+
startMap.put(
84+
"spark.dynamicAllocation.enabled",
85+
SPARK_DYNAMIC_ALLOCATION_ENABLED.asInstanceOf[AnyRef]
86+
)
87+
startMap.put(
88+
"spark.dynamicAllocation.minExecutors",
89+
SPARK_DYNAMIC_ALLOCATION_MIN_EXECUTORS.asInstanceOf[AnyRef]
90+
)
91+
startMap.put(
92+
"spark.dynamicAllocation.maxExecutors",
93+
SPARK_DYNAMIC_ALLOCATION_MAX_EXECUTORS.asInstanceOf[AnyRef]
94+
)
95+
startMap.put("spark.executor.cores", SPARK_EXECUTOR_CORES.asInstanceOf[AnyRef])
96+
startMap.put("spark.executor.memory", SPARK_EXECUTOR_MEMORY.asInstanceOf[AnyRef])
97+
startMap.put("spark.executor.instances", SPARK_EXECUTOR_INSTANCES.asInstanceOf[AnyRef])
8798

8899
Utils.tryAndWarn {
89100
val extraConfs: String = SPARK_DYNAMIC_ALLOCATION_ADDITIONAL_CONFS

0 commit comments

Comments (0)