@@ -14,17 +14,7 @@ def __init__(self):
         self.valid_prog_environs = ['PrgEnv-gnu']
         self.modules = ['Spark']
         self.sourcesdir = None
-        # `SPARK_CONF` needs to be defined after running `start-all.sh`.
-        self.pre_run = [
-            'start-all.sh',
-            ('SPARK_CONF="--conf spark.default.parallelism=10 '
-             '--conf spark.executor.cores=8 '
-             '--conf spark.executor.memory=15g"')
-        ]
-        self.executable = (
-            'spark-submit ${SPARK_CONF} --master $SPARKURL '
-            '--class org.apache.spark.examples.SparkPi '
-            '$EBROOTSPARK/examples/jars/spark-examples_2.11-2.3.1.jar 10000;')
+        self.pre_run = ['start-all.sh']
         self.post_run = ['stop-all.sh']
         self.num_tasks = 2
         self.num_tasks_per_node = 1
@@ -34,17 +24,25 @@ def __init__(self):
         self.maintainers = ['TM', 'TR']
         self.tags = {'production'}
 
-    def setup(self, partition, environ, **job_opts):
-        super().setup(partition, environ, **job_opts)
-        if partition.name == 'gpu':
+    @rfm.run_before('run')
+    def prepare_run(self):
+        if self.current_partition.fullname in ['daint:gpu', 'dom:gpu']:
             num_workers = 12
+            exec_cores = 3
         else:
             num_workers = 36
+            exec_cores = 9
 
         self.variables = {
             'SPARK_WORKER_CORES': '%s' % num_workers,
             'SPARK_LOCAL_DIRS': '"/tmp"',
         }
+        self.executable = (
+            'spark-submit --conf spark.default.parallelism=%s '
+            '--conf spark.executor.cores=%s --conf spark.executor.memory=15g '
+            '--master $SPARKURL --class org.apache.spark.examples.SparkPi '
+            '$EBROOTSPARK/examples/jars/spark-examples_2.11-2.3.1.jar 10000;'
+            % (num_workers, exec_cores))
         # The job launcher has to be changed since the `spark-submit`
         # script is not used with srun.
         self.job.launcher = getlauncher('local')()