Skip to content

Commit a229f81

Browse files
committed
Merge branch 'develop' of github.com:pndaproject/platform-deployment-manager into RELEASE-0.4.0
2 parents bce7bc2 + 215d231 commit a229f81

File tree

2 files changed: +5 −3 lines changed

api/src/main/resources/plugins/sparkStreaming.py

Lines changed: 4 additions & 1 deletion
Original file line numberDiff line numberDiff line change
@@ -132,6 +132,8 @@ def create_component(self, staged_component_path, application_name, component, p
132132
['sudo mkdir -p %s' % remote_component_install_path,
133133
'sudo mv %s %s' % (remote_component_tmp_path + '/log4j.properties', remote_component_install_path + '/log4j.properties')])
134134

135+
if 'component_main_py' in properties:
136+
main_jar_name = None
135137
if 'component_main_jar' in properties:
136138
main_jar_name = properties['component_main_jar']
137139
else:
@@ -141,7 +143,8 @@ def create_component(self, staged_component_path, application_name, component, p
141143
commands.append('sudo cp %s/%s %s' % (remote_component_tmp_path, service_script, service_script_install_path))
142144
commands.append('sudo cp %s/* %s' % (remote_component_tmp_path, remote_component_install_path))
143145
commands.append('sudo chmod a+x %s/yarn-kill.py' % (remote_component_install_path))
144-
commands.append('cd %s && sudo jar uf %s application.properties' % (remote_component_install_path, main_jar_name))
146+
if main_jar_name is not None:
147+
commands.append('cd %s && sudo jar uf %s application.properties' % (remote_component_install_path, main_jar_name))
145148
commands.append('sudo rm -rf %s' % (remote_component_tmp_path))
146149
deployer_utils.exec_ssh(target_host, root_user, key_file, commands)
147150

api/src/main/resources/plugins/upstart.conf.py.tpl

Lines changed: 1 addition & 2 deletions
Original file line numberDiff line numberDiff line change
@@ -4,6 +4,5 @@ respawn
44
respawn limit unlimited
55
pre-start exec /opt/${environment_namespace}/${component_application}/${component_name}/yarn-kill.py
66
pre-stop exec /opt/${environment_namespace}/${component_application}/${component_name}/yarn-kill.py
7-
env programDir=/opt/${environment_namespace}/${component_application}/${component_name}/
87
chdir /opt/${environment_namespace}/${component_application}/${component_name}/
9-
exec sudo -u hdfs spark-submit --driver-java-options "-Dlog4j.configuration=file:///${programDir}log4j.properties" --conf 'spark.executor.extraJavaOptions=-Dlog4j.configuration=file:///${programDir}log4j.properties' --name '${component_job_name}' --master yarn-cluster --py-files application.properties,${component_py_files} ${component_spark_submit_args} ${component_main_py}
8+
exec sudo -u hdfs spark-submit --driver-java-options "-Dlog4j.configuration=file:///opt/${environment_namespace}/${component_application}/${component_name}/log4j.properties" --conf 'spark.executor.extraJavaOptions=-Dlog4j.configuration=file:///opt/${environment_namespace}/${component_application}/${component_name}/log4j.properties' --name '${component_job_name}' --master yarn-cluster --py-files application.properties,${component_py_files} ${component_spark_submit_args} ${component_main_py}

0 commit comments

Comments (0)