
Commit ea7e5bf

Revert "Change to use a default application user instead of the hdfs user."
1 parent 9fce145 commit ea7e5bf

File tree

10 files changed: +37 -55 lines


CHANGELOG.md

Lines changed: 0 additions & 1 deletion
@@ -3,7 +3,6 @@ All notable changes to this project will be documented in this file.
 
 ## [Unreleased]
 ### Added:
-- PNDA-3330: Change to use a default application user instead of the hdfs user.
 - PNDA-2445: Support for Hortonworks HDP
 
 ## [0.4.0] 2017-05-23

api/src/main/resources/plugins/base_creator.py

Lines changed: 0 additions & 9 deletions
@@ -39,7 +39,6 @@
 import requests
 import hbase_descriptor
 import opentsdb_descriptor
-import getpass
 from deployer_utils import HDFS
 
 
@@ -167,7 +166,6 @@ def _instantiate_properties(self, application_name, component, property_override
         props['component_name'] = component['component_name']
         props['component_job_name'] = '%s-%s-job' % (props['component_application'], props['component_name'])
         props['component_hdfs_root'] = '/user/%s/%s' % (application_name, component['component_name'])
-        props['application_user'] = self._get_application_user()
         return props
 
     def _fill_properties(self, local_file, props):
@@ -329,10 +327,3 @@ def _find_yarn_app_info(self, all_yarn_applications, job_name):
             if result is None or self._get_yarn_start_time(app) > self._get_yarn_start_time(result):
                 result = app
         return result
-
-    def _get_application_user(self):
-        application_user = getpass.getuser()
-        # if running as root, make sure to start the application under a different user.
-        if application_user == 'root':
-            application_user = self._environment['application_default_user']
-        return application_user
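
For context, the reverted _get_application_user helper chose the run-as user by falling back from the current process user to a configured default. A minimal standalone sketch of that logic, assuming only the application_default_user environment key visible in the removed code:

import getpass

def resolve_application_user(environment):
    # Effective login user of the deployer process.
    user = getpass.getuser()
    # Never run applications as root; fall back to the configured default.
    if user == 'root':
        user = environment['application_default_user']
    return user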

api/src/main/resources/plugins/jupyter.py

Lines changed: 2 additions & 5 deletions
@@ -67,14 +67,11 @@ def create_component(self, staged_component_path, application_name, component, p
         key_file = self._environment['cluster_private_key']
         root_user = self._environment['cluster_root_user']
         target_host = self._environment['jupyter_host']
-        application_user = properties['application_user']
         delete_commands = []
 
         mkdircommands = []
-        remote_component_tmp_path = '%s/%s/%s/%s' % ('/tmp/%s' % self._namespace, application_user, application_name, component['component_name'])
-        remote_notebook_path = '/home/%s/%s' % (application_user, self._environment['jupyter_notebook_directory'])
+        remote_component_tmp_path = '%s/%s/%s' % ('/tmp/%s' % self._namespace, application_name, component['component_name'])
         mkdircommands.append('mkdir -p %s' % remote_component_tmp_path)
-        mkdircommands.append('sudo -u %s mkdir -p %s' % (application_user, remote_notebook_path))
         deployer_utils.exec_ssh(target_host, root_user, key_file, mkdircommands)
 
         file_list = component['component_detail']
@@ -85,7 +82,7 @@ def create_component(self, staged_component_path, application_name, component, p
             os.system("scp -i %s -o StrictHostKeyChecking=no %s/%s %s@%s:%s" %
                       (key_file, staged_component_path, file_name, root_user, target_host, remote_component_tmp_path))
 
-            remote_component_install_path = '%s/%s_%s' % (remote_notebook_path, application_name, file_name)
+            remote_component_install_path = '%s/%s_%s' % (self._environment['jupyter_notebook_directory'], application_name, file_name)
             deployer_utils.exec_ssh(
                 target_host, root_user, key_file,
                 ['sudo mv %s %s' % (remote_component_tmp_path + '/*.ipynb', remote_component_install_path)])
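
After the revert, notebooks are staged per application rather than per user: files go over scp to a /tmp staging area on the Jupyter host and are then moved into the shared notebook directory as root. A condensed sketch of that flow, with exec_ssh passed in as a parameter so nothing is assumed about deployer_utils beyond the call shape visible in this diff:

import os

def stage_notebook(namespace, application_name, component_name, file_name,
                   staged_component_path, env, exec_ssh):
    # Per-application staging area on the Jupyter host.
    tmp_path = '/tmp/%s/%s/%s' % (namespace, application_name, component_name)
    exec_ssh(env['jupyter_host'], env['cluster_root_user'],
             env['cluster_private_key'], ['mkdir -p %s' % tmp_path])
    # Copy the staged notebook file to the remote tmp directory.
    os.system("scp -i %s -o StrictHostKeyChecking=no %s/%s %s@%s:%s" % (
        env['cluster_private_key'], staged_component_path, file_name,
        env['cluster_root_user'], env['jupyter_host'], tmp_path))
    # Install under an application-prefixed name in the notebook directory.
    install_path = '%s/%s_%s' % (env['jupyter_notebook_directory'],
                                 application_name, file_name)
    exec_ssh(env['jupyter_host'], env['cluster_root_user'],
             env['cluster_private_key'],
             ['sudo mv %s/*.ipynb %s' % (tmp_path, install_path)])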

api/src/main/resources/plugins/oozie.py

Lines changed: 8 additions & 11 deletions
@@ -52,19 +52,19 @@ def destroy_component(self, application_name, create_data):
                       application_name,
                       json.dumps(create_data))
         # terminate oozie jobs
-        self._kill_oozie(create_data['job_handle'], create_data['application_user'])
+        self._kill_oozie(create_data['job_handle'], self.OOZIE_USER_NAME)
 
         # delete component from hdfs
         remote_path = create_data['component_hdfs_root'][1:]
         self._hdfs_client.remove(remote_path, recursive=True)
 
     def start_component(self, application_name, create_data):
         logging.debug("start_component: %s %s", application_name, json.dumps(create_data))
-        self._start_oozie(create_data['job_handle'], create_data['application_user'])
+        self._start_oozie(create_data['job_handle'], self.OOZIE_USER_NAME)
 
     def stop_component(self, application_name, create_data):
         logging.debug("stop_component: %s %s", application_name, json.dumps(create_data))
-        self._stop_oozie(create_data['job_handle'], create_data['application_user'])
+        self._stop_oozie(create_data['job_handle'], self.OOZIE_USER_NAME)
 
     def create_component(self, staged_component_path, application_name, component, properties):
         logging.debug(
@@ -89,7 +89,7 @@ def create_component(self, staged_component_path, application_name, component, p
         properties['deployment_end'] = end.strftime("%Y-%m-%dT%H:%MZ")
 
         # insert required oozie properties
-        properties['user.name'] = properties['application_user']
+        properties['user.name'] = self.OOZIE_USER_NAME
         # Oozie ShareLib - supports actions
         properties['oozie.use.system.libpath'] = 'true'
         # platform shared libs e.g. hbase
@@ -104,15 +104,12 @@ def create_component(self, staged_component_path, application_name, component, p
         properties[def_path] = '%s/%s' % (self._environment['name_node'], remote_path)
 
         # deploy everything to various hadoop services
-        undeploy = self._deploy_to_hadoop(properties, staged_component_path, remote_path, properties['application_user'])
+        undeploy = self._deploy_to_hadoop(properties, staged_component_path, remote_path)
 
         # return something that can be used to undeploy later
-        return {'job_handle': undeploy['id'],
-                'component_hdfs_root': properties['component_hdfs_root'],
-                'application_user': properties['application_user']
-                }
+        return {'job_handle': undeploy['id'], 'component_hdfs_root': properties['component_hdfs_root']}
 
-    def _deploy_to_hadoop(self, properties, staged_component_path, remote_path, application_user, exclude=None):
+    def _deploy_to_hadoop(self, properties, staged_component_path, remote_path, exclude=None):
         if exclude is None:
             exclude = []
         exclude.extend(['hdfs.json',
@@ -130,7 +127,7 @@ def _deploy_to_hadoop(self, properties, staged_component_path, remote_path, appl
 
         # submit to oozie
         result = self._submit_oozie(properties)
-        self._stop_oozie(result['id'], application_user)
+        self._stop_oozie(result['id'], self.OOZIE_USER_NAME)
 
         return result
 
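The net effect in this plugin is that every Oozie interaction (submit, start, stop, kill) now happens as the fixed OOZIE_USER_NAME class constant instead of a per-application user, so the create_data handle no longer needs to carry a user. A minimal sketch of how the required submission properties are assembled after the revert (property names are taken from the hunks above; the workflow-path key is held in def_path, whose value this diff does not show):

def required_oozie_properties(properties, oozie_user, name_node, remote_path, def_path):
    # Jobs run as the fixed Oozie user rather than a per-application user.
    properties['user.name'] = oozie_user
    # Oozie ShareLib - supports actions.
    properties['oozie.use.system.libpath'] = 'true'
    # Point the job definition at its deployed location in HDFS.
    properties[def_path] = '%s/%s' % (name_node, remote_path)
    return properties
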
api/src/main/resources/plugins/sparkStreaming.py

Lines changed: 1 addition & 0 deletions
@@ -27,6 +27,7 @@
 import logging
 import platform
 from shutil import copy
+
 import deployer_utils
 from plugins.base_creator import Creator
 
api/src/main/resources/plugins/systemd.service.py.tpl

Lines changed: 1 addition & 1 deletion
@@ -3,7 +3,7 @@ Description=PNDA Application: ${component_application}-${component_name}
 
 [Service]
 Type=simple
-User=${application_user}
+User=hdfs
 WorkingDirectory=/opt/${environment_namespace}/${component_application}/${component_name}/
 ExecStartPre=/opt/${environment_namespace}/${component_application}/${component_name}/yarn-kill.py
 ExecStopPost=/opt/${environment_namespace}/${component_application}/${component_name}/yarn-kill.py

api/src/main/resources/plugins/systemd.service.tpl

Lines changed: 1 addition & 1 deletion
@@ -3,7 +3,7 @@ Description=PNDA Application: ${component_application}-${component_name}
 
 [Service]
 Type=simple
-User=${application_user}
+User=hdfs
 WorkingDirectory=/opt/${environment_namespace}/${component_application}/${component_name}/
 ExecStartPre=/opt/${environment_namespace}/${component_application}/${component_name}/yarn-kill.py
 ExecStopPost=/opt/${environment_namespace}/${component_application}/${component_name}/yarn-kill.py
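
Both systemd templates now hard-code the hdfs user again. For illustration only, the ${...} placeholders match Python's string.Template syntax, so a rendering under hypothetical values could look like this (the deployer's actual templating step is not shown in this commit):

from string import Template

UNIT_TPL = """[Service]
Type=simple
User=hdfs
WorkingDirectory=/opt/${environment_namespace}/${component_application}/${component_name}/
"""

print(Template(UNIT_TPL).substitute(
    environment_namespace='pnda',          # hypothetical value
    component_application='example-app',   # hypothetical value
    component_name='example-component',    # hypothetical value
))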

api/src/main/resources/plugins/upstart.conf.py.tpl

Lines changed: 1 addition & 1 deletion
@@ -5,4 +5,4 @@ respawn limit unlimited
 pre-start exec /opt/${environment_namespace}/${component_application}/${component_name}/yarn-kill.py
 pre-stop exec /opt/${environment_namespace}/${component_application}/${component_name}/yarn-kill.py
 chdir /opt/${environment_namespace}/${component_application}/${component_name}/
-exec sudo -u ${application_user} spark-submit --driver-java-options "-Dlog4j.configuration=file:///opt/${environment_namespace}/${component_application}/${component_name}/log4j.properties" --conf 'spark.executor.extraJavaOptions=-Dlog4j.configuration=file:///opt/${environment_namespace}/${component_application}/${component_name}/log4j.properties' --name '${component_job_name}' --master yarn-cluster --py-files application.properties,${component_py_files} ${component_spark_submit_args} ${component_main_py}
+exec sudo -u hdfs spark-submit --driver-java-options "-Dlog4j.configuration=file:///opt/${environment_namespace}/${component_application}/${component_name}/log4j.properties" --conf 'spark.executor.extraJavaOptions=-Dlog4j.configuration=file:///opt/${environment_namespace}/${component_application}/${component_name}/log4j.properties' --name '${component_job_name}' --master yarn-cluster --py-files application.properties,${component_py_files} ${component_spark_submit_args} ${component_main_py}

api/src/main/resources/plugins/upstart.conf.tpl

Lines changed: 1 addition & 1 deletion
@@ -6,4 +6,4 @@ pre-start exec /opt/${environment_namespace}/${component_application}/${componen
 pre-stop exec /opt/${environment_namespace}/${component_application}/${component_name}/yarn-kill.py
 env programDir=/opt/${environment_namespace}/${component_application}/${component_name}/
 chdir /opt/${environment_namespace}/${component_application}/${component_name}/
-exec sudo -u ${application_user} spark-submit --driver-java-options "-Dlog4j.configuration=file:///${programDir}log4j.properties" --class ${component_main_class} --name '${component_job_name}' --master yarn-cluster --files log4j.properties ${component_spark_submit_args} ${component_main_jar}
+exec sudo -u hdfs spark-submit --driver-java-options "-Dlog4j.configuration=file:///${programDir}log4j.properties" --class ${component_main_class} --name '${component_job_name}' --master yarn-cluster --files log4j.properties ${component_spark_submit_args} ${component_main_jar}
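
As with the systemd units, both upstart templates revert to launching the job as hdfs. For reference, a hedged breakdown of the reverted exec line as a command builder (parameter names mirror the template variables; the helper itself is hypothetical):

def spark_submit_command(program_dir, main_class, job_name, extra_args, main_jar):
    # Driver-side log4j configuration shipped from the program directory.
    log4j_opt = '-Dlog4j.configuration=file:///%slog4j.properties' % program_dir
    return ' '.join([
        'sudo -u hdfs spark-submit',       # run as the hdfs user again
        '--driver-java-options "%s"' % log4j_opt,
        '--class %s' % main_class,
        "--name '%s'" % job_name,
        '--master yarn-cluster',           # driver runs inside YARN
        '--files log4j.properties',        # ship logging config to executors
        extra_args,
        main_jar,
    ])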
