Skip to content

Commit ce9c0fe

Browse files
committed
feat: add others deps
1 parent bac16c3 commit ce9c0fe

File tree

17 files changed

+2199
-0
lines changed

17 files changed

+2199
-0
lines changed
Lines changed: 242 additions & 0 deletions
Original file line numberDiff line numberDiff line change
@@ -0,0 +1,242 @@
#!/usr/bin/env bash

# Licensed to the Apache Software Foundation (ASF) under one or more
# contributor license agreements.  See the NOTICE file distributed with
# this work for additional information regarding copyright ownership.
# The ASF licenses this file to You under the Apache License, Version 2.0
# (the "License"); you may not use this file except in compliance with
# the License.  You may obtain a copy of the License at
#
#     http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.

# The name of the script being executed.
HADOOP_SHELL_EXECNAME="hadoop"

# Path to this script. BASH_SOURCE is preferred over $0 because $0 is
# unreliable when the script is sourced rather than executed directly.
MYNAME="${BASH_SOURCE-$0}"
## @description  build up the hadoop command's usage text.
## @audience     public
## @stability    stable
## @replaceable  no
function hadoop_usage
{
  # Generic options shared by all subcommands.
  hadoop_add_option "buildpaths" "attempt to add class files from build tree"
  hadoop_add_option "hostnames list[,of,host,names]" "hosts to use in worker mode"
  hadoop_add_option "loglevel level" "set the log4j level for this command"
  hadoop_add_option "hosts filename" "list of hosts to use in worker mode"
  hadoop_add_option "workers" "turn on worker mode"

  # Subcommands, tagged by type (client/admin/daemon) for usage grouping.
  hadoop_add_subcommand "checknative" client "check native Hadoop and compression libraries availability"
  hadoop_add_subcommand "classpath" client "prints the class path needed to get the Hadoop jar and the required libraries"
  hadoop_add_subcommand "conftest" client "validate configuration XML files"
  hadoop_add_subcommand "credential" client "interact with credential providers"
  hadoop_add_subcommand "daemonlog" admin "get/set the log level for each daemon"
  hadoop_add_subcommand "dtutil" client "operations related to delegation tokens"
  hadoop_add_subcommand "envvars" client "display computed Hadoop environment variables"
  hadoop_add_subcommand "fs" client "run a generic filesystem user client"
  hadoop_add_subcommand "jar <jar>" client "run a jar file. NOTE: please use \"yarn jar\" to launch YARN applications, not this command."
  hadoop_add_subcommand "jnipath" client "prints the java.library.path"
  hadoop_add_subcommand "kerbname" client "show auth_to_local principal conversion"
  hadoop_add_subcommand "key" client "manage keys via the KeyProvider"
  hadoop_add_subcommand "registrydns" daemon "run the registry DNS server"
  hadoop_add_subcommand "trace" client "view and modify Hadoop tracing settings"
  hadoop_add_subcommand "version" client "print the version"
  hadoop_add_subcommand "kdiag" client "Diagnose Kerberos Problems"
  hadoop_generate_usage "${HADOOP_SHELL_EXECNAME}" true
}
52+
53+
## @description  Default command handler for hadoop command
## @audience     public
## @stability    stable
## @replaceable  no
## @param        CLI arguments
function hadoopcmd_case
{
  subcmd=$1
  shift

  case ${subcmd} in
    # hdfs commands kept here only for backwards compatibility; delegate
    # to the hdfs script after warning the user.
    balancer|datanode|dfs|dfsadmin|dfsgroups| \
    namenode|secondarynamenode|fsck|fetchdt|oiv| \
    portmap|nfs3)
      hadoop_error "WARNING: Use of this script to execute ${subcmd} is deprecated."
      subcmd=${subcmd/dfsgroups/groups}
      hadoop_error "WARNING: Attempting to execute replacement \"hdfs ${subcmd}\" instead."
      hadoop_error ""
      #try to locate hdfs and if present, delegate to it.
      if [[ -f "${HADOOP_HDFS_HOME}/bin/hdfs" ]]; then
        exec "${HADOOP_HDFS_HOME}/bin/hdfs" \
          --config "${HADOOP_CONF_DIR}" "${subcmd}" "$@"
      elif [[ -f "${HADOOP_HOME}/bin/hdfs" ]]; then
        exec "${HADOOP_HOME}/bin/hdfs" \
          --config "${HADOOP_CONF_DIR}" "${subcmd}" "$@"
      else
        hadoop_error "HADOOP_HDFS_HOME not found!"
        exit 1
      fi
    ;;

    #mapred commands for backwards compatibility
    pipes|job|queue|mrgroups|mradmin|jobtracker|tasktracker)
      hadoop_error "WARNING: Use of this script to execute ${subcmd} is deprecated."
      subcmd=${subcmd/mrgroups/groups}
      hadoop_error "WARNING: Attempting to execute replacement \"mapred ${subcmd}\" instead."
      hadoop_error ""
      #try to locate mapred and if present, delegate to it.
      if [[ -f "${HADOOP_MAPRED_HOME}/bin/mapred" ]]; then
        exec "${HADOOP_MAPRED_HOME}/bin/mapred" \
          --config "${HADOOP_CONF_DIR}" "${subcmd}" "$@"
      elif [[ -f "${HADOOP_HOME}/bin/mapred" ]]; then
        exec "${HADOOP_HOME}/bin/mapred" \
          --config "${HADOOP_CONF_DIR}" "${subcmd}" "$@"
      else
        hadoop_error "HADOOP_MAPRED_HOME not found!"
        exit 1
      fi
    ;;
    checknative)
      HADOOP_CLASSNAME=org.apache.hadoop.util.NativeLibraryChecker
    ;;
    classpath)
      hadoop_do_classpath_subcommand HADOOP_CLASSNAME "$@"
    ;;
    conftest)
      HADOOP_CLASSNAME=org.apache.hadoop.util.ConfTest
    ;;
    credential)
      HADOOP_CLASSNAME=org.apache.hadoop.security.alias.CredentialShell
    ;;
    daemonlog)
      HADOOP_CLASSNAME=org.apache.hadoop.log.LogLevel
    ;;
    dtutil)
      HADOOP_CLASSNAME=org.apache.hadoop.security.token.DtUtilShell
    ;;
    envvars)
      # Print the computed environment and stop; no JVM is launched.
      echo "JAVA_HOME='${JAVA_HOME}'"
      echo "HADOOP_COMMON_HOME='${HADOOP_COMMON_HOME}'"
      echo "HADOOP_COMMON_DIR='${HADOOP_COMMON_DIR}'"
      echo "HADOOP_COMMON_LIB_JARS_DIR='${HADOOP_COMMON_LIB_JARS_DIR}'"
      echo "HADOOP_COMMON_LIB_NATIVE_DIR='${HADOOP_COMMON_LIB_NATIVE_DIR}'"
      echo "HADOOP_CONF_DIR='${HADOOP_CONF_DIR}'"
      echo "HADOOP_TOOLS_HOME='${HADOOP_TOOLS_HOME}'"
      echo "HADOOP_TOOLS_DIR='${HADOOP_TOOLS_DIR}'"
      echo "HADOOP_TOOLS_LIB_JARS_DIR='${HADOOP_TOOLS_LIB_JARS_DIR}'"
      if [[ -n "${QATESTMODE}" ]]; then
        echo "MYNAME=${MYNAME}"
        echo "HADOOP_SHELL_EXECNAME=${HADOOP_SHELL_EXECNAME}"
      fi
      exit 0
    ;;
    fs)
      HADOOP_CLASSNAME=org.apache.hadoop.fs.FsShell
    ;;
    jar)
      if [[ -n "${YARN_OPTS}" ]] || [[ -n "${YARN_CLIENT_OPTS}" ]]; then
        hadoop_error "WARNING: Use \"yarn jar\" to launch YARN applications."
      fi
      if [[ -z $1 || $1 = "--help" ]]; then
        echo "Usage: hadoop jar <jar> [mainClass] args..."
        exit 0
      fi
      HADOOP_CLASSNAME=org.apache.hadoop.util.RunJar
    ;;
    jnipath)
      hadoop_finalize
      echo "${JAVA_LIBRARY_PATH}"
      exit 0
    ;;
    kerbname)
      HADOOP_CLASSNAME=org.apache.hadoop.security.HadoopKerberosName
    ;;
    kdiag)
      HADOOP_CLASSNAME=org.apache.hadoop.security.KDiag
    ;;
    key)
      HADOOP_CLASSNAME=org.apache.hadoop.crypto.key.KeyShell
    ;;
    registrydns)
      # Daemon subcommand: may run privileged via the secure starter class.
      HADOOP_SUBCMD_SUPPORTDAEMONIZATION="true"
      HADOOP_SECURE_CLASSNAME='org.apache.hadoop.registry.server.dns.PrivilegedRegistryDNSStarter'
      HADOOP_CLASSNAME='org.apache.hadoop.registry.server.dns.RegistryDNSServer'
    ;;
    trace)
      HADOOP_CLASSNAME=org.apache.hadoop.tracing.TraceAdmin
    ;;
    version)
      HADOOP_CLASSNAME=org.apache.hadoop.util.VersionInfo
    ;;
    *)
      # Unknown subcommand: treat it as a fully-qualified class name.
      HADOOP_CLASSNAME="${subcmd}"
      if ! hadoop_validate_classname "${HADOOP_CLASSNAME}"; then
        hadoop_exit_with_usage 1
      fi
    ;;
  esac
}
182+
183+
# This script runs the hadoop core commands.

# let's locate libexec...
if [[ -n "${HADOOP_HOME}" ]]; then
  HADOOP_DEFAULT_LIBEXEC_DIR="${HADOOP_HOME}/libexec"
else
  # Resolve the directory holding this script, following symlinks (-P).
  bin=$(cd -P -- "$(dirname -- "${MYNAME}")" >/dev/null && pwd -P)
  HADOOP_DEFAULT_LIBEXEC_DIR="${bin}/../libexec"
fi

HADOOP_LIBEXEC_DIR="${HADOOP_LIBEXEC_DIR:-$HADOOP_DEFAULT_LIBEXEC_DIR}"
HADOOP_NEW_CONFIG=true
if [[ -f "${HADOOP_LIBEXEC_DIR}/hadoop-config.sh" ]]; then
  # shellcheck source=./hadoop-common-project/hadoop-common/src/main/bin/hadoop-config.sh
  . "${HADOOP_LIBEXEC_DIR}/hadoop-config.sh"
else
  # Diagnostics go to stderr (the original '2>&1' wrongly sent them to stdout).
  echo "ERROR: Cannot execute ${HADOOP_LIBEXEC_DIR}/hadoop-config.sh." >&2
  exit 1
fi

# now that we have support code, let's abs MYNAME so we can use it later
MYNAME=$(hadoop_abs "${MYNAME}")

if [[ $# = 0 ]]; then
  hadoop_exit_with_usage 1
fi

HADOOP_SUBCMD=$1
shift

# Re-exec under the appropriate user when the subcommand demands it
# (e.g. secure daemons started as root).
if hadoop_need_reexec hadoop "${HADOOP_SUBCMD}"; then
  hadoop_uservar_su hadoop "${HADOOP_SUBCMD}" \
    "${MYNAME}" \
    "--reexec" \
    "${HADOOP_USER_PARAMS[@]}"
  exit $?
fi

hadoop_verify_user_perm "${HADOOP_SHELL_EXECNAME}" "${HADOOP_SUBCMD}"

HADOOP_SUBCMD_ARGS=("$@")

# A user-defined hadoop_subcommand_<name> function overrides the built-in
# case handler.
if declare -f hadoop_subcommand_"${HADOOP_SUBCMD}" >/dev/null 2>&1; then
  hadoop_debug "Calling dynamically: hadoop_subcommand_${HADOOP_SUBCMD} ${HADOOP_SUBCMD_ARGS[*]}"
  "hadoop_subcommand_${HADOOP_SUBCMD}" "${HADOOP_SUBCMD_ARGS[@]}"
else
  hadoopcmd_case "${HADOOP_SUBCMD}" "${HADOOP_SUBCMD_ARGS[@]}"
fi

hadoop_add_client_opts

# In worker mode, fan the command out to all configured worker hosts.
if [[ ${HADOOP_WORKER_MODE} = true ]]; then
  hadoop_common_worker_mode_execute "${HADOOP_COMMON_HOME}/bin/hadoop" "${HADOOP_USER_PARAMS[@]}"
  exit $?
fi

hadoop_subcommand_opts "${HADOOP_SHELL_EXECNAME}" "${HADOOP_SUBCMD}"

# everything is in globals at this point, so call the generic handler
hadoop_generic_java_subcmd_handler

0 commit comments

Comments
 (0)