Commit db2d831
GH-46656: [CI][Dev] Fix shellcheck SC2034 and SC2086 errors in ci/scripts directory (#46657)
### Rationale for this change

We are trying to enable shellcheck for all sh files in #44748.

### What changes are included in this PR?

* SC2034 (unused variable): use the variable properly, e.g. `${1}` -> `${arrow_dir}`.
* SC2086 (quoting required): quote variable expansions, e.g. `${download_url}` -> `"${download_url}"`.

```
In ci/scripts/install_conda.sh line 30:
version=$2
^-----^ SC2034 (warning): version appears unused. Verify use (or export if used externally).

In ci/scripts/install_conda.sh line 37:
wget -nv ${download_url} -O /tmp/installer.sh
         ^-------------^ SC2086 (info): Double quote to prevent globbing and word splitting.
```

### Are these changes tested?

Yes.

### Are there any user-facing changes?

No.

* GitHub Issue: #46656

Lead-authored-by: Hiroyuki Sato <[email protected]>
Co-authored-by: Sutou Kouhei <[email protected]>
Signed-off-by: Sutou Kouhei <[email protected]>
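For reference, warnings like the ones quoted above can be reproduced locally by running shellcheck directly on the affected scripts; a minimal sketch, assuming shellcheck is installed and the commands are run from the repository root:

```sh
# Check the single script cited in the commit message;
# SC2034/SC2086 findings are printed to stdout.
shellcheck ci/scripts/install_conda.sh

# Or check every shell script under ci/scripts at once.
shellcheck ci/scripts/*.sh
```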
1 parent: 6804059

File tree

4 files changed: +21 -15 lines


.pre-commit-config.yaml

Lines changed: 3 additions & 0 deletions
```diff
@@ -276,6 +276,7 @@ repos:
   ?^ci/scripts/install_ceph\.sh$|
   ?^ci/scripts/install_chromedriver\.sh$|
   ?^ci/scripts/install_cmake\.sh$|
+  ?^ci/scripts/install_conda\.sh$|
   ?^ci/scripts/install_emscripten\.sh$|
   ?^ci/scripts/install_iwyu\.sh$|
   ?^ci/scripts/install_ninja\.sh$|
@@ -286,9 +287,11 @@ repos:
   ?^ci/scripts/install_vcpkg\.sh$|
   ?^ci/scripts/integration_arrow_build\.sh$|
   ?^ci/scripts/integration_dask\.sh$|
+  ?^ci/scripts/integration_spark\.sh$|
   ?^ci/scripts/matlab_build\.sh$|
   ?^ci/scripts/msys2_system_clean\.sh$|
   ?^ci/scripts/msys2_system_upgrade\.sh$|
+  ?^ci/scripts/nanoarrow_build\.sh$|
   ?^ci/scripts/python_sdist_build\.sh$|
   ?^ci/scripts/python_wheel_unix_test\.sh$|
   ?^ci/scripts/r_build\.sh$|
```
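With these three patterns added, the pre-commit shellcheck hook now covers the newly fixed scripts. A minimal sketch of running it locally, assuming pre-commit is installed and that the hook id is `shellcheck` (an assumption based on this config, not confirmed by the commit):

```sh
# Run only the shellcheck hook against the scripts touched by this commit.
pre-commit run shellcheck --files \
  ci/scripts/install_conda.sh \
  ci/scripts/integration_spark.sh \
  ci/scripts/nanoarrow_build.sh
```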

ci/scripts/install_conda.sh

Lines changed: 4 additions & 4 deletions
```diff
@@ -30,16 +30,16 @@ installer=$1
 version=$2
 prefix=$3
 
-download_url=https://github.com/conda-forge/miniforge/releases/latest/download/${installer^}-${platform}-${arch}.sh
+download_url=https://github.com/conda-forge/miniforge/releases/${version}/download/${installer^}-${platform}-${arch}.sh
 
 echo "Downloading Miniconda installer from ${download_url} ..."
 
-wget -nv ${download_url} -O /tmp/installer.sh
-bash /tmp/installer.sh -b -p ${prefix}
+wget -nv "${download_url}" -O /tmp/installer.sh
+bash /tmp/installer.sh -b -p "${prefix}"
 rm /tmp/installer.sh
 
 # Like "conda init", but for POSIX sh rather than bash
-ln -s ${prefix}/etc/profile.d/conda.sh /etc/profile.d/conda.sh
+ln -s "${prefix}/etc/profile.d/conda.sh" /etc/profile.d/conda.sh
 
 export PATH=/opt/conda/bin:$PATH
 
```
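The previously unused `version` argument (SC2034) is now part of the download URL. A hypothetical invocation under that contract, with illustrative values not taken from this commit:

```sh
# $1 installer, $2 version, $3 prefix;
# passing "latest" reproduces the previous hard-coded URL.
ci/scripts/install_conda.sh miniforge3 latest /opt/conda
```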

ci/scripts/integration_spark.sh

Lines changed: 10 additions & 7 deletions
```diff
@@ -18,25 +18,28 @@
 # exit on any error
 set -eu
 
-source_dir=${1}
-spark_dir=${2}
+if [ "$#" -lt 2 ]; then
+  echo "Usage: $0 <spark_version> <spark_dir>"
+  exit 1
+fi
 
 # Spark branch to checkout
-spark_version=${SPARK_VERSION:-master}
+spark_version=${1}
+spark_dir=${2}
 
 # Use old behavior that always dropped timezones.
 export PYARROW_IGNORE_TIMEZONE=1
 
-if [ "${SPARK_VERSION:1:2}" == "2." ]; then
+if [ "${spark_version:1:2}" == "2." ]; then
   # https://github.com/apache/spark/blob/master/docs/sql-pyspark-pandas-with-arrow.md#compatibility-setting-for-pyarrow--0150-and-spark-23x-24x
   export ARROW_PRE_0_15_IPC_FORMAT=1
 fi
 
 export MAVEN_OPTS="-Xss256m -Xmx2g -XX:ReservedCodeCacheSize=1g -Dorg.slf4j.simpleLogger.defaultLogLevel=warn"
 export MAVEN_OPTS="${MAVEN_OPTS} -Dorg.slf4j.simpleLogger.log.org.apache.maven.cli.transfer.Slf4jMavenTransferListener=warn"
 
-pushd ${spark_dir}
-echo "Building Spark ${SPARK_VERSION}"
+pushd "${spark_dir}"
+echo "Building Spark ${spark_version}"
 
 # Build Spark only
 build/mvn -B -DskipTests package
@@ -50,7 +53,7 @@ pushd ${spark_dir}
   "pyspark.sql.tests.arrow.test_arrow_map"
   "pyspark.sql.tests.arrow.test_arrow_python_udf")
 
-case "${SPARK_VERSION}" in
+case "${spark_version}" in
   v1.*|v2.*|v3.0.*|v3.1.*|v3.2.*|v3.3.*)
     old_test_modules=true
     ;;
```
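With this change, the Spark version is taken from the first positional argument instead of the previously unused-looking `SPARK_VERSION` environment variable. A hypothetical invocation, with an illustrative tag and checkout path:

```sh
# $1 is the Spark branch/tag to build, $2 the Spark checkout directory.
ci/scripts/integration_spark.sh v3.5.1 /tmp/spark
```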

ci/scripts/nanoarrow_build.sh

Lines changed: 4 additions & 4 deletions
```diff
@@ -20,7 +20,7 @@
 set -e
 
 arrow_dir=${1}
-source_dir=${1}/nanoarrow
+source_dir=${arrow_dir}/nanoarrow
 build_dir=${2}/nanoarrow
 
 # This file is used to build the nanoarrow binaries needed for the archery
@@ -43,10 +43,10 @@ fi
 
 set -x
 
-mkdir -p ${build_dir}
-pushd ${build_dir}
+mkdir -p "${build_dir}"
+pushd "${build_dir}"
 
-cmake ${source_dir} \
+cmake "${source_dir}" \
   -DNANOARROW_IPC=ON \
   -DNANOARROW_IPC_WITH_ZSTD=ON \
   -DNANOARROW_BUILD_INTEGRATION_TESTS=ON
```
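Reusing `${arrow_dir}` instead of repeating `${1}` resolves the SC2034 warning without changing behavior. A hypothetical invocation, with illustrative paths:

```sh
# $1 is the Arrow checkout containing nanoarrow, $2 the build root.
ci/scripts/nanoarrow_build.sh /arrow /build
```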
