
# Fail fast: exit on error (-e), on unset variables (-u), and on any
# failure inside a pipeline (pipefail).
set -euo pipefail
44
5- # Script Name: s3-repo-sync-and-sign.sh
6- #
75# Description:
8- # This script automates AWS credentials configuration, secure GPG key handling,
9- # RPM repository synchronization from an S3 bucket, RPM signing, and repository metadata signing.
10- # It also exports and places the GPG public key in the repository for client use.
11- # Additionally, it supports a `s3-sync-only` mode to sync the S3 bucket to the local directory
12- # and exit after the operation completes.
13- #
14- # Usage:
15- # ./s3-repo-sync-and-sign.sh [-c] [-s <s3-bucket>] [-d <local-dir>] [-k <encrypted-key-file>] [-g <gpg-key-id>] [--upload-with-delete] [--s3-sync-only]
16- #
17- # Options:
18- # -c : Configure AWS credentials using 'aws configure'.
19- # -s <s3-bucket> : Specify the S3 bucket and path to sync (required).
20- # -d <local-dir> : Specify the local directory to sync to (default: ~/repo).
21- # -k <encrypted-key-file> : Specify the encrypted GPG private key file to import (optional).
22- # -g <gpg-key-id> : Specify the GPG key ID or email to use for signing (required for signing operations).
23- # --upload-with-delete : Sync local changes to S3, deleting files in S3 that don't exist locally.
24- # --s3-sync-only : Perform only the S3 sync to the local directory, inform the user, and exit.
25- # -h, --help : Display this help and exit.
# This script automates several tasks related to managing RPM repositories in AWS S3.
# It handles the following operations:
#   1. Syncing an RPM repository from an S3 bucket to a local directory.
#   2. Signing all RPMs in the local repository with a specified GPG key.
#   3. Updating and signing the repository metadata.
#   4. Exporting the GPG public key and placing it in the repository for client use.
#   5. Optionally, uploading changes back to the S3 bucket and deleting files in S3 that no longer exist locally.
#   6. Decrypting and importing a GPG private key used for signing.
#   7. A mode to only decrypt and import the GPG private key.
#   8. Identifying and copying a newly built RPM to the appropriate repository.

# Print detailed usage/help text to stdout. Callers decide the exit code
# (--help exits 0, bad options exit 1).
# NOTE(review): the e-mail addresses in the examples were redacted by the
# diff-page scrape ("[email protected]"); reconstructed as example.com
# placeholders — confirm against the original script.
usage() {
  cat << EOF
Usage: $0 [OPTIONS]

This script automates several tasks related to managing RPM repositories in AWS S3.
It can be used to sync repositories from S3, sign RPMs with a GPG key, update and sign repository metadata,
and optionally upload changes back to S3.

Options:
  -c                        Configure AWS credentials using 'aws configure'.
  -s <s3-bucket>            Specify the S3 bucket and path to sync (required for S3 operations).
  -d <local-dir>            Specify the local directory to sync to (default: ~/repo).
  -k <encrypted-key-file>   Specify the encrypted GPG private key file to import (optional).
  -g <gpg-key-id>           Specify the GPG key ID or email to use for signing (required for signing operations).
  --upload-with-delete      Sync local changes to S3, deleting files in S3 that no longer exist locally.
  --s3-sync-only            Perform only the S3 sync to the local directory, inform the user, and exit.
  --import-gpg-key-only     Decrypt and import the GPG private key, then exit. No other operations will be performed.
  --copy-new-rpm            Copy the newly built RPM(s) to the appropriate repository directory based on architecture and version.
  -h, --help                Display this help message and exit.

Examples:
  # Sync an S3 repository to a local directory and sign RPMs with a GPG key
  $0 -s s3://mybucket/repo -g signing-key@example.com

  # Sync an S3 repository only, without signing RPMs or performing other operations
  $0 -s s3://mybucket/repo --s3-sync-only

  # Decrypt and import a GPG private key, then exit
  $0 -k ~/path/to/encrypted-gpg-key.asc --import-gpg-key-only

  # Copy newly built RPMs to the appropriate repository and sign them
  $0 --copy-new-rpm -g signing-key@example.com

Notes:
  - The -s option is required for any operation that interacts with S3, such as syncing or uploading with delete.
  - The -g option is required for any operation that involves signing RPMs or repository metadata.
  - When using --upload-with-delete, ensure that you have the necessary permissions to delete objects in the specified S3 bucket.
  - If you only want to perform local operations (e.g., copying RPMs, signing), you do not need to specify the -s option.

EOF
}
59+
# Default values for all option flags and paths. GPG_KEY_ID must default to
# the EMPTY string (the scraped text showed `" "`, an extraction artifact of
# `""` — a lone space would defeat any emptiness check). LOCAL_DIR must be
# `~/repo` with no space: `LOCAL_DIR=~ /repo` would try to execute /repo.
GPG_KEY_ID=""
UPLOAD_WITH_DELETE=false
S3_SYNC_ONLY=false
IMPORT_GPG_KEY_ONLY=false
COPY_NEW_RPM=false
CONFIGURE_AWS=false
LOCAL_DIR=~/repo
2668
2769# Function to check if required commands are available
2870check_commands () {
@@ -35,16 +77,7 @@ check_commands() {
3577 done
3678}
3779
38- # Function to display usage information
39- usage () {
40- echo " Usage: $0 [-c] [-s <s3-bucket>] [-d <local-dir>] [-k <encrypted-key-file>] [-g <gpg-key-id>] [--upload-with-delete] [--s3-sync-only]"
41- exit 1
42- }
43-
# Parse command-line options.
# NOTE(review): the -s/-d/-k arms were hidden behind a diff hunk header in the
# scraped source; reconstructed from the old version's identical arms and the
# usage text (S3_BUCKET, LOCAL_DIR, ENCRYPTED_KEY_FILE are all consumed later).
# Verify against the original file.
while [[ "$#" -gt 0 ]]; do
  case $1 in
    -c) CONFIGURE_AWS=true; shift ;;
    -s) S3_BUCKET="$2"; shift 2 ;;
    -d) LOCAL_DIR="$2"; shift 2 ;;
    -k) ENCRYPTED_KEY_FILE="$2"; shift 2 ;;
    -g) GPG_KEY_ID="$2"; shift 2 ;;
    --upload-with-delete) UPLOAD_WITH_DELETE=true; shift ;;
    --s3-sync-only) S3_SYNC_ONLY=true; shift ;;
    --import-gpg-key-only) IMPORT_GPG_KEY_ONLY=true; shift ;;
    --copy-new-rpm) COPY_NEW_RPM=true; shift ;;
    -h|--help) usage; exit 0 ;;
    *) echo "Unknown option: $1"; usage; exit 1 ;;
  esac
done
6196
62- # Check if required commands are available
6397check_commands
6498
65- # Ensure S3 bucket is provided
66- if [ -z " ${S3_BUCKET:- } " ]; then
67- echo " Error: S3 bucket (-s) is required."
68- usage
69- fi
70-
# AWS credentials configuration (optional, enabled with -c).
# Runs the interactive `aws configure` prompt; skipped otherwise.
if [ "$CONFIGURE_AWS" = true ]; then
  echo "Configuring AWS credentials..."
  aws configure
fi
76104
77- # Check access to the S3 bucket
78- echo " Checking access to S3 bucket $S3_BUCKET ..."
79- if ! aws s3 ls " $S3_BUCKET " & > /dev/null; then
80- echo " Error: Unable to access S3 bucket $S3_BUCKET . Please check your AWS credentials and permissions."
81- exit 1
82- fi
83-
84- # Sync the S3 repository to the local directory
85- mkdir -p " $LOCAL_DIR "
86- echo " Syncing S3 repository from $S3_BUCKET to $LOCAL_DIR ..."
87- aws s3 sync " $S3_BUCKET " " $LOCAL_DIR "
88-
89- # Check if the operation is `s3-sync-only`
90- if [ " $S3_SYNC_ONLY " = true ]; then
91- echo " S3 sync operation completed successfully."
92- exit 0
93- fi
94-
95- # Decrypt and import GPG private key if not in sync-only mode
105+ # Decrypt and import GPG private key if in import-only mode or not in sync-only mode
96106if [ -n " ${ENCRYPTED_KEY_FILE:- } " ]; then
97107 DECRYPTED_KEY_FILE=" ${ENCRYPTED_KEY_FILE% .* } "
98108 echo " Decrypting GPG private key..."
@@ -107,50 +117,130 @@ if [ -n "${ENCRYPTED_KEY_FILE:-}" ]; then
107117
108118 # Securely delete the decrypted key file
109119 shred -u " $DECRYPTED_KEY_FILE "
120+
121+ # Exit if only importing GPG key
122+ if [ " $IMPORT_GPG_KEY_ONLY " = true ]; then
123+ echo " GPG key has been decrypted and imported successfully. Exiting."
124+ exit 0
125+ fi
126+ fi
127+
# Check access to the S3 bucket and pull the repository down from S3, unless
# we are in a mode that does not need the down-sync (key import only, local
# RPM copy, or upload-only).
# BUG FIX: the scraped version also required S3_SYNC_ONLY=false in this guard,
# which made the inner `--s3-sync-only` exit unreachable — that mode never
# synced anything. --s3-sync-only must be allowed INTO this block.
if [ "$IMPORT_GPG_KEY_ONLY" = false ] && [ "$COPY_NEW_RPM" = false ] && [ "$UPLOAD_WITH_DELETE" = false ]; then
  if [ -z "${S3_BUCKET:-}" ]; then
    echo "Error: S3 bucket (-s) is required."
    exit 1
  fi

  echo "Checking access to S3 bucket $S3_BUCKET..."
  if ! aws s3 ls "$S3_BUCKET" &> /dev/null; then
    echo "Error: Unable to access S3 bucket $S3_BUCKET. Please check your AWS credentials and permissions."
    exit 1
  fi

  # Sync the S3 repository to the local directory.
  mkdir -p "$LOCAL_DIR"
  echo "Syncing S3 repository from $S3_BUCKET to $LOCAL_DIR..."
  aws s3 sync "$S3_BUCKET" "$LOCAL_DIR"

  # In --s3-sync-only mode we are done once the sync finishes.
  if [ "$S3_SYNC_ONLY" = true ]; then
    echo "S3 sync operation completed successfully."
    exit 0
  fi
fi
152+
# Copy freshly built RPMs from ~/rpmbuild into the matching repo directory
# (el8 vs el9 is inferred from the .elN. disttag in the filename).
# Uses find -print0 / read -d '' so paths containing whitespace survive
# (the scraped version word-split an unquoted $NEW_RPMS list).
if [ "$COPY_NEW_RPM" = true ]; then
  echo "Identifying the newly built RPMs..."

  for ARCH in x86_64 noarch; do
    RPM_DIR=~/rpmbuild/RPMS/$ARCH

    # Skip architectures that were not built.
    if [ ! -d "$RPM_DIR" ]; then
      echo "Warning: Directory $RPM_DIR does not exist. Skipping $ARCH."
      continue
    fi

    FOUND_ANY=false
    while IFS= read -r -d '' NEW_RPM; do
      FOUND_ANY=true
      # Route the RPM to el8 or el9 based on its disttag.
      case "$NEW_RPM" in
        *.el8.*) TARGET_REPO="$LOCAL_DIR/el8/$ARCH" ;;
        *.el9.*) TARGET_REPO="$LOCAL_DIR/el9/$ARCH" ;;
        *)
          echo "Error: Unable to determine the correct repository for $NEW_RPM. Exiting."
          exit 1
          ;;
      esac

      # Ensure the target repository directory exists, then copy.
      mkdir -p "$TARGET_REPO"
      echo "Copying $NEW_RPM to $TARGET_REPO..."
      cp "$NEW_RPM" "$TARGET_REPO/"
      echo "Copy operation completed."
    done < <(find "$RPM_DIR" -name "cloudberry-*.rpm" ! -name "*debuginfo*.rpm" -print0)

    if [ "$FOUND_ANY" = false ]; then
      echo "No matching RPMs found in $RPM_DIR."
    fi
  done
fi
111193
# Repository directories to process: both distros (el8/el9), both arches.
REPO_DIRS=("$LOCAL_DIR/el8/x86_64" "$LOCAL_DIR/el8/noarch" "$LOCAL_DIR/el9/x86_64" "$LOCAL_DIR/el9/noarch")

# For each existing repo dir: export the public key, register it with the RPM
# database (so rpm -Kv can verify), sign every RPM, rebuild and sign the
# repodata, and drop the public key at the repo root for clients.
for REPO_DIR in "${REPO_DIRS[@]}"; do
  if [ -d "$REPO_DIR" ]; then
    echo "Processing repository at $REPO_DIR..."

    # Export GPG public key to a temp file (re-done per repo so each repo
    # gets its own copy; the temp file is removed below).
    TEMP_GPG_KEY=$(mktemp)
    echo "Exporting GPG public key to temporary location..."
    gpg --armor --export "$GPG_KEY_ID" > "$TEMP_GPG_KEY"

    # Import the public key into the RPM database so signature
    # verification below can succeed.
    echo "Importing GPG public key into RPM database..."
    sudo rpm --import "$TEMP_GPG_KEY"

    # Sign each RPM; _gpg_name selects the signing key for rpm.
    echo "Signing RPM packages in $REPO_DIR..."
    find "$REPO_DIR" -name "*.rpm" -exec rpm --addsign --define "_gpg_name $GPG_KEY_ID" {} \;

    # Verify the signatures we just created.
    echo "Verifying RPM signatures in $REPO_DIR..."
    find "$REPO_DIR" -name "*.rpm" -exec rpm -Kv {} \;

    # Recreate the repository metadata.
    echo "Updating repository metadata in $REPO_DIR..."
    createrepo --update "$REPO_DIR"

    # Sign repomd.xml; --batch --yes overwrites an existing .asc without
    # prompting.
    echo "Signing repository metadata in $REPO_DIR..."
    gpg --batch --yes --detach-sign --armor --local-user "$GPG_KEY_ID" "$REPO_DIR/repodata/repomd.xml"

    # Publish the public key inside the repo for clients to import.
    cp "$TEMP_GPG_KEY" "$REPO_DIR/RPM-GPG-KEY-cloudberry"

    # Clean up the temporary key file.
    rm -f "$TEMP_GPG_KEY"
  else
    echo "Warning: Repository directory $REPO_DIR does not exist. Skipping..."
  fi
done
151236
# Push local changes back to S3 when --upload-with-delete was given;
# --delete removes remote objects that no longer exist locally.
# NOTE(review): the closing `fi` was cut off by the truncated diff view;
# it is the only syntactically possible continuation.
if [ "$UPLOAD_WITH_DELETE" = true ]; then
  if [ -z "${S3_BUCKET:-}" ]; then
    echo "Error: S3 bucket (-s) is required for upload with delete."
    exit 1
  fi

  echo "Uploading local changes to S3 with --delete option..."
  aws s3 sync "$LOCAL_DIR" "$S3_BUCKET" --delete
  echo "S3 sync with --delete completed."
fi