This repository was archived by the owner on Sep 12, 2025. It is now read-only.

Commit e40c74a

Add repo tool
1 parent 16b3dfe commit e40c74a

File tree

1 file changed (+160, -0 lines)

scripts/s3-repo-sync-and-sign.sh

Lines changed: 160 additions & 0 deletions
#!/bin/bash

set -euo pipefail

# Script Name: s3-repo-sync-and-sign.sh
#
# Description:
#   This script automates AWS credentials configuration, secure GPG key handling,
#   RPM repository synchronization from an S3 bucket, RPM signing, and repository metadata signing.
#   It also exports and places the GPG public key in the repository for client use.
#   Additionally, it supports an `--s3-sync-only` mode that syncs the S3 bucket to the local
#   directory and exits after the operation completes.
#
# Usage:
#   ./s3-repo-sync-and-sign.sh [-c] [-s <s3-bucket>] [-d <local-dir>] [-k <encrypted-key-file>] [-g <gpg-key-id>] [--upload-with-delete] [--s3-sync-only]
#
# Options:
#   -c                       : Configure AWS credentials using 'aws configure'.
#   -s <s3-bucket>           : Specify the S3 bucket and path to sync (required).
#   -d <local-dir>           : Specify the local directory to sync to (default: ~/repo).
#   -k <encrypted-key-file>  : Specify the encrypted GPG private key file to import (optional).
#   -g <gpg-key-id>          : Specify the GPG key ID or email to use for signing (required for signing operations).
#   --upload-with-delete     : Sync local changes to S3, deleting files in S3 that don't exist locally.
#   --s3-sync-only           : Perform only the S3 sync to the local directory, inform the user, and exit.
#   -h, --help               : Display this help and exit.
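#
# Example (the bucket, key file, and key ID below are illustrative placeholders,
# not values defined by this script):
#   ./s3-repo-sync-and-sign.sh -s s3://example-bucket/repo -d ~/repo \
#       -k signing-key.asc.gpg -g packager@example.com --upload-with-delete
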
# Function to check if required commands are available
check_commands() {
    local cmds=("aws" "gpg" "shred" "createrepo" "rpm" "find")
    for cmd in "${cmds[@]}"; do
        if ! command -v "$cmd" &> /dev/null; then
            echo "Error: Required command '$cmd' not found. Please install it before running the script."
            exit 1
        fi
    done
}

# Function to display usage information
usage() {
    echo "Usage: $0 [-c] [-s <s3-bucket>] [-d <local-dir>] [-k <encrypted-key-file>] [-g <gpg-key-id>] [--upload-with-delete] [--s3-sync-only]"
    exit 1
}

# Parse options
GPG_KEY_ID=""
UPLOAD_WITH_DELETE=false
S3_SYNC_ONLY=false
LOCAL_DIR="$HOME/repo"   # default local directory, as documented in the usage text above
while [[ "$#" -gt 0 ]]; do
    case $1 in
        -c) CONFIGURE_AWS=true; shift ;;
        -s) S3_BUCKET="$2"; shift 2 ;;
        -d) LOCAL_DIR="$2"; shift 2 ;;
        -k) ENCRYPTED_KEY_FILE="$2"; shift 2 ;;
        -g) GPG_KEY_ID="$2"; shift 2 ;;
        --upload-with-delete) UPLOAD_WITH_DELETE=true; shift ;;
        --s3-sync-only) S3_SYNC_ONLY=true; shift ;;
        -h|--help) usage ;;
        *) echo "Unknown option: $1"; usage ;;
    esac
done

# Check if required commands are available
check_commands

# Ensure S3 bucket is provided
if [ -z "${S3_BUCKET:-}" ]; then
    echo "Error: S3 bucket (-s) is required."
    usage
fi

# AWS credentials configuration (optional)
if [ "${CONFIGURE_AWS:-false}" = true ]; then
    echo "Configuring AWS credentials..."
    aws configure
fi

# Check access to the S3 bucket
echo "Checking access to S3 bucket $S3_BUCKET..."
if ! aws s3 ls "$S3_BUCKET" &> /dev/null; then
    echo "Error: Unable to access S3 bucket $S3_BUCKET. Please check your AWS credentials and permissions."
    exit 1
fi

# Sync the S3 repository to the local directory
mkdir -p "$LOCAL_DIR"
echo "Syncing S3 repository from $S3_BUCKET to $LOCAL_DIR..."
aws s3 sync "$S3_BUCKET" "$LOCAL_DIR"

# Check if the operation is `s3-sync-only`
if [ "$S3_SYNC_ONLY" = true ]; then
    echo "S3 sync operation completed successfully."
    exit 0
fi

# Decrypt and import GPG private key if not in sync-only mode
if [ -n "${ENCRYPTED_KEY_FILE:-}" ]; then
    DECRYPTED_KEY_FILE="${ENCRYPTED_KEY_FILE%.*}"
    echo "Decrypting GPG private key..."
    gpg --decrypt --output "$DECRYPTED_KEY_FILE" "$ENCRYPTED_KEY_FILE"

    # Check if the key is already imported
    if gpg --list-keys | grep -q "$GPG_KEY_ID"; then
        echo "GPG key already imported."
    else
        gpg --import "$DECRYPTED_KEY_FILE"
    fi

    # Securely delete the decrypted key file
    shred -u "$DECRYPTED_KEY_FILE"
fi

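# Note: `rpm --addsign` takes its signing key from the %_gpg_name RPM macro
# (usually set in ~/.rpmmacros) rather than from the -g option directly. A minimal
# sketch of that configuration, reusing the key ID passed via -g, might look like
# this (illustrative only; not part of the original script):
#
#   echo "%_gpg_name $GPG_KEY_ID" > ~/.rpmmacros
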
# Define the directories for `el8` and `el9` repositories
REPO_DIRS=("$LOCAL_DIR/el8/x86_64" "$LOCAL_DIR/el9/x86_64")

# Traverse each repository directory (el8 and el9) and sign RPMs
for REPO_DIR in "${REPO_DIRS[@]}"; do
    if [ -d "$REPO_DIR" ]; then
        echo "Processing repository at $REPO_DIR..."

        # Sign each RPM in the directory
        echo "Signing RPM packages in $REPO_DIR..."
        find "$REPO_DIR" -name "*.rpm" -exec rpm --addsign {} \;

        # Verify that RPMs were signed successfully
        echo "Verifying RPM signatures in $REPO_DIR..."
        find "$REPO_DIR" -name "*.rpm" -exec rpm -K {} \;

        # Recreate the repository metadata
        echo "Updating repository metadata in $REPO_DIR..."
        createrepo --update "$REPO_DIR"

        # Sign the repository metadata
        echo "Signing repository metadata in $REPO_DIR..."
        gpg --detach-sign --armor --local-user "$GPG_KEY_ID" "$REPO_DIR/repodata/repomd.xml"
    else
        echo "Warning: Repository directory $REPO_DIR does not exist. Skipping..."
    fi
done

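# To spot-check a signed repository by hand, the detached metadata signature produced
# above can be verified with gpg (a manual sketch run from $LOCAL_DIR, not invoked by
# this script):
#
#   gpg --verify el9/x86_64/repodata/repomd.xml.asc el9/x86_64/repodata/repomd.xml
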
# Export GPG public key for clients and place it in the root of the repository
echo "Exporting GPG public key..."
gpg --armor --export "$GPG_KEY_ID" > "$LOCAL_DIR/RPM-GPG-KEY-cloudberry"

# Optionally, place the public key in a specific directory (keys) within each repo
for REPO_DIR in "${REPO_DIRS[@]}"; do
    if [ -d "$REPO_DIR" ]; then
        mkdir -p "$REPO_DIR/keys"
        cp "$LOCAL_DIR/RPM-GPG-KEY-cloudberry" "$REPO_DIR/keys/RPM-GPG-KEY-cloudberry"
    fi
done

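# On a client, the exported public key is typically referenced from a yum/dnf .repo
# file. A minimal sketch, assuming a hypothetical HTTPS mirror of this repository
# layout (the URL is a placeholder, not defined by this script):
#
#   [cloudberry]
#   name=Cloudberry
#   baseurl=https://example.com/repo/el9/x86_64/
#   enabled=1
#   gpgcheck=1
#   repo_gpgcheck=1
#   gpgkey=https://example.com/repo/RPM-GPG-KEY-cloudberry
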
# Upload changes to S3 with --delete option if requested
if [ "$UPLOAD_WITH_DELETE" = true ]; then
    echo "Uploading local changes to S3 with --delete option..."
    aws s3 sync "$LOCAL_DIR" "$S3_BUCKET" --delete
    echo "S3 sync with --delete completed."
fi

# Print completion message
echo "S3 repository sync, RPM signing, metadata signing, and public key export completed successfully."

0 commit comments
