Skip to content

Commit 0fead2f

Browse files
committed
update pipeline-service ci setup
1 parent 40eeb2e commit 0fead2f

File tree

3 files changed

+58
-93
lines changed

3 files changed

+58
-93
lines changed

ci/docs/continuous_integation.md

Lines changed: 3 additions & 1 deletion
Original file line numberDiff line numberDiff line change
@@ -11,12 +11,14 @@ HyperShift official [Documentation Guide](https://hypershift-docs.netlify.app/).
1111
### Pre-requisites:
1212

1313
- Install [aws](https://docs.aws.amazon.com/cli/latest/userguide/getting-started-install.html) cli
14+
- Install [hypershift](https://hypershift-docs.netlify.app/getting-started/#prerequisites) cli
1415
- Provision a ROSA cluster by following [documentation](rosa_cluster_provision.md)
1516

1617
### HyperShift Setup
17-
1818
You can configure HyperShift on ROSA by running the [hypershift_setup.sh](../hack/hypershift_setup.sh) script.
1919

20+
After that, you need to add a Configmap for storing the kubeconfig of the ROSA cluster and a Secret for storing the Bitwarden credentials (BW_CLIENTID, BW_CLIENTSECRET and BW_PASSWORD).
21+
2022
## Setup GitHub app
2123

2224
- Need to configure GitHub app for Pipelines as Code configuration into Pipeline Service repository.

ci/hack/hypershift_setup.sh

Lines changed: 52 additions & 89 deletions
Original file line numberDiff line numberDiff line change
@@ -1,10 +1,23 @@
11
#!/usr/bin/env bash
22

3-
#quit if exit status of any cmd is a non-zero value
3+
# Quit if exit status of any cmd is a non-zero value
44
set -o errexit
55
set -o nounset
66
set -o pipefail
77

8+
SCRIPT_DIR="$(
9+
cd "$(dirname "$0")" >/dev/null
10+
pwd
11+
)"
12+
13+
PROJECT_DIR="$(
14+
cd "$SCRIPT_DIR/../.." >/dev/null || exit 1
15+
pwd
16+
)"
17+
18+
# shellcheck source=ci/images/ci-runner/hack/bin/utils.sh
19+
source "$PROJECT_DIR/ci/images/ci-runner/hack/bin/utils.sh"
20+
821
usage() {
922
echo "
1023
Usage:
@@ -15,16 +28,8 @@ Install HyperShift operator on ROSA cluster
1528
Mandatory arguments:
1629
--kubeconfig
1730
path to HyperShift Compute KUBECONFIG.
18-
--secret
19-
path to HyperShift pull secret.
20-
--url
21-
HyperShift base domain url.
22-
--id
23-
AWS access key id.
24-
--key
25-
AWS secret access key.
2631
-r, --region
27-
AWS region name.
32+
AWS s3 region name.
2833
-n, --name
2934
AWS S3 bucket name.
3035
@@ -36,7 +41,6 @@ Optional arguments:
3641
Example:
3742
${0##*/} ./hypershift_setup.sh --region us-west-2
3843
" >&2
39-
4044
}
4145

4246
parse_args() {
@@ -46,25 +50,9 @@ parse_args() {
4650
shift
4751
export KUBECONFIG="$1"
4852
;;
49-
--secret)
50-
shift
51-
export HYPERSHIFT_PULL_SECRET="$1"
52-
;;
53-
--url)
54-
shift
55-
export HYPERSHIFT_BASE_DOMAIN="$1"
56-
;;
57-
--id)
58-
shift
59-
export AWS_ACCESS_KEY_ID="$1"
60-
;;
61-
--key)
62-
shift
63-
export AWS_SECRET_ACCESS_KEY="$1"
64-
;;
6553
-r | --region)
6654
shift
67-
export AWS_REGION="$1"
55+
export BUCKET_REGION="$1"
6856
;;
6957
-n | --name)
7058
shift
@@ -93,28 +81,8 @@ prechecks() {
9381
usage
9482
exit 1
9583
fi
96-
if [[ -z "$HYPERSHIFT_PULL_SECRET" ]]; then
97-
printf "HyperShift pull secret is not set\n\n"
98-
usage
99-
exit 1
100-
fi
101-
if [[ -z "$HYPERSHIFT_BASE_DOMAIN" ]]; then
102-
printf "HyperShift base domain url is not set\n\n"
103-
usage
104-
exit 1
105-
fi
106-
if [[ -z "$AWS_ACCESS_KEY_ID" ]]; then
107-
printf "AWS access key id is not set\n\n"
108-
usage
109-
exit 1
110-
fi
111-
if [[ -z "$AWS_SECRET_ACCESS_KEY" ]]; then
112-
printf "AWS secret access key is not set\n\n"
113-
usage
114-
exit 1
115-
fi
116-
if [[ -z "$AWS_REGION" ]]; then
117-
printf "AWS region is not set\n\n"
84+
if [[ -z "$BUCKET_REGION" ]]; then
85+
printf "AWS S3 region is not set\n\n"
11886
usage
11987
exit 1
12088
fi
@@ -125,58 +93,53 @@ prechecks() {
12593
fi
12694
}
12795

96+
create_s3_bucket() {
97+
# Check if the s3 bucket is there
98+
BUCKET_EXISTS=$(aws s3api head-bucket --bucket "${BUCKET_NAME}" 2>&1 || true)
99+
if [ -z "$BUCKET_EXISTS" ]; then
100+
echo "Bucket $BUCKET_NAME exists"
101+
else
102+
echo "Bucket $BUCKET_NAME does not exist, start to create it"
103+
aws s3api create-bucket --acl public-read \
104+
--create-bucket-configuration LocationConstraint="$BUCKET_REGION" \
105+
--region "$BUCKET_REGION" \
106+
--bucket "$BUCKET_NAME"
107+
fi
108+
}
109+
128110
init() {
129-
SCRIPT_DIR=$(
130-
cd "$(dirname "$0")" >/dev/null
131-
pwd
132-
)
111+
# Retrieve AWS Credential file from Bitwarden
112+
open_bitwarden_session
113+
get_aws_credentials
133114
}
134115

135116
install_hypershift() {
136117
echo "HyperShift setup on ROSA cluster"
137-
# Enable HyperShift and make ROSA cluster a managed cluster, visit documentation https://access.redhat.com/documentation/en-us/red_hat_advanced_cluster_management_for_kubernetes/2.6/html-single/multicluster_engine/index#hosted-control-planes-configure
138-
kubectl apply -f "$SCRIPT_DIR/ci/manifests/hypershift/multi_cluster_engine.yaml"
139-
kubectl apply -f "$SCRIPT_DIR/ci/manifests/hypershift/manage_cluster.yaml"
140-
141-
# Create an S3 bucket for HyperShift Operator with public-read
142-
aws s3api create-bucket --acl public-read --bucket "$BUCKET_NAME" \
143-
--create-bucket-configuration LocationConstraint="$AWS_REGION" \
144-
--region "$AWS_REGION"
145-
146-
# Create an OIDC S3 credentials secret
147-
oc create secret generic hypershift-operator-oidc-provider-s3-credentials \
148-
--from-file=credentials="$HOME/.aws/credentials" \
149-
--from-literal=bucket="$BUCKET_NAME" \
150-
--from-literal=region="$AWS_REGION" -n local-cluster
151-
152-
# Install HyperShift operator on the managed cluster
153-
kubectl apply -f "$SCRIPT_DIR/ci/manifests/hypershift/hypershift_operator_install.yaml"
154-
# Wait for HyperShift operator to be installed
155-
while [ "$(kubectl -n local-cluster get ManagedClusterAddOn | grep -cE "hypershift-addon")" != "1" ]; do
156-
echo -n "."
157-
sleep 2
158-
done
159-
echo "HyperShift operator successfully installed on the managed cluster"
160-
161-
# Create AWS credential secret
162-
kubectl create ns ci-clusters
163-
164-
oc create secret generic my-aws-cred -n ci-clusters \
165-
--from-literal=baseDomain="$HYPERSHIFT_BASE_DOMAIN" \
166-
--from-literal=aws_access_key_id="$AWS_ACCESS_KEY_ID" \
167-
--from-literal=aws_secret_access_key="$AWS_SECRET_ACCESS_KEY" \
168-
--from-literal=pullSecret="$HYPERSHIFT_PULL_SECRET" \
169-
--from-file=ssh-publickey="$HOME/.ssh/id_rsa.pub" \
170-
--from-file=ssh-privatekey="$HOME/.ssh/id_rsa"
118+
# Install HyperShift operator
119+
hypershift install --oidc-storage-provider-s3-credentials "$AWS_CREDENTIALS" \
120+
--oidc-storage-provider-s3-bucket-name "$BUCKET_NAME" \
121+
--oidc-storage-provider-s3-region="$BUCKET_REGION"
122+
123+
# Loop to check if the deployment is Available and Ready
124+
local ns="hypershift"
125+
if kubectl wait --for=condition=Available=true "deployment/operator" -n "$ns" --timeout=120s >/dev/null; then
126+
printf ", Ready\n"
127+
else
128+
kubectl -n "$ns" describe "deployment/operator"
129+
kubectl -n "$ns" logs "deployment/operator"
130+
kubectl -n "$ns" get events | grep Warning
131+
exit 1
132+
fi
171133
}
172134

173135
main() {
174136
init
175137
parse_args "$@"
176138
prechecks
139+
create_s3_bucket
177140
install_hypershift
178141
}
179142

180143
if [ "${BASH_SOURCE[0]}" == "$0" ]; then
181144
main "$@"
182-
fi
145+
fi

ci/hack/rosa_cluster_provision.sh

Lines changed: 3 additions & 3 deletions
Original file line numberDiff line numberDiff line change
@@ -111,10 +111,10 @@ init() {
111111
provision_rosa_cluster() {
112112
# This repo has the ROSA provision and destroy cluster scripts
113113
git clone --branch main [email protected]:stolostron/bootstrap-ks.git
114-
cd bootstrap-ks
114+
cd bootstrap-ks/rosa
115115
git checkout 1200f8b7
116-
117-
./rosa/provision.sh
116+
./install.sh
117+
./provision.sh
118118
}
119119

120120
main() {

0 commit comments

Comments
 (0)