#!/usr/bin/env bash
#
# daylily-delete-ephemeral-cluster — thin wrapper around the daylily-ec CLI.
#
# Resolution order:
#   1. If `daylily-ec` is on PATH (installed CLI), exec it directly.
#   2. Dev fallback: if this script lives inside a repo checkout that
#      contains the `daylily_ec` package, run it via `python3 -m`.
# All command-line arguments are forwarded unchanged to `daylily-ec delete`.
#
# Exits non-zero with a message on stderr when neither option is available.
set -euo pipefail

# Resolve the directory containing this script, then the repo root one
# level up (used only by the dev fallback below).
script_dir="$(cd "$(dirname "${BASH_SOURCE[0]}")" && pwd)"
repo_root="$(cd "${script_dir}/.." && pwd)"

# Preferred path: the installed CLI.
if command -v daylily-ec >/dev/null 2>&1; then
  exec daylily-ec delete "$@"
fi

# Dev fallback: invoke the package straight from the repo checkout.
if [[ -f "${repo_root}/daylily_ec/__main__.py" ]]; then
  # Guard explicitly so a missing interpreter yields a clear message
  # instead of a bare "python3: command not found" from exec.
  if ! command -v python3 >/dev/null 2>&1; then
    echo "Error: python3 is required for the repo-checkout fallback but was not found in PATH." >&2
    exit 1
  fi
  cd "${repo_root}"
  exec python3 -m daylily_ec delete "$@"
fi

echo "Error: daylily-ec is not available. Run 'source ./activate' first." >&2
exit 1