Skip to content

Commit 3c8d2e0

Browse files
authored
Merge branch 'main' into sms-deploy
2 parents 58850c4 + 8bfe946 commit 3c8d2e0

File tree

2 files changed

+104
-8
lines changed

2 files changed

+104
-8
lines changed

.github/workflows/stackhpc.yml

Lines changed: 22 additions & 8 deletions
Original file line numberDiff line numberDiff line change
@@ -5,7 +5,21 @@ on:
55
push:
66
branches:
77
- main
8+
paths:
9+
- '**'
10+
- '!dev/**'
11+
- 'dev/setup-env.sh'
12+
- '!docs/**'
13+
- '!README.md'
14+
- '!.gitignore'
815
pull_request:
16+
paths:
17+
- '**'
18+
- '!dev/**'
19+
- 'dev/setup-env.sh'
20+
- '!docs/**'
21+
- '!README.md'
22+
- '!.gitignore'
923
jobs:
1024
openstack:
1125
name: openstack-ci
@@ -53,23 +67,23 @@ jobs:
5367
echo "${{ secrets[format('{0}_SSH_KEY', env.CI_CLOUD)] }}" > ~/.ssh/id_rsa
5468
chmod 0600 ~/.ssh/id_rsa
5569
shell: bash
56-
70+
5771
- name: Add bastion's ssh key to known_hosts
5872
run: cat environments/.stackhpc/bastion_fingerprints >> ~/.ssh/known_hosts
5973
shell: bash
60-
74+
6175
- name: Install ansible etc
6276
run: dev/setup-env.sh
6377

6478
- name: Install OpenTofu
6579
uses: opentofu/setup-opentofu@v1
6680
with:
6781
tofu_version: 1.6.2
68-
82+
6983
- name: Initialise terraform
7084
run: terraform init
7185
working-directory: ${{ github.workspace }}/environments/.stackhpc/terraform
72-
86+
7387
- name: Write clouds.yaml
7488
run: |
7589
mkdir -p ~/.config/openstack/
@@ -125,14 +139,14 @@ jobs:
125139
run: |
126140
. venv/bin/activate
127141
. environments/.stackhpc/activate
128-
142+
129143
# load ansible variables into shell:
130144
ansible-playbook ansible/ci/output_vars.yml \
131145
-e output_vars_hosts=openondemand \
132146
-e output_vars_path=$APPLIANCES_ENVIRONMENT_ROOT/vars.txt \
133147
-e output_vars_items=bastion_ip,bastion_user,openondemand_servername
134148
source $APPLIANCES_ENVIRONMENT_ROOT/vars.txt
135-
149+
136150
# setup ssh proxying:
137151
sudo apt-get --yes install proxychains
138152
echo proxychains installed
@@ -169,7 +183,7 @@ jobs:
169183
# ansible login -v -a "sudo scontrol reboot ASAP nextstate=RESUME reason='rebuild image:${{ steps.packer_build.outputs.NEW_COMPUTE_IMAGE_ID }}' ${TF_VAR_cluster_name}-compute-[0-3]"
170184
# ansible compute -m wait_for_connection -a 'delay=60 timeout=600' # delay allows node to go down
171185
# ansible-playbook -v ansible/ci/check_slurm.yml
172-
186+
173187
- name: Test reimage of login and control nodes (via rebuild adhoc)
174188
run: |
175189
. venv/bin/activate
@@ -178,7 +192,7 @@ jobs:
178192
ansible all -m wait_for_connection -a 'delay=60 timeout=600' # delay allows node to go down
179193
ansible-playbook -v ansible/site.yml
180194
ansible-playbook -v ansible/ci/check_slurm.yml
181-
195+
182196
- name: Check sacct state survived reimage
183197
run: |
184198
. venv/bin/activate

dev/extract_logs.py

Lines changed: 82 additions & 0 deletions
Original file line numberDiff line numberDiff line change
@@ -0,0 +1,82 @@
1+
#!/usr/bin/env python
2+
3+
"""
4+
Process packer build workflow logs into CSV. Useful for timing
5+
dissemination.
6+
7+
Usage:
8+
extract_logs.py <logs.txt>
9+
10+
Where logs.txt is the name of the workflow log downloaded.
11+
It will list task name, against task directory path, against time to complete.
12+
"""
13+
14+
import csv
15+
import re
16+
import os
17+
import sys
18+
19+
def convert_time_to_seconds(time_str):
    """Convert an ``HH:MM:SS[.fff]`` duration string to seconds as a float."""
    h, m, s = time_str.split(':')
    return int(h) * 3600 + int(m) * 60 + float(s)

def extract_log_info_and_generate_csv(log_file_path, output_csv_path, target_directory):
    """Parse an Ansible workflow log and write per-task timing data to CSV.

    Scans the log for ``TASK [...]`` headers. Each header is expected to be
    followed by a ``task path: ...`` line and then a line containing an
    elapsed time in parentheses, e.g. ``(00:00:12.345)``. The time printed
    alongside a task header is the duration of the *previous* task, so each
    parsed time is shifted back onto the preceding task; the final task keeps
    the last time seen. Rows are sorted longest-first and times are written
    back in ``HH:MM:SS.mmm`` form.

    Parameters:
        log_file_path: path of the plain-text workflow log to read.
        output_csv_path: path of the CSV to write (overwritten if present).
        target_directory: marker substring; task paths containing it are
            truncated to the portion after it.
    """
    data = []

    # Strip ANSI colour escape sequences and any non-ASCII characters.
    unwanted_chars = re.compile(r'(\x1B\[[0-9;]*m)|([^\x00-\x7F])')

    with open(log_file_path, 'r') as file:
        lines = file.readlines()

    previous_task = None
    time_to_complete = None

    # Two lines of lookahead are needed (task path + timing line), so stop
    # early enough that lines[i + 2] always exists (the original indexed past
    # the end if a TASK header appeared in the last two lines).
    for i in range(len(lines) - 2):
        if "TASK [" not in lines[i]:
            continue

        task_name = lines[i].strip().split('TASK [')[1].split(']')[0]

        full_task_path = lines[i + 1].strip().split('task path: ')[1]
        if target_directory in full_task_path:
            start_index = full_task_path.find(target_directory) + len(target_directory)
            partial_task_path = full_task_path[start_index:]
        else:
            partial_task_path = full_task_path

        partial_task_path = unwanted_chars.sub('', partial_task_path).strip()

        time_to_complete = lines[i + 2].strip().split('(')[1].split(')')[0]

        if previous_task:
            previous_task[2] = time_to_complete  # shift the time to the previous task
            data.append(previous_task)

        previous_task = [task_name, partial_task_path, None]  # time filled in on the next header

    if previous_task:
        # No later header exists, so the last task keeps the last time seen.
        previous_task[2] = time_to_complete if time_to_complete else 'N/A'
        data.append(previous_task)

    for row in data:
        if row[2] != 'N/A':
            row[2] = convert_time_to_seconds(row[2])

    # Longest-running tasks first; any 'N/A' rows sink to the bottom instead
    # of raising a str/float comparison error.
    data.sort(key=lambda x: x[2] if isinstance(x[2], float) else -1.0, reverse=True)

    for row in data:
        if isinstance(row[2], float):
            # 06.3f zero-pads the seconds so output is a consistent HH:MM:SS.mmm
            # (the original emitted e.g. 00:00:1.000 for single-digit seconds).
            row[2] = f'{int(row[2] // 3600):02}:{int((row[2] % 3600) // 60):02}:{row[2] % 60:06.3f}'

    with open(output_csv_path, 'w', newline='') as csvfile:
        csvwriter = csv.writer(csvfile)
        csvwriter.writerow(['Task Name', 'Task Path', 'Time to Complete'])
        csvwriter.writerows(data)

    print(f"Data extracted, sorted, and saved to {output_csv_path}")
74+
75+
if __name__ == "__main__":
    # Guarding the entry point makes the module importable without side effects.
    if len(sys.argv) != 2:
        print("Path to workflow log plain text file should be provided as the only arg to this script")
        sys.exit(1)
    log_file_path = sys.argv[1]  # input workflow log name
    # Bug fix: the original used .replace('.txt.', '.csv'); the pattern
    # '.txt.' never matches a name like 'logs.txt', so the output path equalled
    # the input path and the CSV overwrote the log. Derive it from the stem.
    output_csv_path = os.path.splitext(log_file_path)[0] + '.csv'  # output CSV name
    target_directory = '/ansible/'  # shared path marker used to shorten task paths

    extract_log_info_and_generate_csv(log_file_path, output_csv_path, target_directory)

0 commit comments

Comments
 (0)