Skip to content
Merged
Show file tree
Hide file tree
Changes from 3 commits
Commits
File filter

Filter by extension

Filter by extension


Conversations
Failed to load comments.
Loading
Jump to
Jump to file
Failed to load files.
Loading
Diff view
Diff view
26 changes: 18 additions & 8 deletions .github/workflows/stackhpc.yml
Original file line number Diff line number Diff line change
Expand Up @@ -5,7 +5,17 @@ on:
push:
branches:
- main
paths-ignore:
- dev/**
- docs/**
- README.md
- .gitignore
pull_request:
paths-ignore:
- dev/**
- docs/**
- README.md
- .gitignore
jobs:
openstack:
name: openstack-ci
Expand Down Expand Up @@ -39,23 +49,23 @@ jobs:
echo "${{ secrets[format('{0}_SSH_KEY', vars.CI_CLOUD)] }}" > ~/.ssh/id_rsa
chmod 0600 ~/.ssh/id_rsa
shell: bash

- name: Add bastion's ssh key to known_hosts
run: cat environments/.stackhpc/bastion_fingerprints >> ~/.ssh/known_hosts
shell: bash

- name: Install ansible etc
run: dev/setup-env.sh

- name: Install OpenTofu
uses: opentofu/setup-opentofu@v1
with:
tofu_version: 1.6.2

- name: Initialise terraform
run: terraform init
working-directory: ${{ github.workspace }}/environments/.stackhpc/terraform

- name: Write clouds.yaml
run: |
mkdir -p ~/.config/openstack/
Expand Down Expand Up @@ -111,14 +121,14 @@ jobs:
run: |
. venv/bin/activate
. environments/.stackhpc/activate

# load ansible variables into shell:
ansible-playbook ansible/ci/output_vars.yml \
-e output_vars_hosts=openondemand \
-e output_vars_path=$APPLIANCES_ENVIRONMENT_ROOT/vars.txt \
-e output_vars_items=bastion_ip,bastion_user,openondemand_servername
source $APPLIANCES_ENVIRONMENT_ROOT/vars.txt

# setup ssh proxying:
sudo apt-get --yes install proxychains
echo proxychains installed
Expand Down Expand Up @@ -155,7 +165,7 @@ jobs:
# ansible login -v -a "sudo scontrol reboot ASAP nextstate=RESUME reason='rebuild image:${{ steps.packer_build.outputs.NEW_COMPUTE_IMAGE_ID }}' ${TF_VAR_cluster_name}-compute-[0-3]"
# ansible compute -m wait_for_connection -a 'delay=60 timeout=600' # delay allows node to go down
# ansible-playbook -v ansible/ci/check_slurm.yml

- name: Test reimage of login and control nodes (via rebuild adhoc)
run: |
. venv/bin/activate
Expand All @@ -164,7 +174,7 @@ jobs:
ansible all -m wait_for_connection -a 'delay=60 timeout=600' # delay allows node to go down
ansible-playbook -v ansible/site.yml
ansible-playbook -v ansible/ci/check_slurm.yml

- name: Check sacct state survived reimage
run: |
. venv/bin/activate
Expand Down
65 changes: 65 additions & 0 deletions dev/extract_logs.py
Original file line number Diff line number Diff line change
@@ -0,0 +1,65 @@
import csv
import re
import os

def convert_time_to_seconds(time_str):
    """Return an ``H:MM:SS[.fff]`` duration string as a number of seconds.

    The result is a float so fractional seconds are preserved.
    """
    hours, minutes, seconds = time_str.split(':')
    total = float(seconds)
    total += 60.0 * int(minutes)
    total += 3600.0 * int(hours)
    return total

def extract_log_info_and_generate_csv(log_file_path, output_csv_path, target_directory):
    """Parse an Ansible workflow log and write a CSV of task durations.

    Expects each task to appear in the log as three consecutive lines::

        TASK [<name>] ...
        task path: <path>
        ... (<H:MM:SS.fff>) ...

    The duration shown in parentheses when a task *starts* belongs to the
    *previous* task (ansible ``profile_tasks`` style output), so durations are
    shifted back by one task. The final task therefore has no recorded
    duration and is reported as ``N/A``. Rows are sorted longest-first.

    :param log_file_path: path of the log file to read.
    :param output_csv_path: path of the CSV file to write.
    :param target_directory: marker substring for task paths; only the part
        after its first occurrence is kept, to strip machine-specific prefixes.
    """
    data = []

    # Strips ANSI colour escape sequences and any non-ASCII characters
    # that GitHub Actions logs embed in the output.
    unwanted_chars = re.compile(r'(\x1B\[[0-9;]*m)|([^\x00-\x7F])')

    with open(log_file_path, 'r') as file:
        lines = file.readlines()

    previous_task = None

    for i, line in enumerate(lines):
        if "TASK [" not in line:
            continue
        if i + 2 >= len(lines):
            break  # truncated log: the path/timing lines are missing

        task_name = line.strip().split('TASK [')[1].split(']')[0]

        full_task_path = lines[i + 1].strip().split('task path: ')[1]
        if target_directory in full_task_path:
            # Keep only the portion after the shared directory marker.
            start_index = full_task_path.find(target_directory) + len(target_directory)
            partial_task_path = full_task_path[start_index:]
        else:
            partial_task_path = full_task_path
        partial_task_path = unwanted_chars.sub('', partial_task_path).strip()

        time_to_complete = lines[i + 2].strip().split('(')[1].split(')')[0]

        if previous_task:
            previous_task[2] = time_to_complete  # Shift the time to the previous task
            data.append(previous_task)

        previous_task = [task_name, partial_task_path, None]  # Placeholder for the next time_to_complete

    if previous_task:
        # The last task never gets a follow-up timing line, so its duration
        # is unknown (previously it wrongly reused the last parsed time).
        previous_task[2] = 'N/A'
        data.append(previous_task)

    for row in data:
        if row[2] != 'N/A':
            row[2] = convert_time_to_seconds(row[2])

    # Sort by duration, longest first. 'N/A' rows map to -1.0 so the sort
    # never compares str against float (which would raise TypeError).
    data.sort(key=lambda row: row[2] if isinstance(row[2], float) else -1.0,
              reverse=True)

    # Re-format the numeric durations back to H:MM:SS.fff for readability.
    for row in data:
        if isinstance(row[2], float):
            row[2] = f'{int(row[2] // 3600):02}:{int((row[2] % 3600) // 60):02}:{row[2] % 60:.3f}'

    with open(output_csv_path, 'w', newline='') as csvfile:
        csvwriter = csv.writer(csvfile)
        csvwriter.writerow(['Task Name', 'Task Path', 'Time to Complete'])
        csvwriter.writerows(data)

    print(f"Data extracted, sorted, and saved to {output_csv_path}")

if __name__ == '__main__':
    # Guarded so importing this module for its functions does not trigger
    # the extraction side effect (previously these ran on import).
    log_file_path = './RL9-ofed-fatimage-177.txt'  # Input workflow log name
    output_csv_path = 'RL9-ofed-fatimage-177.csv'  # Output CSV name
    target_directory = '/ansible/'  # Shared directory for task path

    extract_log_info_and_generate_csv(log_file_path, output_csv_path, target_directory)
Loading