Skip to content

Commit c221045

Browse files
committed
Merge remote-tracking branch 'origin/main' into ci/enable-linting
2 parents a875531 + bc0c66c commit c221045

File tree

5 files changed

+20
-12
lines changed

5 files changed

+20
-12
lines changed

ansible/roles/cuda/defaults/main.yml

Lines changed: 2 additions & 2 deletions
Original file line numberDiff line numberDiff line change
@@ -2,8 +2,8 @@
22
# yamllint disable-line rule:line-length
33
cuda_repo_url: "https://developer.download.nvidia.com/compute/cuda/repos/rhel{{ ansible_distribution_major_version }}/{{ ansible_architecture }}/cuda-rhel{{ ansible_distribution_major_version }}.repo"
44
cuda_nvidia_driver_stream: '580-open'
5-
cuda_nvidia_driver_pkg: "nvidia-open-3:580.65.06-1.el{{ ansible_distribution_major_version }}"
6-
cuda_package_version: '13.0.0-1'
5+
cuda_nvidia_driver_pkg: "nvidia-open-3:580.82.07-1.el{{ ansible_distribution_major_version }}"
6+
cuda_package_version: '13.0.1-1'
77
cuda_version_short: "{{ (cuda_package_version | split('.'))[0:2] | join('.') }}" # major.minor
88
cuda_packages:
99
- "cuda-toolkit-{{ cuda_package_version }}"

ansible/roles/hpctests/tasks/hpl-solo.yml

Lines changed: 2 additions & 1 deletion
Original file line numberDiff line numberDiff line change
@@ -44,10 +44,11 @@
4444
hpctests_hplsolo_N: "{{ ((((((hpctests_nodeinfo.info['MEMORY'][0] | int) * (hpctests_hpl_mem_frac | float) * 1024 * 1024 * 1) / 8) | root) / hpctests_hpl_NB)
4545
| int) * hpctests_hpl_NB }}"
4646
- ansible.builtin.debug:
47-
# yamllint disable-line rule:line-length
47+
# yamllint disable rule:line-length
4848
msg: "Using {{ hpctests_hplsolo_ntasks }} process per node with P={{ hpctests_hplsolo_pq.grid.P }}, Q={{ hpctests_hplsolo_pq.grid.Q }} targeting {{ (hpctests_hpl_mem_frac
4949
| float) * 100 }}% of {{ hpctests_nodeinfo.info['MEMORY'][0] }} MB memory per node, block size (NB) = {{ hpctests_hpl_NB }}, problem size (N) = {{ hpctests_hplsolo_N
5050
}}"
51+
# yamllint enable rule:line-length
5152

5253
- name: Get all nodes in partition
5354
ansible.builtin.command: "sinfo --Node --noheader --format %N --partition={{ hpctests_partition }}"

ansible/roles/persist_openhpc_secrets/tasks/main.yml

Lines changed: 7 additions & 2 deletions
Original file line numberDiff line numberDiff line change
@@ -1,4 +1,5 @@
11
---
2+
23
- name: Check if OpenHPC secrets exist in persistent storage
34
ansible.builtin.stat:
45
path: "{{ appliances_state_dir }}/ansible.facts.d/openhpc_secrets.fact"
@@ -14,13 +15,17 @@
1415
- "{{ appliances_state_dir }}/ansible.facts.d"
1516
- "/etc/ansible/facts.d"
1617

18+
- name: Load existing OpenHPC secrets if present
19+
ansible.builtin.setup:
20+
filter: ansible_local
21+
when: openhpc_secrets_stat.stat.exists
22+
1723
- name: Write OpenHPC secrets
1824
ansible.builtin.template:
1925
src: openhpc_secrets.fact
2026
dest: "{{ appliances_state_dir }}/ansible.facts.d/openhpc_secrets.fact"
2127
owner: root
2228
mode: "0600"
23-
when: "not openhpc_secrets_stat.stat.exists"
2429

2530
- name: Symlink persistent facts to facts_path
2631
ansible.builtin.file:
@@ -29,6 +34,6 @@
2934
dest: /etc/ansible/facts.d/openhpc_secrets.fact
3035
owner: root
3136

32-
- name: Read facts
37+
- name: Refresh facts to pick up any new secrets
3338
ansible.builtin.setup:
3439
filter: ansible_local
ansible/roles/persist_openhpc_secrets/templates/openhpc_secrets.fact

Lines changed: 8 additions & 7 deletions
Original file line numberDiff line numberDiff line change
@@ -1,9 +1,10 @@
11
{
2-
"vault_azimuth_user_password": "{{ lookup('password', '/dev/null') }}",
3-
"vault_grafana_admin_password": "{{ lookup('password', '/dev/null') }}",
4-
"vault_elasticsearch_admin_password": "{{ lookup('password', '/dev/null') }}",
5-
"vault_elasticsearch_kibana_password": "{{ lookup('password', '/dev/null') }}",
6-
"vault_mysql_root_password": "{{ lookup('password', '/dev/null') }}",
7-
"vault_mysql_slurm_password": "{{ lookup('password', '/dev/null') }}",
8-
"vault_openhpc_mungekey": "{{ lookup('pipe', 'dd if=/dev/urandom bs=1 count=1024 2>/dev/null | base64') | regex_replace('\s+', '') }}"
2+
"vault_azimuth_user_password": "{{ ansible_local.openhpc_secrets.vault_azimuth_user_password | default(lookup('password', '/dev/null')) }}",
3+
"vault_grafana_admin_password": "{{ ansible_local.openhpc_secrets.vault_grafana_admin_password | default(lookup('password', '/dev/null')) }}",
4+
"vault_elasticsearch_admin_password": "{{ ansible_local.openhpc_secrets.vault_elasticsearch_admin_password | default(lookup('password', '/dev/null')) }}",
5+
"vault_elasticsearch_kibana_password": "{{ ansible_local.openhpc_secrets.vault_elasticsearch_kibana_password | default(lookup('password', '/dev/null')) }}",
6+
"vault_mysql_root_password": "{{ ansible_local.openhpc_secrets.vault_mysql_root_password | default(lookup('password', '/dev/null')) }}",
7+
"vault_mysql_slurm_password": "{{ ansible_local.openhpc_secrets.vault_mysql_slurm_password | default(lookup('password', '/dev/null')) }}",
8+
"vault_openhpc_mungekey": "{{ ansible_local.openhpc_secrets.vault_openhpc_mungekey | default(lookup('pipe', 'dd if=/dev/urandom bs=1 count=1024 2>/dev/null | base64') | regex_replace('\\s+', '')) }}",
9+
"vault_alertmanager_admin_password": "{{ ansible_local.openhpc_secrets.vault_alertmanager_admin_password | default(lookup('password', '/dev/null')) }}"
910
}

environments/.caas/inventory/group_vars/all/cluster.yml

Lines changed: 1 addition & 0 deletions
Original file line numberDiff line numberDiff line change
@@ -12,6 +12,7 @@ vault_elasticsearch_kibana_password: "{{ hostvars[groups['control'][0]].ansible_
1212
vault_mysql_root_password: "{{ hostvars[groups['control'][0]].ansible_local.openhpc_secrets.vault_mysql_root_password }}"
1313
vault_mysql_slurm_password: "{{ hostvars[groups['control'][0]].ansible_local.openhpc_secrets.vault_mysql_slurm_password }}"
1414
vault_openhpc_mungekey: "{{ hostvars[groups['control'][0]].ansible_local.openhpc_secrets.vault_openhpc_mungekey }}"
15+
vault_alertmanager_admin_password: "{{ hostvars[groups['control'][0]].ansible_local.openhpc_secrets.vault_alertmanager_admin_password }}"
1516

1617
# Override this to cope with the case where the podman group just doesn't exist
1718
appliances_local_users_podman_enable: "{{ groups.get('podman', []) | length > 0 }}"

0 commit comments

Comments (0)