
Commit c6f91a3

Author: Matt Pryor
Put the Zenith client and MITM in the same pod

1 parent dfb2ec0, commit c6f91a3

8 files changed: +90 -47 lines

roles/zenith_proxy/defaults/main.yml

Lines changed: 3 additions & 2 deletions
@@ -8,9 +8,11 @@ zenith_sshd_port: 22
 zenith_proxy_podman_user: "{{ ansible_user }}"
 
 zenith_proxy_service_name: "{{ undef(hint = 'zenith_proxy_service_name is required') }}"
+zenith_proxy_client_service_name: "{{ zenith_proxy_service_name }}-client"
 zenith_proxy_mitm_service_name: "{{ zenith_proxy_service_name }}-mitm"
 
-zenith_proxy_container_name: "{{ zenith_proxy_service_name }}"
+zenith_proxy_pod_name: "{{ zenith_proxy_service_name }}"
+zenith_proxy_client_container_name: "{{ zenith_proxy_client_service_name }}"
 zenith_proxy_mitm_container_name: "{{ zenith_proxy_mitm_service_name }}"
 
 zenith_proxy_client_image_repository: ghcr.io/stackhpc/zenith-client
@@ -31,7 +33,6 @@ zenith_proxy_client_auth_skip: false
 zenith_proxy_client_auth_params: {}
 
 zenith_proxy_mitm_enabled: no
-zenith_proxy_mitm_host: "{{ ansible_default_ipv4.address }}"
 zenith_proxy_mitm_listen_port: 8080
 zenith_proxy_mitm_auth_inject: none # valid values are 'basic' and 'bearer'
 zenith_proxy_mitm_auth_basic_username: >-
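
The defaults now derive every unit and container name from the single required zenith_proxy_service_name, with the pod reusing that name directly; zenith_proxy_mitm_host is dropped because the MITM now lives in the same pod as the client (see the zenith-client.yaml.j2 change below). As a purely illustrative sketch, not part of the commit, the naming scheme expands like this for a made-up service name:

# Illustrative expansion of the naming scheme for a hypothetical
# zenith_proxy_service_name of "zenith-monitoring".
SERVICE_NAME="zenith-monitoring"
CLIENT_SERVICE="${SERVICE_NAME}-client"    # zenith_proxy_client_service_name
MITM_SERVICE="${SERVICE_NAME}-mitm"        # zenith_proxy_mitm_service_name
POD_NAME="${SERVICE_NAME}"                 # zenith_proxy_pod_name
echo "pod=${POD_NAME} client=${CLIENT_SERVICE} mitm=${MITM_SERVICE}"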
roles/zenith_proxy/files/podman-pod-infra-attach.sh

Lines changed: 17 additions & 0 deletions

@@ -0,0 +1,17 @@
+#!/usr/bin/env bash
+
+#####
+# Small script that can be used to attach to the infra container of a pod
+#
+# Useful in a systemd service that starts a pod in order to track the execution
+#
+# Accepts a single argument which is the name of the pod whose infra container we should attach to
+#####
+
+set -e
+
+echo "[INFO] Finding infra container for pod '$1'"
+INFRA_CONTAINER_ID="$(podman pod inspect --format '{{.InfraContainerID}}' "$1")"
+
+echo "[INFO] Attaching to infra container '${INFRA_CONTAINER_ID}'"
+exec podman container attach --no-stdin ${INFRA_CONTAINER_ID}
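
A pod's infra container runs for as long as the pod does, so attaching to it gives systemd a foreground process that tracks the pod's lifetime. A rough manual equivalent of what the script does, using a made-up pod name (illustrative only, not from the commit):

# Sketch of the script's behaviour for a hypothetical pod named "zenith-monitoring".
podman pod create --name zenith-monitoring        # creates the pod and its infra container
podman pod start zenith-monitoring                # starts the infra container
INFRA_ID="$(podman pod inspect --format '{{.InfraContainerID}}' zenith-monitoring)"
podman container attach --no-stdin "$INFRA_ID"    # blocks until the infra container exits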

roles/zenith_proxy/tasks/main.yml

Lines changed: 22 additions & 9 deletions
@@ -1,13 +1,26 @@
 ---
 
-- name: Collect usernamespace facts
-  user_namespace_facts:
+- name: Install script for attaching to pod infra containers
+  copy:
+    src: podman-pod-infra-attach.sh
+    dest: /usr/bin/
+    mode: +x
+  become: true
 
-- name: Set facts containing sub-ids
-  set_fact:
-    # podman user is 1000
-    zenith_proxy_host_user_id: "{{ ansible_facts.subuid[zenith_proxy_podman_user]['start'] + 1000 - 1 }}"
-    zenith_proxy_host_group_id: "{{ ansible_facts.subgid[zenith_proxy_podman_user]['start'] + 1000 - 1 }}"
+- name: Create systemd unit for Zenith pod
+  template:
+    src: pod.service.j2
+    dest: /etc/systemd/system/{{ zenith_proxy_service_name }}.service
+  become: true
+  register: zenith_proxy_pod_systemd_unit
+
+- name: Ensure Zenith pod is started and enabled
+  service:
+    name: "{{ zenith_proxy_service_name }}.service"
+    state: "{{ 'restarted' if zenith_proxy_pod_systemd_unit is changed else 'started' }}"
+    enabled: yes
+    daemon_reload: "{{ zenith_proxy_pod_systemd_unit is changed }}"
+  become: true
 
 - block:
     - name: Create systemd unit file for MITM proxy

@@ -65,13 +78,13 @@
 - name: Create systemd unit file for Zenith client
   template:
     src: client.service.j2
-    dest: /etc/systemd/system/{{ zenith_proxy_service_name }}.service
+    dest: /etc/systemd/system/{{ zenith_proxy_client_service_name }}.service
   become: true
   register: zenith_proxy_client_systemd_unit
 
 - name: Ensure Zenith client is started and enabled
   service:
-    name: "{{ zenith_proxy_service_name }}.service"
+    name: "{{ zenith_proxy_client_service_name }}.service"
     state: >-
       {{
         'restarted'
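
The role now manages a pod unit plus per-container units instead of a single client unit. On a deployed host the result can be checked with something like the following; the service name is hypothetical, so substitute the real zenith_proxy_service_name and run the podman commands as the podman user:

# Illustrative checks; unit names follow the <service>, <service>-mitm, <service>-client pattern.
systemctl status zenith-monitoring.service          # pod unit
systemctl status zenith-monitoring-mitm.service     # MITM unit, only if zenith_proxy_mitm_enabled
systemctl status zenith-monitoring-client.service   # Zenith client unit
podman pod ps                                        # the pod and its container count
podman ps --pod                                      # containers with the pod they belong to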
roles/zenith_proxy/templates/client.service.j2

Lines changed: 16 additions & 19 deletions

@@ -1,37 +1,34 @@
-# container-filebeat.service
-# based off
-# podman generate systemd filebeat --restart-policy always --new --name
-# with pid/cidfiles replaced with --sdnotify=conmon approach
-
 [Unit]
-Description=Podman container-filebeat.service
-Documentation=man:podman-generate-systemd(1)
+Description=Podman {{ zenith_proxy_client_service_name }}.service
 Wants=network.target
 After=network-online.target
+BindsTo={{ zenith_proxy_service_name }}.service
+PartOf={{ zenith_proxy_service_name }}.service
+After={{ zenith_proxy_service_name }}.service
+{% if zenith_proxy_mitm_enabled %}
+Wants={{ zenith_proxy_mitm_service_name }}.service
+After={{ zenith_proxy_mitm_service_name }}.service
+{% endif %}
 
 [Service]
 Environment=PODMAN_SYSTEMD_UNIT=%n
+Type=simple
 Restart=always
+User={{ zenith_proxy_podman_user }}
+Group={{ zenith_proxy_podman_user }}
 ExecStart=/usr/bin/podman run \
     --network slirp4netns:cidr={{ podman_cidr }} \
-    --sdnotify=conmon \
     --cgroups=no-conmon \
     --replace \
-    --name {{ zenith_proxy_container_name }} \
-    --restart=always \
+    --restart=no \
+    --pod {{ zenith_proxy_pod_name }} \
+    --name {{ zenith_proxy_client_container_name }} \
     --security-opt label=disable \
-    --detach=True \
     --volume /etc/zenith/{{ zenith_proxy_service_name }}:/etc/zenith:ro \
     --volume {{ zenith_proxy_service_name }}-ssh:/home/zenith/.ssh \
     {{ zenith_proxy_client_image }}
-ExecStop=/usr/bin/podman stop --ignore {{ zenith_proxy_container_name }} -t 10
-ExecStopPost=/usr/bin/podman rm --ignore -f {{ zenith_proxy_container_name }}
-KillMode=none
-Type=notify
-NotifyAccess=all
-User={{ zenith_proxy_podman_user }}
-Group={{ zenith_proxy_podman_user }}
-TimeoutStartSec=180
+ExecStop=/usr/bin/podman stop --ignore -t 10 {{ zenith_proxy_client_container_name }}
+ExecStopPost=/usr/bin/podman rm --ignore -f {{ zenith_proxy_client_container_name }}
 
 [Install]
 WantedBy=multi-user.target default.target
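
BindsTo= and PartOf= tie the client unit to the pod unit, so stopping or restarting the pod unit propagates to the client, while the conditional Wants=/After= lines only order it after the MITM unit when that is enabled. A quick illustrative check of the coupling, with the same hypothetical service name as above:

# Stopping the pod unit should take the client unit down with it.
sudo systemctl stop zenith-monitoring.service
systemctl is-active zenith-monitoring-client.service   # expected: inactive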
Lines changed: 11 additions & 16 deletions

@@ -1,26 +1,27 @@
-# container-filebeat.service
-# based off
-# podman generate systemd filebeat --restart-policy always --new --name
-# with pid/cidfiles replaced with --sdnotify=conmon approach
+
 
 [Unit]
-Description=Podman container-filebeat.service
-Documentation=man:podman-generate-systemd(1)
+Description=Podman {{ zenith_proxy_mitm_service_name }}.service
 Wants=network.target
 After=network-online.target
+BindsTo={{ zenith_proxy_service_name }}.service
+PartOf={{ zenith_proxy_service_name }}.service
+After={{ zenith_proxy_service_name }}.service
 
 [Service]
 Environment=PODMAN_SYSTEMD_UNIT=%n
+Type=simple
 Restart=always
+User={{ zenith_proxy_podman_user }}
+Group={{ zenith_proxy_podman_user }}
 ExecStart=/usr/bin/podman run \
     --network slirp4netns:cidr={{ podman_cidr }} \
-    --sdnotify=conmon \
     --cgroups=no-conmon \
     --replace \
+    --restart=no \
+    --pod {{ zenith_proxy_pod_name }} \
     --name {{ zenith_proxy_mitm_container_name }} \
-    --restart=always \
     --security-opt label=disable \
-    --detach=True \
     --env ZENITH_PROXY_LISTEN_PORT={{ zenith_proxy_mitm_listen_port }} \
     --env ZENITH_PROXY_UPSTREAM_SCHEME={{ zenith_proxy_upstream_scheme }} \
     --env ZENITH_PROXY_UPSTREAM_HOST={{ zenith_proxy_upstream_host }} \

@@ -39,14 +40,8 @@ ExecStart=/usr/bin/podman run \
     --env ZENITH_PROXY_AUTH_BEARER_TOKEN={{ zenith_proxy_mitm_auth_bearer_token }} \
 {% endif %}
     {{ zenith_proxy_mitm_image }}
-ExecStop=/usr/bin/podman stop --ignore {{ zenith_proxy_mitm_container_name }} -t 10
+ExecStop=/usr/bin/podman stop --ignore -t 10 {{ zenith_proxy_mitm_container_name }}
 ExecStopPost=/usr/bin/podman rm --ignore -f {{ zenith_proxy_mitm_container_name }}
-KillMode=none
-Type=notify
-NotifyAccess=all
-User={{ zenith_proxy_podman_user }}
-Group={{ zenith_proxy_podman_user }}
-TimeoutStartSec=180
 
 [Install]
 WantedBy=multi-user.target default.target
roles/zenith_proxy/templates/pod.service.j2

Lines changed: 19 additions & 0 deletions

@@ -0,0 +1,19 @@
+[Unit]
+Description=Podman {{ zenith_proxy_service_name }}.service
+Wants=network.target
+After=network-online.target
+
+[Service]
+Environment=PODMAN_SYSTEMD_UNIT=%n
+Type=simple
+Restart=always
+User={{ zenith_proxy_podman_user }}
+Group={{ zenith_proxy_podman_user }}
+ExecStartPre=/usr/bin/podman pod create --replace --name {{ zenith_proxy_pod_name }}
+ExecStartPre=/usr/bin/podman pod start {{ zenith_proxy_pod_name }}
+ExecStart=/usr/bin/podman-pod-infra-attach.sh {{ zenith_proxy_pod_name }}
+ExecStop=/usr/bin/podman pod stop --ignore -t 10 {{ zenith_proxy_pod_name }}
+ExecStopPost=/usr/bin/podman pod rm --ignore -f {{ zenith_proxy_pod_name }}
+
+[Install]
+WantedBy=multi-user.target default.target

roles/zenith_proxy/templates/zenith-client.yaml.j2

Lines changed: 1 addition & 1 deletion
@@ -9,7 +9,7 @@ verify_ssl: {{ 'yes' if zenith_registrar_verify_ssl else 'no' }}
 server_address: {{ zenith_sshd_host }}
 server_port: {{ zenith_sshd_port }}
 {% if zenith_proxy_mitm_enabled %}
-forward_to_host: {{ zenith_proxy_mitm_host }}
+forward_to_host: 127.0.0.1
 forward_to_port: {{ zenith_proxy_mitm_listen_port }}
 {% else %}
 forward_to_host: {{ zenith_proxy_upstream_host }}
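
Hard-coding forward_to_host to 127.0.0.1 works because containers in the same podman pod share a network namespace, so the client reaches the MITM proxy on localhost rather than via the host address that zenith_proxy_mitm_host used to supply. A standalone illustration of that behaviour with throwaway images (not the role's actual containers):

# Two containers in one pod talking over localhost.
podman pod create --name demo-pod
podman run -d --pod demo-pod --name demo-web docker.io/library/nginx:alpine
podman run --rm --pod demo-pod docker.io/library/alpine:latest \
    wget -qO- http://127.0.0.1:80 > /dev/null && echo "reachable via localhost"
podman pod rm -f demo-pod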

slurm-infra.yml

Lines changed: 1 addition & 0 deletions
@@ -144,6 +144,7 @@
       zenith_proxy_upstream_port: "{{ grafana_port }}"
       zenith_proxy_client_token: "{{ zenith_token_monitoring }}"
       zenith_proxy_client_auth_params: {}
+      zenith_proxy_mitm_enabled: yes
     when: zenith_subdomain_monitoring is defined
 
 - import_playbook: vendor/stackhpc/ansible-slurm-appliance/ansible/adhoc/hpctests.yml
