diff --git a/docs/HIGH_AVAILABILITY.md b/docs/HIGH_AVAILABILITY.md
index 911a5a4a..41297a8b 100644
--- a/docs/HIGH_AVAILABILITY.md
+++ b/docs/HIGH_AVAILABILITY.md
@@ -65,7 +65,7 @@ For the framework to access the properties of the Azure Load Balancer in a high
1. Enable system managed identity on the management server by following the steps in [Configure managed identities on Azure VMs](https://learn.microsoft.com/entra/identity/managed-identities-azure-resources/how-to-configure-managed-identities?pivots=qs-configure-portal-windows-vm#system-assigned-managed-identity).
1. Open the Azure Load Balancer used for the high availability deployment of your SAP system on Azure.
1. In the Azure Load Balancer panel, go to Access control (IAM).
-1. Follow steps 5 to 10 from [Use managed identity to access Azure Resource](https://learn.microsoft.com/entra/identity/managed-identities-azure-resources/how-to-configure-managed-identities?pivots=qs-configure-portal-windows-vm#system-assigned-managed-identity) to complete the configuration.
+1. Follow the steps in [Use managed identity to access Azure Resource](https://learn.microsoft.com/en-us/azure/role-based-access-control/role-assignments-portal) to complete the configuration.
#### Configuring access using user-assigned managed identity
diff --git a/requirements.txt b/requirements.txt
index 299a8412..8e65cdb3 100644
--- a/requirements.txt
+++ b/requirements.txt
@@ -80,7 +80,7 @@ cryptography==44.0.2
# azure-storage-queue
# msal
# pyjwt
-dill==0.3.9
+dill==0.4.0
# via pylint
exceptiongroup==1.2.2
# via pytest
@@ -130,11 +130,11 @@ msal-extensions==1.3.1
# via azure-identity
mypy-extensions==1.0.0
# via black
-numpy==2.2.4
+numpy==2.2.5
# via
# -r requirements.in
# pandas
-packaging==24.2
+packaging==25.0
# via
# ansible-compat
# ansible-core
diff --git a/src/module_utils/commands.py b/src/module_utils/commands.py
index b31eface..3ff96d81 100644
--- a/src/module_utils/commands.py
+++ b/src/module_utils/commands.py
@@ -33,12 +33,12 @@
]
-FREEZE_FILESYSTEM = lambda file_system: [
+FREEZE_FILESYSTEM = lambda file_system, mount_point: [
"mount",
"-o",
"ro",
file_system,
- "/hana/shared",
+ mount_point,
]
PACEMAKER_STATUS = ["systemctl", "is-active", "pacemaker"]
diff --git a/src/modules/check_indexserver.py b/src/modules/check_indexserver.py
index e6862bf6..7f52a235 100644
--- a/src/modules/check_indexserver.py
+++ b/src/modules/check_indexserver.py
@@ -116,12 +116,20 @@ def check_indexserver(self) -> None:
}
},
],
- "suse": {
- "[ha_dr_provider_suschksrv]": {
- "provider": "susChkSrv",
- "path": "/usr/share/SAPHanaSR",
- }
- },
+ "suse": [
+ {
+ "[ha_dr_provider_suschksrv]": {
+ "provider": "susChkSrv",
+ "path": "/usr/share/SAPHanaSR",
+ },
+ },
+ {
+ "[ha_dr_provider_suschksrv]": {
+ "provider": "susChkSrv",
+ "path": "/hana/shared/myHooks",
+ }
+ },
+ ],
}
os_props_list = expected_properties.get(self.os_distribution)
diff --git a/src/modules/filesystem_freeze.py b/src/modules/filesystem_freeze.py
index 6ec9f25a..61e225e2 100644
--- a/src/modules/filesystem_freeze.py
+++ b/src/modules/filesystem_freeze.py
@@ -6,7 +6,7 @@
"""
import logging
-from typing import Dict, Any
+from typing import Dict, Any, Tuple
from ansible.module_utils.basic import AnsibleModule
try:
@@ -94,12 +94,14 @@ def __init__(self, database_sid: str) -> None:
super().__init__()
self.database_sid = database_sid
- def _find_filesystem(self) -> str:
+ def _find_filesystem(self) -> Tuple[str, str]:
"""
Find the filesystem mounted on /hana/shared.
+ This method reads the /proc/mounts file to identify the filesystem.
+ It returns the filesystem device and the mount point as /hana/shared.
:return: The filesystem mounted on /hana/shared.
- :rtype: str
+ :rtype: Tuple[str, str]
"""
try:
with open("/proc/mounts", "r", encoding="utf-8") as mounts_file:
@@ -109,10 +111,10 @@ def _find_filesystem(self) -> str:
"/hana/shared",
f"/hana/shared/{self.database_sid}",
]:
- return parts[0]
+ return parts[0], "/hana/shared"
except FileNotFoundError as ex:
self.handle_error(ex)
- return None
+ return None, None
def run(self) -> Dict[str, Any]:
"""
@@ -121,22 +123,24 @@ def run(self) -> Dict[str, Any]:
:return: A dictionary containing the result of the test case.
:rtype: Dict[str, Any]
"""
- file_system = self._find_filesystem()
+ file_system, mount_point = self._find_filesystem()
- self.log(
- logging.INFO,
- f"Found the filesystem mounted on /hana/shared: {file_system}",
- )
-
- if file_system:
- read_only_output = self.execute_command_subprocess(FREEZE_FILESYSTEM(file_system))
+ if file_system and mount_point:
+ self.log(
+ logging.INFO,
+ f"Found the filesystem mounted on: {file_system} at {mount_point}",
+ )
+ read_only_output = self.execute_command_subprocess(
+ FREEZE_FILESYSTEM(file_system, mount_point)
+ )
self.log(logging.INFO, read_only_output)
self.result.update(
{
"changed": True,
- "message": "The file system (/hana/shared) was successfully mounted read-only.",
+ "message": f"The file system ({mount_point}) was mounted read-only.",
"status": TestStatus.SUCCESS.value,
"details": read_only_output,
+ "mount_point": mount_point,
}
)
else:
diff --git a/src/modules/get_pcmk_properties_db.py b/src/modules/get_pcmk_properties_db.py
index cf715a84..58bff662 100644
--- a/src/modules/get_pcmk_properties_db.py
+++ b/src/modules/get_pcmk_properties_db.py
@@ -177,7 +177,8 @@ class HAClusterValidator(SapAutomationQA):
CONSTRAINTS_CATEGORIES = (".//*", "CONSTRAINTS_DEFAULTS")
RESOURCE_CATEGORIES = {
- "stonith": ".//primitive[@class='stonith']",
+ "sbd_stonith": ".//primitive[@type='external/sbd']",
+ "fence_agent": ".//primitive[@type='fence_azure_arm']",
"topology": ".//clone/primitive[@type='SAPHanaTopology']",
"topology_meta": ".//clone/meta_attributes",
"hana": ".//master/primitive[@type='SAPHana']",
@@ -242,12 +243,9 @@ def _get_resource_expected_value(self, resource_type, section, param_name, op_na
:return: The expected value for the resource configuration parameter.
:rtype: str
"""
- resource_defaults = self.constants["RESOURCE_DEFAULTS"].get(self.os_type, {})
-
- if resource_type == "stonith":
- resource_defaults = resource_defaults.get("stonith", {}).get(self.fencing_mechanism, {})
- else:
- resource_defaults = resource_defaults.get(resource_type, {})
+ resource_defaults = (
+ self.constants["RESOURCE_DEFAULTS"].get(self.os_type, {}).get(resource_type, {})
+ )
if section == "meta_attributes":
return resource_defaults.get("meta_attributes", {}).get(param_name)
@@ -333,16 +331,20 @@ def _parse_nvpair_elements(self, elements, category, subcategory=None, op_name=N
"""
parameters = []
for nvpair in elements:
- parameters.append(
- self._create_parameter(
- category=category,
- subcategory=subcategory,
- op_name=op_name,
- id=nvpair.get("id", ""),
- name=nvpair.get("name", ""),
- value=nvpair.get("value", ""),
+ name = nvpair.get("name", "")
+ if name in ["passwd", "password", "login"]:
+ continue
+ else:
+ parameters.append(
+ self._create_parameter(
+ category=category,
+ subcategory=subcategory,
+ op_name=op_name,
+ id=nvpair.get("id", ""),
+ name=name,
+ value=nvpair.get("value", ""),
+ )
)
- )
return parameters
def _parse_os_parameters(self):
@@ -404,6 +406,9 @@ def _parse_global_ini_parameters(self):
for param_name, expected_value in global_ini_defaults.items():
value = global_ini_properties.get(param_name, "")
+ if isinstance(expected_value, list):
+ if value in expected_value:
+ expected_value = value
parameters.append(
self._create_parameter(
category="global_ini",
diff --git a/src/modules/get_pcmk_properties_scs.py b/src/modules/get_pcmk_properties_scs.py
index 1a2965d8..8468ae96 100644
--- a/src/modules/get_pcmk_properties_scs.py
+++ b/src/modules/get_pcmk_properties_scs.py
@@ -327,16 +327,20 @@ def _parse_nvpair_elements(self, elements, category, subcategory=None, op_name=N
"""
parameters = []
for nvpair in elements:
- parameters.append(
- self._create_parameter(
- category=category,
- subcategory=subcategory,
- op_name=op_name,
- id=nvpair.get("id", ""),
- name=nvpair.get("name", ""),
- value=nvpair.get("value", ""),
+ name = nvpair.get("name", "")
+ if name in ["passwd", "password", "login"]:
+ continue
+ else:
+ parameters.append(
+ self._create_parameter(
+ category=category,
+ subcategory=subcategory,
+ op_name=op_name,
+ id=nvpair.get("id", ""),
+ name=name,
+ value=nvpair.get("value", ""),
+ )
)
- )
return parameters
def _parse_resource(self, element, category):
diff --git a/src/roles/ha_db_hana/tasks/azure-lb.yml b/src/roles/ha_db_hana/tasks/azure-lb.yml
index 9f15a4bc..85739b5e 100644
--- a/src/roles/ha_db_hana/tasks/azure-lb.yml
+++ b/src/roles/ha_db_hana/tasks/azure-lb.yml
@@ -13,11 +13,6 @@
- name: "Pre Validations: Validate the Azure Load Balancer config"
become: true
block:
- - name: "Install azure management network package"
- ansible.builtin.pip:
- name: "azure-mgmt-network"
- state: present
-
- name: "Retrieve Subscription ID and Resource Group Name"
ansible.builtin.uri:
url: http://169.254.169.254/metadata/instance?api-version=2021-02-01
diff --git a/src/roles/ha_db_hana/tasks/block-network.yml b/src/roles/ha_db_hana/tasks/block-network.yml
index a20075a7..69a1a323 100644
--- a/src/roles/ha_db_hana/tasks/block-network.yml
+++ b/src/roles/ha_db_hana/tasks/block-network.yml
@@ -56,7 +56,7 @@
delegate_to: localhost
ansible.builtin.shell: |
for i in $(seq 1 30); do
- if ! ping -c 1 -w 1 {{ ansible_host }}; then
+ if ! nc -zv -w1 {{ ansible_host }} {{ sap_port_to_ping }}; then
echo "Connection failed on attempt $i"
exit 1
fi
diff --git a/src/roles/ha_db_hana/tasks/files/constants.yaml b/src/roles/ha_db_hana/tasks/files/constants.yaml
index fd89e4fe..f67a8dba 100644
--- a/src/roles/ha_db_hana/tasks/files/constants.yaml
+++ b/src/roles/ha_db_hana/tasks/files/constants.yaml
@@ -47,7 +47,7 @@ VALID_CONFIGS:
SUSE: {}
AFA:
have-watchdog: "false"
- stonith-timeout: "900"
+ stonith-timeout: "900s"
ISCSI:
have-watchdog: "true"
stonith-timeout: "144s"
@@ -57,30 +57,31 @@ VALID_CONFIGS:
# cibadmin --query --scope resources
RESOURCE_DEFAULTS:
SUSE:
- stonith:
- AFA:
- instance_attributes:
- pcmk_delay_max: "30s"
- pcmk_monitor_retries: "4"
- pcmk_action_limit: "3"
- pcmk_reboot_timeout: "900"
- power_timeout: "240"
- pcmk_monitor_timeout: "120"
- operations:
- monitor:
- interval: "3600"
- ISCSI:
- instance_attributes:
- pcmk_delay_max: "30s"
- pcmk_monitor_retries: "4"
- pcmk_action_limit: "3"
- pcmk_reboot_timeout: "900"
- power_timeout: "240"
- pcmk_monitor_timeout: "120"
- operations:
- monitor:
- interval: "600"
- timeout: "15"
+ fence_agent:
+ instance_attributes:
+ pcmk_delay_max: "15"
+ pcmk_monitor_retries: "4"
+ pcmk_action_limit: "3"
+ pcmk_reboot_timeout: "900"
+ power_timeout: "240"
+ pcmk_monitor_timeout: "120"
+ operations:
+ monitor:
+ interval: "3600"
+ timeout: "120"
+
+ sbd_stonith:
+ instance_attributes:
+ pcmk_delay_max: "15"
+ pcmk_monitor_retries: "4"
+ pcmk_action_limit: "3"
+ pcmk_reboot_timeout: "900"
+ power_timeout: "240"
+ pcmk_monitor_timeout: "120"
+ operations:
+ monitor:
+ interval: "600"
+ timeout: "15"
topology:
meta_attributes:
@@ -151,29 +152,29 @@ RESOURCE_DEFAULTS:
resource-stickiness: "0"
REDHAT:
- stonith:
- AFA:
- instance_attributes:
- pcmk_delay_max: "30s"
- pcmk_monitor_retries: "4"
- pcmk_action_limit: "3"
- pcmk_reboot_timeout: "900"
- power_timeout: "240"
- pcmk_monitor_timeout: "120"
- operations:
- monitor:
- interval: "3600"
- ISCSI:
- instance_attributes:
- pcmk_monitor_retries: "4"
- pcmk_action_limit: "3"
- pcmk_reboot_timeout: "900"
- power_timeout: "240"
- pcmk_monitor_timeout: "120"
- operations:
- monitor:
- interval: "600"
- timeout: "15"
+ fence_agent:
+ instance_attributes:
+ pcmk_delay_max: "15"
+ pcmk_monitor_retries: "4"
+ pcmk_action_limit: "3"
+ pcmk_reboot_timeout: "900"
+ power_timeout: "240"
+ pcmk_monitor_timeout: "120"
+ operations:
+ monitor:
+ interval: "3600"
+
+ sbd_stonith:
+ instance_attributes:
+ pcmk_monitor_retries: "4"
+ pcmk_action_limit: "3"
+ pcmk_reboot_timeout: "900"
+ power_timeout: "240"
+ pcmk_monitor_timeout: "120"
+ operations:
+ monitor:
+ interval: "600"
+ timeout: "15"
topology:
meta_attributes:
@@ -285,7 +286,7 @@ OS_PARAMETERS:
GLOBAL_INI:
SUSE:
provider: "SAPHanaSR"
- path: "/usr/share/SAPHanaSR"
+ path: ["/usr/share/SAPHanaSR", "/hana/shared/myHooks"]
execution_order: "1"
REDHAT:
diff --git a/src/roles/ha_db_hana/tasks/resource-migration.yml b/src/roles/ha_db_hana/tasks/resource-migration.yml
index 4cc0f756..51eae262 100644
--- a/src/roles/ha_db_hana/tasks/resource-migration.yml
+++ b/src/roles/ha_db_hana/tasks/resource-migration.yml
@@ -31,6 +31,44 @@
test_execution_start: "{{ now(utc=true, fmt='%Y-%m-%d %H:%M:%S') }}"
test_execution_hostname: "{{ hostvars[cluster_status_pre.primary_node].ansible_hostname }}"
+ - name: "Test Execution: Get HANA resource id"
+ block:
+ - name: "Try master resource ID"
+ ansible.builtin.shell: >-
+ set -o pipefail && {{ commands
+ | selectattr('name','equalto','get_hana_resource_id')
+ | map(attribute=(ansible_os_family|upper))
+ | first
+ }}
+ args:
+ executable: /bin/bash
+ changed_when: false
+ register: hana_resource_id
+ failed_when: hana_resource_id.rc != 0
+ rescue:
+ - name: "Try clone resource ID"
+ ansible.builtin.shell: >-
+ set -o pipefail && {{ commands
+ | selectattr('name','equalto','get_hana_resource_id')
+ | map(attribute='REDHAT')
+ | first
+ }}
+ args:
+ executable: /bin/bash
+ changed_when: false
+ register: hana_resource_id
+ failed_when: hana_resource_id.rc != 0
+ ignore_errors: true
+ always:
+ - name: "Test Execution: Set the resource name"
+ when:
+ - hana_resource_id.rc == 0
+ - hana_resource_id.stdout is defined
+ - hana_resource_id.stdout | type_debug != 'NoneType'
+ - hana_resource_id.stdout | trim | length > 1
+ ansible.builtin.set_fact:
+ hana_resource_name: "{{ hana_resource_id.stdout }}"
+
- name: "Test Execution: Move the resource to the targeted node"
ansible.builtin.command: "{{ commands | selectattr(
'name', 'equalto', 'resource_migration_cmd') | map(
diff --git a/src/roles/misc/tasks/rescue.yml b/src/roles/misc/tasks/rescue.yml
index 84c867ab..c3e04643 100644
--- a/src/roles/misc/tasks/rescue.yml
+++ b/src/roles/misc/tasks/rescue.yml
@@ -14,7 +14,7 @@
ansible.builtin.set_fact:
cluster_status_pre: "{{ cluster_status_pre | default({}) }}"
- - name: "Set node variables based on cluster status with safe fallbacks"
+ - name: "Set first_node variable"
ansible.builtin.set_fact:
first_node: >-
{{
@@ -24,6 +24,9 @@
)
)
}}
+
+ - name: "Set second_node variable"
+ ansible.builtin.set_fact:
second_node: >-
{{
cluster_status_pre.secondary_node | default(
@@ -32,6 +35,7 @@
)
)
}}
+
- name: "Combine logs from both Nodes"
ansible.builtin.set_fact:
combined_logs: >-
diff --git a/src/roles/misc/tasks/test-case-setup.yml b/src/roles/misc/tasks/test-case-setup.yml
index 49b30a60..7bc6dc6b 100644
--- a/src/roles/misc/tasks/test-case-setup.yml
+++ b/src/roles/misc/tasks/test-case-setup.yml
@@ -14,7 +14,7 @@
test_case_status: "Starting"
test_case_details: {}
test_case_message: "Test case execution started"
-
+ test_case_var_log_messages: []
- name: "Ensure test_case_invocation_id is available on all nodes"
ansible.builtin.set_fact:
diff --git a/src/vars/input-api.yaml b/src/vars/input-api.yaml
index 52960cf3..9b2d9e28 100644
--- a/src/vars/input-api.yaml
+++ b/src/vars/input-api.yaml
@@ -160,11 +160,25 @@ test_groups:
service recovery without data loss.
enabled: true
+
+# Default values for HANA DB HA Test Cases
+sap_sid: "HDB"
+db_sid: "HDB"
+db_instance_number: "00"
+scs_instance_number: "00"
+ers_instance_number: "01"
+NFS_provider: "AFS"
+sap_port_to_ping: "1128"
+
# Commands for HANA DB HA Test Cases based on OS family
commands:
+ - name: get_hana_resource_id
+ SUSE: "cibadmin --query --xpath \"//primitive[@type='SAPHana']\" --node-path | grep -oP \"master\\[@id='\\K[^']+\""
+ REDHAT: "cibadmin --query --xpath \"//primitive[@type='SAPHana']\" --node-path | grep -oP \"clone\\[@id='\\K[^']+\""
+
- name: resource_migration_cmd
- SUSE: "crm resource move msl_SAPHana_{{ db_sid | upper }}_HDB{{ db_instance_number }} {{ cluster_status_pre.secondary_node | default('') }} force"
- REDHAT: "pcs resource move SAPHana_{{ db_sid | upper }}_{{ db_instance_number }}-clone --master"
+ SUSE: "crm resource move {{ hana_resource_name | default('msl_SAPHana_' ~ (db_sid | upper) ~ '_HDB' ~ db_instance_number) }} {{ cluster_status_pre.secondary_node | default('') }} force"
+ REDHAT: "pcs resource move {{ hana_resource_name | default('SAPHana_' ~ (db_sid | upper) ~ '_' ~ db_instance_number ~ '-clone') }} --master"
- name: crm_report_cmd
SUSE: "crm_report -f '{{ test_group_start_time }}' /tmp/{{ test_group_invocation_id }}"
@@ -177,11 +191,3 @@ commands:
- name: ascs_resource_unmigrate_cmd
SUSE: "crm resource clear rsc_sap_{{ sap_sid }}_ASCS{{ scs_instance_number }}"
REDHAT: "pcs resource clear rsc_sap_{{ sap_sid }}_ASCS{{ scs_instance_number }}"
-
-# Default values for HANA DB HA Test Cases
-sap_sid: "HDB"
-db_sid: "HDB"
-db_instance_number: "00"
-scs_instance_number: "00"
-ers_instance_number: "01"
-NFS_provider: "AFS"
diff --git a/tests/modules/filesystem_freeze_test.py b/tests/modules/filesystem_freeze_test.py
index f7f5a815..e9dbccaa 100644
--- a/tests/modules/filesystem_freeze_test.py
+++ b/tests/modules/filesystem_freeze_test.py
@@ -56,7 +56,7 @@ def test_file_system_exists(self, monkeypatch, filesystem_freeze):
:param filesystem_freeze: FileSystemFreeze instance.
:type filesystem_freeze: FileSystemFreeze
"""
- mount_points = ["/hana/shared/SID", "/hana/shared"]
+ mount_points = ["/hana/shared"]
for mount_point in mount_points:
with monkeypatch.context() as monkey_patch:
monkey_patch.setattr(
@@ -70,10 +70,7 @@ def test_file_system_exists(self, monkeypatch, filesystem_freeze):
result = filesystem_freeze.get_result()
assert result["status"] == "PASSED"
- assert (
- result["message"]
- == "The file system (/hana/shared) was successfully mounted read-only."
- )
+ assert result["message"] == "The file system (/hana/shared) was mounted read-only."
assert result["changed"] is True
def test_file_system_not_exists(self, monkeypatch, filesystem_freeze):
@@ -135,10 +132,7 @@ def exit_json(self, **kwargs):
assert mock_result["changed"] is True
assert mock_result["status"] == "PASSED"
- assert (
- mock_result["message"]
- == "The file system (/hana/shared) was successfully mounted read-only."
- )
+ assert mock_result["message"] == "The file system (/hana/shared) was mounted read-only."
def test_main_method_non_anf_provider(self, monkeypatch):
"""
diff --git a/tests/modules/get_pcmk_properties_db_test.py b/tests/modules/get_pcmk_properties_db_test.py
index fdb635cd..97d8ce66 100644
--- a/tests/modules/get_pcmk_properties_db_test.py
+++ b/tests/modules/get_pcmk_properties_db_test.py
@@ -43,6 +43,8 @@
+
+
diff --git a/tests/roles/ha_db_hana/block_network_test.py b/tests/roles/ha_db_hana/block_network_test.py
index ad341437..6ec64a81 100644
--- a/tests/roles/ha_db_hana/block_network_test.py
+++ b/tests/roles/ha_db_hana/block_network_test.py
@@ -48,7 +48,7 @@ def test_environment(self, ansible_inventory):
"project/library/filesystem_freeze",
"bin/crm_resource",
"bin/iptables",
- "bin/ping",
+ "bin/nc",
"bin/echo",
"bin/sleep",
]
@@ -63,6 +63,7 @@ def test_environment(self, ansible_inventory):
"node_tier": "hana",
"NFS_provider": "ANF",
"database_cluster_type": "ISCSI",
+ "sap_port_to_ping": "1128",
},
)
@@ -114,7 +115,7 @@ def test_functional_db_primary_node_success(self, test_environment, ansible_inve
assert len(ok_events) > 0
# There will be 1 failed event, connection failure to primary node
- # This is the behavior be have mocked in the ping functionality
+ # This is the behavior we have mocked in the nc functionality
assert len(failed_events) == 1
post_status = {}
diff --git a/tests/roles/ha_db_hana/resource_migration_test.py b/tests/roles/ha_db_hana/resource_migration_test.py
index d6dd53b8..a5f65e79 100644
--- a/tests/roles/ha_db_hana/resource_migration_test.py
+++ b/tests/roles/ha_db_hana/resource_migration_test.py
@@ -53,8 +53,13 @@ def test_environment(self, ansible_inventory):
commands = [
{
"name": "resource_migration_cmd",
- "SUSE": "crm resource move SAPHana_HDB_HDB00 db02 force",
- }
+ "SUSE": "crm resource move {{ hana_resource_name | default('msl_SAPHana_' ~ "
+ "(db_sid | upper) ~ '_HDB' ~ db_instance_number) }} db02 force",
+ },
+ {
+ "name": "get_hana_resource_id",
+ "SUSE": "cibadmin --query --scope resources",
+ },
]
temp_dir = self.setup_test_environment(
@@ -67,6 +72,7 @@ def test_environment(self, ansible_inventory):
"project/library/log_parser",
"project/library/send_telemetry_data",
"project/library/location_constraints",
+ "bin/cibadmin",
"bin/crm_resource",
"bin/crm",
],
@@ -126,6 +132,7 @@ def test_functional_db_migration_success(self, test_environment, ansible_invento
task_result = event.get("event_data", {}).get("res")
if task and "Move the resource to the targeted node" in task:
assert task_result.get("rc") == 0
+ assert task_result.get("cmd")[3] == "msl_SAPHana_HDB"
elif task and "Test Execution: Validate HANA DB cluster status 1" in task:
assert task_result.get("secondary_node") == ""
elif task and "Test Execution: Validate HANA DB cluster status 2" in task:
@@ -136,6 +143,9 @@ def test_functional_db_migration_success(self, test_environment, ansible_invento
pre_status = task_result
elif task and "Remove any location_constraints" in task:
assert task_result.get("changed")
+ elif task and "Test Execution: Get HANA resource id" in task:
+ assert task_result.get("rc") == 0
+ assert task_result.get("stdout")
assert post_status.get("primary_node") == pre_status.get("secondary_node")
assert post_status.get("secondary_node") == pre_status.get("primary_node")
diff --git a/tests/roles/mock_data/cibadmin.txt b/tests/roles/mock_data/cibadmin.txt
new file mode 100644
index 00000000..81d8e566
--- /dev/null
+++ b/tests/roles/mock_data/cibadmin.txt
@@ -0,0 +1,4 @@
+#!/bin/bash
+
+echo 'msl_SAPHana_HDB'
+exit 0
\ No newline at end of file
diff --git a/tests/roles/mock_data/ping.txt b/tests/roles/mock_data/nc.txt
similarity index 100%
rename from tests/roles/mock_data/ping.txt
rename to tests/roles/mock_data/nc.txt