|
| 1 | +# This file is part of Ansible |
| 2 | +# GNU General Public License v3.0+ (see COPYING or https://www.gnu.org/licenses/gpl-3.0.txt) |
| 3 | + |
| 4 | +from __future__ import absolute_import, division, print_function |
| 5 | + |
| 6 | +__metaclass__ = type |
| 7 | + |
| 8 | +from copy import deepcopy |
| 9 | +import datetime |
| 10 | +import functools |
| 11 | +import time |
| 12 | + |
| 13 | +try: |
| 14 | + import botocore |
| 15 | +except ImportError: |
| 16 | + pass # caught by AnsibleAWSModule |
| 17 | + |
| 18 | +from ansible_collections.amazon.aws.plugins.module_utils.ec2 import ( |
| 19 | + ansible_dict_to_boto3_tag_list, |
| 20 | + camel_dict_to_snake_dict, |
| 21 | + compare_aws_tags, |
| 22 | +) |
| 23 | +from ansible_collections.amazon.aws.plugins.module_utils.core import is_boto3_error_code |
| 24 | +from ansible_collections.amazon.aws.plugins.module_utils.tagging import ( |
| 25 | + boto3_tag_list_to_ansible_dict, |
| 26 | +) |
| 27 | +from ansible.module_utils.six import string_types |
| 28 | + |
| 29 | + |
def get_domain_status(client, module, domain_name):
    """
    Fetch the DomainStatus of an existing OpenSearch domain.

    Returns the "DomainStatus" dict, or None when the domain does not
    exist; fails the module on any other AWS error.
    """
    try:
        describe_response = client.describe_domain(DomainName=domain_name)
        return describe_response["DomainStatus"]
    except is_boto3_error_code("ResourceNotFoundException"):
        # The domain does not exist (yet); not an error for our callers.
        return None
    except (botocore.exceptions.BotoCoreError, botocore.exceptions.ClientError) as e:  # pylint: disable=duplicate-except
        module.fail_json_aws(e, msg="Couldn't get domain {0}".format(domain_name))
| 41 | + |
| 42 | + |
def get_domain_config(client, module, domain_name):
    """
    Describe the configuration of an existing OpenSearch domain and reshape
    it so it can be used as input to client.update_domain().

    Status metadata is stripped. The returned config still includes the
    'EngineVersion' property, which must be removed from the dict before
    invoking client.update_domain().

    Return (domain_config, domain_arn), or (None, None) if the domain does
    not exist.
    """
    try:
        response = client.describe_domain_config(DomainName=domain_name)
    except is_boto3_error_code("ResourceNotFoundException"):
        return (None, None)
    except (botocore.exceptions.BotoCoreError, botocore.exceptions.ClientError) as e:  # pylint: disable=duplicate-except
        module.fail_json_aws(e, msg="Couldn't get domain {0}".format(domain_name))
    arn = None
    domain_config = {}
    if response is not None:
        # Flatten each section to its "Options" payload, dropping the status info.
        domain_config = {
            key: section["Options"] for key, section in response["DomainConfig"].items()
        }
        domain_config["DomainName"] = domain_name
        # A cluster attached to the Internet has no "VPCOptions" property at all.
        if "VPCOptions" in domain_config:
            # describe_domain_config() returns additional VPC attributes that
            # would cause an error if sent back in the HTTP POST body; keep
            # only the accepted ones.
            vpc_options = domain_config["VPCOptions"]
            trimmed = {}
            for accepted_key in ("SubnetIds", "SecurityGroupIds"):
                if accepted_key in vpc_options:
                    trimmed[accepted_key] = deepcopy(vpc_options[accepted_key])
            domain_config["VPCOptions"] = trimmed
        # boto3 converts "StartAt" to datetime, but comparisons expect the
        # plain "YYYY-MM-DD" string form.
        for schedule in domain_config["AutoTuneOptions"]["MaintenanceSchedules"]:
            start_at = schedule["StartAt"]
            if isinstance(start_at, datetime.datetime):
                schedule["StartAt"] = start_at.strftime("%Y-%m-%d")
        # Provisioning of "AdvancedOptions" is not supported by this module yet.
        domain_config.pop("AdvancedOptions", None)
        # Look up the ARN of the OpenSearch cluster from its status.
        domain = get_domain_status(client, module, domain_name)
        if domain is not None:
            arn = domain["ARN"]
    return (domain_config, arn)
| 88 | + |
| 89 | + |
def normalize_opensearch(client, module, domain):
    """
    Merge the input domain object with tags associated with the domain,
    convert the attributes from camel case to snake case, and return the object.

    :param domain: domain status dict as returned by describe_domain(),
        expected to carry at least the "ARN" key (camel-case keys).
    """
    try:
        domain["Tags"] = boto3_tag_list_to_ansible_dict(
            client.list_tags(ARN=domain["ARN"], aws_retry=True)["TagList"]
        )
    except (botocore.exceptions.ClientError, botocore.exceptions.BotoCoreError) as e:
        # At this point the dict still uses camel-case keys, so the name is
        # under "DomainName". The previous "domain_name" lookup raised
        # KeyError while reporting the failure, masking the AWS error.
        module.fail_json_aws(
            e, msg="Couldn't get tags for domain %s" % domain.get("DomainName")
        )
    except KeyError:
        # Unexpected response shape (e.g. missing "ARN" or "TagList").
        module.fail_json(msg=str(domain))

    return camel_dict_to_snake_dict(domain, ignore_list=["Tags"])
| 107 | + |
| 108 | + |
def wait_for_domain_status(client, module, domain_name, waiter_name):
    """
    Poll the domain status until the requested wait condition is reached.

    waiter_name is "domain_available" or "domain_deleted". Returns
    immediately when the module was invoked without wait=true; fails the
    module when wait_timeout expires before the condition holds.
    """
    if not module.params["wait"]:
        return
    deadline = time.time() + module.params["wait_timeout"]
    status_msg = ""
    while time.time() < deadline:
        status = get_domain_status(client, module, domain_name)
        if status is None:
            status_msg = "Not Found"
            if waiter_name == "domain_deleted":
                return
        else:
            created = status["Created"]
            processing = status["Processing"]
            upgrading = status["UpgradeProcessing"]
            status_msg = "Created: {0}. Processing: {1}. UpgradeProcessing: {2}".format(
                created, processing, upgrading
            )
            if waiter_name == "domain_available" and created and not (processing or upgrading):
                return
        time.sleep(15)
    # Timeout occurred.
    module.fail_json(
        msg=f"Timeout waiting for wait state '{waiter_name}'. {status_msg}"
    )
| 139 | + |
| 140 | + |
def parse_version(engine_version):
    """
    Parse an engine version of the form Elasticsearch_X.Y or OpenSearch_X.Y.

    Return dict { 'engine_type': engine_type, 'major': major, 'minor': minor },
    or None when the input does not match the expected format.
    """
    parts = engine_version.split("_")
    if len(parts) != 2:
        return None
    engine_type, semver_str = parts
    if engine_type not in ("Elasticsearch", "OpenSearch"):
        return None
    semver = semver_str.split(".")
    if len(semver) != 2 or not all(component.isdigit() for component in semver):
        return None
    return {
        "engine_type": engine_type,
        "major": int(semver[0]),
        "minor": int(semver[1]),
    }
| 160 | + |
| 161 | + |
def compare_domain_versions(version1, version2):
    """
    Compare two engine versions, cmp-style: return -1, 0 or 1.

    Each argument may be a version string (e.g. "OpenSearch_1.1") or a dict
    as returned by parse_version(). Every Elasticsearch version sorts before
    every OpenSearch version; an unparseable version sorts before any
    parseable one.
    """
    supported_engines = {
        'Elasticsearch': 1,
        'OpenSearch': 2,
    }
    if isinstance(version1, string_types):
        version1 = parse_version(version1)
    if isinstance(version2, string_types):
        version2 = parse_version(version2)
    if version1 is None or version2 is None:
        # None sorts first; two Nones compare equal.
        return (version2 is None) - (version1 is None)
    key1 = (
        supported_engines.get(version1.get('engine_type')),
        version1.get('major'),
        version1.get('minor'),
    )
    key2 = (
        supported_engines.get(version2.get('engine_type')),
        version2.get('major'),
        version2.get('minor'),
    )
    # Tuple comparison short-circuits exactly like the engine/major/minor
    # cascade: later fields are only consulted when earlier ones are equal.
    if key1 < key2:
        return -1
    if key1 > key2:
        return 1
    return 0
| 195 | + |
| 196 | + |
def get_target_increment_version(client, module, domain_name, target_version):
    """
    Returns the highest compatible version which is less than or equal to target_version.
    When upgrading a domain from version V1 to V2, it may not be possible to upgrade
    directly from V1 to V2. The domain may have to be upgraded through intermediate versions.
    Return None if there is no such version.
    For example, it's not possible to upgrade directly from Elasticsearch 5.5 to 7.10.
    """
    api_compatible_versions = None
    try:
        api_compatible_versions = client.get_compatible_versions(DomainName=domain_name)
    except (botocore.exceptions.BotoCoreError, botocore.exceptions.ClientError) as e:
        module.fail_json_aws(
            e,
            msg="Couldn't get compatible versions for domain {0}".format(domain_name),
        )
    compat = api_compatible_versions.get("CompatibleVersions")
    # A missing list and an empty list are the same failure; fail once with
    # an explicit msg= keyword, consistent with the other fail_json calls.
    if not compat:
        module.fail_json(
            msg="Unable to determine list of compatible versions",
            compatible_versions=api_compatible_versions,
        )
    target_versions = compat[0].get("TargetVersions")
    if target_versions is None:
        module.fail_json(
            msg="No compatible versions found",
            compatible_versions=api_compatible_versions,
        )
    compatible_versions = []
    for v in target_versions:
        if target_version == v:
            # It's possible to upgrade directly to the target version.
            return target_version
        semver = parse_version(v)
        if semver is not None:
            compatible_versions.append(semver)
    # No direct upgrade is possible. Upgrade to the highest compatible
    # version which is lower than target_version.
    compatible_versions = sorted(compatible_versions, key=functools.cmp_to_key(compare_domain_versions))
    for v in reversed(compatible_versions):
        if compare_domain_versions(v, target_version) <= 0:
            return v
    return None
| 242 | + |
| 243 | + |
def ensure_tags(client, module, resource_arn, existing_tags, tags, purge_tags):
    """
    Bring the tags on the domain in line with the requested tags.

    A tags value of None means "leave tags alone". Returns True when tags
    were (or, in check mode, would have been) added or removed.
    """
    if tags is None:
        return False
    tags_to_add, tags_to_remove = compare_aws_tags(existing_tags, tags, purge_tags)
    changed = bool(tags_to_add or tags_to_remove)
    if tags_to_add:
        if module.check_mode:
            module.exit_json(
                changed=True, msg="Would have added tags to domain if not in check mode"
            )
        try:
            client.add_tags(
                ARN=resource_arn,
                TagList=ansible_dict_to_boto3_tag_list(tags_to_add),
            )
        except (botocore.exceptions.ClientError, botocore.exceptions.BotoCoreError) as e:
            module.fail_json_aws(
                e, "Couldn't add tags to domain {0}".format(resource_arn)
            )
    if tags_to_remove:
        if module.check_mode:
            module.exit_json(
                changed=True, msg="Would have removed tags if not in check mode"
            )
        try:
            client.remove_tags(ARN=resource_arn, TagKeys=tags_to_remove)
        except (botocore.exceptions.ClientError, botocore.exceptions.BotoCoreError) as e:
            module.fail_json_aws(
                e, "Couldn't remove tags from domain {0}".format(resource_arn)
            )
    return changed
0 commit comments