diff --git a/README.md b/README.md index 11bbc062f..55fdb24cd 100755 --- a/README.md +++ b/README.md @@ -7,7 +7,7 @@
- [What's New](https://github.com/oracle-devrel/cd3-automation-toolkit/releases/tag/v2024.4.1)  • [Excel Templates](https://oracle-devrel.github.io/cd3-automation-toolkit/latest/excel-templates/)  • [CD3 Docs](https://oracle-devrel.github.io/cd3-automation-toolkit/) •  [Watch & Learn](https://www.youtube.com/playlist?list=PLPIzp-E1msrbJ3WawXVhzimQnLw5iafcp)  • [Blogs & Tutorials](https://oracle-devrel.github.io/cd3-automation-toolkit/latest/tutorials/)  • [Livelabs](https://apexapps.oracle.com/pls/apex/f?p=133:180:112501098061930::::wid:3724)  • [Slack Channel](https://oracle-devrel.github.io/cd3-automation-toolkit/latest/queries) + [What's New](https://github.com/oracle-devrel/cd3-automation-toolkit/releases/tag/v2024.4.2)  • [Excel Templates](https://oracle-devrel.github.io/cd3-automation-toolkit/latest/excel-templates/)  • [CD3 Docs](https://oracle-devrel.github.io/cd3-automation-toolkit/) •  [Watch & Learn](https://www.youtube.com/playlist?list=PLPIzp-E1msrbJ3WawXVhzimQnLw5iafcp)  • [Blogs & Tutorials](https://oracle-devrel.github.io/cd3-automation-toolkit/latest/tutorials/)  • [Livelabs](https://apexapps.oracle.com/pls/apex/f?p=133:180:112501098061930::::wid:3724)  • [Slack Channel](https://oracle-devrel.github.io/cd3-automation-toolkit/latest/queries)
diff --git a/cd3_automation_toolkit/Database/create_terraform_adb.py b/cd3_automation_toolkit/Database/create_terraform_adb.py index 0ccc5566d..0ea9ac644 100644 --- a/cd3_automation_toolkit/Database/create_terraform_adb.py +++ b/cd3_automation_toolkit/Database/create_terraform_adb.py @@ -180,8 +180,8 @@ def create_terraform_adb(inputfile, outdir, service_dir, prefix, ct): k +=1 else: wl_str = "" - tempdict = {'whitelisted_ips': wl_str,'network_compartment_id': network_compartment_id, 'vcn_name': vcn_name, - 'subnet_id': subnet_id } + #tempdict = {'whitelisted_ips': wl_str,'network_compartment_id': network_compartment_id, 'vcn_name': vcn_name,'subnet_id': subnet_id } + tempdict = {'whitelisted_ips': wl_str } tempStr.update(tempdict) diff --git a/cd3_automation_toolkit/Database/templates/adb-template b/cd3_automation_toolkit/Database/templates/adb-template index 82e87f6ec..73b159bb0 100644 --- a/cd3_automation_toolkit/Database/templates/adb-template +++ b/cd3_automation_toolkit/Database/templates/adb-template @@ -43,7 +43,7 @@ adb = { database_edition = "{{ database_edition }}" #Only for BYOL license model {% endif %} - {% if data_storage_size_in_tbs == "" %} + {% if data_storage_size_in_tb == "" %} data_storage_size_in_tbs = {{ data_storage_size_in_tb }} {% endif %} db_version = "19c" diff --git a/cd3_automation_toolkit/Identity/Users/export_users_nonGreenField.py b/cd3_automation_toolkit/Identity/Users/export_users_nonGreenField.py index 6388a1c47..d50a96eb8 100644 --- a/cd3_automation_toolkit/Identity/Users/export_users_nonGreenField.py +++ b/cd3_automation_toolkit/Identity/Users/export_users_nonGreenField.py @@ -112,7 +112,7 @@ def export_users(inputfile, outdir, service_dir, config, signer, ct,export_domai domain_name = domain_key.split("@")[1] domain_client = oci.identity_domains.IdentityDomainsClient(config=config, signer=signer, service_endpoint=idcs_endpoint) - users = domain_client.list_users() + users = domain_client.list_users(limit=100000) # change this to pagination once the API supports it index = 0 for user in users.data.resources: defined_tags_info = user.urn_ietf_params_scim_schemas_oracle_idcs_extension_oci_tags
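A note on the list_users change above: the hard-coded limit=100000 is a stopgap until the Identity Domains user listing can be paged. A minimal sketch of what the eventual pagination loop could look like, assuming SCIM-style start_index/count parameters are honoured by the endpoint (helper name and page size are illustrative, not part of the toolkit):

def list_all_domain_users(domain_client, page_size=500):
    # SCIM list responses are 1-based; keep fetching windows until a short page is returned.
    resources = []
    start_index = 1
    while True:
        page = domain_client.list_users(start_index=start_index, count=page_size).data
        batch = page.resources or []
        resources.extend(batch)
        if len(batch) < page_size:
            break
        start_index += page_size
    return resources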
diff --git a/cd3_automation_toolkit/Network/BaseNetwork/create_major_objects.py b/cd3_automation_toolkit/Network/BaseNetwork/create_major_objects.py index 3645e223a..47fade8ac 100644 --- a/cd3_automation_toolkit/Network/BaseNetwork/create_major_objects.py +++ b/cd3_automation_toolkit/Network/BaseNetwork/create_major_objects.py @@ -243,7 +243,10 @@ def create_drg_and_attachments(inputfile, outdir): if columnname == "DRG Name": drg_name = columnvalue - drg_tf_name = commonTools.check_tf_variable(drg_name) + if ("ocid1.drg.oc" not in drg_name): + drg_tf_name = commonTools.check_tf_variable(drg_name) + else: + drg_tf_name = drg_name tempdict['drg_tf_name'] = drg_tf_name if (columnname == 'Attached To'): @@ -327,6 +330,8 @@ def create_drg_and_attachments(inputfile, outdir): #if it is Auto Generated RT(during export) dont attach any RT to DRG attachment if(columnvalue in commonTools.drg_auto_RTs): drg_rt_tf_name = '' + elif("ocid1.drgroutetable.oc1" in columnvalue): + drg_rt_tf_name = columnvalue elif(columnvalue!=''): drg_rt_tf_name = commonTools.check_tf_variable(drg_name + "_" + columnvalue) tempStr['drg_rt_tf_name'] = drg_rt_tf_name @@ -340,7 +345,9 @@ def create_drg_and_attachments(inputfile, outdir): drgstr_skeleton = drg_template.render(count=0)[:-1] region_included_drg.append(region) tempStr['drg_version'] = drg_versions[region, drg_name] - drgstr = drg_template.render(tempStr) + drgstr='' + if ("ocid1.drg.oc" not in drg_tf_name): + drgstr = drg_template.render(tempStr) if(attachedto=="attached"): drg_attach = drg_attach_template.render(tempStr) @@ -360,7 +367,8 @@ def create_drg_and_attachments(inputfile, outdir): if region in region_included_drg: if(drg_attach_tfStr[region]!=''): drg_attach_tfStr[region] = drg_attach_skeleton + drg_attach_tfStr[region] - drg_tfStr[region] = drgstr_skeleton + drg_tfStr[region] + if(drg_tfStr[region] != ''): + drg_tfStr[region] = drgstr_skeleton + drg_tfStr[region] def processVCN(tempStr): rt_tf_name = '' diff --git a/cd3_automation_toolkit/Network/BaseNetwork/create_terraform_route.py b/cd3_automation_toolkit/Network/BaseNetwork/create_terraform_route.py index 7ee13657c..0083caedc 100644 --- a/cd3_automation_toolkit/Network/BaseNetwork/create_terraform_route.py +++ b/cd3_automation_toolkit/Network/BaseNetwork/create_terraform_route.py @@ -185,7 +185,7 @@ def create_terraform_drg_route(inputfile, outdir, service_dir, prefix, ct, non_g drg_rt_dstrb_tf_name = '' drg_rt_dstrb_res_name = '' region = str(df.loc[i, 'Region']).strip() - + vcn_connectivity_in_excel = "" if str(df.loc[i, 'Attached To']).lower().startswith("rpc"): vcn_connectivity_in_excel = "connectivity" elif str(df.loc[i, 'Attached To']).lower().startswith("vcn"): @@ -218,6 +218,9 @@ def create_terraform_drg_route(inputfile, outdir, service_dir, prefix, ct, non_g # Dont create any route table or route distribution name if using Auto Generated ones if (DRG_RT in commonTools.drg_auto_RTs and DRG_RD in commonTools.drg_auto_RDs): continue + # Don't create any route table or route distribution name if an OCID is given in DRG RT Name + if ("ocid1.drgroutetable.oc" in DRG_RT): + continue region = region.strip().lower() if region not in ct.all_regions: diff --git a/cd3_automation_toolkit/Network/BaseNetwork/exportRoutetable.py b/cd3_automation_toolkit/Network/BaseNetwork/exportRoutetable.py index f86deef41..686866c3c 100644 --- a/cd3_automation_toolkit/Network/BaseNetwork/exportRoutetable.py +++ b/cd3_automation_toolkit/Network/BaseNetwork/exportRoutetable.py @@ -12,23 +12,39 @@ def get_network_entity_name(config,signer,network_identity_id): vcn1 = VirtualNetworkClient(config=config, retry_strategy=oci.retry.DEFAULT_RETRY_STRATEGY,signer=signer) if('internetgateway' in network_identity_id): igw=vcn1.get_internet_gateway(network_identity_id) - network_identity_name = "igw:"+igw.data.display_name + network_entity_comp_id=igw.data.compartment_id + if network_entity_comp_id in export_compartment_ids: + network_identity_name = "igw:"+igw.data.display_name + else: + network_identity_name = "igw:" + igw.data.id return network_identity_name elif ('servicegateway' in network_identity_id): sgw = vcn1.get_service_gateway(network_identity_id) - network_identity_name = "sgw:"+sgw.data.display_name + network_entity_comp_id = sgw.data.compartment_id + if network_entity_comp_id in export_compartment_ids: + network_identity_name = "sgw:" + sgw.data.display_name + else: + network_identity_name = "sgw:"+sgw.data.id return network_identity_name elif ('natgateway' in network_identity_id): ngw = vcn1.get_nat_gateway(network_identity_id) - network_identity_name = "ngw:"+ngw.data.display_name + network_entity_comp_id = ngw.data.compartment_id + if network_entity_comp_id in export_compartment_ids: + network_identity_name = "ngw:" + ngw.data.display_name + else: + network_identity_name = "ngw:"+ngw.data.id return network_identity_name elif ('localpeeringgateway' in 
network_identity_id): lpg = vcn1.get_local_peering_gateway(network_identity_id) - network_identity_name = "lpg:"+lpg.data.display_name + network_entity_comp_id = lpg.data.compartment_id + if network_entity_comp_id in export_compartment_ids: + network_identity_name = "lpg:" + lpg.data.display_name + else: + network_identity_name = "lpg:"+lpg.data.id return network_identity_name elif ('drgattachment' in network_identity_id): drg_attach = vcn1.get_drg_attachment(network_identity_id) @@ -46,7 +62,11 @@ def get_network_entity_name(config,signer,network_identity_id): return network_identity_name elif ('drg' in network_identity_id): drg = vcn1.get_drg(network_identity_id) - network_identity_name = "drg:"+drg.data.display_name + network_entity_comp_id = drg.data.compartment_id + if network_entity_comp_id in export_compartment_ids: + network_identity_name = "drg:" + drg.data.display_name + else: + network_identity_name = "drg:"+drg.data.id return network_identity_name """ @@ -282,6 +302,7 @@ def export_routetable(inputfile, outdir, service_dir,config1,signer1, ct, export config=config1 global signer,tf_or_tofu signer=signer1 + global export_compartment_ids tf_or_tofu = ct.tf_or_tofu tf_state_list = [tf_or_tofu, "state", "list"] @@ -316,6 +337,10 @@ def export_routetable(inputfile, outdir, service_dir,config1,signer1, ct, export "import_commands_network_routerules.sh") importCommands[reg] = '' + export_compartment_ids = [] + for comp in export_compartments: + export_compartment_ids.append(ct.ntk_compartment_ids[comp]) + for reg in export_regions: config.__setitem__("region", commonTools().region_dict[reg]) state = {'path': f'{outdir}/{reg}/{service_dir}', 'resources': []} diff --git a/cd3_automation_toolkit/Network/BaseNetwork/export_network_nonGreenField.py b/cd3_automation_toolkit/Network/BaseNetwork/export_network_nonGreenField.py index cdb9d9c5b..de900ce8d 100644 --- a/cd3_automation_toolkit/Network/BaseNetwork/export_network_nonGreenField.py +++ b/cd3_automation_toolkit/Network/BaseNetwork/export_network_nonGreenField.py @@ -22,14 +22,18 @@ def print_drgv2(values_for_column_drgv2, region, comp_name, vcn_info, drg_info, drg_attachment_info, drg_rt_info, - import_drg_route_distribution_info, drg_route_distribution_statements): + import_drg_route_distribution_info, drg_route_distribution_statements,write_drg_ocids): for col_header in values_for_column_drgv2.keys(): if (col_header == "Region"): values_for_column_drgv2[col_header].append(region) elif (col_header == "Compartment Name"): values_for_column_drgv2[col_header].append(comp_name) elif (col_header == "DRG Name"): - values_for_column_drgv2[col_header].append(drg_info.display_name) + if write_drg_ocids == True: + values_for_column_drgv2[col_header].append(drg_info.id) + else: + values_for_column_drgv2[col_header].append(drg_info.display_name) + elif (col_header == "Attached To"): if (drg_attachment_info is None): values_for_column_drgv2[col_header].append('') @@ -53,12 +57,19 @@ def print_drgv2(values_for_column_drgv2, region, comp_name, vcn_info, drg_info, if (drg_rt_info == None): values_for_column_drgv2[col_header].append("") else: - values_for_column_drgv2[col_header].append(drg_rt_info.display_name) + if write_drg_ocids==True: + values_for_column_drgv2[col_header].append(drg_rt_info.id) + else: + values_for_column_drgv2[col_header].append(drg_rt_info.display_name) + elif (col_header == 'Import DRG Route Distribution Name'): if import_drg_route_distribution_info == None: values_for_column_drgv2[col_header].append("") else: - 
values_for_column_drgv2[col_header].append(import_drg_route_distribution_info.display_name) + if write_drg_ocids == True: + values_for_column_drgv2[col_header].append(import_drg_route_distribution_info.id) + else: + values_for_column_drgv2[col_header].append(import_drg_route_distribution_info.display_name) elif (col_header == "Import DRG Route Distribution Statements"): statement_val = '' if (drg_route_distribution_statements == None): @@ -90,7 +101,7 @@ def print_drgv2(values_for_column_drgv2, region, comp_name, vcn_info, drg_info, def print_vcns(values_for_column_vcns, region, comp_name, vnc,vcn_info, drg_attachment_info, igw_info, ngw_info, sgw_info, - lpg_display_names,state): + lpg_display_names,state,write_drg_ocids): drg_info=None for col_header in values_for_column_vcns.keys(): @@ -106,10 +117,17 @@ def print_vcns(values_for_column_vcns, region, comp_name, vnc,vcn_info, drg_atta values_for_column_vcns[col_header].append("n") else: route_table_id = drg_attachment_info.route_table_id - if (route_table_id is not None): - val = drg_info.display_name + "::" + vnc.get_route_table(route_table_id).data.display_name + if write_drg_ocids == True: + if (route_table_id is not None): + val = drg_info.id + "::" + vnc.get_route_table(route_table_id).data.display_name + else: + val = drg_info.id else: - val = drg_info.display_name + if (route_table_id is not None): + val = drg_info.display_name + "::" + vnc.get_route_table(route_table_id).data.display_name + else: + val = drg_info.display_name + values_for_column_vcns[col_header].append(val) else: values_for_column_vcns[col_header].append("n") @@ -620,6 +638,10 @@ def export_major_objects(inputfile, outdir, service_dir, config, signer, ct, exp print("Tabs- VCNs and DRGs would be overwritten during export process!!!\n") + export_compartment_ids = [] + for comp in export_compartments: + export_compartment_ids.append(ct.ntk_compartment_ids[comp]) + # Fetch DRGs for reg in export_regions: current_region = reg @@ -637,6 +659,7 @@ def export_major_objects(inputfile, outdir, service_dir, config, signer, ct, exp ntk_compartment_name], attachment_type="ALL") # ,lifecycle_state ="ATTACHED")#,attachment_type="ALL") rpc_execution = True + write_drg_ocids=False for drg_attachment_info in DRG_Attachments.data: if (drg_attachment_info.lifecycle_state != "ATTACHED"): continue @@ -647,6 +670,11 @@ def export_major_objects(inputfile, outdir, service_dir, config, signer, ct, exp # Attachment Data drg_display_name = drg_info.display_name drg_comp_id = drg_info.compartment_id + + if drg_comp_id not in export_compartment_ids: + drg_display_name=drg_id + write_drg_ocids=True + for key, val in ct.ntk_compartment_ids.items(): if val == drg_comp_id: if ("::" in key): @@ -660,7 +688,7 @@ def export_major_objects(inputfile, outdir, service_dir, config, signer, ct, exp if (drg_id not in drg_ocid): oci_obj_names[reg].write("\nDRG Version::::" + drg_display_name + "::::" + drg_version) tf_resource = f'module.drgs[\\"{tf_name}\\"].oci_core_drg.drg' - if tf_resource not in state["resources"]: + if tf_resource not in state["resources"] and write_drg_ocids == False: importCommands[reg].write( f'\n{tf_or_tofu} import "{tf_resource}" {str(drg_info.id)}') drg_ocid.append(drg_id) @@ -700,15 +728,17 @@ def export_major_objects(inputfile, outdir, service_dir, config, signer, ct, exp drg_route_table_info = vnc.get_drg_route_table(drg_route_table_id).data import_drg_route_distribution_id = drg_route_table_info.import_drg_route_distribution_id + if (import_drg_route_distribution_id != 
None): import_drg_route_distribution_info = vnc.get_drg_route_distribution( import_drg_route_distribution_id).data + drg_route_distribution_statements = vnc.list_drg_route_distribution_statements( import_drg_route_distribution_info.id) tf_name = commonTools.check_tf_variable( drg_display_name + "_" + import_drg_route_distribution_info.display_name) - if (import_drg_route_distribution_info.display_name not in commonTools.drg_auto_RDs): + if (import_drg_route_distribution_info.display_name not in commonTools.drg_auto_RDs and "ocid1.drg.oc" not in drg_display_name): tf_resource = f'module.drg-route-distributions[\\"{tf_name}\\"].oci_core_drg_route_distribution.drg_route_distribution' if tf_resource not in state["resources"]: importCommands[reg].write(f'\n{tf_or_tofu} import "{tf_resource}" {str(import_drg_route_distribution_info.id)}') @@ -722,7 +752,7 @@ def export_major_objects(inputfile, outdir, service_dir, config, signer, ct, exp print_drgv2(values_for_column_drgv2, region, drg_comp_name, vcn_info, drg_info, drg_attachment_info, drg_route_table_info, import_drg_route_distribution_info, - drg_route_distribution_statements) + drg_route_distribution_statements,write_drg_ocids) # RPC elif attach_type.upper() == "REMOTE_PEERING_CONNECTION" and rpc_execution: @@ -747,7 +777,7 @@ def export_major_objects(inputfile, outdir, service_dir, config, signer, ct, exp tf_name = commonTools.check_tf_variable( drg_display_name + "_" + import_drg_route_distribution_info.display_name) - if (import_drg_route_distribution_info.display_name not in commonTools.drg_auto_RDs): + if (import_drg_route_distribution_info.display_name not in commonTools.drg_auto_RDs and write_drg_ocids == False): tf_resource = f'module.drg-route-distributions[\\"{tf_name}\\"].oci_core_drg_route_distribution.drg_route_distribution' if tf_resource not in state["resources"]: importCommands[reg].write(f'\n{tf_or_tofu} import "{tf_resource}" {str(import_drg_route_distribution_info.id)}') @@ -784,6 +814,13 @@ def export_major_objects(inputfile, outdir, service_dir, config, signer, ct, exp drg_info = vnc.get_drg(drg_id).data drg_display_name = drg_info.display_name + #Do not process if DRG (and its RTs/RDs) are in a different compartment than the export_compartments list + drg_comp_id=drg_info.compartment_id + if drg_comp_id not in export_compartment_ids: + continue + + write_drg_ocids=False + if drg_info.default_drg_route_tables is not None: DRG_RTs = oci.pagination.list_call_get_all_results(vnc.list_drg_route_tables, drg_id=drg_id) @@ -820,7 +857,7 @@ def export_major_objects(inputfile, outdir, service_dir, config, signer, ct, exp print_drgv2(values_for_column_drgv2, region, drg_comp_name, vcn_info, drg_info, drg_attachment_info, drg_route_table_info, import_drg_route_distribution_info, - drg_route_distribution_statements) + drg_route_distribution_statements,write_drg_ocids) commonTools.write_to_cd3(values_for_column_drgv2, cd3file, "DRGs") print("RPCs exported to CD3\n") @@ -861,6 +898,14 @@ def export_major_objects(inputfile, outdir, service_dir, config, signer, ct, exp if (drg_attachment_info.lifecycle_state != "ATTACHED"): continue + write_drg_ocids=False + if drg_attachment_info != None: + drg_id = drg_attachment_info.drg_id + drg_info = vnc.get_drg(drg_id).data + drg_comp_id=drg_info.compartment_id + if drg_comp_id not in export_compartment_ids: + write_drg_ocids= True + # igw_display_name = "n" IGWs = oci.pagination.list_call_get_all_results(vnc.list_internet_gateways, compartment_id=ct.ntk_compartment_ids[ @@ -933,7 +978,7 @@ def 
export_major_objects(inputfile, outdir, service_dir, config, signer, ct, exp # Fill VCNs Tab print_vcns(values_for_column_vcns, region, ntk_compartment_name, vnc,vcn_info, drg_attachment_info, igw_info, ngw_info, - sgw_info, lpg_display_names,state) + sgw_info, lpg_display_names,state,write_drg_ocids) commonTools.write_to_cd3(values_for_column_vcns, cd3file, "VCNs") print("VCNs exported to CD3\n") diff --git a/cd3_automation_toolkit/Network/BaseNetwork/modify_secrules_tf.py b/cd3_automation_toolkit/Network/BaseNetwork/modify_secrules_tf.py index 6f81aa60b..4ce712ce5 100644 --- a/cd3_automation_toolkit/Network/BaseNetwork/modify_secrules_tf.py +++ b/cd3_automation_toolkit/Network/BaseNetwork/modify_secrules_tf.py @@ -82,7 +82,7 @@ def generate_security_rules(region_seclist_name,processed_seclist,tfStr,region,t commonTools.backup_file(outdir + "/" + reg + "/" + service_dir, resource, prefix + default_auto_tfvars_filename) with open('out.csv') as secrulesfile: - reader = csv.DictReader(ct.skipCommentedLine(secrulesfile)) + reader = csv.DictReader(secrulesfile) ingress_rule = '' egress_rule = '' processed_seclist = [] diff --git a/cd3_automation_toolkit/Network/LoadBalancers/create_backendset_backendservers.py b/cd3_automation_toolkit/Network/LoadBalancers/create_backendset_backendservers.py index 61c8836d0..16b3d4aad 100644 --- a/cd3_automation_toolkit/Network/LoadBalancers/create_backendset_backendservers.py +++ b/cd3_automation_toolkit/Network/LoadBalancers/create_backendset_backendservers.py @@ -220,7 +220,7 @@ def create_backendset_backendservers(inputfile, outdir, service_dir, prefix, ct) tempback = {'backup': backup } tempStr.update(tempback) - backend_server_tf_name = commonTools.check_tf_variable(servername+"-"+str(cnt)) + backend_server_tf_name = commonTools.check_tf_variable(servername+"-"+serverport) e = servername.count(".") if (e == 3): backend_server_ip_address = "IP:"+servername diff --git a/cd3_automation_toolkit/Network/LoadBalancers/create_nlb_backendset_backendservers.py b/cd3_automation_toolkit/Network/LoadBalancers/create_nlb_backendset_backendservers.py index 86bad49e8..3a957137b 100644 --- a/cd3_automation_toolkit/Network/LoadBalancers/create_nlb_backendset_backendservers.py +++ b/cd3_automation_toolkit/Network/LoadBalancers/create_nlb_backendset_backendservers.py @@ -170,7 +170,7 @@ def create_nlb_backendset_backendservers(inputfile, outdir, service_dir, prefix, servername = serverinfo.split(":")[0].strip() serverport = serverinfo.split(":")[1].strip() - backend_server_tf_name = commonTools.check_tf_variable(servername + "-" + str(cnt)) + backend_server_tf_name = commonTools.check_tf_variable(servername + "-" + serverport) e = servername.count(".") if (e == 3): backend_server_ip_address = "IP:" + servername diff --git a/cd3_automation_toolkit/Network/LoadBalancers/export_lbr_nonGreenField.py b/cd3_automation_toolkit/Network/LoadBalancers/export_lbr_nonGreenField.py index e42776fb8..2e3cfbb85 100644 --- a/cd3_automation_toolkit/Network/LoadBalancers/export_lbr_nonGreenField.py +++ b/cd3_automation_toolkit/Network/LoadBalancers/export_lbr_nonGreenField.py @@ -1066,7 +1066,7 @@ def export_lbr(inputfile, outdir, service_dir, config1, signer1, ct, export_comp for keys in values.backends: cnt = cnt + 1 backendservers_name = keys.name - backendservers_tf_name = commonTools.check_tf_variable(keys.ip_address + "-" + str(cnt)) + backendservers_tf_name = commonTools.check_tf_variable(keys.ip_address + "-" + str(keys.port)) tf_resource = 
f'module.backends[\\"{tf_name}_{backendsets_tf_name}_{backendservers_tf_name}\\"].oci_load_balancer_backend.backend' if tf_resource not in state["resources"]: importCommands[ diff --git a/cd3_automation_toolkit/Network/LoadBalancers/export_nlb_nonGreenField.py b/cd3_automation_toolkit/Network/LoadBalancers/export_nlb_nonGreenField.py index f71fdab27..90a907b1b 100644 --- a/cd3_automation_toolkit/Network/LoadBalancers/export_nlb_nonGreenField.py +++ b/cd3_automation_toolkit/Network/LoadBalancers/export_nlb_nonGreenField.py @@ -86,13 +86,13 @@ def print_nlb_backendset_backendserver(region, ct, values_for_column_bss,NLBs, n backend = instance_comp_name+"@"+instance_display_name+":"+port backend_list = backend_list + "," + backend - backendservers_name = instance_display_name +"-"+str(cnt_bes) + backendservers_name = instance_display_name +"-"+port backendservers_tf_name = commonTools.check_tf_variable(backendservers_name) else: backend = backend_value backend_list= backend_list+","+backend - backendservers_name = backend.split(":")[0] +"-"+str(cnt_bes) + backendservers_name = backend.split(":")[0] +"-"+port backendservers_tf_name = commonTools.check_tf_variable(backendservers_name) tf_resource = f'module.nlb-backends[\\"{tf_name}_{backendsets_tf_name}_{backendservers_tf_name}\\"].oci_network_load_balancer_backend.backend' if tf_resource not in state["resources"]: diff --git a/cd3_automation_toolkit/Release-Notes b/cd3_automation_toolkit/Release-Notes index 181f5096b..591b2331c 100644 --- a/cd3_automation_toolkit/Release-Notes +++ b/cd3_automation_toolkit/Release-Notes @@ -1,3 +1,16 @@ +------------------------------------- +CD3 Automation Toolkit Tag v2024.4.2 +Nov 8th, 2024 +------------------------------------- +1. Introduced deleteTenancyConfig.py script, which removes all the components created by createTenancyConfig.py +2. Bug fixes/Enhancements - + a. Modified Network sheets to accept OCID for DRG, DRG RT and Destination Object for Route Rule. + b. Identity Domain Users/Groups bug fixes for parameters (Terraform as well as Python) - user_can_request_access; group description made optional for identity domain groups. + c. cd3Validator bugs - corrected some false errors for buckets, network. + d. Retention rule bug for buckets. + e. Bug related to backend server tf name for LBaaS and NLB. + f. 
Bug related to git push while using an existing repo during createTenancyConfig.py + ------------------------------------- CD3 Automation Toolkit Tag v2024.4.1 Sep 30th, 2024 diff --git a/cd3_automation_toolkit/Storage/BlockVolume/export_blockvolumes_nonGreenField.py b/cd3_automation_toolkit/Storage/BlockVolume/export_blockvolumes_nonGreenField.py index b015a6c8d..8bce5bbac 100644 --- a/cd3_automation_toolkit/Storage/BlockVolume/export_blockvolumes_nonGreenField.py +++ b/cd3_automation_toolkit/Storage/BlockVolume/export_blockvolumes_nonGreenField.py @@ -117,9 +117,9 @@ def print_blockvolumes(region, BVOLS, bvol, compute, ct, values_for_column, ntk_ source_details = '' else: source_id = blockvols.source_details.id - source_details = blockvols.source_details.type.strip() + "::" + blockvols.display_name.strip() + source_details = blockvols.source_details.type.strip() + "::" + commonTools.check_tf_variable(blockvols.display_name.strip()) tmp_key = region + "--" + source_id - source_ocids[tmp_key] = blockvols.display_name.strip() + source_ocids[tmp_key] = commonTools.check_tf_variable(blockvols.display_name.strip()) autotune_type = '' max_vpus_per_gb = '' if len(blockvols.autotune_policies) == 0: diff --git a/cd3_automation_toolkit/Storage/ObjectStorage/create_terraform_oss.py b/cd3_automation_toolkit/Storage/ObjectStorage/create_terraform_oss.py index 8e842f039..3faa39b5c 100644 --- a/cd3_automation_toolkit/Storage/ObjectStorage/create_terraform_oss.py +++ b/cd3_automation_toolkit/Storage/ObjectStorage/create_terraform_oss.py @@ -279,7 +279,7 @@ def create_terraform_oss(inputfile, outdir, service_dir, prefix, ct): time_rule_locked = rule_components[3] if time_rule_locked.endswith(".000Z"): time_rule_locked = time_rule_locked[:-5] + "Z" - elif not re.match(r"\d{4}-\d{2}-\d{2}T\d{2}:\d{2}:\d{2}Z|\d{4}-\d{2}-\d{2}T\d{2}:\d{2}:\d{2}\.\d{3}Z",time_rule_locked): + elif not re.match(r"\d{4}-\d{2}-\d{2}T\d{2}:\d{2}:\d{2}.*Z",time_rule_locked): # Convert from "dd-mm-yyyy" to "YYYY-MM-DDThh:mm:ssZ" format if re.match(r"\d{2}-\d{2}-\d{4}", time_rule_locked): try: diff --git a/cd3_automation_toolkit/cd3Validator.py b/cd3_automation_toolkit/cd3Validator.py index 4879a73b6..181fc840e 100644 --- a/cd3_automation_toolkit/cd3Validator.py +++ b/cd3_automation_toolkit/cd3Validator.py @@ -220,6 +220,7 @@ def validate_subnets(filename, comp_ids, vcnobj): # Loop through each row for i in dfsub.index: + subnet_or_vlan = (str(dfsub.loc[i, 'Subnet or VLAN']).strip()).split("::")[0] count = count + 1 # Check for in the inputs; if found the validation ends there and return the status of flag if (str(dfsub.loc[i, 'Region']).strip() in commonTools.endNames): @@ -229,7 +230,7 @@ def validate_subnets(filename, comp_ids, vcnobj): region = str(dfsub.loc[i, 'Region']).strip() if (region.lower() != "nan" and region.lower() not in ct.all_regions): - log(f'ROW {i+3} : Region {region} is not subscribed to tenancy.') + log(f'ROW {i+3} : Either "Region" {region} is not subscribed to tenancy or toolkit is not yet configured to be used for this region.') subnet_reg_check = True # Check for invalid compartment name @@ -252,30 +253,31 @@ def validate_subnets(filename, comp_ids, vcnobj): # Check if the dns_label field has special characters or if it has greater than 15 characters or is duplicate dns_value = str(dfsub.loc[i, 'DNS Label']).strip() - dns_subnetname = str(dfsub.loc[i, 'Display Name']).strip() - dns_vcn = str(dfsub.loc[i, 'VCN Name']).strip() + if dns_value.lower() != 'n': + dns_subnetname = str(dfsub.loc[i, 'Display 
Name']).strip() + dns_vcn = str(dfsub.loc[i, 'VCN Name']).strip() - if (dns_value.lower() == "nan"): - subnet_dns.append("") - else: - if (dns_vcn not in vcn_list): - vcn_list.append(dns_vcn) - if (dns_subnetname not in subnetname_list): - subnetname_list.append(dns_subnetname) - if (dns_value not in subnet_dns): - subnet_dns.append(dns_value) + if (dns_value.lower() == "nan"): + subnet_dns.append("") else: - if (dns_value not in subnet_dns): - subnet_dns.append(dns_value) + if (dns_vcn not in vcn_list): + vcn_list.append(dns_vcn) + if (dns_subnetname not in subnetname_list): + subnetname_list.append(dns_subnetname) + if (dns_value not in subnet_dns): + subnet_dns.append(dns_value) else: - log(f'ROW {i+3} : Duplicate "DNS Label" value "{dns_value}" for subnet "{dns_subnetname}" of vcn "{dns_vcn}".') - subnet_dns.append(dns_value) - subnet_dnsdup_check = True - subnet_dnswrong_check = checklabel(dns_value, count) + if (dns_value not in subnet_dns): + subnet_dns.append(dns_value) + else: + log(f'ROW {i+3} : Duplicate "DNS Label" value "{dns_value}" for subnet "{dns_subnetname}" of vcn "{dns_vcn}".') + subnet_dns.append(dns_value) + #subnet_dnsdup_check = True + subnet_dnswrong_check = checklabel(dns_value, count) - if (len(dns_value) > 15): - log(f'ROW {i+3} : "DNS Label" value "{dns_value}" for subnet "{dns_subnetname}" of vcn "{dns_vcn}" has more alphanumeric characters than the allowed maximum limit of 15.') - subnet_dns_length = True + if (len(dns_value) > 15): + log(f'ROW {i+3} : "DNS Label" value "{dns_value}" for subnet "{dns_subnetname}" of vcn "{dns_vcn}" has more alphanumeric characters than the allowed maximum limit of 15.') + subnet_dns_length = True # Check if the Service and Internet gateways are set appropriately; if not display the message; sgw_value = str(dfsub.loc[i, 'Configure SGW Route(n|object_storage|all_services)']).strip() @@ -316,6 +318,8 @@ def validate_subnets(filename, comp_ids, vcnobj): if j in labels or commonTools.check_column_headers(j) in commonTools.tagColumns: pass else: + if j == "Type(private|public)" and subnet_or_vlan.lower() == "vlan": + continue log(f'ROW {count+2} : Empty value at column "{j}".') subnet_empty_check = True @@ -403,7 +407,7 @@ def validate_vcns(filename, comp_ids, vcnobj):# config): # ,vcn_cidrs,vcn_compa # Check for invalid Region region = str(dfv.loc[i, 'Region']).strip() if (region.lower() != "nan" and region.lower() not in ct.all_regions): - log(f'ROW {i+3} : Region {region} is not subscribed to tenancy.') + log(f'ROW {i+3} : Either "Region" {region} is not subscribed to tenancy or toolkit is not yet configured to be used for this region.') vcn_reg_check = True # Check for invalid Compartment Name @@ -557,7 +561,7 @@ def validate_dhcp(filename, comp_ids, vcnobj): region = str(dfdhcp.loc[i, 'Region']).strip() if (region.lower() != "nan" and region.lower() not in ct.all_regions): - log(f'ROW {i+3} : Region {region} is not subscribed to tenancy.') + log(f'ROW {i+3} : Either "Region" {region} is not subscribed to tenancy or toolkit is not yet configured to be used for this region.') dhcp_reg_check = True # Check for invalid compartment name @@ -625,7 +629,7 @@ def validate_drgv2(filename, comp_ids, vcnobj): log(f'ROW {i + 3} : Empty value at column "Region".') drgv2_empty_check = True elif region not in ct.all_regions: - log(f'ROW {i + 3} : Region {region} is not subscribed to tenancy.') + log(f'ROW {i + 3} : Either "Region" {region} is not subscribed to tenancy or toolkit is not yet configured to be used for this region.') 
drgv2_invalid_check = True # Check for invalid Compartment Name @@ -645,7 +649,7 @@ def validate_drgv2(filename, comp_ids, vcnobj): if drg_name.lower() == 'nan': log(f'ROW {i + 3} : Empty value at column "DRG Name".') drgv2_empty_check = True - if drg_name not in vcnobj.vcns_having_drg.values(): + if drg_name not in vcnobj.vcns_having_drg.values() and "ocid1.drg.oc" not in drg_name: log(f'ROW {i + 3}: DRG Name {drg_name} not part of VCNs Tab.') drgv2_drg_check = True @@ -658,14 +662,14 @@ def validate_drgv2(filename, comp_ids, vcnobj): drgv2_format_check = True else: attached_to=attached_to.split("::") - if(len(attached_to)!=2): + if(len(attached_to) < 2 or len(attached_to) > 3): log(f'ROW {i + 3} : Wrong value at column Attached To - {attached_to}. Valid format is <attach type>::<name>') drgv2_format_check = True elif attached_to[0].strip().upper()=="VCN": vcn_name = attached_to[1].strip() try: - if (vcn_name.lower() != "nan" and vcnobj.vcns_having_drg[vcn_name,region]!=drg_name): + if (vcn_name.lower() != "nan" and vcnobj.vcns_having_drg[vcn_name,region]!=drg_name) and "ocid1.drg.oc" not in drg_name: log(f'ROW {i + 3}: VCN {vcn_name} in column Attached To is not as per DRG Required column of VCNs Tab.') drgv2_vcn_check = True except KeyError: @@ -720,7 +724,7 @@ def validate_dns(filename,comp_ids): log(f'ROW {i + 3} : Empty value at column "Region".') mandat_val_check = True elif region not in ct.all_regions: - log(f'ROW {i + 3} : "Region" {region} is not subscribed for tenancy.') + log(f'ROW {i + 3} : Either "Region" {region} is not subscribed to tenancy or toolkit is not yet configured to be used for this region.') mandat_val_check = True # Check for invalid Compartment Name @@ -769,7 +773,7 @@ def validate_dns(filename,comp_ids): log(f'ROW {i + 3} : Empty value at column "Region".') mandat_val_check = True elif region not in ct.all_regions: - log(f'ROW {i + 3} : "Region" {region} is not subscribed for tenancy.') + log(f'ROW {i + 3} : Either "Region" {region} is not subscribed to tenancy or toolkit is not yet configured to be used for this region.') mandat_val_check = True # Check for invalid Compartment Name @@ -843,7 +847,7 @@ def validate_instances(filename,comp_ids,subnetobj,vcn_subnet_list,vcn_nsg_list) log(f'ROW {i+3} : Empty value at column "Region".') inst_empty_check = True elif region not in ct.all_regions: - log(f'ROW {i+3} : "Region" {region} is not subscribed for tenancy.') + log(f'ROW {i+3} : Either "Region" {region} is not subscribed to tenancy or toolkit is not yet configured to be used for this region.') inst_invalid_check = True # Check for invalid Compartment Name @@ -908,7 +912,7 @@ def validate_instances(filename,comp_ids,subnetobj,vcn_subnet_list,vcn_nsg_list) log(f'ROW {i+3} : Empty value at column "Source Details".') inst_empty_check = True - elif (not columnvalue.startswith("image::") and not columnvalue.startswith("bootVolume::") and not columnvalue.startswith("ocid1.image.oc")): + elif (not columnvalue.startswith("image::") and not columnvalue.startswith("bootVolume::") and not columnvalue.startswith("ocid1.image.oc") and not columnvalue.startswith("ocid1.bootvolume.oc")): log(f'ROW {i+3} : Wrong value at column Source Details - {columnvalue}. Valid format is image::<image name/ocid> or bootVolume::<boot volume name/ocid>.') inst_invalid_check = True
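For clarity, the Source Details check above reduces to a simple prefix predicate; a standalone sketch (function name and sample values are illustrative):

def is_valid_source_details(value):
    # Accepted forms per the validator above: a named image/boot volume or the corresponding OCID.
    accepted_prefixes = ("image::", "bootVolume::", "ocid1.image.oc", "ocid1.bootvolume.oc")
    return value.startswith(accepted_prefixes)

assert is_valid_source_details("image::Oracle-Linux-8.9")
assert is_valid_source_details("ocid1.bootvolume.oc1.iad.exampleuniqueid")
assert not is_valid_source_details("volume::demo")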
@@ -961,7 +965,7 @@ def validate_blockvols(filename,comp_ids): log(f'ROW {i+3} : Empty value at column "Region".') bvs_empty_check = True elif region not in ct.all_regions: - log(f'ROW {i+3} : Region {region} is not subscribed to tenancy.') + log(f'ROW {i+3} : Either "Region" {region} is not subscribed to tenancy or toolkit is not yet configured to be used for this region.') bvs_invalid_check = True # Check for invalid Compartment Name @@ -1030,7 +1034,7 @@ def validate_blockvols(filename,comp_ids): block_volume_replicas_region = (block_volume_replicas_ads[0]).lower() block_volume_replicas_ad = (block_volume_replicas_ads[1]).upper() if block_volume_replicas_region not in ct.all_regions or block_volume_replicas_ad not in ADS: - log(f'ROW {i + 3} : Volume replication Region is not subscribed to tenancy or AD is not present in destination region. Check column "' +columnname+"\"") + log(f'ROW {i + 3} : Volume replication Region is not subscribed to tenancy or toolkit is not yet configured to be used for this region or AD is not present in destination region. Check column "' +columnname+"\"") bvs_invalid_check = True elif block_volume_replicas_region == str(dfvol.loc[i, 'Region']).strip().lower() and block_volume_replicas_ad == str(dfvol.loc[i, 'Availability Domain(AD1|AD2|AD3)']).strip().upper(): log(f'ROW {i + 3} : Replication Region and AD can not be same as Volume Region and AD. Check column "' +columnname+"\"") @@ -1120,7 +1124,7 @@ def validate_fss(filename,comp_ids,subnetobj,vcn_subnet_list,vcn_nsg_list): fss_empty_check = True if region!='nan' and region not in ct.all_regions: - log(f'ROW {i+3} : "Region" {region} is not subscribed for tenancy.') + log(f'ROW {i+3} : Either "Region" {region} is not subscribed to tenancy or toolkit is not yet configured to be used for this region.') fss_invalid_check = True # Check for invalid Compartment Name @@ -1486,7 +1490,7 @@ def validate_buckets(filename, comp_ids): log(f'ROW {i + 3} : Empty value at column "Region".') buckets_empty_check = True elif region not in ct.all_regions: - log(f'ROW {i + 3} : "Region" {region} is not subscribed for tenancy.') + log(f'ROW {i + 3} : Either "Region" {region} is not subscribed to tenancy or toolkit is not yet configured to be used for this region.') bucket_reg_check = True # Check for invalid Compartment Name @@ -1577,6 +1581,8 @@ def validate_buckets(filename, comp_ids): current_time = datetime.datetime.utcnow() #Check for the retention policy details if columnname == 'Retention Rules': + if columnvalue == "nan": + continue rule_values = columnvalue.split("\n") if rule_values and str(dfbuckets.loc[i, 'Object Versioning']).strip().lower() == 'enabled': log(f'ROW {i + 3} : Retention policy cannot be created when Object Versioning is enabled.') @@ -1618,7 +1624,7 @@ def validate_buckets(filename, comp_ids): if time_rule_locked: if time_rule_locked.endswith(".000Z"): time_rule_locked = time_rule_locked[:-5] + "Z" - elif not re.match(r"\d{4}-\d{2}-\d{2}T\d{2}:\d{2}:\d{2}Z|\d{4}-\d{2}-\d{2}T\d{2}:\d{2}:\d{2}\.\d{3}Z",time_rule_locked): + elif not re.match(r"\d{4}-\d{2}-\d{2}T\d{2}:\d{2}:\d{2}.*Z",time_rule_locked): # Convert from "dd-mm-yyyy" to "YYYY-MM-DDThh:mm:ssZ" format if re.match(r"\d{2}-\d{2}-\d{4}", time_rule_locked): try:
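The relaxed regex above admits both second-precision and fractional-second UTC timestamps, and the hunk that follows parses them accordingly. A condensed standalone sketch of the combined normalization (function name is illustrative):

import datetime

def parse_time_rule_locked(value):
    # ".000Z" is normalized away first, mirroring the validator above;
    # any other fractional part is parsed with the "%f" directive.
    if value.endswith(".000Z"):
        value = value[:-5] + "Z"
    fmt = "%Y-%m-%dT%H:%M:%S.%fZ" if "." in value else "%Y-%m-%dT%H:%M:%SZ"
    return datetime.datetime.strptime(value, fmt)

parse_time_rule_locked("2025-01-01T00:00:00Z")      # second precision
parse_time_rule_locked("2025-01-01T00:00:00.500Z")  # fractional seconds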
"%Y-%m-%dT%H:%M:%SZ") + if len(time_rule_locked.split(".")) > 1: + time_rule_locked_datetime = datetime.datetime.strptime(time_rule_locked, + "%Y-%m-%dT%H:%M:%S.%fZ" + ) + else: + time_rule_locked_datetime = datetime.datetime.strptime(time_rule_locked, "%Y-%m-%dT%H:%M:%SZ") except ValueError: log(f'ROW {i + 3} : "time_rule_locked" of retention rule is not in valid format. It should be in the format "YYYY-MM-DDThh:mm:ssZ".') buckets_invalid_check = True @@ -1721,7 +1732,7 @@ def validate_kms(filename,comp_ids): if region == 'nan': pass elif region != 'nan' and region not in ct.all_regions: - log(f'ROW {i + 3} : "Region" {region} is not subscribed for tenancy.') + log(f'ROW {i + 3} : Either "Region" {region} is not subscribed to tenancy or toolkit is not yet configured to be used for this region.') kms_invalid_check = True vault_compartment_name = str(dfkms.loc[i, 'Vault Compartment Name']).strip() @@ -1780,7 +1791,7 @@ def validate_kms(filename,comp_ids): elif replica_region == 'nan': pass elif replica_region != 'nan' and replica_region not in ct.all_regions: - log(f'ROW {i + 3} : "Replica Region" {region} is not subscribed for tenancy.') + log(f'ROW {i + 3} : "Replica Region" {region} is either not subscribed to tenancy or toolkit is not yet configured to be used for this region') kms_invalid_check = True diff --git a/cd3_automation_toolkit/example/CD3-Blank-template.xlsx b/cd3_automation_toolkit/example/CD3-Blank-template.xlsx index 0000db17e..4c696d581 100644 Binary files a/cd3_automation_toolkit/example/CD3-Blank-template.xlsx and b/cd3_automation_toolkit/example/CD3-Blank-template.xlsx differ diff --git a/cd3_automation_toolkit/example/CD3-CIS-template.xlsx b/cd3_automation_toolkit/example/CD3-CIS-template.xlsx index 4ecb1ab7c..bd4811999 100644 Binary files a/cd3_automation_toolkit/example/CD3-CIS-template.xlsx and b/cd3_automation_toolkit/example/CD3-CIS-template.xlsx differ diff --git a/cd3_automation_toolkit/example/CD3-HubSpoke-template.xlsx b/cd3_automation_toolkit/example/CD3-HubSpoke-template.xlsx index 0740c9105..3f2c7d330 100644 Binary files a/cd3_automation_toolkit/example/CD3-HubSpoke-template.xlsx and b/cd3_automation_toolkit/example/CD3-HubSpoke-template.xlsx differ diff --git a/cd3_automation_toolkit/example/CD3-SingleVCN-template.xlsx b/cd3_automation_toolkit/example/CD3-SingleVCN-template.xlsx index 4bcb35baf..bc062c4b0 100644 Binary files a/cd3_automation_toolkit/example/CD3-SingleVCN-template.xlsx and b/cd3_automation_toolkit/example/CD3-SingleVCN-template.xlsx differ diff --git a/cd3_automation_toolkit/setUpOCI.py b/cd3_automation_toolkit/setUpOCI.py index 9b6b353d7..165c474da 100644 --- a/cd3_automation_toolkit/setUpOCI.py +++ b/cd3_automation_toolkit/setUpOCI.py @@ -2087,6 +2087,31 @@ def create_firewall(inputfile, outdir, service_dir, prefix, ct,sub_options=[]): ## Fetch Subscribed Regions subscribed_regions = ct.get_subscribedregions(config,signer) + +# Check for new region subscriptions +region_dir_list =[] +for name in os.listdir(outdir): + if os.path.isdir(os.path.join(outdir,name)): + region_dir_list.append(name) + +region_dir_not_configured = list(set(ct.all_regions).difference(region_dir_list)) + +if region_dir_not_configured!=[]: + #None of the subscribed regions dir exist. 
diff --git a/cd3_automation_toolkit/setUpOCI.py b/cd3_automation_toolkit/setUpOCI.py index 9b6b353d7..165c474da 100644 --- a/cd3_automation_toolkit/setUpOCI.py +++ b/cd3_automation_toolkit/setUpOCI.py @@ -2087,6 +2087,31 @@ def create_firewall(inputfile, outdir, service_dir, prefix, ct,sub_options=[]): ## Fetch Subscribed Regions subscribed_regions = ct.get_subscribedregions(config,signer) + +# Check for new region subscriptions +region_dir_list =[] +for name in os.listdir(outdir): + if os.path.isdir(os.path.join(outdir,name)): + region_dir_list.append(name) + +region_dir_not_configured = list(set(ct.all_regions).difference(region_dir_list)) + +if region_dir_not_configured!=[]: + #None of the subscribed regions dir exist. Looks like the user has not executed createTenancyConfig.py even once + if list(set(ct.all_regions).difference(region_dir_not_configured)) == []: + print("Make sure that the CD3 container has been connected to the tenancy using the createTenancyConfig.py script.") + print("Follow the documentation link: https://oracle-devrel.github.io/cd3-automation-toolkit/latest/install-cd3/") + print("Exiting!!!") + exit(0) + + #New region subscription + else: + print("WARNING!!!!!!!!") + print("Regions "+str(region_dir_not_configured) + " are subscribed to the tenancy but not yet configured with CD3 Automation Toolkit.") + print("Re-run createTenancyConfig.py with the same tenancyconfig.properties used for prefix '"+ prefix+"' to configure new regions with the toolkit.") + print("Until then, OCI resources cannot be managed through Terraform for these regions.\n\n") + ct.all_regions = list(set(ct.all_regions).difference(region_dir_not_configured)) + home_region = ct.home_region ## Fetch Region ADs diff --git a/cd3_automation_toolkit/user-scripts/createTenancyConfig.py b/cd3_automation_toolkit/user-scripts/createTenancyConfig.py index a6499629e..ddb11f5e1 100644 --- a/cd3_automation_toolkit/user-scripts/createTenancyConfig.py +++ b/cd3_automation_toolkit/user-scripts/createTenancyConfig.py @@ -9,18 +9,14 @@ import argparse import logging import os -import re import shutil import sys import datetime import configparser -import distutils -from distutils import dir_util import oci from oci.object_storage import ObjectStorageClient import git import glob -import yaml import subprocess sys.path.append(os.getcwd()+"/..") from os import environ @@ -74,8 +70,15 @@ def create_devops_resources(config,signer): retry_strategy=oci.retry.DEFAULT_RETRY_STRATEGY, signer=signer) - create_topic_response = ons_client.create_topic(create_topic_details=oci.ons.models.CreateTopicDetails( + try: + create_topic_response = ons_client.create_topic(create_topic_details=oci.ons.models.CreateTopicDetails( name=topic_name, compartment_id=compartment_ocid, description="Created by Automation ToolKit")).data + except Exception as e: + print(e.message) + if ('Topic with same name already exists in the same tenant or compartment' in str(e.message)): + print("If it is in Deleting State, wait a few minutes for it to get terminated and re-try.") + print("\nExiting!!!") + exit() toolkit_topic_id = create_topic_response.topic_id @@ -158,21 +161,26 @@ def update_devops_config(prefix, repo_ssh_url,files_in_repo,dir_values,devops_us "User " + str(devops_user) + "\n " \ "IdentityFile " + str(devops_user_key) + "\n" + ''' user_ssh_dir = os.path.expanduser("~") + "/.ssh" if not os.path.exists(user_ssh_dir): os.makedirs(user_ssh_dir) + ''' + #ssh_config_file = user_ssh_dir + '/config' - ssh_config_file = user_ssh_dir + '/config' + if not os.path.exists(jenkins_home): + os.mkdir(jenkins_home) + git_config_file = jenkins_home + '/git_config' - #if /cd3user/.ssh/config file exists - if os.path.exists(ssh_config_file): - f = open(ssh_config_file,"r") + #if git_config_file exists + if os.path.exists(git_config_file): + f = open(git_config_file,"r") config_file_data = f.read() f.close() # new prefix config if prefix not in config_file_data: - f = open(ssh_config_file,"a") + f = open(git_config_file,"a") config_file_data = "\n\n" + new_data f.write(config_file_data) f.close() @@ -182,7 +190,7 @@ def update_devops_config(prefix, repo_ssh_url,files_in_repo,dir_values,devops_us # file does not exist else: - f = open(ssh_config_file, "w") + f = open(git_config_file, "w") 
config_file_data = new_data f.write(config_file_data) f.close() @@ -190,10 +198,10 @@ def update_devops_config(prefix, repo_ssh_url,files_in_repo,dir_values,devops_us #shutil.copyfile(git_config_file, user_ssh_dir + '/config') # change permissions of private key file and config file for GIT os.chmod(devops_user_key, 0o600) - os.chmod(user_ssh_dir + '/config', 0o600) + os.chmod(git_config_file, 0o600) #os.chmod(git_config_file, 0o600) - ''' + # create symlink for Git Config file for SSH operations. src = git_config_file if not os.path.exists("/cd3user/.ssh"): @@ -204,11 +212,8 @@ def update_devops_config(prefix, repo_ssh_url,files_in_repo,dir_values,devops_us except FileExistsError as e: os.unlink(dst) os.symlink(src,dst) - ''' # create jenkins.properties file - if not os.path.exists(jenkins_home): - os.mkdir(jenkins_home) jenkins_properties_file_path = jenkins_home+"/jenkins.properties" if dir_values: @@ -233,28 +238,23 @@ def update_devops_config(prefix, repo_ssh_url,files_in_repo,dir_values,devops_us file = open(jenkins_properties_file_path, "w") jenkins_config.write(file) + file.close() except Exception as e: print(e) - file.close() + if os.path.exists(devops_dir+"../.terraform_files"): + shutil.rmtree(devops_dir+"../.terraform_files", ignore_errors=True) + os.rename(devops_dir,devops_dir+"../.terraform_files") - """# Update Environment variable for jenkins - yaml_file_path = jenkins_install + "/jcasc.yaml" - if (os.path.exists(yaml_file_path)): - with open(yaml_file_path) as yaml_file: - cfg = yaml.load(yaml_file, Loader=yaml.FullLoader) - cfg["jenkins"]["globalNodeProperties"] = [{'envVars': {'env': [{'key': 'customer_prefix', 'value': prefix}]}}] - with open(yaml_file_path, "w") as yaml_file: - cfg = yaml.dump(cfg, stream=yaml_file, default_flow_style=False, sort_keys=False)""" - # Clean repo config if exists and initiate git repo - subprocess.run(['git', 'init'], cwd=devops_dir,stdout=DEVNULL) + if not os.path.exists(devops_dir): + os.makedirs(devops_dir) subprocess.run(['git', 'config', '--global', 'init.defaultBranch', "main"], cwd=devops_dir) + subprocess.run(['git', 'init'], cwd=devops_dir,stdout=DEVNULL) subprocess.run(['git', 'config', '--global', 'safe.directory', devops_dir], cwd=devops_dir) - f = open(devops_dir + ".gitignore", "w") - git_ignore_file_data = ".DS_Store\n*tfstate*\n*terraform*\ntfplan.out\ntfplan.json\n*backup*\nimport_commands*\n*cis_report*\n*showoci_report*\n*.safe\n*stacks.zip\n*cd3Validator*" - f.write(git_ignore_file_data) - f.close() + subprocess.run(['git', 'config','--global','user.email',devops_user], cwd=devops_dir) + subprocess.run(['git', 'config', '--global', 'user.name', devops_user], cwd=devops_dir) + # Cleanup existing "origin" remote and create required one existing_remote = subprocess.run(['git', 'remote'], cwd=devops_dir,capture_output=True).stdout existing_remote = str(existing_remote).split('\'')[1][:-2] @@ -271,42 +271,56 @@ def update_devops_config(prefix, repo_ssh_url,files_in_repo,dir_values,devops_us f.close() exit(1) - for f in glob.glob(jenkins_install + "/*.groovy"): - shutil.copy2(f, devops_dir) + # Create local branch "main" from remote "main" - subprocess.run(['git', 'checkout', '-B', 'main','-q'], cwd=devops_dir,stdout=DEVNULL) - subprocess.run(['git', 'pull', 'origin', 'main','-q'], cwd=devops_dir,stdout=DEVNULL,stderr=DEVNULL) - subprocess.run(['git', 'add', '-A'], cwd=devops_dir,stdout=DEVNULL) + subprocess.run(['git', 'checkout', '-B', 'main'], cwd=devops_dir,stdout=DEVNULL) + subprocess.run(['git', 'pull', 'origin', 
'main','--rebase'], cwd=devops_dir,stdout=DEVNULL) + f = open(devops_dir + ".gitignore", "w") + git_ignore_file_data = ".DS_Store\n*tfstate*\n*terraform*\ntfplan.out\ntfplan.json\n*backup*\nimport_commands*\n*cis_report*\n*showoci_report*\n*.safe\n*stacks.zip\n*cd3Validator*" + f.write(git_ignore_file_data) + f.close() + + all_items = glob.glob(devops_dir + "../.terraform_files/*")+ [devops_dir+ "/../.terraform_files/.safe"] + for f in all_items: + actual_file = f.split("/")[-1] + path = devops_dir+actual_file + if os.path.exists(path) and os.path.isfile(path): + os.remove(devops_dir+actual_file) + if os.path.exists(path) and os.path.isdir(path): + shutil.rmtree(path, ignore_errors=True) + if actual_file.endswith(".tf_backup") or actual_file.endswith(".tfstate"): + continue + shutil.move(f,devops_dir) + for f in glob.glob(jenkins_install + "/*.groovy"): + actual_file = f.split("/")[-1] + path = devops_dir+actual_file + if os.path.exists(path) and os.path.isfile(path): + os.remove(path) + shutil.copy(f,devops_dir) + + last_commit_id = subprocess.run(['git', 'rev-parse', '--short', 'HEAD'], cwd=devops_dir,capture_output=True).stdout current_status = subprocess.run(['git', 'status','--porcelain'], cwd=devops_dir,capture_output=True).stdout current_status = str(current_status).split('\'')[1] + subprocess.run(['git', 'add', '-A'], cwd=devops_dir,stdout=DEVNULL) + subprocess.run(['git', 'commit', '-m','Initial commit from createTenancyConfig'], cwd=devops_dir,stdout=DEVNULL) if current_status and files_in_repo > 0: - subprocess.run(['git', 'stash','-q'], cwd=devops_dir,stdout=DEVNULL) - #subprocess.run(['git', 'rebase','origin/main','-q'], cwd=devops_dir,stdout=DEVNULL) repo_changes = input("\nData in local terraform_files and the repo are not the same. Which changes do you want to retain? 
Enter local or repo, default is local : ") if ("repo" in repo_changes.lower()): print("Ignoring local changes......") - #subprocess.run(['git', 'stash'], cwd=devops_dir,stdout=DEVNULL) - #subprocess.run(['git', 'pull','origin','main'], cwd=devops_dir,stdout=DEVNULL) - #subprocess.run(['git', 'stash', 'drop', f'stash@{{{0}}}'], cwd=devops_dir,stdout=DEVNULL) + subprocess.run(['git', 'branch','main', '-u', 'origin/main'], cwd=devops_dir,stdout=DEVNULL) + subprocess.run(['git','reset','--hard','@{u}'], cwd=devops_dir,stdout=DEVNULL) + subprocess.run(['git', 'pull','origin','main'], cwd=devops_dir,stdout=DEVNULL) else: print("Updating remote with local changes.....") - #subprocess.run(['git', 'stash'], cwd=devops_dir,stdout=DEVNULL) - #subprocess.run(['git', 'pull','origin','main'], cwd=devops_dir,stdout=DEVNULL) - #subprocess.run(['git', 'stash', 'apply', f'stash@{{{0}}}'], cwd=devops_dir,stdout=DEVNULL) - subprocess.run(['git', 'stash', 'pop'], cwd=devops_dir,stdout=DEVNULL) - subprocess.run(['git', 'add', '-A'], cwd=devops_dir,stdout=DEVNULL) - #subprocess.run(['git', 'stash', 'drop', f'stash@{{{0}}}'], cwd=devops_dir,stdout=DEVNULL) - subprocess.run(['git', 'config','--global','user.email',devops_user], cwd=devops_dir) - subprocess.run(['git', 'config', '--global', 'user.name', devops_user], cwd=devops_dir) commit_id='None' try: - subprocess.run(['git', 'commit', '-m','Initial commit from createTenancyConfig.py'], cwd=devops_dir,stdout=DEVNULL) + #subprocess.run(['git', 'commit', '-m','Initial commit from createTenancyConfig.py'], cwd=devops_dir,stdout=DEVNULL) commit_id = subprocess.run(['git', 'rev-parse', '--short', 'HEAD'], cwd=devops_dir,capture_output=True).stdout commit_id = str(commit_id).split('\'')[1][:-2] subprocess.run(['git', 'push','origin','main'], cwd=devops_dir, stdout=DEVNULL) - print("Initial Commit to DevOps Repository done with commit id: " + commit_id) + print("Latest Commit to DevOps Repository done with commit id: " + commit_id) except git.exc.GitCommandError as e: if ("nothing to commit, working directory clean" in str(e)): print("Nothing to commit to DevOps Repository.") @@ -315,8 +329,9 @@ def update_devops_config(prefix, repo_ssh_url,files_in_repo,dir_values,devops_us print("Exiting...") exit(1) # Create develop branch from main - subprocess.run(['git', 'checkout', '-B', 'develop','main', '-q'], cwd=devops_dir, stdout=DEVNULL) - subprocess.run(['git', 'push', 'origin', 'develop'], cwd=devops_dir, stdout=DEVNULL) + subprocess.run(['git', 'checkout', '-B', 'develop','main'], cwd=devops_dir, stdout=DEVNULL) + subprocess.run(['git', 'pull', 'origin', 'develop','--rebase'], cwd=devops_dir,stdout=DEVNULL,stderr=DEVNULL) + subprocess.run(['git', 'push', 'origin', 'develop','-f'], cwd=devops_dir, stdout=DEVNULL) return commit_id def create_bucket(config, signer): @@ -349,8 +364,8 @@ def create_bucket(config, signer): auto_keys_dir = user_dir + "/tenancies/keys" toolkit_dir = os.path.dirname(os.path.abspath(__file__))+"/.." 
#toolkit_dir = user_dir +"/oci_tools/cd3_automation_toolkit" -modules_dir = toolkit_dir + "/user-scripts/terraform" -variables_example_file = modules_dir + "/variables_example.tf" +terraform_dir = toolkit_dir + "/user-scripts/terraform" +variables_example_file = terraform_dir + "/variables_example.tf" setupoci_props_toolkit_file_path = toolkit_dir + "/setUpOCI.properties" jenkins_install = toolkit_dir + "/../jenkins_install" @@ -452,8 +467,8 @@ def create_bucket(config, signer): remote_state = config.get('Default', 'use_remote_state').strip().lower() remote_state_bucket = config.get('Default', 'remote_state_bucket_name').strip() - use_devops = config.get('Default', 'use_oci_devops_git').strip().strip().lower() - devops_repo = config.get('Default', 'oci_devops_git_repo_name').strip().strip() + use_devops = config.get('Default', 'use_oci_devops_git').strip().lower() + devops_repo = config.get('Default', 'oci_devops_git_repo_name').strip() devops_user = config.get('Default', 'oci_devops_git_user').strip() devops_user_key = config.get('Default', 'oci_devops_git_key').strip() @@ -573,7 +588,7 @@ def create_bucket(config, signer): "key_file = "+_key_path+"\n" "region = "+region+"\n") ''' - # copy config file to customer specific directory and create symlink for TF execution + # copy config file to prefix specific directory and create symlink for TF execution config_file_path_user_home = user_dir + "/.oci/config" # To take care of multiple executions of createTenancyConfig,py if not os.path.islink(config_file_path_user_home): @@ -620,11 +635,17 @@ def create_bucket(config, signer): ## Check the remote state requirements -backend_file = open(modules_dir + "/backend.tf", 'r') +backend_file = open(terraform_dir + "/backend.tf", 'r') backend_file_data = backend_file.readlines() global_backend_file_data = "" if remote_state == "yes": + #fetch compartment ocid from compartment name + if "ocid1.compartment.oc" not in compartment_ocid and "ocid1.tenancy.oc" not in compartment_ocid: + print("Fetching existing Compartments from Tenancy...") + ct.get_network_compartment_ids(config['tenancy'], "root", config, signer) + compartment_ocid = ct.ntk_compartment_ids[compartment_ocid] + print("\nCreating Tenancy specific remote tfstate Items - bucket, S3 credentials.................") s3_credential_file_path = config_files + "/" + prefix + "_s3_credentials" buckets_client = ObjectStorageClient(config=config, @@ -805,7 +826,7 @@ def create_bucket(config, signer): if not os.path.exists(f"{terraform_files}/global/rpc"): os.makedirs(f"{terraform_files}/global/rpc") - shutil.copyfile(modules_dir + "/provider.tf", f"{terraform_files}/global/rpc/provider.tf") + shutil.copyfile(terraform_dir + "/provider.tf", f"{terraform_files}/global/rpc/provider.tf") with open(f"{terraform_files}/global/rpc/provider.tf", 'r+') as provider_file: provider_file_data = provider_file.read().rstrip() @@ -833,12 +854,24 @@ def create_bucket(config, signer): if os.path.exists(terraform_files+region): continue - os.mkdir(terraform_files+region) + #os.mkdir(terraform_files+region) + + # 7. 
Copy terraform modules and variables file to outdir + ''' + if modules_source+"/" == terraform_files: + try: + shutil.copytree(terraform_dir + "/modules", terraform_files+"/modules") + except FileExistsError as fe: + pass + shutil.copytree(terraform_dir, terraform_files + "/" + region + "/", ignore=shutil.ignore_patterns("modules")) + ''' + shutil.copytree(terraform_dir, terraform_files + "/" + region + "/") + + #Prepare variables file linux_image_id = '' windows_image_id = '' - new_config = deepcopy(config) new_config.__setitem__("region", ct.region_dict[region]) cc = oci.core.ComputeClient(config=new_config,signer=signer) @@ -874,8 +907,7 @@ def create_bucket(config, signer): f.write(variables_example_file_data) f.close() - # 7. Copy terraform modules and variables file to outdir - distutils.dir_util.copy_tree(modules_dir, terraform_files +"/" + region) + with open(terraform_files +"/" + region + "/provider.tf", 'r+') as provider_file: provider_file_data = provider_file.read().rstrip() if auth_mechanism == 'instance_principal': @@ -995,8 +1027,9 @@ def create_bucket(config, signer): f.write(regions_file_data[:-1]) f.close() # create all compartments_file - print("Fetching existing Compartments from Tenancy...") - ct.get_network_compartment_ids(config['tenancy'], "root", config, signer) + if ct.ntk_compartment_ids == {}: + print("Fetching existing Compartments from Tenancy...") + ct.get_network_compartment_ids(config['tenancy'], "root", config, signer) compartments_file_data = "" comp_done = [] for k, v in ct.ntk_compartment_ids.items(): @@ -1058,7 +1091,7 @@ def create_bucket(config, signer): f.write(data) f.close() -logging.info("Customer Specific Working Directory Path: "+customer_tenancy_dir+"\n") +logging.info("Prefix Specific Working Directory Path: "+customer_tenancy_dir+"\n") if remote_state == 'yes': logging.info("Remote State Bucket Name: "+ bucket_name+ " in "+rg+".") diff --git a/cd3_automation_toolkit/user-scripts/deleteTenancyConfig.py b/cd3_automation_toolkit/user-scripts/deleteTenancyConfig.py new file mode 100644 index 000000000..6ea6df177 --- /dev/null +++ b/cd3_automation_toolkit/user-scripts/deleteTenancyConfig.py @@ -0,0 +1,414 @@ +#!/usr/bin/python3 +# Copyright (c) 2016, 2019, Oracle and/or its affiliates. All rights reserved. +# +# This script will help in initializing the docker container; creates config and variables files. +# +# Author: Suruchi +# + +import argparse +import logging +import os +import shutil +import sys +import datetime +import configparser +import time + +import oci +from oci.object_storage import ObjectStorageClient +from copy import deepcopy +sys.path.append(os.getcwd()+"/..") +import subprocess +from os import environ +from commonTools import * + + + +# Execution of code begins here +parser = argparse.ArgumentParser(description="cleans up prefix specific files form container and oci objects") +parser.add_argument("propsfile", help="Full Path of properties file. eg tenancyconfig.properties") +args = parser.parse_args() +config = configparser.RawConfigParser() +config.read(args.propsfile) + +current_time=str(datetime.datetime.now()) + +# Initialize Toolkit Variables +user_dir = "/cd3user" + +prefix = config.get('Default', 'prefix').strip() +if prefix == "" or prefix == "\n": + print("Invalid Prefix. Please try again......Exiting !!") + exit(1) + + +print("Executing Deletion for Prefix "+prefix+"...") +prefix_rem=input("Press y to continue. 
Default is n: ") +if(prefix_rem.lower()!='y'): + print("Exiting without any deletion...") + exit() + +customer_tenancy_dir = user_dir + "/tenancies/" + prefix +config_files= user_dir + "/tenancies/" + prefix +"/.config_files" +config_file_path = config_files + "/" + prefix + "_oci_config" + +auto_keys_dir = user_dir + "/tenancies/keys" +safe_file = user_dir + "/tenancies/createTenancyConfig.safe" + +# Read Config file Variables +try: + user='' + _key_path='' + fingerprint='' + + tenancy = config.get('Default', 'tenancy_ocid').strip() + if tenancy == "" or tenancy == "\n": + print("Tenancy ID cannot be left empty...Exiting !!") + exit(1) + + auth_mechanism = config.get('Default', 'auth_mechanism').strip().lower() + if auth_mechanism == "" or auth_mechanism == "\n" or (auth_mechanism!='api_key' and auth_mechanism!='session_token' and auth_mechanism!='instance_principal'): + print("Auth Mechanism cannot be left empty...Exiting !!") + exit(1) + + region = config.get('Default', 'region').strip() + if region == "" or region == "\n": + print("Region cannot be left empty...Exiting !!") + exit(1) + rg=region + + if auth_mechanism == 'api_key': + print("=================================================================") + print("NOTE: Make sure the API Public Key is added to the OCI Console!!!") + print("=================================================================") + + fingerprint = config.get('Default', 'fingerprint').strip() + if fingerprint == "" or fingerprint == "\n": + print("Fingerprint cannot be left empty...Exiting !!") + exit(1) + + key_path = config.get('Default', 'key_path').strip() + if key_path == "" or key_path == "\n": + key_path = auto_keys_dir +"/oci_api_private.pem" + if not os.path.isfile(key_path): + print("Invalid PEM Key File at "+key_path+". 
Please try again......Exiting !!") + exit(1) + + user = config.get('Default', 'user_ocid').strip() + if user == "" or user == "\n": + print("user_ocid cannot be left empty...Exiting !!") + exit(1) + + ## Advanced parameters ## + remote_state = config.get('Default', 'use_remote_state').strip().lower() + remote_state_bucket = config.get('Default', 'remote_state_bucket_name').strip() + + use_devops = config.get('Default', 'use_oci_devops_git').strip().strip().lower() + devops_repo = config.get('Default', 'oci_devops_git_repo_name').strip().strip() + devops_user = config.get('Default', 'oci_devops_git_user').strip() + devops_user_key = config.get('Default', 'oci_devops_git_key').strip() + outdir_structure_file = config.get('Default', 'outdir_structure_file').strip() + multi_outdir=False + if (outdir_structure_file != '' and outdir_structure_file != "\n") and os.path.isfile(outdir_structure_file): + outdir_config = configparser.RawConfigParser() + outdir_config.read(outdir_structure_file) + multi_outdir=True + + if use_devops == 'yes' or remote_state == 'yes': + #Use remote state if using devops + remote_state='yes' + + # OCI DevOps GIT User and Key are mandatory while using instance_principal or session_token + if auth_mechanism == 'instance_principal' or auth_mechanism == 'session_token': + if devops_user == "" or devops_user == "\n": + print("OCI DevOps GIT User cannot be left empty when using instance_principal or session_token...Exiting !!") + exit(1) + if use_devops == 'yes' and devops_user_key == "" or devops_user_key == "\n": + print("OCI DevOps GIT Key cannot be left empty when using instance_principal or session_token...Exiting !!") + exit(1) + if auth_mechanism == 'api_key': + # Use same user and key as $user_ocid and $key_path for OCI Devops GIT operations + if devops_user == '' or devops_user=="\n": + devops_user = user + if devops_user_key == '' or devops_user_key=="\n": + devops_user_key = config_files+"/"+os.path.basename(key_path) + + if remote_state == 'yes': + # Use same oci_devops_git_user for managing terraform remote state backend + remote_state_user=devops_user + + # Bucket Name + if remote_state_bucket == '' or remote_state_bucket == "\n": + bucket_name = prefix + "-automation-toolkit-bucket" + else: + bucket_name = remote_state_bucket.strip() +except Exception as e: + print(e.message) + print('Check if input properties exist and try again..exiting...') + exit(1) + + +#Removes OCI Objects +exception= False +if use_devops == 'yes': + print("Removing OCI Objects...") + + if devops_repo == '' or devops_repo == "\n": + topic_name = prefix + "-automation-toolkit-topic" + project_name = prefix + "-automation-toolkit-project" + repo_name = prefix + "-automation-toolkit-repo" + devops_exists = False + else: + topic_name = '' + project_name = devops_repo.split("/")[0] + repo_name = devops_repo.split("/")[1] + devops_exists = True + + ct = commonTools() + config, signer = ct.authenticate(auth_mechanism, config_file_path) + try: + ct.get_subscribedregions(config,signer) + except Exception as e: + print(str(e.message)) + f = open(safe_file, "a") + data = prefix + "\t" + "FAIL\t" + current_time + "\n" + f.write(data) + f.close() + exit(1) + home_region = ct.home_region + + # Remove bucket + buckets_client = ObjectStorageClient(config=config, + retry_strategy=oci.retry.DEFAULT_RETRY_STRATEGY, + signer=signer) + namespace = buckets_client.get_namespace().data + + object_list=None + try: + buckets_client.get_bucket(namespace, bucket_name).data + object_list = 
buckets_client.list_objects(namespace, bucket_name).data + except Exception as e: + print(e.message) + + count = 0 + if object_list!=None: + for o in object_list.objects: + count += 1 + buckets_client.delete_object(namespace, bucket_name, o.name) + + if count > 0: + print(f'Deleted {count} objects in {bucket_name}') + else: + print(f'Bucket is empty. No objects to delete.') + + try: + cmd = "oci os object bulk-delete-versions -ns "+namespace+" -bn "+bucket_name+" --force --auth "+auth_mechanism + cmd_list = cmd.split() + if auth_mechanism == "instance_principal": + subprocess.run(cmd_list) + else: + cmd_list.append("--config-file") + cmd_list.append(config_file_path) + subprocess.run(cmd_list) + print("Force deleted object versions") + response = buckets_client.delete_bucket(namespace, bucket_name) + print(f'Deleted bucket {bucket_name}') + except Exception as e: + print(e.message) + exception=True + + #Remove Customer Secret Key + new_config = deepcopy(config) + new_config.__setitem__("region", ct.region_dict[home_region]) + identity_client = oci.identity.IdentityClient(config=new_config, retry_strategy=oci.retry.DEFAULT_RETRY_STRATEGY, + signer=signer) + # Get user ocid for DevOps User Name + if "ocid1.user.oc" not in remote_state_user: + if '@' in remote_state_user: + remote_state_user = remote_state_user.rsplit("@",1)[0] + + user_data = identity_client.list_users(compartment_id=tenancy).data + found=0 + for user_d in user_data: + if user_d.name==remote_state_user and user_d.lifecycle_state=="ACTIVE": + remote_state_user = user_d.id + found =1 + break + if found == 0: + print("Unable to find the user ocid for creating customer secret key. Exiting...") + exit(1) + + customer_secret_cred_name = prefix+"-automation-toolkit-csk" + list_customer_secret_key_response = identity_client.list_customer_secret_keys(user_id=remote_state_user).data + customer_secret_key_id='' + for keys in list_customer_secret_key_response: + if keys.display_name == customer_secret_cred_name: + customer_secret_key_id=keys.id + break + + if customer_secret_key_id!='': + try: + identity_client.delete_customer_secret_key(user_id=remote_state_user, + customer_secret_key_id=customer_secret_key_id) + print("Customer Secret Key deleted for user "+remote_state_user+"\n") + except Exception as e: + print(e.message) + exception=True + else: + print("Customer Secret Key not found for user "+remote_state_user+"\n") + + # Remove Devops GIT Repo and Project + devops_client = oci.devops.DevopsClient(config=config, retry_strategy=oci.retry.DEFAULT_RETRY_STRATEGY, + signer=signer) + ons_client = oci.ons.NotificationControlPlaneClient(config=config, + retry_strategy=oci.retry.DEFAULT_RETRY_STRATEGY, + signer=signer) + + resource_search = oci.resource_search.ResourceSearchClient(config, retry_strategy=oci.retry.DEFAULT_RETRY_STRATEGY, + signer=signer) + + #Fetch Topic + ons_query = 'query onstopic resources where displayname = \'' + topic_name + '\'' + ons_search_details = oci.resource_search.models.StructuredSearchDetails(type='Structured', + query=ons_query) + ons_resources = oci.pagination.list_call_get_all_results(resource_search.search_resources, ons_search_details, + limit=1000) + toolkit_topic_id='' + topic_state='' + for ons in ons_resources.data: + topic_state = ons.lifecycle_state + toolkit_topic_id = ons.identifier + + if toolkit_topic_id!='': + if topic_state != 'ACTIVE': + print("Topic "+topic_name+" not in ACTIVE state\n") + else: + try: + ons_client.delete_topic(topic_id=toolkit_topic_id) + print("Topic "+topic_name+" is 
in Deleting State. Wait for few minutes to rerun createTenancyConfig.py with same prefix\n") + except Exception as e: + print(e.message) + exception=True + else: + print("Topic "+topic_name+" does not exist in OCI\n") + + #Fetch Project + devops_query = 'query devopsproject resources where displayname = \'' + project_name + '\'' + devops_search_details = oci.resource_search.models.StructuredSearchDetails(type='Structured', + query=devops_query) + devops_resources = oci.pagination.list_call_get_all_results(resource_search.search_resources, devops_search_details, + limit=1000) + toolkit_project_id='' + for project in devops_resources.data: + project_state = project.lifecycle_state + toolkit_project_id = project.identifier + + if toolkit_project_id!='': + if project_state != 'ACTIVE': + print("Devops Project " + project_name + " not in ACTIVE state\n") + else: + #Fetch Repo + list_repository_response = devops_client.list_repositories(project_id=toolkit_project_id, name=repo_name).data + repo_state = "" + repo_id='' + for item in list_repository_response.items: + repo_state = item.lifecycle_state + repo_id = item.id + + if repo_id!='': + if repo_state != "ACTIVE": + print("Repo " + repo_name + " not in ACTIVE state\n") + else: + try: + devops_client.delete_repository(repository_id=repo_id) + print("Waiting for repository ("+repo_name+") to be in DELETED state.") + except Exception as e: + print(e.message) + exception = True + try: + repo_state='' + while repo_state != "DELETED": + repo_data = devops_client.get_repository(repository_id=repo_id).data + repo_state = repo_data.lifecycle_state + except Exception as e: + print("Devops Repo "+repo_name+" deleted\n") + else: + print("Devops Repo "+repo_name+" does not exist in OCI\n") + + try: + devops_client.delete_project(project_id=toolkit_project_id) + print("Devops Project "+project_name+" deleted.\n\n") + except Exception as e: + print(e.message) + exception = True + + else: + print("Devops Project "+project_name+" does not exist in OCI\n\n") + +print("Cleaning up local files from container after successful deletion of OCI objects...") + +# Removes prefix from jenkins.properties +jenkins_home = user_dir+"/tenancies/jenkins_home" +if environ.get('JENKINS_HOME') is not None: + jenkins_home = os.environ['JENKINS_HOME'] + +if (os.path.exists(jenkins_home+"/jenkins.properties")): + jenkins_properties_file_path = jenkins_home+"/jenkins.properties" + jenkins_config = configparser.RawConfigParser() + jenkins_config.read(jenkins_properties_file_path) + if (prefix in jenkins_config.sections()): + jenkins_config.remove_section(prefix) + file = open(jenkins_properties_file_path, "w") + jenkins_config.write(file) + file.close() + + #Removes prefix from git config file +#user_ssh_dir = os.path.expanduser("~") + "/.ssh" +#ssh_config_file = user_ssh_dir + '/config' +ssh_config_file = jenkins_home+"/git_config" +if os.path.exists(ssh_config_file): + f = open(ssh_config_file,"r") + new_lines = [] + config_file_data = f.readlines() + ptr = 1 + found=False + for line in config_file_data: + if prefix in line or found==True: + found=True + if ptr==7: + found=False + ptr=ptr+1 + + else: + found=False + new_lines.append(line) + f.close() + + file = open(ssh_config_file, "w") + file.writelines(new_lines) + file.close() + +if exception == False: + # Removes prefix directory + if (os.path.exists(customer_tenancy_dir)): + shutil.rmtree(customer_tenancy_dir) + if (os.path.exists(jenkins_home+"/jobs/"+prefix)): + shutil.rmtree(jenkins_home+"/jobs/"+prefix) + + #Removes 
prefix from createTenancyConfig.safe + if os.path.exists(safe_file): + f=open(safe_file,"r") + safe_file_lines = f.readlines() + new_lines = [] + for l in safe_file_lines: + if prefix not in l: + new_lines.append(l) + f.close() + file = open(safe_file, "w") + file.writelines(new_lines) + file.close() + + print("\nCleanup Completed!\n") +else: + print("\nRe run this script to remove OCI Objects completely.\n") diff --git a/cd3_automation_toolkit/user-scripts/terraform/identity.tf b/cd3_automation_toolkit/user-scripts/terraform/identity.tf index 8657ae690..110a8d3e6 100644 --- a/cd3_automation_toolkit/user-scripts/terraform/identity.tf +++ b/cd3_automation_toolkit/user-scripts/terraform/identity.tf @@ -298,7 +298,7 @@ module "groups" { for_each = var.identity_domain_groups group_name = each.value.group_name - group_description = each.value.group_description + group_description = each.value.group_description != null ? each.value.group_description : null matching_rule = each.value.matching_rule compartment_id = each.value.domain_compartment_id != "root" ? (length(regexall("ocid1.compartment.oc*", each.value.domain_compartment_id)) > 0 ? each.value.domain_compartment_id : var.compartment_ocids[each.value.domain_compartment_id]) : var.tenancy_ocid identity_domain = data.oci_identity_domains.iam_domains[each.key].domains[0] diff --git a/cd3_automation_toolkit/user-scripts/terraform/instance.tf b/cd3_automation_toolkit/user-scripts/terraform/instance.tf index cd10c1e88..0cf2bf380 100755 --- a/cd3_automation_toolkit/user-scripts/terraform/instance.tf +++ b/cd3_automation_toolkit/user-scripts/terraform/instance.tf @@ -47,7 +47,7 @@ module "instances" { #nsg_ids = each.value.nsg_ids != [] ? [for nsg in each.value.nsg_ids : length(regexall("ocid1.networksecuritygroup.oc*",nsg)) > 0 ? nsg : merge(module.nsgs.*...)[nsg]["nsg_tf_id"]] : [] boot_volume_size_in_gbs = each.value.boot_volume_size_in_gbs != null ? each.value.boot_volume_size_in_gbs : null memory_in_gbs = each.value.memory_in_gbs != null ? each.value.memory_in_gbs : null - capacity_reservation_id = each.value.capacity_reservation_id != null ? lookup(var.capacity_reservation_ocids, each.value.capacity_reservation_id, null) : null + capacity_reservation_id = each.value.capacity_reservation_id != null ? (length(regexall("ocid1.capacityreservation.oc*", each.value.capacity_reservation_id)) > 0 ? each.value.capacity_reservation_id : lookup(var.capacity_reservation_ocids, each.value.capacity_reservation_id, null)) : null create_is_pv_encryption_in_transit_enabled = each.value.create_is_pv_encryption_in_transit_enabled boot_tf_policy = each.value.backup_policy != null ? 
each.value.backup_policy : null diff --git a/cd3_automation_toolkit/user-scripts/terraform/modules/compute/instance/main.tf b/cd3_automation_toolkit/user-scripts/terraform/modules/compute/instance/main.tf index ddb6f3e7e..64f770eff 100755 --- a/cd3_automation_toolkit/user-scripts/terraform/modules/compute/instance/main.tf +++ b/cd3_automation_toolkit/user-scripts/terraform/modules/compute/instance/main.tf @@ -126,7 +126,7 @@ resource "oci_core_instance" "instance" { } lifecycle { - ignore_changes = [create_vnic_details[0].defined_tags["Oracle-Tags.CreatedOn"], create_vnic_details[0].defined_tags["Oracle-Tags.CreatedBy"]] + ignore_changes = [create_vnic_details[0].defined_tags["Oracle-Tags.CreatedOn"], create_vnic_details[0].defined_tags["Oracle-Tags.CreatedBy"],metadata,extended_metadata] } } diff --git a/cd3_automation_toolkit/user-scripts/terraform/modules/identity/identity-domain-group/main.tf b/cd3_automation_toolkit/user-scripts/terraform/modules/identity/identity-domain-group/main.tf index 4b175c5e7..79ac0b08c 100644 --- a/cd3_automation_toolkit/user-scripts/terraform/modules/identity/identity-domain-group/main.tf +++ b/cd3_automation_toolkit/user-scripts/terraform/modules/identity/identity-domain-group/main.tf @@ -36,12 +36,13 @@ resource "oci_identity_domains_group" "group" { value = local.user_ids[members.value][0] } } - urnietfparamsscimschemasoracleidcsextensionrequestable_group { - + dynamic "urnietfparamsscimschemasoracleidcsextensionrequestable_group" { + for_each = var.user_can_request_access == false ?[1]:[] + content { #Optional requestable = var.user_can_request_access - - } + } +} urnietfparamsscimschemasoracleidcsextension_oci_tags { # Optional diff --git a/cd3_automation_toolkit/user-scripts/terraform/modules/identity/identity-domain-group/variables.tf b/cd3_automation_toolkit/user-scripts/terraform/modules/identity/identity-domain-group/variables.tf index a702f1ed8..c24585f26 100644 --- a/cd3_automation_toolkit/user-scripts/terraform/modules/identity/identity-domain-group/variables.tf +++ b/cd3_automation_toolkit/user-scripts/terraform/modules/identity/identity-domain-group/variables.tf @@ -76,5 +76,6 @@ variable "freeform_tags_value" { variable "user_can_request_access" { type = bool description = "Specifies whether user can request access to the group" + default = false } diff --git a/cd3_automation_toolkit/user-scripts/terraform/provider.tf b/cd3_automation_toolkit/user-scripts/terraform/provider.tf index 3a75cbc5e..99c8b0959 100644 --- a/cd3_automation_toolkit/user-scripts/terraform/provider.tf +++ b/cd3_automation_toolkit/user-scripts/terraform/provider.tf @@ -19,7 +19,7 @@ terraform { required_providers { oci = { source = "oracle/oci" - version = "6.3.0" + version = "6.15.0" } } } diff --git a/cd3_automation_toolkit/user-scripts/terraform/variables_example.tf b/cd3_automation_toolkit/user-scripts/terraform/variables_example.tf index 62dff5b76..4c754b842 100644 --- a/cd3_automation_toolkit/user-scripts/terraform/variables_example.tf +++ b/cd3_automation_toolkit/user-scripts/terraform/variables_example.tf @@ -98,7 +98,7 @@ variable "dbsystem_ssh_keys" { # Platform Image OCIDs, Source OCIDS # and Market Place Images ################################### - +# Reference url to get the OCIDS : https://docs.oracle.com/en-us/iaas/images/ variable "instance_source_ocids" { type = map(any) default = { @@ -262,7 +262,7 @@ variable "groups" { variable "identity_domain_groups" { type = map(object({ group_name = string - group_description = string + group_description = 
optional(string) idcs_endpoint = string domain_compartment_id = string matching_rule = optional(string) @@ -1533,6 +1533,7 @@ variable "nlb_backends" { network_load_balancer_id = string port = number #vnic_vlan = optional(string) + #vnic_ip = optional(string) ip_address = string instance_compartment = string is_drain = optional(bool) diff --git a/jenkins_install/jenkins.sh b/jenkins_install/jenkins.sh index cbe88b3d0..0a2ab258e 100644 --- a/jenkins_install/jenkins.sh +++ b/jenkins_install/jenkins.sh @@ -20,6 +20,9 @@ fi #cp -r ${JENKINS_INSTALL}/scriptler $JENKINS_HOME cp ${JENKINS_INSTALL}/jcasc.yaml "$JENKINS_HOME/" +if [ ! -e "/cd3user/.ssh/config" ]; then + ln -s /cd3user/tenancies/jenkins_home/git_config /cd3user/.ssh/config +fi # Copy scriptler directory cp -r "${JENKINS_INSTALL}/scriptler" "$JENKINS_HOME" diff --git a/jenkins_install/scriptler/scripts/AdditionalFilters.groovy b/jenkins_install/scriptler/scripts/AdditionalFilters.groovy index 36c4ea6e9..83a0d6789 100644 --- a/jenkins_install/scriptler/scripts/AdditionalFilters.groovy +++ b/jenkins_install/scriptler/scripts/AdditionalFilters.groovy @@ -1,3 +1,4 @@ +List suboptions_list = SubOptions.split(",") def reg_list = new File("/cd3user/tenancies/${Prefix}/.config_files/regions_file") as String[] def string_list = reg_list.join(", ") reg_options = "" @@ -13,28 +14,43 @@ for(item in string_list2.split(",")){ html_to_be_rendered = "" if(Workflow.toLowerCase().contains("export")){ - -html_to_be_rendered = """ - ${html_to_be_rendered} - +region_filter_option = """ + - + """ +compartment_filter_option = """ + - -""" + """ +List default_params_set = [] +for (item in MainOptions.split(",")) { + if (item != "Export Identity") { + html_to_be_rendered = "${html_to_be_rendered} ${region_filter_option} ${compartment_filter_option}" + default_params_set = ["region","compartment"] + break; + } +} + if (("Export Groups" in suboptions_list) || ("Export Users" in suboptions_list)) { + if ("compartment" in default_params_set) { + html_to_be_rendered = "${html_to_be_rendered}" + }else { + html_to_be_rendered = "${html_to_be_rendered}${compartment_filter_option}" + default_params_set.add("compartment") + } +} } domain_filter_val = "Unset" -for (item in SubOptions.split(",")) { +for (item in suboptions_list) { if ((item in ["Export Groups","Export Users"]) && (domain_filter_val.equals("Unset"))) { - html_to_be_rendered = """ + html_to_be_rendered = """ ${html_to_be_rendered} @@ -229,7 +245,6 @@ for (item in SubChildOptions.split(",")) { export_network_rules = "set" } - if (item.equals("Export DR Plan")) { html_to_be_rendered = """ ${html_to_be_rendered} @@ -280,4 +295,4 @@ for (item in SubChildOptions.split(",")) { } html_to_be_rendered = "${html_to_be_rendered}
" -return html_to_be_rendered +return html_to_be_rendered \ No newline at end of file diff --git a/jenkins_install/setUpOCI.groovy b/jenkins_install/setUpOCI.groovy index 81f80bdba..8a64a1eb5 100644 --- a/jenkins_install/setUpOCI.groovy +++ b/jenkins_install/setUpOCI.groovy @@ -76,11 +76,12 @@ properties([ choiceType: 'ET_FORMATTED_HTML', description: 'Select additional filters', name: 'AdditionalFilters', - referencedParameters: 'Workflow,SubOptions,SubChildOptions', + referencedParameters: 'Workflow,SubOptions,SubChildOptions,MainOptions', script: [ $class: 'ScriptlerScript', scriptlerScriptId: 'AdditionalFilters.groovy', parameters: [ + [name:'MainOptions',value:'${MainOptions}'], [name:'SubOptions', value: '${SubOptions}'], [name:'SubChildOptions', value: '${SubChildOptions}'], [name:'Workflow', value: '${Workflow}'], @@ -243,7 +244,7 @@ pipeline { latest_fsdr_XL=`ls -t ${prefix_dir}/othertools_files/*.xl* | head -n 1` last_modified=`stat -c \"%Y\" ${latest_fsdr_XL}` if [ $(($last_modified-$current_timestamp)) -gt 0 ]; then - cp ${latest_fsdr_XL} . + cp "${latest_fsdr_XL}" . fi fi ''') @@ -273,8 +274,8 @@ pipeline { file_path = labelledShell( label: 'Preparing archival', script: ''' set +x cd3_file=`grep '^cd3file' ${prop_file}| cut -d'=' -f2` - cp $cd3_file . - echo $cd3_file + cp "$cd3_file" . + echo "$cd3_file" ''', returnStdout: true).trim() } archiveArtifacts "${file_path.split("/")[(file_path.split("/")).length-1]}, *.zip,*.xl*" diff --git a/othertools/oci-fsdr/commonLib.py b/othertools/oci-fsdr/commonLib.py index beeecf9c1..3cee80dbf 100644 --- a/othertools/oci-fsdr/commonLib.py +++ b/othertools/oci-fsdr/commonLib.py @@ -1,12 +1,16 @@ import re import json +from oci import regions -def get_region_from_ocid(ocid, region_map): - match = re.search(r'oc1\.(.*?)\.', ocid) +def get_region_from_ocid(ocid, region_map={}): + region_value = 'unknown-region' + match = re.search(r'oc\d{1,2}.(.*?)\.', ocid) if match: region_code = match.group(1) - return region_map.get(region_code, 'unknown-region') - return 'unknown-region' + if region_code: + # region_code = "phx"/"us-chicago-1" + region_value = regions.get_region_from_short_name(region_code) + return region_value def load_region_map(region_file): with open(region_file, 'r') as f: diff --git a/othertools/oci-fsdr/export_drplan.py b/othertools/oci-fsdr/export_drplan.py index c11b19e0e..bde80eab1 100644 --- a/othertools/oci-fsdr/export_drplan.py +++ b/othertools/oci-fsdr/export_drplan.py @@ -17,9 +17,10 @@ args = parser.parse_args() try: - region_file = os.path.dirname(os.path.abspath(__file__))+"/region_file.json" - region_map = load_region_map(region_file) - region = get_region_from_ocid(args.ocid, region_map) + #region_file = os.path.dirname(os.path.abspath(__file__))+"/region_file.json" + #region_map = load_region_map(region_file) + #region = get_region_from_ocid(args.ocid, region_map) + region = get_region_from_ocid(args.ocid) except Exception as e: print(f"Error loading region map: {str(e)}") exit(1) diff --git a/othertools/oci-fsdr/update_drplan.py b/othertools/oci-fsdr/update_drplan.py index ea21c8fd4..27953f955 100644 --- a/othertools/oci-fsdr/update_drplan.py +++ b/othertools/oci-fsdr/update_drplan.py @@ -15,9 +15,10 @@ args = parser.parse_args() try: - region_file = os.path.dirname(os.path.abspath(__file__)) + "/region_file.json" + """region_file = os.path.dirname(os.path.abspath(__file__)) + "/region_file.json" region_map = load_region_map(region_file) - region = get_region_from_ocid(args.ocid, region_map) + region = 
get_region_from_ocid(args.ocid, region_map)""" + region = get_region_from_ocid(args.ocid) except Exception as e: print(f"Error loading region map or determining region from OCID: {str(e)}") print(".....Exiting!!!")