diff --git a/Dockerfile b/Dockerfile
index 27d174602..6e54cace1 100755
--- a/Dockerfile
+++ b/Dockerfile
@@ -2,9 +2,22 @@ FROM oraclelinux:9-slim
LABEL maintainer="Team at Oracle"
LABEL description="OCI format to generate CD3 image"
-ARG USERNAME=cd3user
+
+########### Input Parameters for image creation ############
+# UID of the user on the underlying OS (e.g. 503 on macOS)
ARG USER_UID=1001
+# Whether to download Jenkins as part of image creation
+ARG USE_DEVOPS=YES
+#############################################################
+
+
+ARG USERNAME=cd3user
ARG USER_GID=$USER_UID
+# Whether to download the Terraform providers as part of image creation
+ARG DOWNLOAD_PROVIDER=YES
+# Terraform provider versions (OCI and null)
+ARG TF_OCI_PROVIDER=6.15.0
+ARG TF_NULL_PROVIDER=3.2.1
RUN microdnf install -y sudo && \
groupadd --gid $USER_GID $USERNAME && \
@@ -15,6 +28,8 @@ RUN microdnf install -y sudo && \
chown -R $USERNAME:$USERNAME /cd3user/tenancies/ && \
microdnf install -y vim && \
microdnf install -y dnf && \
+ microdnf install -y wget && \
+ microdnf install -y unzip && \
microdnf install -y graphviz && \
echo 'alias vi="vim"' >> /etc/bashrc
@@ -29,12 +44,26 @@ RUN sudo dnf install -y oraclelinux-release-el9 && \
sudo chown -R $USERNAME:$USERNAME /cd3user/ && \
sudo sed -i -e 's/\r$//' /cd3user/oci_tools/cd3_automation_toolkit/shell_script.sh && \
bash /cd3user/oci_tools/cd3_automation_toolkit/shell_script.sh && \
-sudo chown -R cd3user:cd3user /cd3user/ && \
sudo dnf clean all && \
sudo rm -rf /var/cache/dnf && \
-sudo chmod -R 740 /cd3user/
+sudo chmod -R 740 /cd3user/ && \
+sudo chown -R cd3user:cd3user /cd3user/
+RUN if [ "$DOWNLOAD_PROVIDER" == "YES" ]; then \
+# oci provider
+sudo wget https://releases.hashicorp.com/terraform-provider-oci/${TF_OCI_PROVIDER}/terraform-provider-oci_${TF_OCI_PROVIDER}_linux_amd64.zip && \
+sudo mkdir -p /cd3user/.terraform.d/plugins/registry.terraform.io/oracle/oci/${TF_OCI_PROVIDER}/linux_amd64 && \
+sudo unzip terraform-provider-oci_${TF_OCI_PROVIDER}_linux_amd64.zip -d /cd3user/.terraform.d/plugins/registry.terraform.io/oracle/oci/${TF_OCI_PROVIDER}/linux_amd64 && \
+# null provider
+sudo wget https://releases.hashicorp.com/terraform-provider-null/${TF_NULL_PROVIDER}/terraform-provider-null_${TF_NULL_PROVIDER}_linux_amd64.zip && \
+sudo mkdir -p /cd3user/.terraform.d/plugins/registry.terraform.io/hashicorp/null/${TF_NULL_PROVIDER}/linux_amd64 && \
+sudo unzip terraform-provider-null_${TF_NULL_PROVIDER}_linux_amd64.zip -d /cd3user/.terraform.d/plugins/registry.terraform.io/hashicorp/null/${TF_NULL_PROVIDER}/linux_amd64 && \
+sudo cp -r /cd3user/.terraform.d/plugins/registry.terraform.io /cd3user/.terraform.d/plugins/registry.opentofu.org && \
+sudo chown -R cd3user:cd3user /cd3user/ && \
+sudo rm -rf terraform-provider-null_${TF_NULL_PROVIDER}_linux_amd64.zip terraform-provider-oci_${TF_OCI_PROVIDER}_linux_amd64.zip ;\
+fi
+
##################################### START INSTALLING JENKINS ###################################
ARG JENKINS_VERSION=2.444
ARG JENKINS_SHA=ab093a455fc35951c9b46361002e17cc3ed7c59b0943bbee3a57a363f3370d2e
@@ -45,15 +74,7 @@ ARG JENKINS_HOME=/cd3user/tenancies/jenkins_home
ARG JENKINS_INSTALL=/usr/share/jenkins
ARG REF=/usr/share/jenkins/ref
-RUN sudo microdnf install -y java-21-openjdk && \
- sudo microdnf install -y java-21-openjdk-devel && \
- sudo microdnf install git-2.39.3 -y && \
- sudo mkdir -p ${REF}/init.groovy.d && \
- sudo chown -R cd3user:cd3user ${JENKINS_INSTALL} && \
- sudo curl -fsSL http://updates.jenkins-ci.org/download/war/${JENKINS_VERSION}/jenkins.war -o ${JENKINS_INSTALL}/jenkins.war && \
- echo "${JENKINS_SHA} ${JENKINS_INSTALL}/jenkins.war" | sha256sum -c - && \
- sudo curl -fsSL ${PLUGIN_CLI_URL} -o ${JENKINS_INSTALL}/jenkins-plugin-manager.jar
-
+ENV USE_DEVOPS ${USE_DEVOPS}
ENV JAVA_HOME /usr/lib/jvm/java-21-openjdk
ENV JENKINS_HOME ${JENKINS_HOME}
ENV JENKINS_INSTALL ${JENKINS_INSTALL}
@@ -64,11 +85,21 @@ ENV JENKINS_INCREMENTALS_REPO_MIRROR=https://repo.jenkins-ci.org/incrementals
ENV JAVA_OPTS="-Djenkins.install.runSetupWizard=false"
ENV COPY_REFERENCE_FILE_LOG ${JENKINS_HOME}/copy_reference_file.log
ENV CASC_JENKINS_CONFIG ${JENKINS_HOME}/jcasc.yaml
-
COPY --chown=cd3user:cd3user jenkins_install ${JENKINS_INSTALL}/
COPY --chown=cd3user:cd3user jenkins_install/init/*.groovy ${REF}/init.groovy.d/
COPY --chown=cd3user:cd3user jenkins_install/plugins.txt ${REF}/plugins.txt
-RUN sudo java -jar ${JENKINS_INSTALL}/jenkins-plugin-manager.jar --war ${JENKINS_INSTALL}/jenkins.war --verbose -f ${REF}/plugins.txt && \
+
+RUN if [ "$USE_DEVOPS" == "YES" ]; then \
+ sudo microdnf install -y java-21-openjdk && \
+ sudo microdnf install -y java-21-openjdk-devel && \
+ sudo microdnf install git-2.39.3 -y && \
+ sudo mkdir -p ${REF}/init.groovy.d && \
+ sudo chown -R cd3user:cd3user ${JENKINS_INSTALL} && \
+ sudo curl -fsSL http://updates.jenkins-ci.org/download/war/${JENKINS_VERSION}/jenkins.war -o ${JENKINS_INSTALL}/jenkins.war && \
+ echo "${JENKINS_SHA} ${JENKINS_INSTALL}/jenkins.war" | sha256sum -c - && \
+ sudo curl -fsSL ${PLUGIN_CLI_URL} -o ${JENKINS_INSTALL}/jenkins-plugin-manager.jar && \
+ sudo java -jar ${JENKINS_INSTALL}/jenkins-plugin-manager.jar --war ${JENKINS_INSTALL}/jenkins.war --verbose -f ${REF}/plugins.txt && \
sudo chown -R cd3user:cd3user ${JENKINS_INSTALL} && \
- sudo chmod +x ${JENKINS_INSTALL}/jenkins.sh
+ sudo chmod +x ${JENKINS_INSTALL}/jenkins.sh ; \
+ fi
diff --git a/OCIWorkVMStack/modules/network/locals.tf b/OCIWorkVMStack/modules/network/locals.tf
index 7e2bd9675..23eb9f69c 100644
--- a/OCIWorkVMStack/modules/network/locals.tf
+++ b/OCIWorkVMStack/modules/network/locals.tf
@@ -4,4 +4,5 @@ locals {
create_inet_gw = (var.vcn_strategy == "Create New VCN" && var.subnet_type == "Public") ? 1 : 0
create_nat_gw = (var.vcn_strategy == "Create New VCN" && var.subnet_type == "Private") ? 1 : 0
create_nsg_rule = (var.vcn_strategy == "Create New VCN" && length(var.source_cidr) != 0) ? 1 : 0
+ route_rule_drg = var.drg_attachment == true ? ( length(var.source_cidr) > 0 ? var.source_cidr : [] ) : []
}
\ No newline at end of file
diff --git a/OCIWorkVMStack/modules/network/network.tf b/OCIWorkVMStack/modules/network/network.tf
index d2fb1a6e9..bed845c00 100644
--- a/OCIWorkVMStack/modules/network/network.tf
+++ b/OCIWorkVMStack/modules/network/network.tf
@@ -49,12 +49,23 @@ resource "oci_core_route_table" "rt" {
vcn_id = local.vcn_id
display_name = "${var.subnet_name}-rt"
+ # Route rules to NGW or IGW
route_rules {
destination = "0.0.0.0/0"
destination_type = "CIDR_BLOCK"
network_entity_id = local.create_inet_gw == 1 ? oci_core_internet_gateway.internet_gw[0].id : oci_core_nat_gateway.nat_gw[0].id
}
+ # Route rules to DRG
+ dynamic "route_rules" {
+ for_each = local.route_rule_drg
+ content {
+ destination = route_rules.value
+ destination_type = "CIDR_BLOCK"
+ network_entity_id = var.existing_drg_id
+ }
+ }
+
}
resource "oci_core_security_list" "security_list" {
count = local.create_vcn
diff --git a/OCIWorkVMStack/schema.yaml b/OCIWorkVMStack/schema.yaml
index 890701e8c..e550c268e 100644
--- a/OCIWorkVMStack/schema.yaml
+++ b/OCIWorkVMStack/schema.yaml
@@ -406,7 +406,7 @@ variables:
- drg_attachment
- true
type: string
- pattern: '^ocid1\.([a-z0-9_-]{1,32})\.([a-z0-9_-]{1,15})\.([a-z0-9]{0,24})\.([a-z0-9]{60})$'
+ pattern: '^ocid1\.([a-z0-9_-]{1,32})\.([a-z0-9_-]{1,15})\.([a-z0-9_-]{0,24})\.([a-z0-9]{60})$'
required: true
title: Enter Existing DRG OCID
description: Enter existing DRG OCID
diff --git a/README.md b/README.md
index 55fdb24cd..8ae26e332 100755
--- a/README.md
+++ b/README.md
@@ -7,7 +7,7 @@
- [What's New](https://github.com/oracle-devrel/cd3-automation-toolkit/releases/tag/v2024.4.2) • [Excel Templates](https://oracle-devrel.github.io/cd3-automation-toolkit/latest/excel-templates/) • [CD3 Docs](https://oracle-devrel.github.io/cd3-automation-toolkit/) • [Watch & Learn](https://www.youtube.com/playlist?list=PLPIzp-E1msrbJ3WawXVhzimQnLw5iafcp) • [Blogs & Tutorials](https://oracle-devrel.github.io/cd3-automation-toolkit/latest/tutorials/) • [Livelabs](https://apexapps.oracle.com/pls/apex/f?p=133:180:112501098061930::::wid:3724) • [Slack Channel](https://oracle-devrel.github.io/cd3-automation-toolkit/latest/queries)
+ [What's New](https://github.com/oracle-devrel/cd3-automation-toolkit/releases/tag/v2024.4.3) • [Excel Templates](https://oracle-devrel.github.io/cd3-automation-toolkit/latest/excel-templates/) • [CD3 Docs](https://oracle-devrel.github.io/cd3-automation-toolkit/) • [Watch & Learn](https://www.youtube.com/playlist?list=PLPIzp-E1msrbJ3WawXVhzimQnLw5iafcp) • [Blogs & Tutorials](https://oracle-devrel.github.io/cd3-automation-toolkit/latest/tutorials/) • [Livelabs](https://apexapps.oracle.com/pls/apex/f?p=133:180:112501098061930::::wid:3724) • [Slack Channel](https://oracle-devrel.github.io/cd3-automation-toolkit/latest/queries)
@@ -81,7 +81,7 @@ Additionally, the toolkit also supports seamless resource management using OCI D
📝 Creating Terraform Code for each module/resource can be cumbersome and requires Terraform expertise.
-🔁 Manually created infrastrucutre is hard to rebuild for different environments or regions.
+🔁 Manually created infrastructure is hard to rebuild for different environments or regions.
diff --git a/cd3_automation_toolkit/Compute/create_terraform_instances.py b/cd3_automation_toolkit/Compute/create_terraform_instances.py
index 410c36407..e94057b2b 100755
--- a/cd3_automation_toolkit/Compute/create_terraform_instances.py
+++ b/cd3_automation_toolkit/Compute/create_terraform_instances.py
@@ -226,12 +226,12 @@ def create_terraform_instances(inputfile, outdir, service_dir, prefix, ct):
if columnname == "Source Details":
if columnvalue.strip() != '' and columnvalue.strip().lower() != 'nan':
- if "ocid1.image.oc1" in columnvalue.strip():
+ if "ocid1.image.oc" in columnvalue.strip():
ocid = columnvalue.strip()
type = "image"
source_details.append(type)
source_details.append(ocid)
- elif "ocid1.bootvolume.oc1" in columnvalue.strip():
+ elif "ocid1.bootvolume.oc" in columnvalue.strip():
ocid = columnvalue.strip()
type = "bootVolume"
source_details.append(type)
diff --git a/cd3_automation_toolkit/DeveloperServices/OKE/export_oke_nonGreenField.py b/cd3_automation_toolkit/DeveloperServices/OKE/export_oke_nonGreenField.py
index d00f490da..e4dcc5ac4 100644
--- a/cd3_automation_toolkit/DeveloperServices/OKE/export_oke_nonGreenField.py
+++ b/cd3_automation_toolkit/DeveloperServices/OKE/export_oke_nonGreenField.py
@@ -531,7 +531,7 @@ def export_oke(inputfile, outdir,service_dir, config, signer, ct, export_compart
#Virtual NodePool
- if ("ocid1.virtualnodepool.oc1" in nodepool_info.id):
+ if ("ocid1.virtualnodepool.oc" in nodepool_info.id):
nodepool_display_name = nodepool_info.display_name
np_tf_name = commonTools.check_tf_variable(nodepool_display_name)
tf_resource = f'module.virtual-nodepools[\\"{cluster_tf_name}_{np_tf_name}\\"].oci_containerengine_virtual_node_pool.virtual_nodepool'
@@ -540,7 +540,7 @@ def export_oke(inputfile, outdir,service_dir, config, signer, ct, export_compart
nodepool_type = "virtual"
# Managed NodePool
- if ("ocid1.nodepool.oc1" in nodepool_info.id):
+ if ("ocid1.nodepool.oc" in nodepool_info.id):
nodepool_display_name = nodepool_info.name
np_tf_name = commonTools.check_tf_variable(nodepool_display_name)
nodepool_type = "managed"
diff --git a/cd3_automation_toolkit/DeveloperServices/ResourceManager/create_resource_manager_stack.py b/cd3_automation_toolkit/DeveloperServices/ResourceManager/create_resource_manager_stack.py
index 1377eab63..3de257380 100644
--- a/cd3_automation_toolkit/DeveloperServices/ResourceManager/create_resource_manager_stack.py
+++ b/cd3_automation_toolkit/DeveloperServices/ResourceManager/create_resource_manager_stack.py
@@ -119,12 +119,14 @@ def create_resource_manager(outdir,var_file, outdir_struct,prefix,auth_mechanism
rm_dir = region_dir + '/RM/'
- # 1. Copy all the TF files for specified regions to RM directory
+ # 1. Copy all the TF files for specified regions to RM directory. Also copy modules directory
try:
shutil.copytree(region_dir, rm_dir, ignore=shutil.ignore_patterns('*.terraform.lock.hcl','*.terraform','provider.tf','*.zip*','*.safe*','*.log*','*cis_report','*.csv*','*cd3validator', 'variables_*.tf*'))
+ shutil.copytree(outdir+"/modules", rm_dir+"/modules")
except FileExistsError as fe:
shutil.rmtree(rm_dir)
shutil.copytree(region_dir, rm_dir, ignore=shutil.ignore_patterns('*.terraform.lock.hcl','*.terraform','provider.tf','*.zip*','*.safe*','*.log*','*cis_report','*.csv*','*cd3validator', 'variables_*.tf*'))
+ shutil.copytree(outdir+"/modules", rm_dir+"/modules")
#2. Change the provider.tf and variables_.tf to include just the region variable in all stacks for specified regions
tfStr[region]=''
@@ -286,6 +288,15 @@ def create_resource_manager(outdir,var_file, outdir_struct,prefix,auth_mechanism
zip_name = rm_name + ".zip"
# Fix for make_archive huge zip file issue - Ulag
file_paths = []
+ for file in os.listdir(rm_dir):
+ if ".tf" in file and "variables" not in file and "provider" not in file and 'backend' not in file:
+ with open(os.path.join(rm_dir, file), 'r') as tf_file:
+ module_data = tf_file.read().rstrip()
+ module_data = module_data.replace("\"../modules", "\"./modules")
+ f = open(os.path.join(rm_dir, file), "w+")
+ f.write(module_data)
+ f.close()
+
for root, directories, files in os.walk(rm_dir):
for filename in files:
rel_dir = os.path.relpath(root, rm_dir)
@@ -368,7 +379,7 @@ def create_resource_manager(outdir,var_file, outdir_struct,prefix,auth_mechanism
if os.path.exists(service_dir+"/"+ svc + ".tf"):
with open(service_dir+"/"+ svc + ".tf", 'r') as tf_file:
module_data = tf_file.read().rstrip()
- module_data = module_data.replace("\"../modules", "\"./modules")
+ module_data = module_data.replace("\"../../modules", "\"./modules")
if svc == 'rpc':
f = open(service_dir+"/"+ svc + "-temp.tf", "w+")
else:
diff --git a/cd3_automation_toolkit/Governance/Tagging/create_terraform_tags.py b/cd3_automation_toolkit/Governance/Tagging/create_terraform_tags.py
index c2a16a46a..f2a97d348 100644
--- a/cd3_automation_toolkit/Governance/Tagging/create_terraform_tags.py
+++ b/cd3_automation_toolkit/Governance/Tagging/create_terraform_tags.py
@@ -231,7 +231,7 @@ def create_terraform_tags(inputfile, outdir, service_dir, prefix, ct):
else:
if str(df.loc[i, 'Validator']).strip() == '' or str(df.loc[i, 'Validator']).strip().lower() == 'nan':
is_required_updated = 'true' #Uncomment this if needed
- default_value = '-'
+ default_value = '[CANNOT_BE_EMPTY]'
columnvalue = key_tf_name+"="+default_compartment+"="+default_value+"="+is_required_updated #Uncomment this if needed
if columnvalue not in default_tags:
default_tags.append(columnvalue)
diff --git a/cd3_automation_toolkit/Identity/Users/export_users_nonGreenField.py b/cd3_automation_toolkit/Identity/Users/export_users_nonGreenField.py
index d50a96eb8..2ed55133f 100644
--- a/cd3_automation_toolkit/Identity/Users/export_users_nonGreenField.py
+++ b/cd3_automation_toolkit/Identity/Users/export_users_nonGreenField.py
@@ -112,9 +112,14 @@ def export_users(inputfile, outdir, service_dir, config, signer, ct,export_domai
domain_name = domain_key.split("@")[1]
domain_client = oci.identity_domains.IdentityDomainsClient(config=config, signer=signer,
service_endpoint=idcs_endpoint)
- users = domain_client.list_users(limit=100000) # change this to pagination once api supports
+ list_users_response = domain_client.list_users() # change this to pagination once api supports
+ users = list_users_response.data.resources
+ while list_users_response.has_next_page:
+ list_users_response = domain_client.list_users(page=list_users_response.next_page)
+ users.extend(list_users_response.data.resources)
+
index = 0
- for user in users.data.resources:
+ for user in users:
defined_tags_info = user.urn_ietf_params_scim_schemas_oracle_idcs_extension_oci_tags
user_defined_tags = []
diff --git a/cd3_automation_toolkit/Identity/export_identity_nonGreenField.py b/cd3_automation_toolkit/Identity/export_identity_nonGreenField.py
index e0db64b21..d0a22e6e8 100644
--- a/cd3_automation_toolkit/Identity/export_identity_nonGreenField.py
+++ b/cd3_automation_toolkit/Identity/export_identity_nonGreenField.py
@@ -359,17 +359,31 @@ def process_group(grp_info, members_list,membership_id_list, domain_name, is_dyn
domain_name = domain_key.split("@")[1]
domain_client = oci.identity_domains.IdentityDomainsClient(config=config, signer=signer,
service_endpoint=idcs_endpoint)
- groups = domain_client.list_groups(attributes=['members'], attribute_sets=['all'])
- dyngroups = domain_client.list_dynamic_resource_groups(attributes=['matching_rule'], attribute_sets=['all'])
+ list_groups_response = domain_client.list_groups(attributes=['members'], attribute_sets=['all'])
+ groups = list_groups_response.data.resources
+ while list_groups_response.has_next_page:
+ list_groups_response = domain_client.list_groups(attributes=['members'], attribute_sets=['all'],page=list_groups_response.next_page)
+ groups.extend(list_groups_response.data.resources)
- for grp_info in groups.data.resources:
+ for grp_info in groups:
if grp_info.display_name in ["Domain_Administrators", "All Domain Users", "Administrators"]:
continue
total_g +=1
members_list = [section.name for section in grp_info.members if section and section.name] if grp_info.members else []
importCommands, values_for_column_groups = process_group(grp_info, members_list,[], domain_name, is_dynamic=False, importCommands=importCommands, values_for_column_groups=values_for_column_groups)
- for dg in dyngroups.data.resources:
+ dyngroups_response = domain_client.list_dynamic_resource_groups(attributes=['matching_rule'],
+ attribute_sets=['all']
+ )
+ dyngroups = dyngroups_response.data.resources
+ while dyngroups_response.has_next_page:
+ dyngroups_response = domain_client.list_dynamic_resource_groups(attributes=['matching_rule'],
+ attribute_sets=['all'],
+ page=dyngroups_response.next_page
+ )
+ dyngroups.extend(dyngroups_response.data.resources)
+
+ for dg in dyngroups:
total_g += 1
importCommands, values_for_column_groups = process_group(dg, [],[], domain_name, is_dynamic=True, importCommands=importCommands, values_for_column_groups=values_for_column_groups)
else:
diff --git a/cd3_automation_toolkit/ManagementServices/EventsAndNotifications/create_terraform_events.py b/cd3_automation_toolkit/ManagementServices/EventsAndNotifications/create_terraform_events.py
index af4c9b4e9..b790488c7 100644
--- a/cd3_automation_toolkit/ManagementServices/EventsAndNotifications/create_terraform_events.py
+++ b/cd3_automation_toolkit/ManagementServices/EventsAndNotifications/create_terraform_events.py
@@ -20,9 +20,9 @@ def extend_event(service_name, resources, listeventid):
event = [ "com.oraclecloud." + service_name + "." + resources ]
listeventid['eventType'].extend(event)
listeventid['eventType'] = list(dict.fromkeys(listeventid['eventType']))
- condition = json.dumps(listeventid)
+ condition = json.dumps(listeventid,separators=(',', ':'))
condition = condition.replace("\"" , "\\\"")
- condition = condition.replace(" " , "")
+ #condition = condition.replace(" " , "")
return (condition)
@@ -95,7 +95,7 @@ def create_terraform_events(inputfile, outdir, service_dir, prefix, ct):
# Dont strip for Description
columnvalue = str(df[columnname][i])
- if columnname == "Event Description":
+ if columnname in ["Event Description","Additional Data"]:
# Check for boolean/null in column values
columnvalue = commonTools.check_columnvalue(columnvalue)
@@ -213,8 +213,9 @@ def create_terraform_events(inputfile, outdir, service_dir, prefix, ct):
d["data"] = json.loads(data.replace("'", "\""))
else:
d["data"] = json.loads(data)
- condition = json.dumps(d)
- condition = condition.replace("\"" , "\\\"").replace("'", "\\\"").replace(" " , "")
+ condition = json.dumps(d,separators=(',', ':'))
+
+ condition = condition.replace("\"" , "\\\"").replace("'", "\\\"")
tempdict = {'condition' : condition}
tempStr.update(tempdict)
@@ -244,8 +245,8 @@ def create_terraform_events(inputfile, outdir, service_dir, prefix, ct):
d["data"] = json.loads(data.replace("'", "\""))
else:
d["data"] = json.loads(data)
- condition = json.dumps(d)
- condition = condition.replace("\"", "\\\"").replace("'", "\\\"").replace(" ", "")
+ condition = json.dumps(d,separators=(',', ':'))
+ condition = condition.replace("\"", "\\\"").replace("'", "\\\"")
tempdict = {'condition' : condition}
tempStr.update(tempdict)
diff --git a/cd3_automation_toolkit/ManagementServices/ServiceConnectorHub/create_terraform_service_connectors.py b/cd3_automation_toolkit/ManagementServices/ServiceConnectorHub/create_terraform_service_connectors.py
index 6a8f0b38d..af2741d6f 100644
--- a/cd3_automation_toolkit/ManagementServices/ServiceConnectorHub/create_terraform_service_connectors.py
+++ b/cd3_automation_toolkit/ManagementServices/ServiceConnectorHub/create_terraform_service_connectors.py
@@ -129,14 +129,14 @@ def create_service_connectors(inputfile, outdir, service_dir, prefix, ct):
if columnname == 'Target Stream Name':
target_stream_name = columnvalue.strip().split()
- target_stream_name = dict(subString.split("&") for subString in target_stream_name)
+ target_stream_name = dict(subString.split("@") for subString in target_stream_name)
target_stream_name = dict((commonTools.check_tf_variable(k), v) for k, v in target_stream_name.items())
target_stream_name = json.dumps(target_stream_name)
tempdict = {'target_stream_name': target_stream_name}
if columnname == 'Source Stream Name':
source_stream_name = columnvalue.strip().split()
- source_stream_name = dict(subString.split("&") for subString in source_stream_name)
+ source_stream_name = dict(subString.split("@") for subString in source_stream_name)
source_stream_name = dict((commonTools.check_tf_variable(k), v) for k, v in source_stream_name.items())
source_stream_name = json.dumps(source_stream_name)
tempdict = {'source_stream_name': source_stream_name}
@@ -148,7 +148,7 @@ def create_service_connectors(inputfile, outdir, service_dir, prefix, ct):
if columnname == 'Target Log Group Name':
target_log_group_name = columnvalue.strip().split()
- target_log_group_name = dict(subString.split("&") for subString in target_log_group_name)
+ target_log_group_name = dict(subString.split("@") for subString in target_log_group_name)
target_log_group_name = dict(
(commonTools.check_tf_variable(k), v) for k, v in target_log_group_name.items())
target_log_group_name = json.dumps(target_log_group_name)
@@ -162,15 +162,15 @@ def create_service_connectors(inputfile, outdir, service_dir, prefix, ct):
source_log_group_names = columnvalue.strip()
source_log_group_list = source_log_group_names.replace(" ", "").replace("::", "--").split(",")
for index, item in enumerate(source_log_group_list):
- if len(item.split("&")) == 2:
- source_log_group_list[index] = f"{item}&all"
+ if len(item.split("@")) == 2:
+ source_log_group_list[index] = f"{item}@all"
source_log_group_list = json.dumps(source_log_group_list)
tempdict = {'source_log_group_names': source_log_group_list}
if columnname == 'Target Topic Name':
target_topic_name = columnvalue.strip().split()
- target_topic_name = dict(subString.split("&") for subString in target_topic_name)
+ target_topic_name = dict(subString.split("@") for subString in target_topic_name)
target_topic_name = dict((commonTools.check_tf_variable(k), v) for k, v in target_topic_name.items())
target_topic_name = json.dumps(target_topic_name)
tempdict = {'target_topic_name': target_topic_name}
@@ -192,7 +192,7 @@ def create_service_connectors(inputfile, outdir, service_dir, prefix, ct):
if columnname == 'Target Monitoring Details' and columnvalue != "":
target_monitoring_details = columnvalue.strip().replace(" ", "")
- target_monitoring_details = dict(item.split("&") for item in target_monitoring_details.split(";"))
+ target_monitoring_details = dict(item.split("@") for item in target_monitoring_details.split(";"))
target_monitoring_details = {
json.dumps(key): '[' + ','.join(['"' + x + '"' for x in val[1:-1].split(',')]) + ']' for
key, val in target_monitoring_details.items()}
@@ -205,7 +205,7 @@ def create_service_connectors(inputfile, outdir, service_dir, prefix, ct):
if columnname == 'Source Monitoring Details' and columnvalue != "":
# loop through the columnvalue
monitoring_details = columnvalue.strip().replace(" ", "")
- monitoring_details = dict(item.split("&") for item in monitoring_details.split(";"))
+ monitoring_details = dict(item.split("@") for item in monitoring_details.split(";"))
# monitoring_details = dict((commonTools.check_tf_variable(k), v) for k, v in monitoring_details.items())
monitoring_details = {
json.dumps(key): '[' + ','.join(['"' + x + '"' for x in val[1:-1].split(',')]) + ']' for
diff --git a/cd3_automation_toolkit/ManagementServices/ServiceConnectorHub/export_sch_nonGreenField.py b/cd3_automation_toolkit/ManagementServices/ServiceConnectorHub/export_sch_nonGreenField.py
index 9e99ba6e6..124b0a188 100755
--- a/cd3_automation_toolkit/ManagementServices/ServiceConnectorHub/export_sch_nonGreenField.py
+++ b/cd3_automation_toolkit/ManagementServices/ServiceConnectorHub/export_sch_nonGreenField.py
@@ -57,12 +57,12 @@ def get_comp_details(comp_data):
if log.log_group_id == "_Audit":
log_group_name = "Audit"
comp_name = get_comp_details(log.compartment_id)
- log_source_list.append(f"{comp_name}&{log_group_name}&all")
+ log_source_list.append(f"{comp_name}@{log_group_name}@all")
elif log.log_group_id == "_Audit_Include_Subcompartment":
log_group_name = "Audit_In_Subcompartment"
comp_name = get_comp_details(log.compartment_id)
- log_source_list.append(f"{comp_name}&{log_group_name}&all")
+ log_source_list.append(f"{comp_name}@{log_group_name}@all")
else:
log_group_id = log.log_group_id
@@ -74,9 +74,9 @@ def get_comp_details(comp_data):
log_name = getattr(
log_client.get_log(log_group_id=log_group_id, log_id=log.log_id).data,
'display_name')
- log_source_list.append(f"{comp_name}&{log_group_name}&{log_name}")
+ log_source_list.append(f"{comp_name}@{log_group_name}@{log_name}")
else:
- log_source_list.append(f"{comp_name}&{log_group_name}&all")
+ log_source_list.append(f"{comp_name}@{log_group_name}@all")
except oci.exceptions.ServiceError as e:
print(f"Error retrieving log group details: {e}")
continue
@@ -96,7 +96,7 @@ def get_comp_details(comp_data):
source_stream_name = getattr(stream_client.get_stream(stream_id=source_stream_id).data, 'name')
source_comp_id = getattr(stream_client.get_stream(stream_id=source_stream_id).data, 'compartment_id')
source_stream_comp_name = get_comp_details(source_comp_id)
- source_stream_string = source_stream_comp_name + "&" + source_stream_name
+ source_stream_string = source_stream_comp_name + "@" + source_stream_name
else:
print(f"Error: 'stream_id' not found in source_data/deleted for connector {schs.display_name}.Skipping to the next SCH.")
continue
@@ -131,7 +131,7 @@ def get_comp_details(comp_data):
mon_ns_string = str(mon_namespace_dict).replace("'", "")
mon_ns_string = mon_ns_string.replace("{", "").replace("}", "").replace("],", "];")
- mon_ns_string = mon_ns_string.replace(":", "&").replace(" ", "")
+ mon_ns_string = mon_ns_string.replace(":", "@").replace(" ", "")
mon_ns_string = mon_ns_string.replace("&&", "::")
else:
# Print an error message and continue to the next SCH
@@ -146,7 +146,7 @@ def get_comp_details(comp_data):
metric_namespace = getattr(target_data, 'metric_namespace')
comp_id = getattr(target_data, 'compartment_id')
comp_name = get_comp_details(comp_id)
- target_mon_ns_string = f'{comp_name}&[{metric_name},{metric_namespace}]'
+ target_mon_ns_string = f'{comp_name}@[{metric_name},{metric_namespace}]'
else:
# Print an error message and continue to the next SCH
print(f"Error: 'metric' not found in target_data for connector {schs.display_name}.Skipping to the next SCH.")
@@ -167,7 +167,7 @@ def get_comp_details(comp_data):
target_log_group_name = getattr(dest_logs_compartment_details.data, 'display_name')
target_comp_id = getattr(dest_logs_compartment_details.data, 'compartment_id')
target_comp_name = get_comp_details(target_comp_id)
- target_la_string = target_comp_name + "&" + target_log_group_name
+ target_la_string = target_comp_name + "@" + target_log_group_name
else:
# Print an error message and continue to the next SCH
print(f"Error: 'log_group_id' not found in target_data for connector {schs.display_name}.Skipping to the next SCH.")
@@ -184,7 +184,7 @@ def get_comp_details(comp_data):
target_comp_id = getattr(stream_client.get_stream(stream_id=target_stream_id).data,
'compartment_id')
target_stream_comp_name = get_comp_details(target_comp_id)
- target_stream_string = target_stream_comp_name + "&" + target_stream_name
+ target_stream_string = target_stream_comp_name + "@" + target_stream_name
else:
# Print an error message and continue to the next SCH
print(f"Error: 'stream_id' not found in target_data for connector {schs.display_name}.Skipping to the next SCH.")
@@ -201,7 +201,7 @@ def get_comp_details(comp_data):
target_topic_comp_id = getattr(notification_client.get_topic(topic_id=target_topic_id).data,
'compartment_id')
target_topic_comp_name = get_comp_details(target_topic_comp_id)
- target_topic_string = target_topic_comp_name + "&" + target_topic_name
+ target_topic_string = target_topic_comp_name + "@" + target_topic_name
else:
print(f"Error: 'topic_id' not found in target_data for connector {schs.display_name}.Skipping to the next SCH.")
continue
diff --git a/cd3_automation_toolkit/Network/BaseNetwork/create_all_tf_objects.py b/cd3_automation_toolkit/Network/BaseNetwork/create_all_tf_objects.py
index f79a04dc9..4cde85d72 100644
--- a/cd3_automation_toolkit/Network/BaseNetwork/create_all_tf_objects.py
+++ b/cd3_automation_toolkit/Network/BaseNetwork/create_all_tf_objects.py
@@ -49,5 +49,5 @@ def create_all_tf_objects(inputfile, outdir, service_dir,prefix, ct, non_gf_tena
create_terraform_subnet_vlan(inputfile, outdir, service_dir, prefix, ct, non_gf_tenancy, network_vlan_in_setupoci,modify_network)
if non_gf_tenancy == False:
- print('\n\nMake sure to export all SecRules, RouteRules and DRG RouteRules to CD3. Use sub-options 3,4,5 under option 3(Network) of Main Menu for the same.')
+ print('\n\nMake sure to export all SecRules, RouteRules and DRG RouteRules to CD3. Use sub-options 3,4,5 under option 4(Network) of Main Menu for the same.')
diff --git a/cd3_automation_toolkit/Network/BaseNetwork/create_major_objects.py b/cd3_automation_toolkit/Network/BaseNetwork/create_major_objects.py
index 47fade8ac..a660589c9 100644
--- a/cd3_automation_toolkit/Network/BaseNetwork/create_major_objects.py
+++ b/cd3_automation_toolkit/Network/BaseNetwork/create_major_objects.py
@@ -330,7 +330,7 @@ def create_drg_and_attachments(inputfile, outdir):
#if it is Auto Generated RT(during export) dont attach any RT to DRG attachment
if(columnvalue in commonTools.drg_auto_RTs):
drg_rt_tf_name = ''
- elif("ocid1.drgroutetable.oc1" in columnvalue):
+ elif("ocid1.drgroutetable.oc" in columnvalue):
drg_rt_tf_name = columnvalue
elif(columnvalue!=''):
drg_rt_tf_name = commonTools.check_tf_variable(drg_name + "_" + columnvalue)
diff --git a/cd3_automation_toolkit/Network/BaseNetwork/modify_routerules_tf.py b/cd3_automation_toolkit/Network/BaseNetwork/modify_routerules_tf.py
index 60762bab5..5d89ec0aa 100644
--- a/cd3_automation_toolkit/Network/BaseNetwork/modify_routerules_tf.py
+++ b/cd3_automation_toolkit/Network/BaseNetwork/modify_routerules_tf.py
@@ -166,7 +166,7 @@ def modify_terraform_drg_routerules(inputfile, outdir, service_dir,prefix, ct, n
if columnname == 'Next Hop Attachment':
dest_obj = columnvalue.strip()
if dest_obj != '':
- if("ocid1.drgattachment.oc1" in dest_obj):
+ if("ocid1.drgattachment.oc" in dest_obj):
dest_objs = str(dest_obj).strip().split(".")
if(len(dest_objs)==5):
dest_obj = dest_obj.strip()
diff --git a/cd3_automation_toolkit/Network/LoadBalancers/create_backendset_backendservers.py b/cd3_automation_toolkit/Network/LoadBalancers/create_backendset_backendservers.py
index 16b3d4aad..ae6bf8e0a 100644
--- a/cd3_automation_toolkit/Network/LoadBalancers/create_backendset_backendservers.py
+++ b/cd3_automation_toolkit/Network/LoadBalancers/create_backendset_backendservers.py
@@ -118,7 +118,7 @@ def create_backendset_backendservers(inputfile, outdir, service_dir, prefix, ct)
if columnname == "Certificate Name or OCID":
if columnvalue != "":
- if 'ocid1.certificate.oc1' not in columnvalue:
+ if 'ocid1.certificate.oc' not in columnvalue:
certificate_tf_name = commonTools.check_tf_variable(columnvalue)+"_cert"
tempdict = {'certificate_tf_name': certificate_tf_name}
else:
diff --git a/cd3_automation_toolkit/Network/LoadBalancers/create_listener.py b/cd3_automation_toolkit/Network/LoadBalancers/create_listener.py
index 0f6ece5a7..5568b75e9 100644
--- a/cd3_automation_toolkit/Network/LoadBalancers/create_listener.py
+++ b/cd3_automation_toolkit/Network/LoadBalancers/create_listener.py
@@ -110,7 +110,7 @@ def create_listener(inputfile, outdir, service_dir, prefix, ct):
if columnname == "Certificate Name or OCID":
if columnvalue != '':
- if 'ocid1.certificate.oc1' not in columnvalue:
+ if 'ocid1.certificate.oc' not in columnvalue:
certificate_tf_name = commonTools.check_tf_variable(columnvalue)+"_cert"
tempdict = {'certificate_tf_name': certificate_tf_name}
else:
diff --git a/cd3_automation_toolkit/OCI_Regions b/cd3_automation_toolkit/OCI_Regions
index 7cecb1f0c..5f0fe2f79 100644
--- a/cd3_automation_toolkit/OCI_Regions
+++ b/cd3_automation_toolkit/OCI_Regions
@@ -1,14 +1,11 @@
#Region:Region_Key
-saltlake:us-saltlake-2
amsterdam:eu-amsterdam-1
stockholm:eu-stockholm-1
abudhabi:me-abudhabi-1
-saltlake:us-saltlake-1
bogota:sa-bogota-1
mumbai:ap-mumbai-1
paris:eu-paris-1
cardiff:uk-cardiff-1
-dallas:us-dallas-1
dubai:me-dubai-1
frankfurt:eu-frankfurt-1
saopaulo:sa-saopaulo-1
@@ -18,7 +15,6 @@ seoul:ap-seoul-1
jeddah:me-jeddah-1
johannesburg:af-johannesburg-1
osaka:ap-osaka-1
-kragujevac:eu-kragujevac-1
london:uk-london-1
milan:eu-milan-1
madrid:eu-madrid-1
@@ -27,7 +23,6 @@ marseille:eu-marseille-1
monterrey:mx-monterrey-1
jerusalem:il-jerusalem-1
tokyo:ap-tokyo-1
-neom:me-neom-1
chicago:us-chicago-1
phoenix:us-phoenix-1
queretaro:mx-queretaro-1
diff --git a/cd3_automation_toolkit/Release-Notes b/cd3_automation_toolkit/Release-Notes
index 591b2331c..f40f2fee1 100644
--- a/cd3_automation_toolkit/Release-Notes
+++ b/cd3_automation_toolkit/Release-Notes
@@ -1,3 +1,15 @@
+-------------------------------------
+CD3 Automation Toolkit Tag v2024.4.3
+Dec 27th, 2024
+-------------------------------------
+1. Bug fixes/Enhancements -
+ a. Added pagination for identity domain users/groups during export.
+ b. Removed duplication of modules directory - moved it outside to common terraform_files folder.
+ c. Modified 'ServiceConnectors' sheet to have consistent delimiters/separators as per other CD3 sheets.
+ d. Parameterized the Dockerfile to accept whether Jenkins needs to be installed.
+ e. Download of Terraform provider during image creation.
+ f. A few other minor bug fixes in Python related to block volume export, the fetch compartments script, and the RM stack TF update to add a route rule for DRG.
+
-------------------------------------
CD3 Automation Toolkit Tag v2024.4.2
Nov 8th, 2024
diff --git a/cd3_automation_toolkit/Security/Firewall/fw_create.py b/cd3_automation_toolkit/Security/Firewall/fw_create.py
index 37535024a..b1d178d07 100644
--- a/cd3_automation_toolkit/Security/Firewall/fw_create.py
+++ b/cd3_automation_toolkit/Security/Firewall/fw_create.py
@@ -120,7 +120,7 @@ def fw_create(inputfile, outdir, service_dir, prefix, ct):
subnet_tf_name = str(columnvalue).strip()
if subnet_tf_name == 'nan' or subnet_tf_name == '':
continue
- if ("ocid1.subnet.oc1" in subnet_tf_name):
+ if ("ocid1.subnet.oc" in subnet_tf_name):
vcn_name = ""
subnet_id = subnet_tf_name
else:
diff --git a/cd3_automation_toolkit/Storage/BlockVolume/create_terraform_block_volumes.py b/cd3_automation_toolkit/Storage/BlockVolume/create_terraform_block_volumes.py
index 75b683258..2d1f5c489 100644
--- a/cd3_automation_toolkit/Storage/BlockVolume/create_terraform_block_volumes.py
+++ b/cd3_automation_toolkit/Storage/BlockVolume/create_terraform_block_volumes.py
@@ -135,17 +135,17 @@ def create_terraform_block_volumes(inputfile, outdir, service_dir, prefix,ct):
columnvalue = int(float(columnvalue))
if columnname == "Source Details":
if columnvalue.strip() != '' and columnvalue.strip().lower() != 'nan':
- if "ocid1.volume.oc1" in columnvalue.strip():
+ if "ocid1.volume.oc" in columnvalue.strip():
ocid = columnvalue.strip()
type = "volume"
source_details.append(type)
source_details.append(ocid)
- elif "ocid1.volumebackup.oc1" in columnvalue.strip():
+ elif "ocid1.volumebackup.oc" in columnvalue.strip():
ocid = columnvalue.strip()
type = "volumeBackup"
source_details.append(type)
source_details.append(ocid)
- elif "ocid1.blockvolumereplica.oc1" in columnvalue.strip():
+ elif "ocid1.blockvolumereplica.oc" in columnvalue.strip():
ocid = columnvalue.strip()
type = "blockVolumeReplica"
source_details.append(type)
diff --git a/cd3_automation_toolkit/Storage/BlockVolume/export_blockvolumes_nonGreenField.py b/cd3_automation_toolkit/Storage/BlockVolume/export_blockvolumes_nonGreenField.py
index 8bce5bbac..3364325ff 100644
--- a/cd3_automation_toolkit/Storage/BlockVolume/export_blockvolumes_nonGreenField.py
+++ b/cd3_automation_toolkit/Storage/BlockVolume/export_blockvolumes_nonGreenField.py
@@ -119,7 +119,8 @@ def print_blockvolumes(region, BVOLS, bvol, compute, ct, values_for_column, ntk_
source_id = blockvols.source_details.id
source_details = blockvols.source_details.type.strip() + "::" + commonTools.check_tf_variable(blockvols.display_name.strip())
tmp_key = region + "--" + source_id
- source_ocids[tmp_key] = commonTools.check_tf_variable(blockvols.display_name.strip())
+ #source_ocids[tmp_key] = commonTools.check_tf_variable(blockvols.display_name.strip())
+ source_ocids[commonTools.check_tf_variable(blockvols.display_name.strip())] = tmp_key
autotune_type = ''
max_vpus_per_gb = ''
if len(blockvols.autotune_policies) == 0:
@@ -263,9 +264,9 @@ def export_blockvolumes(inputfile, outdir, service_dir, config, signer, ct, expo
tempStrOcids = ""
for k, v in source_ocids.items():
- if k.split("--")[0].lower() == reg:
- k = "\"" + k.split("--")[1] + "\""
- tempStrOcids = "\t" + v + " = " + k + "\n" + tempStrOcids
+ if v.split("--")[0].lower() == reg:
+ v = "\"" + v.split("--")[1] + "\""
+ tempStrOcids = "\t" + k + " = " + v + "\n" + tempStrOcids
tempStrOcids = "\n" + tempStrOcids
tempStrOcids = "#START_blockvolume_source_ocids#" + tempStrOcids + "\t#blockvolume_source_ocids_END#"
var_data[reg] = re.sub('#START_blockvolume_source_ocids#.*?#blockvolume_source_ocids_END#', tempStrOcids,
diff --git a/cd3_automation_toolkit/cd3FirewallValidator.py b/cd3_automation_toolkit/cd3FirewallValidator.py
index c1d106cf4..69a6f3eff 100644
--- a/cd3_automation_toolkit/cd3FirewallValidator.py
+++ b/cd3_automation_toolkit/cd3FirewallValidator.py
@@ -24,7 +24,7 @@
config.__setitem__("region", ct.region_dict[region])
vnc = VirtualNetworkClient(config)
for comp_id in compartment_ids.values():
- if comp_id == 'ocid1.compartment.oc1..aaaaaaaaeifixpi24fbexwhnsohftxam34s5xwidb7wy23rsnxs5pg6qj5da':
+ if comp_id == 'ocid1.compartment.oc1..aaaaaaaaeifixpi24fbexw5xwidb7wy23rsnxs5pg6qj5da':
vcn_list = oci.pagination.list_call_get_all_results(vnc.list_vcns, compartment_id=comp_id, lifecycle_state="AVAILABLE")
for vcn in vcn_list.data:
# if(vcn.lifecycle_state == 'ACTIVE'):
diff --git a/cd3_automation_toolkit/cd3Validator.py b/cd3_automation_toolkit/cd3Validator.py
index 181fc840e..f8e83aada 100644
--- a/cd3_automation_toolkit/cd3Validator.py
+++ b/cd3_automation_toolkit/cd3Validator.py
@@ -106,10 +106,10 @@ def validate_cidr(cidr_list):
for i in range(0, len(cidr_list)):
try:
+ rowN = cidr_list[i][1]
ipaddress.ip_network(cidr_list[i][0])
- rowN= cidr_list[i][1]
except ValueError:
- log(f'Row {str(rowN)} Field "CIDR Block" {cidr_list[i]} is invalid. CIDR range has host bits set.')
+ log(f'Row {str(rowN)} Field "CIDR Block" {cidr_list[i]} is invalid. CIDR range has host bits set.')
cidr_check = True
for i in range(0, len(cidr_list)):
@@ -1791,7 +1791,7 @@ def validate_kms(filename,comp_ids):
elif replica_region == 'nan':
pass
elif replica_region != 'nan' and replica_region not in ct.all_regions:
- log(f'ROW {i + 3} : "Replica Region" {region} is either not subscribed to tenancy or toolkit is not yet configured to be used for this region')
+ log(f'ROW {i + 3} : "Replica Region" {replica_region} is either not subscribed to tenancy or toolkit is not yet configured to be used for this region')
kms_invalid_check = True
diff --git a/cd3_automation_toolkit/commonTools.py b/cd3_automation_toolkit/commonTools.py
index 1d87deca3..340298371 100644
--- a/cd3_automation_toolkit/commonTools.py
+++ b/cd3_automation_toolkit/commonTools.py
@@ -351,7 +351,7 @@ def get_compartment_map(self, var_file, resource_name):
soc = True
elif line.strip().startswith('#compartment_ocids_END#'):
soc = False
- else:
+ elif "=" in line:
line_items = str(line.strip()).split('=')
key = str(line_items[0]).strip()
value = str(line_items[1]).strip()
@@ -360,9 +360,9 @@ def get_compartment_map(self, var_file, resource_name):
self.ntk_compartment_ids[key.replace('--', '::')] = val
f.close()
- if len(var_ocids) == 0:
- print("Please make sure to execute the script for 'Fetch Compartments OCIDs to variables file' under 'CD3 Services' menu option first and re-run this.")
- exit(1)
+ #if len(var_ocids) == 0:
+ # print("Please make sure to execute the script for 'Fetch Compartments OCIDs to variables file' under 'CD3 Services' menu option first and re-run this.")
+ # exit(1)
except Exception as e:
print(str(e))
diff --git a/cd3_automation_toolkit/example/CD3-Blank-template.xlsx b/cd3_automation_toolkit/example/CD3-Blank-template.xlsx
index 4c696d581..f8d1120e5 100644
Binary files a/cd3_automation_toolkit/example/CD3-Blank-template.xlsx and b/cd3_automation_toolkit/example/CD3-Blank-template.xlsx differ
diff --git a/cd3_automation_toolkit/example/CD3-CIS-ManagementServices-template.xlsx b/cd3_automation_toolkit/example/CD3-CIS-ManagementServices-template.xlsx
index c45cac0fc..c8770310a 100644
Binary files a/cd3_automation_toolkit/example/CD3-CIS-ManagementServices-template.xlsx and b/cd3_automation_toolkit/example/CD3-CIS-ManagementServices-template.xlsx differ
diff --git a/cd3_automation_toolkit/example/CD3-CIS-template.xlsx b/cd3_automation_toolkit/example/CD3-CIS-template.xlsx
index bd4811999..3c5785729 100644
Binary files a/cd3_automation_toolkit/example/CD3-CIS-template.xlsx and b/cd3_automation_toolkit/example/CD3-CIS-template.xlsx differ
diff --git a/cd3_automation_toolkit/example/CD3-Firewall-template.xlsx b/cd3_automation_toolkit/example/CD3-Firewall-template.xlsx
index 079f5b078..8f85ae5f2 100644
Binary files a/cd3_automation_toolkit/example/CD3-Firewall-template.xlsx and b/cd3_automation_toolkit/example/CD3-Firewall-template.xlsx differ
diff --git a/cd3_automation_toolkit/example/CD3-HubSpoke-template.xlsx b/cd3_automation_toolkit/example/CD3-HubSpoke-template.xlsx
index 3f2c7d330..f3eff6fff 100644
Binary files a/cd3_automation_toolkit/example/CD3-HubSpoke-template.xlsx and b/cd3_automation_toolkit/example/CD3-HubSpoke-template.xlsx differ
diff --git a/cd3_automation_toolkit/example/CD3-SingleVCN-template.xlsx b/cd3_automation_toolkit/example/CD3-SingleVCN-template.xlsx
index bc062c4b0..f1b657dcd 100644
Binary files a/cd3_automation_toolkit/example/CD3-SingleVCN-template.xlsx and b/cd3_automation_toolkit/example/CD3-SingleVCN-template.xlsx differ
diff --git a/cd3_automation_toolkit/setUpOCI.py b/cd3_automation_toolkit/setUpOCI.py
index 165c474da..318be7ae7 100644
--- a/cd3_automation_toolkit/setUpOCI.py
+++ b/cd3_automation_toolkit/setUpOCI.py
@@ -159,7 +159,7 @@ def get_region_list(rm,vizoci):
input_region_names = ct.reg_filter
else:
resource_name = 'OCI resources'
- region_list_str = "\nEnter region (comma separated without spaces if multiple) for which you want to export {}; Identity and Tags will be exported from Home Region.\nPress 'Enter' to export from all the subscribed regions- eg ashburn,phoenix: "
+ region_list_str = "\nEnter region (comma separated without spaces if multiple) for which you want to export {}; Identity and Tags will be exported from Home Region.\nPress 'Enter' to export from all the subscribed regions - eg ashburn,phoenix: "
input_region_names = input(region_list_str.format(resource_name))
elif rm == True and vizoci == False:
if devops:
diff --git a/cd3_automation_toolkit/user-scripts/createTenancyConfig.py b/cd3_automation_toolkit/user-scripts/createTenancyConfig.py
index ddb11f5e1..85e6cd9a7 100644
--- a/cd3_automation_toolkit/user-scripts/createTenancyConfig.py
+++ b/cd3_automation_toolkit/user-scripts/createTenancyConfig.py
@@ -15,7 +15,6 @@
import configparser
import oci
from oci.object_storage import ObjectStorageClient
-import git
import glob
import subprocess
sys.path.append(os.getcwd()+"/..")
@@ -360,6 +359,7 @@ def create_bucket(config, signer):
# Initialize Toolkit Variables
user_dir = "/cd3user"
+
safe_file = user_dir + "/tenancies/createTenancyConfig.safe"
auto_keys_dir = user_dir + "/tenancies/keys"
toolkit_dir = os.path.dirname(os.path.abspath(__file__))+"/.."
@@ -472,6 +472,16 @@ def create_bucket(config, signer):
devops_user = config.get('Default', 'oci_devops_git_user').strip()
devops_user_key = config.get('Default', 'oci_devops_git_key').strip()
+ # Check if Jenkins was installed during image build
+ use_devops_docker = os.environ['USE_DEVOPS']
+ use_devops_docker=use_devops_docker.lower()
+
+ if use_devops_docker != use_devops:
+ use_devops = "no"
+
+ if use_devops == 'yes':
+ import git
+
if use_devops == 'yes' or remote_state == 'yes':
#Use remote state if using devops
remote_state='yes'
@@ -800,6 +810,8 @@ def create_bucket(config, signer):
if not os.path.exists(terraform_files):
os.makedirs(terraform_files)
+# Copy modules dir to terraform_files folder
+shutil.copytree(terraform_dir + "/modules", terraform_files + "/modules")
print("Creating Tenancy specific region directories, terraform provider , variables files.................")
regions_file_data = ""
@@ -866,7 +878,8 @@ def create_bucket(config, signer):
shutil.copytree(terraform_dir, terraform_files + "/" + region + "/", ignore=shutil.ignore_patterns("modules"))
'''
- shutil.copytree(terraform_dir, terraform_files + "/" + region + "/")
+ #shutil.copytree(terraform_dir, terraform_files + "/" + region + "/")
+ shutil.copytree(terraform_dir, terraform_files + "/" + region + "/", ignore=shutil.ignore_patterns("modules"))
#Prepare variables file
linux_image_id = ''
@@ -955,6 +968,8 @@ def create_bucket(config, signer):
with open(file, 'r+') as tf_file:
module_data = tf_file.read().rstrip()
module_data = module_data.replace("# depends_on", "depends_on")
+ module_data = module_data.replace("\"./modules", "\"../modules")
+
tf_file.close()
f = open(file, "w+")
f.write(module_data)
@@ -983,7 +998,7 @@ def create_bucket(config, signer):
shutil.move(region_dir + 'scripts',region_service_dir+'/')
with open(region_dir + service + ".tf", 'r+') as tf_file:
module_data = tf_file.read().rstrip()
- module_data = module_data.replace("\"./modules", "\"../modules")
+ module_data = module_data.replace("\"./modules", "\"../../modules")
f = open(region_service_dir + "/" + service + ".tf", "w+")
f.write(module_data)
diff --git a/cd3_automation_toolkit/user-scripts/tenancyconfig.properties b/cd3_automation_toolkit/user-scripts/tenancyconfig.properties
index 824392e21..4c2ca9854 100644
--- a/cd3_automation_toolkit/user-scripts/tenancyconfig.properties
+++ b/cd3_automation_toolkit/user-scripts/tenancyconfig.properties
@@ -46,7 +46,6 @@ outdir_structure_file=/cd3user/oci_tools/cd3_automation_toolkit/user-scripts/out
# To use OpenTofu, specify tofu
tf_or_tofu=terraform
-
# SSH Key for launched instances; Use '\n' as the delimiter to add multiple ssh keys.
# Example: ssh-rsa AAXXX......yhdlo\nssh-rsa AAxxskj...edfwf
# Optional
diff --git a/cd3_automation_toolkit/user-scripts/terraform/firewall.tf b/cd3_automation_toolkit/user-scripts/terraform/firewall.tf
index dfeddcb58..35ca80eda 100644
--- a/cd3_automation_toolkit/user-scripts/terraform/firewall.tf
+++ b/cd3_automation_toolkit/user-scripts/terraform/firewall.tf
@@ -18,7 +18,7 @@ module "firewalls" {
for_each = var.firewalls != null ? var.firewalls : {}
depends_on = [module.policies, module.address_lists, module.application_groups, module.applications, module.services, module.service_lists, module.url_lists, module.decryption_profiles, module.secrets, module.security_rules, module.decryption_rules]
compartment_id = each.value.compartment_id != null ? (length(regexall("ocid1.compartment.oc*", each.value.compartment_id)) > 0 ? each.value.compartment_id : var.compartment_ocids[each.value.compartment_id]) : var.compartment_ocids[each.value.compartment_id]
- network_firewall_policy_id = length(regexall("ocid1.networkfirewallpolicy.oc1.*", each.value.network_firewall_policy_id)) > 0 ? each.value.network_firewall_policy_id : merge(module.policies.*...)[each.value.network_firewall_policy_id]["policy_tf_id"]
+ network_firewall_policy_id = length(regexall("ocid1.networkfirewallpolicy.oc*", each.value.network_firewall_policy_id)) > 0 ? each.value.network_firewall_policy_id : merge(module.policies.*...)[each.value.network_firewall_policy_id]["policy_tf_id"]
subnet_id = each.value.subnet_id != "" ? (length(regexall("ocid1.subnet.oc*", each.value.subnet_id)) > 0 ? each.value.subnet_id : data.oci_core_subnets.firewall_subnets[each.key].subnets.*.id[0]) : null
display_name = each.value.display_name
ipv4address = each.value.ipv4address
@@ -45,7 +45,7 @@ module "services" {
for_each = var.services != null ? var.services : {}
depends_on = [module.policies]
service_name = each.value.service_name
- network_firewall_policy_id = length(regexall("ocid1.networkfirewallpolicy.oc1.*", each.value.network_firewall_policy_id)) > 0 ? each.value.network_firewall_policy_id : merge(module.policies.*...)[each.value.network_firewall_policy_id]["policy_tf_id"]
+ network_firewall_policy_id = length(regexall("ocid1.networkfirewallpolicy.oc*", each.value.network_firewall_policy_id)) > 0 ? each.value.network_firewall_policy_id : merge(module.policies.*...)[each.value.network_firewall_policy_id]["policy_tf_id"]
service_type = each.value.service_type
port_ranges = each.value.port_ranges
}
@@ -55,7 +55,7 @@ module "service_lists" {
for_each = var.service_lists != null ? var.service_lists : {}
depends_on = [module.services, module.policies]
service_list_name = each.value.service_list_name
- network_firewall_policy_id = length(regexall("ocid1.networkfirewallpolicy.oc1.*", each.value.network_firewall_policy_id)) > 0 ? each.value.network_firewall_policy_id : merge(module.policies.*...)[each.value.network_firewall_policy_id]["policy_tf_id"]
+ network_firewall_policy_id = length(regexall("ocid1.networkfirewallpolicy.oc*", each.value.network_firewall_policy_id)) > 0 ? each.value.network_firewall_policy_id : merge(module.policies.*...)[each.value.network_firewall_policy_id]["policy_tf_id"]
services = each.value.services != null ? flatten(tolist([for sid in each.value.services : (length(regexall("ocid1.networkfirewallpolicy.oc*", sid)) > 0 ? merge(module.services.*...)[sid]["service+_tf_id"] : [sid])])) : null
}
@@ -64,7 +64,7 @@ module "address_lists" {
for_each = var.address_lists != null ? var.address_lists : {}
depends_on = [module.policies]
address_list_name = each.value.address_list_name
- network_firewall_policy_id = length(regexall("ocid1.networkfirewallpolicy.oc1.*", each.value.network_firewall_policy_id)) > 0 ? each.value.network_firewall_policy_id : merge(module.policies.*...)[each.value.network_firewall_policy_id]["policy_tf_id"]
+ network_firewall_policy_id = length(regexall("ocid1.networkfirewallpolicy.oc*", each.value.network_firewall_policy_id)) > 0 ? each.value.network_firewall_policy_id : merge(module.policies.*...)[each.value.network_firewall_policy_id]["policy_tf_id"]
address_type = each.value.address_type
addresses = each.value.addresses
}
@@ -75,7 +75,7 @@ module "applications" {
depends_on = [module.policies]
icmp_type = each.value.icmp_type
app_list_name = each.value.app_list_name
- network_firewall_policy_id = length(regexall("ocid1.networkfirewallpolicy.oc1.*", each.value.network_firewall_policy_id)) > 0 ? each.value.network_firewall_policy_id : merge(module.policies.*...)[each.value.network_firewall_policy_id]["policy_tf_id"]
+ network_firewall_policy_id = length(regexall("ocid1.networkfirewallpolicy.oc*", each.value.network_firewall_policy_id)) > 0 ? each.value.network_firewall_policy_id : merge(module.policies.*...)[each.value.network_firewall_policy_id]["policy_tf_id"]
app_type = each.value.app_type
icmp_code = each.value.icmp_code
}
@@ -85,7 +85,7 @@ module "application_groups" {
for_each = var.application_groups != null ? var.application_groups : {}
depends_on = [module.policies, module.applications]
app_group_name = each.value.app_group_name
- network_firewall_policy_id = length(regexall("ocid1.networkfirewallpolicy.oc1.*", each.value.network_firewall_policy_id)) > 0 ? each.value.network_firewall_policy_id : merge(module.policies.*...)[each.value.network_firewall_policy_id]["policy_tf_id"]
+ network_firewall_policy_id = length(regexall("ocid1.networkfirewallpolicy.oc*", each.value.network_firewall_policy_id)) > 0 ? each.value.network_firewall_policy_id : merge(module.policies.*...)[each.value.network_firewall_policy_id]["policy_tf_id"]
apps = each.value.apps != null ? flatten(tolist([for app in each.value.apps : (length(regexall("ocid1.networkfirewallpolicy.oc*", app)) > 0 ? merge(module.applications.*...)[app]["application_tf_id"] : [app])])) : null
}
@@ -94,7 +94,7 @@ module "url_lists" {
for_each = var.url_lists != null ? var.url_lists : {}
depends_on = [module.policies]
urllist_name = each.value.urllist_name
- network_firewall_policy_id = length(regexall("ocid1.networkfirewallpolicy.oc1.*", each.value.network_firewall_policy_id)) > 0 ? each.value.network_firewall_policy_id : merge(module.policies.*...)[each.value.network_firewall_policy_id]["policy_tf_id"]
+ network_firewall_policy_id = length(regexall("ocid1.networkfirewallpolicy.oc*", each.value.network_firewall_policy_id)) > 0 ? each.value.network_firewall_policy_id : merge(module.policies.*...)[each.value.network_firewall_policy_id]["policy_tf_id"]
#key_name = each.key
urls_details = each.value.urls
}
@@ -105,7 +105,7 @@ module "security_rules" {
depends_on = [module.policies, module.address_lists, module.application_groups, module.applications, module.services, module.service_lists, module.url_lists]
action = each.value.action
rule_name = each.value.rule_name
- network_firewall_policy_id = length(regexall("ocid1.networkfirewallpolicy.oc1.*", each.value.network_firewall_policy_id)) > 0 ? each.value.network_firewall_policy_id : merge(module.policies.*...)[each.value.network_firewall_policy_id]["policy_tf_id"]
+ network_firewall_policy_id = length(regexall("ocid1.networkfirewallpolicy.oc*", each.value.network_firewall_policy_id)) > 0 ? each.value.network_firewall_policy_id : merge(module.policies.*...)[each.value.network_firewall_policy_id]["policy_tf_id"]
application = each.value.condition[0].application != null ? each.value.condition[0].application : []
url = each.value.condition[0].url != null ? each.value.condition[0].url : []
service = each.value.condition[0].service != null ? each.value.condition[0].service : []
@@ -126,7 +126,7 @@ module "secrets" {
for_each = var.secrets != null || var.secrets != {} ? var.secrets : {}
depends_on = [module.policies]
secret_name = each.value.secret_name
- network_firewall_policy_id = length(regexall("ocid1.networkfirewallpolicy.oc1.*", each.value.network_firewall_policy_id)) > 0 ? each.value.network_firewall_policy_id : merge(module.policies.*...)[each.value.network_firewall_policy_id]["policy_tf_id"]
+ network_firewall_policy_id = length(regexall("ocid1.networkfirewallpolicy.oc*", each.value.network_firewall_policy_id)) > 0 ? each.value.network_firewall_policy_id : merge(module.policies.*...)[each.value.network_firewall_policy_id]["policy_tf_id"]
secret_source = each.value.secret_source
secret_type = each.value.secret_type
vault_secret_id = each.value.vault_secret_id
@@ -140,7 +140,7 @@ module "decryption_profiles" {
for_each = var.decryption_profiles != null || var.decryption_profiles != {} ? var.decryption_profiles : {}
depends_on = [module.policies, module.secrets]
profile_name = each.value.profile_name
- network_firewall_policy_id = length(regexall("ocid1.networkfirewallpolicy.oc1.*", each.value.network_firewall_policy_id)) > 0 ? each.value.network_firewall_policy_id : merge(module.policies.*...)[each.value.network_firewall_policy_id]["policy_tf_id"]
+ network_firewall_policy_id = length(regexall("ocid1.networkfirewallpolicy.oc*", each.value.network_firewall_policy_id)) > 0 ? each.value.network_firewall_policy_id : merge(module.policies.*...)[each.value.network_firewall_policy_id]["policy_tf_id"]
profile_type = each.value.profile_type
are_certificate_extensions_restricted = each.value.are_certificate_extensions_restricted
is_auto_include_alt_name = each.value.is_auto_include_alt_name
@@ -159,7 +159,7 @@ module "decryption_rules" {
depends_on = [module.policies, module.decryption_profiles, module.secrets, module.address_lists]
action = each.value.action
rule_name = each.value.rule_name
- network_firewall_policy_id = length(regexall("ocid1.networkfirewallpolicy.oc1.*", each.value.network_firewall_policy_id)) > 0 ? each.value.network_firewall_policy_id : merge(module.policies.*...)[each.value.network_firewall_policy_id]["policy_tf_id"]
+ network_firewall_policy_id = length(regexall("ocid1.networkfirewallpolicy.oc*", each.value.network_firewall_policy_id)) > 0 ? each.value.network_firewall_policy_id : merge(module.policies.*...)[each.value.network_firewall_policy_id]["policy_tf_id"]
source_address = each.value.condition[0].source_address != null ? each.value.condition[0].source_address : []
destination_address = each.value.condition[0].destination_address != null ? each.value.condition[0].destination_address : []
after_rule = each.value.after_rule
diff --git a/cd3_automation_toolkit/user-scripts/terraform/loadbalancer.tf b/cd3_automation_toolkit/user-scripts/terraform/loadbalancer.tf
index 19a9a78d2..3e464c126 100644
--- a/cd3_automation_toolkit/user-scripts/terraform/loadbalancer.tf
+++ b/cd3_automation_toolkit/user-scripts/terraform/loadbalancer.tf
@@ -350,33 +350,3 @@ module "lbr-reserved-ips" {
#private_ip_id = each.value.private_ip_id != null ? (length(regexall("ocid1.privateip.oc*", each.value.private_ip_id)) > 0 ? each.value.private_ip_id : (length(regexall("\\.", each.value.private_ip_id)) == 3 ? local.private_ip_id[0][each.value.private_ip_id] : merge(module.private-ips.*...)[each.value.private_ip_id].private_ip_tf_id)) : null
#public_ip_pool_id = each.value.public_ip_pool_id != null ? (length(regexall("ocid1.publicippool.oc*", each.value.public_ip_pool_id)) > 0 ? each.value.public_ip_pool_id : merge(module.public-ip-pools.*...)[each.value.public_ip_pool_id].public_ip_pool_tf_id) : null
}
-
-/*
-resource "oci_load_balancer_load_balancer_routing_policy" "load_balancer_routing_policy" {
-
- #Required
- condition_language_version = "V1"
- load_balancer_id = "ocid1.loadbalancer.oc1.uk-london-1.aaaaaaaa26pp3ygxyycgrmi2f3wuwmgntltotctwvmi4kr6bcbvwo7t5j2va"
- name = "RP01"
- rules {
- #Required
- actions {
- #Required
- name = "FORWARD_TO_BACKENDSET"
-
- #Optional
- backend_set_name = "bset01"
- }
- condition = "all(http.request.url.path eq (i 'test'), http.request.url.query[(i 'key01')] eq (i 'value01'), all(http.request.url.path eq (i 'testonly')))"
- name = "rule01"
- }
-
- rules {
- actions {
- backend_set_name = "bset01"
- name = "FORWARD_TO_BACKENDSET"
- }
- condition = "any(http.request.url.path eq (i 'gh'))"
- name = "rule02"
- }
-}*/
\ No newline at end of file
diff --git a/cd3_automation_toolkit/user-scripts/terraform/managementservices.tf b/cd3_automation_toolkit/user-scripts/terraform/managementservices.tf
index 5f5bf3799..3347b3532 100755
--- a/cd3_automation_toolkit/user-scripts/terraform/managementservices.tf
+++ b/cd3_automation_toolkit/user-scripts/terraform/managementservices.tf
@@ -118,7 +118,7 @@ module "service-connectors" {
logs_compartment_id = var.tenancy_ocid
source_monitoring_details = each.value.source_details.source_kind == "monitoring" ? { for k, v in each.value.source_details.source_monitoring_details : lookup(var.compartment_ocids, k, "not_found") => v } : {}
target_monitoring_details = each.value.target_details.target_kind == "monitoring" ? { for k, v in each.value.target_details.target_monitoring_details : lookup(var.compartment_ocids, k, "not_found") => v } : {}
- log_group_names = each.value.source_details.source_kind == "logging" ? flatten([for key in each.value.source_details.source_log_group_names : join("&", tolist([lookup(var.compartment_ocids, split("&", key)[0], "null"), split("&", key)[1], split("&", key)[2]]))]) : []
+ log_group_names = each.value.source_details.source_kind == "logging" ? flatten([for key in each.value.source_details.source_log_group_names : join("@", tolist([lookup(var.compartment_ocids, split("@", key)[0], "null"), split("@", key)[1], split("@", key)[2]]))]) : []
display_name = each.value.display_name
description = each.value.description
source_kind = each.value.source_details.source_kind
diff --git a/cd3_automation_toolkit/user-scripts/terraform/modules/managementservices/service-connector/data.tf b/cd3_automation_toolkit/user-scripts/terraform/modules/managementservices/service-connector/data.tf
index b719e5a2c..f876b3934 100755
--- a/cd3_automation_toolkit/user-scripts/terraform/modules/managementservices/service-connector/data.tf
+++ b/cd3_automation_toolkit/user-scripts/terraform/modules/managementservices/service-connector/data.tf
@@ -9,7 +9,7 @@
locals {
log_group_names = var.log_group_names
source_kind = var.source_kind
- filtered_logs = [for item in var.log_group_names : item if split("&", item)[2] != "all"]
+ filtered_logs = [for item in var.log_group_names : item if split("@", item)[2] != "all"]
}
data "oci_objectstorage_namespace" "os_namespace" {
@@ -37,13 +37,13 @@ data "oci_ons_notification_topics" "target_topics" {
}
data "oci_logging_log_groups" "source_log_groups" {
for_each = toset(var.log_group_names)
- compartment_id = split("&", each.key)[0]
- display_name = split("&", each.key)[1]
+ compartment_id = split("@", each.key)[0]
+ display_name = split("@", each.key)[1]
}
data "oci_logging_logs" "source_logs" {
for_each = toset(local.filtered_logs)
log_group_id = data.oci_logging_log_groups.source_log_groups[each.key].log_groups[0].id
- display_name = split("&", each.key)[2]
+ display_name = split("@", each.key)[2]
}
data "oci_log_analytics_log_analytics_log_groups" "target_log_analytics_log_groups" {
for_each = var.destination_log_group_id
diff --git a/cd3_automation_toolkit/user-scripts/terraform/modules/managementservices/service-connector/main.tf b/cd3_automation_toolkit/user-scripts/terraform/modules/managementservices/service-connector/main.tf
index 15cfbd7a4..bea14e2c2 100755
--- a/cd3_automation_toolkit/user-scripts/terraform/modules/managementservices/service-connector/main.tf
+++ b/cd3_automation_toolkit/user-scripts/terraform/modules/managementservices/service-connector/main.tf
@@ -18,7 +18,7 @@ resource "oci_sch_service_connector" "service_connector" {
for_each = var.source_monitoring_details
content {
#Optional
- compartment_id = split("&", monitoring_sources.key)[0]
+ compartment_id = split("@", monitoring_sources.key)[0]
namespace_details {
#Required
kind = "selected"
@@ -40,9 +40,9 @@ resource "oci_sch_service_connector" "service_connector" {
dynamic "log_sources" {
for_each = toset(var.log_group_names)
content {
- compartment_id = split("&", log_sources.key)[0]
- log_group_id = length(regexall("Audit", split("&", log_sources.key)[1])) > 0 ? (length(regexall("Audit_In_Subcompartment", split("&", log_sources.key)[1])) > 0 ? "_Audit_Include_Subcompartment" : "_Audit") : data.oci_logging_log_groups.source_log_groups[log_sources.key].log_groups[0].id
- log_id = lower(split("&", log_sources.key)[2]) == "all" ? null : data.oci_logging_logs.source_logs[log_sources.key].logs[0].id
+ compartment_id = split("@", log_sources.key)[0]
+ log_group_id = length(regexall("Audit", split("@", log_sources.key)[1])) > 0 ? (length(regexall("Audit_In_Subcompartment", split("@", log_sources.key)[1])) > 0 ? "_Audit_Include_Subcompartment" : "_Audit") : data.oci_logging_log_groups.source_log_groups[log_sources.key].log_groups[0].id
+ log_id = lower(split("@", log_sources.key)[2]) == "all" ? null : data.oci_logging_logs.source_logs[log_sources.key].logs[0].id
}
}
stream_id = var.source_kind == "streaming" ? data.oci_streaming_streams.source_streams[one(keys(var.source_stream_id))].streams[0].id : null
diff --git a/cd3_automation_toolkit/user-scripts/terraform/modules/managementservices/service-connector/oracle_provider_req.tf b/cd3_automation_toolkit/user-scripts/terraform/modules/managementservices/service-connector/oracle_provider_req.tf
old mode 100644
new mode 100755