  <scriptPath>${scriptPath}</scriptPath>
  <lightweight>false</lightweight>
@@ -55,37 +55,60 @@ def createJobXml(scriptPath, gitUrl) {
"""
}
-// Function to create Jenkins job
-def createJob(parent, jobName, xml) {
- def jobXmlStream = new ByteArrayInputStream(xml.getBytes())
- parent.createProjectFromXML(jobName, jobXmlStream)
+// Function to create Jenkins job if it does not exist
+def createJobIfNotExists(parent, jobName, xml) {
+ def job = parent.getItem(jobName)
+ if (job == null) {
+ def jobXmlStream = new ByteArrayInputStream(xml.getBytes())
+ parent.createProjectFromXML(jobName, jobXmlStream)
+ } else {
+ println "Job already exists: ${jobName}"
+ }
}
-// Create jobs for each configuration
-jenkins.with {
- Folder ost = getItem("terraform_files") ?: createProject(Folder.class, "terraform_files")
+// Create jobs for each profile
+profiles.each { profileName, profile ->
+ // Create profile folder
+ Folder profileFolder = jenkins.getItem(profileName) ?: jenkins.createProject(Folder.class, profileName)
+ Folder tfFolder = profileFolder.getItem("terraform_files") ?: profileFolder.createProject(Folder.class, "terraform_files")
- for (os in outdir_structure) {
- Folder global = ost.getItem("global") ?: ost.createProject(Folder.class, "global")
- Folder rpc = global.getItem("rpc") ?: global.createProject(Folder.class, "rpc")
+ // Create global and rpc folders
+ Folder globalFolder = tfFolder.getItem("global") ?: tfFolder.createProject(Folder.class, "global")
+ Folder rpcFolder = globalFolder.getItem("rpc") ?: globalFolder.createProject(Folder.class, "rpc")
- createJob(rpc, tfApplyJobName, createJobXml('tf-apply.groovy', git_url))
- createJob(rpc, tfDestroyJobName, createJobXml('tf-destroy.groovy', git_url))
- for (reg in regions) {
- Folder folder = ost.getItem(reg) ?: ost.createProject(Folder.class, reg)
+ // Create jobs in rpc folder
+ createJobIfNotExists(rpcFolder, "apply", createJobXml('apply.groovy', profile.git_url, profile.tf_or_tofu))
+ createJobIfNotExists(rpcFolder, "destroy", createJobXml('destroy.groovy', profile.git_url, profile.tf_or_tofu))
- if (os == "Single_Outdir") {
- createJob(folder, tfApplyJobName, createJobXml('tf-apply.groovy', git_url))
- createJob(folder, tfDestroyJobName, createJobXml('tf-destroy.groovy', git_url))
- }
+ profile.regions.each { region ->
+ Folder regionFolder = tfFolder.getItem(region) ?: tfFolder.createProject(Folder.class, region)
- if (os == "Multiple_Outdir" && services) {
- for (svc in services) {
- Folder svcFolder = folder.getItem(svc) ?: folder.createProject(Folder.class, svc)
- createJob(svcFolder, tfApplyJobName, createJobXml('tf-apply.groovy', git_url))
- createJob(svcFolder, tfDestroyJobName, createJobXml('tf-destroy.groovy', git_url))
+ if (profile.outdir_structure.contains("Multiple_Outdir") && profile.services) {
+ profile.services.each { service ->
+ Folder serviceFolder = regionFolder.getItem(service) ?: regionFolder.createProject(Folder.class, service)
+
+ createJobIfNotExists(serviceFolder, "apply", createJobXml('apply.groovy', profile.git_url, profile.tf_or_tofu))
+ createJobIfNotExists(serviceFolder, "destroy", createJobXml('destroy.groovy', profile.git_url, profile.tf_or_tofu))
}
+ } else {
+ createJobIfNotExists(regionFolder, "apply", createJobXml('apply.groovy', profile.git_url, profile.tf_or_tofu))
+ createJobIfNotExists(regionFolder, "destroy", createJobXml('destroy.groovy', profile.git_url, profile.tf_or_tofu))
}
}
+ // Move the setupoci directory to its expected location; the default path is not picked up in the UI.
+ def setupociSrcPath = "$JENKINS_HOME/jobs/${profileName}/setupoci"
+ def setupociDestPath = "$JENKINS_HOME/jobs/${profileName}/jobs/setupoci"
+
+ def setupociSrcDir = new File(setupociSrcPath)
+ def setupociDestDir = new File(setupociDestPath)
+
+ if (setupociSrcDir.exists()) {
+ Files.move(setupociSrcDir.toPath(), setupociDestDir.toPath(), StandardCopyOption.REPLACE_EXISTING)
+ // println "Moved directory from ${setupociSrcDir} to ${setupociDestDir}"
+ }
}
-}
+
+// Reload Jenkins configuration
+Jenkins.instance.reload()
+println "Jenkins configuration reloaded."
+
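For reference, a minimal runnable sketch of the jenkins.properties parsing these init scripts depend on. The profile name, repo URL, and values are illustrative, and the trim and split-limit are small defensive tweaks for this sketch; the script's own loop splits on every '=':

// A minimal sketch of the jenkins.properties parsing used by these init scripts.
// Profile name, repo URL, and values below are illustrative.
def sample = '''[demo_profile]
git_url="https://github.example.com/org/terraform-repo.git"
tf_or_tofu="terraform"
outdir_structure=["Multiple_Outdir"]
regions=["ashburn", "phoenix"]
services=["network", "identity"]
'''

def profiles = [:]
def currentProfile = ""
sample.eachLine { raw ->
    def line = raw.trim()                       // trim added for this sketch
    if (line.startsWith('[')) {
        currentProfile = line.replace('[', '').replace(']', '').trim()
        profiles[currentProfile] = [:]
    } else if (line.contains('=')) {
        def parts = line.split('=', 2)          // split on the first '=' only
        // Eval.me turns the Groovy-literal value into a String or List
        profiles[currentProfile][parts[0].trim()] = Eval.me(parts[1].trim())
    }
}

assert profiles.demo_profile.regions == ['ashburn', 'phoenix']
assert profiles.demo_profile.tf_or_tofu == 'terraform'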
diff --git a/jenkins_install/init/02_jenkins-view.groovy b/jenkins_install/init/02_jenkins-view.groovy
old mode 100755
new mode 100644
index edec49a67..78b10e758
--- a/jenkins_install/init/02_jenkins-view.groovy
+++ b/jenkins_install/init/02_jenkins-view.groovy
@@ -1,5 +1,9 @@
import jenkins.model.Jenkins
+import hudson.model.ListView
+import hudson.model.ViewGroup
+import com.cloudbees.hudson.plugins.folder.Folder
+// Function to create views for each region within profile directories
def createRegionViews() {
def jenkinsInstance = Jenkins.getInstance()
if (jenkinsInstance == null) {
@@ -7,48 +11,81 @@ def createRegionViews() {
return
}
- def parentPath = "terraform_files"
- def parent = jenkinsInstance.getItemByFullName(parentPath)
+ // Read the properties file
+ def JENKINS_HOME = System.getenv("JENKINS_HOME")
+ File file = new File("$JENKINS_HOME/jenkins.properties")
- if (parent != null && parent instanceof hudson.model.ViewGroup) {
- parent.items.each { regionFolder ->
- def viewName = regionFolder.name
- def view = jenkinsInstance.getView(viewName)
+ // Parse the properties file into profiles
+ def profiles = [:]
+ def currentProfile = ""
+ file.eachLine { line ->
+ if (line.startsWith('[')) {
+ currentProfile = line.replace('[', '').replace(']', '').trim()
+ profiles[currentProfile] = [:]
+ } else if (line.contains('=')) {
+ def parts = line.split('=')
+ profiles[currentProfile][parts[0].trim()] = Eval.me(parts[1].trim())
+ }
+ }
- if (view == null) {
- view = new hudson.model.ListView(viewName, jenkinsInstance)
- jenkinsInstance.addView(view)
- }
+ // Create views for each profile
+ profiles.each { profileName, profile ->
+ def profileFolder = jenkinsInstance.getItem(profileName)
+ if (profileFolder != null && profileFolder instanceof ViewGroup) {
+ profile.regions.each { region ->
+ def viewName = region
+ def view = profileFolder.getView(viewName)
- // Clear the view to remove any existing jobs
- view.items.clear()
+ if (view == null) {
+ println("Creating view: $viewName in profile: $profileName")
+ def newView = new ListView(viewName)
+ profileFolder.addView(newView)
+ newView.save()
+ println("View '$viewName' created successfully in profile '$profileName'.")
+ view = newView
+ } else {
+ println("View '$viewName' already exists in profile '$profileName'.")
+ }
- // Add jobs to the view
- addJobsToView(view, regionFolder)
+ // Clear the view to remove any existing jobs
+ view.items.clear()
- // Set the "Recurse in folders" option
- view.setRecurse(true)
+ // Navigate through the structure to find jobs
+ def terraformFilesFolder = profileFolder.getItem('terraform_files')
+ if (terraformFilesFolder instanceof ViewGroup) {
+ def regionFolder = terraformFilesFolder.getItem(region)
+ if (regionFolder instanceof ViewGroup) {
+ regionFolder.items.each { serviceFolder ->
+ if (serviceFolder instanceof ViewGroup) {
+ addJobsToView(view, serviceFolder)
+ }
+ }
+ }
+ }
- // Save the view configuration
- view.save()
+ // Set the "Recurse in folders" option
+ view.setRecurse(true)
- println("View '$viewName' created successfully.")
+ // Save the view configuration
+ view.save()
+ }
+ } else {
+ println("Profile folder not found: $profileName")
}
- } else {
- println("Parent folder not found: $parentPath")
}
}
-def addJobsToView(hudson.model.ListView view, hudson.model.ViewGroup folder) {
+// Function to add jobs to view
+def addJobsToView(ListView view, ViewGroup folder) {
folder.items.each { item ->
if (item instanceof hudson.model.Job) {
view.add(item)
- } else if (item instanceof hudson.model.ViewGroup) {
+ } else if (item instanceof ViewGroup) {
// Recursively add jobs from sub-folders
addJobsToView(view, item)
}
}
}
-// function to create region views
+// Function to create region views
createRegionViews()
\ No newline at end of file
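The views aggregate jobs that live several folders deep (profile/terraform_files/region/service/apply), which is also the shape the setUpOCI pipeline relies on when it triggers builds. A sketch of resolving such a nested job by its full name, assuming illustrative profile and folder names:

import jenkins.model.Jenkins
import hudson.model.Job

// Folder/job names below are illustrative; the layout mirrors the init scripts.
def fullName = "demo_profile/terraform_files/ashburn/network/apply"
def job = Jenkins.instance.getItemByFullName(fullName, Job)
if (job != null) {
    println "Found job: ${job.fullName}"
} else {
    println "No job at ${fullName}; the region may use the Single_Outdir layout"
}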
diff --git a/jenkins_install/jcasc.yaml b/jenkins_install/jcasc.yaml
index a7f8290dc..813e4f224 100644
--- a/jenkins_install/jcasc.yaml
+++ b/jenkins_install/jcasc.yaml
@@ -47,6 +47,7 @@ security:
- "method groovy.lang.GroovyObject invokeMethod java.lang.String java.lang.Object"
- "new java.io.File java.lang.String"
- "staticMethod org.codehaus.groovy.runtime.ResourceGroovyMethods readLines java.io.File"
+ - "method java.io.File listFiles"
unclassified:
buildDiscarders:
configuredBuildDiscarders:
diff --git a/jenkins_install/jenkins.sh b/jenkins_install/jenkins.sh
index 93b4c79fb..ef51505d4 100644
--- a/jenkins_install/jenkins.sh
+++ b/jenkins_install/jenkins.sh
@@ -11,13 +11,53 @@ if [ ! -d "$JENKINS_HOME" ]; then
exit
fi
-# Copy Required files to JENKINS_HOME
+## Copy Required files to JENKINS_HOME
+#cp ${JENKINS_INSTALL}/jcasc.yaml "$JENKINS_HOME/"
+#if [ ! -d "$JENKINS_HOME/jobs/setUpOCI" ]; then
+# mkdir -p "$JENKINS_HOME/jobs/setUpOCI"
+#fi
+#cp ${JENKINS_INSTALL}/setUpOCI_config.xml "$JENKINS_HOME/jobs/setUpOCI/config.xml"
+#cp -r ${JENKINS_INSTALL}/scriptler $JENKINS_HOME
+
cp ${JENKINS_INSTALL}/jcasc.yaml "$JENKINS_HOME/"
-if [ ! -d "$JENKINS_HOME/jobs/setUpOCI" ]; then
- mkdir -p "$JENKINS_HOME/jobs/setUpOCI"
-fi
-cp ${JENKINS_INSTALL}/setUpOCI_config.xml "$JENKINS_HOME/jobs/setUpOCI/config.xml"
-cp -r ${JENKINS_INSTALL}/scriptler $JENKINS_HOME
+
+# Read profiles from jenkins.properties only
+declare -A profiles
+current_profile=""
+while IFS= read -r line; do
+ if [[ "$line" =~ ^\[.*\]$ ]]; then
+ current_profile=$(echo "$line" | tr -d '[]' | xargs -0)
+ profiles["$current_profile"]=""
+ echo "Processing profile: $current_profile" # Debug line
+ elif [[ "$line" == *=* ]]; then
+ key=$(echo "$line" | cut -d'=' -f1 | xargs -0)
+ value=$(echo "$line" | cut -d'=' -f2- | xargs -0)
+ profiles["$current_profile"]+="$key='$value' "
+ fi
+done < "$JENKINS_HOME/jenkins.properties"
+
+# Create setupoci job inside each profile folder
+for profile_name in "${!profiles[@]}"; do
+ # Build the job directory paths for this profile
+ profile_folder_path="$JENKINS_HOME/jobs/${profile_name}"
+ setupoci_job_dest="$profile_folder_path/setupoci"
+
+ # Create profile and setupoci directories if they don't exist
+ mkdir -p "$setupoci_job_dest"
+ echo "Creating directory: $setupoci_job_dest" # Debug line
+
+ # Copy setupoci config
+ cp "${JENKINS_INSTALL}/setUpOCI_config.xml" "$setupoci_job_dest/config.xml"
+ echo "Copied setUpOCI_config.xml to $setupoci_job_dest/config.xml" # Debug line
+done
+
+# Copy scriptler directory
+cp -r "${JENKINS_INSTALL}/scriptler" "$JENKINS_HOME"
+echo "Copied scriptler directory to $JENKINS_HOME" # Debug line
+
+echo "SetupOCI jobs created for profiles."
+
+
#Generate Self Signed Cert and Copy to JENKINS_HOME
keytool -genkey -keystore "$JENKINS_INSTALL/oci_toolkit.jks" -alias "automationtoolkit" -keyalg RSA -validity 60 -keysize 2048 -dname "CN=oci-automation, OU=toolkit, C=IN" -ext SAN=dns:automationtoolkit,ip:127.0.0.1 -storepass automationtoolkit && keytool -importkeystore -srckeystore "$JENKINS_INSTALL/oci_toolkit.jks" -srcstoretype JKS -deststoretype PKCS12 -destkeystore "$JENKINS_HOME/oci_toolkit.p12" -srcstorepass automationtoolkit -deststorepass automationtoolkit -noprompt
@@ -58,4 +98,4 @@ if [[ $# -lt 1 ]] || [[ "$1" == "--"* ]]; then
fi
# As argument is not jenkins, assume user want to run his own process, for example a `bash` shell to explore this image
-exec "$@"
+exec "$@"
\ No newline at end of file
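Note how this pairs with the 01 init script above: jenkins.sh seeds the config at $JENKINS_HOME/jobs/<profile>/setupoci, and the init script then relocates it to jobs/<profile>/jobs/setupoci, the path where a CloudBees Folder expects child items. A sketch of that relocation, with an illustrative profile name:

import java.nio.file.Files
import java.nio.file.Paths
import java.nio.file.StandardCopyOption

// Illustrative profile; JENKINS_HOME is read from the environment as in the scripts.
def home = System.getenv('JENKINS_HOME')
def src  = Paths.get(home, 'jobs', 'demo_profile', 'setupoci')
def dest = Paths.get(home, 'jobs', 'demo_profile', 'jobs', 'setupoci')
if (Files.exists(src)) {
    Files.createDirectories(dest.parent)   // the folder's jobs/ dir may not exist yet
    Files.move(src, dest, StandardCopyOption.REPLACE_EXISTING)
}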
diff --git a/jenkins_install/scriptler/scripts/AdditionalFilters.groovy b/jenkins_install/scriptler/scripts/AdditionalFilters.groovy
index db01fc1a8..1642d8255 100644
--- a/jenkins_install/scriptler/scripts/AdditionalFilters.groovy
+++ b/jenkins_install/scriptler/scripts/AdditionalFilters.groovy
@@ -31,7 +31,19 @@ html_to_be_rendered = """
"""
}
+domain_filter_val = "Unset"
for (item in SubOptions.split(",")) {
+ if ((item in ["Export Groups","Export Users"]) && (domain_filter_val.equals("Unset"))) {
+ html_to_be_rendered = """
+ ${html_to_be_rendered}
+
+ |
+ |
+ (Enter 'all' to export from all domains, or leave blank to export from the default domain) |
+ |
|
|
+ """
+ domain_filter_val = "Set"
+ }
if (item.equals("Export Instances (excludes instances launched by OKE)")) {
html_to_be_rendered = """
${html_to_be_rendered}
@@ -161,41 +173,6 @@ for (item in SubOptions.split(",")) {
"""
}
- if (item.equals('Create Key/Vault')){
- html_to_be_rendered = """
- ${html_to_be_rendered}
-
- |
- |
- |
- |
-
|
|
-
- |
- |
- |
- |
-
|
|
- """
- }
- if (item.equals('Create Default Budget')){
- html_to_be_rendered = """
- ${html_to_be_rendered}
-
- |
- |
- |
- |
-
|
|
-
- |
- |
- |
- |
-
|
|
- """
- }
-
if (item.equals('Enable Cloud Guard')){
html_to_be_rendered = """
${html_to_be_rendered}
@@ -225,7 +202,53 @@ for (item in SubChildOptions.split(",")) {
|
|
"""
}
- break
+ if (item.equals("Export DR Plan")) {
+ html_to_be_rendered = """
+ ${html_to_be_rendered}
+
+
+ |
+ |
+ (Default is 'prefix_oci-fsdr-plan.xlsx at /cd3user/tenancies//othertools_files') |
+ |
+
|
|
+
+ |
+ |
+ (Default is 'FSDR-Plan') |
+ |
+
|
|
+
+ |
+ |
+ (Mandatory) |
+ |
+
|
|
+ """
+ }
+
+ if (item.equals("Update DR Plan")) {
+ html_to_be_rendered = """
+ ${html_to_be_rendered}
+
+ |
+ |
+ (Default is 'prefix_oci-fsdr-plan.xlsx at /cd3user/tenancies//othertools_files') |
+ |
|
|
+
+ |
+ |
+ (Default is 'FSDR-Plan' if left empty) |
+ |
|
|
+
+ |
+ |
+ (Mandatory) |
+ |
|
|
+
+ """
+ }
+
}
html_to_be_rendered = "${html_to_be_rendered} "
diff --git a/jenkins_install/scriptler/scripts/MainOptions.groovy b/jenkins_install/scriptler/scripts/MainOptions.groovy
index 7a31488b8..51dfadbe3 100644
--- a/jenkins_install/scriptler/scripts/MainOptions.groovy
+++ b/jenkins_install/scriptler/scripts/MainOptions.groovy
@@ -17,7 +17,7 @@ return[
"Logging Services",
"Software-Defined Data Centers - OCVS",
"CD3 Services",
-"3rd Party Services"
+"Other OCI Tools"
]
}
else if(Workflow.toLowerCase().contains("export")) {
diff --git a/jenkins_install/scriptler/scripts/SubChildOptions.groovy b/jenkins_install/scriptler/scripts/SubChildOptions.groovy
index 8491ef67c..6c7671ea2 100644
--- a/jenkins_install/scriptler/scripts/SubChildOptions.groovy
+++ b/jenkins_install/scriptler/scripts/SubChildOptions.groovy
@@ -5,6 +5,7 @@ List drg_route_rules = ["DRG ROUTE RULES:disabled","Export DRG Route Rule
List nsg = ["NSGs:disabled","Export NSGs (From OCI into NSGs sheet)", "Add/Modify/Delete NSGs (Reads NSGs sheet)"]
List cis = ["CIS:disabled","Download latest compliance checking script", "Execute compliance checking script"]
List showoci = ["SHOW OCI:disabled","Download Latest ShowOCI Script", "Execute ShowOCI Script"]
+List ocifsdr = ["OCI FSDR:disabled","Export DR Plan", "Update DR Plan"]
List customer_connectivity = ["Connectivity:disabled","Create Remote Peering Connections"]
List final_list = []
@@ -27,10 +28,13 @@ for (item in SubOptions.split(",")) {
if (item.equals("ShowOCI Report")){
final_list += showoci
}
- if (item.equals("Add/Modify/Delete Firewall Policy")){
+ if (item.equals("OCI FSDR")){
+ final_list += ocifsdr
+ }
+ if (item.equals("Add/Modify/Delete Firewall Policy")){
final_list += firewall_policy
}
- if (item.equals("Customer Connectivity")){
+ if (item.equals("Customer Connectivity")){
final_list += customer_connectivity
}
}
diff --git a/jenkins_install/scriptler/scripts/SubOptions.groovy b/jenkins_install/scriptler/scripts/SubOptions.groovy
index 60dd92fff..a5a67f9c8 100644
--- a/jenkins_install/scriptler/scripts/SubOptions.groovy
+++ b/jenkins_install/scriptler/scripts/SubOptions.groovy
@@ -14,7 +14,7 @@ List developer_services = ["DEVELOPER SERVICES:disabled","Add/Modify/Dele
List security = ["SECURITY:disabled","Add/Modify/Delete KMS (Keys/Vaults)", "Enable Cloud Guard"]
List logging_services = ["LOGGING SERVICES:disabled","Enable VCN Flow Logs", "Enable LBaaS Logs", "Enable Object Storage Buckets Logs", "Enable File Storage Logs", "Enable Network Firewall Logs"]
List cd3_services = ["CD3 SERVICES:disabled","Fetch Compartments OCIDs to variables file", "Fetch Protocols to OCI_Protocols"]
-List utility_services = ["3rd Party Services:disabled","CIS Compliance Check Script", "ShowOCI Report"]
+List utility_services = ["Other OCI Tools:disabled","CIS Compliance Check Script", "ShowOCI Report", "OCI FSDR"]
List ex_identity = ["IDENTITY:disabled","Export Compartments", "Export Groups", "Export Policies", "Export Users", "Export Network Sources"]
List ex_governance = ["GOVERNANCE:disabled","Export Tags", "Export Quotas"]
List ex_cost_management = ["COST MANAGEMENT:disabled","Export Budgets"]
@@ -79,7 +79,7 @@ final_list += logging_services
if (item.equals("CD3 Services")){
final_list += cd3_services
}
-if (item.equals("3rd Party Services")){
+if (item.equals("Other OCI Tools")){
final_list += utility_services
}
if (item.equals("Export Identity")){
diff --git a/jenkins_install/scriptler/scripts/ValidateParams.groovy b/jenkins_install/scriptler/scripts/ValidateParams.groovy
index 428e68410..0f23d1c88 100644
--- a/jenkins_install/scriptler/scripts/ValidateParams.groovy
+++ b/jenkins_install/scriptler/scripts/ValidateParams.groovy
@@ -17,11 +17,11 @@ def validate_params(Workflow,MainOptions,SubOptions,SubChildOptions,AdditionalFi
"Security":["Add/Modify/Delete KMS (Keys/Vaults)", "Enable Cloud Guard"],
"Logging Services":["Enable VCN Flow Logs", "Enable LBaaS Logs", "Enable Object Storage Buckets Logs", "Enable File Storage Logs", "Enable Network Firewall Logs"],
"CD3 Services":["Fetch Compartments OCIDs to variables file", "Fetch Protocols to OCI_Protocols"],
- "3rd Party Services":["CIS Compliance Check Script", "ShowOCI Report"]
+ "Other OCI Tools":["CIS Compliance Check Script", "ShowOCI Report", "OCI FSDR"]
]
def non_gf_options_map = [
"Export Identity":["Export Compartments", "Export Groups", "Export Policies", "Export Users", "Export Network Sources"],
- "Export Governance":["Expot Tags", "Export Quotas"],
+ "Export Governance":["Export Tags", "Export Quotas"],
"Export Cost Management":["Export Budgets"],
"Export Network":["Export all Network Components", "Export Network components for VCNs/DRGs/DRGRouteRulesinOCI Tabs", "Export Network components for DHCP Tab", "Export Network components for SecRulesinOCI Tab", "Export Network components for RouteRulesinOCI Tab", "Export Network components for SubnetsVLANs Tab", "Export Network components for NSGs Tab"],
"Export OCI Firewall":["Export Firewall Policy", "Export Firewall"],
diff --git a/jenkins_install/setUpOCI_config.xml b/jenkins_install/setUpOCI_config.xml
old mode 100755
new mode 100644
index b6435444f..eb3a166a6
--- a/jenkins_install/setUpOCI_config.xml
+++ b/jenkins_install/setUpOCI_config.xml
@@ -27,7 +27,7 @@
Excel_Template
- Upload input Excel file.
+ Upload input Excel file for the services chosen.
Previously uploaded file will be used if left empty.
@@ -197,10 +197,10 @@ def generateStage(job) {
if (values.size() > 1) {
region = values[0]
service = values[1]
- job_name = "./terraform_files/${region}/${service}/terraform-apply".replace("//","/")
+ job_name = "./terraform_files/${region}/${service}/apply".replace("//","/")
}else {
region = values[0]
- job_name = "./terraform_files/${region}/terraform-apply".replace("//","/")
+ job_name = "./terraform_files/${region}/apply".replace("//","/")
}
build job: "${job_name}"
}
@@ -285,7 +285,7 @@ pipeline {
}
environment {
prop_file = "/cd3user/tenancies/${customer_prefix}/${customer_prefix}_setUpOCI.properties"
- //current_timestamp = sh (script: 'date +%m-%d-%Y-%H-%M-%S', returnStdout: true).trim()
+ current_timestamp = sh (script: 'date +%s', returnStdout: true).trim()
}
parameters {
stashedFile (
@@ -394,23 +394,50 @@ pipeline {
'''
script {
- // For latest CD3 XL file.
- def latestXL = sh(returnStdout: true, script: '''
- set +x
- ls -t /cd3user/tenancies/${customer_prefix}/*.xl* | head -n 1
- ''').trim()
-
+ sh '''
+ set +x
+ # For latest CD3 XL file.
+ latestXL=`ls -t /cd3user/tenancies/${customer_prefix}/*.xl* | head -n 1`
echo "XL is ${latestXL}"
- sh "rm -f *.xl*"
- sh "cp '${latestXL}' ."
+ rm -f *.*
+ cp ${latestXL} .
+ # Check for cis_reports and show_oci directories
+ if [ -d "/cd3user/tenancies/${customer_prefix}/othertools_files/${customer_prefix}_cis_report" ]; then
+ last_modified=`stat -c "%Y" /cd3user/tenancies/${customer_prefix}/othertools_files/${customer_prefix}_cis_report`
+ if [ $(($last_modified-$current_timestamp)) -gt 0 ]; then
+ cp -r /cd3user/tenancies/${customer_prefix}/othertools_files/${customer_prefix}_cis_report .
+ tar -cf ${customer_prefix}_cis_report.zip ${customer_prefix}_cis_report/
+ rm -rf ${customer_prefix}_cis_report
+ fi
+ fi
+ if [ -d "/cd3user/tenancies/${customer_prefix}/othertools_files/${customer_prefix}_showoci_report" ]; then
+ last_modified=`stat -c "%Y" /cd3user/tenancies/${customer_prefix}/othertools_files/${customer_prefix}_showoci_report`
+ if [ $(($last_modified-$current_timestamp)) -gt 0 ]; then
+ cp -r /cd3user/tenancies/${customer_prefix}/othertools_files/${customer_prefix}_showoci_report .
+ tar -cf ${customer_prefix}_showoci_report.zip ${customer_prefix}_showoci_report/
+ rm -rf ${customer_prefix}_showoci_report
+ fi
+ fi
+
+ # For latest oci_fsdr plan XL file.
+ count=`ls -1 /cd3user/tenancies/${customer_prefix}/othertools_files/*.xl* 2>/dev/null | wc -l`
+ if [ $count != 0 ]; then
+ latest_fsdr_XL=`ls -t /cd3user/tenancies/${customer_prefix}/othertools_files/*.xl* | head -n 1`
+ last_modified=`stat -c \"%Y\" ${latest_fsdr_XL}`
+ if [ $(($last_modified-$current_timestamp)) -gt 0 ]; then
+ cp ${latest_fsdr_XL} .
+ fi
+ fi
+ '''
}
}
}
post {
success {
- archiveArtifacts '*.xl*'
+ archiveArtifacts '*.xl*, *.zip'
+
}
}
}
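The stat/date arithmetic above archives a report only when its mtime postdates the build-start epoch captured in the environment block, so artifacts from earlier runs are not re-archived. The same check expressed in Groovy, with an illustrative path:

// Epoch captured at pipeline start (analogue of `date +%s` in the environment block).
long buildStartEpoch = System.currentTimeMillis() / 1000L

// Later, after the tools have run: archive only if modified after the build started.
File report = new File('/cd3user/tenancies/demo/othertools_files/demo_cis_report')
if (report.exists() && report.lastModified() / 1000L > buildStartEpoch) {
    println "Archiving fresh report: ${report.name}"
} else {
    println "Skipping stale or missing report: ${report.name}"
}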
@@ -470,7 +497,7 @@ pipeline {
}
}
}
- stage ('Trigger Terraform Pipelines'){
+ stage ('Trigger Pipelines'){
when {
allOf{
expression {return "${git_status}" > 0}
diff --git a/jenkins_install/tf-destroy.groovy b/jenkins_install/tf-destroy.groovy
deleted file mode 100644
index aa0ab8afa..000000000
--- a/jenkins_install/tf-destroy.groovy
+++ /dev/null
@@ -1,169 +0,0 @@
-/* Set the various stages of the build */
-def tf_plan = "Changes"
-
-pipeline {
- agent any
- options {
- ansiColor('xterm')
- }
- stages {
- stage('Set Environment Variables') {
- steps {
- script {
- def fileContent = readFile "${JENKINS_HOME}/jenkins.properties"
- // Split file content into lines
- def lines = fileContent.readLines()
-
- // Process each line to extract variable name and value
- def variables = [:]
- lines.each { line ->
- def parts = line.split('=')
- if (parts.size() == 2) {
- variables[parts[0].trim()] = parts[1].trim()
- }
- }
-
- def variableOds = variables['outdir_structure'].toString().replaceAll("\\[|\\]", '').replaceAll('"', '')
- env.out_str = "${variableOds}"
- def jobName = env.JOB_NAME
- def parts = jobName.split('/')
- if (env.out_str == 'Multiple_Outdir') {
- // Assuming the job name format is /job//job/job_name
- env.Region = parts[1]
- env.Service = parts[2]
- }
- else {
- // Assuming the job name format is /job/job_name
- env.Region = parts[1]
- env.Service = ''
- if (env.Region == 'global') {
- env.Service = 'rpc'
- }
- }
- }
- }
- }
-
- stage('Terraform Destroy Plan') {
- when {
- expression { return env.GIT_BRANCH == 'origin/develop';}
- }
-
- steps {
- catchError(buildResult: 'FAILURE', stageResult: 'FAILURE') {
- script {
-
- sh "cd \"${WORKSPACE}/${env.Region}/${env.Service}\" && terraform init -upgrade"
- // Run Terraform plan
- terraformPlanOutput = sh(script: "cd \"${WORKSPACE}/${env.Region}/${env.Service}\" && terraform plan -destroy", returnStdout: true).trim()
-
- // Check if the plan contains any changes
- if (terraformPlanOutput.contains('No changes.')) {
- echo 'No changes in Terraform plan. Skipping further stages.'
- tf_plan = "No Changes"
- } else {
- // If there are changes, proceed with applying the plan
- echo "Proceeding with destroy. \n${terraformPlanOutput}"
- }
- }
- }
- }
- }
-
- /** Approval for Terraform Apply **/
- stage('Get Approval') {
- when {
- allOf {
- expression {return env.GIT_BRANCH == 'origin/develop'; }
- expression {return tf_plan == "Changes" }
- expression {return currentBuild.result != "FAILURE" }
- }
- }
- input {
- message "Do you want to perform terraform destroy?"
- }
- steps {
- echo "Approval for the Destroy Granted!"
- }
- }
-
- stage('Terraform Destroy') {
- when {
- allOf {
- expression {return env.GIT_BRANCH == 'origin/develop'; }
- expression {return tf_plan == "Changes" }
- expression {return currentBuild.result != "FAILURE" }
- }
- }
-
- steps {
- catchError(buildResult: 'FAILURE', stageResult: 'FAILURE') {
- script {
- sh "cd \"${WORKSPACE}/${env.Region}/${env.Service}\" && terraform destroy --auto-approve"
- }
- }
- }
- }
-
- /** Main branch commit to keep changes in Sync **/
- stage('Commit To Main') {
- when {
- allOf {
- expression { return env.GIT_BRANCH == 'origin/develop'; }
- expression { return tf_plan == "Changes" }
- expression { return currentBuild.result != "FAILURE" }
- }
- }
- steps {
- catchError(buildResult: 'FAILURE', stageResult: 'FAILURE') {
- script {
- def buildDir = "${WORKSPACE}/${BUILD_NUMBER}"
- // Create directory with build number
- sh "mkdir -p ${buildDir}"
- // Commit changes to the main branch
- dir(buildDir) {
- sh """
- git clone ${GIT_URL}
- cd \$(ls -d */|head -n 1)
- git checkout main
- cd "${env.Region}/${env.Service}"
- git pull --no-edit origin main
- rm -f *.tfvars
- git status
- git add --all .
- """
-
- def git_status = false
- while (!git_status) {
- // Execute the git commands using shell
- def gitResult = sh(script: """
- cd "\$(ls -d */|head -n 1)"
- cd "${env.Region}/${env.Service}"
- git fetch origin main
- git merge origin/main
- git commit -m "commit for terraform-destroy build - ${BUILD_NUMBER} for "${env.Region}"/"${env.Service}
-
- git push --porcelain origin main
- """, returnStatus: true)
-
- if (gitResult == 0) {
- git_status = true
- } else {
- echo "Git operation failed, retrying...."
- sleep 3 // 3 seconds before retrying
- }
- }
- }
- }
- }
- }
-
- post {
- always {
- // Delete the build directory and the temporary directory
- deleteDir()
- }
- }
- }
- }
-}
diff --git a/cd3_automation_toolkit/cis_reports.py b/othertools/cis_reports.py
similarity index 94%
rename from cd3_automation_toolkit/cis_reports.py
rename to othertools/cis_reports.py
index 8b0c1dda1..632c9c0a9 100644
--- a/cd3_automation_toolkit/cis_reports.py
+++ b/othertools/cis_reports.py
@@ -35,9 +35,9 @@
except Exception:
OUTPUT_TO_XLSX = False
-RELEASE_VERSION = "2.8.1"
-PYTHON_SDK_VERSION = "2.124.1"
-UPDATED_DATE = "March 25, 2024"
+RELEASE_VERSION = "2.8.4"
+PYTHON_SDK_VERSION = "2.129.4"
+UPDATED_DATE = "July 26, 2024"
##########################################################################
@@ -85,40 +85,11 @@ class CIS_Report:
_DAYS_OLD = 90
__KMS_DAYS_OLD = 365
__home_region = []
+ __days_to_expiry = 30
# Time Format
__iso_time_format = "%Y-%m-%dT%H:%M:%S"
- # OCI Link
- __oci_cloud_url = "https://cloud.oracle.com"
- __oci_users_uri = __oci_cloud_url + "/identity/users/"
- __oci_policies_uri = __oci_cloud_url + "/identity/policies/"
- __oci_groups_uri = __oci_cloud_url + "/identity/groups/"
- __oci_dynamic_groups_uri = __oci_cloud_url + "/identity/dynamicgroups/"
- __oci_identity_domains_uri = __oci_cloud_url + '/identity/domains/'
- __oci_buckets_uri = __oci_cloud_url + "/object-storage/buckets/"
- __oci_boot_volumes_uri = __oci_cloud_url + "/block-storage/boot-volumes/"
- __oci_block_volumes_uri = __oci_cloud_url + "/block-storage/volumes/"
- __oci_fss_uri = __oci_cloud_url + "/fss/file-systems/"
- __oci_networking_uri = __oci_cloud_url + "/networking/vcns/"
- __oci_adb_uri = __oci_cloud_url + "/db/adb/"
- __oci_oicinstance_uri = __oci_cloud_url + "/oic/integration-instances/"
- __oci_oacinstance_uri = __oci_cloud_url + "/analytics/instances/"
- __oci_compartment_uri = __oci_cloud_url + "/identity/compartments/"
- __oci_drg_uri = __oci_cloud_url + "/networking/drgs/"
- __oci_cpe_uri = __oci_cloud_url + "/networking/cpes/"
- __oci_ipsec_uri = __oci_cloud_url + "/networking/vpn-connections/"
- __oci_events_uri = __oci_cloud_url + "/events/rules/"
- __oci_loggroup_uri = __oci_cloud_url + "/logging/log-groups/"
- __oci_vault_uri = __oci_cloud_url + "/security/kms/vaults/"
- __oci_budget_uri = __oci_cloud_url + "/usage/budgets/"
- __oci_cgtarget_uri = __oci_cloud_url + "/cloud-guard/targets/"
- __oci_onssub_uri = __oci_cloud_url + "/notification/subscriptions/"
- __oci_serviceconnector_uri = __oci_cloud_url + "/connector-hub/service-connectors/"
- __oci_fastconnect_uri = __oci_cloud_url + "/networking/fast-connect/virtual-circuit/"
- __oci_instances_uri = __oci_cloud_url + "/compute/instances/"
-
-
__oci_ocid_pattern = r'ocid1\.[a-z,0-9]*\.[a-z,0-9]*\.[a-z,0-9,-]*\.[a-z,0-9,\.]{20,}'
# Start print time info
@@ -137,8 +108,14 @@ class CIS_Report:
datetime.timedelta(days=__KMS_DAYS_OLD)
str_kms_key_time_max_datetime = kms_key_time_max_datetime.strftime(__iso_time_format)
kms_key_time_max_datetime = datetime.datetime.strptime(str_kms_key_time_max_datetime, __iso_time_format)
+ # For Certificates Check
+ cert_key_time_max_datetime = start_datetime + \
+ datetime.timedelta(days=__days_to_expiry)
+ str_cert_key_time_max_datetime = cert_key_time_max_datetime.strftime(__iso_time_format)
+ cert_key_time_max_datetime = datetime.datetime.strptime(str_cert_key_time_max_datetime, __iso_time_format)
+
- def __init__(self, config, signer, proxy, output_bucket, report_directory, report_prefix, report_summary_json, print_to_screen, regions_to_run_in, raw_data, obp, redact_output, debug=False, all_resources=True):
+ def __init__(self, config, signer, proxy, output_bucket, report_directory, report_prefix, report_summary_json, print_to_screen, regions_to_run_in, raw_data, obp, redact_output, oci_url=None, debug=False, all_resources=True):
# CIS Foundation benchmark 2.0.0
self.cis_foundations_benchmark_2_0 = {
@@ -185,7 +162,7 @@ def __init__(self, config, signer, proxy, output_bucket, report_directory, repor
'4.12': {'section': 'Logging and Monitoring', 'recommendation_#': '4.12', 'Title': 'Ensure a notification is configured for changes to network gateways.', 'Status': False, 'Level': 1, 'Total': [], 'Findings': [], 'CISv8': ['4.2'], 'CCCS Guard Rail': '11', 'Remediation': []},
'4.13': {'section': 'Logging and Monitoring', 'recommendation_#': '4.13', 'Title': 'Ensure VCN flow logging is enabled for all subnets.', 'Status': True, 'Level': 2, 'Total': [], 'Findings': [], 'CISv8': ['8.2', '8.5', '13.6'], 'CCCS Guard Rail': '', 'Remediation': []},
'4.14': {'section': 'Logging and Monitoring', 'recommendation_#': '4.14', 'Title': 'Ensure Cloud Guard is enabled in the root compartment of the tenancy.', 'Status': True, 'Level': 1, 'Total': [], 'Findings': [], 'CISv8': ['8.2', '8.5', '8.11'], 'CCCS Guard Rail': '1,2,3', 'Remediation': []},
- '4.15': {'section': 'Logging and Monitoring', 'recommendation_#': '4.15', 'Title': 'Ensure a notification is configured for Oracle Cloud Guard problems detected.', 'Status': True, 'Level': 2, 'Total': [], 'Findings': [], 'CISv8': ['8.2', '8.11'], 'CCCS Guard Rail': '', 'Remediation': []},
+ '4.15': {'section': 'Logging and Monitoring', 'recommendation_#': '4.15', 'Title': 'Ensure a notification is configured for Oracle Cloud Guard problems detected.', 'Status': False, 'Level': 2, 'Total': [], 'Findings': [], 'CISv8': ['8.2', '8.11'], 'CCCS Guard Rail': '', 'Remediation': []},
'4.16': {'section': 'Logging and Monitoring', 'recommendation_#': '4.16', 'Title': 'Ensure customer created Customer Managed Key (CMK) is rotated at least annually.', 'Status': True, 'Level': 1, 'Total': [], 'Findings': [], 'CISv8': [], 'CCCS Guard Rail': '6,7', 'Remediation': []},
'4.17': {'section': 'Logging and Monitoring', 'recommendation_#': '4.17', 'Title': 'Ensure write level Object Storage logging is enabled for all buckets.', 'Status': True, 'Level': 2, 'Total': [], 'Findings': [], 'CISv8': ['8.2'], 'CCCS Guard Rail': '', 'Remediation': []},
@@ -431,7 +408,7 @@ def __init__(self, config, signer, proxy, output_bucket, report_directory, repor
"Impact": "There is no performance impact when enabling the above described features but depending on the amount of notifications sent per month there may be a cost associated.",
"Remediation": "Create a Rule Condition in the Events services by selecting Identity in the Service Name Drop-down and selecting Identity Provider – Create, Identity Provider - Delete and Identity Provider – Update. In the Actions section select Notifications as Action Type and selct the compartment and topic to be used.",
"Recommendation": "",
- "Observation": "notification has been configured for Identity Provider changes."
+ "Observation": "notifications have been configured for Identity Provider changes."
},
"4.4": {
"Description": "It is recommended to setup an Event Rule and Notification that gets triggered when Identity Provider Group Mappings are created, updated or deleted. Event Rules are compartment scoped and will detect events in child compartments. It is recommended to create the Event rule at the root compartment level",
@@ -439,7 +416,7 @@ def __init__(self, config, signer, proxy, output_bucket, report_directory, repor
"Impact": "There is no performance impact when enabling the above described features but depending on the amount of notifications sent per month there may be a cost associated.",
"Remediation": "Find and click the Rule that handles Idp Group Mapping Changes. Click the Edit Rule button and verify that the RuleConditions section contains a condition for the Service Identity and Event Types: Idp Group Mapping – Create, Idp Group Mapping – Delete, and Idp Group Mapping – Update and confirm Action Type contains: Notifications and that a valid Topic is referenced.",
"Recommendation": "",
- "Observation": "notification has been configured for Identity Provider Group Mapping changes."
+ "Observation": "notifications have been configured for Identity Provider Group Mapping changes."
},
"4.5": {
"Description": "It is recommended to setup an Event Rule and Notification that gets triggered when IAM Groups are created, updated or deleted. Event Rules are compartment scoped and will detect events in child compartments, it is recommended to create the Event rule at the root compartment level.",
@@ -447,7 +424,7 @@ def __init__(self, config, signer, proxy, output_bucket, report_directory, repor
"Impact": "There is no performance impact when enabling the above described features but depending on the amount of notifications sent per month there may be a cost associated.",
"Remediation": "Create a Rule Condition by selecting Identity in the Service Name Drop-down and selecting Group – Create, Group – Delete and Group – Update. In the Actions section select Notifications as Action Type and selct the compartment and topic to be used.",
"Recommendation": "",
- "Observation": "notification has been configured for Identity Provider changes."
+ "Observation": "notifications have been configured for IAM Group changes."
},
"4.6": {
"Description": "It is recommended to setup an Event Rule and Notification that gets triggered when IAM Policies are created, updated or deleted. Event Rules are compartment scoped and will detect events in child compartments, it is recommended to create the Event rule at the root compartment level.",
@@ -455,7 +432,7 @@ def __init__(self, config, signer, proxy, output_bucket, report_directory, repor
"Impact": "There is no performance impact when enabling the above described features but depending on the amount of notifications sent per month there may be a cost associated.",
"Remediation": "Create a Rule Condition by selecting Identity in the Service Name Drop-down and selecting Policy – Change Compartment, Policy – Create, Policy - Delete and Policy – Update. In the Actions section select Notifications as Action Type and selct the compartment and topic to be used.",
"Recommendation": "",
- "Observation": "notification has been configured for IAM Policy changes."
+ "Observation": "notifications have been configured for IAM Policy changes."
},
"4.7": {
"Description": "It is recommended to setup an Event Rule and Notification that gets triggered when IAM Users are created, updated, deleted, capabilities updated, or state updated. Event Rules are compartment scoped and will detect events in child compartments, it is recommended to create the Event rule at the root compartment level.",
@@ -463,7 +440,7 @@ def __init__(self, config, signer, proxy, output_bucket, report_directory, repor
"Impact": "There is no performance impact when enabling the above described features but depending on the amount of notifications sent per month there may be a cost associated.",
"Remediation": "Edit Rule that handles IAM User Changes and verify that the Rule Conditions section contains a condition for the Service Identity and Event Types: User – Create, User – Delete, User – Update, User Capabilities – Update, User State – Update.",
"Recommendation": "",
- "Observation": "notification has been configured for user changes."
+ "Observation": "notifications have been configured for user changes."
},
"4.8": {
"Description": "It is recommended to setup an Event Rule and Notification that gets triggered when Virtual Cloud Networks are created, updated or deleted. Event Rules are compartment scoped and will detect events in child compartments, it is recommended to create the Event rule at the root compartment level.",
@@ -471,7 +448,7 @@ def __init__(self, config, signer, proxy, output_bucket, report_directory, repor
"Impact": "There is no performance impact when enabling the above described features but depending on the amount of notifications sent per month there may be a cost associated.",
"Remediation": "Edit Rule that handles VCN Changes and verify that the RuleConditions section contains a condition for the Service Networking and Event Types: VCN – Create, VCN - Delete, and VCN – Update.",
"Recommendation": "",
- "Observation": "notification has been configured for VCN changes."
+ "Observation": "notifications have been configured for VCN changes."
},
"4.9": {
"Description": "It is recommended to setup an Event Rule and Notification that gets triggered when route tables are created, updated or deleted. Event Rules are compartment scoped and will detect events in child compartments, it is recommended to create the Event rule at the root compartment level.",
@@ -479,7 +456,7 @@ def __init__(self, config, signer, proxy, output_bucket, report_directory, repor
"Impact": "There is no performance impact when enabling the above described features but depending on the amount of notifications sent per month there may be a cost associated.",
"Remediation": "Edit Rule that handles Route Table Changes and verify that the RuleConditions section contains a condition for the Service Networking and Event Types: Route Table – Change Compartment, Route Table – Create, Route Table - Delete, and Route Table – Update.",
"Recommendation": "",
- "Observation": "notification has been configured for changes to route tables."
+ "Observation": "notifications have been configured for changes to route tables."
},
"4.10": {
"Description": "It is recommended to setup an Event Rule and Notification that gets triggered when security lists are created, updated or deleted. Event Rules are compartment scoped and will detect events in child compartments, it is recommended to create the Event rule at the root compartment level.",
@@ -487,7 +464,7 @@ def __init__(self, config, signer, proxy, output_bucket, report_directory, repor
"Impact": "There is no performance impact when enabling the above described features but depending on the amount of notifications sent per month there may be a cost associated.",
"Remediation": "Edit Rule that handles Security List Changes and verify that the RuleConditions section contains a condition for the Service Networking and Event Types: Security List – Change Compartment, Security List – Create, Security List - Delete, and Security List – Update.",
"Recommendation": "",
- "Observation": "notification has been configured for security list changes."
+ "Observation": "notifications have been configured for security list changes."
},
"4.11": {
"Description": "It is recommended to setup an Event Rule and Notification that gets triggered when network security groups are created, updated or deleted. Event Rules are compartment scoped and will detect events in child compartments, it is recommended to create the Event rule at the root compartment level.",
@@ -495,7 +472,7 @@ def __init__(self, config, signer, proxy, output_bucket, report_directory, repor
"Impact": "There is no performance impact when enabling the above described features but depending on the amount of notifications sent per month there may be a cost associated.",
"Remediation": "Edit Rule that handles Network Security Group changes and verify that the RuleConditions section contains a condition for the Service Networking and Event Types: Network Security Group – Change Compartment, Network Security Group – Create, Network Security Group - Delete, and Network Security Group – Update.",
"Recommendation": "",
- "Observation": "notification has been configured for changes on Network Service Groups."
+ "Observation": "notifications have been configured for changes on Network Service Groups."
},
"4.12": {
"Description": "It is recommended to setup an Event Rule and Notification that gets triggered when Network Gateways are created, updated, deleted, attached, detached, or moved. This recommendation includes Internet Gateways, Dynamic Routing Gateways, Service Gateways, Local Peering Gateways, and NAT Gateways. Event Rules are compartment scoped and will detect events in child compartments, it is recommended to create the Event rule at the root compartment level.",
@@ -503,7 +480,7 @@ def __init__(self, config, signer, proxy, output_bucket, report_directory, repor
"Impact": "There is no performance impact when enabling the above described features but depending on the amount of notifications sent per month there may be a cost associated.",
"Remediation": "Edit Rule that handles Network Gateways Changes and verify that the RuleConditions section contains a condition for the Service Networking and Event Types: DRG – Create, DRG - Delete, DRG - Update, DRG Attachment – Create, DRG Attachment – Delete, DRG Attachment - Update, Internet Gateway – Create, Internet Gateway – Delete, Internet Gateway - Update, Internet Gateway – Change Compartment, Local Peering Gateway – Create, Local Peering Gateway – Delete End, Local Peering Gateway - Update, Local Peering Gateway – Change Compartment, NAT Gateway – Create, NAT Gateway – Delete, NAT Gateway - Update, NAT Gateway – Change Compartment,Compartment, Service Gateway – Create, Service Gateway – Delete Begin, Service Gateway – Delete End, Service Gateway – Update, Service Gateway – Attach Service, Service Gateway – Detach Service, Service Gateway – Change Compartment.",
"Recommendation": "",
- "Observation": "notification has been configured for changes on network gateways."
+ "Observation": "notifications have been configured for changes on network gateways."
},
"4.13": {
"Description": "VCN flow logs record details about traffic that has been accepted or rejected based on the security list rule.",
@@ -527,7 +504,7 @@ def __init__(self, config, signer, proxy, output_bucket, report_directory, repor
"Impact": "There is no performance impact when enabling the above described features but depending on the amount of notifications sent per month there may be a cost associated.",
"Remediation": "Create a Rule Condition by selecting Cloud Guard in the Service Name Drop-down and selecting Detected – Problem, Remediated – Problem and Dismissed - Problem. In the Actions section select Notifications as Action Type and selct the compartment and topic to be used.",
"Recommendation": "",
- "Observation": "notification has been configured for Cloud Guard Problems"
+ "Observation": "notifications have been configured for Cloud Guard Problems."
},
"4.16": {
"Description": "Oracle Cloud Infrastructure Vault securely stores master encryption keys that protect your encrypted data. You can use the Vault service to rotate keys to generate new cryptographic material. Periodically rotating keys limits the amount of data encrypted by one key version.",
@@ -611,7 +588,7 @@ def __init__(self, config, signer, proxy, output_bucket, report_directory, repor
}
}
- # MAP Checks
+ # OBP Checks
self.obp_foundations_checks = {
'Cost_Tracking_Budgets': {'Status': False, 'Findings': [], 'OBP': [], "Documentation": "https://docs.oracle.com/en-us/iaas/Content/Billing/Concepts/budgetsoverview.htm#Budgets_Overview"},
'SIEM_Audit_Log_All_Comps': {'Status': True, 'Findings': [], 'OBP': [], "Documentation": "https://docs.oracle.com/en/solutions/oci-aggregate-logs-siem/index.html"}, # Assuming True
@@ -621,63 +598,66 @@ def __init__(self, config, signer, proxy, output_bucket, report_directory, repor
'SIEM_Read_Bucket_Logs': {'Status': None, 'Findings': [], 'OBP': [], "Documentation": "https://docs.oracle.com/en/solutions/oci-aggregate-logs-siem/index.html"},
'Networking_Connectivity': {'Status': True, 'Findings': [], 'OBP': [], "Documentation": "https://docs.oracle.com/en-us/iaas/Content/Network/Troubleshoot/drgredundancy.htm"},
'Cloud_Guard_Config': {'Status': None, 'Findings': [], 'OBP': [], "Documentation": "https://www.ateam-oracle.com/post/tuning-oracle-cloud-guard"},
+ 'Certificates_Near_Expiry': {'Status': None, 'Findings': [], 'OBP': [], "Documentation": "TBD"},
}
- # MAP Regional Data
+ # CIS and OBP Regional Data
+ # 4.6 is not regional because OCI IAM Policies only exist in the home region
+ self.__cis_regional_checks = {"4.3","4.4","4.5","4.7", "4.8", "4.9", "4.10", "4.11", "4.12"}
self.__obp_regional_checks = {}
# CIS monitoring notifications check
self.cis_monitoring_checks = {
- "4.4": [
+ "4.3": [
'com.oraclecloud.identitycontrolplane.createidentityprovider',
'com.oraclecloud.identitycontrolplane.deleteidentityprovider',
'com.oraclecloud.identitycontrolplane.updateidentityprovider'
],
- "4.5": [
+ "4.4": [
'com.oraclecloud.identitycontrolplane.createidpgroupmapping',
'com.oraclecloud.identitycontrolplane.deleteidpgroupmapping',
'com.oraclecloud.identitycontrolplane.updateidpgroupmapping'
],
- "4.6": [
+ "4.5": [
'com.oraclecloud.identitycontrolplane.creategroup',
'com.oraclecloud.identitycontrolplane.deletegroup',
'com.oraclecloud.identitycontrolplane.updategroup'
],
- "4.7": [
+ "4.6": [
'com.oraclecloud.identitycontrolplane.createpolicy',
'com.oraclecloud.identitycontrolplane.deletepolicy',
'com.oraclecloud.identitycontrolplane.updatepolicy'
],
- "4.8": [
+ "4.7": [
'com.oraclecloud.identitycontrolplane.createuser',
'com.oraclecloud.identitycontrolplane.deleteuser',
'com.oraclecloud.identitycontrolplane.updateuser',
'com.oraclecloud.identitycontrolplane.updateusercapabilities',
'com.oraclecloud.identitycontrolplane.updateuserstate'
],
- "4.9": [
+ "4.8": [
'com.oraclecloud.virtualnetwork.createvcn',
'com.oraclecloud.virtualnetwork.deletevcn',
'com.oraclecloud.virtualnetwork.updatevcn'
],
- "4.10": [
+ "4.9": [
'com.oraclecloud.virtualnetwork.changeroutetablecompartment',
'com.oraclecloud.virtualnetwork.createroutetable',
'com.oraclecloud.virtualnetwork.deleteroutetable',
'com.oraclecloud.virtualnetwork.updateroutetable'
],
- "4.11": [
+ "4.10": [
'com.oraclecloud.virtualnetwork.changesecuritylistcompartment',
'com.oraclecloud.virtualnetwork.createsecuritylist',
'com.oraclecloud.virtualnetwork.deletesecuritylist',
'com.oraclecloud.virtualnetwork.updatesecuritylist'
],
- "4.12": [
+ "4.11": [
'com.oraclecloud.virtualnetwork.changenetworksecuritygroupcompartment',
'com.oraclecloud.virtualnetwork.createnetworksecuritygroup',
'com.oraclecloud.virtualnetwork.deletenetworksecuritygroup',
'com.oraclecloud.virtualnetwork.updatenetworksecuritygroup'
],
- "4.13": [
+ "4.12": [
'com.oraclecloud.virtualnetwork.createdrg',
'com.oraclecloud.virtualnetwork.deletedrg',
'com.oraclecloud.virtualnetwork.updatedrg',
@@ -844,6 +824,8 @@ def __init__(self, config, signer, proxy, output_bucket, report_directory, repor
# Compute Resources - Thinking about
self.__Instance = []
+ # Certificates raw resources
+ self.__raw_oci_certificates = []
# Setting list of regions to run in
# Start print time info
@@ -991,6 +973,40 @@ def __init__(self, config, signer, proxy, output_bucket, report_directory, repor
self.__obp_checks = True
self.__output_raw_data = True
+ # Determine if __oci_cloud_url should be overridden for a different realm, e.g. OC2 or a sovereign region
+ self.__oci_cloud_url = "https://cloud.oracle.com"
+ if oci_url:
+ self.__oci_cloud_url = oci_url
+
+ # OCI Link
+ self.__oci_users_uri = self.__oci_cloud_url + "/identity/users/"
+ self.__oci_policies_uri = self.__oci_cloud_url + "/identity/policies/"
+ self.__oci_groups_uri = self.__oci_cloud_url + "/identity/groups/"
+ self.__oci_dynamic_groups_uri = self.__oci_cloud_url + "/identity/dynamicgroups/"
+ self.__oci_identity_domains_uri = self.__oci_cloud_url + '/identity/domains/'
+ self.__oci_buckets_uri = self.__oci_cloud_url + "/object-storage/buckets/"
+ self.__oci_boot_volumes_uri = self.__oci_cloud_url + "/block-storage/boot-volumes/"
+ self.__oci_block_volumes_uri = self.__oci_cloud_url + "/block-storage/volumes/"
+ self.__oci_fss_uri = self.__oci_cloud_url + "/fss/file-systems/"
+ self.__oci_networking_uri = self.__oci_cloud_url + "/networking/vcns/"
+ self.__oci_adb_uri = self.__oci_cloud_url + "/db/adb/"
+ self.__oci_oicinstance_uri = self.__oci_cloud_url + "/oic/integration-instances/"
+ self.__oci_oacinstance_uri = self.__oci_cloud_url + "/analytics/instances/"
+ self.__oci_compartment_uri = self.__oci_cloud_url + "/identity/compartments/"
+ self.__oci_drg_uri = self.__oci_cloud_url + "/networking/drgs/"
+ self.__oci_cpe_uri = self.__oci_cloud_url + "/networking/cpes/"
+ self.__oci_ipsec_uri = self.__oci_cloud_url + "/networking/vpn-connections/"
+ self.__oci_events_uri = self.__oci_cloud_url + "/events/rules/"
+ self.__oci_loggroup_uri = self.__oci_cloud_url + "/logging/log-groups/"
+ self.__oci_vault_uri = self.__oci_cloud_url + "/security/kms/vaults/"
+ self.__oci_budget_uri = self.__oci_cloud_url + "/usage/budgets/"
+ self.__oci_cgtarget_uri = self.__oci_cloud_url + "/cloud-guard/targets/"
+ self.__oci_onssub_uri = self.__oci_cloud_url + "/notification/subscriptions/"
+ self.__oci_serviceconnector_uri = self.__oci_cloud_url + "/connector-hub/service-connectors/"
+ self.__oci_fastconnect_uri = self.__oci_cloud_url + "/networking/fast-connect/virtual-circuit/"
+ self.__oci_instances_uri = self.__oci_cloud_url + "/compute/instances/"
+ self.__oci_cert_uri = self.__oci_cloud_url + "/security/certificates/certificate/"
+
##########################################################################
# Create regional config, signers adds appends them to self.__regions object
##########################################################################
@@ -1097,6 +1113,11 @@ def __create_regional_signers(self, proxy):
instance.base_client.session.proxies = {'https': proxy}
region_values['instance'] = instance
+ certificate_client = oci.certificates_management.CertificatesManagementClient(region_config, signer=region_signer)
+ if proxy:
+ certificate_client.base_client.session.proxies = {'https': proxy}
+ region_values['certificate_client'] = certificate_client
+
except Exception as e:
debug("__create_regional_signers: error reading" + str(self.__config))
self.__errors.append({"id" : "__create_regional_signers", "error" : str(e)})
@@ -1362,13 +1383,13 @@ def __identity_read_groups_and_membership(self):
##########################################################################
def __identity_domains_get_all_results(self, func, args):
- if not 'start_index' in args:
+ if "start_index" not in args:
args['start_index'] = 1
- if not "count" in args:
+ if "count" not in args:
args["count"] = 1000
- if not "filter" in args:
+ if "filter" not in args:
args["filter"] = ''
- if not "attribute_sets" in args:
+ if "attribute_sets" not in args:
args["attribute_sets"] = ['all']
debug("__identity_domains_get_all_results: " + str(func.__name__) + " arguments are: " + str(args))
@@ -2645,7 +2666,7 @@ def __network_read_ip_sec_connections(self):
self.__network_ipsec_connections[ip_sec.additional_details['drgId']] = []
self.__network_ipsec_connections[ip_sec.additional_details['drgId']].append(record)
- print("\tProcessed " + str(len((list(itertools.chain.from_iterable(self.__network_ipsec_connections.values()))))) + " IP SEC Conenctions")
+ print("\tProcessed " + str(len((list(itertools.chain.from_iterable(self.__network_ipsec_connections.values()))))) + " IP SEC Connections")
return self.__network_ipsec_connections
except Exception as e:
raise RuntimeError(
@@ -2656,7 +2677,7 @@ def __network_read_ip_sec_connections(self):
############################################
def __network_topology_dump(self):
debug("__network_topology_dump: Starting")
- if type(self.__signer) == oci.auth.signers.InstancePrincipalsDelegationTokenSigner:
+ if type(self.__signer) is oci.auth.signers.InstancePrincipalsDelegationTokenSigner:
self.__errors.append({"id": "__network_topology_dump", "error": "Delegated Tokens via Cloud Shell not supported." })
return
def api_function(region_key, region_values, tenancy_id):
@@ -2695,6 +2716,7 @@ def __adb_read_adbs(self):
try:
for region_key, region_values in self.__regions.items():
# UPDATED JB
+ #adb_query_resources = self.__search_query_resource_type("AutonomousDatabase", region_values['search_client'])
adb_query_resources = oci.pagination.list_call_get_all_results(
region_values['search_client'].search_resources,
search_details=oci.resource_search.models.StructuredSearchDetails(
@@ -2702,7 +2724,6 @@ def __adb_read_adbs(self):
).data
compartments = set()
-
for adb in adb_query_resources:
compartments.add(adb.compartment_id)
@@ -2711,268 +2732,33 @@ def __adb_read_adbs(self):
region_values['adb_client'].list_autonomous_databases,
compartment_id=compartment
).data
+ # autonomous_databases = region_values['adb_client'].list_autonomous_databases(
+ # compartment_id=compartment
+ # ).data
for adb in autonomous_databases:
try:
deep_link = self.__oci_adb_uri + adb.id + '?region=' + region_key
# Issue 295 fixed
if adb.lifecycle_state not in [ oci.database.models.AutonomousDatabaseSummary.LIFECYCLE_STATE_TERMINATED, oci.database.models.AutonomousDatabaseSummary.LIFECYCLE_STATE_TERMINATING, oci.database.models.AutonomousDatabaseSummary.LIFECYCLE_STATE_UNAVAILABLE ]:
- record = {
- "id": adb.id,
- "display_name": adb.display_name,
- "deep_link": self.__generate_csv_hyperlink(deep_link, adb.display_name),
- "apex_details": adb.apex_details,
- "are_primary_whitelisted_ips_used": adb.are_primary_whitelisted_ips_used,
- "autonomous_container_database_id": adb.autonomous_container_database_id,
- "autonomous_maintenance_schedule_type": adb.autonomous_maintenance_schedule_type,
- "available_upgrade_versions": adb.available_upgrade_versions,
- "backup_config": adb.backup_config,
- "compartment_id": adb.compartment_id,
- "connection_strings": adb.connection_strings,
- "connection_urls": adb.connection_urls,
- "cpu_core_count": adb.cpu_core_count,
- "customer_contacts": adb.cpu_core_count,
- "data_safe_status": adb.data_safe_status,
- "data_storage_size_in_gbs": adb.data_storage_size_in_gbs,
- "data_storage_size_in_tbs": adb.data_storage_size_in_tbs,
- "database_management_status": adb.database_management_status,
- "dataguard_region_type": adb.dataguard_region_type,
- "db_name": adb.db_name,
- "db_version": adb.db_version,
- "db_workload": adb.db_workload,
- "defined_tags": adb.defined_tags,
- "failed_data_recovery_in_seconds": adb.failed_data_recovery_in_seconds,
- "freeform_tags": adb.freeform_tags,
- "infrastructure_type": adb.infrastructure_type,
- "is_access_control_enabled": adb.is_access_control_enabled,
- "is_auto_scaling_enabled": adb.is_auto_scaling_enabled,
- "is_data_guard_enabled": adb.is_data_guard_enabled,
- "is_dedicated": adb.is_dedicated,
- "is_free_tier": adb.is_free_tier,
- "is_mtls_connection_required": adb.is_mtls_connection_required,
- "is_preview": adb.is_preview,
- "is_reconnect_clone_enabled": adb.is_reconnect_clone_enabled,
- "is_refreshable_clone": adb.is_refreshable_clone,
- "key_history_entry": adb.key_history_entry,
- "key_store_id": adb.key_store_id,
- "key_store_wallet_name": adb.key_store_wallet_name,
- "kms_key_id": adb.kms_key_id,
- "kms_key_lifecycle_details": adb.kms_key_lifecycle_details,
- "kms_key_version_id": adb.kms_key_version_id,
- "license_model": adb.license_model,
- "lifecycle_details": adb.lifecycle_details,
- "lifecycle_state": adb.lifecycle_state,
- "nsg_ids": adb.nsg_ids,
- "ocpu_count": adb.ocpu_count,
- "open_mode": adb.open_mode,
- "operations_insights_status": adb.operations_insights_status,
- "peer_db_ids": adb.peer_db_ids,
- "permission_level": adb.permission_level,
- "private_endpoint": adb.private_endpoint,
- "private_endpoint_ip": adb.private_endpoint_ip,
- "private_endpoint_label": adb.private_endpoint_label,
- "refreshable_mode": adb.refreshable_mode,
- "refreshable_status": adb.refreshable_status,
- "role": adb.role,
- "scheduled_operations": adb.scheduled_operations,
- "service_console_url": adb.service_console_url,
- "source_id": adb.source_id,
- "standby_whitelisted_ips": adb.standby_whitelisted_ips,
- "subnet_id": adb.subnet_id,
- "supported_regions_to_clone_to": adb.supported_regions_to_clone_to,
- "system_tags": adb.system_tags,
- "time_created": adb.time_created.strftime(self.__iso_time_format),
- "time_data_guard_role_changed": str(adb.time_data_guard_role_changed),
- "time_deletion_of_free_autonomous_database": str(adb.time_deletion_of_free_autonomous_database),
- "time_local_data_guard_enabled": str(adb.time_local_data_guard_enabled),
- "time_maintenance_begin": str(adb.time_maintenance_begin),
- "time_maintenance_end": str(adb.time_maintenance_end),
- "time_of_last_failover": str(adb.time_of_last_failover),
- "time_of_last_refresh": str(adb.time_of_last_refresh),
- "time_of_last_refresh_point": str(adb.time_of_last_refresh_point),
- "time_of_last_switchover": str(adb.time_of_last_switchover),
- "time_of_next_refresh": str(adb.time_of_next_refresh),
- "time_reclamation_of_free_autonomous_database": str(adb.time_reclamation_of_free_autonomous_database),
- "time_until_reconnect_clone_enabled": str(adb.time_until_reconnect_clone_enabled),
- "used_data_storage_size_in_tbs": str(adb.used_data_storage_size_in_tbs),
- "vault_id": adb.vault_id,
- "whitelisted_ips": adb.whitelisted_ips,
- "region": region_key,
- "notes": ""
- }
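+ # Let oci.util.to_dict capture every ADB attribute instead of hand-building the record.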
+ record = oci.util.to_dict(adb)
+ record['deep_link'] = self.__generate_csv_hyperlink(deep_link, adb.display_name)
+ record['error'] = ""
+ self.__autonomous_databases.append(record)
else:
- record = {
- "id": adb.id,
- "display_name": adb.display_name,
- "deep_link": self.__generate_csv_hyperlink(deep_link, adb.display_name),
- "apex_details": "",
- "are_primary_whitelisted_ips_used": "",
- "autonomous_container_database_id": "",
- "autonomous_maintenance_schedule_type": "",
- "available_upgrade_versions": "",
- "backup_config": "",
- "compartment_id": adb.compartment_id,
- "connection_strings": "",
- "connection_urls": "",
- "cpu_core_count": "",
- "customer_contacts": "",
- "data_safe_status": "",
- "data_storage_size_in_gbs": "",
- "data_storage_size_in_tbs": "",
- "database_management_status": "",
- "dataguard_region_type": "",
- "db_name": "",
- "db_version": "",
- "db_workload": "",
- "defined_tags": "",
- "failed_data_recovery_in_seconds": "",
- "freeform_tags": "",
- "infrastructure_type": "",
- "is_access_control_enabled": "",
- "is_auto_scaling_enabled": "",
- "is_data_guard_enabled": "",
- "is_dedicated": "",
- "is_free_tier": "",
- "is_mtls_connection_required": "",
- "is_preview": "",
- "is_reconnect_clone_enabled": "",
- "is_refreshable_clone": "",
- "key_history_entry": "",
- "key_store_id": "",
- "key_store_wallet_name": "",
- "kms_key_id": "",
- "kms_key_lifecycle_details": "",
- "kms_key_version_id": "",
- "license_model": "",
- "lifecycle_details": "",
- "lifecycle_state": adb.lifecycle_state,
- "nsg_ids": "",
- "ocpu_count": "",
- "open_mode": "",
- "operations_insights_status": "",
- "peer_db_ids": "",
- "permission_level": "",
- "private_endpoint": "",
- "private_endpoint_ip": "",
- "private_endpoint_label": "",
- "refreshable_mode": "",
- "refreshable_status": "",
- "role": "",
- "scheduled_operations": "",
- "service_console_url": "",
- "source_id": "",
- "standby_whitelisted_ips": "",
- "subnet_id": "",
- "supported_regions_to_clone_to": "",
- "system_tags": "",
- "time_created": "",
- "time_data_guard_role_changed": "",
- "time_deletion_of_free_autonomous_database": "",
- "time_local_data_guard_enabled": "",
- "time_maintenance_begin": "",
- "time_maintenance_end": "",
- "time_of_last_failover": "",
- "time_of_last_refresh": "",
- "time_of_last_refresh_point": "",
- "time_of_last_switchover": "",
- "time_of_next_refresh": "",
- "time_reclamation_of_free_autonomous_database": "",
- "time_until_reconnect_clone_enabled": "",
- "used_data_storage_size_in_tbs": "",
- "vault_id": "",
- "whitelisted_ips": "",
- "region": region_key,
- "notes": ""
- }
+ record = oci.util.to_dict(adb)
+ record['deep_link'] = self.__generate_csv_hyperlink(deep_link, adb.display_name)
+ record['error'] = ""
+ self.__autonomous_databases.append(record)
except Exception as e:
- record = {
- "id": "",
- "display_name": "",
- "deep_link": "",
- "apex_details": "",
- "are_primary_whitelisted_ips_used": "",
- "autonomous_container_database_id": "",
- "autonomous_maintenance_schedule_type": "",
- "available_upgrade_versions": "",
- "backup_config": "",
- "compartment_id": "",
- "connection_strings": "",
- "connection_urls": "",
- "cpu_core_count": "",
- "customer_contacts": "",
- "data_safe_status": "",
- "data_storage_size_in_gbs": "",
- "data_storage_size_in_tbs": "",
- "database_management_status": "",
- "dataguard_region_type": "",
- "db_name": "",
- "db_version": "",
- "db_workload": "",
- "defined_tags": "",
- "failed_data_recovery_in_seconds": "",
- "freeform_tags": "",
- "infrastructure_type": "",
- "is_access_control_enabled": "",
- "is_auto_scaling_enabled": "",
- "is_data_guard_enabled": "",
- "is_dedicated": "",
- "is_free_tier": "",
- "is_mtls_connection_required": "",
- "is_preview": "",
- "is_reconnect_clone_enabled": "",
- "is_refreshable_clone": "",
- "key_history_entry": "",
- "key_store_id": "",
- "key_store_wallet_name": "",
- "kms_key_id": "",
- "kms_key_lifecycle_details": "",
- "kms_key_version_id": "",
- "license_model": "",
- "lifecycle_details": "",
- "lifecycle_state": "",
- "nsg_ids": "",
- "ocpu_count": "",
- "open_mode": "",
- "operations_insights_status": "",
- "peer_db_ids": "",
- "permission_level": "",
- "private_endpoint": "",
- "private_endpoint_ip": "",
- "private_endpoint_label": "",
- "refreshable_mode": "",
- "refreshable_status": "",
- "role": "",
- "scheduled_operations": "",
- "service_console_url": "",
- "source_id": "",
- "standby_whitelisted_ips": "",
- "subnet_id": "",
- "supported_regions_to_clone_to": "",
- "system_tags": "",
- "time_created": "",
- "time_data_guard_role_changed": "",
- "time_deletion_of_free_autonomous_database": "",
- "time_local_data_guard_enabled": "",
- "time_maintenance_begin": "",
- "time_maintenance_end": "",
- "time_of_last_failover": "",
- "time_of_last_refresh": "",
- "time_of_last_refresh_point": "",
- "time_of_last_switchover": "",
- "time_of_next_refresh": "",
- "time_reclamation_of_free_autonomous_database": "",
- "time_until_reconnect_clone_enabled": "",
- "used_data_storage_size_in_tbs": "",
- "vault_id": "",
- "whitelisted_ips": "",
- "region": region_key,
- "notes": str(e)
- }
- self.__autonomous_databases.append(record)
+ record = oci.util.to_dict(adb)
+ record['deep_link'] = self.__generate_csv_hyperlink(deep_link, adb.display_name)
+ record['error'] = str(e)
+ self.__autonomous_databases.append(record)
print("\tProcessed " + str(len(self.__autonomous_databases)) + " Autonomous Databases")
return self.__autonomous_databases
except Exception as e:
- raise RuntimeError("Error in __adb_read_adbs " + str(e.args))
+ print("Error in __adb_read_adbs " + str(e.args))
+ self.__errors.append({'id' : '__adb_read_adbs', 'error' : str(e)})
############################################
# Load Oracle Integration Cloud
@@ -3138,6 +2924,7 @@ def __events_read_event_rules(self):
record = {
"compartment_id": event_rule.compartment_id,
"condition": event_rule.additional_details['condition'],
+ "actions": event_rule.additional_details['actionsDetails'],
"description": event_rule.additional_details['description'],
"display_name": event_rule.display_name,
"deep_link": self.__generate_csv_hyperlink(deep_link, event_rule.display_name),
@@ -3306,7 +3093,7 @@ def __kms_read_keys(self):
wrapping_key_id = self.__vaults[vault]['kms_client'].get_wrapping_key().data.id
debug("\t__kms_read_keys: Succeeded Adding Wrapping Key Id: " + str(wrapping_key_id))
self.__vaults[vault]['wrapping_key_id'] = wrapping_key_id
- except Exception as e:
+ except Exception:
debug("\t__kms_read_keys: Failed Adding Wrapping Key Id for vault: " + str(vault))
self.__vaults[vault]['wrapping_key_id'] = None
@@ -3506,6 +3293,7 @@ def __identity_read_tenancy_password_policy(self):
# Oracle Notifications Services for Subscriptions
##########################################################################
def __ons_read_subscriptions(self):
+ debug("__ons_read_subscriptions: Starting: ")
try:
for region_key, region_values in self.__regions.items():
# Iterate through compartments to get all subscriptions
@@ -3514,7 +3302,7 @@ def __ons_read_subscriptions(self):
search_details=oci.resource_search.models.StructuredSearchDetails(
query="query OnsSubscription resources return allAdditionalFields where compartmentId != '" + self.__managed_paas_compartment_id + "'")
).data
-
+ debug("\t__ons_read_subscriptions: Recieved " + str(len(subs_data)) + " subscriptions in region " + str(region_key))
for sub in subs_data:
deep_link = self.__oci_onssub_uri + sub.identifier + '?region=' + region_key
record = {
@@ -3570,9 +3358,8 @@ def __identity_read_tag_defaults(self):
return self.__tag_defaults
except Exception as e:
- raise RuntimeError(
- "Error in __identity_read_tag_defaults " + str(e.args))
-
+ print("Error in __identity_read_tag_defaults " + str(e.args))
+ self.__errors.append({'id' : '__identity_read_tag_defaults', 'error' : str(e)})
##########################################################################
# Get Service Connectors
##########################################################################
@@ -3690,7 +3477,7 @@ def __search_resources_in_root_compartment(self):
"region": region_key
}
self.cis_foundations_benchmark_2_0['6.2']['Total'].append(record)
- except:
+ except Exception:
self.__errors.append({"id": "search_resources_in_root_compartment Invalid OCID", "error" : str(item)})
debug(f'__search_resources_in_root_compartment: Invalid OCID: {str(item)}')
@@ -3715,7 +3502,8 @@ def __search_query_resource_type(self, resource_type, search_client):
).data
return oci.util.to_dict(results)
- except Exception as e:
+ except Exception:
+ debug("__search_query_resource_type: failed to get type: " + str(resource_type))
return []
##########################################################################
@@ -3782,11 +3570,71 @@ def __core_instance_read_compute(self):
raise RuntimeError("Error in __core_instance_read_compute " + str(e.args))
+ ##########################################################################
+ # Returns a region name for a region key
+ # Takes: region key
+ ##########################################################################
+ def __get_region_name_from_key(self,region_key):
+ debug("__get_region_name_from_key")
+ for key, region_values in self.__regions.items():
+ if region_values['region_key'].upper() == region_key.upper() or region_values['region_name'].upper() == region_key.upper():
+ return region_values['region_name']
+
+ ##########################################################################
+ # Query All certificates in the tenancy
+ ##########################################################################
+ def __certificates_read_certificates(self):
+ debug("__certificates_read_certificates")
+ try:
+ for region_key, region_values in self.__regions.items():
+ certificates_data = oci.pagination.list_call_get_all_results(
+ region_values['search_client'].search_resources,
+ search_details=oci.resource_search.models.StructuredSearchDetails(
+ query="query certificate resources return allAdditionalFields")
+ ).data
+ cert_compartments = {}
+ debug("\t__certificates_read_certificates: Got Ceritificates from ")
+
+ for certificate in certificates_data:
+ cert_compartments[certificate.compartment_id] = certificate.compartment_id
+
+ for compartment in cert_compartments:
+ certs = oci.pagination.list_call_get_all_results(
+ region_values['certificate_client'].list_certificates,
+ compartment_id=compartment).data
+ for cert in certs:
+ record = oci.util.to_dict(cert)
+ debug("\t__certificates_read_certificates: Coverted Certificate Object to Dict")
+
+ region_id = record['id'].split(".")[3]
+ debug("\t__certificates_read_certificates: Got region id")
+
+ region_name = self.__get_region_name_from_key(region_id)
+ deep_link = self.__oci_cert_uri + record['id'] + "?region=" + region_name
+ record['deep_link'] = self.__generate_csv_hyperlink(deep_link, record['name'])
+ record['region'] = region_name
+ debug("\t__certificates_read_certificates: Added region name and deeplink to certificate record.")
+ self.__raw_oci_certificates.append(record)
+ except Exception as e:
+ debug("__certificates_read_certificates failed to process: " + str(e))
+ print("\tProcessed " + str(len(self.__raw_oci_certificates)) + " Certificates")
+
##########################################################################
# Analyzes Tenancy Data for CIS Report
##########################################################################
def __report_cis_analyze_tenancy_data(self):
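+ # Track per-region results for regional CIS checks; a check passes only if every region satisfies it.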
+ self.__cis_regional_findings_data = {}
+
+ for check in self.__cis_regional_checks:
+ self.__cis_regional_findings_data[check] = {}
+ for region_key, region_values in self.__regions.items():
+ self.__cis_regional_findings_data[check][region_key] = None
+
# 1.1 Check - Checking for policy statements that are not restricted to a service
for policy in self.__policies:
@@ -4212,22 +4060,40 @@ def __report_cis_analyze_tenancy_data(self):
# Iterate through all event rules
for event in self.__event_rules:
# Convert Event Condition to dict
- jsonable_str = event['condition'].lower().replace("'", "\"")
+ eventtype_jsonable_str = event['condition'].lower().replace("'", "\"")
try:
- event_dict = json.loads(jsonable_str)
+ eventtype_dict = json.loads(eventtype_jsonable_str)
except Exception:
print("*** Invalid Event Condition for event (not in JSON format): " + event['display_name'] + " ***")
- event_dict = {}
- # Issue 256: 'eventtype' not in event_dict (i.e. missing in event condition)
- if event_dict and 'eventtype' in event_dict:
+ eventtype_dict = {}
+ # Issue 256: 'eventtype' not in eventtype_dict (i.e. missing in event condition)
+ if eventtype_dict and 'eventtype' in eventtype_dict:
for key, changes in self.cis_monitoring_checks.items():
# Checking if all cis change list is a subset of event condition
try:
- if (all(x in event_dict['eventtype'] for x in changes)):
+ # Checking if each region has the required events
+ if (all(x in eventtype_dict['eventtype'] for x in changes)) and key in self.__cis_regional_checks:
+ self.__cis_regional_findings_data[key][event['region']] = True
+
+ # Cloud Guard Check is only required in the Cloud Guard Reporting Region
+ elif key == "4.15" and event['region'] == self.__cloud_guard_config.reporting_region and \
+ (all(x in eventtype_dict['eventtype'] for x in changes)):
self.cis_foundations_benchmark_2_0[key]['Status'] = True
+
+ # For Checks that are home region based checking those
+ elif (all(x in eventtype_dict['eventtype'] for x in changes)) and \
+ key not in self.__cis_regional_checks and event['region'] == self.__home_region:
+ self.cis_foundations_benchmark_2_0[key]['Status'] = True
+
except Exception:
print("*** Invalid Event Data for event: " + event['display_name'] + " ***")
+
+ # Regional checks pass only when every subscribed region has the required event rule
+ for key, findings in self.__cis_regional_findings_data.items():
+ if all(findings.values()):
+ self.cis_foundations_benchmark_2_0[key]['Status'] = True
+
# CIS Check 4.13 - VCN FlowLog enable
# Generate list of subnets IDs
for subnet in self.__network_subnets:
@@ -4259,7 +4125,7 @@ def __report_cis_analyze_tenancy_data(self):
self.cis_foundations_benchmark_2_0['4.16']['Status'] = False
self.cis_foundations_benchmark_2_0['4.16']['Findings'].append(
key)
- except:
+ except Exception:
self.cis_foundations_benchmark_2_0['4.16']['Status'] = False
self.cis_foundations_benchmark_2_0['4.16']['Findings'].append(
key)
@@ -4299,7 +4165,7 @@ def __report_cis_analyze_tenancy_data(self):
bucket)
self.cis_foundations_benchmark_2_0['5.1.3']['Status'] = False
- # CIS Check 4.1.1,4.1.2,4.1.3 Total - Adding All Buckets to total
+ # CIS Check 5.1.1,5.1.2,5.1.3 Total - Adding All Buckets to total
self.cis_foundations_benchmark_2_0['5.1.1']['Total'] = self.__buckets
self.cis_foundations_benchmark_2_0['5.1.2']['Total'] = self.__buckets
self.cis_foundations_benchmark_2_0['5.1.3']['Total'] = self.__buckets
@@ -4859,6 +4725,28 @@ def __obp_analyze_tenancy_data(self):
else:
self.obp_foundations_checks['Cloud_Guard_Config']['Findings'].append(cloud_guard_record)
+ #######################################
+ # Certificate Expiry Check
+ #######################################
+
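+ # A certificate passes when its not-after timestamp is on or after cert_key_time_max_datetime; otherwise it is a finding.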
+ for cert in self.__raw_oci_certificates:
+ debug("\t__obp_analyze_tenancy_data: Iterating through certificates")
+
+ try:
+ if cert['current_version_summary']['validity'] and \
+ datetime.datetime.strptime(self.get_date_iso_format(cert['current_version_summary']['validity']['time_of_validity_not_after']), self.__iso_time_format) >= self.cert_key_time_max_datetime:
+ self.obp_foundations_checks['Certificates_Near_Expiry']['OBP'].append(cert)
+ else:
+ self.obp_foundations_checks['Certificates_Near_Expiry']['Findings'].append(cert)
+ except Exception:
+ debug("\t__obp_analyze_tenancy_data: Certificate is missing time of validity not after" + cert['name'])
+ self.obp_foundations_checks['Certificates_Near_Expiry']['Findings'].append(cert)
+
+ if self.obp_foundations_checks['Certificates_Near_Expiry']['Findings']:
+ self.obp_foundations_checks['Certificates_Near_Expiry']['Status'] = False
+ else:
+ self.obp_foundations_checks['Certificates_Near_Expiry']['Status'] = True
+
##########################################################################
# Orchestrates data collection and CIS report generation
##########################################################################
@@ -5182,7 +5070,7 @@ def __report_generate_html_summary_report(self, header, file_subject, data):
if item_value != "":
html_file.write(f"{item_key.title()}
")
if item_key == 'Observation':
- if fing['Status'] == None:
+ if fing['Status'] is None:
pfx = 'Manually check for'
else:
num_findings = len(fing['Findings'])
@@ -5342,7 +5230,8 @@ def __collect_tenancy_data(self):
self.__block_volume_read_block_volumes,
self.__boot_volume_read_boot_volumes,
self.__fss_read_fsss,
- self.__core_instance_read_compute
+ self.__core_instance_read_compute,
+ self.__certificates_read_certificates
]
# Oracle Best practice functions
@@ -5418,7 +5307,8 @@ def __report_generate_raw_data_output(self):
"cloud_guard_target": list(self.__cloud_guard_targets.values()),
"regions": self.__raw_regions,
"network_drg_attachments": list(itertools.chain.from_iterable(self.__network_drg_attachments.values())),
- "instances": self.__Instance
+ "instances": self.__Instance,
+ "certificates" : self.__raw_oci_certificates
}
for key in raw_csv_files:
rfn = self.__print_to_csv_file('raw_data', key, raw_csv_files[key])
@@ -5851,6 +5741,8 @@ def execute_report():
help='Uses Advanced Search Service to query all resources in the tenancy and outputs to a JSON. This also enables OCI Best Practice Checks (--obp) and All resource to csv (--raw) flags.')
parser.add_argument('--redact_output', action='store_true', default=False,
help='Redacts OCIDs in output CSV and JSON files.')
+ parser.add_argument('--deeplink-url-override', default=None, dest='oci_url',
+ help='Replaces the base OCI URL (https://cloud.oracle.com) for deeplinks (e.g. https://oc10.cloud.oracle.com).')
parser.add_argument('-ip', action='store_true', default=False,
dest='is_instance_principals', help='Use Instance Principals for Authentication.')
parser.add_argument('-dt', action='store_true', default=False,
@@ -5870,7 +5762,7 @@ def execute_report():
config, signer = create_signer(cmd.file_location, cmd.config_profile, cmd.is_instance_principals, cmd.is_delegation_token, cmd.is_security_token)
config['retry_strategy'] = oci.retry.DEFAULT_RETRY_STRATEGY
report = CIS_Report(config, signer, cmd.proxy, cmd.output_bucket, cmd.report_directory, cmd.report_prefix, cmd.report_summary_json, cmd.print_to_screen, \
- cmd.regions, cmd.raw, cmd.obp, cmd.redact_output, debug=cmd.debug, all_resources=cmd.all_resources)
+ cmd.regions, cmd.raw, cmd.obp, cmd.redact_output, oci_url=cmd.oci_url, debug=cmd.debug, all_resources=cmd.all_resources)
csv_report_directory = report.generate_reports(int(cmd.level))
try:
diff --git a/othertools/oci-fsdr/commonLib.py b/othertools/oci-fsdr/commonLib.py
new file mode 100644
index 000000000..beeecf9c1
--- /dev/null
+++ b/othertools/oci-fsdr/commonLib.py
@@ -0,0 +1,14 @@
+import re
+import json
+
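+# Extract the region code embedded in an OCID (oc1 realm only) and map it to a full region name via region_file.json.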
+def get_region_from_ocid(ocid, region_map):
+ match = re.search(r'oc1\.(.*?)\.', ocid)
+ if match:
+ region_code = match.group(1)
+ return region_map.get(region_code, 'unknown-region')
+ return 'unknown-region'
+
+def load_region_map(region_file):
+ with open(region_file, 'r') as f:
+ region_map = json.load(f)
+ return region_map
\ No newline at end of file
diff --git a/othertools/oci-fsdr/export_drplan.py b/othertools/oci-fsdr/export_drplan.py
new file mode 100644
index 000000000..c11b19e0e
--- /dev/null
+++ b/othertools/oci-fsdr/export_drplan.py
@@ -0,0 +1,284 @@
+import oci
+import pandas as pd
+import os
+from openpyxl import load_workbook
+from openpyxl.utils import column_index_from_string
+from openpyxl.styles import Alignment, PatternFill, Font
+import argparse
+from commonLib import *
+
+parser = argparse.ArgumentParser()
+parser.add_argument("-o", "--ocid", help="Provide the DR Plan OCID")
+parser.add_argument("-s", "--sheet", help="Provide the sheet name under which the value is stored")
+parser.add_argument("-f", "--file", help="Provide name of the file to be created/updated")
+parser.add_argument("-c", "--config", help="API_KEY")
+parser.add_argument("-i", "--instance_principal", help="INSTANCE_PRINCIPAL", nargs='?', const=1, type=int)
+parser.add_argument("-t", "--session_token", help="SESSION_TOKEN", nargs='?', const=1, type=int)
+args = parser.parse_args()
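+# Example invocation (hypothetical OCID and file names):
+# python export_drplan.py -o <dr_plan_ocid> -s "DR-Plan" -f dr_plan.xlsx -c ~/.oci/config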
+
+try:
+ region_file = os.path.dirname(os.path.abspath(__file__))+"/region_file.json"
+ region_map = load_region_map(region_file)
+ region = get_region_from_ocid(args.ocid, region_map)
+except Exception as e:
+ print(f"Error loading region map: {str(e)}")
+ exit(1)
+
+try:
+ config = oci.config.from_file(file_location=args.config)
+except Exception as e:
+ print(f"Error loading OCI config: {str(e)}")
+ print(".....Exiting!!!")
+ exit(1)
+
+if args.ocid:
+ config['region'] = region
+
+try:
+ if args.instance_principal == 1:
+ signer = oci.auth.signers.InstancePrincipalsSecurityTokenSigner()
+ elif args.session_token:
+ token_file = config['security_token_file']
+ token = None
+ with open(token_file, 'r') as f:
+ token = f.read()
+
+ private_key = oci.signer.load_private_key_from_file(config['key_file'])
+ signer = oci.auth.signers.SecurityTokenSigner(token, private_key)
+ elif args.config != '':
+ signer = oci.signer.Signer(config['tenancy'], config['user'], config['fingerprint'], config['key_file'])
+except Exception as e:
+ print(f"Error creating signer: {str(e)}")
+ exit(1)
+
+try:
+ # Get DR Plan
+ disaster_recovery_client = oci.disaster_recovery.DisasterRecoveryClient(
+ config=config, retry_strategy=oci.retry.DEFAULT_RETRY_STRATEGY, signer=signer)
+ get_dr_plan_response = disaster_recovery_client.get_dr_plan(dr_plan_id=args.ocid)
+ plan_groups = get_dr_plan_response.data.plan_groups
+ # Extract the order of plan groups
+ original_order = [pg.id for pg in plan_groups]
+
+ # Manually convert DrPlanGroup objects to dictionaries
+ plan_dicts = []
+ for pg in plan_groups:
+ steps = []
+ for step in pg.steps:
+ step_dict = {
+ 'display_name': step.display_name,
+ 'error_mode': step.error_mode,
+ 'id': step.id,
+ 'is_enabled': step.is_enabled,
+ 'timeout': step.timeout,
+ 'type': step.type,
+ }
+ if hasattr(step, 'user_defined_step') and step.user_defined_step:
+ user_defined_step = {
+ 'step_type': step.user_defined_step.step_type,
+ 'run_as_user': getattr(step.user_defined_step, 'run_as_user', None),
+ 'run_on_instance_id': getattr(step.user_defined_step, 'run_on_instance_id', None),
+ 'function_id': getattr(step.user_defined_step, 'function_id', None),
+ 'function_region': getattr(step.user_defined_step, 'function_region', None),
+ 'request_body': getattr(step.user_defined_step, 'request_body', None),
+ 'object_storage_script_location': {
+ 'bucket': getattr(step.user_defined_step.object_storage_script_location, 'bucket', None),
+ 'namespace': getattr(step.user_defined_step.object_storage_script_location, 'namespace', None),
+ 'object': getattr(step.user_defined_step.object_storage_script_location, 'object', None)
+ } if getattr(step.user_defined_step, 'object_storage_script_location', None) else None,
+ 'run_on_instance_region': getattr(step.user_defined_step, 'run_on_instance_region', None),
+ 'script_command': getattr(step.user_defined_step, 'script_command', None)
+ }
+ step_dict['user_defined_step'] = user_defined_step
+ steps.append(step_dict)
+ plan_dicts.append({
+ 'display_name': pg.display_name,
+ 'id': pg.id,
+ 'type': pg.type,
+ 'steps': steps
+ })
+
+ # Convert the parsed plan data to a DataFrame
+ df = pd.json_normalize(plan_dicts)
+
+ # Split the data into two parts based on the "type" value
+ built_in_df = df[df['type'] == 'BUILT_IN']
+ other_df = df[df['type'] != 'BUILT_IN']
+
+ # Function to normalize and reformat data
+ def normalize_and_reformat(df):
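+ # Flatten each plan group's steps into one row per step, carrying the group's display_name, id and type.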
+ dict_list_orient = df.to_dict('records')
+ normalized_data = pd.json_normalize(dict_list_orient, "steps", ['display_name', 'id', 'type'], record_prefix='steps.')
+ columns_order = [
+ 'display_name', 'id', 'steps.display_name', 'steps.error_mode', 'steps.id', 'steps.is_enabled',
+ 'steps.timeout', 'steps.type', 'type'
+ ]
+ normalized_data = normalized_data.reindex(columns=columns_order, fill_value=None)
+ return normalized_data
+
+ def normalize_other_data(df):
+ dict_list_orient = df.to_dict('records')
+ normalized_data = pd.json_normalize(dict_list_orient, "steps", ['display_name', 'id', 'type'], record_prefix='steps.')
+ columns_order = [
+ 'display_name', 'id', 'steps.display_name', 'steps.error_mode', 'steps.id', 'steps.is_enabled',
+ 'steps.timeout', 'steps.type', 'steps.user_defined_step.step_type',
+ 'steps.user_defined_step.run_as_user', 'steps.user_defined_step.run_on_instance_id',
+ 'steps.user_defined_step.function_id', 'steps.user_defined_step.function_region', 'steps.user_defined_step.request_body',
+ 'steps.user_defined_step.object_storage_script_location.bucket', 'steps.user_defined_step.object_storage_script_location.namespace', 'steps.user_defined_step.object_storage_script_location.object',
+ 'steps.user_defined_step.run_on_instance_region', 'steps.user_defined_step.script_command', 'type'
+ ]
+ normalized_data = normalized_data.reindex(columns=columns_order, fill_value=None)
+ return normalized_data
+
+ # Normalize and reformat both subsets of data
+ built_in_data = normalize_and_reformat(built_in_df)
+ other_data = normalize_other_data(other_df)
+
+ # Append both subsets of data into one DataFrame
+ combined_data = pd.concat([other_data, built_in_data], ignore_index=True)
+
+ # Sort the combined data based on the original order
+ combined_data['sort_order'] = pd.Categorical(combined_data['id'], categories=original_order, ordered=True)
+ combined_data.sort_values('sort_order', inplace=True)
+ combined_data.drop(columns=['sort_order'], inplace=True)
+
+ # Write the combined data to an Excel file
+ excel_file = args.file
+ sheet = args.sheet
+ if sheet.startswith('"') and sheet.endswith('"'):
+ sheet = sheet[1:-1]
+
+ # Check if the file exists and the sheet exists
+ if os.path.exists(excel_file):
+ wb = load_workbook(excel_file)
+ if sheet in wb.sheetnames:
+ with pd.ExcelWriter(excel_file, engine='openpyxl', mode='a', if_sheet_exists='replace') as writer:
+ print(f"Writing to sheet: {sheet}")
+ combined_data.to_excel(writer, sheet_name=sheet, index=False)
+ worksheet = writer.sheets[sheet]
+ else:
+ with pd.ExcelWriter(excel_file, engine='openpyxl', mode='a') as writer:
+ print(f"Writing to sheet: {sheet}")
+ combined_data.to_excel(writer, sheet_name=sheet, index=False)
+ worksheet = writer.sheets[sheet]
+ else:
+ with pd.ExcelWriter(excel_file, engine='openpyxl', mode='w') as writer:
+ print(f"Writing to Excel file: {excel_file} and sheet: {sheet}")
+ combined_data.to_excel(writer, sheet_name=sheet, index=False)
+ worksheet = writer.sheets[sheet]
+
+ wb = load_workbook(excel_file)
+ ws = wb[sheet]
+
+
+ def merge_and_center(ws, col):
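+ # Merge vertical runs of identical values in the given column and center the merged cell.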
+ max_row = ws.max_row
+ for row in range(2, max_row + 1):
+ cell_value = ws.cell(row=row, column=col).value
+ start_row = row
+ while row <= max_row and ws.cell(row=row, column=col).value == cell_value:
+ row += 1
+ end_row = row - 1
+ if start_row != end_row:
+ ws.merge_cells(start_row=start_row, start_column=col, end_row=end_row, end_column=col)
+ merged_cell = ws.cell(row=start_row, column=col)
+ merged_cell.alignment = Alignment(horizontal='center', vertical='center')
+
+
+ columns_to_merge = ['A', 'B']
+
+ for col in columns_to_merge:
+ col_index = column_index_from_string(col)
+ merge_and_center(ws, col_index)
+
+ # Define fill colors
+ fill_blue = PatternFill(start_color="346EC9", end_color="346EC9", fill_type="solid")
+ fill_purple = PatternFill(start_color="858491", end_color="858491", fill_type="solid")
+ font_white = Font(color="FFFFFF", bold=True)
+
+ header_cells = ws[1]
+ for cell in header_cells:
+ if cell.column_letter in ['A', 'B', 'T']:
+ cell.fill = fill_blue
+ cell.font = font_white
+ else:
+ cell.fill = fill_purple
+ cell.font = font_white
+
+ # Auto-adjust column widths
+ for col in ws.columns:
+ max_length = 0
+ column = col[0].column_letter
+ for cell in col:
+ try:
+ if len(str(cell.value)) > max_length:
+ max_length = len(str(cell.value))
+ except Exception:
+ pass
+ adjusted_width = (max_length + 2)
+ ws.column_dimensions[column].width = adjusted_width
+
+ # Save the modified workbook
+ wb.save(excel_file)
+ print("Excel file updated successfully.")
+
+ if "Readme" not in wb.sheetnames:
+ readme_sheet = wb.create_sheet(title="Readme")
+ readme_content = """
+ Instructions to update columns in Excel sheet
+
+ For a new plan step, update the row values as below:
+ - id, steps.id : leave these row values empty
+ - display_name : Display name for the plan group (mandatory)
+ - steps.display_name : Display name for the step (mandatory)
+ - steps.error_mode : STOP_ON_ERROR/CONTINUE_ON_ERROR (mandatory)
+ - steps.is_enabled : TRUE/FALSE (mandatory)
+ - steps.timeout : timeout value in seconds (mandatory)
+ - type : USER_DEFINED (mandatory)
+ - steps.user_defined_step.step_type : RUN_LOCAL_SCRIPT/RUN_OBJECTSTORE_SCRIPT/INVOKE_FUNCTION
+
+ Based on the step type above, fill in the row values as follows:
+ RUN_LOCAL_SCRIPT:
+ - steps.user_defined_step.run_as_user, (description: user as which the script needs to run)
+ - steps.user_defined_step.run_on_instance_id, (description: Instance OCID where the script is located)
+ - steps.user_defined_step.script_command (description: script command which needs to run)
+ RUN_OBJECTSTORE_SCRIPT:
+ - steps.user_defined_step.run_on_instance_id, (description: Instance OCID where the script is located)
+ - steps.user_defined_step.object_storage_script_location.bucket, (description: OCI bucket name)
+ - steps.user_defined_step.object_storage_script_location.namespace, (description: OCI bucket namespace name)
+ - steps.user_defined_step.object_storage_script_location.object, (description: script name)
+ - steps.user_defined_step.run_on_instance_region, (description: Instance region name)
+ - steps.user_defined_step.script_command (description: script command which needs to run)
+ INVOKE_FUNCTION:
+ - steps.user_defined_step.function_id (description: OCI Function OCID which needs to be invoked)
+ - steps.user_defined_step.function_region (description: OCI Function region)
+ - steps.user_defined_step.request_body (description: OCI Function request body)
+ """
+
+ # Insert the content into a single cell (A1)
+ readme_sheet["A1"] = readme_content.strip()
+
+ # Expand the row height to accommodate the text
+ readme_sheet.row_dimensions[1].height = 750 # You can adjust this value
+
+ # Auto-adjust column width to fit the content
+ readme_sheet.column_dimensions['A'].width = 150 # You can adjust this value
+
+ # Set text wrapping for the cell
+ readme_sheet["A1"].alignment = Alignment(wrap_text=True, vertical='top')
+ readme_sheet["A1"].font = Font(size=14, color="FFFFFF", bold=True) # Set font size to 14 and color to white
+ readme_sheet["A1"].fill = PatternFill(start_color="346EC9", end_color="346EC9",
+ fill_type="solid") # Set background to blue
+ readme_index = wb.sheetnames.index("Readme")
+ wb._sheets.insert(0, wb._sheets.pop(readme_index))
+
+ # Save the workbook with the new Readme sheet
+ wb.save(excel_file)
+
+ wb.close()
+
+except Exception as e:
+ print(f"Error: {str(e)}")
diff --git a/othertools/oci-fsdr/oci-fsdr-plan-template.xlsx b/othertools/oci-fsdr/oci-fsdr-plan-template.xlsx
new file mode 100644
index 000000000..be22a2d89
Binary files /dev/null and b/othertools/oci-fsdr/oci-fsdr-plan-template.xlsx differ
diff --git a/othertools/oci-fsdr/region_file.json b/othertools/oci-fsdr/region_file.json
new file mode 100644
index 000000000..2d970e31d
--- /dev/null
+++ b/othertools/oci-fsdr/region_file.json
@@ -0,0 +1,41 @@
+{
+ "phx": "us-phoenix-1",
+ "syd": "ap-sydney-1",
+ "mel": "ap-melbourne-1",
+ "gru": "sa-saopaulo-1",
+ "vcp": "sa-vinhedo-1",
+ "yul": "ca-montreal-1",
+ "yyz": "ca-toronto-1",
+ "scl": "sa-santiago-1",
+ "vap": "sa-valparaiso-1",
+ "bog": "sa-bogota-1",
+ "cdg": "eu-paris-1",
+ "mrs": "eu-marseille-1",
+ "fra": "eu-frankfurt-1",
+ "hyd": "ap-hyderabad-1",
+ "bom": "ap-mumbai-1",
+ "mtz": "il-jerusalem-1",
+ "lin": "eu-milan-1",
+ "kix": "ap-osaka-1",
+ "nrt": "ap-tokyo-1",
+ "qro": "mx-queretaro-1",
+ "mty": "mx-monterrey-1",
+ "ams": "eu-amsterdam-1",
+ "jed": "me-jeddah-1",
+ "beg": "eu-jovanovac-1",
+ "sin": "ap-singapore-1",
+ "xsp": "ap-singapore-2",
+ "jnb": "af-johannesburg-1",
+ "icn": "ap-seoul-1",
+ "yny": "ap-chuncheon-1",
+ "mad": "eu-madrid-1",
+ "arn": "eu-stockholm-1",
+ "zrh": "eu-zurich-1",
+ "auh": "me-abudhabi-1",
+ "dxb": "me-dubai-1",
+ "lhr": "uk-london-1",
+ "cwl": "uk-cardiff-1",
+ "iad": "us-ashburn-1",
+ "ord": "us-chicago-1",
+ "sjc": "us-sanjose-1"
+}
diff --git a/othertools/oci-fsdr/update_drplan.py b/othertools/oci-fsdr/update_drplan.py
new file mode 100644
index 000000000..ea21c8fd4
--- /dev/null
+++ b/othertools/oci-fsdr/update_drplan.py
@@ -0,0 +1,368 @@
+import oci
+import openpyxl
+import argparse
+import os
+from commonLib import *
+
+parser = argparse.ArgumentParser()
+parser.add_argument("-o", "--ocid", help="Provide the DR Plan OCID")
+parser.add_argument("-s", "--sheet", help="Provide the sheet name under which the value is stored")
+parser.add_argument("-f", "--file", help="Provide name of the file to be created/updated")
+parser.add_argument("-c", "--config", help="API_KEY")
+parser.add_argument("-i", "--instance_principal", help="INSTANCE_PRINCIPAL", nargs='?', const=1, type=int)
+parser.add_argument("-t", "--session_token", help="SESSION_TOKEN", nargs='?', const=1, type=int)
+
+args = parser.parse_args()
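+# Example invocation (hypothetical OCID and file names):
+# python update_drplan.py -o <dr_plan_ocid> -s "DR-Plan" -f dr_plan.xlsx -c ~/.oci/config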
+
+try:
+ region_file = os.path.dirname(os.path.abspath(__file__)) + "/region_file.json"
+ region_map = load_region_map(region_file)
+ region = get_region_from_ocid(args.ocid, region_map)
+except Exception as e:
+ print(f"Error loading region map or determining region from OCID: {str(e)}")
+ print(".....Exiting!!!")
+ exit(1)
+
+try:
+ config = oci.config.from_file(file_location=args.config)
+ if args.ocid:
+ config['region'] = region
+except Exception as e:
+ print(f"Error loading OCI config: {str(e)}")
+ print(".....Exiting!!!")
+ exit(1)
+
+try:
+ if args.instance_principal == 1:
+ signer = oci.auth.signers.InstancePrincipalsSecurityTokenSigner()
+ elif args.session_token:
+ token_file = config['security_token_file']
+ token = None
+ with open(token_file, 'r') as f:
+ token = f.read()
+
+ private_key = oci.signer.load_private_key_from_file(config['key_file'])
+ signer = oci.auth.signers.SecurityTokenSigner(token, private_key)
+ elif args.config != '':
+ signer = oci.signer.Signer(config['tenancy'], config['user'], config['fingerprint'], config['key_file'])
+except Exception as e:
+ print(f"Error initializing signer: {str(e)}")
+ print(".....Exiting!!!")
+ exit(1)
+
+# Initialize Disaster Recovery client
+try:
+ disaster_recovery_client = oci.disaster_recovery.DisasterRecoveryClient(config=config, retry_strategy=oci.retry.DEFAULT_RETRY_STRATEGY, signer=signer)
+except Exception as e:
+ print(f"Error initializing Disaster Recovery client: {str(e)}")
+ print(".....Exiting!!!")
+ exit(1)
+
+# Function to get the actual value of a cell, considering merged cells
+def get_merged_cell_value(sheet, row, col):
+ cell = sheet.cell(row=row, column=col)
+ for merged_cell_range in sheet.merged_cells.ranges:
+ if cell.coordinate in merged_cell_range:
+ merged_cell = sheet.cell(row=merged_cell_range.min_row, column=merged_cell_range.min_col)
+ return merged_cell.value
+ return cell.value
+
+# Define functions for plan management with appropriate error handling
+def new_plan(row, plan_groups_dict):
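+ # Row columns follow the exported sheet: 0=group name, 1=group id, 2-6=step fields, 8-18=user-defined step fields, 19=type.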
+ try:
+ plan_group_display_name = str(row[0])
+ id = None
+ step_display_name = str(row[2])
+ step_error_mode = row[3]
+ s_id = None
+ step_is_enabled = row[5]
+ timeout = row[6]
+ step_type = row[8]
+ run_as_user = row[9]
+ run_on_instance_id = row[10]
+ function_id = row[11]
+ function_region = row[12]
+ request_body = row[13]
+ bucket = row[14]
+ namespace = row[15]
+ bucket_object = row[16]
+ instance_region = row[17]
+ script_command = row[18]
+
+ if step_type in ["RUN_LOCAL_SCRIPT", "RUN_OBJECTSTORE_SCRIPT", "INVOKE_FUNCTION"]:
+ type = 'USER_DEFINED'
+ else:
+ raise ValueError(f"Invalid step_type: {step_type}. Must be one of RUN_LOCAL_SCRIPT, RUN_OBJECTSTORE_SCRIPT, INVOKE_FUNCTION")
+
+ valid_step_types = [
+ 'RUN_OBJECTSTORE_SCRIPT_PRECHECK',
+ 'RUN_LOCAL_SCRIPT_PRECHECK',
+ 'INVOKE_FUNCTION_PRECHECK',
+ 'RUN_OBJECTSTORE_SCRIPT',
+ 'RUN_LOCAL_SCRIPT',
+ 'INVOKE_FUNCTION'
+ ]
+
+ if step_type not in valid_step_types:
+ raise ValueError(f"Invalid step_type: {step_type}. Must be one of {valid_step_types}")
+
+ if plan_group_display_name in plan_groups_dict:
+ plan_group_details = plan_groups_dict[plan_group_display_name]
+ else:
+ plan_group_details = oci.disaster_recovery.models.UpdateDrPlanGroupDetails(
+ display_name=plan_group_display_name,
+ id=id,
+ type=type,
+ steps=[]
+ )
+ plan_groups_dict[plan_group_display_name] = plan_group_details
+
+ if step_type == "RUN_LOCAL_SCRIPT":
+ step_details = oci.disaster_recovery.models.UpdateDrPlanStepDetails(
+ display_name=step_display_name,
+ error_mode=step_error_mode,
+ id=s_id,
+ timeout=timeout,
+ is_enabled=step_is_enabled,
+ user_defined_step=oci.disaster_recovery.models.UpdateRunLocalScriptUserDefinedStepDetails(
+ step_type=step_type,
+ run_on_instance_id=run_on_instance_id,
+ run_as_user=run_as_user,
+ script_command=script_command
+ )
+ )
+ elif step_type == "RUN_OBJECTSTORE_SCRIPT":
+ step_details = oci.disaster_recovery.models.UpdateDrPlanStepDetails(
+ display_name=step_display_name,
+ error_mode=step_error_mode,
+ id=s_id,
+ timeout=timeout,
+ is_enabled=step_is_enabled,
+ user_defined_step=oci.disaster_recovery.models.UpdateRunObjectStoreScriptUserDefinedStepDetails(
+ step_type=step_type,
+ run_on_instance_id=run_on_instance_id,
+ object_storage_script_location=oci.disaster_recovery.models.UpdateObjectStorageScriptLocationDetails(
+ bucket=bucket,
+ namespace=namespace,
+ object=bucket_object
+ )
+ )
+ )
+ elif step_type == "INVOKE_FUNCTION":
+ step_details = oci.disaster_recovery.models.UpdateDrPlanStepDetails(
+ display_name=step_display_name,
+ error_mode=step_error_mode,
+ id=s_id,
+ timeout=timeout,
+ is_enabled=step_is_enabled,
+ user_defined_step=oci.disaster_recovery.models.UpdateInvokeFunctionUserDefinedStepDetails(
+ step_type=step_type,
+ function_id=function_id,
+ request_body=request_body
+ )
+ )
+ else:
+ raise ValueError(f"Invalid step_type: {step_type}. Must be one of RUN_LOCAL_SCRIPT, RUN_OBJECTSTORE_SCRIPT, INVOKE_FUNCTION")
+
+ if step_details not in plan_group_details.steps:
+ plan_group_details.steps.append(step_details)
+
+ return plan_groups_dict, plan_group_details
+ except Exception as e:
+ print(f"Error in new_plan function: {str(e)}")
+ exit(1)
+
+def existing_plan(row, plan_groups_dict):
+ try:
+ plan_group_display_name = str(row[0])
+ id = str(row[1])
+ step_display_name = str(row[2])
+ step_error_mode = row[3]
+ s_id = str(row[4])
+ step_is_enabled = row[5]
+ timeout = row[6]
+ step_type = row[8]
+ run_as_user = row[9]
+ run_on_instance_id = row[10]
+ function_id = row[11]
+ function_region = row[12]
+ request_body = row[13]
+ bucket = row[14]
+ namespace = row[15]
+ bucket_object = row[16]
+ instance_region = row[17]
+ script_command = row[18]
+
+ if step_type in ["RUN_LOCAL_SCRIPT", "RUN_OBJECTSTORE_SCRIPT", "INVOKE_FUNCTION"]:
+ type = 'USER_DEFINED'
+ else:
+ raise ValueError(f"Invalid step_type: {step_type}. Must be one of RUN_LOCAL_SCRIPT, RUN_OBJECTSTORE_SCRIPT, INVOKE_FUNCTION")
+
+ valid_step_types = [
+ 'RUN_OBJECTSTORE_SCRIPT_PRECHECK',
+ 'RUN_LOCAL_SCRIPT_PRECHECK',
+ 'INVOKE_FUNCTION_PRECHECK',
+ 'RUN_OBJECTSTORE_SCRIPT',
+ 'RUN_LOCAL_SCRIPT',
+ 'INVOKE_FUNCTION'
+ ]
+
+ if step_type not in valid_step_types:
+ raise ValueError(f"Invalid step_type: {step_type}. Must be one of {valid_step_types}")
+
+ if id in plan_groups_dict:
+ plan_group_details = plan_groups_dict[id]
+ else:
+ plan_group_details = oci.disaster_recovery.models.UpdateDrPlanGroupDetails(
+ display_name=plan_group_display_name,
+ id=id,
+ type=type,
+ steps=[]
+ )
+ plan_groups_dict[id] = plan_group_details
+
+ if step_type == "RUN_LOCAL_SCRIPT":
+ step_details = oci.disaster_recovery.models.UpdateDrPlanStepDetails(
+ display_name=step_display_name,
+ error_mode=step_error_mode,
+ id=s_id,
+ timeout=timeout,
+ is_enabled=step_is_enabled,
+ user_defined_step=oci.disaster_recovery.models.UpdateRunLocalScriptUserDefinedStepDetails(
+ step_type=step_type,
+ run_on_instance_id=run_on_instance_id,
+ run_as_user=run_as_user,
+ script_command=script_command
+ )
+ )
+ elif step_type == "RUN_OBJECTSTORE_SCRIPT":
+ step_details = oci.disaster_recovery.models.UpdateDrPlanStepDetails(
+ display_name=step_display_name,
+ error_mode=step_error_mode,
+ id=s_id,
+ timeout=timeout,
+ is_enabled=step_is_enabled,
+ user_defined_step=oci.disaster_recovery.models.UpdateRunObjectStoreScriptUserDefinedStepDetails(
+ step_type=step_type,
+ run_on_instance_id=run_on_instance_id,
+ object_storage_script_location=oci.disaster_recovery.models.UpdateObjectStorageScriptLocationDetails(
+ bucket=bucket,
+ namespace=namespace,
+ object=bucket_object
+ )
+ )
+ )
+ elif step_type == "INVOKE_FUNCTION":
+ step_details = oci.disaster_recovery.models.UpdateDrPlanStepDetails(
+ display_name=step_display_name,
+ error_mode=step_error_mode,
+ id=s_id,
+ timeout=timeout,
+ is_enabled=step_is_enabled,
+ user_defined_step=oci.disaster_recovery.models.UpdateInvokeFunctionUserDefinedStepDetails(
+ step_type=step_type,
+ function_id=function_id,
+ request_body=request_body
+ )
+ )
+ else:
+ raise ValueError(f"Invalid step_type: {step_type}. Must be one of RUN_LOCAL_SCRIPT, RUN_OBJECTSTORE_SCRIPT, INVOKE_FUNCTION")
+
+ if step_details not in plan_group_details.steps:
+ plan_group_details.steps.append(step_details)
+
+ return plan_groups_dict, plan_group_details
+ except Exception as e:
+ print(f"Error in existing_plan function: {str(e)}")
+ exit(1)
+
+def builtin_function(row, plan_groups_dict):
+ try:
+ plan_group_display_name = str(row[0])
+ id = str(row[1])
+ step_display_name = str(row[2])
+ step_error_mode = row[3]
+ s_id = row[4]
+ step_is_enabled = row[5]
+ timeout = row[6]
+ type = row[19]
+
+ valid_builtin_types = ['BUILT_IN', 'BUILT_IN_PRECHECK', 'USER_DEFINED']
+ if type not in valid_builtin_types:
+ raise ValueError(f"Invalid value for `type`: {type}. Must be one of {valid_builtin_types}")
+
+ if id in plan_groups_dict:
+ plan_group_details = plan_groups_dict[id]
+ else:
+ plan_group_details = oci.disaster_recovery.models.UpdateDrPlanGroupDetails(
+ display_name=plan_group_display_name,
+ id=id,
+ type=type,
+ steps=[]
+ )
+ plan_groups_dict[id] = plan_group_details
+
+ step_details = oci.disaster_recovery.models.UpdateDrPlanStepDetails(
+ display_name=step_display_name,
+ error_mode=step_error_mode,
+ id=s_id,
+ timeout=timeout,
+ is_enabled=step_is_enabled
+ )
+
+ if step_details not in plan_group_details.steps:
+ plan_group_details.steps.append(step_details)
+
+ return plan_groups_dict, plan_group_details
+ except Exception as e:
+ print(f"Error in builtin_function function: {str(e)}")
+ exit(1)
+
+try:
+ workbook = openpyxl.load_workbook(args.file)
+ sheet = args.sheet
+ if sheet.startswith('"') and sheet.endswith('"'):
+ sheet = sheet[1:-1]
+ sheet = workbook[sheet]
+except Exception as e:
+ print(f"Error loading Excel file or sheet: {str(e)}")
+ print(".....Exiting!!!")
+ exit(1)
+
+plan_groups_dict = {}
+ordered_plan_groups = []
+
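+# Walk the sheet: resolve merged cells so each row carries its group values, then route the row to new_plan, existing_plan, or builtin_function.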
+try:
+ for row in sheet.iter_rows(min_row=2, max_row=sheet.max_row):
+ row_values = [get_merged_cell_value(sheet, row[0].row, col) for col in range(1, sheet.max_column + 1)]
+ id_value = str(row_values[1])
+ type_value = str(row_values[19])
+
+ row_values = [None if val in ["None", None] else val for val in row_values]
+
+ if type_value == "USER_DEFINED":
+ if id_value == "None":
+ plan_groups_dict, plan_group_details = new_plan(row_values, plan_groups_dict)
+ else:
+ plan_groups_dict, plan_group_details = existing_plan(row_values, plan_groups_dict)
+ else:
+ plan_groups_dict, plan_group_details = builtin_function(row_values, plan_groups_dict)
+
+ ordered_plan_groups.append(plan_group_details)
+except Exception as e:
+ print(f"Error processing rows in Excel sheet: {str(e)}")
+ print(".....Exiting!!!")
+ exit(1)
+
+final_plan_groups = list(plan_groups_dict.values())
+
+try:
+ update_dr_plan_details = oci.disaster_recovery.models.UpdateDrPlanDetails(plan_groups=final_plan_groups)
+ update_dr_plan_response = disaster_recovery_client.update_dr_plan(
+ update_dr_plan_details=update_dr_plan_details,
+ dr_plan_id=args.ocid
+ )
+ print("Update to DR Plan " + args.ocid + "is successful")
+except Exception as e:
+ print(f"Error updating DR plan: {str(e)}")
+ print(".....Exiting!!!")
+ exit(1)