
To support tf count along with rosa-burner args #17


Closed · wants to merge 5 commits
20 changes: 20 additions & 0 deletions Dockerfile
@@ -0,0 +1,20 @@
FROM python:3.11 as runtime
USER root
RUN curl -L https://go.dev/dl/go1.18.2.linux-amd64.tar.gz -o go1.18.2.linux-amd64.tar.gz
RUN tar -C /usr/local -xzf go1.18.2.linux-amd64.tar.gz
ENV PATH=$PATH:/usr/local/go/bin
RUN python3 -m pip install --upgrade pip || true
RUN yes | pip3 install openshift --upgrade || true
RUN apt-get -y update
RUN apt-get -y install jq
RUN curl -L $(curl -s https://api.github.com/repos/openshift/rosa/releases/latest | jq -r ".assets[] | select(.name == \"rosa-linux-amd64\") | .browser_download_url") --output /usr/local/bin/rosa
RUN curl -L $(curl -s https://api.github.com/repos/openshift-online/ocm-cli/releases/latest | jq -r ".assets[] | select(.name == \"ocm-linux-amd64\") | .browser_download_url") --output /usr/local/bin/ocm
RUN chmod +x /usr/local/bin/rosa && chmod +x /usr/local/bin/ocm
RUN /usr/local/bin/rosa download openshift-client
RUN tar xzvf openshift-client-linux.tar.gz
RUN mv oc kubectl /usr/local/bin/
RUN curl "https://awscli.amazonaws.com/awscli-exe-linux-x86_64.zip" -o "awscliv2.zip"
RUN unzip awscliv2.zip
RUN ./aws/install
RUN curl -sL https://aka.ms/InstallAzureCLIDeb | bash
COPY . /
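
For context, the image above bundles the CLIs that rosa-burner shells out to (rosa, ocm, oc, kubectl, aws, az). Below is a minimal sketch of a startup check the wrapper could run to confirm they are on PATH; the helper name is an assumption for illustration and is not part of this PR:

```python
import shutil

# CLIs installed by the Dockerfile above; az comes from the Azure CLI install step.
REQUIRED_CLIS = ("rosa", "ocm", "oc", "kubectl", "aws", "az")

def missing_clis(required=REQUIRED_CLIS):
    """Return the subset of required CLIs that cannot be found on PATH."""
    return [cli for cli in required if shutil.which(cli) is None]

if __name__ == "__main__":
    missing = missing_clis()
    if missing:
        raise SystemExit(f"Runtime image is missing CLIs: {', '.join(missing)}")
```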
15 changes: 10 additions & 5 deletions libs/platforms/platform.py
@@ -21,6 +21,9 @@ def __init__(self, arguments, logging, utils, es):

self.environment["platform"] = arguments["platform"]

if arguments["subplatform"]:
self.environment["subplatform"] = arguments["subplatform"]

self.environment["ocm_url"] = arguments["ocm_url"]
self.environment["ocm_token"] = arguments["ocm_token"]

@@ -114,9 +117,11 @@ def initialize(self):
"`ocm login` execution OK"
)

def download_kubeconfig(self, cluster_name, path):
def download_kubeconfig(self, cluster_name, path, kubeconfig=""):
if kubeconfig == "":
kubeconfig = "kubeconfig_" + cluster_name
self.logging.debug(
f"Downloading kubeconfig file for Cluster {cluster_name} on {path}/kubeconfig_{cluster_name}"
f"Downloading kubeconfig file for Cluster {cluster_name} on {path}/{kubeconfig}"
)
kubeconfig_code, kubeconfig_out, kubeconfig_err = self.utils.subprocess_exec(
"ocm get /api/clusters_mgmt/v1/clusters/"
@@ -131,11 +136,11 @@ def download_kubeconfig(self, cluster_name, path):
del kubeconfig_as_dict["clusters"][0]["cluster"][
"certificate-authority-data"
]
kubeconfig_path = path + "/kubeconfig_" + cluster_name
kubeconfig_path = path + "/" + kubeconfig
with open(kubeconfig_path, "w") as kubeconfig_file:
yaml.dump(kubeconfig_as_dict, kubeconfig_file)
self.logging.debug(
f"Downloaded kubeconfig file for Cluster {cluster_name} and stored at {path}/kubeconfig_{cluster_name}"
f"Downloaded kubeconfig file for Cluster {cluster_name} and stored at {path}/{kubeconfig}"
)
return kubeconfig_path

@@ -184,7 +189,7 @@ def create_cluster(self, platform, cluster_name):
def delete_cluster(self, platform, cluster_name):
pass

def platform_cleanup(self):
def platform_cleanup(self, platform=""):
pass

def watcher(self):
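A short usage sketch of the changed platform.py API, assuming a Platform instance named platform; the new optional kubeconfig argument overrides the default kubeconfig_<cluster_name> filename, which is useful now that one terraform apply can produce several clusters per batch:

```python
# Default behaviour is unchanged: writes <path>/kubeconfig_<cluster_name>.
platform.download_kubeconfig("rbur-000-0001", "/tmp/rosa-burner")

# New optional filename, e.g. to keep kubeconfigs from one batch apply apart.
platform.download_kubeconfig("rbur-000-0002", "/tmp/rosa-burner", kubeconfig="batch-01-kubeconfig")
```

The platform_cleanup signature also gains an optional platform argument that the rosa.py and hypershift.py subclasses below forward to their parent; what callers pass in is outside this diff.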
4 changes: 2 additions & 2 deletions libs/platforms/rosa/hypershift/hypershift.py
@@ -114,8 +114,8 @@ def _get_mc(self, cluster_id):
)
return json.loads(resp_out).get("management_cluster", None) if resp_code == 0 else None

def platform_cleanup(self):
super().platform_cleanup()
def platform_cleanup(self, platform=""):
super().platform_cleanup(platform)
self.logging.info("Cleaning resources")
# Delete Operator Roles
self._delete_operator_roles() if self.environment[
41 changes: 22 additions & 19 deletions libs/platforms/rosa/rosa.py
@@ -199,8 +199,8 @@ def _delete_operator_roles(self):
)
return True

def platform_cleanup(self):
super().platform_cleanup()
def platform_cleanup(self, platform=""):
super().platform_cleanup(platform)

def create_cluster(self, platform, cluster_name):
super().create_cluster(platform, cluster_name)
@@ -240,25 +240,28 @@ def _preflight_wait(self, cluster_id, cluster_name):
self.logging.error(f"Exiting preflight times capturing on {cluster_name} cluster after capturing Ctrl-C")
return 0
self.logging.info(f"Getting status for cluster {cluster_name}")
status_code, status_out, status_err = self.utils.subprocess_exec("rosa describe cluster -c " + cluster_id + " -o json", extra_params={"universal_newlines": True})
status_code, status_out, status_err = self.utils.subprocess_exec("rosa describe cluster -c " + cluster_id + " -o json", extra_params={"universal_newlines": True}, log_output=False)
current_time = int(datetime.datetime.utcnow().timestamp())
try:
current_status = json.loads(status_out)["state"]
except Exception as err:
self.logging.error(f"Cannot load metadata for cluster {cluster_name}")
self.logging.error(err)
continue
if current_status != previous_status and previous_status != "":
return_data[previous_status] = current_time - start_time
start_time = current_time
self.logging.info(f"Cluster {cluster_name} moved from {previous_status} status to {current_status} status after {return_data[previous_status]} seconds")
if current_status == "installing":
self.logging.info(f"Cluster {cluster_name} is on installing status. Exiting preflights waiting...")
return return_data
if status_code != 0:
self.logging.debug("Cluster data not available yet, retrying..")
else:
self.logging.debug(f"Cluster {cluster_name} on {current_status} status. Waiting 2 seconds until {datetime.datetime.fromtimestamp(start_time + 60 * 60)} for next check")
time.sleep(1)
previous_status = current_status
try:
current_status = json.loads(status_out)["state"]
except Exception as err:
self.logging.error(f"Cannot load metadata for cluster {cluster_name}")
self.logging.error(err)
continue
if current_status != previous_status and previous_status != "":
return_data[previous_status] = current_time - start_time
start_time = current_time
self.logging.info(f"Cluster {cluster_name} moved from {previous_status} status to {current_status} status after {return_data[previous_status]} seconds")
if current_status == "installing":
self.logging.info(f"Cluster {cluster_name} is on installing status. Exiting preflights waiting...")
return return_data
else:
self.logging.debug(f"Cluster {cluster_name} on {current_status} status. Waiting 2 seconds until {datetime.datetime.fromtimestamp(start_time + 60 * 60)} for next check")
time.sleep(1)
previous_status = current_status
self.logging.error(f"Cluster {cluster_name} on {current_status} status (not installing) after 60 minutes. Exiting preflight waiting...")
return return_data

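The restructured _preflight_wait above only parses the rosa describe cluster output once the command has succeeded, and retries quietly otherwise. Below is a standalone sketch of that polling pattern under the same assumptions (a describe function returning (code, stdout, stderr) like subprocess_exec; the names are illustrative, not part of the PR):

```python
import json
import time

def wait_until_installing(describe, timeout_seconds=60 * 60, poll_seconds=1):
    """Poll describe() until the cluster reports 'installing', recording time spent per status."""
    timings, previous_status = {}, ""
    start = time.time()
    deadline = start + timeout_seconds
    while time.time() < deadline:
        code, out, _err = describe()
        if code != 0:
            # Cluster data not available yet, retry on the next loop iteration.
            time.sleep(poll_seconds)
            continue
        status = json.loads(out)["state"]
        if previous_status and status != previous_status:
            timings[previous_status] = time.time() - start
            start = time.time()
        if status == "installing":
            return timings
        previous_status = status
        time.sleep(poll_seconds)
    return timings
```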
20 changes: 5 additions & 15 deletions libs/platforms/rosa/terraform/files/main.tf
@@ -31,18 +31,6 @@ provider "rhcs" {
url = var.url
}

# Create managed OIDC config
module "oidc_config" {
token = var.token
url = var.url
source = "./oidc_provider"
managed = true
operator_role_prefix = var.operator_role_prefix
account_role_prefix = var.account_role_prefix
tags = var.tags
path = var.path
}

locals {
path = coalesce(var.path, "/")
sts_roles = {
@@ -53,7 +41,7 @@ locals {
worker_role_arn = "arn:aws:iam::${data.aws_caller_identity.current.account_id}:role${local.path}${var.account_role_prefix}-Worker-Role"
},
operator_role_prefix = var.operator_role_prefix,
oidc_config_id = module.oidc_config.id
oidc_config_id = var.oidc_config_id
}
}

@@ -65,7 +53,8 @@ locals {
}

resource "rhcs_cluster_rosa_classic" "rosa_sts_cluster" {
name = var.cluster_name
count = var.clusters_per_apply
name = "${var.cluster_name}-${format("%04d", var.loop_factor + count.index + 1)}"
cloud_region = var.cloud_region
aws_account_id = data.aws_caller_identity.current.account_id
availability_zones = var.availability_zones
Expand All @@ -79,5 +68,6 @@ resource "rhcs_cluster_rosa_classic" "rosa_sts_cluster" {
rosa_creator_arn = data.aws_caller_identity.current.arn
}
sts = local.sts_roles
wait_for_create_complete = true
wait_for_create_complete = false
disable_waiting_in_destroy = true
}
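
main.tf now creates clusters_per_apply clusters per apply, names them <cluster_name>-NNNN from loop_factor + count.index + 1, takes the OIDC config id as a variable instead of creating it, and returns without waiting (wait_for_create_complete = false), leaving install progress to the wrapper. A minimal sketch of how rosa-burner might drive one batch apply; the variable names match this PR, while the command wiring and the omitted variables (token, url, region, role prefixes) are assumptions:

```python
import subprocess

def apply_cluster_batch(name_prefix, clusters_per_apply, loop_factor, oidc_config_id,
                        tf_dir="libs/platforms/rosa/terraform/files"):
    """Run one terraform apply that creates a batch of clusters and return their expected names."""
    # Names produced by main.tf: "<prefix>-<loop_factor + count.index + 1>" zero-padded to four digits.
    expected_names = [f"{name_prefix}-{loop_factor + index + 1:04d}" for index in range(clusters_per_apply)]
    subprocess.run(
        [
            "terraform", f"-chdir={tf_dir}", "apply", "-auto-approve",
            f"-var=cluster_name={name_prefix}",
            f"-var=clusters_per_apply={clusters_per_apply}",
            f"-var=loop_factor={loop_factor}",
            f"-var=oidc_config_id={oidc_config_id}",
        ],
        check=True,
    )
    return expected_names
```

For example, with name_prefix="rbur-000" (an illustrative value), loop_factor=2 and clusters_per_apply=2, the expected names would be rbur-000-0003 and rbur-000-0004, so successive batches never collide.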
15 changes: 0 additions & 15 deletions libs/platforms/rosa/terraform/files/output.tf

This file was deleted.

15 changes: 15 additions & 0 deletions libs/platforms/rosa/terraform/files/variables.tf
@@ -23,6 +23,21 @@ variable "cluster_name" {
default = "rbur-000-0001"
}

variable "clusters_per_apply" {
type = number
default = null
}

variable "loop_factor" {
type = number
default = null
}

variable "oidc_config_id" {
type = string
default = null
}

variable "cloud_region" {
type = string
default = "us-east-2"