From e1ca7c3f04e5d36d005b6b63af51b942094401c6 Mon Sep 17 00:00:00 2001 From: Shashank Reddy Boyapally Date: Tue, 9 Apr 2024 16:08:54 -0400 Subject: [PATCH] added generic indexes, daemon mode opinionated, and version as param Signed-off-by: Shashank Reddy Boyapally --- README.md | 1 + configs/small-scale-cluster-density.yml | 48 ++++++++++++++++++++++ examples/small-scale-cluster-density.yaml | 1 + examples/small-scale-node-density-cni.yaml | 1 + orion.py | 7 +++- pkg/daemon.py | 40 ++++++++++++------ pkg/runTest.py | 7 ++-- pkg/utils.py | 10 +++-- requirements.txt | 1 + setup.py | 4 +- 10 files changed, 98 insertions(+), 22 deletions(-) create mode 100644 configs/small-scale-cluster-density.yml diff --git a/README.md b/README.md index c44e22e..c1251eb 100644 --- a/README.md +++ b/README.md @@ -6,6 +6,7 @@ Below is an illustrative example of the config and metadata that Orion can handl ``` tests : - name : aws-small-scale-cluster-density-v2 + index: ospst-perf-scale-ci-* metadata: platform: AWS masterNodesType: m6a.xlarge diff --git a/configs/small-scale-cluster-density.yml b/configs/small-scale-cluster-density.yml new file mode 100644 index 0000000..decaad6 --- /dev/null +++ b/configs/small-scale-cluster-density.yml @@ -0,0 +1,48 @@ +tests : + - name : aws-small-scale-cluster-density-v2 + index: ospst-perf-scale-ci-* + metadata: + platform: AWS + masterNodesType: m6a.xlarge + masterNodesCount: 3 + workerNodesType: m6a.xlarge + workerNodesCount: 24 + benchmark.keyword: cluster-density-v2 + ocpVersion: {{ version }} + networkType: OVNKubernetes + # encrypted: true + # fips: false + # ipsec: false + + metrics : + - name: podReadyLatency + metricName: podLatencyQuantilesMeasurement + quantileName: Ready + metric_of_interest: P99 + not: + jobConfig.name: "garbage-collection" + + - name: apiserverCPU + metricName : containerCPU + labels.namespace.keyword: openshift-kube-apiserver + metric_of_interest: value + agg: + value: cpu + agg_type: avg + + - name: ovnCPU + 
metricName : containerCPU + labels.namespace.keyword: openshift-ovn-kubernetes + metric_of_interest: value + agg: + value: cpu + agg_type: avg + + - name: etcdCPU + metricName : containerCPU + labels.namespace.keyword: openshift-etcd + metric_of_interest: value + agg: + value: cpu + agg_type: avg + diff --git a/examples/small-scale-cluster-density.yaml b/examples/small-scale-cluster-density.yaml index 703f1b5..4e890c2 100644 --- a/examples/small-scale-cluster-density.yaml +++ b/examples/small-scale-cluster-density.yaml @@ -1,5 +1,6 @@ tests : - name : aws-small-scale-cluster-density-v2 + index: ospst-perf-scale-ci-* metadata: platform: AWS masterNodesType: m6a.xlarge diff --git a/examples/small-scale-node-density-cni.yaml b/examples/small-scale-node-density-cni.yaml index 1821f1a..dc6fb82 100644 --- a/examples/small-scale-node-density-cni.yaml +++ b/examples/small-scale-node-density-cni.yaml @@ -1,5 +1,6 @@ tests : - name : aws-small-scale-node-density-cni + index: ospst-perf-scale-ci-* metadata: platform: AWS masterNodesType: m6a.xlarge diff --git a/orion.py b/orion.py index 1b620ab..0602395 100644 --- a/orion.py +++ b/orion.py @@ -5,10 +5,14 @@ # pylint: disable = import-error import logging import sys +import warnings import click import uvicorn from pkg.logrus import SingletonLogger from pkg.runTest import run +from pkg.utils import load_config + +warnings.filterwarnings("ignore", message="Unverified HTTPS request.*") @@ -26,7 +30,7 @@ def cli(max_content_width=120): # pylint: disable=unused-argument "--output-path", default="output.csv", help="Path to save the output csv file" ) @click.option("--debug", default=False, is_flag=True, help="log level") -@click.option("--hunter-analyze", default=True, is_flag=True, help="run hunter analyze") +@click.option("--hunter-analyze", is_flag=True, help="run hunter analyze") @click.option( "-o", "--output-format", @@ -52,6 +56,7 @@ def cmd_analysis(**kwargs): level = logging.DEBUG if kwargs['debug'] else logging.INFO 
logger_instance = SingletonLogger(debug=level).logger logger_instance.info("🏹 Starting Orion in command-line mode") + kwargs['configMap']=load_config(kwargs["config"]) output = run(**kwargs) for test_name, result_table in output.items(): print(test_name) diff --git a/pkg/daemon.py b/pkg/daemon.py index c4d80cb..160b9b8 100644 --- a/pkg/daemon.py +++ b/pkg/daemon.py @@ -3,10 +3,10 @@ """ import logging -import shutil -import os -from fastapi import FastAPI, File, UploadFile +from fastapi import FastAPI +from jinja2 import Template +import yaml from pkg.logrus import SingletonLogger from . import runTest @@ -17,7 +17,7 @@ @app.post("/daemon") async def daemon( - file: UploadFile = File(...), + version: str = "4.15", uuid: str = "", baseline: str = "", filter_changepoints="", @@ -30,17 +30,18 @@ async def daemon( Returns: json: json object of the changepoints and metrics """ - file_name, file_extension = os.path.splitext(file.filename) - new_file_name = f"{file_name}_copy{file_extension}" - with open(new_file_name, "wb") as buffer: - shutil.copyfileobj(file.file, buffer) + config_file_name="configs/small-scale-cluster-density.yml" + parameters={ + "version": version + } argDict = { - "config": new_file_name, + "config": config_file_name, "output_path": "output.csv", "hunter_analyze": True, "output_format": "json", "uuid": uuid, "baseline": baseline, + "configMap": render_template(config_file_name, parameters) } filter_changepoints = ( True if filter_changepoints == "true" else False # pylint: disable = R1719 @@ -49,8 +50,21 @@ async def daemon( if filter_changepoints: for key, value in result.items(): result[key] = list(filter(lambda x: x.get("is_changepoint", False), value)) - try: - os.remove(new_file_name) - except OSError as e: - logger_instance.error("error %s", e.strerror) return result + +def render_template(file_name, parameters): + """replace parameters in the config file + + Args: + file_name (str): the config file + parameters (dict): parameters to be 
replaced + + Returns: + dict: configMap in dict + """ + with open(file_name, 'r', encoding="utf-8") as template_file: + template_content = template_file.read() + template = Template(template_content) + rendered_config_yaml = template.render(parameters) + rendered_config = yaml.safe_load(rendered_config_yaml) + return rendered_config diff --git a/pkg/runTest.py b/pkg/runTest.py index 661af6e..2feabb4 100644 --- a/pkg/runTest.py +++ b/pkg/runTest.py @@ -5,7 +5,7 @@ import logging from fmatch.matcher import Matcher from pkg.logrus import SingletonLogger -from pkg.utils import run_hunter_analyze, load_config, get_es_url, process_test +from pkg.utils import run_hunter_analyze, get_es_url, process_test def run(**kwargs): @@ -21,12 +21,13 @@ def run(**kwargs): _type_: _description_ """ logger_instance = SingletonLogger(debug=logging.INFO).logger - data = load_config(kwargs["config"]) + data = kwargs["configMap"] + ES_URL = get_es_url(data) result_output = {} for test in data["tests"]: match = Matcher( - index="perf_scale_ci", level=logger_instance.level, ES_URL=ES_URL + index=test["index"], level=logger_instance.level, ES_URL=ES_URL, verify_certs=False ) result = process_test( test, match, kwargs["output_path"], kwargs["uuid"], kwargs["baseline"] diff --git a/pkg/utils.py b/pkg/utils.py index 4db3163..2204e3e 100644 --- a/pkg/utils.py +++ b/pkg/utils.py @@ -131,6 +131,7 @@ def get_metric_data(ids, index, metrics, match): agg_name = agg_value + "_" + agg_type cpu_df = match.convert_to_df(cpu, columns=["uuid", agg_name]) cpu_df = cpu_df.rename(columns={agg_name: metric_name + "_" + agg_name}) + cpu_df = cpu_df.drop_duplicates() dataframe_list.append(cpu_df) logger_instance.debug(cpu_df) @@ -146,6 +147,7 @@ def get_metric_data(ids, index, metrics, match): podl_df = match.convert_to_df( podl, columns=["uuid", "timestamp", metric_of_interest] ) + podl_df=podl_df.drop_duplicates() dataframe_list.append(podl_df) logger_instance.debug(podl_df) except Exception as e: # pylint: 
disable=broad-exception-caught @@ -227,14 +229,14 @@ def get_index_and_ids(metadata, uuids, match, baseline): Returns: _type_: index and uuids """ - index_map={"k8s-netperf":"k8s-netperf", - "ingress-perf":"ingress-performance", + index_map={"k8s-netperf":"ospst-k8s-netperf", + "ingress-perf":"ospst-ingress-performance", } if metadata["benchmark.keyword"] in index_map: return index_map[metadata["benchmark.keyword"]], uuids - index = "ripsaw-kube-burner" + index = "ospst-ripsaw-kube-burner*" if baseline == "": - runs = match.match_kube_burner(uuids) + runs = match.match_kube_burner(uuids,index) ids = match.filter_runs(runs, runs) else: ids = uuids diff --git a/requirements.txt b/requirements.txt index 08afe1b..8ea4460 100644 --- a/requirements.txt +++ b/requirements.txt @@ -4,6 +4,7 @@ click==8.1.7 elastic-transport==8.11.0 elasticsearch==7.13.0 fmatch==0.0.6 +Jinja2==3.1.3 python-dateutil==2.8.2 pytz==2023.3.post1 PyYAML==6.0.1 diff --git a/setup.py b/setup.py index 50689d7..778ea28 100644 --- a/setup.py +++ b/setup.py @@ -18,7 +18,9 @@ ], }, packages=find_packages(), - package_data={'pkg': ['utils.py',"runTest.py","daemon.py","logrus.py"],'hunter': ['*.py']}, + package_data={'pkg': ['utils.py',"runTest.py","daemon.py","logrus.py"], + 'hunter': ['*.py'], + 'configs':['*.yml']}, classifiers=[ 'Programming Language :: Python :: 3', 'License :: OSI Approved :: MIT License',