Skip to content
New issue

Have a question about this project? Sign up for a free GitHub account to open an issue and contact its maintainers and the community.

By clicking “Sign up for GitHub”, you agree to our terms of service and privacy statement. We’ll occasionally send you account related emails.

Already on GitHub? Sign in to your account

Enabled pushing timeseries data from local run #424

Merged
merged 2 commits into from
Apr 24, 2024
Merged
Show file tree
Hide file tree
Changes from all commits
Commits
File filter

Filter by extension

Filter by extension


Conversations
Failed to load comments.
Loading
Jump to
Jump to file
Failed to load files.
Loading
Diff view
Diff view
2 changes: 1 addition & 1 deletion pyproject.toml
Original file line number Diff line number Diff line change
@@ -1,6 +1,6 @@
[tool.poetry]
name = "redisbench-admin"
version = "0.10.19"
version = "0.10.24"
description = "Redis benchmark run helper. A wrapper around Redis and Redis Modules benchmark tools ( ftsb_redisearch, memtier_benchmark, redis-benchmark, aibench, etc... )."
authors = ["filipecosta90 <filipecosta.90@gmail.com>","Redis Performance Group <performance@redis.com>"]
readme = "README.md"
Expand Down
1 change: 1 addition & 0 deletions redisbench_admin/compare/compare.py
Original file line number Diff line number Diff line change
Expand Up @@ -722,6 +722,7 @@ def from_rts_to_regression_table(
total_comparison_points = 0
noise_waterline = 3
progress = tqdm(unit="benchmark time-series", total=len(test_names))
at_comparison = 0
for test_name in test_names:
multi_value_baseline = check_multi_value_filter(baseline_str)
multi_value_comparison = check_multi_value_filter(comparison_str)
Expand Down
9 changes: 8 additions & 1 deletion redisbench_admin/run/args.py
Original file line number Diff line number Diff line change
Expand Up @@ -40,9 +40,10 @@
PROFILERS = os.getenv("PROFILERS", PROFILERS_DEFAULT)
MAX_PROFILERS_PER_TYPE = int(os.getenv("MAX_PROFILERS", 1))
PROFILE_FREQ = os.getenv("PROFILE_FREQ", PROFILE_FREQ_DEFAULT)
KEEP_ENV = bool(os.getenv("KEEP_ENV", False))
KEEP_ENV = bool(int(os.getenv("KEEP_ENV", "0")))
ALLOWED_TOOLS_DEFAULT = "memtier_benchmark,redis-benchmark,redisgraph-benchmark-go,ycsb,go-ycsb,tsbs_run_queries_redistimeseries,tsbs_load_redistimeseries,ftsb_redisearch,aibench_run_inference_redisai_vision,ann-benchmarks"
ALLOWED_BENCH_TOOLS = os.getenv("ALLOWED_BENCH_TOOLS", ALLOWED_TOOLS_DEFAULT)
SKIP_DB_SETUP = bool(int(os.getenv("SKIP_DB_SETUP", "0")))


def common_run_args(parser):
Expand All @@ -53,6 +54,12 @@ def common_run_args(parser):
action="store_true",
help="Keep environment and topology up after benchmark.",
)
parser.add_argument(
"--skip-db-setup",
type=bool,
default=SKIP_DB_SETUP,
        help="skip db setup/teardown steps. Useful when you want to target an existing DB",
)
parser.add_argument(
"--fail_fast",
required=False,
Expand Down
7 changes: 7 additions & 0 deletions redisbench_admin/run/common.py
Original file line number Diff line number Diff line change
Expand Up @@ -699,6 +699,8 @@ def print_results_table_stdout(
setup_name,
test_name,
cpu_usage=None,
kv_overall={},
metric_names=[],
):
# check which metrics to extract
(_, metrics,) = merge_default_and_config_metrics(
Expand All @@ -714,6 +716,11 @@ def print_results_table_stdout(
results_matrix = extract_results_table(metrics, results_dict)
if cpu_usage is not None:
results_matrix.append(["Total shards CPU usage %", "", "", cpu_usage])
for metric_name in metric_names:
if metric_name in kv_overall:
metric_value = kv_overall[metric_name]
results_matrix.append([f"Total shards {metric_name}", "", "", metric_value])

results_matrix = [[x[0], "{:.3f}".format(x[3])] for x in results_matrix]
writer = MarkdownTableWriter(
table_name=table_name,
Expand Down
4 changes: 4 additions & 0 deletions redisbench_admin/run_async/async_terraform.py
Original file line number Diff line number Diff line change
Expand Up @@ -189,6 +189,10 @@ def setup_remote_environment(
"github_repo": tf_github_repo,
"triggering_env": tf_triggering_env,
"timeout_secs": tf_timeout_secs,
"Project": tf_github_org,
"project": tf_github_org,
"Environment": tf_github_org,
"environment": tf_github_org,
},
)
return self.retrieve_tf_connection_vars(return_code, tf)
Expand Down
16 changes: 16 additions & 0 deletions redisbench_admin/run_local/args.py
Original file line number Diff line number Diff line change
Expand Up @@ -3,13 +3,29 @@
# Copyright (c) 2021., Redis Labs Modules
# All rights reserved.
#
import os

from redisbench_admin.run.args import common_run_args
from redisbench_admin.run.common import REDIS_BINARY

FLUSHALL_AT_START = bool(int(os.getenv("FLUSHALL_AT_START", "0")))
IGNORE_KEYSPACE_ERRORS = bool(int(os.getenv("IGNORE_KEYSPACE_ERRORS", "0")))


def create_run_local_arguments(parser):
parser = common_run_args(parser)
parser.add_argument("--port", type=int, default=6379)
parser.add_argument("--redis-binary", type=str, default=REDIS_BINARY)
parser.add_argument(
"--flushall_on_every_test_start",
type=bool,
default=FLUSHALL_AT_START,
help="At the start of every test send a FLUSHALL",
)
parser.add_argument(
"--ignore_keyspace_errors",
type=bool,
default=IGNORE_KEYSPACE_ERRORS,
help="Ignore keyspace check errors. Will still log them as errors",
)
return parser
177 changes: 103 additions & 74 deletions redisbench_admin/run_local/local_db.py
Original file line number Diff line number Diff line change
Expand Up @@ -46,59 +46,16 @@ def local_db_spin(
required_modules,
setup_type,
shard_count,
flushall_on_every_test_start=False,
ignore_keyspace_errors=False,
):
# setup Redis
# copy the rdb to DB machine
redis_conns = []
artifact_version = "n/a"
result = True
temporary_dir = tempfile.mkdtemp()
redis_7 = args.redis_7
logging.info(
"Using local temporary dir to spin up Redis Instance. Path: {}".format(
temporary_dir
)
)
if dbdir_folder is not None:
from distutils.dir_util import copy_tree

copy_tree(dbdir_folder, temporary_dir)
logging.info(
"Copied entire content of {} into temporary path: {}".format(
dbdir_folder, temporary_dir
)
)
(
_,
_,
redis_configuration_parameters,
dataset_load_timeout_secs,
modules_configuration_parameters_map,
) = extract_redis_dbconfig_parameters(benchmark_config, "dbconfig")
cluster_api_enabled = False
logging.info(
"Using a dataset load timeout of {} seconds.".format(dataset_load_timeout_secs)
)
redis_conns = []
if setup_type == "oss-cluster":
cluster_api_enabled = True
shard_host = "127.0.0.1"
redis_processes, redis_conns = spin_up_local_redis_cluster(
binary,
temporary_dir,
shard_count,
shard_host,
args.port,
local_module_file,
redis_configuration_parameters,
dataset_load_timeout_secs,
modules_configuration_parameters_map,
redis_7,
)

status = setup_redis_cluster_from_conns(
redis_conns, shard_count, shard_host, args.port
)
if status is False:
raise Exception("Redis cluster setup failed. Failing test.")

dataset, dataset_name, _, _ = check_dataset_local_requirements(
benchmark_config,
temporary_dir,
Expand All @@ -108,35 +65,108 @@ def local_db_spin(
shard_count,
cluster_api_enabled,
)
if setup_type == "oss-standalone":
redis_processes = spin_up_local_redis(
binary,
args.port,
temporary_dir,
local_module_file,

if args.skip_db_setup:
logging.info("Skipping DB Setup...")
if dataset is not None:
logging.info("Given this benchmark requires an RDB load will skip it...")
result = False
return (
result,
artifact_version,
cluster_api_enabled,
redis_conns,
redis_processes,
)
else:
# setup Redis
# copy the rdb to DB machine
redis_7 = args.redis_7
logging.info(
"Using local temporary dir to spin up Redis Instance. Path: {}".format(
temporary_dir
)
)
if dbdir_folder is not None:
from distutils.dir_util import copy_tree

copy_tree(dbdir_folder, temporary_dir)
logging.info(
"Copied entire content of {} into temporary path: {}".format(
dbdir_folder, temporary_dir
)
)
(
_,
_,
redis_configuration_parameters,
dbdir_folder,
dataset_load_timeout_secs,
modules_configuration_parameters_map,
redis_7,
) = extract_redis_dbconfig_parameters(benchmark_config, "dbconfig")

logging.info(
"Using a dataset load timeout of {} seconds.".format(
dataset_load_timeout_secs
)
)

if setup_type == "oss-cluster":
cluster_api_enabled = True
shard_host = "127.0.0.1"
redis_processes, redis_conns = spin_up_local_redis_cluster(
binary,
temporary_dir,
shard_count,
shard_host,
args.port,
local_module_file,
redis_configuration_parameters,
dataset_load_timeout_secs,
modules_configuration_parameters_map,
redis_7,
)

status = setup_redis_cluster_from_conns(
redis_conns, shard_count, shard_host, args.port
)
if status is False:
raise Exception("Redis cluster setup failed. Failing test.")

if setup_type == "oss-standalone":
redis_processes = spin_up_local_redis(
binary,
args.port,
temporary_dir,
local_module_file,
redis_configuration_parameters,
dbdir_folder,
dataset_load_timeout_secs,
modules_configuration_parameters_map,
redis_7,
)
if setup_type == "oss-cluster":
for shardn, redis_process in enumerate(redis_processes):
logging.info(
"Checking if shard #{} process with pid={} is alive".format(
shardn + 1, redis_process.pid
)
)
if is_process_alive(redis_process) is False:
raise Exception("Redis process is not alive. Failing test.")
cluster_init_steps(clusterconfig, redis_conns, local_module_file)

if setup_type == "oss-standalone":
r = redis.Redis(port=args.port)
r.ping()
r.client_setname("redisbench-admin-stadalone")
r.client_setname("redisbench-admin-standalone")
redis_conns.append(r)
if setup_type == "oss-cluster":
for shardn, redis_process in enumerate(redis_processes):
logging.info(
"Checking if shard #{} process with pid={} is alive".format(
shardn + 1, redis_process.pid
)
)
if is_process_alive(redis_process) is False:
raise Exception("Redis process is not alive. Failing test.")

if setup_type == "oss-cluster":
cluster_init_steps(clusterconfig, redis_conns, local_module_file)
if dataset is None:
if flushall_on_every_test_start:
logging.info("Will flush all data at test start...")
for shard_n, shard_conn in enumerate(redis_conns):
logging.info(f"Flushing all in shard {shard_n}...")
shard_conn.flushall()

if check_dbconfig_tool_requirement(benchmark_config):
logging.info("Detected the requirements to load data via client tool")
Expand Down Expand Up @@ -175,11 +205,10 @@ def local_db_spin(
)
)

dbconfig_keyspacelen_check(
benchmark_config,
redis_conns,
)
dbconfig_keyspacelen_check(benchmark_config, redis_conns, ignore_keyspace_errors)

run_redis_pre_steps(benchmark_config, redis_conns[0], required_modules)
artifact_version = run_redis_pre_steps(
benchmark_config, redis_conns[0], required_modules
)

return cluster_api_enabled, redis_conns, redis_processes
return result, artifact_version, cluster_api_enabled, redis_conns, redis_processes
Loading
Loading