Exposing memory metrics on local run
filipecosta90 committed Feb 19, 2024
1 parent 3816cf3 commit 8b3fbdd
Showing 5 changed files with 67 additions and 6 deletions.
2 changes: 1 addition & 1 deletion pyproject.toml
@@ -1,6 +1,6 @@
 [tool.poetry]
 name = "redisbench-admin"
-version = "0.10.21"
+version = "0.10.24"
 description = "Redis benchmark run helper. A wrapper around Redis and Redis Modules benchmark tools ( ftsb_redisearch, memtier_benchmark, redis-benchmark, aibench, etc... )."
 authors = ["filipecosta90 <filipecosta.90@gmail.com>","Redis Performance Group <performance@redis.com>"]
 readme = "README.md"
7 changes: 7 additions & 0 deletions redisbench_admin/run/common.py
@@ -699,6 +699,8 @@ def print_results_table_stdout(
     setup_name,
     test_name,
     cpu_usage=None,
+    kv_overall={},
+    metric_names=[],
 ):
     # check which metrics to extract
     (_, metrics,) = merge_default_and_config_metrics(
@@ -714,6 +716,11 @@
     results_matrix = extract_results_table(metrics, results_dict)
     if cpu_usage is not None:
         results_matrix.append(["Total shards CPU usage %", "", "", cpu_usage])
+    for metric_name in metric_names:
+        if metric_name in kv_overall:
+            metric_value = kv_overall[metric_name]
+            results_matrix.append([f"Total shards {metric_name}", "", "", metric_value])
+
     results_matrix = [[x[0], "{:.3f}".format(x[3])] for x in results_matrix]
     writer = MarkdownTableWriter(
         table_name=table_name,
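To illustrate the change above, here is a small, self-contained sketch (values are illustrative, not taken from the commit) of what the new kv_overall / metric_names parameters do: every requested metric that is present in the overall key/value dict is appended as one more "Total shards ..." row, next to the existing CPU usage row, before the Markdown table is rendered.

# Illustrative sketch of the new row-appending logic (standalone; example values only).
kv_overall = {
    "memory_used_memory": 1048576,
    "memory_used_memory_dataset": 524288,
}
metric_names = ["memory_used_memory", "memory_used_memory_dataset"]
cpu_usage = 385.2

# Rows as produced by extract_results_table(); the last column holds the value.
results_matrix = [["Ops/sec", "", "", 125000.0]]
if cpu_usage is not None:
    results_matrix.append(["Total shards CPU usage %", "", "", cpu_usage])
for metric_name in metric_names:
    if metric_name in kv_overall:
        metric_value = kv_overall[metric_name]
        results_matrix.append([f"Total shards {metric_name}", "", "", metric_value])

# Keep only the metric name and the formatted value, as the table writer does.
print([[row[0], "{:.3f}".format(row[3])] for row in results_matrix])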
15 changes: 15 additions & 0 deletions redisbench_admin/run_local/args.py
@@ -8,9 +8,24 @@
 from redisbench_admin.run.args import common_run_args
 from redisbench_admin.run.common import REDIS_BINARY
 
+FLUSHALL_AT_START = bool(int(os.getenv("FLUSHALL_AT_START", "0")))
+IGNORE_KEYSPACE_ERRORS = bool(int(os.getenv("IGNORE_KEYSPACE_ERRORS", "0")))
+
 
 def create_run_local_arguments(parser):
     parser = common_run_args(parser)
     parser.add_argument("--port", type=int, default=6379)
     parser.add_argument("--redis-binary", type=str, default=REDIS_BINARY)
+    parser.add_argument(
+        "--flushall_on_every_test_start",
+        type=bool,
+        default=FLUSHALL_AT_START,
+        help="At the start of every test send a FLUSHALL",
+    )
+    parser.add_argument(
+        "--ignore_keyspace_errors",
+        type=bool,
+        default=IGNORE_KEYSPACE_ERRORS,
+        help="Ignore keyspace check errors. Will still log them as errors",
+    )
     return parser
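One practical note on the flags above (the snippet below is a standalone demonstration, not part of the commit): argparse's type=bool converts any non-empty string to True, so passing --flushall_on_every_test_start false on the command line still yields True. The FLUSHALL_AT_START / IGNORE_KEYSPACE_ERRORS environment variables, which drive the defaults, are the predictable way to toggle the behaviour.

# Demonstration of why the environment-variable defaults are the reliable toggle.
import argparse

parser = argparse.ArgumentParser()
parser.add_argument("--flushall_on_every_test_start", type=bool, default=False)

print(parser.parse_args([]).flushall_on_every_test_start)  # False: the default is used
print(parser.parse_args(["--flushall_on_every_test_start", "false"]).flushall_on_every_test_start)  # True: bool("false") is True

For example, exporting FLUSHALL_AT_START=1 and IGNORE_KEYSPACE_ERRORS=1 before the run enables both defaults, since they are read with os.getenv at import time.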
23 changes: 18 additions & 5 deletions redisbench_admin/run_local/local_db.py
@@ -46,8 +46,11 @@ def local_db_spin(
     required_modules,
     setup_type,
     shard_count,
+    flushall_on_every_test_start=False,
+    ignore_keyspace_errors=False,
 ):
     redis_conns = []
+    artifact_version = "n/a"
     result = True
     temporary_dir = tempfile.mkdtemp()
     cluster_api_enabled = False
@@ -68,7 +71,13 @@
         if dataset is not None:
             logging.info("Given this benchmark requires an RDB load will skip it...")
             result = False
-            return result, cluster_api_enabled, redis_conns, redis_processes
+            return (
+                result,
+                artifact_version,
+                cluster_api_enabled,
+                redis_conns,
+                redis_processes,
+            )
     else:
         # setup Redis
         # copy the rdb to DB machine
@@ -152,6 +161,13 @@
             r.client_setname("redisbench-admin-standalone")
             redis_conns.append(r)
 
+    if dataset is None:
+        if flushall_on_every_test_start:
+            logging.info("Will flush all data at test start...")
+            for shard_n, shard_conn in enumerate(redis_conns):
+                logging.info(f"Flushing all in shard {shard_n}...")
+                shard_conn.flushall()
+
     if check_dbconfig_tool_requirement(benchmark_config):
         logging.info("Detected the requirements to load data via client tool")
         local_benchmark_output_filename = "{}/load-data.txt".format(temporary_dir)
@@ -189,10 +205,7 @@
         )
     )
 
-    dbconfig_keyspacelen_check(
-        benchmark_config,
-        redis_conns,
-    )
+    dbconfig_keyspacelen_check(benchmark_config, redis_conns, ignore_keyspace_errors)
 
     artifact_version = run_redis_pre_steps(
         benchmark_config, redis_conns[0], required_modules
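A minimal runnable sketch of the new per-shard flush (assumptions: redis-py is installed and a local Redis listens on the default port; inside local_db_spin the connections come from the shards it just spun up). Note that callers of local_db_spin also need to account for the extra artifact_version element now included in the early return.

import logging

import redis

logging.basicConfig(level=logging.INFO)

# One connection per shard; a single standalone shard is assumed here.
redis_conns = [redis.Redis(port=6379)]

dataset = None  # no RDB to load for this test
flushall_on_every_test_start = True

if dataset is None and flushall_on_every_test_start:
    logging.info("Will flush all data at test start...")
    for shard_n, shard_conn in enumerate(redis_conns):
        logging.info(f"Flushing all in shard {shard_n}...")
        shard_conn.flushall()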
26 changes: 26 additions & 0 deletions redisbench_admin/run_local/run_local.py
@@ -29,8 +29,10 @@
 )
 from redisbench_admin.run.metrics import (
     from_info_to_overall_shard_cpu,
+    collect_redis_metrics,
     collect_cpu_data,
 )
+
 from redisbench_admin.run.redistimeseries import (
     datasink_profile_tabular_data,
     timeseries_test_sucess_flow,
@@ -93,6 +95,8 @@ def run_local_command_logic(args, project_name, project_version):
     required_modules = args.required_module
     profilers_enabled = args.enable_profilers
     s3_bucket_name = args.s3_bucket_name
+    flushall_on_every_test_start = args.flushall_on_every_test_start
+    ignore_keyspace_errors = args.ignore_keyspace_errors
     profilers_list = []
     if profilers_enabled:
         profilers_list = args.profilers.split(",")
@@ -218,6 +222,8 @@ def run_local_command_logic(args, project_name, project_version):
                 required_modules,
                 setup_type,
                 shard_count,
+                flushall_on_every_test_start,
+                ignore_keyspace_errors,
             )
             if result_db_spin is False:
                 logging.warning(
@@ -372,6 +378,21 @@
                 test_name,
             )
 
+            (
+                end_time_ms,
+                _,
+                overall_end_time_metrics,
+            ) = collect_redis_metrics(
+                redis_conns,
+                ["memory"],
+                {
+                    "memory": [
+                        "used_memory",
+                        "used_memory_dataset",
+                    ]
+                },
+            )
+
             if (
                 profilers_enabled
                 and args.push_results_redistimeseries
@@ -409,6 +430,11 @@
                 setup_name,
                 test_name,
                 total_shards_cpu_usage,
+                overall_end_time_metrics,
+                [
+                    "memory_used_memory",
+                    "memory_used_memory_dataset",
+                ],
             )
 
             # check KPIs
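For reference, a rough sketch (not the library implementation) of the shape of the data collected above: INFO values are gathered from each shard for the requested sections and exposed under "<section>_<metric>" keys, which is why the table call references memory_used_memory and memory_used_memory_dataset. Assumptions: redis-py is installed, one local standalone shard, and summing across shards as the aggregation.

import redis

redis_conns = [redis.Redis(port=6379)]  # assumption: one local standalone shard
sections = ["memory"]
wanted = {"memory": ["used_memory", "used_memory_dataset"]}

overall_end_time_metrics = {}
for conn in redis_conns:
    for section in sections:
        info = conn.info(section)
        for metric in wanted[section]:
            key = f"{section}_{metric}"
            overall_end_time_metrics[key] = overall_end_time_metrics.get(key, 0) + info[metric]

# e.g. {'memory_used_memory': 1234567, 'memory_used_memory_dataset': 123456}
print(overall_end_time_metrics)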
