From a9487949549eee318d6312647a15732e5db67dca Mon Sep 17 00:00:00 2001
From: "Filipe Oliveira (Redis)"
Date: Fri, 9 Aug 2024 00:57:17 +0100
Subject: [PATCH] fixed restore build artifacts issue on docker hub triggering (#259)

* Added dockerhub build-variants. Added by.hash (we have by.branch, by.tag, by.hash now)
* fixes per flake linter
* Updated poetry lock
* Running on tox on PR in gh
* tox 5.0.0 and docker >= 7
* Fixed utils/tests/test_builder.py::test_commit_schema_to_stream_then_build
* Fixed utils/tests/test_builder.py:76: AssertionError
* Fixed poetry lock
* Added dockerhub E2E triggering/tests
* Running dockerhub tests by default on CI
* Ensuring we have small CPU count requests for CI
* Fixed prefetch image test
* Include a way of triggering dockerhub run
* Add airgap option to cli docker triggering
* fixed restore build artifacts issue on docker hub triggering
* Fixed linter issues
---
 pyproject.toml                                |  2 +-
 .../__builder__/builder.py                    | 32 ++++++++++--------
 redis_benchmarks_specification/__cli__/cli.py |  1 +
 .../artifacts.py                              | 33 ++++++++++---------
 4 files changed, 39 insertions(+), 29 deletions(-)

diff --git a/pyproject.toml b/pyproject.toml
index eb6c507..faa2d99 100644
--- a/pyproject.toml
+++ b/pyproject.toml
@@ -1,6 +1,6 @@
 [tool.poetry]
 name = "redis-benchmarks-specification"
-version = "0.1.210"
+version = "0.1.213"
 description = "The Redis benchmarks specification describes the cross-language/tools requirements and expectations to foster performance and observability standards around redis related technologies. Members from both industry and academia, including organizations and individuals are encouraged to contribute."
 authors = ["filipecosta90 ","Redis Performance Group "]
 readme = "Readme.md"
diff --git a/redis_benchmarks_specification/__builder__/builder.py b/redis_benchmarks_specification/__builder__/builder.py
index 9b48f9c..a38b635 100644
--- a/redis_benchmarks_specification/__builder__/builder.py
+++ b/redis_benchmarks_specification/__builder__/builder.py
@@ -521,22 +521,28 @@ def store_airgap_image_redis(conn, docker_client, run_image):
             run_image, airgap_key
         )
     )
-    run_image_binary_stream = io.BytesIO()
-    run_image_docker = docker_client.images.get(run_image)
-    for chunk in run_image_docker.save():
-        run_image_binary_stream.write(chunk)
     # 7 days expire
     binary_exp_secs = 24 * 60 * 60 * 7
-    res_airgap = conn.set(
-        airgap_key,
-        run_image_binary_stream.getbuffer(),
-        ex=binary_exp_secs,
-    )
-    logging.info(
-        "DOCKER AIR GAP: result of set bin data to {}: {}".format(
-            airgap_key, res_airgap
+    if conn.exists(airgap_key):
+        logging.info(
+            f"DOCKER AIRGAP KEY ALREADY EXISTS: {airgap_key}. Updating only the expire time"
+        )
+        conn.expire(airgap_key, binary_exp_secs)
+    else:
+        run_image_binary_stream = io.BytesIO()
+        run_image_docker = docker_client.images.get(run_image)
+        for chunk in run_image_docker.save():
+            run_image_binary_stream.write(chunk)
+        res_airgap = conn.set(
+            airgap_key,
+            run_image_binary_stream.getbuffer(),
+            ex=binary_exp_secs,
+        )
+        logging.info(
+            "DOCKER AIR GAP: result of set bin data to {}: {}".format(
+                airgap_key, res_airgap
+            )
         )
-    )
 
 
 def generate_benchmark_stream_request(
diff --git a/redis_benchmarks_specification/__cli__/cli.py b/redis_benchmarks_specification/__cli__/cli.py
index 2b8269a..67bf21e 100644
--- a/redis_benchmarks_specification/__cli__/cli.py
+++ b/redis_benchmarks_specification/__cli__/cli.py
@@ -90,6 +90,7 @@ def trigger_tests_dockerhub_cli_command_logic(args, project_name, project_versio
     )
     build_stream_fields["github_repo"] = args.gh_repo
     build_stream_fields["github_org"] = args.gh_org
+    build_stream_fields["restore_build_artifacts"] = "False"
     server_name = args.gh_repo
     if args.server_name is not None:
         server_name = args.server_name
diff --git a/redis_benchmarks_specification/__self_contained_coordinator__/artifacts.py b/redis_benchmarks_specification/__self_contained_coordinator__/artifacts.py
index 08463c7..cb33691 100644
--- a/redis_benchmarks_specification/__self_contained_coordinator__/artifacts.py
+++ b/redis_benchmarks_specification/__self_contained_coordinator__/artifacts.py
@@ -6,19 +6,22 @@ def restore_build_artifacts_from_test_details(
     build_artifacts, conn, temporary_dir, testDetails
 ):
     for build_artifact in build_artifacts:
-        buffer_key = testDetails["{}".format(build_artifact).encode()]
-        logging.info(
-            "Reading artifact binary {} from key {}".format(build_artifact, buffer_key)
-        )
-        buffer = bytes(conn.get(buffer_key))
-        artifact_fname = "{}/{}".format(temporary_dir, build_artifact)
-        with open(artifact_fname, "wb") as fd:
-            fd.write(buffer)
-        os.chmod(artifact_fname, 755)
-        # TODO: re-enable
-        # if build_artifact == "redis-server":
-        #     redis_server_path = artifact_fname
+        build_artifact_key = "{}".format(build_artifact).encode()
+        if build_artifact_key in testDetails:
+            buffer_key = testDetails[build_artifact_key]
+            logging.info(
+                "Reading artifact binary {} from key {}".format(
+                    build_artifact, buffer_key
+                )
+            )
+            buffer = bytes(conn.get(buffer_key))
+            artifact_fname = "{}/{}".format(temporary_dir, build_artifact)
+            with open(artifact_fname, "wb") as fd:
+                fd.write(buffer)
+            os.chmod(artifact_fname, 755)
 
-        logging.info(
-            "Successfully restored {} into {}".format(build_artifact, artifact_fname)
-        )
+            logging.info(
+                "Successfully restored {} into {}".format(
+                    build_artifact, artifact_fname
+                )
+            )
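
Note for reviewers (not part of the patch): the sketch below is a minimal, self-contained reproduction of the guarded restore path that the artifacts.py hunk introduces, wired to an in-memory stand-in for the Redis connection so it can be run without a broker. FakeConn, the sample key names, and the explicit "skipping" log line are illustrative assumptions; the sketch also passes the octal literal 0o755 to os.chmod, whereas the patched code passes the integer 755. Since the dockerhub CLI path now sends restore_build_artifacts="False", the coordinator consumes stream entries without artifact keys, and the guard makes the loop skip them instead of failing on a missing field.

import logging
import os
import tempfile

logging.basicConfig(level=logging.INFO)


class FakeConn:
    # Illustrative in-memory stand-in for the Redis connection (assumption).
    def __init__(self, data):
        self.data = data

    def get(self, key):
        return self.data[key]


def restore_build_artifacts_from_test_details(
    build_artifacts, conn, temporary_dir, testDetails
):
    # Same guarded logic as the patched artifacts.py: only restore artifacts
    # whose binary key is present in the consumed stream entry (testDetails).
    for build_artifact in build_artifacts:
        build_artifact_key = "{}".format(build_artifact).encode()
        if build_artifact_key in testDetails:
            buffer_key = testDetails[build_artifact_key]
            buffer = bytes(conn.get(buffer_key))
            artifact_fname = "{}/{}".format(temporary_dir, build_artifact)
            with open(artifact_fname, "wb") as fd:
                fd.write(buffer)
            os.chmod(artifact_fname, 0o755)  # octal here; the patch passes 755
            logging.info(
                "Successfully restored {} into {}".format(
                    build_artifact, artifact_fname
                )
            )
        else:
            # The patch skips silently; this log line is added for the demo.
            logging.info("No artifact key for {}; skipping".format(build_artifact))


if __name__ == "__main__":
    # Hypothetical key names, used only to exercise the two code paths.
    conn = FakeConn({b"build:artifact:redis-server": b"\x7fELF-placeholder"})
    with tempfile.TemporaryDirectory() as tmp:
        # Dockerhub-triggered run: restore_build_artifacts="False", so the
        # stream entry carries no artifact keys and nothing is restored.
        restore_build_artifacts_from_test_details(["redis-server"], conn, tmp, {})
        # Regular build run: the key is present and the binary is written out.
        restore_build_artifacts_from_test_details(
            ["redis-server"],
            conn,
            tmp,
            {b"redis-server": b"build:artifact:redis-server"},
        )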