From 30b80b03a410994a8abb93d5a3f81b0d1f5cb96f Mon Sep 17 00:00:00 2001
From: Samir Talwar
Date: Mon, 3 Feb 2020 14:34:24 +0100
Subject: [PATCH] When building JMH benchmarks, fail on error, instead of
 proceeding. (#977)

* When building JMH benchmarks, fail on error, instead of proceeding.

Currently, if there are errors in some (but not all) benchmarks, running
`bazel run //path/to/benchmark` will compile them, fail on some, and then
run the rest. This changes that behavior so that any JMH build failure will
fail the build.

* Add a test for expected failures in JMH benchmarks.

* Document the fix to JMH builds as a breaking change.

* Split "test_benchmark_jmh_failure" from "test_benchmark_jmh".

* We don't need ValidBenchmark.scala when testing JMH failures.
---
 README.md                                 |  6 ++++
 .../jmh_support/BenchmarkGenerator.scala  | 29 ++++++++++++-------
 test/shell/test_misc.sh                   | 20 +++++++++++--
 test_expect_failure/jmh/BUILD             | 11 +++++++
 .../jmh/InvalidBenchmark.scala            | 10 +++++++
 5 files changed, 63 insertions(+), 13 deletions(-)
 create mode 100644 test_expect_failure/jmh/BUILD
 create mode 100644 test_expect_failure/jmh/InvalidBenchmark.scala

diff --git a/README.md b/README.md
index 38c742cf1..7570340e6 100644
--- a/README.md
+++ b/README.md
@@ -123,6 +123,12 @@ for an example workspace using another scala version.
 | 0.14.x | 3b9ab9be31ac217d3337c709cb6bfeb89c8dcbb1 |
 | 0.13.x | 3c987b6ae8a453886759b132f1572c0efca2eca2 |
 
+## Breaking changes
+
+If you're upgrading to a version containing one of these commits, you may encounter a breaking change where there was previously undefined behavior.
+
+- [929b318](https://github.com/bazelbuild/rules_scala/commit/929b3180cc099ba76859f5e88710d2ac087fbfa3) on 2020-01-30: Fixed a bug in the JMH benchmark build that was allowing build failures to creep through. Previously you were able to build a benchmark suite with JMH build errors. Running the benchmark suite would only run the successfully-built benchmarks.
+
 ## Usage with [bazel-deps](https://github.com/johnynek/bazel-deps)
 
 Bazel-deps allows you to generate bazel dependencies transitively for maven artifacts. Generally we don't want bazel-deps to fetch
diff --git a/src/scala/io/bazel/rules_scala/jmh_support/BenchmarkGenerator.scala b/src/scala/io/bazel/rules_scala/jmh_support/BenchmarkGenerator.scala
index d367ff424..b443f3e68 100644
--- a/src/scala/io/bazel/rules_scala/jmh_support/BenchmarkGenerator.scala
+++ b/src/scala/io/bazel/rules_scala/jmh_support/BenchmarkGenerator.scala
@@ -41,15 +41,24 @@ object BenchmarkGenerator {
     classPath: List[Path]
   )
 
+  private case class GenerationException(messageLines: Seq[String])
+    extends RuntimeException(messageLines.mkString("\n"))
+
   def main(argv: Array[String]): Unit = {
     val args = parseArgs(argv)
-    generateJmhBenchmark(
-      args.generatorType,
-      args.resultSourceJar,
-      args.resultResourceJar,
-      args.inputJar,
-      args.classPath
-    )
+    try {
+      generateJmhBenchmark(
+        args.generatorType,
+        args.resultSourceJar,
+        args.resultResourceJar,
+        args.inputJar,
+        args.classPath
+      )
+    } catch {
+      case GenerationException(messageLines) =>
+        messageLines.foreach(log)
+        sys.exit(1)
+    }
   }
 
   private def parseArgs(argv: Array[String]): BenchmarkGeneratorArgs = {
@@ -168,10 +177,8 @@ object BenchmarkGenerator {
       generator.generate(source, destination)
       generator.complete(source, destination)
       if (destination.hasErrors) {
-        log("JMH Benchmark generator failed")
-        for (e <- destination.getErrors.asScala) {
-          log(e.toString)
-        }
+        throw new GenerationException(
+          "JMH Benchmark generator failed" +: destination.getErrors.asScala.map(_.toString).toSeq)
       }
     }
     constructJar(sourceJarOut, tmpSourceDir)
diff --git a/test/shell/test_misc.sh b/test/shell/test_misc.sh
index e1a767b71..575f9bc2e 100755
--- a/test/shell/test_misc.sh
+++ b/test/shell/test_misc.sh
@@ -55,12 +55,27 @@ test_repl() {
 
 test_benchmark_jmh() {
   RES=$(bazel run -- test/jmh:test_benchmark -i1 -f1 -wi 1)
-  RESPONSE_CODE=$?
+  if [ $? -ne 0 ]; then
+    exit 1
+  fi
   if [[ $RES != *Result*Benchmark* ]]; then
     echo "Benchmark did not produce expected output:\n$RES"
     exit 1
   fi
-  exit $RESPONSE_CODE
+
+  exit 0
+}
+
+test_benchmark_jmh_failure() {
+  set +e
+
+  bazel build test_expect_failure/jmh:jmh_reports_failure
+  if [ $? -eq 0 ]; then
+    echo "'bazel build test_expect_failure/jmh:jmh_reports_failure' should have failed."
+    exit 1
+  fi
+
+  exit 0
 }
 
 scala_test_test_filters() {
@@ -117,6 +132,7 @@ $runner test_disappearing_class
 $runner test_transitive_deps
 $runner test_repl
 $runner test_benchmark_jmh
+$runner test_benchmark_jmh_failure
 $runner scala_test_test_filters
 $runner test_multi_service_manifest
 $runner test_override_javabin
diff --git a/test_expect_failure/jmh/BUILD b/test_expect_failure/jmh/BUILD
new file mode 100644
index 000000000..fbd48091d
--- /dev/null
+++ b/test_expect_failure/jmh/BUILD
@@ -0,0 +1,11 @@
+load(
+    "//jmh:jmh.bzl",
+    "scala_benchmark_jmh",
+)
+
+scala_benchmark_jmh(
+    name = "jmh_reports_failure",
+    srcs = [
+        "InvalidBenchmark.scala",
+    ],
+)
diff --git a/test_expect_failure/jmh/InvalidBenchmark.scala b/test_expect_failure/jmh/InvalidBenchmark.scala
new file mode 100644
index 000000000..5a0472942
--- /dev/null
+++ b/test_expect_failure/jmh/InvalidBenchmark.scala
@@ -0,0 +1,10 @@
+package foo
+
+import org.openjdk.jmh.annotations.Benchmark
+
+// Benchmark classes cannot be final.
+final class InvalidBenchmark {
+  @Benchmark
+  def sumIntegersBenchmark: Int =
+    (1 to 100).sum
+}
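
The new behavior can be exercised directly from the command line; a minimal sketch, assuming a rules_scala checkout with this patch applied:

    # Expected to fail: InvalidBenchmark.scala declares a final class, and benchmark classes cannot be final,
    # so the JMH generator reports errors and the build now exits non-zero instead of proceeding.
    bazel build test_expect_failure/jmh:jmh_reports_failure

    # Expected to still succeed and print JMH results, as covered by test_benchmark_jmh.
    bazel run -- test/jmh:test_benchmark -i1 -f1 -wi 1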