diff --git a/Jenkinsfile b/Jenkinsfile
index bf7913ef1..7fc4a6104 100644
--- a/Jenkinsfile
+++ b/Jenkinsfile
@@ -318,11 +318,21 @@ if (env.CHANGE_ID) {
       checkout scm
       unstash 'event-formation-unit-centos7.tar.gz'
       sh "tar xzvf event-formation-unit-centos7.tar.gz"
-      sh "ls -R"
       sh """
         export LD_LIBRARY_PATH=\$LD_LIBRARY_PATH:./event-formation-unit/lib/
-        python3 -u ./integrationtest/integrationtest.py
+        python3 -u ./test/integrationtest.py
       """
     } // stage
   } // node
+  node('inttest'){
+    stage('Performance Test'){
+      checkout scm
+      unstash 'event-formation-unit-centos7.tar.gz'
+      sh "tar xzvf event-formation-unit-centos7.tar.gz"
+      sh """
+        export LD_LIBRARY_PATH=\$LD_LIBRARY_PATH:./event-formation-unit/lib/
+        python3 -u ./test/performancetest.py
+      """
+    }
+  }
 } // if
diff --git a/src/common/reduction/EventBuilder2D.cpp b/src/common/reduction/EventBuilder2D.cpp
index 0e6d874b6..15cfb4a71 100644
--- a/src/common/reduction/EventBuilder2D.cpp
+++ b/src/common/reduction/EventBuilder2D.cpp
@@ -17,7 +17,6 @@
 // #define TRC_LEVEL TRC_L_INF
 
-
 EventBuilder2D::EventBuilder2D() { matcher.set_minimum_time_gap(timegap); }
 
 void EventBuilder2D::insert(Hit hit) {
@@ -33,29 +32,38 @@ void EventBuilder2D::insert(Hit hit) {
   }
 }
 
-void EventBuilder2D::flush() {
+void EventBuilder2D::flush(bool full_flush) {
   matcher.matched_events.clear();
 
   sort_chronologically(HitsX);
-  ClustersX.cluster(HitsX);
-  ClustersX.flush();
+  ClustererX.cluster(HitsX);
 
   sort_chronologically(HitsY);
-  ClustersY.cluster(HitsY);
-  ClustersY.flush();
+  ClustererY.cluster(HitsY);
+
+  if(full_flush){
+    flushClusterers();
+  }
 
-  matcher.insert(PlaneX, ClustersX.clusters);
-  matcher.insert(PlaneY, ClustersY.clusters);
-  matcher.match(true);
+  matcher.insert(PlaneX, ClustererX.clusters);
+  matcher.insert(PlaneY, ClustererY.clusters);
+  matcher.match(full_flush);
 
   auto &e = matcher.matched_events;
   Events.insert(Events.end(), e.begin(), e.end());
 
-  clear();
+  clearHits();
 }
 
-void EventBuilder2D::clear() {
+void EventBuilder2D::clearHits() {
   HitsX.clear();
   HitsY.clear();
 }
 
+void EventBuilder2D::flushClusterers() {
+  ClustererX.flush();
+  ClustererY.flush();
+}
+
+
+
diff --git a/src/common/reduction/EventBuilder2D.h b/src/common/reduction/EventBuilder2D.h
index f75f705a0..d9cd86558 100644
--- a/src/common/reduction/EventBuilder2D.h
+++ b/src/common/reduction/EventBuilder2D.h
@@ -28,16 +28,18 @@ class EventBuilder2D {
   // \todo pass by rvalue?
   void insert(Hit hit);
 
-  void flush();
+  void flush(bool full_flush = false);
 
-  void clear();
+  void clearHits();
+
+  void flushClusterers();
 
   void setTimeBox(uint32_t TimeBoxValue) { TimeBoxSize = TimeBoxValue; }
 
   HitVector HitsX, HitsY;
 
   // \todo parametrize
-  GapClusterer ClustersX{timegap, coordgap}, ClustersY{timegap, coordgap};
+  GapClusterer ClustererX{timegap, coordgap}, ClustererY{timegap, coordgap};
 
   // \todo parametrize
   GapMatcher matcher{latency, PlaneX, PlaneY};
diff --git a/src/common/reduction/HitVector.h b/src/common/reduction/HitVector.h
index 6d407862d..0388411dc 100644
--- a/src/common/reduction/HitVector.h
+++ b/src/common/reduction/HitVector.h
@@ -126,6 +126,7 @@ template > class MyVector {
     Vec.clear();
     reserve(MinReserveCount);
   }
+  void resize(size_type sz) { Vec.resize(sz); }
   void resize(size_type sz, const value_type &c) { Vec.resize(sz, c); }
 };
diff --git a/src/common/reduction/test/CMakeLists.txt b/src/common/reduction/test/CMakeLists.txt
index 621240680..abdbe3f3e 100644
--- a/src/common/reduction/test/CMakeLists.txt
+++ b/src/common/reduction/test/CMakeLists.txt
@@ -27,3 +27,8 @@ set(ChronoMergerTest_SRC
   ChronoMergerTest.cpp
   )
 create_test_executable(ChronoMergerTest)
+
+set(HitVectorBenchmark_SRC
+  HitVectorBenchmark.cpp
+  )
+ create_benchmark_executable(HitVectorBenchmark)
\ No newline at end of file
diff --git a/src/common/reduction/test/HitVectorBenchmark.cpp b/src/common/reduction/test/HitVectorBenchmark.cpp
new file mode 100644
index 000000000..2737b8c05
--- /dev/null
+++ b/src/common/reduction/test/HitVectorBenchmark.cpp
@@ -0,0 +1,16 @@
+#include <benchmark/benchmark.h>
+#include <common/reduction/HitVector.h>
+
+
+static void BM_pushback_hits(benchmark::State &state){
+  HitVector hits;
+  for (auto _ : state){
+    Hit hit;
+    hit.coordinate = 50;
+    hit.weight = 50;
+    hit.time = 50;
+    hits.push_back(hit);
+  }
+}
+BENCHMARK(BM_pushback_hits);
+BENCHMARK_MAIN();
diff --git a/src/modules/cspec/CSPECBase.cpp b/src/modules/cspec/CSPECBase.cpp
index 58a253fe2..5511ab4a1 100644
--- a/src/modules/cspec/CSPECBase.cpp
+++ b/src/modules/cspec/CSPECBase.cpp
@@ -85,7 +85,7 @@ CSPECBase::CSPECBase(BaseSettings const &settings,
   Stats.create("readouts.tof_neg", Counters.TimeStats.TofNegative);
   Stats.create("readouts.prevtof_count", Counters.TimeStats.PrevTofCount);
   Stats.create("readouts.prevtof_neg", Counters.TimeStats.PrevTofNegative);
-  Stats.create("readouts.tof_toolarge", Counters.TOFErrors);
+  Stats.create("readouts.tof_toolarge", Counters.TOFErrors); // move this to events.tof_toolarge
 
   // Clustering stats
diff --git a/src/modules/cspec/test/CSPECInstrumentTest.cpp b/src/modules/cspec/test/CSPECInstrumentTest.cpp
index 731926603..58c8718c1 100644
--- a/src/modules/cspec/test/CSPECInstrumentTest.cpp
+++ b/src/modules/cspec/test/CSPECInstrumentTest.cpp
@@ -142,6 +142,34 @@ std::vector<uint8_t> GoodEvent {
 };
 
 
+
+std::vector<uint8_t> SplitEventA {
+  // First readout - plane Y - Grids
+  0x00, 0x01, 0x14, 0x00, // Data Header - Ring 0, FEN 1
+  0x00, 0x00, 0x00, 0x00, // Time HI 0 s
+  0x01, 0x00, 0x00, 0x00, // Time LO 1 tick
+  0x00, 0x00, 0x00, 0x01, // ADC 0x100
+  0x00, 0x00, 0x02, 0x3C, // GEO 0, TDC 0, VMM 1, CH 60
+
+};
+
+std::vector<uint8_t> SplitEventB {
+  // Second readout - plane Y - Grids
+  0x00, 0x01, 0x14, 0x00, // Data Header - Ring 0, FEN 1
+  0x00, 0x00, 0x00, 0x00, // Time HI 0 s
+  0x02, 0x00, 0x00, 0x00, // Time LO 2 tick
+  0x00, 0x00, 0x00, 0x01, // ADC 0x100
+  0x00, 0x00, 0x02, 0x3D, // GEO 0, TDC 0, VMM 1, CH 61
+
+  // Third readout - plane X & Z - Wires
+  0x00, 0x01, 0x14, 0x00, // Data Header, Ring 0, FEN 1
+  0x00, 0x00, 0x00, 0x00, // Time HI 0 s
+  0x05, 0x00, 0x00, 0x00, // Time LO 5 ticks
+  0x00, 0x00, 0x00, 0x01, // ADC 0x100
+  0x00, 0x00, 0x00, 0x3C, // GEO 0, TDC 0, VMM 0, CH 60
+
+};
+
 std::vector<uint8_t> BadEventMultipleWires {
   // First readout - plane Y - Grids
   0x00, 0x01, 0x14, 0x00, // Data Header - Ring 0, FEN 1
@@ -441,7 +469,7 @@ TEST_F(CSPECInstrumentTest, GoodEvent) {
   ASSERT_EQ(counters.VMMStats.Readouts, 3);
 
   for (auto &builder : cspec->builders) {
-    builder.flush();
+    builder.flush(true);
     cspec->generateEvents(builder.Events);
   }
   ASSERT_EQ(counters.Events, 1);
@@ -465,7 +493,7 @@ TEST_F(CSPECInstrumentTest, BadMappingError) {
   ASSERT_EQ(counters.VMMStats.Readouts, 2);
 
   for (auto &builder : cspec->builders) {
-    builder.flush();
+    builder.flush(true);
     cspec->generateEvents(builder.Events);
   }
   ASSERT_EQ(counters.Events, 0);
@@ -511,7 +539,7 @@ TEST_F(CSPECInstrumentTest, NoEventGridOnly) {
   ASSERT_EQ(counters.VMMStats.Readouts, 2);
 
   for (auto &builder : cspec->builders) {
-    builder.flush();
+    builder.flush(true);
     cspec->generateEvents(builder.Events);
   }
   ASSERT_EQ(counters.Events, 0);
@@ -536,7 +564,7 @@ TEST_F(CSPECInstrumentTest, NoEventWireOnly) {
   ASSERT_EQ(counters.VMMStats.Readouts, 2);
 
   for (auto &builder : cspec->builders) {
-    builder.flush();
+    builder.flush(true);
     cspec->generateEvents(builder.Events);
   }
   ASSERT_EQ(counters.Events, 0);
@@ -576,7 +604,7 @@ TEST_F(CSPECInstrumentTest, BadEventLargeGridSpan) {
   ASSERT_EQ(counters.VMMStats.Readouts, 5);
 
   for (auto &builder : cspec->builders) {
-    builder.flush();
+    builder.flush(true);
     cspec->generateEvents(builder.Events);
   }
   ASSERT_EQ(counters.Events, 0);
@@ -593,7 +621,7 @@ TEST_F(CSPECInstrumentTest, NegativeTOF) {
   cspec->processReadouts();
 
   for (auto &builder : cspec->builders) {
-    builder.flush();
+    builder.flush(true);
     cspec->generateEvents(builder.Events);
   }
 
@@ -611,7 +639,7 @@ TEST_F(CSPECInstrumentTest, HighTOFError) {
   cspec->processReadouts();
 
   for (auto &builder : cspec->builders) {
-    builder.flush();
+    builder.flush(true);
     cspec->generateEvents(builder.Events);
   }
 
@@ -629,7 +657,7 @@ TEST_F(CSPECInstrumentTest, BadEventLargeTimeSpan) {
   cspec->processReadouts();
 
   for (auto &builder : cspec->builders) {
-    builder.flush();
+    builder.flush(true);
     cspec->generateEvents(builder.Events);
   }
 
@@ -639,6 +667,35 @@
   ASSERT_EQ(counters.ClustersNoCoincidence, 1);
 }
 
+TEST_F(CSPECInstrumentTest, EventCrossPackets) {
+  makeHeader(cspec->ESSReadoutParser.Packet, SplitEventA);
+
+  auto Res = cspec->VMMParser.parse(cspec->ESSReadoutParser.Packet);
+  counters.VMMStats = cspec->VMMParser.Stats;
+
+  cspec->processReadouts();
+  for (auto &builder : cspec->builders) {
+    builder.flush();
+    cspec->generateEvents(builder.Events);
+  }
+  ASSERT_EQ(Res, 1);
+
+  makeHeader(cspec->ESSReadoutParser.Packet, SplitEventB);
+
+  Res = cspec->VMMParser.parse(cspec->ESSReadoutParser.Packet);
+  counters.VMMStats = cspec->VMMParser.Stats;
+
+  cspec->processReadouts();
+  for (auto &builder : cspec->builders) {
+    builder.flush(true);
+    cspec->generateEvents(builder.Events);
+  }
+
+  ASSERT_EQ(Res, 2);
+  ASSERT_EQ(counters.VMMStats.Readouts, 3);
+  ASSERT_EQ(counters.Events, 1);
+}
+
 int main(int argc, char **argv) {
   saveBuffer(ConfigFile, (void *)ConfigStr.c_str(), ConfigStr.size());
   saveBuffer(BadConfigFile, (void *)BadConfigStr.c_str(), BadConfigStr.size());
diff --git a/src/modules/freia/FreiaInstrument.cpp b/src/modules/freia/FreiaInstrument.cpp
index f597ed988..c8cf43e85 100644
--- a/src/modules/freia/FreiaInstrument.cpp
+++ b/src/modules/freia/FreiaInstrument.cpp
@@ -187,9 +187,10 @@ void FreiaInstrument::processReadouts(void) {
 
 void FreiaInstrument::generateEvents(std::vector<Event> &Events) {
   ESSReadout::ESSTime &TimeRef = ESSReadoutParser.Packet.Time;
-
+  XTRACE(EVENT, DEB, "Number of events: %u", Events.size());
   for (const auto &e : Events) {
     if (e.empty()) {
+      XTRACE(EVENT, DEB, "Empty event");
       continue;
     }
diff --git a/src/modules/freia/test/FreiaInstrumentTest.cpp b/src/modules/freia/test/FreiaInstrumentTest.cpp
index 77b602578..a272ccb33 100644
--- a/src/modules/freia/test/FreiaInstrumentTest.cpp
+++ b/src/modules/freia/test/FreiaInstrumentTest.cpp
@@ -283,7 +283,7 @@ TEST_F(FreiaInstrumentTest, EventTOFError) {
   freia->processReadouts();
 
   for (auto &builder : freia->builders) {
-    builder.flush();
+    builder.flush(true);
     freia->generateEvents(builder.Events);
   }
   ASSERT_EQ(Res, 2);
diff --git a/integrationtest/integrationtest.json b/test/integrationtest.json
similarity index 100%
rename from integrationtest/integrationtest.json
rename to test/integrationtest.json
diff --git a/integrationtest/integrationtest.py b/test/integrationtest.py
similarity index 64%
rename from integrationtest/integrationtest.py
rename to test/integrationtest.py
index 1bf0c1799..a0f4d9cab 100644
--- a/integrationtest/integrationtest.py
+++ b/test/integrationtest.py
@@ -3,6 +3,7 @@ import time
 import sys
 import os
+from testutils import run_efu, run_data_generator, get_metrics
 
 sys.path.insert(0, os.path.dirname(os.path.dirname(os.path.realpath(__file__))))
 from utils.efushell.EFUMetrics import Metrics
@@ -16,49 +17,9 @@ def compare_pair(x, y, operator):
         raise Exception(f"invalid operator {operator}")
 
 
-def run_efu(test, efu):
-    print("Running EFU")
-    efu_command = f"{efu}/bin/efu --det {efu}/modules/{test['Module']}.so --nohwcheck --file {test['Config']} --region 0 --graphite 127.0.0.1"
-    print(efu_command)
-    efu_process = subprocess.Popen(
-        f"exec {efu_command}",
-        shell=True,
-        stdout=subprocess.PIPE,
-        stderr=subprocess.PIPE,
-    )
-
-    # waiting for EFU start up to finish (estimated 5 seconds is enough time)
-    # checking if the EFU is still running after start up
-    time.sleep(5)
-    poll = efu_process.poll()
-    if poll is not None:
-        out, errs = efu_process.communicate()
-        print(out)
-        print(errs)
-        raise Exception(f"Running efu with command {efu_command} failed")
-    return efu_process
-
-
-def run_data_generator(test, efu):
-    print("Running Data Generator")
-    generator_process = subprocess.Popen(
-        f"{efu}/generators/{test['Generator']} -a {test['Packets']} -o 50 -t {test['Throttle']}",
-        shell=True,
-    )
-    exit_code = generator_process.wait()
-    if exit_code != 0:
-        raise Exception(f"Artififial generator {test['Generator']} failed")
-    return generator_process
-
-
 def check_stats(test):
     print("Getting EFU Stats")
-    try:
-        metrics = Metrics("127.0.0.1", 8888)
-        metrics.get_all_metrics(metrics.get_number_of_stats())
-    except:
-        # socket errors can happen when attempting to get metrics if grafana connection is hanging
-        raise Exception("failed to get metrics")
+    metrics = get_metrics()
 
     for stats_test in test['StatsTestList']:
         actual_stat = metrics.return_metric(f"efu.{test['Module']}.0.{stats_test[0]}")
@@ -80,6 +41,10 @@ def create_kafka_topic(topic_name):
+    subprocess.Popen(
+        f"/ess/ecdc/kafka/kafka_2.13-2.8.0/bin/kafka-topics.sh --bootstrap-server localhost:9092 --delete --topic {topic_name}",
+        shell=True,
+    ).wait()
     subprocess.Popen(
         f"/ess/ecdc/kafka/kafka_2.13-2.8.0/bin/kafka-topics.sh --bootstrap-server localhost:9092 --create --topic {topic_name}",
         shell=True,
     ).wait()
@@ -116,16 +81,16 @@ def check_kafka(test):
 
 def run_tests():
     efu = "./event-formation-unit"
-    with open('./integrationtest/integrationtest.json') as f:
+    with open('./test/integrationtest.json') as f:
         data = json.load(f)
 
     for test in data['Tests']:
         create_kafka_topic(test['KafkaTopic'])
-        efu_process = run_efu(test, efu)
+        efu_process = run_efu(efu, test['Module'], test['Config'])
         try:
-            run_data_generator(test, efu)
+            run_data_generator(efu, test['Generator'], test['Packets'], test['Throttle'])
             time.sleep(1)
-            run_data_generator(test, efu)
+            run_data_generator(efu, test['Generator'], test['Packets'], test['Throttle'])
             check_stats(test)
             check_kafka(test)
             efu_process.kill()
diff --git a/test/performancetest.json b/test/performancetest.json
new file mode 100644
index 000000000..f6d03cfeb
--- /dev/null
+++ b/test/performancetest.json
@@ -0,0 +1,17 @@
+{
+  "Tests":
+  [
+    {
+      "Module": "loki",
+      "Config": "./src/modules/loki/configs/STFCTestII.json",
+      "Generator": "loki_udp_generated",
+      "InitThrottle": 1
+    },
+    {
+      "Module": "cspec",
+      "Config": "./src/modules/cspec/configs/let.json",
+      "Generator": "let_udp_generated",
+      "InitThrottle": 1
+    }
+  ]
+}
diff --git a/test/performancetest.py b/test/performancetest.py
new file mode 100644
index 000000000..947990a9d
--- /dev/null
+++ b/test/performancetest.py
@@ -0,0 +1,88 @@
+import json
+import subprocess
+import time
+import sys
+import os
+from testutils import run_efu, run_data_generator_timed, get_metrics
+
+sys.path.insert(0, os.path.dirname(os.path.dirname(os.path.realpath(__file__))))
+from utils.efushell.EFUMetrics import Metrics
+
+
+def get_stat(stat_name):
+    return get_stats([stat_name])[stat_name]
+
+
+def get_stats(stat_name_list):
+    metrics = get_metrics()
+    stats = {}
+    for stat_name in stat_name_list:
+        stats[stat_name] = metrics.return_metric(stat_name)
+    return stats
+
+
+def efu_drop_packets_check(efu, generator, module, throttle):
+    time_s = 30
+    readouts_per_packet = 100
+    run_data_generator_timed(efu, generator, throttle, readouts_per_packet, time_s)
+    time.sleep(5)
+    new_packets_dropped = get_stat(f"efu.{module}.0.receive.dropped")
+    return new_packets_dropped
+
+
+def assess_performance(efu, generator, module, throttle):
+    time_s = 30
+    readouts_per_packet = 100
+    stat_name_list = [f"efu.{module}.0.receive.packets", f"efu.{module}.0.readouts.count", f"efu.{module}.0.events.count"]
+    stats = get_stats(stat_name_list)
+    run_data_generator_timed(efu, generator, throttle, readouts_per_packet, time_s)
+    new_stats = get_stats(stat_name_list)
+    for stat_name in stat_name_list:
+        stats[stat_name] = (new_stats[stat_name] - stats[stat_name]) / time_s
+    print(f"the following stats are per second, calculated over {time_s} seconds")
+    print(stats)
+
+
+def bisect_throttle_settings(efu, generator, module, throttle):
+    packets_dropped = 0
+    min_throttle = 0
+    max_throttle = 1000
+    for x in range(10):
+        new_packets_dropped = efu_drop_packets_check(efu, generator, module, throttle)
+        if new_packets_dropped == packets_dropped:
+            print(f"Passed at throttle: {throttle}")
+            max_throttle = throttle
+            throttle = (throttle + min_throttle) / 2
+        else:
+            print(f"Failed at throttle: {throttle}")
+            min_throttle = throttle
+            throttle = (throttle + max_throttle) / 2
+        packets_dropped = new_packets_dropped
+        if (max_throttle - min_throttle) < 1:
+            break
+        print(f"New throttle to test: {throttle}")
+        print(f"Min throttle: {min_throttle}, Max throttle: {max_throttle}")
+        print(f"Packets dropped: {packets_dropped}")
+    print(f"Best throttle achieved: {max_throttle}")
+    best_passing_throttle = max_throttle
+    return best_passing_throttle
+
+
+def run_performance_test():
+    efu = "./event-formation-unit"
+
+    with open('./test/performancetest.json') as f:
+        data = json.load(f)
+
+    for test in data['Tests']:
+        efu_process = run_efu(efu, test['Module'], test['Config'])
+        try:
+            best_throttle = bisect_throttle_settings(efu, test['Generator'], test['Module'], test['InitThrottle'])
+            assess_performance(efu, test['Generator'], test['Module'], best_throttle)
+        except:
+            efu_process.kill()
+            raise
+        efu_process.kill()
+        time.sleep(5)
+
+
+if __name__ == "__main__":
+    run_performance_test()
\ No newline at end of file
diff --git a/test/testutils.py b/test/testutils.py
new file mode 100644
index 000000000..104418d58
--- /dev/null
+++ b/test/testutils.py
@@ -0,0 +1,62 @@
+import subprocess
+import time
+import sys
+import os
+
+sys.path.insert(0, os.path.dirname(os.path.dirname(os.path.realpath(__file__))))
+from utils.efushell.EFUMetrics import Metrics
+
+
+def run_efu(efu, module, config):
+    print("Running EFU")
+    efu_command = f"{efu}/bin/efu --det {efu}/modules/{module}.so --nohwcheck --file {config} --region 0 --graphite 127.0.0.1"
+    print(efu_command)
+    efu_process = subprocess.Popen(
+        f"exec {efu_command}",
+        shell=True,
+        stdout=subprocess.PIPE,
+        stderr=subprocess.PIPE,
+    )
+
+    # waiting for EFU start up to finish (estimated 5 seconds is enough time)
+    # checking if the EFU is still running after start up
+    time.sleep(5)
+    poll = efu_process.poll()
+    if poll is not None:
+        out, errs = efu_process.communicate()
+        print(out)
+        print(errs)
+        raise Exception(f"Running efu with command {efu_command} failed")
+    return efu_process
+
+
+def run_data_generator(efu, generator, packets, throttle):
+    print("Running Data Generator")
+    generator_process = subprocess.Popen(
+        f"{efu}/generators/{generator} -a {packets} -o 50 -t {int(throttle)}",
+        shell=True,
+    )
+    exit_code = generator_process.wait()
+    if exit_code != 0:
+        raise Exception(f"Artificial generator {generator} failed")
+    return generator_process
+
+
+def run_data_generator_timed(efu, generator, throttle, readouts_per_packet, time_s):
+    print("Running Data Generator")
+    generator_process = subprocess.Popen(
+        f"{efu}/generators/{generator} -l -o {readouts_per_packet} -t {int(throttle)}",
+        shell=True,
+    )
+    time.sleep(time_s)
+    generator_process.kill()
+    return generator_process
+
+
+def get_metrics():
+    print("Getting EFU Stats")
+    try:
+        metrics = Metrics("127.0.0.1", 8888)
+        metrics.get_all_metrics(metrics.get_number_of_stats())
+        return metrics
+    except:
+        # socket errors can happen when attempting to get metrics if grafana connection is hanging
+        print("failed to get metrics")
+        raise
diff --git a/utils/efushell/efustatstocsv.py b/utils/efushell/efustatstocsv.py
new file mode 100755
index 000000000..79cfaa419
--- /dev/null
+++ b/utils/efushell/efustatstocsv.py
@@ -0,0 +1,37 @@
+#!/usr/bin/python
+
+from EFUMetrics import Metrics
+import argparse
+import csv
+
+parser = argparse.ArgumentParser()
+parser.add_argument("-i", metavar='ipaddr', help = "server ip address (default 127.0.0.1)",
+    type = str, default = "127.0.0.1")
+parser.add_argument("-p", metavar='port', help = "server tcp port (default 8888)",
+    type = int, default = 8888)
+parser.add_argument("-v", help = "Dump stats in format suitable for verifymetrics.py", action = 'store_true')
+parser.add_argument("-f", type=str, default="efustats.csv", help="Filename to save csv file under")
+
+args = parser.parse_args()
+
+print("")
+metrics = Metrics(args.i, args.p)
+
+res = metrics._get_efu_command("STAT_GET_COUNT")
+numstats = int(res.split()[1])
+print("Available stats ({}):".format(numstats))
+verify = ""
+stats_dict = {}
+for statnum in range(1, numstats + 1):
+    res = metrics._get_efu_command("STAT_GET " + str(statnum))
+    verify = verify + str(res.split()[1]) + ":" + str(res.split()[2]) + " "
+    stats_dict[res.split()[1].decode('utf-8')] = res.split()[2].decode('utf-8')
+
+print(stats_dict)
+with open(args.f, 'w') as csvfile:
+    for key in stats_dict.keys():
+        csvfile.write("%s, %s\n" % (key, stats_dict[key]))
+
+if args.v:
+    print(verify)
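
Reviewer note (illustrative sketch, not part of the patch): the behavioural core of this change is the new EventBuilder2D::flush(bool full_flush) contract that the EventCrossPackets test above exercises -- a partial flush between packets keeps open clusters alive in the clusterers, and a full flush at end of data forces everything out. Below is a minimal caller sketch; the include path, the processPacket() function and the endOfData flag are assumptions, and only insert(), flush(), flush(true) and Events come from the code in this diff.

// Sketch only: EventBuilder2D, Hit and Events come from the patch above;
// processPacket() and endOfData are hypothetical caller-side names.
#include <common/reduction/EventBuilder2D.h>
#include <vector>

void processPacket(EventBuilder2D &builder, const std::vector<Hit> &packetHits,
                   bool endOfData) {
  for (const Hit &hit : packetHits) {
    builder.insert(hit); // buffer the hits from this packet
  }
  // Partial flush: clusters what is buffered but leaves open clusters in the
  // clusterers, so an event split across two packets can still be matched.
  builder.flush();
  if (endOfData) {
    // Full flush: also flushes the clusterers and tells the matcher to emit
    // everything that is still pending.
    builder.flush(true);
  }
  // builder.Events now holds the matched events produced so far.
}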