From d70c0d6838716a9ac72dc183a7b0e2ae1007c535 Mon Sep 17 00:00:00 2001 From: anderkve Date: Tue, 9 Dec 2025 15:15:01 +0000 Subject: [PATCH 01/10] =?UTF-8?q?=F0=9F=94=84=20synced=20local=20'cmake/'?= =?UTF-8?q?=20with=20remote=20'cmake/'?= MIME-Version: 1.0 Content-Type: text/plain; charset=UTF-8 Content-Transfer-Encoding: 8bit --- cmake/externals.cmake | 18 ------------------ cmake/python_scanners.cmake | 1 + cmake/scanners.cmake | 2 -- cmake/utilities.cmake | 14 ++++++++++---- 4 files changed, 11 insertions(+), 24 deletions(-) diff --git a/cmake/externals.cmake b/cmake/externals.cmake index 899fe52104..cb98a0913e 100644 --- a/cmake/externals.cmake +++ b/cmake/externals.cmake @@ -88,24 +88,6 @@ add_dependencies(nuke-all nuke-pippi) set_target_properties(get-pippi PROPERTIES EXCLUDE_FROM_ALL 1) -# Add get-gambit_plotting_tools target -set(name "gambit_plotting_tools") -set(dir "${CMAKE_SOURCE_DIR}/${name}") -ExternalProject_Add(get-${name} - GIT_REPOSITORY https://github.com/GambitBSM/gambit_plotting_tools.git - GIT_TAG v1.0.0 - SOURCE_DIR ${dir} - CONFIGURE_COMMAND "" - BUILD_COMMAND "" - INSTALL_COMMAND "" -) -set(rmstring "${CMAKE_BINARY_DIR}/get-${name}-prefix/src/get-${name}-stamp/get-${name}") -add_custom_target(nuke-gambit_plotting_tools COMMAND ${CMAKE_COMMAND} -E remove -f ${rmstring}-download ${rmstring}-download-failed ${rmstring}-mkdir ${rmstring}-patch ${rmstring}-update ${rmstring}-gitclone-lastrun.txt || true - COMMAND ${CMAKE_COMMAND} -E remove_directory ${dir} || true) -add_dependencies(nuke-all nuke-gambit_plotting_tools) -set_target_properties(get-gambit_plotting_tools PROPERTIES EXCLUDE_FROM_ALL 1) - - # Macro to clear the build stamp manually for an external project macro(enable_auto_rebuild package) set(rmstring "${CMAKE_BINARY_DIR}/${package}-prefix/src/${package}-stamp/${package}-build") diff --git a/cmake/python_scanners.cmake b/cmake/python_scanners.cmake index 9a6811bd08..055c5fbdf7 100644 --- a/cmake/python_scanners.cmake +++ 
b/cmake/python_scanners.cmake @@ -74,4 +74,5 @@ check_python_scanner_modules(scipy_shgo "scipy,numpy" "scipy,numpy") check_python_scanner_modules(scipy_minimize "scipy,numpy" "scipy,numpy") check_python_scanner_modules(reactive_ultranest "ultranest,numpy,packaging" "ultranest,numpy,packaging") check_python_scanner_modules(zeus "zeus,numpy" "zeus-mcmc,numpy") +check_python_scanner_modules(binminpy "binminpy,numpy,scipy,mpi4py" "binminpy,numpy,scipy,mpi4py") diff --git a/cmake/scanners.cmake b/cmake/scanners.cmake index 4f632af721..b4022e5361 100644 --- a/cmake/scanners.cmake +++ b/cmake/scanners.cmake @@ -117,7 +117,6 @@ if(NOT ditched_${name}_${ver}) endif() endif() # End if(NOT GAMBIT_LIGHT) -if(NOT GAMBIT_LIGHT) # Do not include this version in GAMBIT-light set(name "diver") set(ver "1.0.5") set(lib "libdiver") @@ -143,7 +142,6 @@ if(NOT ditched_${name}_${ver}) ) add_extra_targets("scanner" ${name} ${ver} ${dir} ${dl} clean) endif() -endif() # End if(NOT GAMBIT_LIGHT) set(name "diver") set(ver "1.3.0") diff --git a/cmake/utilities.cmake b/cmake/utilities.cmake index ff27d64232..68e55c0c44 100644 --- a/cmake/utilities.cmake +++ b/cmake/utilities.cmake @@ -704,10 +704,16 @@ macro(BOSS_backend_full name backend_version ${ARGN}) set(BOSS_includes_Eigen3 "-I${EIGEN3_INCLUDE_DIR}") endif() - if("${CMAKE_CXX_COMPILER_ID}" STREQUAL "GNU" OR "${CMAKE_CXX_COMPILER_ID}" STREQUAL "Clang") - set(BOSS_castxml_cc "--castxml-cc=${CMAKE_CXX_COMPILER}") - elseif("${CMAKE_CXX_COMPILER_ID}" STREQUAL "Intel") - set(BOSS_castxml_cc "") + # Set the BOSS castxml compiler to the cxx compiler + # If it is passed by the user, add on "castxml-cc=" for BOSS + if (NOT DEFINED BOSS_castxml_compiler) + if("${CMAKE_CXX_COMPILER_ID}" STREQUAL "GNU" OR "${CMAKE_CXX_COMPILER_ID}" STREQUAL "Clang") + set(BOSS_castxml_cc "--castxml-cc=${CMAKE_CXX_COMPILER}") + elseif("${CMAKE_CXX_COMPILER_ID}" STREQUAL "Intel") + set(BOSS_castxml_cc "") + endif() + else() + set(BOSS_castxml_cc 
"--castxml-cc=${BOSS_castxml_compiler}") endif() # Parse command line options from optional arguments From 2f51472980a16dee4a762cdf6ec0f007aed5d375 Mon Sep 17 00:00:00 2001 From: anderkve Date: Tue, 9 Dec 2025 15:15:02 +0000 Subject: [PATCH 02/10] =?UTF-8?q?=F0=9F=94=84=20synced=20local=20'Backends?= =?UTF-8?q?/include/gambit/Backends/'=20with=20remote=20'Backends/include/?= =?UTF-8?q?gambit/Backends/'?= MIME-Version: 1.0 Content-Type: text/plain; charset=UTF-8 Content-Transfer-Encoding: 8bit --- Backends/include/gambit/Backends/backend_macros.hpp | 4 +++- 1 file changed, 3 insertions(+), 1 deletion(-) diff --git a/Backends/include/gambit/Backends/backend_macros.hpp b/Backends/include/gambit/Backends/backend_macros.hpp index 8af9690710..13404d8c0c 100644 --- a/Backends/include/gambit/Backends/backend_macros.hpp +++ b/Backends/include/gambit/Backends/backend_macros.hpp @@ -284,7 +284,9 @@ namespace Gambit << "which is supposed to contain the factory for class " << std::endl \ << fixns(STRINGIFY(BARENAME) STRINGIFY(CONVERT_VARIADIC_ARG(ARGS)))<<", "< Date: Tue, 9 Dec 2025 15:15:02 +0000 Subject: [PATCH 03/10] =?UTF-8?q?=F0=9F=94=84=20synced=20local=20'Elements?= =?UTF-8?q?/'=20with=20remote=20'Elements/'?= MIME-Version: 1.0 Content-Type: text/plain; charset=UTF-8 Content-Transfer-Encoding: 8bit --- Elements/include/gambit/Elements/functors.hpp | 8 +++++++- Elements/src/functors.cpp | 10 +++++++++- 2 files changed, 16 insertions(+), 2 deletions(-) diff --git a/Elements/include/gambit/Elements/functors.hpp b/Elements/include/gambit/Elements/functors.hpp index 53d1e6b0eb..bc6b13e0c1 100644 --- a/Elements/include/gambit/Elements/functors.hpp +++ b/Elements/include/gambit/Elements/functors.hpp @@ -165,6 +165,8 @@ namespace Gambit virtual void setInUse(bool){}; /// Setter for purpose (relevant only for next-to-output functors) void setPurpose(str); + /// Setter for critical (relevant only for next-to-output functors) + void setCritical(bool); /// Setter for vertex ID (used 
in printer system) void setVertexID(int); /// Set ID for timing 'vertex' (used in printer system) @@ -197,6 +199,8 @@ namespace Gambit sspair quantity() const; /// Getter for purpose (relevant for output nodes, aka helper structures for the dep. resolution) str purpose() const; + /// Getter for critical (relevant for output nodes, aka helper structures for the dep. resolution) + bool critical() const; /// Getter for the citation key str citationKey() const; /// Getter for vertex ID @@ -410,6 +414,8 @@ namespace Gambit str myOrigin; /// Purpose of the function (relevant for output and next-to-output functors) str myPurpose; + /// critical flag of the function (relevant for output and next-to-output functors) + bool myCritical; /// Citation key: BibTex key of the reference. str myCitationKey; /// Bound model functor claw, for checking relationships between models @@ -600,7 +606,7 @@ namespace Gambit virtual std::set model_conditional_backend_reqs_exact (str model); /// Add and activate unconditional dependencies. - void setDependency(str, str, void(*)(functor*, module_functor_common*), str purpose= ""); + void setDependency(str, str, void(*)(functor*, module_functor_common*), str purpose= "", bool critical=false); /// Add conditional dependency-type pairs in advance of later conditions. 
void setConditionalDependency(str, str); diff --git a/Elements/src/functors.cpp b/Elements/src/functors.cpp index 6d2f36968c..0a3b681f80 100644 --- a/Elements/src/functors.cpp +++ b/Elements/src/functors.cpp @@ -104,6 +104,9 @@ namespace Gambit /// Setter for purpose (relevant only for next-to-output functors) void functor::setPurpose(str purpose) { myPurpose = purpose; } + /// Setter for critical flag (relevant only for next-to-output functors) + void functor::setCritical(bool critical) { myCritical = critical; } + /// Setter for vertex ID (used in printer system) void functor::setVertexID(int ID) { myVertexID = ID; } @@ -145,6 +148,8 @@ namespace Gambit sspair functor::quantity() const { return std::make_pair(myCapability, myType); } /// Getter for purpose (relevant for output nodes, aka helper structures for the dep. resolution) str functor::purpose() const { return myPurpose; } + /// Getter for critical (relevant for output nodes, aka helper structures for the dep. resolution) + bool functor::critical() const { return myCritical; } /// Getter for citation key str functor::citationKey() const { return myCitationKey; } /// Getter for vertex ID @@ -1166,12 +1171,13 @@ namespace Gambit } /// Add and activate unconditional dependencies. - void module_functor_common::setDependency(str dep, str dep_type, void(*resolver)(functor*, module_functor_common*), str purpose) + void module_functor_common::setDependency(str dep, str dep_type, void(*resolver)(functor*, module_functor_common*), str purpose, bool critical) { sspair key (dep, Utils::fix_type(dep_type)); myDependencies.insert(key); dependency_map[key] = resolver; this->myPurpose = purpose; // only relevant for output nodes + this->myCritical = critical; // only relevant for output nodes } /// Add conditional dependency-type pairs in advance of later conditions. 
@@ -1529,6 +1535,8 @@ namespace Gambit if (dependency_map.find(key) != dependency_map.end()) (*dependency_map[key])(dep_functor,this); // propagate purpose from next to next-to-output nodes dep_functor->setPurpose(this->myPurpose); + // propagate critical from next to next-to-output nodes + dep_functor->setCritical(this->myCritical); // propagate this functor's dependees and subcaps on to the resolving functor dep_functor->notifyOfDependee(this); // save the pointer to the resolving functor to allow this functor to notify it of future dependees From b3f487bcb1c6bc81b5d3802770b4a3f7648fc3de Mon Sep 17 00:00:00 2001 From: anderkve Date: Tue, 9 Dec 2025 15:15:02 +0000 Subject: [PATCH 04/10] =?UTF-8?q?=F0=9F=94=84=20synced=20local=20'Core/'?= =?UTF-8?q?=20with=20remote=20'Core/'?= MIME-Version: 1.0 Content-Type: text/plain; charset=UTF-8 Content-Transfer-Encoding: 8bit --- Core/include/gambit/Core/depresolver.hpp | 6 ++++++ Core/include/gambit/Core/observable.hpp | 4 ++++ Core/src/depresolver.cpp | 22 ++++++++++++++++++---- Core/src/likelihood_container.cpp | 9 +++++++-- Core/src/yaml_parser.cpp | 1 + 5 files changed, 36 insertions(+), 6 deletions(-) diff --git a/Core/include/gambit/Core/depresolver.hpp b/Core/include/gambit/Core/depresolver.hpp index 351cf906c2..2aa56aab6a 100644 --- a/Core/include/gambit/Core/depresolver.hpp +++ b/Core/include/gambit/Core/depresolver.hpp @@ -77,6 +77,7 @@ namespace Gambit { VertexID vertex; str purpose; + bool critical; }; /// Information in resolution queue @@ -88,6 +89,7 @@ namespace Gambit VertexID toVertex; int dependency_type; bool printme; + bool critical; const Observable* obslike; }; @@ -163,6 +165,10 @@ namespace Gambit /// Non-null only if the functor corresponds to an ObsLike entry in the ini file. const str& getPurpose(VertexID); + /// Returns whether a given functor is critical + /// True only if the functor corresponds to a critical ObsLike entry in the ini file. 
+ bool getCritical(VertexID); + /// Tell functor that it invalidated the current point in model space (due to a large or NaN contribution to lnL) void invalidatePointAt(VertexID, bool); diff --git a/Core/include/gambit/Core/observable.hpp b/Core/include/gambit/Core/observable.hpp index 57d188ffbd..5efee8eb6d 100644 --- a/Core/include/gambit/Core/observable.hpp +++ b/Core/include/gambit/Core/observable.hpp @@ -65,6 +65,9 @@ namespace Gambit /// Instruction to printer as to whether to write result to disk. bool printme; + /// Allowed to invalidate a point. + bool critical; + /// Whether or not to log matches to the observable with functors. bool log_matches; @@ -100,6 +103,7 @@ namespace Gambit functionChain(), subcaps(), printme(true), + critical(false), log_matches(true), include_all(false) {} diff --git a/Core/src/depresolver.cpp b/Core/src/depresolver.cpp index a39bc18898..9d67e81722 100644 --- a/Core/src/depresolver.cpp +++ b/Core/src/depresolver.cpp @@ -256,12 +256,13 @@ namespace Gambit for (const Observable& obslike : obslikes) { // Format output - logger() << LogTags::dependency_resolver << endl << obslike.capability << " (" << obslike.type << ") [" << obslike.purpose << "]"; + logger() << LogTags::dependency_resolver << endl << obslike.capability << " (" << obslike.type << ") [" << obslike.purpose << "] critical:" << obslike.critical << ""; QueueEntry target; target.quantity.first = obslike.capability; target.quantity.second = obslike.type; target.obslike = &obslike; target.printme = obslike.printme; + target.critical = obslike.critical; resolutionQueue.push(target); } logger() << EOM; @@ -333,11 +334,11 @@ namespace Gambit makeFunctorsModelCompatible(); graph_traits::vertex_iterator vi, vi_end; - const str formatString = "%-20s %-32s %-32s %-32s %-15s %-7i %-5i %-5i\n"; + const str formatString = "%-20s %-32s %-32s %-32s %-15s %-5i %-7i %-5i %-5i\n"; logger() << LogTags::dependency_resolver << endl << "Vertices registered in masterGraph" << endl; logger() 
<< "----------------------------------" << endl; logger() << boost::format(formatString)% - "MODULE"% "FUNCTION"% "CAPABILITY"% "TYPE"% "PURPOSE"% "STATUS"% "#DEPs"% "#BE_REQs"; + "MODULE"% "FUNCTION"% "CAPABILITY"% "TYPE"% "PURPOSE"% "CRITICAL"% "STATUS"% "#DEPs"% "#BE_REQs"; for (std::tie(vi, vi_end) = vertices(masterGraph); vi != vi_end; ++vi) { logger() << boost::format(formatString)% @@ -346,6 +347,7 @@ namespace Gambit (*masterGraph[*vi]).capability()% (*masterGraph[*vi]).type()% (*masterGraph[*vi]).purpose()% + (*masterGraph[*vi]).critical()% (*masterGraph[*vi]).status()% (*masterGraph[*vi]).dependencies().size()% (*masterGraph[*vi]).backendreqs().size(); @@ -707,6 +709,17 @@ namespace Gambit return none; } + /// Return whether a given functor is critical. + bool DependencyResolver::getCritical(VertexID v) + { + for (const OutputVertex& ov : outputVertices) + { + if (ov.vertex == v) return ov.critical; + } + /// critical can safely be false if the functor does not correspond to an ObsLike entry in the ini file. 
+ return false; + } + /// Tell functor that it invalidated the current point in model space (due to a large or NaN contribution to lnL) void DependencyResolver::invalidatePointAt(VertexID vertex, bool isnan) { @@ -1569,7 +1582,8 @@ namespace Gambit else // if output vertex { outVertex.vertex = fromVertex; - outVertex.purpose = entry.obslike->purpose;; + outVertex.purpose = entry.obslike->purpose; + outVertex.critical = entry.obslike->critical; outputVertices.push_back(outVertex); // Don't need subcaps during dry-run if (not boundCore->show_runorder) diff --git a/Core/src/likelihood_container.cpp b/Core/src/likelihood_container.cpp index ffdfc4805d..689969f2b6 100644 --- a/Core/src/likelihood_container.cpp +++ b/Core/src/likelihood_container.cpp @@ -91,11 +91,11 @@ namespace Gambit // Set the ScanID set_scanID(); - // Find subset of vertices that match requested purpose + // Find subset of vertices that match requested purpose, or are critical observables auto all_vertices = dependencyResolver.getObsLikeOrder(); for (auto it = all_vertices.begin(); it != all_vertices.end(); ++it) { - if (dependencyResolver.getPurpose(*it) == purpose) + if (dependencyResolver.getPurpose(*it) == purpose || dependencyResolver.getCritical(*it) == true) { return_types[*it] = dependencyResolver.checkTypeMatch(*it, purpose, allowed_types_for_purpose); target_vertices.push_back(std::move(*it)); @@ -279,6 +279,11 @@ namespace Gambit lnlike += *jt; } } + else if (dependencyResolver.getCritical(*it) == true) + { + // Don't throw an error if the target vertex is a critical obslike, + // but don't add it to the total LogLike + } else core_error().raise(LOCAL_INFO, "Unexpected target functor type."); // Print debug info diff --git a/Core/src/yaml_parser.cpp b/Core/src/yaml_parser.cpp index d948f4e71d..fa9ffb2c4e 100644 --- a/Core/src/yaml_parser.cpp +++ b/Core/src/yaml_parser.cpp @@ -212,6 +212,7 @@ namespace YAML const std::string key = entry.first.as(); if (key == "purpose") rhs.purpose = 
entry.second.as(); else if (key == "capability") rhs.capability = entry.second.as(); + else if (key == "critical") rhs.critical = entry.second.as(); else if (key == "type") rhs.type = entry.second.as(); else if (key == "function") rhs.function = entry.second.as(); else if (key == "module") rhs.module = entry.second.as(); From 67e09ea948e5eee5a0c67734accc021e12a3ed2b Mon Sep 17 00:00:00 2001 From: anderkve Date: Tue, 9 Dec 2025 15:15:02 +0000 Subject: [PATCH 05/10] =?UTF-8?q?=F0=9F=94=84=20synced=20local=20'Logs/'?= =?UTF-8?q?=20with=20remote=20'Logs/'?= MIME-Version: 1.0 Content-Type: text/plain; charset=UTF-8 Content-Transfer-Encoding: 8bit --- Logs/src/logging.cpp | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/Logs/src/logging.cpp b/Logs/src/logging.cpp index 3c6141acf1..98fa31c4ff 100644 --- a/Logs/src/logging.cpp +++ b/Logs/src/logging.cpp @@ -194,7 +194,7 @@ namespace Gambit //std::cout<<"Sorting tags..."<::iterator tag = mail.tags.begin(); tag != mail.tags.end(); ++tag) { - // Debugging crap... to be deleted. + // Debugging stuff... to be deleted. 
// std::cout<<"Sorting tag "<::iterator tag2 = components().begin(); tag2 != components().end(); ++tag2) From 5559fc91d9a9f9a0984d8e1d7b66511372955f5c Mon Sep 17 00:00:00 2001 From: anderkve Date: Tue, 9 Dec 2025 15:15:02 +0000 Subject: [PATCH 06/10] =?UTF-8?q?=F0=9F=94=84=20synced=20local=20'Printers?= =?UTF-8?q?/'=20with=20remote=20'Printers/'?= MIME-Version: 1.0 Content-Type: text/plain; charset=UTF-8 Content-Transfer-Encoding: 8bit --- .../include/gambit/Printers/printers/hdf5printer/hdf5tools.hpp | 2 +- Printers/src/printers/hdf5printer/hdf5tools.cpp | 2 +- 2 files changed, 2 insertions(+), 2 deletions(-) diff --git a/Printers/include/gambit/Printers/printers/hdf5printer/hdf5tools.hpp b/Printers/include/gambit/Printers/printers/hdf5printer/hdf5tools.hpp index bebd197742..4bae259766 100644 --- a/Printers/include/gambit/Printers/printers/hdf5printer/hdf5tools.hpp +++ b/Printers/include/gambit/Printers/printers/hdf5printer/hdf5tools.hpp @@ -6,7 +6,7 @@ /// HDF5 databases. /// /// Currently I am using the C++ bindings for -/// HDF5, however they are a bit crap and it may +/// HDF5, however they not great and it may /// be better to just write our own. /// /// ********************************************* diff --git a/Printers/src/printers/hdf5printer/hdf5tools.cpp b/Printers/src/printers/hdf5printer/hdf5tools.cpp index bc35ede738..346e4ebfa6 100644 --- a/Printers/src/printers/hdf5printer/hdf5tools.cpp +++ b/Printers/src/printers/hdf5printer/hdf5tools.cpp @@ -6,7 +6,7 @@ /// HDF5 databases. /// /// Currently I am using the C++ bindings for -/// HDF5, however they are a bit crap and it may +/// HDF5, however they not great and it may /// be better to just write our own. 
/// /// ********************************************* From 3b59c0e77f2f5d27645c57940a7d120f5344044f Mon Sep 17 00:00:00 2001 From: anderkve Date: Tue, 9 Dec 2025 15:15:02 +0000 Subject: [PATCH 07/10] =?UTF-8?q?=F0=9F=94=84=20synced=20local=20'ScannerB?= =?UTF-8?q?it/'=20with=20remote=20'ScannerBit/'?= MIME-Version: 1.0 Content-Type: text/plain; charset=UTF-8 Content-Transfer-Encoding: 8bit --- .../gambit/ScannerBit/plugin_interface.hpp | 2 +- .../ScannerBit/scanners/diver_1.0.5/diver.hpp | 50 +++++ ScannerBit/src/scanners/diver_1.0.5/diver.cpp | 200 ++++++++++++++++++ .../python/plugins/gambit_binminpy.py | 167 +++++++++++++++ .../scanners/python/plugins/gambit_scipy.py | 4 +- .../src/scanners/python/plugins/grid.py | 2 +- .../src/scanners/python/plugins/utils/mpi.py | 2 +- ScannerBit/src/scanners/twalk/twalk.cpp | 2 +- 8 files changed, 423 insertions(+), 6 deletions(-) create mode 100644 ScannerBit/include/gambit/ScannerBit/scanners/diver_1.0.5/diver.hpp create mode 100755 ScannerBit/src/scanners/diver_1.0.5/diver.cpp create mode 100644 ScannerBit/src/scanners/python/plugins/gambit_binminpy.py diff --git a/ScannerBit/include/gambit/ScannerBit/plugin_interface.hpp b/ScannerBit/include/gambit/ScannerBit/plugin_interface.hpp index 273e52d10e..3b0563a9db 100644 --- a/ScannerBit/include/gambit/ScannerBit/plugin_interface.hpp +++ b/ScannerBit/include/gambit/ScannerBit/plugin_interface.hpp @@ -176,7 +176,7 @@ namespace Gambit template auto operator()(args&... 
params) -> typename find_variadic_type ::ret_type { - static_assert(find_variadic_type ::value, "\n\033[00;31;1mPlugin Interface: Entered argument types do not match any of the plugin mains' argument types.\033[00m\n"); + static_assert(find_variadic_type ::value, "\n\u001B[00;31;1mPlugin Interface: Entered argument types do not match any of the plugin mains' argument types.\u001B[00m\n"); return Plugin_Main_Interface_Base::func_type>::operator()(params...); } }; diff --git a/ScannerBit/include/gambit/ScannerBit/scanners/diver_1.0.5/diver.hpp b/ScannerBit/include/gambit/ScannerBit/scanners/diver_1.0.5/diver.hpp new file mode 100644 index 0000000000..ccd47b549a --- /dev/null +++ b/ScannerBit/include/gambit/ScannerBit/scanners/diver_1.0.5/diver.hpp @@ -0,0 +1,50 @@ +// GAMBIT: Global and Modular BSM Inference Tool +// ********************************************* +/// \file +/// +/// ScannerBit interface to Diver 1.0.5 +/// +/// Header file +/// +/// ********************************************* +/// +/// Authors (add name and date if you modify): +/// +/// \author Pat Scott +/// (p.scott@imperial.ac.uk) +/// \date 2019 Dec +/// +/// ********************************************* + +#ifndef __diver_hpp__ +#define __diver_hpp__ + +#include "gambit/ScannerBit/scanner_plugin.hpp" + +// C++ prototype of the main run_de function for Diver. +extern "C" void cdiver(double (*)(double[], const int, int&, bool&, const bool, void*&), int, const double[], const double[], + const char[], int, int, const int[], bool, const int, const int, int, int, const double[], double, + double, bool, bool, int, bool, bool, double, int, bool, bool, double(*)(const double[], const int, void*&), + double, double, int, bool, bool, int, bool, int, double, int, void*&, int); + +namespace Gambit +{ + + namespace Diver_1_0_5 + { + + /// Structure for passing likelihood and printer data through Diver to the objective function. 
+ struct diverScanData + { + Scanner::like_ptr likelihood_function; + Scanner::printer_interface* printer; + }; + + /// Function to be minimised by Diver + double objective(double params[], const int param_dim, int &fcall, bool &quit, const bool validvector, void*& context); + + } + +} + +#endif // #defined __diver_hpp__ diff --git a/ScannerBit/src/scanners/diver_1.0.5/diver.cpp b/ScannerBit/src/scanners/diver_1.0.5/diver.cpp new file mode 100755 index 0000000000..73886d1477 --- /dev/null +++ b/ScannerBit/src/scanners/diver_1.0.5/diver.cpp @@ -0,0 +1,200 @@ +// GAMBIT: Global and Modular BSM Inference Tool +// ********************************************* +/// \file +/// +/// ScannerBit interface to Diver 1.0.5 +/// +/// ********************************************* +/// +/// Authors (add name and date if you modify): +/// +/// \author Pat Scott +/// (p.scott@imperial.ac.uk) +/// \date 2017 June, Nov +/// +/// ********************************************* + +#include +#include +#include + +#include "gambit/ScannerBit/scanners/diver_1.0.5/diver.hpp" +#include "gambit/Utils/yaml_options.hpp" +#include "gambit/Utils/util_types.hpp" +#include "gambit/Utils/util_functions.hpp" +#include "gambit/Utils/variadic_functions.hpp" + +/// ================================================= +/// Interface to ScannerBit +/// ================================================= + +scanner_plugin(diver, version(1, 0, 5)) +{ + + // Access Diver stuff and standard Gambit things + using namespace Gambit; + using namespace Gambit::Diver_1_0_5; + + // Error thrown if the following entries are not present in the inifile + reqd_inifile_entries("NP"); + + // Tell cmake to search for the diver library. + reqd_libraries("diver"); + + // Set up the scan data container + diverScanData data; + + // Code to execute when the plugin is loaded. 
+ plugin_constructor + { + // Retrieve the external likelihood calculator + data.likelihood_function = get_purpose(get_inifile_value("like")); + if (data.likelihood_function->getRank() == 0) cout << "Loading Diver differential evolution plugin for ScannerBit." << std::endl; + // Retrieve the external printer + data.printer = &(get_printer()); + // Do not allow GAMBIT's own likelihood calculator to directly shut down the scan. + // Diver will assume responsibility for this process, triggered externally by + // the 'plugin_info.early_shutdown_in_progress()' function. + data.likelihood_function->disable_external_shutdown(); + } + + int plugin_main (void) + { + // Path to save Diver samples, resume files, etc + str defpath = get_inifile_value("default_output_path"); + str root = Utils::ensure_path_exists(get_inifile_value("path",defpath+"Diver/native")); + + // Ask the printer if this is a resumed run or not, and check that the necessary files exist if so. + bool resume = get_printer().resume_mode(); + if (resume) + { + bool good = true; + static const std::vector names = initVector(root+".rparam", root+".devo", root+".raw"); + for (auto it = names.begin(); it != names.end(); ++it) + { + std::ifstream file(*it); + good = good and file.good() and (file.peek() != std::ifstream::traits_type::eof()); + file.close(); + } + if (not good) + { + std::ostringstream warning; + warning << "Cannot resume previous Diver run because one or all of" << endl; + for (auto it = names.begin(); it != names.end(); ++it) warning << " " << *it << endl; + warning << "is missing or empty. This is probably because your last run didn't " << endl + << "complete even one generation. Diver will start from scratch, " << endl + << "as if you had specified -r."; + if (data.likelihood_function->getRank() == 0) cout << "WARNING: " << warning.str() << endl; + scan_warn << warning.str() << scan_end; + resume = false; + } + } + + // Retrieve the global option specifying the minimum interesting likelihood. 
+ double gl0 = get_inifile_value("likelihood: model_invalid_for_lnlike_below"); + // Retrieve the global option specifying the likelihood offset to use + double offset = get_inifile_value("likelihood: lnlike_offset", 1e-4*gl0); + // Make sure the likleihood functor knows to apply the offset internally in ScannerBit + data.likelihood_function->setPurposeOffset(offset); + // Offset the minimum interesting likelihood by the offset, and flip it to match diver sign convention. + gl0 = -1.0 * (gl0 + offset); + + // Other Diver run parameters + int nPar = get_dimension(); // Dimensionality of the parameter space + int nDerived = 0; // Number of derived quantities to output (GAMBIT printers handle these). + int nDiscrete = get_inifile_value ("nDiscrete", 0); // Number of parameters that are to be treated as discrete + bool partitionDiscrete = get_inifile_value ("partitionDiscrete", false); // Split the population evenly amongst discrete parameters and evolve separately + int maxciv = get_inifile_value ("maxciv", 1); // Maximum number of civilisations + int maxgen = get_inifile_value ("maxgen", 5000); // Maximum number of generations per civilisation + int NP = get_inifile_value ("NP"); // Population size (individuals per generation) + double Cr = get_inifile_value("Cr", 0.9); // Crossover factor + double lambda = get_inifile_value("lambda", 0.0); // Mixing factor between best and rand/current + bool current = get_inifile_value ("current", false); // Use current vector for mutation + bool expon = get_inifile_value ("expon", false); // Use exponential crossover + int bndry = get_inifile_value ("bndry", 3); // Boundary constraint: 1=brick wall, 2=random re-initialization, 3=reflection + bool jDE = get_inifile_value ("jDE", true); // Use self-adaptive choices for rand/1/bin parameters as per Brest et al 2006 + bool lambdajDE = get_inifile_value ("lambdajDE", true); // Use self-adaptive rand-to-best/1/bin parameters; based on Brest et al 2006 + double convthresh = 
get_inifile_value("convthresh", 1.e-3); // Threshold for gen-level convergence: smoothed fractional improvement in the mean population value + int convsteps = get_inifile_value ("convsteps", 10); // Number of steps to smooth over when checking convergence + bool removeDuplicates = get_inifile_value ("removeDuplicates", true); // Weed out duplicate vectors within a single generation + bool doBayesian = false; // Calculate approximate log evidence and posterior weightings + double maxNodePop = 1.9; // Population at which node is partitioned in binary space partitioning for posterior + double Ztolerance = 0.01; // Input tolerance in log-evidence + int savecount = get_inifile_value ("savecount", 1); // Save progress every savecount generations + bool native_output = get_inifile_value ("full_native_output", true); // Output .raw file (Diver native sample output format) + int init_pop_strategy = get_inifile_value ("init_population_strategy", 2);// Initialisation strategy: 0=one shot, 1=n-shot, 2=n-shot with error if no valid vectors found. + bool discard_unfit_points= get_inifile_value ("discard_unfit_points", false);// Recalculate any trial vector whose fitness is above max_acceptable_value + int max_ini_attempts = get_inifile_value ("max_initialisation_attempts", 10000); // Maximum number of times to try to find a valid vector for each slot in the initial population. + double max_acceptable_value= get_inifile_value("max_acceptable_value",0.9999*gl0); // Maximum function value to accept for the initial gen if init_population_strategy > 0, or any gen if discard_unfit_points=T. 
+ int verbose = get_inifile_value ("verbosity", 0); // Output verbosity: 0=only error messages, 1=basic info, 2=civ-level info, 3+=population info + int seed = get_inifile_value ("seed", -1); // Base seed for random number generation; non-positive means seed from the system clock + double (*prior)(const double[], const int, void*&) = NULL; // Pointer to prior function, only used if doBayesian = true. + void* context = &data; // Pointer to GAMBIT likelihood function and printers, passed through to objective function. + + // Copy the contents of root to a char array. + std::vector path(root.length()+1); + strcpy(&path[0], root.c_str()); + + // Unit cube boundaries + std::vector lowerbounds(nPar, 0.0); // Lower boundaries of parameter space to scan + std::vector upperbounds(nPar, 1.0); // Upper boundaries of parameter space to scan + + // Scale factors + std::vector Fvec = get_inifile_value >("F", initVector(0.7)); + int nF = Fvec.size(); // Size of the array indicating scale factors + + // Discrete parameters + std::vector discrete(nDiscrete, 0); // Indices of discrete parameters, Fortran style, i.e. starting at 1!! + //TODO Needs to be set automatically somehow? Not yet sure how to deal with discrete parameters in GAMBIT. + + // Run Diver + if (data.likelihood_function->getRank() == 0) cout << "Starting Diver run..." << std::endl; + cdiver(&objective, nPar, &lowerbounds[0], &upperbounds[0], &path[0], nDerived, nDiscrete, + &discrete[0], partitionDiscrete, maxciv, maxgen, NP, nF, &Fvec[0], Cr, lambda, current, + expon, bndry, jDE, lambdajDE, convthresh, convsteps, removeDuplicates, doBayesian, + prior, maxNodePop, Ztolerance, savecount, resume, native_output, init_pop_strategy, + discard_unfit_points, max_ini_attempts, max_acceptable_value, seed, context, verbose); + if (data.likelihood_function->getRank() == 0) cout << "Diver run finished!" 
<< std::endl; + return 0; + + } + +} + +/// ================================================= +/// Function definitions +/// ================================================= + +namespace Gambit +{ + + namespace Diver_1_0_5 + { + + //Function to be minimized. Corresponds to -ln(Likelihood). Redirects to the target of context pointer. + double objective(double params[], const int param_dim, int &fcall, bool &quit, const bool validvector, void*& context) + { + // Return the worst possible likelihood if the point is outside the prior box. + if (not validvector) return std::numeric_limits::max(); + + // Put the parameters into a C++ vector + std::vector param_vec(params, params + param_dim); + + // Retrieve the likelihood function from the context pointer and call it + diverScanData* data = static_cast(context); + double lnlike = data->likelihood_function(param_vec); + + // Increment the number of function calls, tell Diver to continue and return the likelihood + fcall += 1; + + // Check whether the calling code wants us to shut down early + quit = Gambit::Scanner::Plugins::plugin_info.early_shutdown_in_progress(); + + return -lnlike; + + } + + } + +} + diff --git a/ScannerBit/src/scanners/python/plugins/gambit_binminpy.py b/ScannerBit/src/scanners/python/plugins/gambit_binminpy.py new file mode 100644 index 0000000000..676874f6d3 --- /dev/null +++ b/ScannerBit/src/scanners/python/plugins/gambit_binminpy.py @@ -0,0 +1,167 @@ +""" +Binminpy scanners +================= +""" + +import numpy as np +from scannerbit import with_mpi as scannerbit_with_mpi +from utils import copydoc, version, with_mpi +try: + import binminpy + binminpy_version = version(binminpy) + from binminpy.BinMinBottomUp import BinMinBottomUp as binminpy_BinMinBottomUp +except: + __error__ = "The binminpy package is not installed. 
To install it, run: pip install git+https://github.com/anderkve/binminpy.git" + binminpy_version = "n/a" + binminpy_BinMinBottomUp = None + +import scanner_plugin as splug + + +class BinMinBottomUp(splug.scanner): + """ +Sampling and optimization based on the "bottom-up" mode of binminpy, where the +parameter space is binned by working outwards from all identified local optima. + +See https://github.com/anderkve/binminpy + +YAML options: + like: Use the functors that correspond to the specified purpose. + run: + n_bins: Number of bins for each parameter, given as a list on the form "model::parameter: <n_bins>". + sampled_parameters: List of the parameters that should be sampled within each bin, e.g. ["model::par_1", "model::par_2"]. + (Remaining parameters will be optimized within each bin.) + sampler: Choice of sampler for sampling parameters within each bin. + optimizer: Choice of optimizer for initial global optimization and optimizing parameters within each bin. + optimizer_kwargs: Keyword arguments to be forwarded to the optimizer. + n_initial_points: Number of starting points for the initial search for local optima. + n_sampler_points_per_bin: Number of sampled points within each bin. + accept_loglike_above: Add neighboring bins for bins that have a highest loglike above this threshold. + accept_delta_loglike: Add neighboring bins for bins that have a delta loglike (difference to best-fit point) within this threshold. + neighborhood_distance: If the current bin is accepted, how many bins in each direction should be added to the list of tasks. + inherit_best_init_point_within_bin: When optimizing parameters, start optimization from the current best point within the given bin. + n_optim_restarts_per_bin: Number of repeated attempts at optimizing parameters per bin. + n_tasks_per_batch: Number of tasks (bins) assigned to each MPI worker process at a time. + print_progress_every_n_batch: How frequently the progress message is printed. 
+""" + + __version__ = binminpy_version + __plugin_name__ = "binminpy" + + + def __init__(self, **kwargs): + if not scannerbit_with_mpi: + raise Exception(f"GAMBIT has been compiled with MPI disabled (WITH_MPI=0), but the " + f"binminpy scanner requires MPI parallelisation with >1 MPI processes. " + f"Rerun CMake with \"cmake -DWITH_MPI=1\" and then recompile GAMBIT.") + if not with_mpi: + raise Exception(f"The binminpy scanner requires MPI parallelisation with >1 MPI processes. " + f"Make sure that mpi4py is installed and restart GAMBIT with >1 processes.") + + super().__init__(use_mpi=True, use_resume=False) + + self.print_prefix = f"{BinMinBottomUp.__plugin_name__} scanner plugin:" + + + def run(self): + + # Define target function: this is where we call the GAMBIT loglike function + def target_function(x, *args): + return -self.loglike_hypercube(x) + + # Get the parameter ordering from GAMBIT + par_indices = {par_name:idx for idx,par_name in enumerate(self.parameter_names)} + + # Set up the list of binning tuples + binning_tuples = [] + if not "n_bins" in self.run_args: + raise RuntimeError(f"{self.print_prefix} The run argument 'n_bins' is missing.") + for param_name in self.parameter_names: + if not param_name in self.run_args["n_bins"]: + self.run_args["n_bins"][param_name] = 1 + par_n_bins = self.run_args["n_bins"][param_name] + binning_tuples.append([0., 1., par_n_bins]) # <-- Working in the unit hypercube + + # Read list of sampled parameters, and remove any duplicate entries + sampled_parameter_names = list(set(self.run_args.get("sampled_parameters", []))) + # Remove any parameter that is not scanned by GAMBIT + sampled_parameter_names = [par_name for par_name in sampled_parameter_names if par_name in self.parameter_names] + # All parameters that are not listed as sampled parameters should be optimized + optimized_parameter_names = list(set(self.parameter_names).difference(sampled_parameter_names)) + + if self.mpi_rank == 0: + print(f"{self.print_prefix} 
Parameters that will be *sampled* in each bin: {sampled_parameter_names}", flush=True) + print(f"{self.print_prefix} Parameters that will be *optimized* in each bin: {optimized_parameter_names}", flush=True) + + # Parse options for restricting the set of parameter bins + if ("accept_loglike_above" in self.run_args) and ("accept_delta_loglike" in self.run_args): + if self.mpi_rank == 0: + print(f"{self.print_prefix} Both 'accept_loglike_above' and 'accept_delta_loglike' have been set. " + f"Will use the weaker of the two requirements when constructing bins.", flush=True) + + accept_target_below = -np.inf + if "accept_loglike_above" in self.run_args: + accept_target_below = -1.0 * self.run_args["accept_loglike_above"] + + accept_delta_target_below = -np.inf + if "accept_delta_loglike" in self.run_args: + accept_delta_target_below = abs(self.run_args["accept_delta_loglike"]) + + if (accept_target_below == -np.inf) and (accept_delta_target_below == -np.inf): + if self.mpi_rank == 0: + print(f"{self.print_prefix} Running with no restrictions on the set of parameter space bins.", flush=True) + accept_target_below = np.inf + accept_delta_target_below = np.inf + + # Create the BinMinBottomUp instance + binned_opt = binminpy_BinMinBottomUp( + target_function, + binning_tuples, + args=(), + sampler=self.run_args.get("sampler", "latinhypercube"), + optimizer=self.run_args.get("optimizer", "minimize"), + optimizer_kwargs=self.run_args.get("optimizer_kwargs", {}), + sampled_parameters=tuple(par_indices[par_name] for par_name in sampled_parameter_names), + n_initial_points=self.run_args.get("n_initial_points", self.mpi_size), + n_sampler_points_per_bin=self.run_args.get("n_sampler_points_per_bin", 10), + inherit_best_init_point_within_bin=self.run_args.get("inherit_best_init_point_within_bin", False), + accept_target_below=accept_target_below, + accept_delta_target_below=accept_delta_target_below, + save_evals=self.run_args.get("save_evals", False), + return_evals=False, + 
return_bin_centers=False, + return_bin_results=False, + optima_comparison_rtol=self.run_args.get("optima_comparison_rtol", 1e-9), + optima_comparison_atol=self.run_args.get("optima_comparison_atol", 0.0), + neighborhood_distance=self.run_args.get("neighborhood_distance", 1), + n_optim_restarts_per_bin=self.run_args.get("n_optim_restarts_per_bin", 1), + n_tasks_per_batch=self.run_args.get("n_tasks_per_batch", 10), + print_progress_every_n_batch=self.run_args.get("print_progress_every_n_batch", 1000), + max_tasks_per_worker=self.run_args.get("max_tasks_per_worker", np.inf), + max_n_bins=self.run_args.get("max_n_bins", np.inf), + ) + + # Run the scan! + result = binned_opt.run() + + # Print a summary + if self.mpi_rank == 0: + best_bins = result["optimal_bins"] + print() + print(f"{self.print_prefix} # Global optima found in bin(s) {best_bins}:", flush=True) + for i, bin_index_tuple in enumerate(best_bins): + x_opt_physical = self.transform_to_vec(result['x_optimal'][i]) + x_print_dict = dict(zip(self.parameter_names, x_opt_physical.tolist())) + print(f"{self.print_prefix} - Bin {bin_index_tuple}:", flush=True) + print(f"{self.print_prefix} - Parameters:", flush=True) + for key, val in x_print_dict.items(): + print(f"{self.print_prefix} - {key}: {val}") + print(f"{self.print_prefix} - Log-likelihood: {result['y_optimal'][i]}", flush=True) + print() + print(f"{self.print_prefix} Target function calls: {result['n_target_calls']}", flush=True) + print() + + return 0 + + +__plugins__ = {BinMinBottomUp.__plugin_name__: BinMinBottomUp} diff --git a/ScannerBit/src/scanners/python/plugins/gambit_scipy.py b/ScannerBit/src/scanners/python/plugins/gambit_scipy.py index 3f82085576..18515e38e3 100644 --- a/ScannerBit/src/scanners/python/plugins/gambit_scipy.py +++ b/ScannerBit/src/scanners/python/plugins/gambit_scipy.py @@ -1,6 +1,6 @@ """ -Scipy dual annealing scannner -============================= +Scipy scanners +============== """ from utils import copydoc, version, 
with_mpi diff --git a/ScannerBit/src/scanners/python/plugins/grid.py b/ScannerBit/src/scanners/python/plugins/grid.py index 6e08ec8a31..5233e95a8c 100644 --- a/ScannerBit/src/scanners/python/plugins/grid.py +++ b/ScannerBit/src/scanners/python/plugins/grid.py @@ -28,7 +28,7 @@ class Grid(splug.scanner): YAML options: grid_pts[10]: The number of points along each dimension on the grid. A vector is given with each element corresponding to each dimension. - like: Use the functors thats corresponds to the specified purpose. + like: Use the functors that correspond to the specified purpose. parameters: Specifies the order of parameters that corresponds to the grid points specified by the tag "grid_pts". """ diff --git a/ScannerBit/src/scanners/python/plugins/utils/mpi.py b/ScannerBit/src/scanners/python/plugins/utils/mpi.py index 953c873685..714853bb35 100644 --- a/ScannerBit/src/scanners/python/plugins/utils/mpi.py +++ b/ScannerBit/src/scanners/python/plugins/utils/mpi.py @@ -17,7 +17,7 @@ else: print(f"WARNING: GAMBIT is compiled with MPI parallelisation enabled (WITH_MPI=1), " f"but {__file__} failed to import the python module mpi4py. If you want to " - f"run this scanner with multiple MPI processes, please install mpi4py.") + f"use GAMBIT with an MPI parallelised python scanner, please install mpi4py.") else: print(f"WARNING: The scanner plugin failed to import mpi4py in {__file__}, " f"but that's OK since GAMBIT anyway is running in serial mode (-DWITH_MPI=0).", flush=True) diff --git a/ScannerBit/src/scanners/twalk/twalk.cpp b/ScannerBit/src/scanners/twalk/twalk.cpp index 7366d6ee46..dbc39ea63b 100644 --- a/ScannerBit/src/scanners/twalk/twalk.cpp +++ b/ScannerBit/src/scanners/twalk/twalk.cpp @@ -477,7 +477,7 @@ namespace Gambit set_resume_params.dump(); // Better way // This works I think, but it still has problems. In particular, // it looks like you must resume with the same number of processes - // that you started the run with, which is kind of crap. 
+ // that you started the run with, which is not good. } for (auto &&gd : gDev) delete gd; From 9d998b760ba5775132fa588aab346e4c150971fc Mon Sep 17 00:00:00 2001 From: Anders Kvellestad Date: Tue, 9 Dec 2025 16:39:16 +0100 Subject: [PATCH 08/10] Switching to github-hosted runners --- .github/workflows/ci.yml | 2 +- .github/workflows/ci_linux.yml | 2 +- 2 files changed, 2 insertions(+), 2 deletions(-) diff --git a/.github/workflows/ci.yml b/.github/workflows/ci.yml index 0f23c2fda4..4829b279a4 100644 --- a/.github/workflows/ci.yml +++ b/.github/workflows/ci.yml @@ -10,7 +10,7 @@ jobs: # A job that builds GAMBIT + scanners, tests the command-line interface # and runs a minimal test run with spartan.yaml gambit_light_build: - runs-on: [self-hosted, macOS, Arm64] + runs-on: ubuntu-latest strategy: fail-fast: false matrix: diff --git a/.github/workflows/ci_linux.yml b/.github/workflows/ci_linux.yml index 718a10cb9a..32cf941e9e 100644 --- a/.github/workflows/ci_linux.yml +++ b/.github/workflows/ci_linux.yml @@ -10,7 +10,7 @@ jobs: # A job that builds GAMBIT + scanners, tests the command-line interface # and runs a minimal test run with spartan.yaml gambit_light_build: - runs-on: [self-hosted, Linux, X64] + runs-on: ubuntu-latest strategy: fail-fast: false matrix: From 3857a00f948ef9c1d2d9c428b4005c5eb77f01c9 Mon Sep 17 00:00:00 2001 From: Anders Kvellestad Date: Tue, 9 Dec 2025 16:59:01 +0100 Subject: [PATCH 09/10] Update to CI jobs --- .github/workflows/ci.yml | 2 +- .github/workflows/ci_linux.yml | 2 +- 2 files changed, 2 insertions(+), 2 deletions(-) diff --git a/.github/workflows/ci.yml b/.github/workflows/ci.yml index 4829b279a4..0f23c2fda4 100644 --- a/.github/workflows/ci.yml +++ b/.github/workflows/ci.yml @@ -10,7 +10,7 @@ jobs: # A job that builds GAMBIT + scanners, tests the command-line interface # and runs a minimal test run with spartan.yaml gambit_light_build: - runs-on: ubuntu-latest + runs-on: [self-hosted, macOS, Arm64] strategy: fail-fast: false matrix: 
diff --git a/.github/workflows/ci_linux.yml b/.github/workflows/ci_linux.yml index 32cf941e9e..3781fee565 100644 --- a/.github/workflows/ci_linux.yml +++ b/.github/workflows/ci_linux.yml @@ -1,4 +1,4 @@ -name: Gambit-light Linux CI on Wino +name: Gambit-light Linux CI on: push: From dd7810d305c8a8a1f13c8fafcd5f8b052accdb82 Mon Sep 17 00:00:00 2001 From: Anders Kvellestad Date: Tue, 9 Dec 2025 17:05:54 +0100 Subject: [PATCH 10/10] Reverted CI job change for now. --- .github/workflows/ci_linux.yml | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/.github/workflows/ci_linux.yml b/.github/workflows/ci_linux.yml index 3781fee565..537133a72c 100644 --- a/.github/workflows/ci_linux.yml +++ b/.github/workflows/ci_linux.yml @@ -10,7 +10,7 @@ jobs: # A job that builds GAMBIT + scanners, tests the command-line interface # and runs a minimal test run with spartan.yaml gambit_light_build: - runs-on: ubuntu-latest + runs-on: [self-hosted, Linux, X64] strategy: fail-fast: false matrix: