Skip to content
New issue

Have a question about this project? Sign up for a free GitHub account to open an issue and contact its maintainers and the community.

By clicking “Sign up for GitHub”, you agree to our terms of service and privacy statement. We’ll occasionally send you account-related emails.

Already on GitHub? Sign in to your account

fix bug in _hqp_yq_xq2map process #2103

Merged
merged 2 commits into from
Jun 6, 2024
Merged
Show file tree
Hide file tree
Changes from all commits
Commits
File filter

Filter by extension

Filter by extension

Conversations
Failed to load comments.
Loading
Jump to
Jump to file
Failed to load files.
Loading
Diff view
Diff view
6 changes: 3 additions & 3 deletions validphys2/src/validphys/process_options.py
Original file line number Diff line number Diff line change
Expand Up @@ -159,11 +159,11 @@ def _hqp_yq_xq2map(kin_info):
# Theory predictions computed with HT/4 ~ mt/2 for rapidity distr.
# see section 3 from 1906.06535
# HT defined in Eqn. (1) of 1611.08609
rapidity = kin_info.get_one_of(_Vars.y_t, _Vars.y_ttBar)
ratio = np.sqrt(mass2) / kin_info[_Vars.sqrts]
rapidity = kin_info.get_one_of(_Vars.y_t, _Vars.y_ttBar, "k1")
q2 = kin_info.get_one_of(_Vars.m_t2, "k2")
ratio = np.sqrt(q2) / kin_info[_Vars.sqrts]
x1 = ratio * np.exp(rapidity)
x2 = ratio * np.exp(-rapidity)
q2 = kin_info[_Vars.m_t2]
x = np.concatenate((x1, x2))
return np.clip(x, a_min=None, a_max=1, out=x), np.concatenate((q2, q2)) / 4

Expand Down
40 changes: 33 additions & 7 deletions validphys2/src/validphys/tests/test_datafiles.py
Original file line number Diff line number Diff line change
@@ -1,13 +1,14 @@
"""
Test all datafiles

The checks in ``test_all_datasets`` are run for each dataset independently so that one gets one
The checks in ``test_all_datasets`` are run for each dataset independently so that one gets one
failure per dataset in case of problems
"""

import pytest

from validphys.covmats import INTRA_DATASET_SYS_NAME
from validphys.kinematics import xq2map_with_cuts
from validphys.loader import FallbackLoader
from validphys.plotoptions.kintransforms import identity as kintransform_identity

Expand All @@ -24,9 +25,21 @@ def _load_main_and_variants(dataset_name):


@pytest.mark.parametrize("dataset_name", all_datasets)
def test_all_datasets(dataset_name):
"""checks that a dataset can be loaded (together with its variants),
that the kinematics, uncertainties and data can be read
def test_all_datasets(dataset_name, data_internal_cuts_new_theory_config):
"""Checks that a dataset can be loaded (together with its variants),
that the kinematics, uncertainties and data can be read.

All checks pertaining to a given dataset are done together in this function
since a broken dataset will likely fail more than one check.
This avoids polluting the output with many errors for a single dataset.

Checks:
1. Loading of data, kinematics, uncertainties
2. Kinematic coverage is included in the dataframe
3. A process type is being used (if not legacy)
4. All variants can be loaded
5. Uncertainties are either ADD or MULT
    6. The kinematic coverage can be generated
"""
# Load the data and all its variants
cds = _load_main_and_variants(dataset_name)
Expand Down Expand Up @@ -65,11 +78,15 @@ def test_all_datasets(dataset_name):
# check the uncertainties can be loaded
# note that due to legacy data there might be datasets without data_uncertainties
# but that would only happen for non-variant (or member 0 of the list)
all_unc = [cd.metadata.load_uncertainties() for cd in cds[1:]]
# Separate member 0 in that case
valid_cds = []
if main_cd.metadata.data_uncertainties:
all_unc.insert(0, main_cd.metadata.load_uncertainties())
valid_cds.append(main_cd)
valid_cds += cds[1:]

for cd in valid_cds:
unc = cd.metadata.load_uncertainties()

for unc in all_unc:
# Check that, if present, the special `stat` key is ADD and UNCORR
if "stat" in unc:
stat = unc["stat"]
Expand All @@ -90,3 +107,12 @@ def test_all_datasets(dataset_name):

# Check that all treatments are either MULT or ADD
assert set(unc.columns.get_level_values("treatment").unique()) <= {"MULT", "ADD"}

# Extra checks for non-polarized datasets
if str(process_type).endswith("_POL"):
return

# Legacy datasets with no "new implementation" are skipped
for cd in valid_cds:
# check without cuts
xq2map_with_cuts(cd, False)