Skip to content

Commit

Permalink
Merge pull request #1698 from NNPDF/n3lo_dis_fit
Browse files Browse the repository at this point in the history
N3LO fits with IHOU
  • Loading branch information
RoyStegeman committed Nov 15, 2023
2 parents ee25aea + 9c9c88d commit 7592b58
Show file tree
Hide file tree
Showing 8 changed files with 232 additions and 19 deletions.
Binary file modified nnpdfcpp/data/theory.db
Binary file not shown.
6 changes: 3 additions & 3 deletions validphys2/src/validphys/config.py
Original file line number Diff line number Diff line change
Expand Up @@ -1404,7 +1404,7 @@ def produce_defaults(
default_filter_settings_recorded_spec_=None,
):
"""Produce default values for filters taking into account the
values of ``q2min``, ` `w2min`` and ``maxTau`` defined at namespace
values of ``q2min``, ``w2min`` and ``maxTau`` defined at namespace
level and those inside a ``filter_defaults`` mapping.
"""
from validphys.filters import default_filter_settings_input
Expand All @@ -1416,7 +1416,7 @@ def produce_defaults(
raise ConfigError("q2min defined multiple times with different values")
if w2min is not None and "w2min" in filter_defaults and w2min != filter_defaults["w2min"]:
raise ConfigError("w2min defined multiple times with different values")

if (
maxTau is not None
and "maxTau" in filter_defaults
Expand All @@ -1442,7 +1442,7 @@ def produce_defaults(
if w2min is not None and defaults_loaded:
log.warning("Using w2min from runcard")
filter_defaults["w2min"] = w2min

if maxTau is not None and defaults_loaded:
log.warning("Using maxTau from runcard")
filter_defaults["maxTau"] = maxTau
Expand Down
12 changes: 12 additions & 0 deletions validphys2/src/validphys/scalevariations/pointprescriptions.yaml
Original file line number Diff line number Diff line change
Expand Up @@ -9,3 +9,15 @@
'7 point': ['(1, 1)', '(2, 1)', '(0.5, 1)', '(1, 2)', '(1, 0.5)', '(2, 2)', '(0.5, 0.5)']
'7original point': ['(1, 1)', '(2, 1)', '(0.5, 1)', '(1, 2)', '(1, 0.5)', '(2, 2)', '(0.5, 0.5)']
'9 point': ['(1, 1)', '(2, 1)', '(0.5, 1)', '(1, 2)', '(1, 0.5)', '(2, 2)', '(0.5, 0.5)', '(2, 0.5)', '(0.5, 2)']
# N3LO IHOU Anomalous dimension theory covmat
'n3lo ad covmat': ['(0, 0, 0, 0)','(1, 0, 0, 0)','(2, 0, 0, 0)','(3, 0, 0, 0)','(4, 0, 0, 0)','(5, 0, 0, 0)','(6, 0, 0, 0)','(7, 0, 0, 0)','(8, 0, 0, 0)','(9, 0, 0, 0)','(10, 0, 0, 0)','(11, 0, 0, 0)','(12, 0, 0, 0)','(13, 0, 0, 0)','(14, 0, 0, 0)','(15, 0, 0, 0)','(16, 0, 0, 0)','(17, 0, 0, 0)','(18, 0, 0, 0)','(19, 0, 0, 0)','(0, 1, 0, 0)','(0, 2, 0, 0)','(0, 3, 0, 0)','(0, 4, 0, 0)','(0, 5, 0, 0)','(0, 6, 0, 0)','(0, 7, 0, 0)','(0, 8, 0, 0)','(0, 9, 0, 0)','(0, 10, 0, 0)','(0, 11, 0, 0)','(0, 12, 0, 0)','(0, 13, 0, 0)','(0, 14, 0, 0)','(0, 15, 0, 0)','(0, 16, 0, 0)','(0, 17, 0, 0)','(0, 18, 0, 0)','(0, 19, 0, 0)','(0, 20, 0, 0)','(0, 21, 0, 0)','(0, 0, 1, 0)','(0, 0, 2, 0)','(0, 0, 3, 0)','(0, 0, 4, 0)','(0, 0, 5, 0)','(0, 0, 6, 0)','(0, 0, 7, 0)','(0, 0, 8, 0)','(0, 0, 9, 0)','(0, 0, 10, 0)','(0, 0, 11, 0)','(0, 0, 12, 0)','(0, 0, 13, 0)','(0, 0, 14, 0)','(0, 0, 15, 0)','(0, 0, 0, 1)','(0, 0, 0, 2)','(0, 0, 0, 3)','(0, 0, 0, 4)','(0, 0, 0, 5)','(0, 0, 0, 6)']
# N3LO full IHOU: Anomalous dimension theory covmat + DIS massive coefficient functions
'n3lo ihou': ['(0, 0, 0, 0)','(1, 0, 0, 0)','(2, 0, 0, 0)','(3, 0, 0, 0)','(4, 0, 0, 0)','(5, 0, 0, 0)','(6, 0, 0, 0)','(7, 0, 0, 0)','(8, 0, 0, 0)','(9, 0, 0, 0)','(10, 0, 0, 0)','(11, 0, 0, 0)','(12, 0, 0, 0)','(13, 0, 0, 0)','(14, 0, 0, 0)','(15, 0, 0, 0)','(16, 0, 0, 0)','(17, 0, 0, 0)','(18, 0, 0, 0)','(19, 0, 0, 0)','(0, 1, 0, 0)','(0, 2, 0, 0)','(0, 3, 0, 0)','(0, 4, 0, 0)','(0, 5, 0, 0)','(0, 6, 0, 0)','(0, 7, 0, 0)','(0, 8, 0, 0)','(0, 9, 0, 0)','(0, 10, 0, 0)','(0, 11, 0, 0)','(0, 12, 0, 0)','(0, 13, 0, 0)','(0, 14, 0, 0)','(0, 15, 0, 0)','(0, 16, 0, 0)','(0, 17, 0, 0)','(0, 18, 0, 0)','(0, 19, 0, 0)','(0, 20, 0, 0)','(0, 21, 0, 0)','(0, 0, 1, 0)','(0, 0, 2, 0)','(0, 0, 3, 0)','(0, 0, 4, 0)','(0, 0, 5, 0)','(0, 0, 6, 0)','(0, 0, 7, 0)','(0, 0, 8, 0)','(0, 0, 9, 0)','(0, 0, 10, 0)','(0, 0, 11, 0)','(0, 0, 12, 0)','(0, 0, 13, 0)','(0, 0, 14, 0)','(0, 0, 15, 0)','(0, 0, 0, 1)','(0, 0, 0, 2)','(0, 0, 0, 3)','(0, 0, 0, 4)','(0, 0, 0, 5)','(0, 0, 0, 6)','(-1, -1)','(1, 1)']
# N3LO full IHOU + 7 point scale variations
'n3lo full thcovmat': ['(0, 0, 0, 0)','(1, 0, 0, 0)','(2, 0, 0, 0)','(3, 0, 0, 0)','(4, 0, 0, 0)','(5, 0, 0, 0)','(6, 0, 0, 0)','(7, 0, 0, 0)','(8, 0, 0, 0)','(9, 0, 0, 0)','(10, 0, 0, 0)','(11, 0, 0, 0)','(12, 0, 0, 0)','(13, 0, 0, 0)','(14, 0, 0, 0)','(15, 0, 0, 0)','(16, 0, 0, 0)','(17, 0, 0, 0)','(18, 0, 0, 0)','(19, 0, 0, 0)','(0, 1, 0, 0)','(0, 2, 0, 0)','(0, 3, 0, 0)','(0, 4, 0, 0)','(0, 5, 0, 0)','(0, 6, 0, 0)','(0, 7, 0, 0)','(0, 8, 0, 0)','(0, 9, 0, 0)','(0, 10, 0, 0)','(0, 11, 0, 0)','(0, 12, 0, 0)','(0, 13, 0, 0)','(0, 14, 0, 0)','(0, 15, 0, 0)','(0, 16, 0, 0)','(0, 17, 0, 0)','(0, 18, 0, 0)','(0, 19, 0, 0)','(0, 20, 0, 0)','(0, 21, 0, 0)','(0, 0, 1, 0)','(0, 0, 2, 0)','(0, 0, 3, 0)','(0, 0, 4, 0)','(0, 0, 5, 0)','(0, 0, 6, 0)','(0, 0, 7, 0)','(0, 0, 8, 0)','(0, 0, 9, 0)','(0, 0, 10, 0)','(0, 0, 11, 0)','(0, 0, 12, 0)','(0, 0, 13, 0)','(0, 0, 14, 0)','(0, 0, 15, 0)','(0, 0, 0, 1)','(0, 0, 0, 2)','(0, 0, 0, 3)','(0, 0, 0, 4)','(0, 0, 0, 5)','(0, 0, 0, 6)','(2, 1)', '(0.5, 1)', '(1, 2)', '(1, 0.5)', '(2, 2)', '(0.5, 0.5)','(-1, -1)','(1, 1)']
# N3LO full IHOU + 3 point scale variations for datasets with no N3LO corrections
'n3lo 3pt missing': ['(0, 0, 0, 0)','(1, 0, 0, 0)','(2, 0, 0, 0)','(3, 0, 0, 0)','(4, 0, 0, 0)','(5, 0, 0, 0)','(6, 0, 0, 0)','(7, 0, 0, 0)','(8, 0, 0, 0)','(9, 0, 0, 0)','(10, 0, 0, 0)','(11, 0, 0, 0)','(12, 0, 0, 0)','(13, 0, 0, 0)','(14, 0, 0, 0)','(15, 0, 0, 0)','(16, 0, 0, 0)','(17, 0, 0, 0)','(18, 0, 0, 0)','(19, 0, 0, 0)','(0, 1, 0, 0)','(0, 2, 0, 0)','(0, 3, 0, 0)','(0, 4, 0, 0)','(0, 5, 0, 0)','(0, 6, 0, 0)','(0, 7, 0, 0)','(0, 8, 0, 0)','(0, 9, 0, 0)','(0, 10, 0, 0)','(0, 11, 0, 0)','(0, 12, 0, 0)','(0, 13, 0, 0)','(0, 14, 0, 0)','(0, 15, 0, 0)','(0, 16, 0, 0)','(0, 17, 0, 0)','(0, 18, 0, 0)','(0, 19, 0, 0)','(0, 20, 0, 0)','(0, 21, 0, 0)','(0, 0, 1, 0)','(0, 0, 2, 0)','(0, 0, 3, 0)','(0, 0, 4, 0)','(0, 0, 5, 0)','(0, 0, 6, 0)','(0, 0, 7, 0)','(0, 0, 8, 0)','(0, 0, 9, 0)','(0, 0, 10, 0)','(0, 0, 11, 0)','(0, 0, 12, 0)','(0, 0, 13, 0)','(0, 0, 14, 0)','(0, 0, 15, 0)','(0, 0, 0, 1)','(0, 0, 0, 2)','(0, 0, 0, 3)','(0, 0, 0, 4)','(0, 0, 0, 5)','(0, 0, 0, 6)', '(1, 0.5 missing)', '(1, 2 missing)','(-1, -1)','(1, 1)']
# N3LO full IHOU + 3 point scale variations for hadronic datasets
'n3lo 3pt hadronic': ['(0, 0, 0, 0)','(1, 0, 0, 0)','(2, 0, 0, 0)','(3, 0, 0, 0)','(4, 0, 0, 0)','(5, 0, 0, 0)','(6, 0, 0, 0)','(7, 0, 0, 0)','(8, 0, 0, 0)','(9, 0, 0, 0)','(10, 0, 0, 0)','(11, 0, 0, 0)','(12, 0, 0, 0)','(13, 0, 0, 0)','(14, 0, 0, 0)','(15, 0, 0, 0)','(16, 0, 0, 0)','(17, 0, 0, 0)','(18, 0, 0, 0)','(19, 0, 0, 0)','(0, 1, 0, 0)','(0, 2, 0, 0)','(0, 3, 0, 0)','(0, 4, 0, 0)','(0, 5, 0, 0)','(0, 6, 0, 0)','(0, 7, 0, 0)','(0, 8, 0, 0)','(0, 9, 0, 0)','(0, 10, 0, 0)','(0, 11, 0, 0)','(0, 12, 0, 0)','(0, 13, 0, 0)','(0, 14, 0, 0)','(0, 15, 0, 0)','(0, 16, 0, 0)','(0, 17, 0, 0)','(0, 18, 0, 0)','(0, 19, 0, 0)','(0, 20, 0, 0)','(0, 21, 0, 0)','(0, 0, 1, 0)','(0, 0, 2, 0)','(0, 0, 3, 0)','(0, 0, 4, 0)','(0, 0, 5, 0)','(0, 0, 6, 0)','(0, 0, 7, 0)','(0, 0, 8, 0)','(0, 0, 9, 0)','(0, 0, 10, 0)','(0, 0, 11, 0)','(0, 0, 12, 0)','(0, 0, 13, 0)','(0, 0, 14, 0)','(0, 0, 15, 0)','(0, 0, 0, 1)','(0, 0, 0, 2)','(0, 0, 0, 3)','(0, 0, 0, 4)','(0, 0, 0, 5)','(0, 0, 0, 6)', '(1, 0.5 hadronic)', '(1, 2 hadronic)','(-1, -1)','(1, 1)']
# N3LO 7 point scale variations
'n3lo 7 point': ['(0, 0, 0, 0)', '(2, 1)', '(0.5, 1)', '(1, 2)', '(1, 0.5)', '(2, 2)', '(0.5, 0.5)']
Original file line number Diff line number Diff line change
Expand Up @@ -137,6 +137,87 @@ scale_variations_for:
(2, 1): 437
(2, 2): 438

# N3LO: MHOU + IHOU
- theoryid: 722
variations:
(0, 0, 0, 0): 722 # central th
(1, 0, 0, 0): 723 # anomalous dimensions variations
(2, 0, 0, 0): 724
(3, 0, 0, 0): 725
(4, 0, 0, 0): 726
(5, 0, 0, 0): 727
(6, 0, 0, 0): 728
(7, 0, 0, 0): 729
(8, 0, 0, 0): 730
(9, 0, 0, 0): 731
(10, 0, 0, 0): 732
(11, 0, 0, 0): 733
(12, 0, 0, 0): 734
(13, 0, 0, 0): 735
(14, 0, 0, 0): 736
(15, 0, 0, 0): 737
(16, 0, 0, 0): 738
(17, 0, 0, 0): 739
(18, 0, 0, 0): 740
(19, 0, 0, 0): 741
(0, 1, 0, 0): 742
(0, 2, 0, 0): 743
(0, 3, 0, 0): 744
(0, 4, 0, 0): 745
(0, 5, 0, 0): 746
(0, 6, 0, 0): 747
(0, 7, 0, 0): 748
(0, 8, 0, 0): 749
(0, 9, 0, 0): 750
(0, 10, 0, 0): 751
(0, 11, 0, 0): 752
(0, 12, 0, 0): 753
(0, 13, 0, 0): 754
(0, 14, 0, 0): 755
(0, 15, 0, 0): 756
(0, 16, 0, 0): 757
(0, 17, 0, 0): 758
(0, 18, 0, 0): 759
(0, 19, 0, 0): 760
(0, 20, 0, 0): 761
(0, 21, 0, 0): 762
(0, 0, 1, 0): 763
(0, 0, 2, 0): 764
(0, 0, 3, 0): 765
(0, 0, 4, 0): 766
(0, 0, 5, 0): 767
(0, 0, 6, 0): 768
(0, 0, 7, 0): 769
(0, 0, 8, 0): 770
(0, 0, 9, 0): 771
(0, 0, 10, 0): 772
(0, 0, 11, 0): 773
(0, 0, 12, 0): 774
(0, 0, 13, 0): 775
(0, 0, 14, 0): 776
(0, 0, 15, 0): 777
(0, 0, 0, 1): 778
(0, 0, 0, 2): 779
(0, 0, 0, 3): 780
(0, 0, 0, 4): 781
(0, 0, 0, 5): 782
(0, 0, 0, 6): 783
(0.5, 1): 784 # scale variations
(2, 1): 785
(0.5, 0.5): 786
(1, 0.5): 787
(2, 0.5): 788
(0.5, 2): 789
(1, 2): 790
(2, 2): 791
(-1, -1): 792 # DIS coeff functions fact variations
(1, 1): 793
# IHOU + MHOU missing prescription: see https://github.com/NNPDF/papers/blob/e2ac1832cf4a36dab83a696564eaa75a4e55f5d2/minutes/minutes-2023-08-18.txt#L148-L157
(1, 0.5 missing): 794 # As 787 but DIS and N3LO DY from 722
(1, 2 missing): 795 # As 790 but DIS and N3LO DY from 722
(1, 0.5 hadronic): 796 # As 787 but DIS from 722
(1, 2 hadronic): 797 # As 790 but DIS from 722

- theoryid: 600
variations:
(0.5, 0.5): 601
Expand Down
64 changes: 64 additions & 0 deletions validphys2/src/validphys/theorycovariance/construction.py
Original file line number Diff line number Diff line change
Expand Up @@ -333,6 +333,45 @@ def covmat_9pt(name1, name2, deltas1, deltas2):
) + (1 / 8) * (np.outer((deltas1[2] + deltas1[3]), (deltas2[2] + deltas2[3])))
return s

def covmat_n3lo_singlet(name1, name2, deltas1, deltas2):
    """Returns theory covariance sub-matrix for all the
    singlet splitting function variations.

    The flat lists of shift vectors ``deltas1``/``deltas2`` are the
    concatenation of the variations for each singlet splitting function,
    in the fixed order gg, gq, qg, qq; each slice is handed to
    ``covmat_n3lo_ad`` and the resulting sub-matrices are summed.
    """
    # Number of independent variations per splitting function
    # (order matters: gg, gq, qg, qq).
    block_sizes = (19, 21, 15, 6)
    total = 0
    offset = 0
    for size in block_sizes:
        end = offset + size
        total += covmat_n3lo_ad(name1, name2, deltas1[offset:end], deltas2[offset:end])
        offset = end
    return total


def covmat_n3lo_ad(name1, name2, deltas1, deltas2):
    """Returns theory covariance sub-matrix for each of the
    singlet splitting function variations.

    Only same-index variations are correlated, so the sub-matrix is the
    normalized sum of the outer products of positionally paired deltas.

    Normalization is given by:
        (n_pt - 1)
    where:
        * n_pt = number of point prescriptions
    (i.e. ``len(deltas1)``, the number of variations excluding the
    central theory).

    Parameters
    ----------
    name1, name2 : str
        Process (group) names of the two sides of the sub-matrix.
    deltas1, deltas2 : list
        Shift vectors (theory minus central) for each variation.

    Returns
    -------
    np.ndarray
        The theory covariance sub-matrix.
    """
    norm = len(deltas1)
    if name1 == name2:
        s = sum(np.outer(d, d) for d in deltas1)
    else:
        # Pair deltas positionally instead of the original O(n^2)
        # index scan that discarded all i != j terms.
        s = sum(np.outer(d1, d2) for d1, d2 in zip(deltas1, deltas2))
    return 1 / norm * s


@check_correct_theory_combination
def covs_pt_prescrip(
Expand Down Expand Up @@ -384,6 +423,31 @@ def covs_pt_prescrip(
s = covmat_7pt(name1, name2, deltas1, deltas2)
elif l == 9:
s = covmat_9pt(name1, name2, deltas1, deltas2)
        # n3lo ad variation prescription
elif l == 62:
s = covmat_n3lo_singlet(name1, name2, deltas1, deltas2)
        # n3lo ihou prescription
elif l == 64:
s_ad = covmat_n3lo_singlet(name1, name2, deltas1[:-2], deltas2[:-2])
s_cf = covmat_3pt(name1, name2, deltas1[-2:], deltas2[-2:])
s = s_ad + s_cf
# n3lo 3 pt MHOU see also
# see https://github.com/NNPDF/papers/blob/e2ac1832cf4a36dab83a696564eaa75a4e55f5d2/minutes/minutes-2023-08-18.txt#L148-L157
elif l == 66:
s_ad = covmat_n3lo_singlet(name1, name2, deltas1[:-4], deltas2[:-4])
s_mhou = covmat_3pt(name1, name2, deltas1[-4:-2], deltas2[-4:-2])
s_cf = covmat_3pt(name1, name2, deltas1[-2:], deltas2[-2:])
s = s_ad + s_cf + s_mhou
        # n3lo full covmat prescription
elif l == 70:
            # split deltas and compose thcovmat
            # splitting function variations
s_ad = covmat_n3lo_singlet(name1, name2, deltas1[:-8], deltas2[:-8])
# scale variations
s_mhou = covmat_7pt(name1, name2, deltas1[-8:-2], deltas2[-8:-2])
# massive coefficient function variations
s_cf = covmat_3pt(name1, name2, deltas1[-2:], deltas2[-2:])
s = s_ad + s_cf + s_mhou
start_locs = (start_proc[name1], start_proc[name2])
covmats[start_locs] = s
return covmats
Expand Down
18 changes: 9 additions & 9 deletions validphys2/src/validphys/theorycovariance/output.py
Original file line number Diff line number Diff line change
Expand Up @@ -79,8 +79,8 @@ def matrix_plot_labels(df):
def plot_covmat_heatmap(covmat, title):
"""Matrix plot of a covariance matrix."""
df = covmat
df.sort_index(0, inplace=True)
df.sort_index(1, inplace=True)
df.sort_index(axis=0, inplace=True)
df.sort_index(axis=1, inplace=True)
oldindex = df.index.tolist()
newindex = sorted(oldindex, key=_get_key)
# reindex index
Expand Down Expand Up @@ -181,8 +181,8 @@ def _get_key(element):
def plot_corrmat_heatmap(corrmat, title):
"""Matrix plot of a correlation matrix"""
df = corrmat
df.sort_index(0, inplace=True)
df.sort_index(1, inplace=True)
df.sort_index(axis=0, inplace=True)
df.sort_index(axis=1, inplace=True)
oldindex = df.index.tolist()
newindex = sorted(oldindex, key=_get_key)
# reindex index
Expand Down Expand Up @@ -350,18 +350,18 @@ def plot_diag_cov_comparison(
plot_index = theory_covmat_custom.index
sqrtdiags_th = np.sqrt(np.diag(theory_covmat_custom)) / data
sqrtdiags_th = pd.DataFrame(sqrtdiags_th.values, index=plot_index)
sqrtdiags_th.sort_index(0, inplace=True)
sqrtdiags_th.sort_index(axis=0, inplace=True)
oldindex = sqrtdiags_th.index.tolist()
newindex = sorted(oldindex, key=_get_key)
sqrtdiags_th = sqrtdiags_th.reindex(newindex)
sqrtdiags_exp = np.sqrt(np.diag(procs_covmat)) / data
sqrtdiags_exp = pd.DataFrame(sqrtdiags_exp.values, index=plot_index)
sqrtdiags_exp.sort_index(0, inplace=True)
sqrtdiags_exp.sort_index(axis=0, inplace=True)
sqrtdiags_exp = sqrtdiags_exp.reindex(newindex)
df_total = theory_covmat_custom + procs_covmat
sqrtdiags_tot = np.sqrt(np.diag(df_total)) / data
sqrtdiags_tot = pd.DataFrame(sqrtdiags_tot.values, index=plot_index)
sqrtdiags_tot.sort_index(0, inplace=True)
sqrtdiags_tot.sort_index(axis=0, inplace=True)
sqrtdiags_tot = sqrtdiags_tot.reindex(newindex)
fig, ax = plotutils.subplots(figsize=(20, 10))
ax.plot(sqrtdiags_exp.values, ".", label="Experiment", color="orange")
Expand Down Expand Up @@ -401,12 +401,12 @@ def plot_diag_cov_impact(
inv_tot = (np.diag(la.inv(matrix_theory + matrix_experiment))) ** (-0.5) / procs_data_values
plot_index = theory_covmat_custom.index
df_inv_exp = pd.DataFrame(inv_exp, index=plot_index)
df_inv_exp.sort_index(0, inplace=True)
df_inv_exp.sort_index(axis=0, inplace=True)
oldindex = df_inv_exp.index.tolist()
newindex = sorted(oldindex, key=_get_key)
df_inv_exp = df_inv_exp.reindex(newindex)
df_inv_tot = pd.DataFrame(inv_tot, index=plot_index)
df_inv_tot.sort_index(0, inplace=True)
df_inv_tot.sort_index(axis=0, inplace=True)
df_inv_tot = df_inv_tot.reindex(newindex)
fig, ax = plotutils.subplots()
ax.plot(df_inv_exp.values, ".", label="Experiment", color="orange")
Expand Down
12 changes: 6 additions & 6 deletions validphys2/src/validphys/theorycovariance/tests.py
Original file line number Diff line number Diff line change
Expand Up @@ -683,7 +683,7 @@ def eigenvector_plot(evals_nonzero_basis, shx_vector):
[processnames, dsnames, ids], names=("process", "dataset", "id")
)
f = pd.DataFrame(f.values, index=tripleindex)
f.sort_index(0, inplace=True)
f.sort_index(axis=0, inplace=True)
oldindex = f.index.tolist()
newindex = sorted(oldindex, key=_get_key)
f = f.reindex(newindex)
Expand Down Expand Up @@ -737,12 +737,12 @@ def deltamiss_plot(theory_shift_test, allthx_vector, evals_nonzero_basis, shx_ve
[processnames, dsnames, ids], names=("process", "dataset", "id")
)
f = pd.DataFrame(f.values, index=tripleindex)
f.sort_index(0, inplace=True)
f.sort_index(axis=0, inplace=True)
oldindex = f.index.tolist()
newindex = sorted(oldindex, key=_get_key)
f = f.reindex(newindex)
fmiss = pd.DataFrame(fmiss, index=tripleindex)
fmiss.sort_index(0, inplace=True)
fmiss.sort_index(axis=0, inplace=True)
fmiss = fmiss.reindex(newindex)
# Plotting
fig, ax = plotutils.subplots(figsize=(20, 10))
Expand Down Expand Up @@ -786,15 +786,15 @@ def shift_diag_cov_comparison(allthx_vector, shx_vector, thx_covmat, thx_vector)
[processnames, dsnames, ids], names=("process", "dataset", "id")
)
matrix = pd.DataFrame(matrix.values, index=tripleindex, columns=tripleindex)
matrix.sort_index(0, inplace=True)
matrix.sort_index(1, inplace=True)
matrix.sort_index(axis=0, inplace=True)
matrix.sort_index(axis=1, inplace=True)
oldindex = matrix.index.tolist()
newindex = sorted(oldindex, key=_get_key)
matrix = matrix.reindex(newindex)
matrix = (matrix.T.reindex(newindex)).T
sqrtdiags = np.sqrt(np.diag(matrix))
fnorm = pd.DataFrame(fnorm.values, index=tripleindex)
fnorm.sort_index(0, inplace=True)
fnorm.sort_index(axis=0, inplace=True)
fnorm = fnorm.reindex(newindex)
# Plotting
fig, ax = plotutils.subplots(figsize=(20, 10))
Expand Down
Loading

0 comments on commit 7592b58

Please sign in to comment.