Skip to content

Commit

Permalink
Register the unpolarized boundary condition (bc) also with the validphys (vp) interface
Browse files Browse the repository at this point in the history
  • Loading branch information
scarlehoff committed Aug 9, 2024
1 parent 241b0b5 commit d4087d5
Show file tree
Hide file tree
Showing 2 changed files with 14 additions and 8 deletions.
14 changes: 8 additions & 6 deletions n3fit/src/n3fit/hyper_optimization/rewards.py
Original file line number Diff line number Diff line change
Expand Up @@ -196,7 +196,7 @@ def compute_loss(
self,
penalties: dict[str, np.ndarray],
experimental_loss: np.ndarray,
pdf_model: MetaModel,
pdf_object: N3PDF,
experimental_data: list[DataGroupSpec],
fold_idx: int = 0,
) -> float:
Expand All @@ -214,8 +214,8 @@ def compute_loss(
as defined in 'penalties.py' and instantiated within :class:`~n3fit.model_trainer.ModelTrainer`.
experimental_loss: NDArray(replicas)
Experimental loss for each replica.
pdf_model: :class:`n3fit.backends.MetaModel`
N3fitted meta-model.
pdf_object: :class:`n3fit.vpinterface.N3PDF`
N3fitted PDF
experimental_data: List[validphys.core.DataGroupSpec]
List of tuples containing `validphys.core.DataGroupSpec` instances for each group data set
fold_idx: int
Expand All @@ -233,18 +233,20 @@ def compute_loss(
>>> import numpy as np
>>> from n3fit.hyper_optimization.rewards import HyperLoss
>>> from n3fit.model_gen import generate_pdf_model
>>> from n3fit.vpinterface import N3PDF
>>> from validphys.loader import Loader
>>> hyper = HyperLoss(loss_type="chi2", replica_statistic="average", fold_statistic="average")
>>> penalties = {'saturation': np.array([1.0, 2.0]), 'patience': np.array([3.0, 4.0]), 'integrability': np.array([5.0, 6.0]),}
>>> experimental_loss = np.array([0.1, 0.2])
>>> ds = Loader().check_dataset("NMC_NC_NOTFIXED_P_EM-SIGMARED", theoryid=399, cuts="internal")
>>> ds = Loader().check_dataset("NMC_NC_NOTFIXED_P_EM-SIGMARED", variant="legacy", theoryid=399, cuts="internal")
>>> experimental_data = [Loader().check_experiment("My DataGroupSpec", [ds])]
>>> fake_fl = [{'fl' : i, 'largex' : [0,1], 'smallx': [1,2]} for i in ['u', 'ubar', 'd', 'dbar', 'c', 'g', 's', 'sbar']]
>>> pdf_model = generate_pdf_model(nodes=[8], activations=['linear'], seed=0, num_replicas=2, flav_info=fake_fl, fitbasis="FLAVOUR")
>>> loss = hyper.compute_loss(penalties, experimental_loss, pdf_model, experimental_data)
>>> pdf = N3PDF(pdf_model.split_replicas())
>>> loss = hyper.compute_loss(penalties, experimental_loss, pdf, experimental_data)
"""
# calculate phi for a given k-fold using vpinterface and validphys
phi_per_fold = compute_phi(N3PDF(pdf_model.split_replicas()), experimental_data)
phi_per_fold = compute_phi(pdf_object, experimental_data)

# update hyperopt metrics
# these are saved in the phi_vector and chi2_matrix attributes, excluding penalties
Expand Down
8 changes: 6 additions & 2 deletions n3fit/src/n3fit/model_trainer.py
Original file line number Diff line number Diff line change
Expand Up @@ -1007,11 +1007,15 @@ def hyperparametrizable(self, params):
# containing only exp datasets within the held out fold
experimental_data = self._filter_datagroupspec(partition["datasets"])

vplike_pdf = N3PDF(pdf_model.split_replicas())
if self.boundary_condition is not None:
vplike_pdf.register_boundary(self.boundary_condition["unpolarized_bc"])

# Compute per replica hyper losses
hyper_loss = self._hyper_loss.compute_loss(
penalties=penalties,
experimental_loss=experimental_loss,
pdf_model=pdf_model,
pdf_object=vplike_pdf,
experimental_data=experimental_data,
fold_idx=k,
)
Expand All @@ -1025,7 +1029,7 @@ def hyperparametrizable(self, params):
]
trvl_data = self._filter_datagroupspec(trvl_exp_names)
# evaluate phi on training/validation exp set
trvl_phi = compute_phi(N3PDF(pdf_model.split_replicas()), trvl_data)
trvl_phi = compute_phi(vplike_pdf, trvl_data)

# Now save all information from this fold
l_hyper.append(hyper_loss)
Expand Down

0 comments on commit d4087d5

Please sign in to comment.