Skip to content

Commit

Permalink
Make weight initialization reproducible
Browse files Browse the repository at this point in the history
  • Loading branch information
APJansen committed Mar 4, 2024
1 parent 625372d commit 2230dbd
Show file tree
Hide file tree
Showing 4 changed files with 20 additions and 13 deletions.
1 change: 1 addition & 0 deletions n3fit/src/n3fit/backends/__init__.py
Original file line number Diff line number Diff line change
Expand Up @@ -18,5 +18,6 @@
set_eager,
set_initial_state,
)
from n3fit.backends.keras_backend.multi_dense import MultiInitializer

print("Using Keras backend")
28 changes: 17 additions & 11 deletions n3fit/src/n3fit/layers/preprocessing.py
Original file line number Diff line number Diff line change
@@ -1,6 +1,6 @@
from typing import Optional
from typing import List, Optional

from n3fit.backends import MetaLayer, constraints
from n3fit.backends import MetaLayer, MultiInitializer, constraints
from n3fit.backends import operations as op


Expand Down Expand Up @@ -31,16 +31,16 @@ class Preprocessing(MetaLayer):
(defaults to true)
large_x: bool
Whether large x preprocessing factor should be active
seed: int
seed for the initializer of the random alpha and beta values
replica_seeds: List[int]
list of per-replica seeds for the initializer of the random alpha and beta values
num_replicas: int (default 1)
The number of replicas
"""

def __init__(
self,
flav_info: Optional[list] = None,
seed: int = 0,
replica_seeds: Optional[List[int]] = None,
large_x: bool = True,
num_replicas: int = 1,
**kwargs,
Expand All @@ -50,7 +50,10 @@ def __init__(
"Trying to instantiate a preprocessing factor with no basis information"
)
self.flav_info = flav_info
self.seed = seed
if replica_seeds is None:
self.replica_seeds = [0] * num_replicas # TODO Aron: figure out if this ever happens
else:
self.replica_seeds = replica_seeds
self.large_x = large_x
self.num_replicas = num_replicas

Expand All @@ -75,20 +78,23 @@ def generate_weight(self, name: str, kind: str, dictionary: dict, set_to_zero: b
"""
constraint = None
if set_to_zero:
initializer = MetaLayer.init_constant(0.0)
single_replica_initializer = MetaLayer.init_constant(0.0)
trainable = False
else:
minval, maxval = dictionary[kind]
trainable = dictionary.get("trainable", True)
# Set the initializer and move the seed one up
initializer = MetaLayer.select_initializer(
"random_uniform", minval=minval, maxval=maxval, seed=self.seed
# Seeds will be set in the wrapper MultiInitializer
single_replica_initializer = MetaLayer.select_initializer(
"random_uniform", minval=minval, maxval=maxval, seed=0
)
self.seed += 1
# If we are training, constrain the weights to be within the limits
if trainable:
constraint = constraints.MinMaxWeight(minval, maxval)

initializer = MultiInitializer(single_replica_initializer, self.replica_seeds)
# increment seeds for the next coefficient
self.replica_seeds = [seed + 1 for seed in self.replica_seeds]

# Generate the new trainable (or not) parameter
newpar = self.builder_helper(
name=name,
Expand Down
2 changes: 1 addition & 1 deletion n3fit/src/n3fit/model_gen.py
Original file line number Diff line number Diff line change
Expand Up @@ -582,7 +582,7 @@ def pdfNN_layer_generator(
flav_info=flav_info,
input_shape=(1,),
name=PREPROCESSING_LAYER_ALL_REPLICAS,
seed=seed[0] + number_of_layers,
replica_seeds=seed,
large_x=not subtract_one,
num_replicas=num_replicas,
)
Expand Down
2 changes: 1 addition & 1 deletion n3fit/src/n3fit/tests/test_preprocessing.py
Original file line number Diff line number Diff line change
Expand Up @@ -16,7 +16,7 @@ def test_preprocessing():
{'fl': 't8', 'smallx': [0.56, 1.29], 'largex': [1.45, 3.03]},
{'fl': 'cp', 'smallx': [0.12, 1.19], 'largex': [1.83, 6.7]},
]
prepro = Preprocessing(flav_info=flav_info, seed=1)
prepro = Preprocessing(flav_info=flav_info, replica_seeds=[1])
np.random.seed(42)
test_x = np.random.uniform(size=(1, 4, 1))
test_prefactors = [
Expand Down

0 comments on commit 2230dbd

Please sign in to comment.