Skip to content
Merged
Show file tree
Hide file tree
Changes from all commits
Commits
File filter

Filter by extension

Filter by extension

Conversations
Failed to load comments.
Loading
Jump to
Jump to file
Failed to load files.
Loading
Diff view
Diff view
64 changes: 39 additions & 25 deletions n3fit/src/n3fit/layers/preprocessing.py
Original file line number Diff line number Diff line change
@@ -1,3 +1,5 @@
from typing import Optional

from n3fit.backends import MetaLayer, constraints
from n3fit.backends import operations as op

Expand Down Expand Up @@ -34,28 +36,30 @@ class Preprocessing(MetaLayer):
"""

def __init__(
    self,
    flav_info: Optional[list] = None,
    seed: int = 0,
    large_x: bool = True,
    **kwargs,
):
    """
    Parameters
    ----------
    flav_info: list
        list of flavour dictionaries; each entry provides the ``smallx`` and
        ``largex`` exponent limits (and optionally a ``trainable`` flag)
        for one flavour — required, a ``ValueError`` is raised otherwise
    seed: int
        seed for the random-uniform initialization of the exponents
    large_x: bool
        if False, the large-x (beta) exponents are fixed to 0
    """
    if flav_info is None:
        raise ValueError(
            "Trying to instantiate a preprocessing factor with no basis information"
        )
    self.flav_info = flav_info
    self.seed = seed
    self.output_dim = len(flav_info)
    self.large_x = large_x
    self.kernel = []
    # Per-flavour exponent weights, filled in build()
    self.alphas = []
    self.betas = []
    super().__init__(**kwargs)

def generate_weight(self, name: str, kind: str, dictionary: dict, set_to_zero: bool = False):
    """
    Generates weights according to the flavour dictionary

    Parameters
    ----------
    name: str
        name to be given to the generated weight
    kind: str
        where to find the limits of the weight in the dictionary
    dictionary: dict
        flavour dictionary holding the ``[min, max]`` limits for ``kind``
        and, optionally, a ``trainable`` flag
    set_to_zero: bool
        set the weight to constant 0
    """
    constraint = None
    if set_to_zero:
        initializer = MetaLayer.init_constant(0.0)
        trainable = False
    else:
        minval, maxval = dictionary[kind]
        trainable = dictionary.get("trainable", True)
        # Set the initializer and move the seed one up
        initializer = MetaLayer.select_initializer(
            "random_uniform", minval=minval, maxval=maxval, seed=self.seed
        )
        self.seed += 1
        # If we are training, constrain the weights to be within the limits
        if trainable:
            constraint = constraints.MinMaxWeight(minval, maxval)

    # Generate the new trainable (or not) parameter
    newpar = self.builder_helper(
        name=name,
        kernel_shape=(1,),
        initializer=initializer,
        trainable=trainable,
        constraint=constraint,
    )
    return newpar

def build(self, input_shape):
    # Run through the whole basis, creating one (alpha, beta) exponent
    # pair per flavour and keeping references for use in call()
    for flav_dict in self.flav_info:
        flav_name = flav_dict["fl"]
        alpha_name = f"alpha_{flav_name}"
        self.alphas.append(self.generate_weight(alpha_name, "smallx", flav_dict))
        beta_name = f"beta_{flav_name}"
        self.betas.append(
            self.generate_weight(beta_name, "largex", flav_dict, set_to_zero=not self.large_x)
        )

    super(Preprocessing, self).build(input_shape)

def call(self, x):
    """
    Compute preprocessing prefactor x^(1-alpha) * (1-x)^beta per flavour.

    Parameters
    ----------
    x: tensor(shape=[1,N,1])

    Returns
    -------
    prefactor: tensor(shape=[1,N,F])
    """
    # Stack the F per-flavour scalar weights along the flavour axis
    alphas = op.stack(self.alphas, axis=1)
    betas = op.stack(self.betas, axis=1)

    return x ** (1 - alphas) * (1 - x) ** betas
67 changes: 67 additions & 0 deletions n3fit/src/n3fit/tests/test_preprocessing.py
Original file line number Diff line number Diff line change
@@ -0,0 +1,67 @@
import numpy as np

from n3fit.layers import Preprocessing


def test_preprocessing():
    """Regression test: prefactors for a fixed seed must not drift."""
    # Flavour limits taken from the basic runcard
    flav_info = [
        {"fl": "sng", "smallx": [1.05, 1.19], "largex": [1.47, 2.7], "trainable": False},
        {"fl": "g", "smallx": [0.94, 1.25], "largex": [0.11, 5.87], "trainable": False},
        {"fl": "v", "smallx": [0.54, 0.75], "largex": [1.15, 2.76], "trainable": False},
        {"fl": "v3", "smallx": [0.21, 0.57], "largex": [1.35, 3.08]},
        {"fl": "v8", "smallx": [0.52, 0.76], "largex": [0.77, 3.56]},
        {"fl": "t3", "smallx": [-0.37, 1.52], "largex": [1.74, 3.39]},
        {"fl": "t8", "smallx": [0.56, 1.29], "largex": [1.45, 3.03]},
        {"fl": "cp", "smallx": [0.12, 1.19], "largex": [1.83, 6.7]},
    ]
    layer = Preprocessing(flav_info=flav_info, seed=1)
    np.random.seed(42)
    x_input = np.random.uniform(size=(1, 4, 1))
    # Expected output, shape (1, 4, 8): one row of 8 flavours per x point
    expected = np.array(
        [
            3.7446213e-01, 1.9785003e-01, 2.7931085e-01, 2.0784079e-01,
            4.5369801e-01, 2.7796263e-01, 5.4610312e-01, 2.4907256e-02,
            6.2252983e-04, 3.0504008e-05, 4.5713778e-03, 1.0905267e-03,
            4.0506415e-02, 5.9004971e-05, 4.5114113e-03, 2.6757403e-09,
            4.1631009e-02, 1.0586979e-02, 8.3202787e-02, 4.3506064e-02,
            2.2559988e-01, 1.5161950e-02, 1.0105091e-01, 1.4808348e-04,
            1.1616933e-01, 4.2717375e-02, 1.5620175e-01, 9.7478621e-02,
            3.2600221e-01, 5.8901049e-02, 2.1937098e-01, 1.8343410e-03,
        ]
    ).reshape(1, 4, 8)
    np.testing.assert_allclose(expected, layer(x_input))