Skip to content
Merged
Show file tree
Hide file tree
Changes from all commits
Commits
File filter

Filter by extension

Filter by extension


Conversations
Failed to load comments.
Loading
Jump to
Jump to file
Failed to load files.
Loading
Diff view
Diff view
11 changes: 11 additions & 0 deletions CMakeLists.txt
Original file line number Diff line number Diff line change
Expand Up @@ -41,6 +41,7 @@ option(ENABLE_TESTS "Enable unit test" OFF)
option(ENABLE_ASAN "Enable ASAN" OFF)
option(ENABLE_DEAD_STRIP "Enable use of flag `-dead_strip-dylibs`" OFF)
option(VP_DEV "validphys in developer mode" ON)
option(N3_DEV "n3fit in developer mode" ON)
set(PROFILE_PREFIX "" CACHE STRING "Where you store the 'data' folder. Default empty uses CMAKE_INSTALL_PREFIX/share/NNPDF.")

if (PROFILE_PREFIX)
Expand Down Expand Up @@ -143,9 +144,19 @@ add_subdirectory(libnnpdf)
# nnpdfcpp configuration
add_subdirectory(nnpdfcpp)

# evolven3fit
add_subdirectory(n3fit/evolven3fit)

# install validphys2
# VP_DEV=ON installs an editable (pip -e) copy for development; otherwise a
# regular install without dependency resolution (deps come from conda).
if(VP_DEV)
  install(CODE "execute_process(COMMAND ${PYTHON_EXECUTABLE} -m pip install -e ${PROJECT_SOURCE_DIR}/validphys2)")
else()
  install(CODE "execute_process(COMMAND ${PYTHON_EXECUTABLE} -m pip install --no-deps --ignore-installed ${PROJECT_SOURCE_DIR}/validphys2)")
endif()

# install n3fit
# Mirrors the validphys2 install above: N3_DEV=ON gives an editable install,
# otherwise a plain install with no dependency resolution (deps come from conda).
if(N3_DEV)
  install(CODE "execute_process(COMMAND ${PYTHON_EXECUTABLE} -m pip install -e ${PROJECT_SOURCE_DIR}/n3fit)")
else()
  install(CODE "execute_process(COMMAND ${PYTHON_EXECUTABLE} -m pip install --no-deps --ignore-installed ${PROJECT_SOURCE_DIR}/n3fit)")
endif()
2 changes: 1 addition & 1 deletion conda-recipe/build.sh
Original file line number Diff line number Diff line change
Expand Up @@ -2,6 +2,6 @@

mkdir build
cd build
cmake .. -DCMAKE_INSTALL_PREFIX=${PREFIX} -DVP_DEV=OFF
cmake .. -DCMAKE_INSTALL_PREFIX=${PREFIX} -DVP_DEV=OFF -DN3_DEV=OFF
make -j${CPU_COUNT}
make install
4 changes: 4 additions & 0 deletions conda-recipe/meta.yaml
Original file line number Diff line number Diff line change
Expand Up @@ -22,6 +22,10 @@ requirements:
- python
- numpy
run:
- tensorflow
- keras
- hyperopt
- seaborn
- lhapdf
- sqlite
- gsl
Expand Down
35 changes: 35 additions & 0 deletions n3fit/evolven3fit/CMakeLists.txt
Original file line number Diff line number Diff line change
@@ -0,0 +1,35 @@
# Build configuration for the evolven3fit executable.
# Sources are compiled straight out of nnpdfcpp/libnnpdf because evolven3fit
# reuses their settings / export-grid / evolution code directly.
set(EXECUTABLE_OUTPUT_PATH ${CMAKE_BINARY_DIR}/binaries)

# NOTE(review): this writes the configured header into the SOURCE tree; the
# include paths below rely on that location, so it is kept, but generated
# files should normally go to ${CMAKE_CURRENT_BINARY_DIR}.
configure_file(
  "${PROJECT_SOURCE_DIR}/libnnpdf/src/NNPDF/common.h.in"
  "${PROJECT_SOURCE_DIR}/libnnpdf/src/NNPDF/common.h"
)

add_executable(evolven3fit
  ${PROJECT_SOURCE_DIR}/n3fit/evolven3fit/evolven3fit.cc
  ${PROJECT_SOURCE_DIR}/nnpdfcpp/src/common/src/nnpdfsettings.cc
  ${PROJECT_SOURCE_DIR}/nnpdfcpp/src/common/src/md5.cc
  ${PROJECT_SOURCE_DIR}/nnpdfcpp/src/common/src/exportgrid.cc
  ${PROJECT_SOURCE_DIR}/nnpdfcpp/src/nnfit/src/evolgrid.cc
)

# Target-scoped include paths instead of directory-scoped include_directories(),
# so nothing leaks to other targets configured below this directory.
target_include_directories(evolven3fit PRIVATE
  ${PROJECT_SOURCE_DIR}/nnpdfcpp/src/common/inc
  ${PROJECT_SOURCE_DIR}/nnpdfcpp/src/nnfit/inc
  ${PROJECT_SOURCE_DIR}/n3fit/evolven3fit
  ${PROJECT_SOURCE_DIR}/libnnpdf/src/
)

# Linking the `nnpdf` target (added via add_subdirectory(libnnpdf)) already
# carries the library's location, so the previous manual
# "-L${PROJECT_BINARY_DIR}/libnnpdf -lnnpdf" CMAKE_EXE_LINKER_FLAGS hack
# (and the ";"->" " string surgery it required) is unnecessary.
target_link_libraries(evolven3fit PRIVATE
  nnpdf
  ${LHAPDF_LIBRARIES}
  ${YAML_LDFLAGS}
  ${APFEL_LIBRARIES}
  ${GSL_LDFLAGS}
)

install(TARGETS evolven3fit DESTINATION bin
        PERMISSIONS OWNER_READ OWNER_WRITE OWNER_EXECUTE GROUP_READ
                    GROUP_EXECUTE WORLD_READ WORLD_EXECUTE)

106 changes: 106 additions & 0 deletions n3fit/evolven3fit/evolven3fit.cc
Original file line number Diff line number Diff line change
@@ -0,0 +1,106 @@
// $Id$
//
// NNPDF++ 2016
//
// Authors: Nathan Hartland, n.p.hartland@ed.ac.uk
// Stefano Carrazza, stefano.carrazza@mi.infn.it

#include <cstdlib>
#include <iomanip>
#include <map>
#include <sstream>
#include <string>
#include <vector>
#include <sys/stat.h>
#include "common.h"
#include "nnpdfsettings.h"
#include "exportgrid.h"
#include "evolgrid.h"
using namespace NNPDF;
using std::cout;
using std::endl;
using std::cerr;
using std::string;
using std::stoi;

// Check that `folder` exists and is a directory, and that `exportfile`
// exists and is a regular file.
//
// Parameters:
//   folder     - path expected to be a directory (a replica folder)
//   exportfile - path expected to be a regular file (the .exportgrid)
// Returns true only when both paths exist with the expected type.
bool CheckConsistency(std::string const& folder, std::string const& exportfile)
{
  struct stat s, t;
  // stat() returns 0 on success; then inspect the mode bits for the type.
  const bool folder_ok = (stat(folder.c_str(), &s) == 0) && (s.st_mode & S_IFDIR);
  // Bug fix: the original tested `t.st_mode` (nonzero for ANY existing entry,
  // directories included); require a regular file explicitly.
  const bool file_ok = (stat(exportfile.c_str(), &t) == 0) && (t.st_mode & S_IFREG);
  return folder_ok && file_ok;
}

/**
* This program:
* - takes as input a fit folder and a theoryID,
* - loads a vector of ExportGrid for all replicas generated by nnfit,
* - computes the DGLAP evolution operators for the theoryID
* - applies the evolution operators to the ExportGrid objects
* - outputs the evolved PDFs in the LHAPDF format to the fit folder.
*/
int main(int argc, char **argv)
{
// Read configuration filename from arguments
if (argc != 3)
{
cerr << Colour::FG_RED << "\nusage: evolven3fit [configuration folder] [max_replicas]\n" << Colour::FG_DEFAULT << endl;
exit(EXIT_FAILURE);
}

const string fit_path = argv[1];
const int maxreplica = stoi(argv[2]);

// load settings from config folder
NNPDFSettings settings(fit_path);
const int theory_id = settings.Get("theory","theoryid").as<int>();

// load theory from db
std::map<string,string> theory_map;
NNPDF::IndexDB db(get_data_path() + "/theory.db", "theoryIndex");
db.ExtractMap(theory_id, APFEL::kValues, theory_map);

// load grids
vector<ExportGrid> initialscale_grids;
vector<int> replicas;
for (int nrep = 1; nrep <= maxreplica; nrep++)
{
const string folder = fit_path + "/nnfit/replica_" + std::to_string(nrep);
const string path = folder + "/" + settings.GetPDFName() + ".exportgrid";
bool status = CheckConsistency(folder, path);
if (status)
{
initialscale_grids.emplace_back(path);
replicas.push_back(nrep);
}
else
{
cout << "Skipping exportgrid (missing file): " << path << endl;
}
}

if (initialscale_grids.size() == 0)
throw NNPDF::RuntimeException("main", "nrep = 0, check replica folder/files.");

string infofile = fit_path + "/nnfit/" + settings.GetPDFName() + ".info";
auto dglapg = EvolveGrid(initialscale_grids, theory_map);
dglapg.WriteInfoFile(infofile);

const auto outstream = dglapg.WriteLHAFile();
for (size_t i = 0; i < outstream.size(); i++)
{
stringstream replica_file;
replica_file << fit_path
<< "/nnfit/replica_"
<< replicas[i]
<< "/"
<< settings.GetPDFName()
<< ".dat";
write_to_file(replica_file.str(), outstream[i].str());
}

return 0;
}
1 change: 1 addition & 0 deletions n3fit/evolven3fit/version.h
Original file line number Diff line number Diff line change
@@ -0,0 +1 @@
#define SVN_REV 3.1
150 changes: 150 additions & 0 deletions n3fit/runcards/Basic_runcard.yml
Original file line number Diff line number Diff line change
@@ -0,0 +1,150 @@
#
# Configuration file for NNPDF++
#

############################################################
description: NNPDF3.1 NLO fitted charm global dataset

############################################################
# frac: training fraction
# ewk: apply ewk k-factors
# sys: systematics treatment (see systypes)
experiments:
- experiment: ALL
datasets:
- { dataset: SLACP, frac: 0.5}
- { dataset: NMCPD, frac: 0.5 }
- { dataset: CMSJETS11, frac: 0.5, sys: 10 }

############################################################
datacuts:
t0pdfset : NNPDF31_nlo_as_0118 # PDF set to generate t0 covmat
q2min : 3.49 # Q2 minimum
w2min : 12.5 # W2 minimum
combocuts : NNPDF31 # NNPDF3.1 final kin. cuts
jetptcut_tev : 0 # jet pt cut for tevatron
jetptcut_lhc : 0 # jet pt cut for lhc
wptcut_lhc : 30.0 # Minimum pT for W pT diff distributions
jetycut_tev : 1e30 # jet rap. cut for tevatron
jetycut_lhc : 1e30 # jet rap. cut for lhc
dymasscut_min: 0 # dy inv.mass. min cut
dymasscut_max: 1e30 # dy inv.mass. max cut
jetcfactcut : 1e30 # jet cfact. cut

############################################################
theory:
theoryid: 53 # database id

###########################################################
hyperscan:
stopping:
min_epochs: 5e2
max_epochs: 40e2
min_patience: 0.10
max_patience: 0.40
positivity:
min_multiplier: 1.00000001
max_multiplier: 1.00000002
min_initial:
max_initial:
optimizer:
names: 'ALL' # Use all implemented optimizers
min_lr: 0.0005
max_lr: 0.5
architecture:
initializers: 'ALL'
max_drop: 0.15
n_layers: [2,3,4]
min_units: 5
max_units: 50
activations: ['sigmoid', 'tanh']


############################################################
fitting:
genrep : True # on = generate MC replicas, False = use real data
trvlseed: 1
nnseed: 2
mcseed: 3
epochs: 900
# CHANGE THE FOLLOWING OPTIONS
save: False
savefile: 'weights.hd5'
load: False
loadfile: 'weights.hd5'
plot: False

parameters: # This defines the parameter dictionary that is passed to the Model Trainer
nodes_per_layer: [15, 10, 8]
activation_per_layer: ['sigmoid', 'sigmoid', 'linear']
initializer: 'glorot_normal'
learning_rate: 0.01
optimizer: 'RMSprop'
epochs: 900
pos_multiplier: 1.05
pos_initial: # believe the pos_lambda below
stopping_patience: 0.30 # percentage of the number of epochs
layer_type: 'dense'
dropout: 0.0

# NN23(QED) = sng=0,g=1,v=2,t3=3,ds=4,sp=5,sm=6,(pht=7)
# EVOL(QED) = sng=0,g=1,v=2,v3=3,v8=4,t3=5,t8=6,(pht=7)
# EVOLS(QED)= sng=0,g=1,v=2,v8=4,t3=4,t8=5,ds=6,(pht=7)
# FLVR(QED) = g=0, u=1, ubar=2, d=3, dbar=4, s=5, sbar=6, (pht=7)
fitbasis: NN31IC # EVOL (7), EVOLQED (8), etc.
basis:
# remember to change the name of the PDF according to fitbasis
# pos: True for NN squared
# mutsize: mutation size
# mutprob: mutation probability
# smallx, largex: preprocessing ranges
- { fl: sng, pos: False, mutsize: [15], mutprob: [0.05], smallx: [1.05,1.19], largex: [1.47,2.70] }
- { fl: g, pos: False, mutsize: [15], mutprob: [0.05], smallx: [0.94,1.25], largex: [0.11,5.87] }
- { fl: v, pos: False, mutsize: [15], mutprob: [0.05], smallx: [0.54,0.75], largex: [1.15,2.76] }
- { fl: v3, pos: False, mutsize: [15], mutprob: [0.05], smallx: [0.21,0.57], largex: [1.35,3.08] }
- { fl: v8, pos: False, mutsize: [15], mutprob: [0.05], smallx: [0.52,0.76], largex: [0.77,3.56] }
- { fl: t3, pos: False, mutsize: [15], mutprob: [0.05], smallx: [-0.37,1.52], largex: [1.74,3.39] }
- { fl: t8, pos: False, mutsize: [15], mutprob: [0.05], smallx: [0.56,1.29], largex: [1.45,3.03] }
- { fl: cp, pos: False, mutsize: [15], mutprob: [0.05], smallx: [0.12,1.19], largex: [1.83,6.70] }

############################################################
stopping:
stopmethod: LOOKBACK # Stopping method
lbdelta : 0 # Delta for look-back stopping
mingen : 0 # Minimum number of generations
window : 500 # Window for moving average
minchi2 : 3.5 # Minimum chi2
minchi2exp: 6.0 # Minimum chi2 for experiments
nsmear : 200 # Smear for stopping
deltasm : 200 # Delta smear for stopping
rv : 2 # Ratio for validation stopping
rt : 0.5 # Ratio for training stopping
epsilon : 1e-6 # Gradient epsilon

############################################################
positivity:
posdatasets:
- { dataset: POSF2U, poslambda: 1e6 } # Positivity Lagrange Multiplier

############################################################
closuretest:
filterseed : 0 # Random seed to be used in filtering data partitions
fakedata : False # on = to use FAKEPDF to generate pseudo-data
fakepdf : MSTW2008nlo68cl # Theory input for pseudo-data
errorsize : 1.0 # uncertainties rescaling
fakenoise : False # on = to add random fluctuations to pseudo-data
rancutprob : 1.0 # Fraction of data to be included in the fit
rancutmethod: 0 # Method to select rancutprob data fraction
rancuttrnval: False # 0(1) to output training(validation) chi2 in report
printpdf4gen: False # To print info on PDFs during minimization

############################################################
lhagrid:
nx : 150
xmin: 1e-9
xmed: 0.1
xmax: 1.0
nq : 50
qmax: 1e5

############################################################
Loading