From 2f91d15aa9e31a320c72f045cc38b003c2693bca Mon Sep 17 00:00:00 2001 From: Douglas Burns Date: Mon, 21 Nov 2016 09:30:29 +0000 Subject: [PATCH 01/90] change tauscan pltName if using unfolded --- dps/analysis/unfolding_tests/getBestTau.py | 320 ++++++++++++--------- 1 file changed, 188 insertions(+), 132 deletions(-) diff --git a/dps/analysis/unfolding_tests/getBestTau.py b/dps/analysis/unfolding_tests/getBestTau.py index 305b14dc..97381714 100644 --- a/dps/analysis/unfolding_tests/getBestTau.py +++ b/dps/analysis/unfolding_tests/getBestTau.py @@ -17,8 +17,9 @@ usage: python getBestTau.py config.json # for 13 TeV in the visible phase space : - python dps/analysis/unfolding_tests/getBestTau.py config/unfolding/VisiblePS/*.json -n 100 --refold_plots --test + python dps/analysis/unfolding_tests/getBestTau.py config/unfolding/VisiblePS/*.json -n 100 -t 0.005 --refold_plots --test -n = number of tau points + -t = specific tau value --refold_plots = output some comparison plots for every tau (suggest few tau) --test = runs the measured distribution as data. Should return P(Chi2|NDF) of 0 i.e. 
exact ''' @@ -36,7 +37,7 @@ from dps.utils.plotting import Histogram_properties from dps.config import CMS from dps.config.latex_labels import variables_latex -from ROOT import TUnfoldDensity, TUnfold, TCanvas, TPad, TMath, gROOT, TRandom3 +from ROOT import TUnfoldDensity, TUnfold, TCanvas, TPad, TLegend, TMath, gROOT, TRandom3 from dps.config.variable_binning import reco_bin_edges_vis # , gen_bin_edges_vis @@ -84,10 +85,11 @@ def __init__( self, input_values ): def __set_unfolding_histograms__( self ): # at the moment only one file is supported for the unfolding input - files = set( [self.truth['file'], - self.gen_vs_reco['file'], - self.measured['file']] - ) + files = set( + [self.truth['file'], + self.gen_vs_reco['file'], + self.measured['file']] + ) if len( files ) > 1: print "Currently not supported to have different files for truth, gen_vs_reco and measured" sys.exit() @@ -141,51 +143,69 @@ def main(): clear_old_df('tables/taufinding/') for input_values, json_file in zip( input_values_sets, json_input_files ): - # print '\nProcessing', json_file + # Initialise the TauFinding class regularisation_settings = TauFinding( input_values ) - # Set additional elemtents - regularisation_settings.taus_to_test = get_tau_list(args.n_ticks_in_log) - variable = regularisation_settings.variable channel = regularisation_settings.channel com = regularisation_settings.centre_of_mass_energy - if 'muon' not in channel : continue + + # Specific channel or variable + if args.ch: + if args.ch not in channel: continue + if args.var: + if args.var not in variable: continue + + print 'Running for:' print 'Variable = {0}, channel = {1}, sqrt(s) = {2}'.format(variable, channel, com) + # Set additional elements + regularisation_settings.taus_to_test = get_tau_values(args.n_tau_in_log) + isTauCalculator = True + + # Specific unfolding tests go here + if args.specific_tau is not None: + regularisation_settings.taus_to_test = [args.specific_tau] + df_chi2_specific_tau = 
get_chi2(regularisation_settings, args) + isTauCalculator = False + if args.run_measured_as_data: regularisation_settings.taus_to_test = [0] regularisation_settings.h_data = regularisation_settings.h_measured - df_chi2 = get_chi2s_of_tau_range(regularisation_settings, args) - - if args.perform_varied_measured_unfolding_test: - h_data = hist_to_value_error_tuplelist(regularisation_settings.h_data) - h_data_varied = [(return_rnd_Poisson(val),return_rnd_Poisson(err)) for val, err in h_data ] - h_data_varied = value_error_tuplelist_to_hist(h_data_varied, reco_bin_edges_vis[variable]) - regularisation_settings.h_data = h_data_varied - df_chi2_smeared = get_chi2s_of_tau_range(regularisation_settings, args, unfold_test=True) - print df_chi2_smeared - # No point in trying to find best tau if it is given as 0... - sys.exit() + df_chi2_measured = get_chi2(regularisation_settings, args) + isTauCalculator = False + + if args.run_smeared_measured_as_data: + regularisation_settings.taus_to_test = [0] + regularisation_settings.h_data = regularisation_settings.h_measured + h_data = hist_to_value_error_tuplelist(regularisation_settings.h_data) + h_data_varied = [(return_rnd_Poisson(val),return_rnd_Poisson(err)) for val, err in h_data ] + h_data_varied = value_error_tuplelist_to_hist(h_data_varied, reco_bin_edges_vis[variable]) + regularisation_settings.h_data = h_data_varied + df_chi2_smeared = get_chi2(regularisation_settings, args, smearing_test=True) + isTauCalculator = False - # Find the corresponding Chi2 and write to file - df_chi2 = get_chi2s_of_tau_range(regularisation_settings, args) + # Dont need to calculate tau for given tests + if not isTauCalculator: sys.exit() + + # Find Chi2 for each tau and write to file + df_chi2 = get_chi2(regularisation_settings, args) print df_chi2 - # Have the dataframes now - albeit read to a file - # Read in each one corresponding to their channel - # Find the best tau and print to screen - for channel in ['electron', 'muon', 'combined']: - 
chi2_cut = 0.005 - path = regularisation_settings.outpath+'tbl_'+channel+'_tauscan.txt' - df_chi2 = get_df_from_file(path) - if df_chi2 is None: continue - print '\n', "1 - P(Chi2|NDF)", '\n', df_chi2, '\n' - - # cutoff to be changed to 0.001 when able to - best_taus = interpolate_tau(chi2_cut, df_chi2) - chi2_to_plots(df_chi2, regularisation_settings, chi2_cut, channel) - print_results_to_screen(best_taus, channel) + # Have the dataframes now - albeit read to a file + # Read in each one corresponding to their channel + # Find the best tau and print to screen + for channel in ['electron', 'muon', 'combined']: + chi2_cut = 0.005 + path = regularisation_settings.outpath+'tbl_'+channel+'_tauscan.txt' + df_chi2 = get_df_from_file(path) + if df_chi2 is None: continue + print '\n', "1 - P(Chi2|NDF)", '\n', df_chi2, '\n' + + # cutoff to be changed to 0.001 when able to + best_taus = interpolate_tau(chi2_cut, df_chi2) + chi2_to_plots(args, df_chi2, regularisation_settings, chi2_cut, channel) + print_results_to_screen(best_taus, channel) return @@ -197,27 +217,40 @@ def parse_options(): parser.add_argument("in_files", nargs='*', help="List of the input files") - parser.add_argument( "-t", "--test", + parser.add_argument( "--measured_test", dest = "run_measured_as_data", action = "store_true", - help = "For debugging - run the measured distribution as data." ) - parser.add_argument( "-v", "--vary_measured_test", - dest = "perform_varied_measured_unfolding_test", + help = "For debugging. Run the measured distribution as data." ) + parser.add_argument( "--smeared_test", + dest = "run_smeared_measured_as_data", action = "store_true", - help = "Unfolding test. Vary measured vals by Poisson then find ChiSq" ) + help = "Test. 
Run (poisson) smeared measured distribution as data" ) parser.add_argument( "-p", "--refold_plots", - dest = "run_refold_plots", + dest = "create_refold_plots", action = "store_true", - help = "For debugging - output unfolded vs refolded for each tau" ) - parser.add_argument( "-n", "--n_ticks_in_log", - dest = "n_ticks_in_log", + help = "Plot. Produce unfolded vs refolded plot for each tau run" ) + parser.add_argument( "-n", "--n_tau_in_log", + dest = "n_tau_in_log", default = 10, type = int, help = "How many taus in the range do you want" ) + parser.add_argument( "-t", "--tau", + dest = "specific_tau", + default = None, + type = float, + help = "How many taus in the range do you want" ) parser.add_argument( "-u", "--unfolded_binning", dest = "unfolded_binning", action = "store_true", help = "Run the tau scans for unfolded (gen) binning" ) + parser.add_argument( "-c", "--channel", + dest = "ch", + default = "", + help = "Which channel to run over" ) + parser.add_argument( "-v", "--variable", + dest = "var", + default = "", + help = "Which varibale to run over" ) args = parser.parse_args() if args.unfolded_binning: @@ -238,37 +271,36 @@ def clear_old_df(path): ''' Delete any previous dataframe. (Code would append a new dataframe to file instead of replace) ''' - for root, dirs, files in os.walk(path, topdown=False): for name in files: os.remove(os.path.join(root, name)) return -def get_tau_list(logSpacing, logMin = log10(pow(10,-16)), logMax = log10(1)): +def get_tau_values(logSpacing, logMin = log10(pow(10,-16)), logMax = log10(1)): ''' - Large scanning range from unity to 10^-8. Split into equal points based on log system + Large scanning range from 1 to 10^-16. Split into equal points based on log system given the number of tau points to scan over. 
''' - taus = [] r = int(logMax - logMin) - tau_test_range = [10**(logMax - i/float(logSpacing)) for i in range(r*logSpacing)] - return tau_test_range + tau_values = [10**(logMax - i/float(logSpacing)) for i in range(r*logSpacing)] + return tau_values -def get_chi2s_of_tau_range( regularisation_settings, args, unfold_test=False ): +def get_chi2( regularisation_settings, args, smearing_test=False ): ''' Takes each tau value, unfolds and refolds, calcs the chi2, the prob of chi2 given ndf (n_bins) and returns a dictionary of (1-P(Chi2|NDF)) for each tau For measured test where we only worry about tau=0 outputs tau variables to data frame (+smeared measured values) ''' h_truth, h_response, h_measured, h_data, h_fakes = regularisation_settings.get_histograms() - if not args.run_measured_as_data : + + # Dont remove any fakes if we are using the true mc distribution + if not args.run_measured_as_data or not args.run_smeared_measured_as_data: h_data = removeFakes( h_measured, h_fakes, h_data ) variable = regularisation_settings.variable taus = regularisation_settings.taus_to_test chi2_ndf = [] for tau in taus: - unfolding = Unfolding( h_data, h_truth, @@ -278,6 +310,7 @@ def get_chi2s_of_tau_range( regularisation_settings, args, unfold_test=False ): method = 'TUnfold', tau = tau ) + # Cannot refold without first unfolding h_unfolded_data = unfolding.unfold() h_refolded_data = unfolding.refold() @@ -292,27 +325,28 @@ def get_chi2s_of_tau_range( regularisation_settings, args, unfold_test=False ): regularisation_settings.h_refolded = h_refolded_data ndf = regularisation_settings.ndf - if args.run_refold_plots: - plot_data_vs_refold(args, regularisation_settings, tau) if args.unfolded_binning: unfolding.refolded_data = h_refolded_data.rebinned(2) unfolding.data = h_data.rebinned(2) ndf = int(regularisation_settings.ndf / 2) - # print("Data") - # print (hist_to_value_error_tuplelist(regularisation_settings.h_data)) - # print("Refolded Data") - # print 
(hist_to_value_error_tuplelist(regularisation_settings.h_refolded)) + regularisation_settings.h_refolded = unfolding.refolded_data + regularisation_settings.h_data = unfolding.data + if args.create_refold_plots: + plot_data_vs_refold(args, regularisation_settings, tau) + # Calculate the chi2 between refold and unfold chi2 = unfolding.getUnfoldRefoldChi2() + # Calculate the Prob chi2 given NDF prob = TMath.Prob( chi2, ndf ) + # 1-P(Chi2|NDF) chi2_ndf.append(1-prob) # print( tau, chi2, prob, 1-prob ) - # Create pandas dictionary + # Create tau and Chi2 dictionary d_chi2 = {variable : pd.Series( chi2_ndf )} d_taus = {'tau' : pd.Series( taus )} - if unfold_test: + if smearing_test: d_tau_vars = { variable : { 'Tau' : tau, @@ -383,8 +417,6 @@ def chi2_to_df(chi2, taus, regularisation_settings, appendage=''): # return the new df return df_new - - def get_df_from_file(p): ''' Get the dataframe from the file @@ -399,11 +431,10 @@ def get_df_from_file(p): print "Cannot find path : ", p return df -def chi2_to_plots(df_chi2, regularisation_settings, chi2_cut, channel): +def chi2_to_plots(args,df_chi2, regularisation_settings, chi2_cut, channel): ''' Plot chi2 figures ''' - # variable = regularisation_settings.variable plot_outpath = regularisation_settings.outpath.replace('tables/', 'plots/') + 'tauscan/' make_folder_if_not_exists(plot_outpath) @@ -414,107 +445,89 @@ def chi2_to_plots(df_chi2, regularisation_settings, chi2_cut, channel): for var in df_chi2.columns: if var == 'tau': continue + # Plot tau distributions for each variable plt.loglog( df_chi2['tau'], df_chi2[var], label = variables_latex[var], ) + # Plot current chi2 cutoff value plt.axhline(y=chi2_cut, color='black', linestyle='dashed') + + # Plot legend handles, labels = ax1.get_legend_handles_labels() ax1.legend(handles, labels, loc=4) + + # Plot axis titles ax1.set_xlabel('Regularisation Parameter \ensuremath{\\tau}') ax1.set_ylabel('\ensuremath{1-P(\\chi^{2}|NDF)}') + # Save plot pltName = 
os.path.join(plot_outpath,'{channel}_all_tauscan.pdf'.format(channel = channel)) + if args.unfolded_binning: + pltName = pltName.replace('.pdf', '_unf_binning.pdf') fig1.savefig(pltName) - print "Written plots to {plot_outpath}{channel}_all_tauscan.pdf".format(plot_outpath = plot_outpath, channel = channel) - + print "Written plots to {plot_outpath}{pltName}".format(plot_outpath = plot_outpath, pltName = pltName) return -def interpolate_tau(cutoff, df_chi2): - ''' - Interpolate to get best tau from tau scan - 1e-8 < tau < 1 - n < i < 0 - - chisq_lo chisq cutoff chisq_hi - |------------|-------------------------| - a b - Find ratio a/(a+b) - Interpolate to find best tau - tau = tau_lo + ratio * (tau_hi - tau_lo) - - | - \|/ - |--------------------------------------| - tau_lo best tau tau_hi - ''' - best_tau = {} - for variable in df_chi2.columns: - if variable == 'tau': continue - - i=0 - for chisq in df_chi2[variable]: - if chisq > cutoff: - i+=1 - continue - else: - break - if chisq > cutoff: - print "{var} exceeds required cut".format(var=variable) - # last i becomes out of range - best_tau[variable] = df_chi2['tau'][i-1] - else: - chisq_lo = df_chi2[variable][i+1] - chisq_hi = df_chi2[variable][i] - ratio = (cutoff - chisq_lo) / (chisq_hi - chisq_lo) - tau_lo = df_chi2['tau'][i+1] - tau_hi = df_chi2['tau'][i] - tau = tau_lo + ratio*(tau_hi - tau_lo) - best_tau[variable] = tau - return best_tau - - def plot_data_vs_refold(args, regularisation_settings, tau): ''' Plot the differences between the unfolded and refolded distributions TODO Include also with best tau - redo unfolding with best tau then come here ''' - tau = str(tau).replace('.', 'p') - # data = hist_to_value_error_tuplelist(regularisation_settings.h_data) - # measured = hist_to_value_error_tuplelist(regularisation_settings.h_measured) + from ROOT import gStyle + variable = regularisation_settings.variable channel = regularisation_settings.channel - plot_outpath = 
regularisation_settings.outpath.replace('tables/', 'plots/')+variable+'/' + plot_outpath = regularisation_settings.outpath.replace('tables/', 'plots/')+'tauscan/taus/' make_folder_if_not_exists(plot_outpath) - outfile = plot_outpath+channel+'_unfold_refold_test_tau_'+tau+'.pdf' + + # tau as string name for output + tau = str(tau).replace('.', 'p') + + outfile = plot_outpath+'data_vs_refold_'+channel+'_'+variable+'_tau_'+tau+'.pdf' if args.run_measured_as_data: - outfile = plot_outpath+channel+'_run_measured_as_data_tau_'+tau+'.pdf' + outfile = plot_outpath+'measured_vs_refold_'+channel+'_'+variable+'_tau_'+tau+'.pdf' + if args.run_smeared_measured_as_data: + outfile = plot_outpath+'smeared_vs_refold_'+channel+'_'+variable+'_tau_'+tau+'.pdf' + if args.unfolded_binning: + outfile = outfile.replace('.pdf', '_unf_binning.pdf') + + c = TCanvas('c1','c1',1000,800) + gStyle.SetOptStat(0) - c = TCanvas('c1','c1',600,400) - c.SetFillColor(2); p1 = TPad("pad1", "p1",0.0,0.2,1.0,1.0,21) - p2 = TPad("pad2", "p2",0.0,0.0,1.0,0.2,22) p1.SetFillColor(0); - p2.SetFillColor(0); p1.Draw() + p2 = TPad("pad2", "p2",0.0,0.0,1.0,0.2,22) + p2.SetFillColor(0); p2.Draw() + p1.cd() - regularisation_settings.h_refolded.SetMarkerStyle(10); - regularisation_settings.h_refolded.SetMarkerColor(4); - # regularisation_settings.h_refolded.SetMarkerSize(10); - regularisation_settings.h_refolded.Draw() - regularisation_settings.h_data.SetFillColor(3); - regularisation_settings.h_data.Draw("hist same"); + regularisation_settings.h_data.SetTitle("Data vs Refolded Data;;NEvents") + regularisation_settings.h_data.Draw() + + regularisation_settings.h_refolded.SetLineColor(2) + regularisation_settings.h_refolded.Draw("same") + + leg1 = TLegend(0.7, 0.8, 0.9, 0.9) + leg1.SetLineColor(0) + leg1.SetFillColor(0) + leg1.AddEntry(regularisation_settings.h_data, "Data") + leg1.AddEntry(regularisation_settings.h_refolded, "Refolded Data") + leg1.Draw() p2.cd() h_ratio = regularisation_settings.h_data.Clone() 
h_ratio.Divide(regularisation_settings.h_refolded) - h_ratio.SetMarkerSize(0.1); + h_ratio.SetTitle(";"+variable+";") + h_ratio.SetLineColor(1); h_ratio.Draw() + c.SaveAs(outfile) c.Delete() + print "Written plots to {outfile}".format(outfile = outfile) return def print_results_to_screen(best_taus, channel): @@ -527,7 +540,6 @@ def print_results_to_screen(best_taus, channel): print '"{0}" : {1},'.format(variable, tau) return - def return_rnd_Poisson(mu): ''' Returning a random poisson number @@ -545,6 +557,50 @@ def return_rnd_Poisson(mu): poisson = gRandom.Poisson rnd_po = poisson( mu ) return rnd_po + +def interpolate_tau(cutoff, df_chi2): + ''' + Interpolate to get best tau from tau scan + 1e-8 < tau < 1 + n < i < 0 + + chisq_lo chisq cutoff chisq_hi + |------------|-------------------------| + a b + Find ratio a/(a+b) + Interpolate to find best tau + tau = tau_lo + ratio * (tau_hi - tau_lo) + + | + \|/ + |--------------------------------------| + tau_lo best tau tau_hi + ''' + best_tau = {} + for variable in df_chi2.columns: + if variable == 'tau': continue + + i=0 + for chisq in df_chi2[variable]: + if chisq > cutoff: + i+=1 + continue + else: + break + if chisq > cutoff: + print "{var} exceeds required cut".format(var=variable) + # last i becomes out of range + best_tau[variable] = df_chi2['tau'][i-1] + else: + chisq_lo = df_chi2[variable][i+1] + chisq_hi = df_chi2[variable][i] + ratio = (cutoff - chisq_lo) / (chisq_hi - chisq_lo) + tau_lo = df_chi2['tau'][i+1] + tau_hi = df_chi2['tau'][i] + tau = tau_lo + ratio*(tau_hi - tau_lo) + best_tau[variable] = tau + return best_tau + if __name__ == '__main__': set_root_defaults( set_batch = True, msg_ignore_level = 3001 ) main() From 6006c0b9efc1d67b34c2db800f3908b4bc765705 Mon Sep 17 00:00:00 2001 From: Douglas Burns Date: Tue, 22 Nov 2016 09:36:01 +0000 Subject: [PATCH 02/90] Break properly if tau does not need to be calc --- dps/analysis/unfolding_tests/getBestTau.py | 15 +++++++++------ 1 file changed, 9 
insertions(+), 6 deletions(-) diff --git a/dps/analysis/unfolding_tests/getBestTau.py b/dps/analysis/unfolding_tests/getBestTau.py index 97381714..62fd4324 100644 --- a/dps/analysis/unfolding_tests/getBestTau.py +++ b/dps/analysis/unfolding_tests/getBestTau.py @@ -185,12 +185,14 @@ def main(): df_chi2_smeared = get_chi2(regularisation_settings, args, smearing_test=True) isTauCalculator = False - # Dont need to calculate tau for given tests - if not isTauCalculator: sys.exit() + # Dont need to calculate chi2 for given tau tests + if not isTauCalculator: continue # Find Chi2 for each tau and write to file df_chi2 = get_chi2(regularisation_settings, args) - print df_chi2 + + # Dont need to calculate tau for given tests + if not isTauCalculator: sys.exit() # Have the dataframes now - albeit read to a file # Read in each one corresponding to their channel @@ -245,11 +247,13 @@ def parse_options(): help = "Run the tau scans for unfolded (gen) binning" ) parser.add_argument( "-c", "--channel", dest = "ch", - default = "", + default = None, + type = str, help = "Which channel to run over" ) parser.add_argument( "-v", "--variable", dest = "var", - default = "", + default = None, + type = str, help = "Which varibale to run over" ) args = parser.parse_args() @@ -553,7 +557,6 @@ def return_rnd_Poisson(mu): gRandom = TRandom3() gRandom.SetSeed(0) # Cache for quicker running - landau = gRandom.Landau poisson = gRandom.Poisson rnd_po = poisson( mu ) return rnd_po From 68fa1362291fbfa0a90f1321d6dd7b0efb881392 Mon Sep 17 00:00:00 2001 From: Douglas Burns Date: Fri, 4 Nov 2016 09:11:59 +0000 Subject: [PATCH 03/90] add printouts --- .../DougsBTagEff/makeBTagEfficiencies.py | 20 +++++++++++++++---- 1 file changed, 16 insertions(+), 4 deletions(-) diff --git a/dps/experimental/DougsBTagEff/makeBTagEfficiencies.py b/dps/experimental/DougsBTagEff/makeBTagEfficiencies.py index 5c8a5d67..a026d47c 100644 --- a/dps/experimental/DougsBTagEff/makeBTagEfficiencies.py +++ 
b/dps/experimental/DougsBTagEff/makeBTagEfficiencies.py @@ -27,8 +27,8 @@ 0 : ["TTJets_PowhegPythia8_tree.root" , "PowhegPythia8"], 1 : ["TTJets_powhegHerwigpp_tree.root" , "PowhegHerwigpp"], 2 : ["TTJets_amc_tree.root" , "aMCatNLOPythia8"], - 3 : ["TTJets_amcatnloHerwigpp_tree.root" , "aMCatNLOHerwigpp"], - 4 : ["TTJets_madgraph_tree.root" , "Madgraph"], + 3 : ["TTJets_madgraph_tree.root" , "Madgraph"], + # 4 : ["TTJets_amcatnloHerwigpp_tree.root" , "aMCatNLOHerwigpp"], } partonHists = [ @@ -114,8 +114,20 @@ if (NJets == 0): continue; for JetIndex in range (0,int(NJets)): - - if (pt[JetIndex] < 25): continue; + if (pt[JetIndex] < 30): continue; + if (abs(eta[JetIndex]) > 2.4): continue; + # print "- "*10 + # print 'leptonweight chosen ' , leptonWeight + # print 'electron wieght ' ,event.__getattr__("ElectronEfficiencyCorrection") + # print 'muon weight ' ,event.__getattr__("MuonEfficiencyCorrection") + # print "NJets ", NJets + # print "Weight ", weight + # print "eventWeight ", eventWeight + # print "puWeight ", puWeight + # print "lepWeight ", leptonWeight + # print "HadronFlavour ", hadronFlavour[JetIndex] + # print "is Med BTagged", isMedium[JetIndex] + # print "Pt jet", pt[JetIndex] if (hadronFlavour[JetIndex] == 5): bQuarkJets_Total_Hist[sample].Fill(pt[JetIndex], eta[JetIndex], weight) From 8067a15b552fe513927f39c332f1eabc33b61a4a Mon Sep 17 00:00:00 2001 From: Douglas Burns Date: Mon, 7 Nov 2016 09:16:42 +0000 Subject: [PATCH 04/90] add TTBarMor17 samnple --- dps/experimental/DougsBTagEff/makeBTagEfficiencies.py | 11 +++++++---- 1 file changed, 7 insertions(+), 4 deletions(-) diff --git a/dps/experimental/DougsBTagEff/makeBTagEfficiencies.py b/dps/experimental/DougsBTagEff/makeBTagEfficiencies.py index a026d47c..a237eb28 100644 --- a/dps/experimental/DougsBTagEff/makeBTagEfficiencies.py +++ b/dps/experimental/DougsBTagEff/makeBTagEfficiencies.py @@ -13,21 +13,22 @@ gStyle.SetOptStat("") parser = OptionParser() - parser.add_option("-t", "--test", 
dest="test", default=False, + parser.add_option("-t", "--test", dest="test", action = "store_true", help="Run over a few events only") - parser.add_option("-p", "--plots", dest="make_plots", default=True, + parser.add_option("-p", "--plots", dest="make_plots", action = "store_true", help="Print out files to .png") - parser.add_option("-o", "--only_plots", dest="only_plots", default=False, + parser.add_option("-o", "--only_plots", dest="only_plots", action = "store_true", help="Print out files to .png") (options, args) = parser.parse_args() if options.test : print "RUNNING OVER TEST SAMPLE" - basepath = "/hdfs/TopQuarkGroup/run2/atOutput/13TeV/25ns/20_05_16/" + basepath = "/hdfs/TopQuarkGroup/ec6821/0.0.10/atOutput/combined/" input_files = { 0 : ["TTJets_PowhegPythia8_tree.root" , "PowhegPythia8"], 1 : ["TTJets_powhegHerwigpp_tree.root" , "PowhegHerwigpp"], 2 : ["TTJets_amc_tree.root" , "aMCatNLOPythia8"], 3 : ["TTJets_madgraph_tree.root" , "Madgraph"], + 4 : ["TTJets_PowhegPythia8_Moriond17_tree.root" , "PowhegPythia8_Moriond17"] # 4 : ["TTJets_amcatnloHerwigpp_tree.root" , "aMCatNLOHerwigpp"], } @@ -60,6 +61,8 @@ in_file = input_files[key][0] sample = input_files[key][1] input_file = basepath+in_file + if key == 4: + input_file = "/hdfs/TopQuarkGroup/run2/atOutput/13TeV/2016/TTJets_PowhegPythia8_Moriond17_tree.root" print "Generator : ", sample From a54e7b45384b676c637da0f18f2efa622ef23041 Mon Sep 17 00:00:00 2001 From: Douglas Burns Date: Fri, 14 Oct 2016 08:49:11 +0100 Subject: [PATCH 05/90] Remove FractionFitter/SimultaneousFit methods, move 01_fit_results to legacy --- .../xsection/01_get_ttjet_normalisation.py | 202 ++++------- .../xsection/01_get_fit_results.py | 0 .../xsection/01_get_ttjet_normalisation.py | 333 ++++++++++++++++++ 3 files changed, 403 insertions(+), 132 deletions(-) rename dps/{analysis => legacy}/xsection/01_get_fit_results.py (100%) create mode 100644 dps/legacy/xsection/01_get_ttjet_normalisation.py diff --git 
a/dps/analysis/xsection/01_get_ttjet_normalisation.py b/dps/analysis/xsection/01_get_ttjet_normalisation.py index 38685280..226c916c 100644 --- a/dps/analysis/xsection/01_get_ttjet_normalisation.py +++ b/dps/analysis/xsection/01_get_ttjet_normalisation.py @@ -4,13 +4,13 @@ by subtracting backgrounds from data. Usage: - python src/cross_section_measurement/01_get_ttjet_normalisation.py \ + python dps/analysis/xsection/01_get_ttjet_normalisation.py \ -c -v -i \ -p Example: - python src/cross_section_measurement/01_get_ttjet_normalisation.py \ - -c 8 -v MET -i config/measurements/background_subtraction/ + python dps/analysis/xsection/01_get_ttjet_normalisation.py \ + -c 13 -v MET -i config/measurements/background_subtraction/ TODO: In the end this and 01_get_fit_results.py should be merged. All should come down to the function to extract the # events from TTJet @@ -20,11 +20,11 @@ from dps.utils.logger import log from dps.config.xsection import XSectionConfig from dps.analysis.xsection.lib import closure_tests -from dps.utils.file_utilities import write_data_to_JSON +from dps.utils.file_utilities import write_data_to_JSON, get_files_in_path from dps.utils.hist_utilities import clean_control_region, \ hist_to_value_error_tuplelist, fix_overflow -import glob +import os from copy import deepcopy from dps.utils.Calculation import combine_complex_results from dps.utils.measurement import Measurement @@ -35,59 +35,43 @@ class TTJetNormalisation(object): - - ''' - Determines the normalisation for top quark pair production based on - different methods. Unless stated otherwise all templates and - (initial) normalisations are taken from simulation, except for QCD - where the template is extracted from data. 
- - Supported methods: - BACKGROUND_SUBTRACTION: - Subtracts the known backgrounds from data to obtain TTJet template - and normalisation - SIMULTANEOUS_FIT: - Uses Minuit and several fit variables (quotation needed) to perform - a simultaneous fit (does not use statistical errors of templates). - FRACTION_FITTER: - Uses the TFractionFitter class to fit the TTJet normalisation ''' + Determines the normalisation for top quark pair production. + Unless stated otherwise all templates and (initial) normalisations + are taken from simulation, except for QCD where the template is + extracted from data. - BACKGROUND_SUBTRACTION = 10 - SIMULTANEOUS_FIT = 20 - FRACTION_FITTER = 30 + Subtracts the known backgrounds from data to obtain TTJet template + and normalisation + ''' @mylog.trace() def __init__(self, config, measurement, - method=BACKGROUND_SUBTRACTION, phase_space='FullPS'): self.config = config self.variable = measurement.variable self.category = measurement.name self.channel = measurement.channel - self.method = method self.phase_space = phase_space self.measurement = measurement self.measurement.read() - self.met_type = measurement.met_type - self.fit_variables = ['M3'] - self.normalisation = {} self.initial_normalisation = {} - self.templates = {} + # self.unity_normalisation = {} + self.auxiliary_info = {} self.have_normalisation = False - for sample, hist in self.measurement.histograms.items(): - h = deepcopy(hist) - h_norm = h.integral() - if h_norm > 0: - h.Scale(1 / h.integral()) - self.templates[sample] = hist_to_value_error_tuplelist(h) - self.auxiliary_info = {} + # for sample, hist in self.measurement.histograms.items(): + # h = deepcopy(hist) + # h_norm = h.integral() + # if h_norm > 0: + # h.Scale(1 / h.integral()) + # self.unity_normalisation[sample] = hist_to_value_error_tuplelist(h) + self.auxiliary_info['norms'] = measurement.aux_info_norms @mylog.trace() @@ -96,7 +80,7 @@ def calculate_normalisation(self): 1. get file names 2. 
get histograms from files 3. ??? - 4. calculate normalisation based on self.method + 4. calculate normalisation ''' if self.have_normalisation: return @@ -106,15 +90,10 @@ def calculate_normalisation(self): # TODO: this should be a list of bin-contents hist = fix_overflow(hist) histograms[sample] = hist - self.initial_normalisation[ - sample] = hist_to_value_error_tuplelist(hist) - if self.method == self.BACKGROUND_SUBTRACTION and sample != 'TTJet': - self.normalisation[sample] = self.initial_normalisation[sample] + self.initial_normalisation[sample] = hist_to_value_error_tuplelist(hist) + self.normalisation[sample] = self.initial_normalisation[sample] - if self.method == self.BACKGROUND_SUBTRACTION: - self.background_subtraction(histograms) - if self.method == self.SIMULTANEOUS_FIT: - self.simultaneous_fit(histograms) + self.background_subtraction(histograms) # next, let's round all numbers (they are event numbers after all for sample, values in self.normalisation.items(): @@ -123,83 +102,48 @@ def calculate_normalisation(self): self.have_normalisation = True - def background_subtraction(self, histograms): - ttjet_hist = clean_control_region(histograms, - subtract=['QCD', 'V+Jets', 'SingleTop']) - self.normalisation[ - 'TTJet'] = hist_to_value_error_tuplelist(ttjet_hist) - @mylog.trace() - def simultaneous_fit(self, histograms): - from dps.utils.Fitting import FitData, FitDataCollection, Minuit - print('not in production yet') - fitter = None - fit_data_collection = FitDataCollection() - for fit_variable in self.fit_variables: - mc_histograms = { - 'TTJet': histograms['TTJet'], - 'SingleTop': histograms['SingleTop'], - 'V+Jets': histograms['V+Jets'], - 'QCD': histograms['QCD'], - } - h_data = histograms['data'] - fit_data = FitData(h_data, mc_histograms, - fit_boundaries=self.config.fit_boundaries[fit_variable]) - fit_data_collection.add(fit_data, name=fit_variable) - fitter = Minuit(fit_data_collection) - fitter.fit() - fit_results = fitter.readResults() - - 
normalisation = fit_data_collection.mc_normalisation( - self.fit_variables[0]) - normalisation_errors = fit_data_collection.mc_normalisation_errors( - self.fit_variables[0]) - print normalisation, normalisation_errors + def background_subtraction(self, histograms): + ttjet_hist = clean_control_region( + histograms, + subtract=['QCD', 'V+Jets', 'SingleTop'] + ) + self.normalisation['TTJet'] = hist_to_value_error_tuplelist(ttjet_hist) @mylog.trace() def save(self, output_path): if not self.have_normalisation: self.calculate_normalisation() - folder_template = '{path}/normalisation/{method}/{CoM}TeV/{variable}/' - folder_template += '{phase_space}/{category}/' - inputs = { - 'path': output_path, - 'CoM': self.config.centre_of_mass_energy, - 'variable': self.variable, - 'category': self.category, - 'method': self.method_string(), - 'phase_space': self.phase_space, - } - output_folder = folder_template.format(**inputs) - - file_template = '{type}_{channel}_{met_type}.txt' - inputs = { - 'channel': self.channel, - 'met_type': self.met_type, - } - write_data_to_JSON(self.normalisation, - output_folder + file_template.format(type='normalisation', **inputs)) - write_data_to_JSON(self.initial_normalisation, - output_folder + file_template.format(type='initial_normalisation', **inputs)) - write_data_to_JSON(self.templates, - output_folder + file_template.format(type='templates', **inputs)) - write_data_to_JSON(self.auxiliary_info, - output_folder + file_template.format(type='auxiliary_info', **inputs)) - + file_template = '{type}_{channel}.txt' + folder_template = '{path}/normalisation/{method}/{CoM}TeV/{variable}/{phase_space}/{category}/' + output_folder = folder_template.format( + path = output_path, + CoM = self.config.centre_of_mass_energy, + variable = self.variable, + category = self.category, + method = 'background_subtraction', + phase_space = self.phase_space, + ) + + write_data_to_JSON( + self.normalisation, + output_folder + 
file_template.format(type='normalisation', channel=self.channel) + ) + write_data_to_JSON( + self.initial_normalisation, + output_folder + file_template.format(type='initial_normalisation', channel=self.channel) + ) + # write_data_to_JSON( + # self.unity_normalisation, + # output_folder + file_template.format(type='unity_normalisation', channel=self.channel) + # ) + write_data_to_JSON( + self.auxiliary_info, + output_folder + file_template.format(type='auxiliary_info', channel=self.channel) + ) return output_folder - @mylog.trace() - def method_string(self): - if self.method == self.BACKGROUND_SUBTRACTION: - return 'background_subtraction' - if self.method == self.SIMULTANEOUS_FIT: - return 'simultaneous_fit_' + '_'.join(self.fit_variables) - if self.method == self.FRACTION_FITTER: - return 'fraction_fitter' - - return 'unknown_method' - @mylog.trace() def combine(self, other): if not self.have_normalisation or not other.have_normalisation: @@ -211,8 +155,8 @@ def combine(self, other): self.normalisation, other.normalisation) self.initial_normalisation = combine_complex_results( self.initial_normalisation, other.initial_normalisation) - self.templates = combine_complex_results( - self.templates, other.templates) + # self.unity_normalisation = combine_complex_results( + # self.unity_normalisation, other.unity_normalisation) self.channel = 'combined' @@ -247,29 +191,25 @@ def parse_options(): return options, args - @mylog.trace() def main(): # construct categories from files: - input_template = options.input + '{energy}TeV/{channel}/{variable}/{phase_space}/*.json' - categories = ['QCD_shape'] - categories.extend(measurement_config.categories_and_prefixes.keys()) - categories.extend(measurement_config.rate_changing_systematics_names) - categories.extend([measurement_config.vjets_theory_systematic_prefix + - systematic for systematic in measurement_config.generator_systematics]) + input_template = options.input + '{energy}TeV/{channel}/{variable}/{phase_space}/' 
phase_space = 'FullPS' if options.visiblePS: phase_space = 'VisiblePS' results = {} + for channel in ['electron', 'muon']: - inputs = { - 'energy': options.CoM, - 'channel': channel, - 'variable': variable, - 'phase_space': phase_space, - } - measurement_files = glob.glob(input_template.format(**inputs)) + measurement_filepath = input_template.format( + energy = options.CoM, + channel = channel, + variable = variable, + phase_space = phase_space, + ) + measurement_files = get_files_in_path(measurement_filepath, file_ending='.json') + for f in sorted(measurement_files): if options.test and not 'central' in f : continue print('Processing file ' + f) @@ -278,7 +218,6 @@ def main(): norm = TTJetNormalisation( config=measurement_config, measurement=measurement, - method=TTJetNormalisation.BACKGROUND_SUBTRACTION, phase_space=phase_space, ) norm.calculate_normalisation() @@ -300,7 +239,6 @@ def main(): n1.combine(n2) n1.save(output_path) - def get_category_from_file(json_file): filename = json_file.split('/')[-1] # remove type string diff --git a/dps/analysis/xsection/01_get_fit_results.py b/dps/legacy/xsection/01_get_fit_results.py similarity index 100% rename from dps/analysis/xsection/01_get_fit_results.py rename to dps/legacy/xsection/01_get_fit_results.py diff --git a/dps/legacy/xsection/01_get_ttjet_normalisation.py b/dps/legacy/xsection/01_get_ttjet_normalisation.py new file mode 100644 index 00000000..02bcc9db --- /dev/null +++ b/dps/legacy/xsection/01_get_ttjet_normalisation.py @@ -0,0 +1,333 @@ +''' + Takes AnalysisSoftware (https://github.com/BristolTopGroup/AnalysisSoftware) + output files and extracts the TTJet normalisation for each measured variable + by subtracting backgrounds from data. 
+ + Usage: + python dps/analysis/xsection/01_get_ttjet_normalisation.py \ + -c -v -i \ + -p + + Example: + python dps/analysis/xsection/01_get_ttjet_normalisation.py \ + -c 13 -v MET -i config/measurements/background_subtraction/ + + TODO: In the end this and 01_get_fit_results.py should be merged. + All should come down to the function to extract the # events from TTJet +''' +from __future__ import division +from optparse import OptionParser +from dps.utils.logger import log +from dps.config.xsection import XSectionConfig +from dps.analysis.xsection.lib import closure_tests +from dps.utils.file_utilities import write_data_to_JSON, get_files_in_path +from dps.utils.hist_utilities import clean_control_region, \ + hist_to_value_error_tuplelist, fix_overflow + +import os +from copy import deepcopy +from dps.utils.Calculation import combine_complex_results +from dps.utils.measurement import Measurement +from dps.utils.ROOT_utils import set_root_defaults + +# define logger for this module +mylog = log["01b_get_ttjet_normalisation"] + + +class TTJetNormalisation(object): + + ''' + Determines the normalisation for top quark pair production based on + different methods. Unless stated otherwise all templates and + (initial) normalisations are taken from simulation, except for QCD + where the template is extracted from data. + + Supported methods: + BACKGROUND_SUBTRACTION: + Subtracts the known backgrounds from data to obtain TTJet template + and normalisation + SIMULTANEOUS_FIT: + Uses Minuit and several fit variables (quotation needed) to perform + a simultaneous fit (does not use statistical errors of templates). 
+ FRACTION_FITTER: + Uses the TFractionFitter class to fit the TTJet normalisation + ''' + + BACKGROUND_SUBTRACTION = 10 + SIMULTANEOUS_FIT = 20 + FRACTION_FITTER = 30 + + @mylog.trace() + def __init__(self, + config, + measurement, + method=BACKGROUND_SUBTRACTION, + phase_space='FullPS'): + self.config = config + self.variable = measurement.variable + self.category = measurement.name + self.channel = measurement.channel + self.method = method + self.phase_space = phase_space + self.measurement = measurement + self.measurement.read() + + self.met_type = measurement.met_type + self.fit_variables = ['M3'] + + self.normalisation = {} + self.initial_normalisation = {} + self.templates = {} + + self.have_normalisation = False + + for sample, hist in self.measurement.histograms.items(): + h = deepcopy(hist) + h_norm = h.integral() + if h_norm > 0: + h.Scale(1 / h.integral()) + self.templates[sample] = hist_to_value_error_tuplelist(h) + self.auxiliary_info = {} + self.auxiliary_info['norms'] = measurement.aux_info_norms + + @mylog.trace() + def calculate_normalisation(self): + ''' + 1. get file names + 2. get histograms from files + 3. ??? + 4. 
calculate normalisation based on self.method + ''' + if self.have_normalisation: + return + histograms = self.measurement.histograms + + for sample, hist in histograms.items(): + # TODO: this should be a list of bin-contents + hist = fix_overflow(hist) + histograms[sample] = hist + self.initial_normalisation[ + sample] = hist_to_value_error_tuplelist(hist) + if self.method == self.BACKGROUND_SUBTRACTION and sample != 'TTJet': + self.normalisation[sample] = self.initial_normalisation[sample] + + if self.method == self.BACKGROUND_SUBTRACTION: + self.background_subtraction(histograms) + if self.method == self.SIMULTANEOUS_FIT: + self.simultaneous_fit(histograms) + + # next, let's round all numbers (they are event numbers after all + for sample, values in self.normalisation.items(): + new_values = [(round(v, 1), round(e, 1)) for v, e in values] + self.normalisation[sample] = new_values + + self.have_normalisation = True + + def background_subtraction(self, histograms): + ttjet_hist = clean_control_region(histograms, + subtract=['QCD', 'V+Jets', 'SingleTop']) + self.normalisation[ + 'TTJet'] = hist_to_value_error_tuplelist(ttjet_hist) + + @mylog.trace() + def simultaneous_fit(self, histograms): + from dps.utils.Fitting import FitData, FitDataCollection, Minuit + print('not in production yet') + fitter = None + fit_data_collection = FitDataCollection() + for fit_variable in self.fit_variables: + mc_histograms = { + 'TTJet': histograms['TTJet'], + 'SingleTop': histograms['SingleTop'], + 'V+Jets': histograms['V+Jets'], + 'QCD': histograms['QCD'], + } + h_data = histograms['data'] + fit_data = FitData(h_data, mc_histograms, + fit_boundaries=self.config.fit_boundaries[fit_variable]) + fit_data_collection.add(fit_data, name=fit_variable) + fitter = Minuit(fit_data_collection) + fitter.fit() + fit_results = fitter.readResults() + + normalisation = fit_data_collection.mc_normalisation( + self.fit_variables[0]) + normalisation_errors = 
fit_data_collection.mc_normalisation_errors( + self.fit_variables[0]) + print normalisation, normalisation_errors + + @mylog.trace() + def save(self, output_path): + if not self.have_normalisation: + self.calculate_normalisation() + + folder_template = '{path}/normalisation/{method}/{CoM}TeV/{variable}/' + folder_template += '{phase_space}/{category}/' + inputs = { + 'path': output_path, + 'CoM': self.config.centre_of_mass_energy, + 'variable': self.variable, + 'category': self.category, + 'method': self.method_string(), + 'phase_space': self.phase_space, + } + output_folder = folder_template.format(**inputs) + + file_template = '{type}_{channel}_{met_type}.txt' + inputs = { + 'channel': self.channel, + 'met_type': self.met_type, + } + write_data_to_JSON(self.normalisation, + output_folder + file_template.format(type='normalisation', **inputs)) + write_data_to_JSON(self.initial_normalisation, + output_folder + file_template.format(type='initial_normalisation', **inputs)) + write_data_to_JSON(self.templates, + output_folder + file_template.format(type='templates', **inputs)) + write_data_to_JSON(self.auxiliary_info, + output_folder + file_template.format(type='auxiliary_info', **inputs)) + + return output_folder + + @mylog.trace() + def method_string(self): + if self.method == self.BACKGROUND_SUBTRACTION: + return 'background_subtraction' + if self.method == self.SIMULTANEOUS_FIT: + return 'simultaneous_fit_' + '_'.join(self.fit_variables) + if self.method == self.FRACTION_FITTER: + return 'fraction_fitter' + + return 'unknown_method' + + @mylog.trace() + def combine(self, other): + if not self.have_normalisation or not other.have_normalisation: + mylog.warn( + 'One of the TTJetNormalisations does not have a normalisation, aborting.') + return + + self.normalisation = combine_complex_results( + self.normalisation, other.normalisation) + self.initial_normalisation = combine_complex_results( + self.initial_normalisation, other.initial_normalisation) + self.templates = 
combine_complex_results( + self.templates, other.templates) + self.channel = 'combined' + + +def parse_options(): + parser = OptionParser(__doc__) + parser.add_option("-p", "--path", dest="path", default='data', + help="set output path for JSON files. Default is 'data'.") + parser.add_option("-i", "--input", dest="input", + default='config/measurements/background_subtraction/', + help="set output path for JSON files") + parser.add_option("-v", "--variable", dest="variable", default='MET', + help="set the variable to analyse (MET, HT, ST, MT, WPT). Default is MET.") + parser.add_option("-c", "--centre-of-mass-energy", dest="CoM", default=13, type=int, + help="set the centre of mass energy for analysis. Default = 13 [TeV]") + parser.add_option('-d', '--debug', dest="debug", action="store_true", + help="Print the debug information") + parser.add_option('--closure_test', dest="closure_test", action="store_true", + help="Perform fit on data == sum(MC) * scale factor (MC process)") + parser.add_option('--closure_test_type', dest="closure_test_type", default='simple', + help="Type of closure test (relative normalisation):" + '|'.join(closure_tests.keys())) + parser.add_option('--test', dest="test", action="store_true", + help="Just run the central measurement") + parser.add_option('--visiblePS', dest="visiblePS", action="store_true", + help="Unfold to visible phase space") + + (options, args) = parser.parse_args() + # fix some of the inputs + if not options.path.endswith('/'): + options.path = options.path + '/' + if not options.input.endswith('/'): + options.input = options.input + '/' + + return options, args + + +@mylog.trace() +def main(): + # construct categories from files: + input_template = options.input + '{energy}TeV/{channel}/{variable}/{phase_space}/' + # categories = [ category for category + # in measurement_config.measurements_and_prefixes.keys() + # if not measurement_config.ttbar_theory_systematic_prefix in category] + + phase_space = 'FullPS' + if 
options.visiblePS: + phase_space = 'VisiblePS' + results = {} + + for channel in ['electron', 'muon']: + measurement_filepath = input_template.format( + energy = options.CoM, + channel = channel, + variable = variable, + phase_space = phase_space, + ) + measurement_files = get_files_in_path(measurement_filepath, file_ending='.json') + + for f in sorted(measurement_files): + print('Processing file ' + f) + measurement = Measurement.fromJSON(f) + # for each measurement + norm = TTJetNormalisation( + config=measurement_config, + measurement=measurement, + method=TTJetNormalisation.BACKGROUND_SUBTRACTION, + phase_space=phase_space, + ) + norm.calculate_normalisation() + mylog.info('Saving results to {0}'.format(output_path)) + norm.save(output_path) + # store results for later combination + r_name = f.replace(channel, '') + if not results.has_key(r_name): + results[r_name] = [norm] + else: + results[r_name].append(norm) + + for f, r_list in results.items(): + if not len(r_list) == 2: + msg = 'Only found results ({0}) for one channel, not combining.' 
+ mylog.warn(msg.format(f)) + continue + n1, n2 = r_list + n1.combine(n2) + n1.save(output_path) + + +def get_category_from_file(json_file): + filename = json_file.split('/')[-1] + # remove type string + category = filename.replace('_shape_systematic', '') + category = category.replace('_rate_systematic', '') + # remove file ending + category = category.replace('.json', '') + + return category + +if __name__ == '__main__': + set_root_defaults() + + options, args = parse_options() + + # set global variables + debug = options.debug + if debug: + log.setLevel(log.DEBUG) + + measurement_config = XSectionConfig(options.CoM) + # caching of variables for shorter access + translate_options = measurement_config.translate_options + variable = options.variable + + output_path = options.path + if options.closure_test: + output_path += '/closure_test/' + output_path += options.closure_test_type + '/' + + main() From e1d7939de7318dd707a6fd775ca413e77f6aa1aa Mon Sep 17 00:00:00 2001 From: Douglas Burns Date: Tue, 18 Oct 2016 12:15:43 +0100 Subject: [PATCH 06/90] Add in temporary full systematic list[WIP], remove deprecaated func from 01 --- .../xsection/01_get_ttjet_normalisation.py | 12 ----- dps/config/xsection.py | 49 +++++++++++++++++++ 2 files changed, 49 insertions(+), 12 deletions(-) diff --git a/dps/analysis/xsection/01_get_ttjet_normalisation.py b/dps/analysis/xsection/01_get_ttjet_normalisation.py index 226c916c..dedb804d 100644 --- a/dps/analysis/xsection/01_get_ttjet_normalisation.py +++ b/dps/analysis/xsection/01_get_ttjet_normalisation.py @@ -239,16 +239,6 @@ def main(): n1.combine(n2) n1.save(output_path) -def get_category_from_file(json_file): - filename = json_file.split('/')[-1] - # remove type string - category = filename.replace('_shape_systematic', '') - category = category.replace('_rate_systematic', '') - # remove file ending - category = category.replace('.json', '') - - return category - if __name__ == '__main__': set_root_defaults() @@ -261,9 +251,7 @@ 
def get_category_from_file(json_file): measurement_config = XSectionConfig(options.CoM) # caching of variables for shorter access - translate_options = measurement_config.translate_options variable = options.variable - output_path = options.path if options.closure_test: output_path += '/closure_test/' diff --git a/dps/config/xsection.py b/dps/config/xsection.py index 232a7ed0..8168f4e7 100644 --- a/dps/config/xsection.py +++ b/dps/config/xsection.py @@ -198,6 +198,55 @@ def __fill_defaults__( self ): 'UnclusteredEnDown' : '', } + self.measurements_and_prefixes = { + 'central' : '', + 'JER_up' : '', + 'JER_down' : '', + 'JES_up' : '', + 'JES_down' : '', + 'ElectronEnUp' : '', + 'ElectronEnDown' : '', + 'MuonEnUp' : '', + 'MuonEnDown' : '', + 'TauEnUp' : '', + 'TauEnDown' : '', + 'UnclusteredEnUp' : '', + 'UnclusteredEnDown' : '', + 'PileUp_up' : '', + 'PileUp_down' : '', + 'LightJet_up' : '', + 'LightJet_down' : '', + 'BJet_up' : '', + 'BJet_down' : '', + 'Electron_up' : '', + 'Electron_down' : '', + 'Muon_up' : '', + 'Muon_down' : '', + 'luminosity+' : '', + 'luminosity-' : '', + 'SingleTop_cross_section+' : '', + 'SingleTop_cross_section-' : '', + 'V+Jets_cross_section+' : '', + 'V+Jets_cross_section-' : '', + 'QCD_cross_section+' : '', + 'QCD_cross_section-' : '', + 'QCD_shape' : '', + 'TTJets_scaleup' : '', + 'TTJets_scaledown' : '', + 'TTJets_massup' : '', + 'TTJets_massdown' : '', + 'TTJets_hadronisation' : '', + 'TTJets_NLOgenerator' : '', + 'TTJets_factorisationup' : '', + 'TTJets_factorisationdown' : '', + 'TTJets_renormalisationup' : '', + 'TTJets_renormalisationdown' : '', + 'TTJets_combinedup' : '', + 'TTJets_combineddown' : '', + 'TTJets_alphaSup' : '', + 'TTJets_alphaSdown' : '', + } + self.list_of_systematics = { # Theoretical Uncertainties (Rate Changing) 'V+Jets_cross_section' : ['V+Jets_cross_section+', 'V+Jets_cross_section-'], From cfa1872f0d4817bd581da86c549c87105df51af3 Mon Sep 17 00:00:00 2001 From: Douglas Burns Date: Tue, 18 Oct 2016 
12:18:58 +0100 Subject: [PATCH 07/90] Remove metType from paths, use temp syst list --- .../xsection/02_unfold_and_measure.py | 725 +++++++----------- 1 file changed, 292 insertions(+), 433 deletions(-) diff --git a/dps/analysis/xsection/02_unfold_and_measure.py b/dps/analysis/xsection/02_unfold_and_measure.py index 80dc7430..93a72b8b 100644 --- a/dps/analysis/xsection/02_unfold_and_measure.py +++ b/dps/analysis/xsection/02_unfold_and_measure.py @@ -39,6 +39,9 @@ def unfold_results( results, category, channel, tau_value, h_truth, h_measured, h_unfolded_data = unfolding.unfold() + # print "h_response bin edges : ", h_response + # print "h_unfolded_data bin edges : ", h_unfolded_data + del unfolding return hist_to_value_error_tuplelist( h_unfolded_data ), hist_to_value_error_tuplelist( h_data ) @@ -51,286 +54,204 @@ def data_covariance_matrix( data ): cov_matrix.SetBinContent( bin_i + 1, bin_i + 1, error * error ) return cov_matrix -def get_unfolded_normalisation( TTJet_fit_results, category, channel, tau_value, visiblePS ): - global centre_of_mass, luminosity, ttbar_xsection, method - global variable, met_type, path_to_JSON, file_for_unfolding, file_for_powheg_pythia, file_for_powheg_herwig, file_for_ptreweight, files_for_pdfs - global file_for_powhegPythia8, file_for_madgraphMLM, file_for_amcatnlo, file_for_amcatnlo_herwig - # global file_for_matchingdown, file_for_matchingup - global file_for_fsrdown, file_for_fsrup - global file_for_isrdown, file_for_isrup - global file_for_uedown, file_for_ueup +def get_unfolded_normalisation( TTJet_normalisation_results, category, channel, tau_value, visiblePS ): + global centre_of_mass, luminosity, ttbar_xsection, method, variable, path_to_JSON + global file_for_unfolding, file_for_ptreweight, files_for_pdfs + global file_for_powhegPythia8, file_for_powheg_herwig, file_for_madgraphMLM, file_for_amcatnlo, file_for_amcatnlo_herwig + global file_for_scaledown, file_for_scaleup global file_for_massdown, file_for_massup - global 
ttbar_generator_systematics, ttbar_theory_systematics, pdf_uncertainties + global pdf_uncertainties + # Add in this option? global use_ptreweight files_for_systematics = { - ttbar_theory_systematic_prefix + 'fsrdown' : file_for_fsrdown, - ttbar_theory_systematic_prefix + 'fsrup' : file_for_fsrup, - ttbar_theory_systematic_prefix + 'isrdown' : file_for_isrdown, - ttbar_theory_systematic_prefix + 'isrup' : file_for_isrup, - ttbar_theory_systematic_prefix + 'uedown' : file_for_uedown, - ttbar_theory_systematic_prefix + 'ueup' : file_for_ueup, - ttbar_theory_systematic_prefix + 'massdown' : file_for_massdown, - ttbar_theory_systematic_prefix + 'massup' : file_for_massup, - - ttbar_theory_systematic_prefix + 'factorisationdown' : file_for_factorisationdown, - ttbar_theory_systematic_prefix + 'factorisationup' : file_for_factorisationup, - ttbar_theory_systematic_prefix + 'renormalisationdown' : file_for_renormalisationdown, - ttbar_theory_systematic_prefix + 'renormalisationup' : file_for_renormalisationup, - ttbar_theory_systematic_prefix + 'combineddown' : file_for_combineddown, - ttbar_theory_systematic_prefix + 'combinedup' : file_for_combinedup, - # ttbar_theory_systematic_prefix + 'alphaSdown' : file_for_alphaSdown, - # ttbar_theory_systematic_prefix + 'alphaSup' : file_for_alphaSup, - - 'JES_down' : file_for_jesdown, - 'JES_up' : file_for_jesup, - - 'JER_down' : file_for_jerdown, - 'JER_up' : file_for_jerup, - - 'BJet_up' : file_for_bjetup, - 'BJet_down' : file_for_bjetdown, - - 'LightJet_up' : file_for_lightjetup, - 'LightJet_down' : file_for_lightjetdown, - - ttbar_theory_systematic_prefix + 'hadronisation' : file_for_powheg_herwig, - # ttbar_theory_systematic_prefix + 'NLOgenerator' : file_for_amcatnlo, - - 'ElectronEnUp' : file_for_ElectronEnUp, - 'ElectronEnDown' : file_for_ElectronEnDown, - 'MuonEnUp' : file_for_MuonEnUp, - 'MuonEnDown' : file_for_MuonEnDown, - 'TauEnUp' : file_for_TauEnUp, - 'TauEnDown' : file_for_TauEnDown, - 'UnclusteredEnUp' : 
file_for_UnclusteredEnUp, - 'UnclusteredEnDown' : file_for_UnclusteredEnDown, - - 'Muon_up' : file_for_LeptonUp, - 'Muon_down' : file_for_LeptonDown, - 'Electron_up' : file_for_LeptonUp, - 'Electron_down' : file_for_LeptonDown, - - 'PileUp_up' : file_for_PUUp, - 'PileUp_down' : file_for_PUDown, - } + 'TTJets_scaledown' : file_for_scaledown, + 'TTJets_scaleup' : file_for_scaleup, + 'TTJets_massdown' : file_for_massdown, + 'TTJets_massup' : file_for_massup, + + 'TTJets_factorisationdown' : file_for_factorisationdown, + 'TTJets_factorisationup' : file_for_factorisationup, + 'TTJets_renormalisationdown' : file_for_renormalisationdown, + 'TTJets_renormalisationup' : file_for_renormalisationup, + 'TTJets_combineddown' : file_for_combineddown, + 'TTJets_combinedup' : file_for_combinedup, + 'TTJets_alphaSdown' : file_for_alphaSdown, + 'TTJets_alphaSup' : file_for_alphaSup, + + 'JES_down' : file_for_jesdown, + 'JES_up' : file_for_jesup, + + 'JER_down' : file_for_jerdown, + 'JER_up' : file_for_jerup, + + 'BJet_up' : file_for_bjetup, + 'BJet_down' : file_for_bjetdown, + + 'LightJet_up' : file_for_lightjetup, + 'LightJet_down' : file_for_lightjetdown, + + 'TTJets_hadronisation' : file_for_powheg_herwig, + 'TTJets_NLOgenerator' : file_for_amcatnlo, + + 'ElectronEnUp' : file_for_ElectronEnUp, + 'ElectronEnDown' : file_for_ElectronEnDown, + 'MuonEnUp' : file_for_MuonEnUp, + 'MuonEnDown' : file_for_MuonEnDown, + 'TauEnUp' : file_for_TauEnUp, + 'TauEnDown' : file_for_TauEnDown, + 'UnclusteredEnUp' : file_for_UnclusteredEnUp, + 'UnclusteredEnDown' : file_for_UnclusteredEnDown, + + 'Muon_up' : file_for_LeptonUp, + 'Muon_down' : file_for_LeptonDown, + 'Electron_up' : file_for_LeptonUp, + 'Electron_down' : file_for_LeptonDown, + + 'PileUp_up' : file_for_PUUp, + 'PileUp_down' : file_for_PUDown, + } h_truth, h_measured, h_response, h_fakes = None, None, None, None + # Systematics where you change the response matrix if category in files_for_systematics : print 'Doing 
category',category,'by changing response matrix' - h_truth, h_measured, h_response, h_fakes = get_unfold_histogram_tuple( inputfile = files_for_systematics[category], - variable = variable, - channel = channel, - met_type = met_type, - centre_of_mass = centre_of_mass, - ttbar_xsection = ttbar_xsection, - luminosity = luminosity, - load_fakes = True, - visiblePS = visiblePS, - ) + h_truth, h_measured, h_response, h_fakes = get_unfold_histogram_tuple( + inputfile = files_for_systematics[category], + variable = variable, + channel = channel, + centre_of_mass = centre_of_mass, + ttbar_xsection = ttbar_xsection, + luminosity = luminosity, + load_fakes = True, + visiblePS = visiblePS, + ) elif category in pdf_uncertainties: - h_truth, h_measured, h_response, h_fakes = get_unfold_histogram_tuple( inputfile = files_for_pdfs[category], - variable = variable, - channel = channel, - met_type = met_type, - centre_of_mass = centre_of_mass, - ttbar_xsection = ttbar_xsection, - luminosity = luminosity, - load_fakes = True, - visiblePS = visiblePS, - ) + print 'Doing category',category,'by changing response matrix' + h_truth, h_measured, h_response, h_fakes = get_unfold_histogram_tuple( + inputfile = files_for_pdfs[category], + variable = variable, + channel = channel, + centre_of_mass = centre_of_mass, + ttbar_xsection = ttbar_xsection, + luminosity = luminosity, + load_fakes = True, + visiblePS = visiblePS, + ) # Central and systematics where you just change input MC else: - h_truth, h_measured, h_response, h_fakes = get_unfold_histogram_tuple( inputfile = file_for_unfolding, - variable = variable, - channel = channel, - met_type = met_type, - centre_of_mass = centre_of_mass, - ttbar_xsection = ttbar_xsection, - luminosity = luminosity, - load_fakes = True, - visiblePS = visiblePS, - ) - -# central_results = hist_to_value_error_tuplelist( h_truth ) - TTJet_fit_results_unfolded, TTJet_fit_results_withoutFakes = unfold_results( TTJet_fit_results, - category, - channel, - 
tau_value, - h_truth, - h_measured, - h_response, - h_fakes, - method, - visiblePS, - ) - normalisation_unfolded = { - 'TTJet_measured' : TTJet_fit_results, - 'TTJet_measured_withoutFakes' : TTJet_fit_results_withoutFakes, - 'TTJet_unfolded' : TTJet_fit_results_unfolded - } + h_truth, h_measured, h_response, h_fakes = get_unfold_histogram_tuple( + inputfile = file_for_unfolding, + variable = variable, + channel = channel, + centre_of_mass = centre_of_mass, + ttbar_xsection = ttbar_xsection, + luminosity = luminosity, + load_fakes = True, + visiblePS = visiblePS, + ) - # - # THESE ARE FOR GETTING THE HISTOGRAMS FOR COMPARING WITH UNFOLDED DATA - # + # Unfold current normalisation measurements + TTJet_normalisation_results_unfolded, TTJet_normalisation_results_withoutFakes = unfold_results( + TTJet_normalisation_results, + category, + channel, + tau_value, + h_truth, + h_measured, + h_response, + h_fakes, + method, + visiblePS, + ) + + # Store measurements + normalisation_unfolded = { + 'TTJet_measured' : TTJet_normalisation_results, + 'TTJet_measured_withoutFakes' : TTJet_normalisation_results_withoutFakes, + 'TTJet_unfolded' : TTJet_normalisation_results_unfolded + } + # Return truth of different generators for comparison to data in 04 if category == 'central': - h_truth_fsrdown, _, _, _ = get_unfold_histogram_tuple( inputfile = file_for_fsrdown, - variable = variable, - channel = channel, - met_type = met_type, - centre_of_mass = centre_of_mass, - ttbar_xsection = ttbar_xsection, - luminosity = luminosity, - load_fakes = True, - visiblePS = visiblePS, - ) - h_truth_fsrup, _, _, _ = get_unfold_histogram_tuple( inputfile = file_for_fsrup, - variable = variable, - channel = channel, - met_type = met_type, - centre_of_mass = centre_of_mass, - ttbar_xsection = ttbar_xsection, - luminosity = luminosity, - load_fakes = True, - visiblePS = visiblePS, - ) - - h_truth_isrdown, _, _, _ = get_unfold_histogram_tuple( inputfile = file_for_isrdown, - variable = variable, - 
channel = channel, - met_type = met_type, - centre_of_mass = centre_of_mass, - ttbar_xsection = ttbar_xsection, - luminosity = luminosity, - load_fakes = True, - visiblePS = visiblePS, - ) - h_truth_isrup, _, _, _ = get_unfold_histogram_tuple( inputfile = file_for_isrup, - variable = variable, - channel = channel, - met_type = met_type, - centre_of_mass = centre_of_mass, - ttbar_xsection = ttbar_xsection, - luminosity = luminosity, - load_fakes = True, - visiblePS = visiblePS, - ) - - h_truth_uedown, _, _, _ = get_unfold_histogram_tuple( inputfile = file_for_uedown, - variable = variable, - channel = channel, - met_type = met_type, - centre_of_mass = centre_of_mass, - ttbar_xsection = ttbar_xsection, - luminosity = luminosity, - load_fakes = True, - visiblePS = visiblePS, - ) - h_truth_ueup, _, _, _ = get_unfold_histogram_tuple( inputfile = file_for_ueup, - variable = variable, - channel = channel, - met_type = met_type, - centre_of_mass = centre_of_mass, - ttbar_xsection = ttbar_xsection, - luminosity = luminosity, - load_fakes = True, - visiblePS = visiblePS, - ) - - h_truth_massdown, _, _, _ = get_unfold_histogram_tuple( inputfile = file_for_massdown, - variable = variable, - channel = channel, - met_type = met_type, - centre_of_mass = centre_of_mass, - ttbar_xsection = ttbar_xsection, - luminosity = luminosity, - load_fakes = True, - visiblePS = visiblePS, - ) - h_truth_massup, _, _, _ = get_unfold_histogram_tuple( inputfile = file_for_massup, - variable = variable, - channel = channel, - met_type = met_type, - centre_of_mass = centre_of_mass, - ttbar_xsection = ttbar_xsection, - luminosity = luminosity, - load_fakes = True, - visiblePS = visiblePS, - ) - - h_truth_powhegPythia8, _, _, _ = get_unfold_histogram_tuple( inputfile = file_for_powhegPythia8, - variable = variable, - channel = channel, - met_type = met_type, - centre_of_mass = centre_of_mass, - ttbar_xsection = ttbar_xsection, - luminosity = luminosity, - load_fakes = True, - visiblePS = visiblePS, - 
) - - # h_truth_amcatnlo, _, _, _ = get_unfold_histogram_tuple( inputfile = file_for_amcatnlo, - # variable = variable, - # channel = channel, - # met_type = met_type, - # centre_of_mass = centre_of_mass, - # ttbar_xsection = ttbar_xsection, - # luminosity = luminosity, - # load_fakes = True, - # visiblePS = visiblePS, - # ) - - # h_truth_madgraphMLM, _, _, _ = get_unfold_histogram_tuple( inputfile = file_for_madgraphMLM, - # variable = variable, - # channel = channel, - # met_type = met_type, - # centre_of_mass = centre_of_mass, - # ttbar_xsection = ttbar_xsection, - # luminosity = luminosity, - # load_fakes = True, - # visiblePS = visiblePS, - # ) - - h_truth_powheg_herwig, _, _, _ = get_unfold_histogram_tuple( inputfile = file_for_powheg_herwig, - variable = variable, - channel = channel, - met_type = met_type, - centre_of_mass = centre_of_mass, - ttbar_xsection = ttbar_xsection, - luminosity = luminosity, - load_fakes = True, - visiblePS = visiblePS, - ) - - # h_truth_amcatnlo_herwig, _, _, _ = get_unfold_histogram_tuple( inputfile = file_for_amcatnlo_herwig, - # variable = variable, - # channel = channel, - # met_type = met_type, - # centre_of_mass = centre_of_mass, - # ttbar_xsection = ttbar_xsection, - # luminosity = luminosity, - # load_fakes = True, - # visiblePS = visiblePS, - # ) + h_truth_massdown, _, _, _ = get_unfold_histogram_tuple( + inputfile = file_for_massdown, + variable = variable, + channel = channel, + centre_of_mass = centre_of_mass, + ttbar_xsection = ttbar_xsection, + luminosity = luminosity, + load_fakes = True, + visiblePS = visiblePS, + ) + h_truth_massup, _, _, _ = get_unfold_histogram_tuple( + inputfile = file_for_massup, + variable = variable, + channel = channel, + centre_of_mass = centre_of_mass, + ttbar_xsection = ttbar_xsection, + luminosity = luminosity, + load_fakes = True, + visiblePS = visiblePS, + ) + h_truth_powhegPythia8, _, _, _ = get_unfold_histogram_tuple( + inputfile = file_for_powhegPythia8, + variable = variable, + 
channel = channel, + centre_of_mass = centre_of_mass, + ttbar_xsection = ttbar_xsection, + luminosity = luminosity, + load_fakes = True, + visiblePS = visiblePS, + ) + h_truth_amcatnlo, _, _, _ = get_unfold_histogram_tuple( + inputfile = file_for_amcatnlo, + variable = variable, + channel = channel, + centre_of_mass = centre_of_mass, + ttbar_xsection = ttbar_xsection, + luminosity = luminosity, + load_fakes = True, + visiblePS = visiblePS, + ) + h_truth_madgraphMLM, _, _, _ = get_unfold_histogram_tuple( + inputfile = file_for_madgraphMLM, + variable = variable, + channel = channel, + centre_of_mass = centre_of_mass, + ttbar_xsection = ttbar_xsection, + luminosity = luminosity, + load_fakes = True, + visiblePS = visiblePS, + ) + h_truth_powheg_herwig, _, _, _ = get_unfold_histogram_tuple( + inputfile = file_for_powheg_herwig, + variable = variable, + channel = channel, + centre_of_mass = centre_of_mass, + ttbar_xsection = ttbar_xsection, + luminosity = luminosity, + load_fakes = True, + visiblePS = visiblePS, + ) + # h_truth_amcatnlo_herwig, _, _, _ = get_unfold_histogram_tuple( + # inputfile = file_for_amcatnlo_herwig, + # variable = variable, + # channel = channel, + # centre_of_mass = centre_of_mass, + # ttbar_xsection = ttbar_xsection, + # luminosity = luminosity, + # load_fakes = True, + # visiblePS = visiblePS, + # ) - # MADGRAPH_ptreweight_results = hist_to_value_error_tuplelist( h_truth_ptreweight ) - # POWHEG_PYTHIA_results = hist_to_value_error_tuplelist( h_truth_POWHEG_PYTHIA ) - # MCATNLO_results = None powhegPythia8_results = hist_to_value_error_tuplelist( h_truth_powhegPythia8 ) # madgraphMLM_results = hist_to_value_error_tuplelist( h_truth_madgraphMLM ) # amcatnloPythia8_results = hist_to_value_error_tuplelist( h_truth_amcatnlo ) powheg_herwig_results = hist_to_value_error_tuplelist( h_truth_powheg_herwig ) # amcatnlo_herwig_results = hist_to_value_error_tuplelist( h_truth_amcatnlo_herwig ) - fsrdown_results = hist_to_value_error_tuplelist( 
h_truth_fsrdown ) - fsrup_results = hist_to_value_error_tuplelist( h_truth_fsrup ) - isrdown_results = hist_to_value_error_tuplelist( h_truth_isrdown ) - isrup_results = hist_to_value_error_tuplelist( h_truth_isrup ) - uedown_results = hist_to_value_error_tuplelist( h_truth_uedown ) - ueup_results = hist_to_value_error_tuplelist( h_truth_ueup ) - massdown_results = hist_to_value_error_tuplelist( h_truth_massdown ) massup_results = hist_to_value_error_tuplelist( h_truth_massup ) @@ -340,46 +261,34 @@ def get_unfolded_normalisation( TTJet_fit_results, category, channel, tau_value, normalisation_unfolded['powhegHerwig'] = powheg_herwig_results # normalisation_unfolded['amcatnloHerwig'] = amcatnlo_herwig_results - normalisation_unfolded['fsrdown'] = fsrdown_results - normalisation_unfolded['fsrup'] = fsrup_results - normalisation_unfolded['isrdown'] = isrdown_results - normalisation_unfolded['isrup'] = isrup_results - normalisation_unfolded['uedown'] = uedown_results - normalisation_unfolded['ueup'] = ueup_results normalisation_unfolded['massdown'] = massdown_results normalisation_unfolded['massup'] = massup_results - return normalisation_unfolded def calculate_xsections( normalisation, category, channel ): - global variable, met_type, path_to_JSON + global variable, path_to_JSON # calculate the x-sections branching_ratio = 0.15 - if channel == 'combined': + if 'combined' in channel: branching_ratio = branching_ratio * 2 TTJet_xsection = calculate_xsection( normalisation['TTJet_measured'], luminosity, branching_ratio ) # L in pb1 TTJet_withoutFakes_xsection = calculate_xsection( normalisation['TTJet_measured_withoutFakes'], luminosity, branching_ratio ) # L in pb1 TTJet_xsection_unfolded = calculate_xsection( normalisation['TTJet_unfolded'], luminosity, branching_ratio ) # L in pb1 - xsection_unfolded = {'TTJet_measured' : TTJet_xsection, - 'TTJet_measured_withoutFakes' : TTJet_withoutFakes_xsection, - 'TTJet_unfolded' : TTJet_xsection_unfolded, - } + xsection_unfolded 
= { + 'TTJet_measured' : TTJet_xsection, + 'TTJet_measured_withoutFakes' : TTJet_withoutFakes_xsection, + 'TTJet_unfolded' : TTJet_xsection_unfolded, + } if category == 'central': powhegPythia8_xsection = calculate_xsection( normalisation['powhegPythia8'], luminosity, branching_ratio ) # L in pb1 # amcatnlo_xsection = calculate_xsection( normalisation['amcatnlo'], luminosity, branching_ratio ) # L in pb1 powhegHerwig_xsection = calculate_xsection( normalisation['powhegHerwig'], luminosity, branching_ratio ) # L in pb1 # amcatnloHerwig_xsection = calculate_xsection( normalisation['amcatnloHerwig'], luminosity, branching_ratio ) # L in pb1 - # madgraphMLM_xsection = calculate_xsection( normalisation['madgraphMLM'], luminosity, branching_ratio ) - - fsrdown_xsection = calculate_xsection( normalisation['fsrdown'], luminosity, branching_ratio ) # L in pb1 - fsrup_xsection = calculate_xsection( normalisation['fsrup'], luminosity, branching_ratio ) # L in pb1 - isrdown_xsection = calculate_xsection( normalisation['isrdown'], luminosity, branching_ratio ) # L in pb1 - isrup_xsection = calculate_xsection( normalisation['isrup'], luminosity, branching_ratio ) # L in pb1 - uedown_xsection = calculate_xsection( normalisation['uedown'], luminosity, branching_ratio ) # L in pb1 - ueup_xsection = calculate_xsection( normalisation['ueup'], luminosity, branching_ratio ) # L in pb1 + madgraphMLM_xsection = calculate_xsection( normalisation['madgraphMLM'], luminosity, branching_ratio ) + massdown_xsection = calculate_xsection( normalisation['massdown'], luminosity, branching_ratio ) # L in pb1 massup_xsection = calculate_xsection( normalisation['massup'], luminosity, branching_ratio ) # L in pb1 @@ -389,26 +298,21 @@ def calculate_xsections( normalisation, category, channel ): xsection_unfolded['powhegHerwig'] = powhegHerwig_xsection # xsection_unfolded['amcatnloHerwig'] = amcatnloHerwig_xsection - xsection_unfolded['fsrdown'] = fsrdown_xsection - xsection_unfolded['fsrup'] = 
fsrup_xsection - xsection_unfolded['isrdown'] = isrdown_xsection - xsection_unfolded['isrup'] = isrup_xsection - xsection_unfolded['uedown'] = uedown_xsection - xsection_unfolded['ueup'] = ueup_xsection xsection_unfolded['massdown'] = massdown_xsection xsection_unfolded['massup'] = massup_xsection + file_template = '{path_to_JSON}/{category}/xsection_{channel}_{method}.txt' filename = file_template.format( - path_to_JSON = path_to_JSON, - category = category, - channel = channel, - method = method, - ) - + path_to_JSON = path_to_JSON, + category = category, + channel = channel, + method = method, + ) write_data_to_JSON( xsection_unfolded, filename ) + return def calculate_normalised_xsections( normalisation, category, channel, normalise_to_one = False ): - global variable, met_type, path_to_JSON, phase_space + global variable, path_to_JSON, phase_space binWidths = None if phase_space == 'VisiblePS': @@ -448,32 +352,26 @@ def calculate_normalised_xsections( normalisation, category, channel, normalise_ normalised_xsection['powhegHerwig'] = powhegHerwig_normalised_xsection # normalised_xsection['amcatnloHerwig'] = amcatnloHerwig_normalised_xsection - normalised_xsection['fsrdown'] = fsrdown_normalised_xsection - normalised_xsection['fsrup'] = fsrup_normalised_xsection - normalised_xsection['isrdown'] = isrdown_normalised_xsection - normalised_xsection['isrup'] = isrup_normalised_xsection - normalised_xsection['uedown'] = uedown_normalised_xsection - normalised_xsection['ueup'] = ueup_normalised_xsection normalised_xsection['massdown'] = massdown_normalised_xsection normalised_xsection['massup'] = massup_normalised_xsection - file_template = '{path_to_JSON}/{category}/normalised_xsection_{channel}_{method}.txt' + file_template = '{path_to_JSON}/{category}/xsection_normalised_{channel}_{method}.txt' filename = file_template.format( - path_to_JSON = path_to_JSON, - category = category, - channel = channel, - method = method, - ) + path_to_JSON = path_to_JSON, + category 
= category, + channel = channel, + method = method, + ) if normalise_to_one: - filename = filename.replace( 'normalised_xsection', 'normalised_to_one_xsection' ) + filename = filename.replace( 'xsection_normalised', 'xsection_normalised_to_one' ) write_data_to_JSON( normalised_xsection, filename ) if __name__ == '__main__': set_root_defaults( msg_ignore_level = 3001 ) # setup parser = OptionParser() - parser.add_option( "-p", "--path", dest = "path", default = 'data/normalisation/', + parser.add_option( "-p", "--path", dest = "path", default = 'data/normalisation/background_subtraction/', help = "set path to JSON files" ) parser.add_option( "-v", "--variable", dest = "variable", default = 'MET', help = "set the variable to analyse (MET, HT, ST, MT)" ) @@ -506,8 +404,6 @@ def calculate_normalised_xsections( normalisation, category, channel, normalise_ # caching of variables for faster access translate_options = measurement_config.translate_options ttbar_theory_systematic_prefix = measurement_config.ttbar_theory_systematic_prefix - vjets_theory_systematic_prefix = measurement_config.vjets_theory_systematic_prefix - met_systematics = measurement_config.met_systematics centre_of_mass = options.CoM luminosity = measurement_config.luminosity * measurement_config.luminosity_scale @@ -515,25 +411,11 @@ def calculate_normalised_xsections( normalisation, category, channel, normalise_ path_to_files = measurement_config.path_to_files file_for_unfolding = File( measurement_config.unfolding_central, 'read' ) - # Not unfolding with other files at the moment - ### - ### # file_for_powheg_pythia = File( measurement_config.unfolding_powheg_pythia, 'read' ) - ### # file_for_mcatnlo = None - ### # if centre_of_mass == 8: - ### # file_for_mcatnlo = File( measurement_config.unfolding_mcatnlo, 'read' ) - ### # file_for_ptreweight = File ( measurement_config.unfolding_ptreweight, 'read' ) files_for_pdfs = { 'PDFWeights_%d' % (index) : File ( measurement_config.unfolding_pdfweights[index] 
) for index in range( 0, 100 ) } ### - # file_for_fsrdown = File( measurement_config.unfolding_fsr_down, 'read' ) - print 'WARNING - using fsr up file for non existent fsr down file' - file_for_fsrdown = File( measurement_config.unfolding_fsr_up, 'read' ) - file_for_fsrup = File( measurement_config.unfolding_fsr_up, 'read' ) - file_for_isrdown = File( measurement_config.unfolding_isr_down, 'read' ) - file_for_isrup = File( measurement_config.unfolding_isr_up, 'read' ) - file_for_uedown = File( measurement_config.unfolding_ue_down, 'read' ) - file_for_ueup = File( measurement_config.unfolding_ue_up, 'read' ) - + file_for_scaledown = File( measurement_config.unfolding_scale_down, 'read' ) + file_for_scaleup = File( measurement_config.unfolding_scale_up, 'read' ) ### file_for_renormalisationdown = File( measurement_config.unfolding_renormalisation_down, 'read' ) file_for_renormalisationup = File( measurement_config.unfolding_renormalisation_up, 'read' ) @@ -596,7 +478,6 @@ def calculate_normalised_xsections( normalisation, category, channel, normalise_ unfoldCfg.error_treatment = options.error_treatment method = options.unfolding_method combine_before_unfolding = options.combine_before_unfolding - met_type = translate_options[options.metType] b_tag_bin = translate_options[options.bjetbin] path_to_JSON = '{path}/{com}TeV/{variable}/{phase_space}/'.format( path = options.path, @@ -605,153 +486,130 @@ def calculate_normalised_xsections( normalisation, category, channel, normalise_ phase_space = phase_space, ) - categories = deepcopy( measurement_config.categories_and_prefixes.keys() ) - # No generator or theory systematics yet - ttbar_generator_systematics = [ttbar_theory_systematic_prefix + systematic for systematic in measurement_config.generator_systematics] - ### vjets_generator_systematics = [vjets_theory_systematic_prefix + systematic for systematic in measurement_config.generator_systematics] - # categories.extend( ttbar_generator_systematics ) - ### 
categories.extend( vjets_generator_systematics ) - - # ### ttbar theory systematics, including pt reweightingnsystematic - # ttbar_theory_systematics = [] #[ ttbar_theory_systematic_prefix + 'ptreweight' ] - # categories.extend( ttbar_theory_systematics ) - - pdf_uncertainties = ['PDFWeights_%d' % index for index in range( measurement_config.pdfWeightMin, measurement_config.pdfWeightMax )] - rate_changing_systematics = [systematic for systematic in measurement_config.rate_changing_systematics_names] - # all MET uncertainties except JES as this is already included - met_uncertainties = [suffix for suffix in measurement_config.met_systematics_suffixes if not 'JetEn' in suffix and not 'JetRes' in suffix] + all_measurements = deepcopy( measurement_config.measurements_and_prefixes.keys() ) - all_measurements = deepcopy( categories ) - all_measurements.extend( ttbar_generator_systematics ) + # ### ttbar pt reweightingng systematic + # ttbar_theory_systematics = [] #[ 'TTJets_ptreweight' ] + # all_measurements.extend( ttbar_theory_systematics ) + + pdf_uncertainties = ['PDFWeights_%d' % index for index in range( 0, 100 )] all_measurements.extend( pdf_uncertainties ) - all_measurements.extend( ['QCD_shape'] ) - all_measurements.extend( rate_changing_systematics ) + print 'Performing unfolding for variable', variable for category in all_measurements: if run_just_central and not category == 'central': continue - # Don't need to consider MET uncertainties for HT - if ( variable in measurement_config.variables_no_met ) and (category in measurement_config.met_systematics_suffixes and not category in ['JES_up', 'JES_down', 'JER_up', 'JER_down']): + # Don't need to consider MET uncertainties for HT, abs_lepton_eta, lepton_pt and NJets + if ( variable in measurement_config.variables_no_met ) and (category in measurement_config.met_specific_systematics): continue - print 'Doing category ', category print 'Unfolding category "%s"' % category - # Setting up systematic MET for JES 
up/down samples - met_type = translate_options[options.metType] - - if category == 'JES_up': - met_type += 'JetEnUp' - elif category == 'JES_down': - met_type += 'JetEnDown' - elif category == 'JER_up': - met_type += 'JetResUp' - elif category == 'JER_down': - met_type += 'JetResDown' - if category in met_uncertainties and not 'JES' in category and not 'JER' in category: - met_type += category - - # read fit results from JSON - electron_file = path_to_JSON + '/' + category + '/normalisation_electron_' + met_type + '.txt' - muon_file = path_to_JSON + '/' + category + '/normalisation_muon_' + met_type + '.txt' - - # don't change fit input for ttbar generator/theory systematics and PDF weights - if category in ttbar_generator_systematics or category in pdf_uncertainties: - # or category in ttbar_mass_systematics - electron_file = path_to_JSON + '/central/normalisation_electron_' + met_type + '.txt' - muon_file = path_to_JSON + '/central/normalisation_muon_' + met_type + '.txt' - # combined_file = path_to_JSON + '/central/normalisation_combined_' + met_type + '.txt' - elif category in rate_changing_systematics or category == 'QCD_shape': - electron_file = path_to_JSON + '/' + category + '/normalisation_electron_' + met_type + '.txt' - muon_file = path_to_JSON + '/' + category + '/normalisation_muon_' + met_type + '.txt' - elif category == 'central_TTJet': - electron_file = path_to_JSON + '/central/initial_normalisation_electron_' + met_type + '.txt' - muon_file = path_to_JSON + '/central/initial_normalisation_muon_' + met_type + '.txt' - # elif category in met_uncertainties and not 'JES' in category and not 'JER' in category: - # electron_file = path_to_JSON + '/'+category+'/initial_normalisation_electron_' + met_type + '.txt' - # muon_file = path_to_JSON + '/'+category+'/initial_normalisation_muon_' + met_type + '.txt' - elif category != 'central': - electron_file = path_to_JSON + '/' + category + '/normalisation_electron_' + met_type + '.txt' - muon_file = 
path_to_JSON + '/' + category + '/normalisation_muon_' + met_type + '.txt' - - fit_results_electron = None - fit_results_muon = None - + + # read normalisation results from JSON + electron_file = path_to_JSON + '/' + category + '/normalisation_electron.txt' + muon_file = path_to_JSON + '/' + category + '/normalisation_muon.txt' + + # don't change normalisation input for ttbar generator/theory systematics and PDF weights + # For systematics not run in 01 [PDF and TTJet_] then use the central normalisations + if ttbar_theory_systematic_prefix in category or category in pdf_uncertainties: + electron_file = path_to_JSON + '/central/normalisation_electron.txt' + muon_file = path_to_JSON + '/central/normalisation_muon.txt' + # combined_file = path_to_JSON + '/central/normalisation_combined.txt' + + # Read the normalisations + normalisation_results_electron = None + normalisation_results_muon = None if category == 'Muon_up' or category == 'Muon_down': - # fit_results_electron = read_data_from_JSON( path_to_JSON + '/central/initial_normalisation_electron_' + met_type + '.txt' ) - fit_results_electron = read_data_from_JSON( path_to_JSON + '/central/normalisation_electron_' + met_type + '.txt' ) - fit_results_muon = read_data_from_JSON( muon_file ) + normalisation_results_electron = read_data_from_JSON( path_to_JSON + '/central/normalisation_electron.txt' ) + normalisation_results_muon = read_data_from_JSON( muon_file ) elif category == 'Electron_up' or category == 'Electron_down': - fit_results_electron = read_data_from_JSON( electron_file ) - # fit_results_muon = read_data_from_JSON( path_to_JSON + '/central/initial_normalisation_muon_' + met_type + '.txt' ) - fit_results_muon = read_data_from_JSON( path_to_JSON + '/central/normalisation_muon_' + met_type + '.txt' ) + normalisation_results_electron = read_data_from_JSON( electron_file ) + normalisation_results_muon = read_data_from_JSON( path_to_JSON + '/central/normalisation_muon.txt' ) else: - fit_results_electron = 
read_data_from_JSON( electron_file ) - fit_results_muon = read_data_from_JSON( muon_file ) - fit_results_combined = combine_complex_results(fit_results_electron, fit_results_muon) - TTJet_fit_results_electron = fit_results_electron['TTJet'] - TTJet_fit_results_muon = fit_results_muon['TTJet'] - TTJet_fit_results_combined = fit_results_combined['TTJet'] - - # # change back to original MET type for the unfolding - met_type = translate_options[options.metType] - # # ad-hoc switch for PFMET -> patMETsPFlow - # if met_type == 'PFMET': - # met_type = 'patMETsPFlow' + normalisation_results_electron = read_data_from_JSON( electron_file ) + normalisation_results_muon = read_data_from_JSON( muon_file ) + + # Combine the normalisations (beforeUnfolding) + normalisation_results_combined = combine_complex_results(normalisation_results_electron, normalisation_results_muon) + TTJet_normalisation_results_electron = normalisation_results_electron['TTJet'] + TTJet_normalisation_results_muon = normalisation_results_muon['TTJet'] + TTJet_normalisation_results_combined = normalisation_results_combined['TTJet'] file_template = '{path_to_JSON}/{category}/unfolded_normalisation_{channel}_{method}.txt' filename = '' - # # get unfolded normalisation + # get unfolded normalisations and xsections unfolded_normalisation_electron = {} unfolded_normalisation_muon = {} + # Electron channel - unfolded_normalisation_electron = get_unfolded_normalisation( TTJet_fit_results_electron, category, 'electron', tau_value_electron, visiblePS = visiblePS ) + channel = 'electron' + unfolded_normalisation_electron = get_unfolded_normalisation( + TTJet_normalisation_results_electron, + category, + channel, + tau_value_electron, + visiblePS = visiblePS + ) filename = file_template.format( - path_to_JSON = path_to_JSON, - category = category, - channel = 'electron', - method = method, - ) + path_to_JSON = path_to_JSON, + category = category, + channel = channel, + method = method, + ) write_data_to_JSON( 
unfolded_normalisation_electron, filename ) # measure xsection - calculate_xsections( unfolded_normalisation_electron, category, 'electron' ) - calculate_normalised_xsections( unfolded_normalisation_electron, category, 'electron' ) - calculate_normalised_xsections( unfolded_normalisation_electron, category, 'electron' , True ) + calculate_xsections( unfolded_normalisation_electron, category, channel ) + calculate_normalised_xsections( unfolded_normalisation_electron, category, channel ) + calculate_normalised_xsections( unfolded_normalisation_electron, category, channel , True ) + # Muon channel - unfolded_normalisation_muon = get_unfolded_normalisation( TTJet_fit_results_muon, category, 'muon', tau_value_muon, visiblePS = visiblePS ) + channel = 'muon' + unfolded_normalisation_muon = get_unfolded_normalisation( + TTJet_normalisation_results_muon, + category, + channel, + tau_value_muon, + visiblePS = visiblePS + ) filename = file_template.format( - path_to_JSON = path_to_JSON, - category = category, - channel = 'muon', - method = method, - ) + path_to_JSON = path_to_JSON, + category = category, + channel = channel, + method = method, + ) write_data_to_JSON( unfolded_normalisation_muon, filename ) # measure xsection - calculate_xsections( unfolded_normalisation_muon, category, 'muon' ) - calculate_normalised_xsections( unfolded_normalisation_muon, category, 'muon' ) - calculate_normalised_xsections( unfolded_normalisation_muon, category, 'muon' , True ) + calculate_xsections( unfolded_normalisation_muon, category, channel ) + calculate_normalised_xsections( unfolded_normalisation_muon, category, channel ) + calculate_normalised_xsections( unfolded_normalisation_muon, category, channel , True ) + # Results where the channels are combined after unfolding unfolded_normalisation_combined = combine_complex_results( unfolded_normalisation_electron, unfolded_normalisation_muon ) channel = 'combined' filename = file_template.format( - path_to_JSON = path_to_JSON, - 
category = category, - channel = channel, - method = method, - ) + path_to_JSON = path_to_JSON, + category = category, + channel = channel, + method = method, + ) write_data_to_JSON( unfolded_normalisation_combined, filename ) + # measure xsection calculate_xsections( unfolded_normalisation_combined, category, channel ) calculate_normalised_xsections( unfolded_normalisation_combined, category, channel ) calculate_normalised_xsections( unfolded_normalisation_combined, category, channel , True ) - # Results where the channels are combined before unfolding + + # Results where the channels are combined before unfolding (the 'combined in the response matrix') unfolded_normalisation_combinedBeforeUnfolding = get_unfolded_normalisation( - TTJet_fit_results_combined, - category,'combined', tau_value=tau_value_combined, - visiblePS=visiblePS, + TTJet_normalisation_results_combined, + category, + 'combined', + tau_value=tau_value_combined, + visiblePS=visiblePS, ) channel = 'combinedBeforeUnfolding' filename = file_template.format( @@ -761,6 +619,7 @@ def calculate_normalised_xsections( normalisation, category, channel, normalise_ method = method, ) write_data_to_JSON( unfolded_normalisation_combinedBeforeUnfolding, filename ) + # measure xsection calculate_xsections( unfolded_normalisation_combinedBeforeUnfolding, category, channel ) calculate_normalised_xsections( unfolded_normalisation_combinedBeforeUnfolding, category, channel ) calculate_normalised_xsections( unfolded_normalisation_combinedBeforeUnfolding, category, channel , True ) From bbe7c9c2feb0853d06f837bc8c29fc0503e4ff3d Mon Sep 17 00:00:00 2001 From: Douglas Burns Date: Tue, 18 Oct 2016 12:21:24 +0100 Subject: [PATCH 08/90] Remove met_type, rename outputfiles so they are easier to find by eye --- dps/analysis/xsection/03_calculate_systematics.py | 7 +------ dps/utils/systematic.py | 8 ++++---- 2 files changed, 5 insertions(+), 10 deletions(-) diff --git a/dps/analysis/xsection/03_calculate_systematics.py 
b/dps/analysis/xsection/03_calculate_systematics.py index 075f5697..a5e7005b 100644 --- a/dps/analysis/xsection/03_calculate_systematics.py +++ b/dps/analysis/xsection/03_calculate_systematics.py @@ -26,7 +26,6 @@ write_normalised_xsection_measurement,\ write_systematic_xsection_measurement - if __name__ == '__main__': ''' 1) read all background subtraction results (group by MET, PDF, other) @@ -34,12 +33,10 @@ 3) ''' parser = OptionParser() - parser.add_option( "-p", "--path", dest = "path", default = 'data/M3_angle_bl/', + parser.add_option( "-p", "--path", dest = "path", default = 'data/normalisation/background_subtraction/', help = "set path to JSON files" ) parser.add_option( "-v", "--variable", dest = "variable", default = 'MET', help = "set variable to plot (MET, HT, ST, MT)" ) - parser.add_option( "-m", "--metType", dest = "metType", default = 'type1', - help = "set MET type used in the analysis of MET, ST or MT" ) parser.add_option( "-b", "--bjetbin", dest = "bjetbin", default = '2m', help = "set b-jet multiplicity for analysis. 
Options: exclusive: 0-3, inclusive (N or more): 0m, 1m, 2m, 3m, 4m" ) parser.add_option( "-c", "--centre-of-mass-energy", dest = "CoM", default = 13, type = int, @@ -56,7 +53,6 @@ # caching of variables for shorter access translate_options = measurement_config.translate_options met_specific_systematics = measurement_config.met_specific_systematics - met_type = translate_options[options.metType] variables_no_met = measurement_config.variables_no_met method = options.unfolding_method symmetrise_errors = options.symmetrise_errors @@ -81,7 +77,6 @@ # List of options to pass to systematic functions opts={ 'met_specific_systematics' : met_specific_systematics, - 'met_type' : met_type, 'variables_no_met' : variables_no_met, 'symmetrise_errors' : symmetrise_errors, 'path_to_JSON' : path_to_JSON, diff --git a/dps/utils/systematic.py b/dps/utils/systematic.py index 89f43dc4..e2344dae 100644 --- a/dps/utils/systematic.py +++ b/dps/utils/systematic.py @@ -16,7 +16,7 @@ def write_normalised_xsection_measurement(options, measurement, measurement_unfo method=options['method'] channel=options['channel'] - output_file = '{path_to_JSON}/central/normalised_xsection_{channel}_{method}_with_errors.txt' + output_file = '{path_to_JSON}/central/xsection_normalised_{channel}_{method}_with_errors.txt' output_file = output_file.format( path_to_JSON = path_to_JSON, channel = channel, @@ -40,7 +40,7 @@ def write_systematic_xsection_measurement(options, systematic, total_syst, summa method=options['method'] channel=options['channel'] - output_file = '{path_to_JSON}/central/normalised_xsection_{channel}_{method}_summary_absolute.txt' + output_file = '{path_to_JSON}/central/xsection_normalised_{channel}_{method}_summary_absolute.txt' output_file = output_file.format( path_to_JSON = path_to_JSON, channel = channel, @@ -100,7 +100,7 @@ def read_normalised_xsection_measurement(options, category): path_to_JSON=options['path_to_JSON'] method=options['method'] channel=options['channel'] - filename = 
'{path}/{category}/normalised_xsection_{channel}_{method}.txt' + filename = '{path}/{category}/xsection_normalised_{channel}_{method}.txt' # Disregarding Met Uncertainties if variable does not use MET if (category in met_specific_systematics) and (variable in variables_no_met): filename = filename.format( @@ -117,7 +117,7 @@ def read_normalised_xsection_measurement(options, category): method = method ) normalised_xsection = read_data_from_JSON( filename ) - measurement = normalised_xsection['TTJet_measured']#should this be measured without fakes??? + measurement = normalised_xsection['TTJet_measured_withoutFakes']#should this be measured without fakes??? measurement_unfolded = normalised_xsection['TTJet_unfolded'] return measurement, measurement_unfolded From d7fc8682dbbb7b135a7de8244d7f37d8fb4706dc Mon Sep 17 00:00:00 2001 From: Douglas Burns Date: Tue, 18 Oct 2016 12:22:35 +0100 Subject: [PATCH 09/90] Use new naming scheme. Remove old plotting scripts --- .../xsection/04_make_plots_matplotlib.py | 193 +----------------- 1 file changed, 5 insertions(+), 188 deletions(-) diff --git a/dps/analysis/xsection/04_make_plots_matplotlib.py b/dps/analysis/xsection/04_make_plots_matplotlib.py index fe59cbd1..c0d379a2 100644 --- a/dps/analysis/xsection/04_make_plots_matplotlib.py +++ b/dps/analysis/xsection/04_make_plots_matplotlib.py @@ -16,6 +16,8 @@ from ROOT import kRed, kGreen, kMagenta, kBlue, kBlack from dps.utils.ROOT_utils import set_root_defaults import matplotlib as mpl +from matplotlib import rc + from dps.utils.plotting import get_best_max_y mpl.use( 'agg' ) import rootpy.plotting.root2matplotlib as rplt @@ -28,6 +30,7 @@ setup_matplotlib() import matplotlib.patches as mpatches +import latexcodec from dps.utils.logger import log xsec_04_log = log["src/cross_section_measurement/04_make_plots_matplotlib"] @@ -40,7 +43,7 @@ def read_xsection_measurement_results( category, channel ): filename = file_template.format( path = path_to_JSON, category = category, - name = 
'normalised_xsection', + name = 'xsection_normalised', channel = channel, method = method, suffix = '', @@ -92,7 +95,7 @@ def read_xsection_measurement_results( category, channel ): filename = file_template.format( path = path_to_JSON, category = category, - name = 'normalised_xsection', + name = 'xsection_normalised', channel = channel, method = method, suffix = '_with_errors', @@ -137,192 +140,6 @@ def read_xsection_measurement_results( category, channel ): return histograms_normalised_xsection_different_generators, histograms_normalised_xsection_systematics_shifts -@xsec_04_log.trace() -def read_fit_templates_and_results_as_histograms( category, channel ): - global path_to_JSON, variable, met_type, phase_space - templates = read_data_from_JSON( path_to_JSON + '/fit_results/' + category + '/templates_' + channel + '_' + met_type + '.txt' ) - - data_values = read_data_from_JSON( path_to_JSON + '/fit_results/' + category + '/initial_values_' + channel + '_' + met_type + '.txt' )['data'] - fit_results = read_data_from_JSON( path_to_JSON + '/fit_results/' + category + '/fit_results_' + channel + '_' + met_type + '.txt' ) - fit_variables = templates.keys() - template_histograms = {fit_variable: {} for fit_variable in fit_variables} - fit_results_histograms = {fit_variable: {} for fit_variable in fit_variables} - - variableBins = None - if phase_space == 'VisiblePS': - variableBins = variable_bins_visiblePS_ROOT - elif phase_space == 'FullPS': - variableBins = variable_bins_ROOT - - for bin_i, variable_bin in enumerate( variableBins[variable] ): - for fit_variable in fit_variables: - h_template_data = value_tuplelist_to_hist( templates[fit_variable]['data'][bin_i], fit_variable_bin_edges[fit_variable] ) - h_template_ttjet = value_tuplelist_to_hist( templates[fit_variable]['TTJet'][bin_i], fit_variable_bin_edges[fit_variable] ) - h_template_singletop = value_tuplelist_to_hist( templates[fit_variable]['SingleTop'][bin_i], fit_variable_bin_edges[fit_variable] ) - 
h_template_VJets = value_tuplelist_to_hist( templates[fit_variable]['V+Jets'][bin_i], fit_variable_bin_edges[fit_variable] ) - h_template_QCD = value_tuplelist_to_hist( templates[fit_variable]['QCD'][bin_i], fit_variable_bin_edges[fit_variable] ) - template_histograms[fit_variable][variable_bin] = { - 'TTJet' : h_template_ttjet, - 'SingleTop' : h_template_singletop, - 'V+Jets':h_template_VJets, - 'QCD':h_template_QCD - } - h_data = h_template_data.Clone() - h_ttjet = h_template_ttjet.Clone() - h_singletop = h_template_singletop.Clone() - h_VJets = h_template_VJets.Clone() - h_QCD = h_template_QCD.Clone() - - data_normalisation = data_values[bin_i][0] - n_ttjet = fit_results['TTJet'][bin_i][0] - n_singletop = fit_results['SingleTop'][bin_i][0] - VJets_normalisation = fit_results['V+Jets'][bin_i][0] - QCD_normalisation = fit_results['QCD'][bin_i][0] - - h_data.Scale( data_normalisation ) - h_ttjet.Scale( n_ttjet ) - h_singletop.Scale( n_singletop ) - h_VJets.Scale( VJets_normalisation ) - h_QCD.Scale( QCD_normalisation ) - h_background = h_VJets + h_QCD + h_singletop - - for bin_i_data in range( len( h_data ) ): - h_data.SetBinError( bin_i_data + 1, sqrt( h_data.GetBinContent( bin_i_data + 1 ) ) ) - - fit_results_histograms[fit_variable][variable_bin] = { - 'data' : h_data, - 'signal' : h_ttjet, - 'background' : h_background - } - - return template_histograms, fit_results_histograms - -@xsec_04_log.trace() -def make_template_plots( histograms, category, channel ): - global variable, output_folder, phase_space - fit_variables = histograms.keys() - - variableBins = None - if phase_space == 'VisiblePS': - variableBins = variable_bins_visiblePS_ROOT - elif phase_space == 'FullPS': - variableBins = variable_bins_ROOT - - for variable_bin in variableBins[variable]: - path = output_folder + str( measurement_config.centre_of_mass_energy ) + 'TeV/' + variable + '/' + category + '/fit_templates/' - make_folder_if_not_exists( path ) - for fit_variable in fit_variables: - 
plotname = path + channel + '_' + fit_variable + '_template_bin_' + variable_bin - - # check if template plots exist already - for output_format in output_formats: - if os.path.isfile( plotname + '.' + output_format ): - continue - - # plot with matplotlib - h_ttjet = histograms[fit_variable][variable_bin]['TTJet'] - h_single_top = histograms[fit_variable][variable_bin]['SingleTop'] - h_VJets = histograms[fit_variable][variable_bin]['V+Jets'] - h_QCD = histograms[fit_variable][variable_bin]['QCD'] - - h_ttjet.linecolor = 'red' - h_single_top.linecolor = 'magenta' - h_VJets.linecolor = 'green' - h_QCD.linecolor = 'gray' - h_VJets.linestyle = 'dashed' - h_QCD.linestyle = 'dotted' # currently not working - # bug report: http://trac.sagemath.org/sage_trac/ticket/13834 - - h_ttjet.linewidth = 5 - h_single_top.linewidth = 5 - h_VJets.linewidth = 5 - h_QCD.linewidth = 5 - - plt.figure( figsize = ( 16, 16 ), dpi = 200, facecolor = 'white' ) - axes = plt.axes() - if not variable in ['NJets']: - axes.minorticks_on() - - plt.xlabel( fit_variables_latex[fit_variable], CMS.x_axis_title ) - plt.ylabel( 'normalised to unit area/(%s)' % get_unit_string(fit_variable), CMS.y_axis_title ) - plt.tick_params( **CMS.axis_label_major ) - if not variable in ['NJets']: - plt.tick_params( **CMS.axis_label_minor ) - - rplt.hist( h_ttjet, axes = axes, label = 'signal' ) - rplt.hist( h_single_top, axes = axes, label = 'Single Top' ) - - if ( h_VJets.Integral() != 0 ): - rplt.hist( h_VJets, axes = axes, label = 'V+Jets' ) - else: - print("WARNING: in %s bin %s, %s category, %s channel, V+Jets template is empty: not plotting." % ( variable, variable_bin, category, channel )) - if ( h_QCD.Integral() != 0 ): - rplt.hist( h_QCD, axes = axes, label = 'QCD' ) - else: - print("WARNING: in %s bin %s, %s category, %s channel, QCD template is empty: not plotting." 
% ( variable, variable_bin, category, channel )) - y_max = get_best_max_y([h_ttjet, h_single_top, h_VJets, h_QCD]) - axes.set_ylim( [0, y_max * 1.1] ) - axes.set_xlim( measurement_config.fit_boundaries[fit_variable] ) - - plt.legend( numpoints = 1, loc = 'upper right', prop = CMS.legend_properties ) - label, channel_label = get_cms_labels( channel ) - plt.title( label, CMS.title ) - # CMS text - # note: fontweight/weight does not change anything as we use Latex text!!! - plt.text(0.95, 0.95, r"\textbf{CMS}", transform=axes.transAxes, fontsize=42, - verticalalignment='top',horizontalalignment='right') - # channel text - axes.text(0.95, 0.95, r"\emph{%s}" %channel_label, transform=axes.transAxes, fontsize=40, - verticalalignment='top',horizontalalignment='right') - - plt.tight_layout() - - for output_format in output_formats: - plt.savefig( plotname + '.' + output_format ) - - plt.close() - gc.collect() - -@xsec_04_log.trace() -def plot_fit_results( histograms, category, channel ): - global variable, b_tag_bin, output_folder, phase_space - from dps.utils.plotting import Histogram_properties, make_data_mc_comparison_plot - fit_variables = histograms.keys() - - variableBins = None - if phase_space == 'VisiblePS': - variableBins = variable_bins_visiblePS_ROOT - elif phase_space == 'FullPS': - variableBins = variable_bins_ROOT - - for variable_bin in variableBins[variable]: - path = output_folder + str( measurement_config.centre_of_mass_energy ) + 'TeV/' + variable + '/' + category + '/fit_results/' - make_folder_if_not_exists( path ) - for fit_variable in fit_variables: - plotname = channel + '_' + fit_variable + '_bin_' + variable_bin - # check if template plots exist already - for output_format in output_formats: - if os.path.isfile( plotname + '.' 
+ output_format ): - continue - - # plot with matplotlib - h_data = histograms[fit_variable][variable_bin]['data'] - h_signal = histograms[fit_variable][variable_bin]['signal'] - h_background = histograms[fit_variable][variable_bin]['background'] - - histogram_properties = Histogram_properties() - histogram_properties.name = plotname - histogram_properties.x_axis_title = fit_variables_latex[fit_variable] - histogram_properties.y_axis_title = 'Events/(%s)' % get_unit_string(fit_variable) - label, _ = get_cms_labels( channel ) - histogram_properties.title = label - histogram_properties.x_limits = measurement_config.fit_boundaries[fit_variable] - - make_data_mc_comparison_plot( [h_data, h_background, h_signal], - ['data', 'background', 'signal'], - ['black', 'green', 'red'], histogram_properties, - save_folder = path, save_as = output_formats ) - @xsec_04_log.trace() def get_cms_labels( channel ): global b_tag_bin From 6289feac47a721f9e0b92d6bf907fd7d4c5dbee2 Mon Sep 17 00:00:00 2001 From: Douglas Burns Date: Tue, 18 Oct 2016 12:23:56 +0100 Subject: [PATCH 10/90] Add new plots showing largest systematics --- .../xsection/05_make_systematic_plots.py | 154 ++++++++++++++---- 1 file changed, 123 insertions(+), 31 deletions(-) diff --git a/dps/analysis/xsection/05_make_systematic_plots.py b/dps/analysis/xsection/05_make_systematic_plots.py index 2bf342c4..8dee2248 100644 --- a/dps/analysis/xsection/05_make_systematic_plots.py +++ b/dps/analysis/xsection/05_make_systematic_plots.py @@ -14,54 +14,85 @@ # dynamic matplotlib settings from matplotlib import rc +from operator import itemgetter + rc( 'font', **CMS.font ) rc( 'text', usetex = False ) -def plot_systematic_uncertainties(systematic_uncertainties, bin_edges, variable, output_folder): +def plot_systematic_uncertainties(systematic_uncertainties, bin_edges, variable, output_folder, subcategories = [], subname = '', plot_largest = False): ''' Plot the systematic uncertainties ''' + print subcategories + if not 
subcategories: subcategories = systematic_uncertainties.keys() + x_limits = [bin_edges[0], bin_edges[-1]] - y_limits = [0,0.6] + y_limits = [-0.6,0.6] fig_syst = plt.figure( figsize = ( 20, 16 ), dpi = 400, facecolor = 'white' ) ax_syst = fig_syst.add_subplot(1, 1, 1) ax_syst.minorticks_on() ax_syst.xaxis.labelpad = 12 ax_syst.yaxis.labelpad = 12 - error_hists = {} + error_hists_up = {} + error_hists_down = {} stat_hist = None for syst, vals in systematic_uncertainties.iteritems(): - if syst == 'statistical': - stat_hist = values_and_errors_to_hist( vals, [], bin_edges ) + if syst == 'central': + n = len(systematic_uncertainties[syst]) + continue + elif syst == 'statistical': + stat_hist_up = values_and_errors_to_hist( vals, [], bin_edges ) + stat_hist_down = values_and_errors_to_hist( -vals, [], bin_edges ) elif syst == 'systematic': - full_syst_hist = values_and_errors_to_hist( vals, [], bin_edges ) - elif syst == 'central': - central_hist = values_and_errors_to_hist( vals, [], bin_edges ) - else: - error_hists[syst] = values_and_errors_to_hist( vals, [], bin_edges ) + syst_hist_up = values_and_errors_to_hist( vals, [], bin_edges ) + syst_hist_down = values_and_errors_to_hist( -vals, [], bin_edges ) + elif syst in subcategories: + error_hists_up[syst] = values_and_errors_to_hist( vals, [], bin_edges ) + error_hists_down[syst] = values_and_errors_to_hist( -vals, [], bin_edges ) + else: continue + + if plot_largest: + largest_syst = [] + for bin_i in range( n ): + high = [] + for syst, vals in systematic_uncertainties.iteritems(): + if syst == 'central': continue + if syst == 'statistical': continue + if syst == 'systematic': continue + high.append([syst,vals[bin_i]]) + high = sorted(high, key = itemgetter(1), reverse=True) + # Retrieve highest systematics + if high[0][0] not in largest_syst: largest_syst.append(high[0][0]) + elif high[1][0] not in largest_syst: largest_syst.append(high[1][0]) + else: continue + + rplt.fill_between( syst_hist_up, syst_hist_down, 
color = 'yellow', label='Syst.' ) + rplt.fill_between( stat_hist_down, stat_hist_up, color = 'grey', label='Stat.' ) plt.tick_params( **CMS.axis_label_major ) plt.tick_params( **CMS.axis_label_minor ) colours = ['red', 'blue', 'green', 'chartreuse', 'indigo', 'magenta', 'darkmagenta', 'hotpink', 'cyan', 'darkred', 'darkgoldenrod', 'mediumvioletred', 'mediumspringgreen', 'gold', 'darkgoldenrod', 'slategray', 'dodgerblue', 'cadetblue', 'darkblue', 'seagreen', 'deeppink' ] - for source, colour in zip (error_hists.keys(), colours): - hist = error_hists[source] - hist.linewidth = 4 - hist.color = colour - rplt.hist( hist, stacked=False, axes = ax_syst, label = source ) - - stat_hist.linewidth = 4 - stat_hist.color = 'black' - stat_hist.linestyle = 'dashed' - rplt.hist( stat_hist, stacked=False, axes = ax_syst, label = 'stat.' ) - - full_syst_hist.linewidth = 4 - full_syst_hist.color = 'black' - rplt.hist( full_syst_hist, stacked=False, axes = ax_syst, label = 'tot syst.' ) - - leg = plt.legend(loc=1,prop={'size':30},ncol=2) + + for error_hists in [error_hists_up, error_hists_down]: + for i, source, in enumerate(error_hists.keys()): + hist = error_hists[source] + hist.linewidth = 4 + hist.color = colours[i] + if plot_largest: + if source not in largest_syst: + hist.linestyle = 'dashed' + hist.alpha = 0.4 + hist.linewidth = 2 + # Only label systematic once + if error_hists == error_hists_up: + rplt.hist( hist, stacked=False, label = source ) + else: + rplt.hist( hist, stacked=False, label = '' ) + + leg = plt.legend(loc='lower right',prop={'size':20},ncol=4) leg.draw_frame(False) x_title = variables_NonLatex[variable] @@ -72,16 +103,53 @@ def plot_systematic_uncertainties(systematic_uncertainties, bin_edges, variable, ax_syst.set_ylim( y_limits ) plt.xlabel( x_title, CMS.x_axis_title ) plt.ylabel( 'Relative Uncertainty', CMS.y_axis_title) + + template = '%.1f fb$^{-1}$ (%d TeV)' + label = template % ( measurement_config.new_luminosity/1000, 
measurement_config.centre_of_mass_energy) + plt.title( label,loc='right', **CMS.title ) + + logo_location = (0.05, 0.98) + prelim_location = (0.05, 0.92) + channel_location = ( 0.05, 0.86) + plt.text(logo_location[0], logo_location[1], + r"\textbf{CMS}", + transform=ax_syst.transAxes, + fontsize=42, + verticalalignment='top', + horizontalalignment='left' + ) + # preliminary + plt.text(prelim_location[0], prelim_location[1], + r"\emph{Preliminary}", + transform=ax_syst.transAxes, + fontsize=42, + verticalalignment='top', + horizontalalignment='left' + ) + # channel text + plt.text(channel_location[0], channel_location[1], + r"\emph{%s}" %channel, + transform=ax_syst.transAxes, + fontsize=40, + verticalalignment='top', + horizontalalignment='left' + ) + plt.tight_layout() - file_template = output_folder + '{var}_systematics_{com}TeV.pdf'.format( + file_template = output_folder + '{var}_systematics_{com}TeV'.format( var = variable, com = measurement_config.centre_of_mass_energy, ) + if subname: file_template = file_template + '_' + subname + file_template += '.pdf' fig_syst.savefig(file_template) print "Written plots to {f}".format(f = file_template) + # plt.show() return + + if __name__ == '__main__': parser = ArgumentParser(__doc__) parser.add_argument( @@ -135,18 +203,22 @@ def plot_systematic_uncertainties(systematic_uncertainties, bin_edges, variable, variable = args.variable output_folder = args.output_folder ps_vis = args.visiblePS - + + phase_space = 'FullPS' bin_edges = bin_edges_full[variable] if ps_vis: phase_space = 'VisiblePS' bin_edges = bin_edges_vis[variable] measurement_config = XSectionConfig(com) + # for keys in measurement_config.rate_changing_systematics_values.keys(): + # print keys + # print measurement_config.rate_changing_systematics_values[keys].scale for channel in ['electron', 'muon', 'combined', 'combinedBeforeUnfolding']: - - input_file = 
'{basepath}/{com}TeV/{var}/{ps}/central/normalised_xsection_{channel}_{method}_summary_relative.txt'.format( + # if channel != 'combined':continue + input_file = '{basepath}/{com}TeV/{var}/{ps}/central/xsection_normalised_{channel}_{method}_summary_relative.txt'.format( basepath = path, com = com, var = variable, @@ -162,5 +234,25 @@ def plot_systematic_uncertainties(systematic_uncertainties, bin_edges, variable, systematic_uncertainties = pu.file_to_df(input_file) - plot_systematic_uncertainties(systematic_uncertainties, bin_edges, variable, output_folder) + # any group of systematics you want to plot + l_xsec = [] + l_mc = [] + l_weight = [] + l_met = [] + l_shape = [] + for k in systematic_uncertainties.keys(): + if 'cross_section' in k: l_xsec.append(k) + elif 'TTJets_' in k: l_mc.append(k) + elif ('Electron' in k or 'Muon' in k or 'PileUp' in k or 'luminosity' in k or 'BJet' in k) and 'En' not in k: l_weight.append(k) + elif 'En' in k: l_met.append(k) + elif 'JES' in k or 'JER' in k or 'QCD_shape' in k or 'PDF' in k: l_shape.append(k) + else : print ' Not including {}'.format(k) + # # Plot them + plot_systematic_uncertainties(systematic_uncertainties, bin_edges, variable, output_folder) + plot_systematic_uncertainties(systematic_uncertainties, bin_edges, variable, output_folder, plot_largest = True, subname = 'largest') + plot_systematic_uncertainties(systematic_uncertainties, bin_edges, variable, output_folder,l_xsec, "xsection") + plot_systematic_uncertainties(systematic_uncertainties, bin_edges, variable, output_folder,l_mc, "mc") + plot_systematic_uncertainties(systematic_uncertainties, bin_edges, variable, output_folder,l_weight, "weight") + plot_systematic_uncertainties(systematic_uncertainties, bin_edges, variable, output_folder,l_met, "met") + plot_systematic_uncertainties(systematic_uncertainties, bin_edges, variable, output_folder,l_shape, "shape") From 0879f099aa1b919c89d391fc64e9eecabd4666ea Mon Sep 17 00:00:00 2001 From: Douglas Burns Date: Tue, 
18 Oct 2016 12:25:04 +0100 Subject: [PATCH 11/90] get_files_in_path glob->os.walk --- dps/utils/file_utilities.py | 16 +++++++++++++--- 1 file changed, 13 insertions(+), 3 deletions(-) diff --git a/dps/utils/file_utilities.py b/dps/utils/file_utilities.py index 64ca1af6..c0282e6e 100644 --- a/dps/utils/file_utilities.py +++ b/dps/utils/file_utilities.py @@ -52,9 +52,19 @@ def read_data_from_JSON(JSON_input_file): return data def get_files_in_path(path, file_ending = '.root'): - path += '/*' + file_ending - files = glob.glob(path) - return files + ''' + Return the files for a given path + ''' + input_files=[] + print path + if os.path.exists(path): + for root, dirs, files in os.walk(path): + for name in files: + if file_ending in name: + input_files.append(os.path.join(root, name)) + else: + print "Could not find required folder" + return input_files def check_ROOT_file(filename): passesCheck = can_open_ROOT_file(filename) From 680013ad7cc2131aad8c8b9b336481a96f33de8d Mon Sep 17 00:00:00 2001 From: Douglas Burns Date: Tue, 18 Oct 2016 12:38:23 +0100 Subject: [PATCH 12/90] update path to new ntuples. 
remove amcHerwig --- .../DougsBTagEff/makeBTagEfficiencies.py | 12 ++++++++---- 1 file changed, 8 insertions(+), 4 deletions(-) diff --git a/dps/experimental/DougsBTagEff/makeBTagEfficiencies.py b/dps/experimental/DougsBTagEff/makeBTagEfficiencies.py index a237eb28..e2cad743 100644 --- a/dps/experimental/DougsBTagEff/makeBTagEfficiencies.py +++ b/dps/experimental/DougsBTagEff/makeBTagEfficiencies.py @@ -6,6 +6,7 @@ import math import os from optparse import OptionParser +from dps.utils.file_utilities import make_folder_if_not_exists ROOT.gROOT.SetBatch(True) if __name__ == '__main__': @@ -82,7 +83,6 @@ Mu_inputTree = "TTbar_plus_X_analysis/MuPlusJets/Ref selection NoBSelection/BTagEfficiencies/Jets" Mu_Chain = TChain(Mu_inputTree) Mu_Chain.Add(input_file) - Chain = { 0 : E_Chain, 1 : Mu_Chain, @@ -98,6 +98,7 @@ n=n+1 if options.test : if n == 10000 : break + # if n == 10 : break NJets = event.__getattr__("NJets") pt = event.__getattr__("pt") eta = event.__getattr__("eta") @@ -111,10 +112,13 @@ puWeight = event.__getattr__("PUWeight") if key == 0 : leptonWeight = event.__getattr__("ElectronEfficiencyCorrection") else : leptonWeight = event.__getattr__("MuonEfficiencyCorrection") - + + + weight = eventWeight * puWeight * leptonWeight + - if (NJets == 0): continue; + if (NJets <= 0): continue; for JetIndex in range (0,int(NJets)): if (pt[JetIndex] < 30): continue; @@ -183,7 +187,7 @@ if options.make_plots: f = TFile("BTagEfficiency.root", "OPEN") - + make_folder_if_not_exists('plots/') for key in range (0, len(input_files)): generator = input_files[key][1] From 373e9ec525f01567a3d6400594d8767e268e7920 Mon Sep 17 00:00:00 2001 From: Douglas Burns Date: Fri, 4 Nov 2016 09:05:08 +0000 Subject: [PATCH 13/90] change unfolding config to find files --- dps/analysis/unfolding_tests/makeConfig.py | 4 ++-- 1 file changed, 2 insertions(+), 2 deletions(-) diff --git a/dps/analysis/unfolding_tests/makeConfig.py b/dps/analysis/unfolding_tests/makeConfig.py index 8f28c057..e1e10459 
100644 --- a/dps/analysis/unfolding_tests/makeConfig.py +++ b/dps/analysis/unfolding_tests/makeConfig.py @@ -48,7 +48,7 @@ histogramTemplate = "%s_%s" % ( variable, channel ) outputJson = { "output_folder": "plots/unfolding/bestRegularisation/VisiblePS", - "output_format": ["png", "pdf"], + "output_format": ["pdf"], "centre-of-mass energy" : com, "channel": "%s" % channel, "variable": "%s" % variable, @@ -66,7 +66,7 @@ # "histogram": "%s/measuredVis" % ( histogramTemplate ), }, "data" : { - "file": "data/normalisation/background_subtraction/%sTeV/%s/VisiblePS/central/normalisation_%s_patType1CorrectedPFMet.txt" % ( com, variable, channel), + "file": "data/normalisation/background_subtraction/%sTeV/%s/VisiblePS/central/normalisation_%s.txt" % ( com, variable, channel), "histogram": "TTJet" }, } From 2af5bcf7137ace4d581b422d0a487800acdc57f6 Mon Sep 17 00:00:00 2001 From: Douglas Burns Date: Tue, 22 Nov 2016 13:19:02 +0000 Subject: [PATCH 14/90] Fix for 00. NJet now skips resolution properly --- dps/analysis/xsection/00_pick_bins.py | 13 +++++++------ 1 file changed, 7 insertions(+), 6 deletions(-) diff --git a/dps/analysis/xsection/00_pick_bins.py b/dps/analysis/xsection/00_pick_bins.py index d24549ef..1c2d3303 100644 --- a/dps/analysis/xsection/00_pick_bins.py +++ b/dps/analysis/xsection/00_pick_bins.py @@ -106,6 +106,8 @@ def main(): best_binning, histogram_information = get_best_binning( histogram_information , p_min, s_min, n_min_lepton, minimum_bin_width[variable], x_min=23. 
) elif variable == 'abs_lepton_eta': best_binning, histogram_information = get_best_binning( histogram_information , p_min, s_min, n_min_lepton, minimum_bin_width[variable] ) + elif variable == 'NJets': + best_binning, histogram_information = get_best_binning( histogram_information , p_min, s_min, n_min, minimum_bin_width[variable], is_NJet=True) else: best_binning, histogram_information = get_best_binning( histogram_information , p_min, s_min, n_min, minimum_bin_width[variable] ) @@ -209,7 +211,7 @@ def get_histograms( variable, options ): -def get_best_binning( histogram_information, p_min, s_min, n_min, min_width, x_min = None ): +def get_best_binning( histogram_information, p_min, s_min, n_min, min_width, x_min = None, is_NJet=False ): ''' Step 1: Change the size of the first bin until it fulfils the minimal criteria Step 3: Check if it is true for all other histograms. If not back to step 2 @@ -232,7 +234,7 @@ def get_best_binning( histogram_information, p_min, s_min, n_min, min_width, x_m while current_bin_end < n_bins: # bin_End, p, s, N_reco - current_bin_end, _, _, _, r = get_next_end( histograms, current_bin_start, current_bin_end, p_min, s_min, n_min, min_width ) + current_bin_end, _, _, _, r = get_next_end( histograms, current_bin_start, current_bin_end, p_min, s_min, n_min, min_width, is_NJet=is_NJet ) resolutions.append(r) if not bin_edges: # if empty @@ -264,7 +266,7 @@ def get_best_binning( histogram_information, p_min, s_min, n_min, min_width, x_m return bin_edges, histogram_information -def get_next_end( histograms, bin_start, bin_end, p_min, s_min, n_min, min_width ): +def get_next_end( histograms, bin_start, bin_end, p_min, s_min, n_min, min_width, is_NJet=False ): current_bin_start = bin_start current_bin_end = bin_end p, s = 0, 0 @@ -313,8 +315,8 @@ def get_next_end( histograms, bin_start, bin_end, p_min, s_min, n_min, min_width # The StdDev of Gaussian = Resolution. 
# If Resolution < Bin width then we are all good - # NJets is not great at the moment for fitting guassians - if (var=='NJets'): + # Dont use resolution information on NJets + if is_NJet: current_bin_end = bin_i break @@ -325,7 +327,6 @@ def get_next_end( histograms, bin_start, bin_end, p_min, s_min, n_min, min_width current_bin_end = bin_i break - # if it gets to the end, this is the best we can do current_bin_end = bin_i return current_bin_end, p, s, n_reco, res From c694f553461bb4dfc2147040bc0676c219867cae Mon Sep 17 00:00:00 2001 From: Douglas Burns Date: Tue, 22 Nov 2016 13:42:17 +0000 Subject: [PATCH 15/90] 01 opt_parser->arg_parser if we want to go beyond python2.7 --- .../xsection/01_get_ttjet_normalisation.py | 56 +++++++++---------- 1 file changed, 28 insertions(+), 28 deletions(-) diff --git a/dps/analysis/xsection/01_get_ttjet_normalisation.py b/dps/analysis/xsection/01_get_ttjet_normalisation.py index dedb804d..99540e4a 100644 --- a/dps/analysis/xsection/01_get_ttjet_normalisation.py +++ b/dps/analysis/xsection/01_get_ttjet_normalisation.py @@ -16,7 +16,7 @@ All should come down to the function to extract the # events from TTJet ''' from __future__ import division -from optparse import OptionParser +from argparse import ArgumentParser from dps.utils.logger import log from dps.config.xsection import XSectionConfig from dps.analysis.xsection.lib import closure_tests @@ -160,50 +160,50 @@ def combine(self, other): self.channel = 'combined' -def parse_options(): - parser = OptionParser(__doc__) - parser.add_option("-p", "--path", dest="path", default='data', +def parse_arguments(): + parser = ArgumentParser(__doc__) + parser.add_argument("-p", "--path", dest="path", default='data', help="set output path for JSON files. 
Default is 'data'.") - parser.add_option("-i", "--input", dest="input", + parser.add_argument("-i", "--input", dest="input", default='config/measurements/background_subtraction/', help="set output path for JSON files") - parser.add_option("-v", "--variable", dest="variable", default='MET', + parser.add_argument("-v", "--variable", dest="variable", default='MET', help="set the variable to analyse (MET, HT, ST, MT, WPT). Default is MET.") - parser.add_option("-c", "--centre-of-mass-energy", dest="CoM", default=13, type=int, + parser.add_argument("-c", "--centre-of-mass-energy", dest="CoM", default=13, type=int, help="set the centre of mass energy for analysis. Default = 13 [TeV]") - parser.add_option('-d', '--debug', dest="debug", action="store_true", + parser.add_argument('-d', '--debug', dest="debug", action="store_true", help="Print the debug information") - parser.add_option('--closure_test', dest="closure_test", action="store_true", + parser.add_argument('--closure_test', dest="closure_test", action="store_true", help="Perform fit on data == sum(MC) * scale factor (MC process)") - parser.add_option('--closure_test_type', dest="closure_test_type", default='simple', + parser.add_argument('--closure_test_type', dest="closure_test_type", default='simple', help="Type of closure test (relative normalisation):" + '|'.join(closure_tests.keys())) - parser.add_option('--test', dest="test", action="store_true", + parser.add_argument('--test', dest="test", action="store_true", help="Just run the central measurement") - parser.add_option('--visiblePS', dest="visiblePS", action="store_true", + parser.add_argument('--visiblePS', dest="visiblePS", action="store_true", help="Unfold to visible phase space") - (options, args) = parser.parse_args() + args = parser.parse_args() # fix some of the inputs - if not options.path.endswith('/'): - options.path = options.path + '/' - if not options.input.endswith('/'): - options.input = options.input + '/' + if not args.path.endswith('/'): 
+ args.path = args.path + '/' + if not args.input.endswith('/'): + args.input = args.input + '/' - return options, args + return args @mylog.trace() def main(): # construct categories from files: - input_template = options.input + '{energy}TeV/{channel}/{variable}/{phase_space}/' + input_template = args.input + '{energy}TeV/{channel}/{variable}/{phase_space}/' phase_space = 'FullPS' - if options.visiblePS: + if args.visiblePS: phase_space = 'VisiblePS' results = {} for channel in ['electron', 'muon']: measurement_filepath = input_template.format( - energy = options.CoM, + energy = args.CoM, channel = channel, variable = variable, phase_space = phase_space, @@ -242,19 +242,19 @@ def main(): if __name__ == '__main__': set_root_defaults() - options, args = parse_options() + args = parse_arguments() # set global variables - debug = options.debug + debug = args.debug if debug: log.setLevel(log.DEBUG) - measurement_config = XSectionConfig(options.CoM) + measurement_config = XSectionConfig(args.CoM) # caching of variables for shorter access - variable = options.variable - output_path = options.path - if options.closure_test: + variable = args.variable + output_path = args.path + if args.closure_test: output_path += '/closure_test/' - output_path += options.closure_test_type + '/' + output_path += args.closure_test_type + '/' main() From e598c54c157860165d49bf53bff0bc48a977f81b Mon Sep 17 00:00:00 2001 From: Douglas Burns Date: Tue, 22 Nov 2016 14:28:53 +0000 Subject: [PATCH 16/90] --test now does central only --- dps/analysis/xsection/01_get_ttjet_normalisation.py | 3 ++- 1 file changed, 2 insertions(+), 1 deletion(-) diff --git a/dps/analysis/xsection/01_get_ttjet_normalisation.py b/dps/analysis/xsection/01_get_ttjet_normalisation.py index 99540e4a..c0b6e954 100644 --- a/dps/analysis/xsection/01_get_ttjet_normalisation.py +++ b/dps/analysis/xsection/01_get_ttjet_normalisation.py @@ -211,7 +211,8 @@ def main(): measurement_files = get_files_in_path(measurement_filepath, 
file_ending='.json') for f in sorted(measurement_files): - if options.test and not 'central' in f : continue + if args.test and 'central' not in f: continue + print('Processing file ' + f) measurement = Measurement.fromJSON(f) # for each measurement From 3aca0a36fca4e390b25944a2333bbe049564c589 Mon Sep 17 00:00:00 2001 From: Douglas Burns Date: Wed, 23 Nov 2016 10:41:32 +0000 Subject: [PATCH 17/90] Fixed debug argument --- .../BLTUnfold/produceUnfoldingHistograms.py | 22 ++++++++----------- 1 file changed, 9 insertions(+), 13 deletions(-) diff --git a/dps/analysis/BLTUnfold/produceUnfoldingHistograms.py b/dps/analysis/BLTUnfold/produceUnfoldingHistograms.py index 443a473b..f08c40e6 100644 --- a/dps/analysis/BLTUnfold/produceUnfoldingHistograms.py +++ b/dps/analysis/BLTUnfold/produceUnfoldingHistograms.py @@ -8,8 +8,6 @@ from dps.utils.file_utilities import make_folder_if_not_exists from math import trunc, exp, sqrt -from scaleFactors import * - import ROOT as ROOT ROOT.gROOT.SetBatch(True) ROOT.gROOT.ProcessLine( 'gErrorIgnoreLevel = 2001;' ) @@ -51,34 +49,32 @@ def getFileName( com, sample, measurementConfig ) : fileNames = { '13TeV' : { 'central' : measurementConfig.ttbar_category_templates_trees['central'], + 'amcatnlo' : measurementConfig.ttbar_amc_category_templates_trees, 'madgraph' : measurementConfig.ttbar_madgraph_category_templates_trees, 'powhegherwigpp' : measurementConfig.ttbar_powhegherwigpp_category_templates_trees, - 'amcatnloherwigpp' : measurementConfig.ttbar_amcatnloherwigpp_category_templates_trees, + # 'amcatnloherwigpp' : measurementConfig.ttbar_amcatnloherwigpp_category_templates_trees, + + 'scaleup' : measurementConfig.ttbar_scaleup_category_templates_trees, + 'scaledown' : measurementConfig.ttbar_scaledown_category_templates_trees, 'massdown' : measurementConfig.ttbar_mtop1695_category_templates_trees, 'massup' : measurementConfig.ttbar_mtop1755_category_templates_trees, - 'topPtSystematic' : 
measurementConfig.ttbar_category_templates_trees['central'], - 'fsrup' : measurementConfig.ttbar_fsrup_category_templates_trees, - 'fsrdown' : measurementConfig.ttbar_fsrdown_category_templates_trees, - 'isrup' : measurementConfig.ttbar_isrup_category_templates_trees, - 'isrdown' : measurementConfig.ttbar_isrdown_category_templates_trees, - 'ueup' : measurementConfig.ttbar_ueup_category_templates_trees, - 'uedown' : measurementConfig.ttbar_uedown_category_templates_trees, 'jesdown' : measurementConfig.ttbar_jesdown_category_templates_trees, 'jesup' : measurementConfig.ttbar_jesup_category_templates_trees, 'jerdown' : measurementConfig.ttbar_jerdown_category_templates_trees, 'jerup' : measurementConfig.ttbar_jerup_category_templates_trees, + 'bjetdown' : measurementConfig.ttbar_category_templates_trees['central'], 'bjetup' : measurementConfig.ttbar_category_templates_trees['central'], 'lightjetdown' : measurementConfig.ttbar_category_templates_trees['central'], 'lightjetup' : measurementConfig.ttbar_category_templates_trees['central'], + 'leptondown' : measurementConfig.ttbar_category_templates_trees['central'], 'leptonup' : measurementConfig.ttbar_category_templates_trees['central'], 'pileupUp' : measurementConfig.ttbar_category_templates_trees['central'], 'pileupDown' : measurementConfig.ttbar_category_templates_trees['central'], - 'ElectronEnUp' : measurementConfig.ttbar_category_templates_trees['central'], 'ElectronEnDown' : measurementConfig.ttbar_category_templates_trees['central'], 'MuonEnUp' : measurementConfig.ttbar_category_templates_trees['central'], @@ -119,7 +115,6 @@ def main(): # Input file name file_name = 'crap.root' if int(options.centreOfMassEnergy) == 13: - # file_name = fileNames['13TeV'][options.sample] file_name = getFileName('13TeV', options.sample, measurement_config) # if options.generatorWeight >= 0: # file_name = 'localInputFile.root' @@ -204,7 +199,6 @@ def main(): for variable in allVariablesBins: if options.debug and variable != 'HT' : 
continue - if options.sample in measurement_config.met_systematics and variable not in ['MET', 'ST', 'WPT']: continue @@ -449,6 +443,7 @@ def main(): nOfflineNotVis[channel.channelName] += offlineWeight for variable in allVariablesBins: + if options.debug and variable != 'HT' : continue if options.sample in measurement_config.met_systematics and variable not in ['MET', 'ST', 'WPT']: continue @@ -515,6 +510,7 @@ def main(): # Output histgorams to file # for variable in allVariablesBins: + if options.debug and variable != 'HT' : continue if options.sample in measurement_config.met_systematics and variable not in ['MET', 'ST', 'WPT']: continue for channel in channels: From ceb5b71b5c99ba69f6682a8ab410718065dc7beb Mon Sep 17 00:00:00 2001 From: Douglas Burns Date: Wed, 23 Nov 2016 11:02:57 +0000 Subject: [PATCH 18/90] Move a couple legacy scripts to legacy/ --- dps/{analysis => legacy}/BLTUnfold/getOutput.py | 0 dps/{analysis => legacy}/BLTUnfold/getScaleFactors.py | 0 dps/{analysis => legacy}/BLTUnfold/runJobsInteractive.py | 0 dps/{analysis => legacy}/BLTUnfold/scaleFactors.py | 0 4 files changed, 0 insertions(+), 0 deletions(-) rename dps/{analysis => legacy}/BLTUnfold/getOutput.py (100%) rename dps/{analysis => legacy}/BLTUnfold/getScaleFactors.py (100%) rename dps/{analysis => legacy}/BLTUnfold/runJobsInteractive.py (100%) rename dps/{analysis => legacy}/BLTUnfold/scaleFactors.py (100%) diff --git a/dps/analysis/BLTUnfold/getOutput.py b/dps/legacy/BLTUnfold/getOutput.py similarity index 100% rename from dps/analysis/BLTUnfold/getOutput.py rename to dps/legacy/BLTUnfold/getOutput.py diff --git a/dps/analysis/BLTUnfold/getScaleFactors.py b/dps/legacy/BLTUnfold/getScaleFactors.py similarity index 100% rename from dps/analysis/BLTUnfold/getScaleFactors.py rename to dps/legacy/BLTUnfold/getScaleFactors.py diff --git a/dps/analysis/BLTUnfold/runJobsInteractive.py b/dps/legacy/BLTUnfold/runJobsInteractive.py similarity index 100% rename from 
dps/analysis/BLTUnfold/runJobsInteractive.py rename to dps/legacy/BLTUnfold/runJobsInteractive.py diff --git a/dps/analysis/BLTUnfold/scaleFactors.py b/dps/legacy/BLTUnfold/scaleFactors.py similarity index 100% rename from dps/analysis/BLTUnfold/scaleFactors.py rename to dps/legacy/BLTUnfold/scaleFactors.py From f4ed82fa1f2204d13619c9a664b01316a65aa622 Mon Sep 17 00:00:00 2001 From: Douglas Burns Date: Wed, 23 Nov 2016 11:04:09 +0000 Subject: [PATCH 19/90] add TopEtaReweighting to list of unfolding matrices to be done, updated nombuer of jobs to be run --- dps/analysis/BLTUnfold/runJobsCrab.py | 11 ++++++++++- dps/analysis/BLTUnfold/submitBLTUnfold.description | 2 +- 2 files changed, 11 insertions(+), 2 deletions(-) diff --git a/dps/analysis/BLTUnfold/runJobsCrab.py b/dps/analysis/BLTUnfold/runJobsCrab.py index 8613dc62..2d7d7d00 100755 --- a/dps/analysis/BLTUnfold/runJobsCrab.py +++ b/dps/analysis/BLTUnfold/runJobsCrab.py @@ -8,12 +8,21 @@ '--centreOfMassEnergy 13 -s central', + '--centreOfMassEnergy 13 -s central --topPtReweighting 1', + '--centreOfMassEnergy 13 -s central --topPtReweighting -1', + '--centreOfMassEnergy 13 -s central --topEtaReweighting 1', + '--centreOfMassEnergy 13 -s central --topEtaReweighting -1', + # '--centreOfMassEnergy 13 -s amcatnlo', # '--centreOfMassEnergy 13 -s madgraph', '--centreOfMassEnergy 13 -s powhegherwigpp', # # '--centreOfMassEnergy 13 -s amcatnloherwigpp', - # # ME scale weights + # # PS scale samples + '--centreOfMassEnergy 13 -s scaleup', + '--centreOfMassEnergy 13 -s scaledown', + + # ME scale weights '--centreOfMassEnergy 13 --muFmuRWeight 1', '--centreOfMassEnergy 13 --muFmuRWeight 2', '--centreOfMassEnergy 13 --muFmuRWeight 3', diff --git a/dps/analysis/BLTUnfold/submitBLTUnfold.description b/dps/analysis/BLTUnfold/submitBLTUnfold.description index ecde298e..feee567b 100644 --- a/dps/analysis/BLTUnfold/submitBLTUnfold.description +++ b/dps/analysis/BLTUnfold/submitBLTUnfold.description @@ -15,4 +15,4 @@ 
request_memory=500 # use the ENV that is provided getenv = true -queue 138 +queue 141 From 66e1bde9d3441d5ca4b5cc62168917c3b00a4d9f Mon Sep 17 00:00:00 2001 From: Douglas Burns Date: Thu, 24 Nov 2016 08:57:36 +0000 Subject: [PATCH 20/90] 00_pickbins - resolution now initialised --- dps/analysis/xsection/00_pick_bins.py | 12 ++++++------ 1 file changed, 6 insertions(+), 6 deletions(-) diff --git a/dps/analysis/xsection/00_pick_bins.py b/dps/analysis/xsection/00_pick_bins.py index 1c2d3303..955ed732 100644 --- a/dps/analysis/xsection/00_pick_bins.py +++ b/dps/analysis/xsection/00_pick_bins.py @@ -174,10 +174,10 @@ def get_histograms( variable, options ): path_combined = '%s_combined/%s' % ( variable, histogram_name ) histogram_information = [ - {'file': config.unfolding_central_raw, - 'CoM': 13, - 'path':path_electron, - 'channel':'electron'}, + # {'file': config.unfolding_central_raw, + # 'CoM': 13, + # 'path':path_electron, + # 'channel':'electron'}, {'file':config.unfolding_central_raw, 'CoM': 13, 'path':path_muon, @@ -269,7 +269,6 @@ def get_best_binning( histogram_information, p_min, s_min, n_min, min_width, x_m def get_next_end( histograms, bin_start, bin_end, p_min, s_min, n_min, min_width, is_NJet=False ): current_bin_start = bin_start current_bin_end = bin_end - p, s = 0, 0 for gen_vs_reco_histogram in histograms: reco = asrootpy( gen_vs_reco_histogram.ProjectionX() ) gen = asrootpy( gen_vs_reco_histogram.ProjectionY( 'py', 1 ) ) @@ -299,7 +298,7 @@ def get_next_end( histograms, bin_start, bin_end, p_min, s_min, n_min, min_width # the histogram and taking the diagonal elements (which is what we want) n_gen_and_reco = gen_vs_reco_histogram.Integral( current_bin_start + 1, bin_i , current_bin_start + 1, bin_i ) - p, s = 0, 0 + p, s, res = 0, 0, 99 if n_reco > 0: p = round( n_gen_and_reco / n_reco, 3 ) if n_gen > 0: @@ -314,6 +313,7 @@ def get_next_end( histograms, bin_start, bin_end, p_min, s_min, n_min, min_width # Find slices of X and Y between bin edges and 
fit them with a Gaussian. # The StdDev of Gaussian = Resolution. # If Resolution < Bin width then we are all good + # Initiate res # Dont use resolution information on NJets if is_NJet: From 4c66fb87efffec47e9eee61f65001a6f669b0b00 Mon Sep 17 00:00:00 2001 From: Douglas Burns Date: Wed, 23 Nov 2016 08:37:50 +0000 Subject: [PATCH 21/90] Start on new 01 config --- Testing.json | 212 ++++++++ .../xsection/create_measurement2p0.py | 495 ++++++++++++++++++ 2 files changed, 707 insertions(+) create mode 100644 Testing.json create mode 100644 dps/analysis/xsection/create_measurement2p0.py diff --git a/Testing.json b/Testing.json new file mode 100644 index 00000000..2015e942 --- /dev/null +++ b/Testing.json @@ -0,0 +1,212 @@ +{ + "QCD": [ + [ + 3429.2999999999997, + 88.07190244340133 + ], + [ + 6243.7, + 131.69153351677548 + ], + [ + 4392.6, + 138.64270626325788 + ], + [ + 2654.3, + 124.37105772646625 + ], + [ + 1680.3, + 144.71672329071023 + ], + [ + 326.0, + 98.60182554090974 + ], + [ + 174.79999999999998, + 59.501344522624024 + ], + [ + 57.5, + 38.200523556621576 + ], + [ + 42.2, + 57.20489489545453 + ], + [ + 2.5, + 13.6 + ] + ], + "SingleTop": [ + [ + 395.5, + 13.152946437965905 + ], + [ + 1036.4, + 21.286850401127925 + ], + [ + 1039.0, + 21.286850401127925 + ], + [ + 1019.6, + 21.156795598577776 + ], + [ + 1341.9, + 24.26211037811839 + ], + [ + 753.1, + 18.179383927955314 + ], + [ + 462.8, + 14.289856542317002 + ], + [ + 242.60000000000002, + 10.339245620450265 + ], + [ + 344.3, + 12.37457069962429 + ], + [ + 49.8, + 6.296824596572465 + ] + ], + "TTJet": [ + [ + 17820.4, + 317.0337836887419 + ], + [ + 44894.9, + 489.2253672899638 + ], + [ + 49550.2, + 508.31590571218607 + ], + [ + 46563.7, + 450.07077221254883 + ], + [ + 59673.8, + 485.85701600367986 + ], + [ + 29404.7, + 347.3057010761557 + ], + [ + 15403.2, + 245.13771639631466 + ], + [ + 7162.3, + 164.60938612363512 + ], + [ + 6297.299999999999, + 200.89544544364367 + ], + [ + 348.9, + 82.56791144264217 + ] + ], + 
"V+Jets": [ + [ + 1049.7, + 264.28577335906675 + ], + [ + 3681.9, + 407.02122794763426 + ], + [ + 3277.2, + 424.84757266577384 + ], + [ + 2485.4, + 365.9343247086832 + ], + [ + 1732.0, + 387.4737926621619 + ], + [ + 1404.2, + 280.5469122981039 + ], + [ + 863.2, + 198.64684744541 + ], + [ + 202.4, + 133.6726224774542 + ], + [ + 787.2, + 171.55395652680238 + ], + [ + 137.8, + 75.45130880243232 + ] + ], + "data": [ + [ + 22695.0, + 150.66947268773458 + ], + [ + 55857.0, + 236.30971203063152 + ], + [ + 58259.0, + 241.3578256448297 + ], + [ + 52723.0, + 229.6385420612141 + ], + [ + 64428.0, + 253.807998297926 + ], + [ + 31888.0, + 178.55220525101336 + ], + [ + 16904.0, + 129.97557462846623 + ], + [ + 7665.0, + 87.54164723147491 + ], + [ + 7471.0, + 86.4916759000541 + ], + [ + 539.0, + 30.076070222022025 + ] + ] +} \ No newline at end of file diff --git a/dps/analysis/xsection/create_measurement2p0.py b/dps/analysis/xsection/create_measurement2p0.py new file mode 100644 index 00000000..9be9f797 --- /dev/null +++ b/dps/analysis/xsection/create_measurement2p0.py @@ -0,0 +1,495 @@ +''' + Translates the current config (for a given centre-of-mass energy) + into JSON configs. 
The configs will be written to + config/measurements/background_subtraction/TeV/ + + Usage: + python src/cross_section_measurement/create_measurement.py -c + + Example: + python src/cross_section_measurement/create_measurement.py -c +''' +from argparse import ArgumentParser +from dps.config.xsection import XSectionConfig +from dps.config import variable_binning +from dps.utils.input import Input +from dps.utils.logger import log +from copy import deepcopy +from dps.utils.measurement import Measurement, Systematic + +# define logger for this module +create_measurement_log = log["01b_get_ttjet_normalisation"] +cml = create_measurement_log # alias + + + + + +@cml.trace() +def main(): + parser = ArgumentParser(__doc__) + parser.add_argument( + "-c", + "--centre-of-mass-energy", + dest="CoM", + default=13, + type=int, + help="set the centre of mass energy for analysis. Default = 13 [TeV]" + ) + parser.add_argument( + '-d', + '--debug', + dest="debug", + action="store_true", + help="Print the debug information" + ) + args = parser.parse_args() + centre_of_mass_energy = args.CoM + if args.debug: log.setLevel(log.DEBUG) + + + measurement_config = XSectionConfig(centre_of_mass_energy) + categories = measurement_config.normalisation_systematics + print categories + +# for variable in measurement_config.variables: +# for category in categories: +# for channel in ['electron', 'muon']: +# if channel == 'electron' and (category == 'Muon_down' or category == 'Muon_up'): +# continue +# elif channel == 'muon' and (category == 'Electron_down' or category == 'Electron_up'): +# continue +# # create_measurement( +# # centre_of_mass_energy, category, variable, channel, +# # phase_space='FullPS', norm_method='background_subtraction') +# # and the visible phase space +# create_measurement( +# centre_of_mass_energy, category, variable, channel, +# phase_space='VisiblePS', norm_method='background_subtraction') + + +# @cml.trace() +# def create_measurement(com, category, variable, channel, 
phase_space, norm_method): +# if com == 13: +# # exclude non existing systematics +# if 'VJets' in category and 'scale' in category: +# print('Excluding {0} for now'.format(category)) +# return +# config = XSectionConfig(com) +# met_type = get_met_type(category, config) +# should_not_run_systematic = category in config.met_systematics_suffixes and variable in config.variables_no_met and not 'JES' in category and not 'JER' in category +# if should_not_run_systematic: +# # no MET uncertainty on HT (but JES and JER of course) +# return + +# m = None +# if category == 'central': +# m = Measurement(category) +# else: +# vjet_systematics = [config.vjets_theory_systematic_prefix + +# systematic for systematic in config.generator_systematics] +# if category in config.categories_and_prefixes.keys() or \ +# category in config.met_systematics_suffixes or \ +# category in vjet_systematics: +# m = Systematic(category, +# stype=Systematic.SHAPE, +# affected_samples=config.samples) +# elif category in config.rate_changing_systematics_names: +# m = config.rate_changing_systematics_values[category] + +# elif category == 'QCD_shape': +# m = Systematic(category, +# stype=Systematic.SHAPE, +# affected_samples=['QCD'], +# ) + +# m.setVariable(variable) +# m.setCentreOfMassEnergy(com) +# m.setChannel(channel) +# m.setMETType(met_type) + +# inputs = { +# 'channel': config.analysis_types[channel], +# 'met_type': met_type, +# 'selection': 'Ref selection', +# 'btag': config.translate_options['2m'], # 2 or more +# 'energy': com, +# 'variable': variable, +# 'category': category, +# 'phase_space': phase_space, +# 'norm_method': norm_method, +# 'lepton': channel.title(), +# } +# variable_template = config.variable_path_templates[ +# variable].format(**inputs) + +# template_category = category +# if category == 'QCD_shape' or category in config.rate_changing_systematics_names: +# template_category = 'central' +# if category in [config.vjets_theory_systematic_prefix + systematic for systematic in 
config.generator_systematics]: +# template_category = 'central' + +# m.addSample( +# 'TTJet', +# False, +# input=create_input( +# config, 'TTJet', variable, template_category, channel, +# variable_template, phase_space=phase_space, measurement=m, +# ), +# ) +# m.addSample( +# 'V+Jets', +# False, +# input=create_input( +# config, 'V+Jets', variable, template_category, channel, +# variable_template, phase_space=phase_space, measurement=m, +# ), +# ) +# m.addSample( +# 'SingleTop', +# False, +# input=create_input( +# config, 'SingleTop', variable, template_category, channel, +# variable_template, phase_space=phase_space, measurement=m, +# ), +# ) +# m.addSample( +# 'QCD', +# False, +# input=create_input( +# config, 'QCD', variable, template_category, channel, +# variable_template, phase_space=phase_space, measurement=m, +# ), +# ) +# variable_template_data = variable_template.replace( +# met_type, config.translate_options['type1']) + +# m.addSample( +# 'data', +# False, +# input=create_input( +# config, 'data', variable, template_category, channel, +# variable_template_data, phase_space=phase_space, measurement=m, +# ), +# ) + +# m_qcd = Measurement(category) +# m_qcd.setVariable(variable) +# m_qcd.setCentreOfMassEnergy(com) + +# qcd_template = get_qcd_template(config, variable, category, channel) + +# # we want "measurement = m" here since all rate systematics should apply +# # to the control regions as well +# m_qcd.addSample( +# 'TTJet', +# False, +# input=create_input( +# config, 'TTJet', variable, template_category, channel, +# qcd_template, phase_space=phase_space, measurement=m, +# ), +# ) +# m_qcd.addSample( +# 'V+Jets', +# False, +# input=create_input( +# config, 'V+Jets', variable, template_category, channel, +# qcd_template, phase_space=phase_space, measurement=m, +# ), +# ) +# m_qcd.addSample( +# 'SingleTop', +# False, +# input=create_input( +# config, 'SingleTop', variable, template_category, channel, +# qcd_template, phase_space=phase_space, 
measurement=m, +# ), +# ) +# m_qcd.addSample( +# 'QCD', +# False, +# input=create_input( +# config, 'QCD', variable, template_category, channel, +# qcd_template, phase_space=phase_space, measurement=m, +# ), +# ) +# m_qcd.addSample( +# 'data', +# False, +# input=create_input( +# config, 'data', variable, template_category, channel, +# qcd_template, phase_space=phase_space, measurement=m, +# ), +# ) + +# m.addShapeForSample('QCD', m_qcd, False) +# norm_qcd = deepcopy(m_qcd) +# # we want QCD shape and normalisation to be separate +# if category == 'QCD_shape': +# for sample in norm_qcd.samples.keys(): +# tree = norm_qcd.samples[sample]['input'].tree_name +# if channel == 'electron': +# tree = tree.replace(config.electron_control_region_systematic, +# config.electron_control_region) +# else: +# tree = tree.replace(config.muon_control_region_systematic, +# config.muon_control_region) +# norm_qcd.samples[sample]['input'].tree_name = tree +# if 'QCD_cross_section' in category: +# for sample in norm_qcd.samples.keys(): +# tree = norm_qcd.samples[sample]['input'].tree_name +# if channel == 'electron': +# tree = tree.replace(config.electron_control_region, +# config.electron_control_region_systematic) +# else: +# tree = tree.replace(config.muon_control_region, +# config.muon_control_region_systematic) +# norm_qcd.samples[sample]['input'].tree_name = tree + +# m.addNormForSample('QCD', norm_qcd, False) + +# if category in [config.vjets_theory_systematic_prefix + systematic for systematic in config.generator_systematics]: +# v_template_category = category.replace( +# config.vjets_theory_systematic_prefix, '') +# m_vjets = Measurement(category) +# m_vjets.setVariable(variable) +# m_vjets.setCentreOfMassEnergy(com) +# m_vjets.addSample( +# 'V+Jets', +# False, +# input=create_input( +# config, 'V+Jets', variable, v_template_category, +# channel, +# variable_template, +# config.generator_systematic_vjets_templates[ +# v_template_category]), +# phase_space=phase_space, 
measurement=m, +# ) +# m.addShapeForSample('V+Jets', m_vjets, False) + +# inputs['channel'] = channel +# base_path = 'config/measurements/{norm_method}/{energy}TeV/' +# base_path += '{channel}/{variable}/{phase_space}/' +# if category == 'central': +# path = base_path + '{category}.json' +# m.toJSON(path.format(**inputs)) +# else: +# if m.type == Systematic.SHAPE: +# inputs['type'] = 'shape_systematic' +# else: +# inputs['type'] = 'rate_systematic' +# if category in config.met_systematics_suffixes and category not in ['JES_up', 'JES_down', 'JER_up', 'JER_down']: +# inputs['category'] = met_type +# path = base_path + '{category}_{type}.json' +# m.toJSON(path.format(**inputs)) + + +# @cml.trace() +# def get_met_type(category, config): +# met_type = config.translate_options['type1'] +# if category == 'JES_up': +# met_type += 'JetEnUp' +# elif category == 'JES_down': +# met_type += 'JetEnDown' +# elif category == 'JER_up': +# met_type += 'JetResUp' +# elif category == 'JER_down': +# met_type += 'JetResDown' + +# isJetSystematic = 'JetEn' in category or 'JetRes' in category +# isJetSystematic = isJetSystematic or 'JES' in category +# isJetSystematic = isJetSystematic or 'JER' in category + +# if category in config.met_systematics_suffixes: +# # already done them +# if not isJetSystematic: +# met_type = met_type + category + +# return met_type + + +# @cml.trace() +# def get_file(config, sample, category, channel): +# use_trees = True if config.centre_of_mass_energy == 13 else False +# if channel == 'electron': +# qcd_template = config.electron_QCD_MC_category_templates[category] +# data_template = config.data_file_electron +# qcd_template_tree = config.electron_QCD_MC_category_templates_trees[ +# category] +# data_template_tree = config.data_file_electron_trees +# else: +# qcd_template = config.muon_QCD_MC_category_templates[category] +# data_template = config.data_file_muon +# qcd_template_tree = config.muon_QCD_MC_category_templates_trees[ +# category] +# 
data_template_tree = config.data_file_muon_trees + +# tree_files = { +# 'TTJet': config.ttbar_category_templates_trees[category], +# 'V+Jets': config.VJets_category_templates_trees[category], +# 'SingleTop': config.SingleTop_category_templates_trees[category], +# 'QCD': qcd_template_tree, +# 'data': data_template_tree +# } +# files = { +# 'TTJet': config.ttbar_category_templates[category], +# 'V+Jets': config.VJets_category_templates[category], +# 'SingleTop': config.SingleTop_category_templates[category], +# 'QCD': qcd_template, +# 'data': data_template, +# } + +# if use_trees: +# return tree_files[sample] +# else: +# return files[sample] + + +# @cml.trace() +# def get_qcd_template(config, variable, category, channel): +# qcd_inputs = { +# 'channel': config.analysis_types[channel], +# 'met_type': config.translate_options['type1'], # always central MET +# 'selection': 'Ref selection', +# 'btag': config.translate_options['2m'], # 2 or more +# 'energy': config.centre_of_mass_energy, +# 'variable': variable, +# 'category': 'central', # always central +# 'lepton': channel.title(), +# } + +# qcd_template = config.variable_path_templates[ +# variable].format(**qcd_inputs) +# if channel == 'electron': +# qcd_template = qcd_template.replace( +# 'Ref selection', config.electron_control_region) +# if category == 'QCD_shape': +# qcd_template = qcd_template.replace( +# config.electron_control_region, +# config.electron_control_region_systematic) +# else: +# qcd_template = qcd_template.replace( +# 'Ref selection', config.muon_control_region) +# if category == 'QCD_shape': +# qcd_template = qcd_template.replace( +# config.muon_control_region, +# config.muon_control_region_systematic) + +# return qcd_template + + +# @cml.trace() +# def create_input(config, sample, variable, category, channel, template, +# input_file=None, phase_space=None, **kwargs): +# tree, branch, hist = None, None, None +# selection = '1' +# if not input_file: +# input_file = get_file(config, sample, 
category, channel) + +# if config.centre_of_mass_energy == 13: +# branch = template.split('/')[-1] +# tree = template.replace('/' + branch, '') + +# if 'absolute_eta' in branch: +# branch = 'abs(lepton_eta)' + +# if sample != 'data': +# if category in config.met_systematics_suffixes and not variable in config.variables_no_met: +# branch = template.split('/')[-1] +# branch += '_METUncertainties[%s]' % config.met_systematics[ +# category] + +# if 'JES_down' in category or 'JES_up' in category or 'JER_down' in category or 'JER_up' in category: +# tree += config.categories_and_prefixes[category] + +# if not sample == 'data': +# if 'JES_down' in category: +# input_file = input_file.replace('tree', 'minusJES_tree') +# elif 'JES_up' in category: +# input_file = input_file.replace('tree', 'plusJES_tree') +# elif 'JER_up' in category: +# input_file = input_file.replace('tree', 'plusJER_tree') +# elif 'JER_down' in category: +# input_file = input_file.replace('tree', 'minusJER_tree') + +# selection = '{0} >= 0'.format(branch) +# if variable == 'abs_lepton_eta': +# selection += ' && {0} <= 3'.format(branch) +# else: +# hist = template + +# lumi_scale = config.luminosity_scale +# scale = 1. + +# m = kwargs['measurement'] +# if m.type == Systematic.RATE: +# if 'luminosity' in m.name: +# lumi_scale = lumi_scale * m.scale +# else: +# if sample in m.affected_samples: +# scale = m.scale +# if sample == 'data': # data is not scaled in any way +# lumi_scale = 1. +# scale = 1. 
+ +# edges = variable_binning.reco_bin_edges_full[variable] +# if phase_space == 'VisiblePS': +# edges = variable_binning.reco_bin_edges_vis[variable] + +# weight_branches = [] +# if sample == 'data': +# weight_branches.append('1') +# else: +# weight_branches.append('EventWeight') + +# if 'PileUp' not in category: +# weight_branches.append('PUWeight') +# elif category == 'PileUp_up': +# weight_branches.append('PUWeight_up') +# elif category == 'PileUp_down': +# weight_branches.append('PUWeight_down') +# else: +# weight_branches.append('1') + +# if category == 'BJet_down': +# weight_branches.append('BJetDownWeight') +# elif category == 'BJet_up': +# weight_branches.append('BJetUpWeight') +# elif category == 'LightJet_down': +# weight_branches.append('LightJetDownWeight') +# elif category == 'LightJet_up': +# weight_branches.append('LightJetUpWeight') +# else: +# weight_branches.append('BJetWeight') + +# if not 'QCD' in tree: +# if channel == 'muon': +# if category == 'Muon_down': +# weight_branches.append('MuonDown') +# elif category == 'Muon_up': +# weight_branches.append('MuonUp') +# else: +# weight_branches.append('MuonEfficiencyCorrection') +# elif channel == 'electron': +# if category == 'Electron_down': +# weight_branches.append('ElectronDown') +# elif category == 'Electron_up': +# weight_branches.append('ElectronUp') +# else: +# weight_branches.append('ElectronEfficiencyCorrection') + +# i = Input( +# input_file=input_file, +# hist=hist, +# tree=tree, +# branch=branch, +# selection=selection, +# bin_edges=edges, +# lumi_scale=lumi_scale, +# scale=scale, +# weight_branches=weight_branches, +# ) +# return i + +if __name__ == '__main__': + main() From 6e53db492c59e566294431ec0275017094677751 Mon Sep 17 00:00:00 2001 From: Douglas Burns Date: Thu, 24 Nov 2016 08:12:35 +0000 Subject: [PATCH 22/90] Srtart reworking the normalisation and measurement script --- .../xsection/01_get_ttjet_normalisation2.py | 128 ++++++++++++++++++ dps/utils/measurement2.py | 120 
++++++++++++++++ 2 files changed, 248 insertions(+) create mode 100644 dps/analysis/xsection/01_get_ttjet_normalisation2.py create mode 100644 dps/utils/measurement2.py diff --git a/dps/analysis/xsection/01_get_ttjet_normalisation2.py b/dps/analysis/xsection/01_get_ttjet_normalisation2.py new file mode 100644 index 00000000..8a9ff797 --- /dev/null +++ b/dps/analysis/xsection/01_get_ttjet_normalisation2.py @@ -0,0 +1,128 @@ +from __future__ import division +from argparse import ArgumentParser +from dps.utils.logger import log +from dps.config.xsection import XSectionConfig +from dps.utils.file_utilities import write_data_to_JSON, get_files_in_path, make_folder_if_not_exists, read_data_from_JSON +from dps.utils.hist_utilities import clean_control_region, hist_to_value_error_tuplelist, fix_overflow +from dps.utils.Calculation import combine_complex_results +from dps.utils.measurement2 import Measurement +from dps.utils.ROOT_utils import set_root_defaults + +# define logger for this module +mylog = log["01b_get_ttjet_normalisation"] + +class TTJetNormalisation(object): + ''' + Determines the normalisation for top quark pair production. + Unless stated otherwise all templates and (initial) normalisations + are taken from simulation, except for QCD where the template is + extracted from data. 
+ + Subtracts the known backgrounds from data to obtain TTJet template + and normalisation + ''' + @mylog.trace() + def __init__(self, measurement_config): + self.config = measurement_config + # self.variable = measurement.variable + # self.category = measurement.name + # self.channel = measurement.channel + # self.phase_space = phase_space + + self.have_normalisation = False + # normalisation for current config + self.normalisation = {} + + # @mylog.trace() + # def calculate_normalisation(self): + # ''' + # ''' + # # normalisation already calculated + # if self.have_normalisation: return + + + # histograms = self.measurement.histograms + + # for sample, hist in histograms.items(): + # hist = fix_overflow(hist) + # histograms[sample] = hist + # self.normalisation[sample] = self.initial_normalisation[sample] + + # self.background_subtraction(histograms) + + # # next, let's round all numbers (they are event numbers after all) + # for sample, values in self.normalisation.items(): + # new_values = [(round(v, 1), round(e, 1)) for v, e in values] + # self.normalisation[sample] = new_values + + # self.have_normalisation = True + + # @mylog.trace() + # def background_subtraction(self, histograms): + # ttjet_hist = clean_control_region( + # histograms, + # subtract=['QCD', 'V+Jets', 'SingleTop'] + # ) + # self.normalisation['TTJet'] = hist_to_value_error_tuplelist(ttjet_hist) + + # @mylog.trace() + # def save(self): + # # If normalisation hasnt been calculated - then go calculate it! 
+ # if not self.have_normalisation: + # self.calculate_normalisation() + + # file_template = '{type}_{channel}.txt' + # output_folder = '' + + # write_data_to_JSON( + # self.normalisation, + # output_folder + file_template.format(type='normalisation', channel=self.channel) + # ) + # return + +def main(): + ''' + 1 - Create config file reading in templates + 2 - Create 'jobs' for each config + 3 - Read in config + 4 - Differentiate between Syst and Central + 5 - Work in QCD from data + ''' + results = {} + + # construct categories from files: + input_template = 'TESTING/' + + # Create measuremewnt_filepath + measurement_filepath = input_template + # Loop over channels + measurement_files = get_files_in_path(measurement_filepath, file_ending='.json') + + for f in sorted(measurement_files): + print('Processing file ' + f) + # Read in Measurement JSON + config = read_data_from_JSON(f) + # print config + # Create Measurement Class using JSON + measurement = Measurement(config) + + +def parse_arguments(): + parser = ArgumentParser(__doc__) + parser.add_argument("-v", "--variable", dest="variable", default='HT', + help="set the variable to analyse (MET, HT, ST, MT, WPT). Default is MET.") + parser.add_argument("-c", "--centre-of-mass-energy", dest="CoM", default=13, type=int, + help="set the centre of mass energy for analysis. Default = 13 [TeV]") + parser.add_argument('--visiblePS', dest="visiblePS", action="store_true", + help="Unfold to visible phase space") + args = parser.parse_args() + return args + +if __name__ == '__main__': + set_root_defaults() + args = parse_arguments() + measurement_config = XSectionConfig(args.CoM) + main() + + + diff --git a/dps/utils/measurement2.py b/dps/utils/measurement2.py new file mode 100644 index 00000000..8372c5c0 --- /dev/null +++ b/dps/utils/measurement2.py @@ -0,0 +1,120 @@ +''' + Provides the classes Measurement and Systematic +''' +from __future__ import division +from . 
import log +import copy +from rootpy.io.file import Directory +from dps.utils.ROOT_utils import get_histogram_from_file +from dps.utils.file_utilities import make_folder_if_not_exists,\ + write_data_to_JSON, read_data_from_JSON +from dps.utils.input import Input +from dps.utils.hist_utilities import clean_control_region +# define logger for this module +meas_log = log["dps.utils.measurement"] + +class Measurement(): + ''' + The Measurement class combines files and histogram paths into + one container. It also allows to provide separate shapes for the + histograms while using the normalisation from the initial set. + ''' + @meas_log.trace() + def __init__(self, measurement): + self.measurement = measurement + self.histograms = {} + self.variable = None + self.com = None + self.channel = None + self.name = None + self.central = False + self.samples = {} + self.setFromConfig() + + def setFromConfig(self): + self.variable = self.measurement["variable"] + self.com = self.measurement["com"] + self.channel = self.measurement["channel"] + self.samples = self.measurement["samples"] + self.name = self.measurement["name"] + # Is this central or a systematic? + if "central" in self.name: + self.central = True + + for sample, histogram_info in self.samples.iteritems(): + print sample + print histogram_info + self.histograms[sample] = self.get_measurement_histograms(histogram_info) + + def get_measurement_histograms(d_hist_info): + ''' + Takes basic histogram info and reutrns histo. 
+ ''' + print d_hist_info + return + + + # @staticmethod + # def fromDict(d): + # m = None + # if d['class'] == 'dps.utils.measurement.Measurement': + # m = Measurement(d['name']) + # if d['class'] == 'dps.utils.measurement.Systematic': + # m = Systematic(d['name'], d['type'], + # affected_samples=d['affected_samples'], scale=d['scale']) + # m.setVariable(d['variable']) + # m.setCentreOfMassEnergy(int(d['centre_of_mass_energy'])) + # m.setChannel(d['channel']) + # m.setMETType(d['met_type']) + # for sample, i in d['samples'].items(): + # if i.has_key('input'): + # inp = Input(**i['input']) + # m.addSample(sample, read=True, input=inp) + # else: + # m.addSample(sample, i['file'], i['hist'], read=True) + # for shape, obj in d['shapes'].items(): + # m.addShapeForSample(shape, Measurement.fromDict(obj), read=True) + # for norm, obj in d['norms'].items(): + # m.addNormForSample( + # norm, Measurement.fromDict(obj), read=True) + # return m + + + + + + +class Systematic(Measurement): + + ''' + The Systematic class is an extension of the Measurement class. + It allows to implement systematic specific functionality + (e.g. rate systematics). 
+ ''' + + SHAPE = 10 + RATE = 20 + + @meas_log.trace() + def __init__(self, name, + stype=SHAPE, + affected_samples=[], + scale=1.): + ''' + Constructor + ''' + Measurement.__init__(self, name) + self.type = stype + + self.affected_samples = affected_samples + + self.scale = scale + + @meas_log.trace() + def toDict(self): + output = Measurement.toDict(self) + output['type'] = self.type + output['affected_samples'] = self.affected_samples + output['scale'] = self.scale + + return output From 22b794fb0035e24e62ed36ae304d8e5b3f775d92 Mon Sep 17 00:00:00 2001 From: Douglas Burns Date: Thu, 24 Nov 2016 08:15:49 +0000 Subject: [PATCH 23/90] Test config --- TESTING/test_config.json | 35 +++++++++++++++++++++++++++++++++++ 1 file changed, 35 insertions(+) create mode 100644 TESTING/test_config.json diff --git a/TESTING/test_config.json b/TESTING/test_config.json new file mode 100644 index 00000000..693067da --- /dev/null +++ b/TESTING/test_config.json @@ -0,0 +1,35 @@ +{ + "com": 13, + "channel": "electron", + "variable": "HT", + "name": "central", + "samples": { + "QCD": { + "bin_edges": [0.0, 17.0, 34.0, 49.0, 64.0, 91.5, 119.0, 151.0, 183.0, 343.0, 503.0], + "branch": "HT", + "input_file": "/hdfs/TopQuarkGroup/ec6821/0.0.10/atOutput/combined//QCD_Electron_tree.root", + "lumi_scale": 1.0, + "scale": 1.0, + "tree": "TTbar_plus_X_analysis/EPlusJets/Ref selection/FitVariables", + "weight_branches": ["EventWeight", "PUWeight", "BJetWeight"] + }, + "TTBar": { + "bin_edges": [0.0, 17.0, 34.0, 49.0, 64.0, 91.5, 119.0, 151.0, 183.0, 343.0, 503.0], + "branch": "HT", + "input_file": "/hdfs/TopQuarkGroup/ec6821/0.0.10/atOutput/combined/TTJets_PowhegPythia8_tree.root", + "lumi_scale": 1.0, + "scale": 1.0, + "tree": "TTbar_plus_X_analysis/EPlusJets/Ref selection/FitVariables", + "weight_branches": ["EventWeight", "PUWeight", "BJetWeight"] + }, + "data": { + "bin_edges": [0.0, 17.0, 34.0, 49.0, 64.0, 91.5, 119.0, 151.0, 183.0, 343.0, 503.0], + "branch": "HT", + "input_file": 
"/hdfs/TopQuarkGroup/ec6821/0.0.10/atOutput/combined/data_electron_tree.root", + "lumi_scale": 1.0, + "scale": 1.0, + "tree": "TTbar_plus_X_analysis/EPlusJets/Ref selection/FitVariables", + "weight_branches": ["E1"] + } + } +} \ No newline at end of file From 92d8bb1668fada36e5e938e191fa915b640ceafb Mon Sep 17 00:00:00 2001 From: Douglas Burns Date: Thu, 24 Nov 2016 12:55:14 +0000 Subject: [PATCH 24/90] Very basic normalisation calc working --- .../xsection/01_get_ttjet_normalisation2.py | 4 + dps/utils/measurement2.py | 141 ++++++++++++++++-- 2 files changed, 134 insertions(+), 11 deletions(-) diff --git a/dps/analysis/xsection/01_get_ttjet_normalisation2.py b/dps/analysis/xsection/01_get_ttjet_normalisation2.py index 8a9ff797..9b095186 100644 --- a/dps/analysis/xsection/01_get_ttjet_normalisation2.py +++ b/dps/analysis/xsection/01_get_ttjet_normalisation2.py @@ -105,6 +105,10 @@ def main(): # print config # Create Measurement Class using JSON measurement = Measurement(config) + print measurement.histograms + # measurement.qcd_from_data() + measurement.calculate_normalisation() + measurement.save() def parse_arguments(): diff --git a/dps/utils/measurement2.py b/dps/utils/measurement2.py index 8372c5c0..7dbd596e 100644 --- a/dps/utils/measurement2.py +++ b/dps/utils/measurement2.py @@ -4,12 +4,9 @@ from __future__ import division from . 
import log import copy -from rootpy.io.file import Directory -from dps.utils.ROOT_utils import get_histogram_from_file -from dps.utils.file_utilities import make_folder_if_not_exists,\ - write_data_to_JSON, read_data_from_JSON +from dps.utils.file_utilities import make_folder_if_not_exists, read_data_from_JSON from dps.utils.input import Input -from dps.utils.hist_utilities import clean_control_region +from dps.utils.hist_utilities import hist_to_value_error_tuplelist # define logger for this module meas_log = log["dps.utils.measurement"] @@ -23,10 +20,12 @@ class Measurement(): def __init__(self, measurement): self.measurement = measurement self.histograms = {} + self.normalisation = {} self.variable = None self.com = None self.channel = None self.name = None + self.is_normalised = False self.central = False self.samples = {} self.setFromConfig() @@ -42,17 +41,137 @@ def setFromConfig(self): self.central = True for sample, histogram_info in self.samples.iteritems(): - print sample - print histogram_info - self.histograms[sample] = self.get_measurement_histograms(histogram_info) + self.histograms[sample] = self.return_histogram(histogram_info) + print hist_to_value_error_tuplelist(self.histograms[sample]) + return - def get_measurement_histograms(d_hist_info): + def return_histogram(self, d_hist_info, ignoreUnderflow=True): + ''' + Takes basic histogram info and returns histo. ''' - Takes basic histogram info and reutrns histo. 
+ from rootpy.io.file import File + from rootpy.plotting import Hist + from dps.utils.hist_utilities import fix_overflow + + f = d_hist_info['input_file'] + tree = d_hist_info['tree'] + var = d_hist_info['branch'] + bins = d_hist_info['bin_edges'] + weights = d_hist_info['weight_branches'] + weights = "*".join(weights) + + root_file = File( f ) + root_tree = root_file.Get( tree ) + + root_histogram = Hist( bins ) + root_tree.Draw(var, weights, hist = root_histogram) + + # When a tree is filled with a dummy variable, it will end up in the underflow, so ignore it + if ignoreUnderflow: + root_histogram.SetBinContent(0, 0) + root_histogram.SetBinError(0,0) + + # Fix overflow (Moves entries from overflow bin into last bin i.e. last bin not | | but |--> ) + root_histogram = fix_overflow(root_histogram) + root_file.Close() + return root_histogram + + def calculate_normalisation(self): ''' - print d_hist_info + ''' + # normalisation already calculated + if self.is_normalised: return + + histograms = self.histograms + self.background_subtraction(histograms) + + # next, let's round all numbers (they are event numbers after all) + for sample, values in self.normalisation.items(): + new_values = [(round(v, 0), round(e, 0)) for v, e in values] + self.normalisation[sample] = new_values + print self.normalisation + + self.is_normalised = True + return + + def background_subtraction(self, histograms): + from dps.utils.hist_utilities import clean_control_region + + ttjet_hist = clean_control_region( + histograms, + # subtract=['QCD', 'V+Jets', 'SingleTop'] + subtract=['QCD'] + ) + self.normalisation['TTJet'] = hist_to_value_error_tuplelist(ttjet_hist) return + def save(self): + from dps.utils.file_utilities import write_data_to_JSON + # If normalisation hasnt been calculated - then go calculate it! 
+ if not self.is_normalised: self.calculate_normalisation() + + file_template = '{type}_{channel}.txt' + output_folder = '' + + write_data_to_JSON( + self.normalisation, + output_folder + file_template.format(type='normalisation', channel=self.channel) + ) + return + + + + + + + +# def get_histograms_from_trees( +# trees = [], +# branch = 'var', +# weightBranch = 'EventWeight', +# selection = '1', +# files = {}, +# verbose = False, +# nBins = 40, +# xMin = 0, +# xMax = 100, +# ignoreUnderflow = True, +# ): +# histograms = {} +# nHistograms = 0 + +# # Setup selection and weight string for ttree draw +# weightAndSelection = '( %s ) * ( %s )' % ( weightBranch, selection ) + +# for sample, input_file in files.iteritems(): +# root_file = File( input_file ) + +# get_tree = root_file.Get +# histograms[sample] = {} + +# for tree in trees: + +# tempTree = tree +# if 'data' in sample and ( 'Up' in tempTree or 'Down' in tempTree ) : +# tempTree = tempTree.replace('_'+tempTree.split('_')[-1],'') + +# currentTree = get_tree( tempTree ) +# root_histogram = Hist( nBins, xMin, xMax) +# currentTree.Draw(branch, weightAndSelection, hist = root_histogram) +# if not is_valid_histogram( root_histogram, tree, input_file): +# return + +# # When a tree is filled with a dummy variable, it will end up in the underflow, so ignore it +# if ignoreUnderflow: +# root_histogram.SetBinContent(0, 0) +# root_histogram.SetBinError(0,0) + +# gcd() +# nHistograms += 1 +# histograms[sample][tree] = root_histogram.Clone() + +# root_file.Close() +# return histograms # @staticmethod # def fromDict(d): From 4d22335878f380351a46cd919e024cb6290517a5 Mon Sep 17 00:00:00 2001 From: Douglas Burns Date: Fri, 25 Nov 2016 10:23:21 +0000 Subject: [PATCH 25/90] Add in datadriven qcd --- .../xsection/01_get_ttjet_normalisation2.py | 85 ++++---- dps/utils/measurement2.py | 205 ++++++------------ 2 files changed, 112 insertions(+), 178 deletions(-) diff --git a/dps/analysis/xsection/01_get_ttjet_normalisation2.py 
b/dps/analysis/xsection/01_get_ttjet_normalisation2.py index 9b095186..f4e27fd2 100644 --- a/dps/analysis/xsection/01_get_ttjet_normalisation2.py +++ b/dps/analysis/xsection/01_get_ttjet_normalisation2.py @@ -13,13 +13,13 @@ class TTJetNormalisation(object): ''' - Determines the normalisation for top quark pair production. - Unless stated otherwise all templates and (initial) normalisations - are taken from simulation, except for QCD where the template is - extracted from data. + Determines the normalisation for top quark pair production. + Unless stated otherwise all templates and (initial) normalisations + are taken from simulation, except for QCD where the template is + extracted from data. - Subtracts the known backgrounds from data to obtain TTJet template - and normalisation + Subtracts the known backgrounds from data to obtain TTJet template + and normalisation ''' @mylog.trace() def __init__(self, measurement_config): @@ -67,7 +67,7 @@ def __init__(self, measurement_config): # @mylog.trace() # def save(self): - # # If normalisation hasnt been calculated - then go calculate it! + # # If normalisation hasnt been calculated - then go calculate it! 
# if not self.have_normalisation: # self.calculate_normalisation() @@ -78,49 +78,50 @@ def __init__(self, measurement_config): # self.normalisation, # output_folder + file_template.format(type='normalisation', channel=self.channel) # ) - # return + # return def main(): - ''' - 1 - Create config file reading in templates - 2 - Create 'jobs' for each config - 3 - Read in config - 4 - Differentiate between Syst and Central - 5 - Work in QCD from data - ''' - results = {} - - # construct categories from files: - input_template = 'TESTING/' - - # Create measuremewnt_filepath - measurement_filepath = input_template - # Loop over channels - measurement_files = get_files_in_path(measurement_filepath, file_ending='.json') - - for f in sorted(measurement_files): - print('Processing file ' + f) - # Read in Measurement JSON - config = read_data_from_JSON(f) - # print config - # Create Measurement Class using JSON - measurement = Measurement(config) - print measurement.histograms + ''' + 1 - Create config file reading in templates + 2 - Create 'jobs' for each config + 3 - Read in config + 4 - Differentiate between Syst and Central + 5 - Work in QCD from data + ''' + results = {} + + # construct categories from files: + input_template = 'TESTING/' + + # Create measuremewnt_filepath + measurement_filepath = input_template + # Loop over channels + measurement_files = get_files_in_path(measurement_filepath, file_ending='.json') + print measurement_files + + for f in sorted(measurement_files): + print('Processing file ' + f) + # Read in Measurement JSON + config = read_data_from_JSON(f) + # print config + # Create Measurement Class using JSON + measurement = Measurement(config) # measurement.qcd_from_data() measurement.calculate_normalisation() measurement.save() + break def parse_arguments(): - parser = ArgumentParser(__doc__) - parser.add_argument("-v", "--variable", dest="variable", default='HT', - help="set the variable to analyse (MET, HT, ST, MT, WPT). 
Default is MET.") - parser.add_argument("-c", "--centre-of-mass-energy", dest="CoM", default=13, type=int, - help="set the centre of mass energy for analysis. Default = 13 [TeV]") - parser.add_argument('--visiblePS', dest="visiblePS", action="store_true", - help="Unfold to visible phase space") - args = parser.parse_args() - return args + parser = ArgumentParser(__doc__) + parser.add_argument("-v", "--variable", dest="variable", default='HT', + help="set the variable to analyse (MET, HT, ST, MT, WPT). Default is MET.") + parser.add_argument("-c", "--centre-of-mass-energy", dest="CoM", default=13, type=int, + help="set the centre of mass energy for analysis. Default = 13 [TeV]") + parser.add_argument('--visiblePS', dest="visiblePS", action="store_true", + help="Unfold to visible phase space") + args = parser.parse_args() + return args if __name__ == '__main__': set_root_defaults() diff --git a/dps/utils/measurement2.py b/dps/utils/measurement2.py index 7dbd596e..ddd6335a 100644 --- a/dps/utils/measurement2.py +++ b/dps/utils/measurement2.py @@ -20,6 +20,7 @@ class Measurement(): def __init__(self, measurement): self.measurement = measurement self.histograms = {} + self.cr_histograms = {} self.normalisation = {} self.variable = None self.com = None @@ -28,24 +29,66 @@ def __init__(self, measurement): self.is_normalised = False self.central = False self.samples = {} - self.setFromConfig() + self.__setFromConfig() - def setFromConfig(self): + def __setFromConfig(self): self.variable = self.measurement["variable"] self.com = self.measurement["com"] self.channel = self.measurement["channel"] self.samples = self.measurement["samples"] self.name = self.measurement["name"] + data_driven_qcd = self.measurement["data_driven_qcd"] # Is this central or a systematic? 
if "central" in self.name: self.central = True for sample, histogram_info in self.samples.iteritems(): - self.histograms[sample] = self.return_histogram(histogram_info) - print hist_to_value_error_tuplelist(self.histograms[sample]) + self.histograms[sample] = self.__return_histogram(histogram_info) + if data_driven_qcd: + self.cr_histograms[sample] = self.__return_histogram(histogram_info, useQCDControl=True) + # print hist_to_value_error_tuplelist(self.histograms[sample]) + # print hist_to_value_error_tuplelist(self.cr_histograms[sample]) + + if data_driven_qcd: + self.__qcd_from_data() + + return + + def __qcd_from_data(self): + ''' + Replace Signal region mc qcd with data driven qcd + + N MC QCD in SR N DD QCD in CR + QCD_SHAPE * -------------- * -------------- + N DD QCD in CR N MC QCD in CR + + Shape normalise to scale from + SR mc qcd mc qcd to dd qcd + ''' + from dps.utils.hist_utilities import clean_control_region + + # Get the shape of the data driven qcd in the control region + qcd_shape = clean_control_region( + self.cr_histograms, + subtract=['TTBar', 'V+Jets', 'SingleTop'] + ) + + # Now to normalise the qcd shape to the MC in the Signal Region + # n_dd_cr= Number of datadriven qcd from Control Region + n_mc_sr = self.histograms['QCD'].Integral() + n_dd_cr = qcd_shape.Integral() + qcd_shape.Scale( n_mc_sr/n_dd_cr ) + + # Now to scale from mc qcd to datadriven qcd + n_mc_cr = self.cr_histograms['QCD'].Integral() + qcd_shape.Scale( n_dd_cr/n_mc_cr ) + + # Replace QCD histogram with datadriven one + self.histograms['QCD'] = qcd_shape return - def return_histogram(self, d_hist_info, ignoreUnderflow=True): + + def __return_histogram(self, d_hist_info, ignoreUnderflow=True, useQCDControl=False): ''' Takes basic histogram info and returns histo. 
''' @@ -55,16 +98,25 @@ def return_histogram(self, d_hist_info, ignoreUnderflow=True): f = d_hist_info['input_file'] tree = d_hist_info['tree'] + qcd_tree = d_hist_info["qcd_control_region"] var = d_hist_info['branch'] bins = d_hist_info['bin_edges'] + lumi_scale = d_hist_info['lumi_scale'] + scale = d_hist_info['scale'] weights = d_hist_info['weight_branches'] weights = "*".join(weights) + if useQCDControl: + tree = qcd_tree + + scale *= lumi_scale + root_file = File( f ) root_tree = root_file.Get( tree ) root_histogram = Hist( bins ) root_tree.Draw(var, weights, hist = root_histogram) + root_histogram.Scale(scale) # When a tree is filled with a dummy variable, it will end up in the underflow, so ignore it if ignoreUnderflow: @@ -73,9 +125,20 @@ def return_histogram(self, d_hist_info, ignoreUnderflow=True): # Fix overflow (Moves entries from overflow bin into last bin i.e. last bin not | | but |--> ) root_histogram = fix_overflow(root_histogram) + root_file.Close() return root_histogram + def __background_subtraction(self, histograms): + from dps.utils.hist_utilities import clean_control_region + + ttjet_hist = clean_control_region( + histograms, + subtract=['QCD', 'V+Jets', 'SingleTop'] + ) + self.normalisation['TTJet'] = hist_to_value_error_tuplelist(ttjet_hist) + return + def calculate_normalisation(self): ''' ''' @@ -83,7 +146,7 @@ def calculate_normalisation(self): if self.is_normalised: return histograms = self.histograms - self.background_subtraction(histograms) + self.__background_subtraction(histograms) # next, let's round all numbers (they are event numbers after all) for sample, values in self.normalisation.items(): @@ -94,17 +157,6 @@ def calculate_normalisation(self): self.is_normalised = True return - def background_subtraction(self, histograms): - from dps.utils.hist_utilities import clean_control_region - - ttjet_hist = clean_control_region( - histograms, - # subtract=['QCD', 'V+Jets', 'SingleTop'] - subtract=['QCD'] - ) - 
self.normalisation['TTJet'] = hist_to_value_error_tuplelist(ttjet_hist) - return - def save(self): from dps.utils.file_utilities import write_data_to_JSON # If normalisation hasnt been calculated - then go calculate it! @@ -118,122 +170,3 @@ def save(self): output_folder + file_template.format(type='normalisation', channel=self.channel) ) return - - - - - - - -# def get_histograms_from_trees( -# trees = [], -# branch = 'var', -# weightBranch = 'EventWeight', -# selection = '1', -# files = {}, -# verbose = False, -# nBins = 40, -# xMin = 0, -# xMax = 100, -# ignoreUnderflow = True, -# ): -# histograms = {} -# nHistograms = 0 - -# # Setup selection and weight string for ttree draw -# weightAndSelection = '( %s ) * ( %s )' % ( weightBranch, selection ) - -# for sample, input_file in files.iteritems(): -# root_file = File( input_file ) - -# get_tree = root_file.Get -# histograms[sample] = {} - -# for tree in trees: - -# tempTree = tree -# if 'data' in sample and ( 'Up' in tempTree or 'Down' in tempTree ) : -# tempTree = tempTree.replace('_'+tempTree.split('_')[-1],'') - -# currentTree = get_tree( tempTree ) -# root_histogram = Hist( nBins, xMin, xMax) -# currentTree.Draw(branch, weightAndSelection, hist = root_histogram) -# if not is_valid_histogram( root_histogram, tree, input_file): -# return - -# # When a tree is filled with a dummy variable, it will end up in the underflow, so ignore it -# if ignoreUnderflow: -# root_histogram.SetBinContent(0, 0) -# root_histogram.SetBinError(0,0) - -# gcd() -# nHistograms += 1 -# histograms[sample][tree] = root_histogram.Clone() - -# root_file.Close() -# return histograms - - # @staticmethod - # def fromDict(d): - # m = None - # if d['class'] == 'dps.utils.measurement.Measurement': - # m = Measurement(d['name']) - # if d['class'] == 'dps.utils.measurement.Systematic': - # m = Systematic(d['name'], d['type'], - # affected_samples=d['affected_samples'], scale=d['scale']) - # m.setVariable(d['variable']) - # 
m.setCentreOfMassEnergy(int(d['centre_of_mass_energy'])) - # m.setChannel(d['channel']) - # m.setMETType(d['met_type']) - # for sample, i in d['samples'].items(): - # if i.has_key('input'): - # inp = Input(**i['input']) - # m.addSample(sample, read=True, input=inp) - # else: - # m.addSample(sample, i['file'], i['hist'], read=True) - # for shape, obj in d['shapes'].items(): - # m.addShapeForSample(shape, Measurement.fromDict(obj), read=True) - # for norm, obj in d['norms'].items(): - # m.addNormForSample( - # norm, Measurement.fromDict(obj), read=True) - # return m - - - - - - -class Systematic(Measurement): - - ''' - The Systematic class is an extension of the Measurement class. - It allows to implement systematic specific functionality - (e.g. rate systematics). - ''' - - SHAPE = 10 - RATE = 20 - - @meas_log.trace() - def __init__(self, name, - stype=SHAPE, - affected_samples=[], - scale=1.): - ''' - Constructor - ''' - Measurement.__init__(self, name) - self.type = stype - - self.affected_samples = affected_samples - - self.scale = scale - - @meas_log.trace() - def toDict(self): - output = Measurement.toDict(self) - output['type'] = self.type - output['affected_samples'] = self.affected_samples - output['scale'] = self.scale - - return output From bfc315ea236f15969113a9ef8ce6c7b9c431bc7d Mon Sep 17 00:00:00 2001 From: Douglas Burns Date: Mon, 28 Nov 2016 12:42:56 +0000 Subject: [PATCH 26/90] Working config creater --- .../xsection/create_measurement2p0.py | 306 +++++++++++++++--- 1 file changed, 268 insertions(+), 38 deletions(-) diff --git a/dps/analysis/xsection/create_measurement2p0.py b/dps/analysis/xsection/create_measurement2p0.py index 9be9f797..457c378c 100644 --- a/dps/analysis/xsection/create_measurement2p0.py +++ b/dps/analysis/xsection/create_measurement2p0.py @@ -12,19 +12,16 @@ from argparse import ArgumentParser from dps.config.xsection import XSectionConfig from dps.config import variable_binning -from dps.utils.input import Input from 
dps.utils.logger import log from copy import deepcopy -from dps.utils.measurement import Measurement, Systematic +from dps.utils.file_utilities import write_data_to_JSON +import pprint +pp = pprint.PrettyPrinter(indent=4) # define logger for this module create_measurement_log = log["01b_get_ttjet_normalisation"] cml = create_measurement_log # alias - - - - @cml.trace() def main(): parser = ArgumentParser(__doc__) @@ -43,31 +40,264 @@ def main(): action="store_true", help="Print the debug information" ) + parser.add_argument( + '-q', + '--qcd_from_data', + dest="data_driven_qcd", + default=True, + help="Print the debug information" + ) args = parser.parse_args() - centre_of_mass_energy = args.CoM + + options = {} + options['com'] = args.CoM + options['data_driven_qcd'] = args.data_driven_qcd if args.debug: log.setLevel(log.DEBUG) - measurement_config = XSectionConfig(centre_of_mass_energy) - categories = measurement_config.normalisation_systematics + xsec_config = XSectionConfig(options['com']) + categories = xsec_config.normalisation_systematics print categories -# for variable in measurement_config.variables: -# for category in categories: -# for channel in ['electron', 'muon']: -# if channel == 'electron' and (category == 'Muon_down' or category == 'Muon_up'): -# continue -# elif channel == 'muon' and (category == 'Electron_down' or category == 'Electron_up'): -# continue -# # create_measurement( -# # centre_of_mass_energy, category, variable, channel, -# # phase_space='FullPS', norm_method='background_subtraction') -# # and the visible phase space -# create_measurement( -# centre_of_mass_energy, category, variable, channel, -# phase_space='VisiblePS', norm_method='background_subtraction') + # Create specific configs required + for ps in ['VisiblePS', 'FullPS']: + options['ps']=ps + for channel in ['electron', 'muon']: + options['channel']=channel + for variable in xsec_config.variables: + options['variable']=variable + for category in categories: + if channel == 
'electron' and (category == 'Muon_down' or category == 'Muon_up'): + continue + elif channel == 'muon' and (category == 'Electron_down' or category == 'Electron_up'): + continue + elif variable in xsec_config.variables_no_met and category in xsec_config.met_specific_systematics: + continue + options['category']=category + + m = create_measurement( + options, + norm_method='background_subtraction', + ) + + write_measurement( + options, + m, + norm_method='background_subtraction', + ) + + +def create_measurement(options, norm_method): + ''' + Create the config file + ''' + # Create dictionary to write to config file + measurement = {} + xsec_config = XSectionConfig(options['com']) + + # Generate basic normalisation config info + measurement["com"] = options['com'] + measurement["channel"] = options['channel'] + measurement["variable"] = options['variable'] + measurement["name"] = options['category'] + measurement["data_driven_qcd"] = options['data_driven_qcd'] + + # Add specific samples to config + measurement["samples"] = get_samples(options, xsec_config) + return measurement + + +def get_samples(options, xsec_config): + ''' + Return the dictionary of all sample information + ''' + # create samples dictionary + samples = {} + for s in xsec_config.samples: + samples[s] = get_sample_info(options, xsec_config, s) + + return samples + +def get_sample_info(options, xsec_config, sample): + ''' + Generate each measurements information + ''' + # create sample info + sample_info = {} + + # Branch (variable) + sample_info["branch"] = options['variable'] + + # Bin Edges + if options['ps'] == 'VisiblePS': + sample_info["bin_edges"] = variable_binning.reco_bin_edges_vis[options['variable']] + elif options['ps'] == 'FullPS': + sample_info["bin_edges"] = variable_binning.reco_bin_edges_full[options['variable']] + else: + sample_info["bin_edges"] = None + + # Lumi Scale (Rate) + sample_info["lumi_scale"]=1.0 + lumi_scale = xsec_config.rate_changing_systematics['luminosity'] + if 
options['category'] == 'luminosity+': + sample_info["lumi_scale"]= 1.0 + 1.0*lumi_scale + elif options['category'] == 'luminosity-': + sample_info["lumi_scale"]= 1.0 - 1.0*lumi_scale + + # Generator Scale (Rate) + sample_info["scale"]=1.0 + generator_scale = xsec_config.rate_changing_systematics['V+Jets_cross_section'] + if options['category'] == 'V+Jets_cross_section+': + sample_info["scale"] = 1.0 + 1.0*generator_scale + elif options['category'] == 'V+Jets_cross_section-': + sample_info["scale"] = 1.0 - 1.0*generator_scale + generator_scale = xsec_config.rate_changing_systematics['SingleTop_cross_section'] + if options['category'] == 'SingleTop_cross_section+': + sample_info["scale"] = 1.0 + 1.0*generator_scale + elif options['category'] == 'SingleTop_cross_section-': + sample_info["scale"] = 1.0 - 1.0*generator_scale + generator_scale = xsec_config.rate_changing_systematics['QCD_cross_section'] + if options['category'] == 'QCD_cross_section+': + sample_info["scale"] = 1.0 + 1.0*generator_scale + elif options['category'] == 'QCD_cross_section-': + sample_info["scale"] = 1.0 - 1.0*generator_scale + + # Weight branches (Shape) + weight_branches = [] + if sample == 'data': + weight_branches.append('1') + else: + weight_branches.append('EventWeight') + + # PU Weights + if options['category'] == 'PileUp_up': + weight_branches.append('PUWeight_up') + elif options['category'] == 'PileUp_down': + weight_branches.append('PUWeight_down') + else: + weight_branches.append('PUWeight') + + # BJet Weights + if options['category'] == 'BJet_up': + weight_branches.append('BJetUpWeight') + elif options['category'] == 'BJet_down': + weight_branches.append('BJetDownWeight') + elif options['category'] == 'LightJet_up': + weight_branches.append('LightJetUpWeight') + elif options['category'] == 'LightJet_down': + weight_branches.append('LightJetDownWeight') + else: + weight_branches.append('BJetWeight') + + # Lepton Weights + # Lepton weights for nonisolated leptons are removed in 
measurement.py + # The lepton sf are not derived for non isolated leptons + if options['channel'] == 'muon': + if options['category'] == 'Muon_down': + weight_branches.append('MuonDown') + elif options['category'] == 'Muon_up': + weight_branches.append('MuonUp') + else: + weight_branches.append('MuonEfficiencyCorrection') + elif options['channel'] == 'electron': + if options['category'] == 'Electron_down': + weight_branches.append('ElectronDown') + elif options['category'] == 'Electron_up': + weight_branches.append('ElectronUp') + else: + weight_branches.append('ElectronEfficiencyCorrection') + sample_info["weight_branches"] = weight_branches + + # Input File + # Scale and Mass???? + sample_info["input_file"] = get_file(xsec_config, sample, options) + if sample != 'data': + if options['category'] == 'JES_up': + sample_info["input_file"] = sample_info["input_file"].replace('tree', 'plusJES_tree') + elif options['category'] == 'JES_down': + sample_info["input_file"] = sample_info["input_file"].replace('tree', 'minusJES_tree') + elif options['category'] == 'JER_up': + sample_info["input_file"] = sample_info["input_file"].replace('tree', 'plusJER_tree') + elif options['category'] == 'JER_down': + sample_info["input_file"] = sample_info["input_file"].replace('tree', 'minusJER_tree') + + # Input Trees + # QCD Shape and QCD Control Regions + sample_info["tree"], sample_info["qcd_control_region"] = get_tree(xsec_config, options) + + +# 'ElectronEnUp', 'ElectronEnDown', 'MuonEnUp', 'MuonEnDown', 'TauEnUp', 'TauEnDown', 'UnclusteredEnUp', 'UnclusteredEnDown', +# if category in config.met_systematics_suffixes and not variable in config.variables_no_met: +# branch = template.split('/')[-1] +# branch += '_METUncertainties[%s]' % config.met_systematics[ +# category] +# if 'JES_down' in category or 'JES_up' in category or 'JER_down' in category or 'JER_up' in category: +# tree += config.categories_and_prefixes[category] + + return sample_info + + +@cml.trace() +def 
get_file(config, sample, options): + ''' + Return a specific sample file + ''' + if options['channel'] == 'electron': + qcd = config.electron_QCD_MC_trees[options['category']] + data = config.data_file_electron + else: + qcd = config.muon_QCD_MC_trees[options['category']] + data = config.data_file_muon + + files = { + 'TTBar': config.ttbar_trees[options['category']], + 'V+Jets': config.VJets_trees[options['category']], + 'SingleTop': config.SingleTop_trees[options['category']], + 'QCD': qcd, + 'data': data + } + return files[sample] + +@cml.trace() +def get_tree(config, options): + ''' + Return a specific sample tree + ''' + tree = config.tree_path[options['channel']] + if options["data_driven_qcd"]: + # QCD control region + qcd_tree = tree.replace( + "Ref selection", config.qcd_control_region[options['channel']]) + # QCD shape systematic + if "QCD_shape" in options['category']: + qcd_tree = tree.replace( + "Ref selection", config.qcd_shape_syst_region[options['channel']]) + else: + qcd_tree = None + return tree, qcd_tree + + + +def write_measurement(options, measurement, norm_method): + ''' + Write the config + ''' + base_path = 'TESTING/config/measurements/{norm_method}/{energy}TeV/{variable}/{phase_space}/' + path = base_path + '{category}.json' + pp.pprint(measurement) + + path = path.format( + norm_method = norm_method, + energy = options['com'], + variable = options['variable'], + phase_space = options['ps'], + category = options['category'], + ) + write_data_to_JSON(measurement, path, indent = True) + return + # @cml.trace() # def create_measurement(com, category, variable, channel, phase_space, norm_method): # if com == 13: @@ -462,21 +692,21 @@ def main(): # else: # weight_branches.append('BJetWeight') -# if not 'QCD' in tree: -# if channel == 'muon': -# if category == 'Muon_down': -# weight_branches.append('MuonDown') -# elif category == 'Muon_up': -# weight_branches.append('MuonUp') -# else: -# weight_branches.append('MuonEfficiencyCorrection') -# elif 
channel == 'electron': -# if category == 'Electron_down': -# weight_branches.append('ElectronDown') -# elif category == 'Electron_up': -# weight_branches.append('ElectronUp') -# else: -# weight_branches.append('ElectronEfficiencyCorrection') + # if not 'QCD' in tree: + # if channel == 'muon': + # if category == 'Muon_down': + # weight_branches.append('MuonDown') + # elif category == 'Muon_up': + # weight_branches.append('MuonUp') + # else: + # weight_branches.append('MuonEfficiencyCorrection') + # elif channel == 'electron': + # if category == 'Electron_down': + # weight_branches.append('ElectronDown') + # elif category == 'Electron_up': + # weight_branches.append('ElectronUp') + # else: + # weight_branches.append('ElectronEfficiencyCorrection') # i = Input( # input_file=input_file, From 234d90f2e64e87b60f096ff8738a4b3f6086608f Mon Sep 17 00:00:00 2001 From: Douglas Burns Date: Mon, 28 Nov 2016 12:44:26 +0000 Subject: [PATCH 27/90] Remove Lepton weights when calculating datadriven qcd --- dps/utils/measurement2.py | 5 ++++- 1 file changed, 4 insertions(+), 1 deletion(-) diff --git a/dps/utils/measurement2.py b/dps/utils/measurement2.py index ddd6335a..549adba9 100644 --- a/dps/utils/measurement2.py +++ b/dps/utils/measurement2.py @@ -104,11 +104,14 @@ def __return_histogram(self, d_hist_info, ignoreUnderflow=True, useQCDControl=Fa lumi_scale = d_hist_info['lumi_scale'] scale = d_hist_info['scale'] weights = d_hist_info['weight_branches'] - weights = "*".join(weights) if useQCDControl: tree = qcd_tree + # Remove the Lepton reweighting for the datadriven qcd (SF not derived for unisolated leptons) + weights = [x if not 'Electron' in x and not 'Muon' in x for x in weights]: + weights = "*".join(weights) + print weights scale *= lumi_scale root_file = File( f ) From f9407b100e2bde95697cfa265e37010e4c669ad8 Mon Sep 17 00:00:00 2001 From: Douglas Burns Date: Mon, 28 Nov 2016 12:45:53 +0000 Subject: [PATCH 28/90] Rewriting xsection config - in progress --- 
dps/config/xsection.py | 251 +++++++++++++++++++++++++---------------- 1 file changed, 154 insertions(+), 97 deletions(-) diff --git a/dps/config/xsection.py b/dps/config/xsection.py index 8168f4e7..c2cbc21c 100644 --- a/dps/config/xsection.py +++ b/dps/config/xsection.py @@ -2,7 +2,7 @@ import dps.utils.measurement class XSectionConfig(): - current_analysis_path = '/hdfs/TopQuarkGroup/ec6821/1.0.0/atOutput/combined/' + current_analysis_path = '/hdfs/TopQuarkGroup/ec6821/0.0.10/atOutput/combined/' known_centre_of_mass_energies = [13] # has to be separate as many variables depend on it luminosities = {13:36459} @@ -50,7 +50,8 @@ class XSectionConfig(): ] samples = [ - 'TTJet', + 'data', + 'TTBar', 'V+Jets', 'SingleTop', 'QCD' @@ -88,7 +89,9 @@ def __fill_defaults__( self ): # self.path_to_files = self.current_analysis_path + str( self.centre_of_mass_energy ) + 'TeV/2016/' self.path_to_files = self.current_analysis_path - self.path_to_unfolding_histograms = '/hdfs/TopQuarkGroup/run2/unfolding/13TeV/Moriond2017/' + + # self.path_to_unfolding_histograms = '/hdfs/TopQuarkGroup/run2/unfolding/13TeV/2016/' + self.path_to_unfolding_histograms = 'unfolding/13TeV/' path_to_files = self.path_to_files path_to_unfolding_histograms = self.path_to_unfolding_histograms @@ -148,13 +151,17 @@ def __fill_defaults__( self ): self.middle = '_' + str( self.luminosity ) + 'pb_PFElectron_PFMuon_PF2PATJets_PFMET' middle = self.middle - self.data_file_muon = path_to_files + 'data_muon_tree.root' - self.data_file_electron = path_to_files + 'data_electron_tree.root' + # self.data_file_muon = path_to_files + 'data_muon_tree.root' + # self.data_file_electron = path_to_files + 'data_electron_tree.root' + self.data_file_muon = '/hdfs/TopQuarkGroup/db0268/data_muon_tree.root' + self.data_file_electron = '/hdfs/TopQuarkGroup/db0268/data_electron_tree.root' + + - self.data_file_muon_trees = path_to_files + 'data_muon_tree.root' - # self.data_file_muon_trees = 
'/storage/ec6821/AnalysisTools/CMSSW_8_0_17/src/tree_SingleMuon_15930pb_PFElectron_PFMuon_PF2PATJets_MET_201.root' - self.data_file_electron_trees = path_to_files + 'data_electron_tree.root' +# +# +# self.muon_QCD_file = path_to_files + 'QCD_data_mu.root' self.SingleTop_file = path_to_files + 'SingleTop.root' self.electron_QCD_MC_file = path_to_files + 'QCD_Electron.root' @@ -164,7 +171,9 @@ def __fill_defaults__( self ): self.muon_QCD_tree_file = path_to_files + 'QCD_Muon_tree.root' self.electron_QCD_MC_tree_file = path_to_files + 'QCD_Electron_tree.root' self.muon_QCD_MC_tree_file = path_to_files + 'QCD_Muon_tree.root' - +# +# +# self.higgs_file = path_to_files + 'central/TTH_Inclusive_M-125' + middle + '.root' self.categories_and_prefixes = { @@ -247,6 +256,53 @@ def __fill_defaults__( self ): 'TTJets_alphaSdown' : '', } + + self.normalisation_systematics = [ + 'central', + + 'JES_up', + 'JES_down', + 'JER_up', + 'JER_down', + + 'BJet_up', + 'BJet_down', + 'LightJet_up', + 'LightJet_down', + + 'PileUp_up', + 'PileUp_down', + + 'Electron_up', + 'Electron_down', + 'Muon_up', + 'Muon_down', + + 'ElectronEnUp', + 'ElectronEnDown', + 'MuonEnUp', + 'MuonEnDown', + 'TauEnUp', + 'TauEnDown', + 'UnclusteredEnUp', + 'UnclusteredEnDown', + + 'luminosity+', + 'luminosity-', + + 'V+Jets_cross_section-', + 'V+Jets_cross_section+', + 'QCD_cross_section+', + 'QCD_cross_section-', + 'SingleTop_cross_section+', + 'SingleTop_cross_section-', + + 'QCD_shape', + ] + + + + self.list_of_systematics = { # Theoretical Uncertainties (Rate Changing) 'V+Jets_cross_section' : ['V+Jets_cross_section+', 'V+Jets_cross_section-'], @@ -326,42 +382,42 @@ def __fill_defaults__( self ): 'madgraph' ] - self.rate_changing_systematics_values = {} - for systematic in self.rate_changing_systematics.keys(): - affected_samples = XSectionConfig.samples # all samples - if 'SingleTop' in systematic: - affected_samples = ['SingleTop'] - if 'TTJet' in systematic: - affected_samples = ['TTJet'] - if 'VJets' in 
systematic: - affected_samples = ['V+Jets'] - if 'QCD' in systematic: - affected_samples = ['QCD'] - - sp = dps.utils.measurement.Systematic( - systematic + '+', - # systematic + '_up', - stype = dps.utils.measurement.Systematic.RATE, - affected_samples = affected_samples, - scale = 1 + self.rate_changing_systematics[systematic], - ) - scale = 1 - self.rate_changing_systematics[systematic] - if scale <= 0: scale = 10e-5 - - sm = dps.utils.measurement.Systematic( - systematic + '-', - # systematic + '_down', - stype = dps.utils.measurement.Systematic.RATE, - affected_samples = affected_samples, - scale = scale, - ) - self.rate_changing_systematics_values[sp.name] = sp - self.rate_changing_systematics_values[sm.name] = sm - - self.rate_changing_systematics_names = self.rate_changing_systematics_values.keys() + # self.rate_changing_systematics_values = {} + # for systematic in self.rate_changing_systematics.keys(): + # affected_samples = XSectionConfig.samples # all samples + # if 'SingleTop' in systematic: + # affected_samples = ['SingleTop'] + # if 'TTJet' in systematic: + # affected_samples = ['TTJet'] + # if 'VJets' in systematic: + # affected_samples = ['V+Jets'] + # if 'QCD' in systematic: + # affected_samples = ['QCD'] + + # sp = dps.utils.measurement.Systematic( + # systematic + '+', + # # systematic + '_up', + # stype = dps.utils.measurement.Systematic.RATE, + # affected_samples = affected_samples, + # scale = 1 + self.rate_changing_systematics[systematic], + # ) + # scale = 1 - self.rate_changing_systematics[systematic] + # if scale <= 0: scale = 10e-5 + + # sm = dps.utils.measurement.Systematic( + # systematic + '-', + # # systematic + '_down', + # stype = dps.utils.measurement.Systematic.RATE, + # affected_samples = affected_samples, + # scale = scale, + # ) + # self.rate_changing_systematics_values[sp.name] = sp + # self.rate_changing_systematics_values[sm.name] = sm + + # self.rate_changing_systematics_names = self.rate_changing_systematics_values.keys() 
self.topMass_systematics = [ 'TTJets_massup', 'TTJets_massdown'] - # self.topMass_systematics = [ 'TTJets_mass_up', 'TTJets_mass_down'] + self.topMasses = [ 169.5, 172.5, @@ -381,55 +437,50 @@ def __fill_defaults__( self ): categories_and_prefixes = self.categories_and_prefixes generator_mcsamples = self.generator_mcsamples - # File Templates - self.general_category_templates = {category: path_to_files + category + '/%s' + middle + prefix + '.root' for category, prefix in categories_and_prefixes.iteritems()} - self.ttbar_category_templates = {category: path_to_files + 'TTJets_PowhegPythia8.root' for category, prefix in categories_and_prefixes.iteritems()} - self.SingleTop_category_templates = {category: path_to_files + '/SingleTop.root' for ( category, prefix ) in categories_and_prefixes.iteritems()} - self.VJets_category_templates = {category: path_to_files + '/VJets.root' for ( category, prefix ) in categories_and_prefixes.iteritems()} - self.higgs_category_templates = {category: path_to_files + '/TTH_Inclusive_M-125' + middle + prefix + '.root' for ( category, prefix ) in categories_and_prefixes.iteritems()} - self.electron_QCD_MC_category_templates = {category: path_to_files + '/QCD_Electron.root' for ( category, prefix ) in categories_and_prefixes.iteritems()} - self.muon_QCD_MC_category_templates = {category: path_to_files + '/QCD_Muon.root' for ( category, prefix ) in categories_and_prefixes.iteritems()} - - self.general_category_templates_trees = {category: path_to_files + category + '/%s' + middle + prefix + '.root' for category, prefix in categories_and_prefixes.iteritems()} - self.ttbar_category_templates_trees = {category: path_to_files + '/TTJets_PowhegPythia8_tree.root' for category, prefix in categories_and_prefixes.iteritems()} - self.SingleTop_category_templates_trees = {category: path_to_files + '/SingleTop_tree.root' for ( category, prefix ) in categories_and_prefixes.iteritems()} - self.VJets_category_templates_trees = {category: path_to_files + 
'/VJets_tree.root' for ( category, prefix ) in categories_and_prefixes.iteritems()} - self.electron_QCD_MC_category_templates_trees = {category: path_to_files + '/QCD_Electron_tree.root' for ( category, prefix ) in categories_and_prefixes.iteritems()} - self.muon_QCD_MC_category_templates_trees = {category: path_to_files + '/QCD_Muon_tree.root' for ( category, prefix ) in categories_and_prefixes.iteritems()} - self.ttbar_generator_category_templates_trees = {category: path_to_files + '/TTJets_' + category + '_tree.root' for category in generator_mcsamples} - - self.ttbar_fsrup_category_templates_trees = path_to_files + '/TTJets_PowhegPythia8_fsrup_tree.root' - self.ttbar_fsrdown_category_templates_trees = path_to_files + '/TTJets_PowhegPythia8_fsrdown_tree.root' - self.ttbar_isrup_category_templates_trees = path_to_files + '/TTJets_PowhegPythia8_isrup_tree.root' - self.ttbar_isrdown_category_templates_trees = path_to_files + '/TTJets_PowhegPythia8_isrdown_tree.root' - self.ttbar_ueup_category_templates_trees = path_to_files + '/TTJets_PowhegPythia8_up_tree.root' - self.ttbar_uedown_category_templates_trees = path_to_files + '/TTJets_PowhegPythia8_down_tree.root' - - self.ttbar_amc_category_templates_trees = path_to_files + '/TTJets_amc_tree.root' - self.ttbar_madgraph_category_templates_trees = path_to_files + '/TTJets_madgraph_tree.root' - self.ttbar_powhegpythia8_category_templates_trees = path_to_files + '/TTJets_powhegPythia8_tree.root' - self.ttbar_powhegherwigpp_category_templates_trees = path_to_files + '/TTJets_powhegHerwigpp_tree.root' - self.ttbar_amcatnloherwigpp_category_templates_trees = path_to_files + '/TTJets_amcatnloHerwigpp_tree.root' - self.ttbar_mtop1695_category_templates_trees = path_to_files + '/TTJets_PowhegPythia8_mtop1695_tree.root' - self.ttbar_mtop1755_category_templates_trees = path_to_files + '/TTJets_PowhegPythia8_mtop1755_tree.root' - self.ttbar_jesup_category_templates_trees = path_to_files + 
'/TTJets_PowhegPythia8_plusJES_tree.root' - self.ttbar_jesdown_category_templates_trees = path_to_files + '/TTJets_PowhegPythia8_minusJES_tree.root' - self.ttbar_jerup_category_templates_trees = path_to_files + '/TTJets_PowhegPythia8_plusJER_tree.root' - self.ttbar_jerdown_category_templates_trees = path_to_files + '/TTJets_PowhegPythia8_minusJER_tree.root' - - self.data_muon_category_templates = { - 'central': self.data_file_muon, - 'JES_up': self.data_file_muon, - 'JES_down': self.data_file_muon - } - self.data_muon_category_templates_trees = self.data_file_muon_trees - - self.data_electron_category_templates = { - 'central': self.data_file_electron, - 'JES_up': self.data_file_electron, - 'JES_down': self.data_file_electron, - } - self.data_electron_category_templates_trees = self.data_file_electron_trees + self.general_trees = {category: path_to_files + category + '/%s' + middle + prefix + '.root' + for category, prefix in categories_and_prefixes.iteritems()} + self.ttbar_trees = {category: path_to_files + '/TTJets_PowhegPythia8_tree.root' + for category in self.normalisation_systematics} + self.SingleTop_trees = {category: path_to_files + '/SingleTop_tree.root' + for category in self.normalisation_systematics} + self.VJets_trees = {category: path_to_files + '/VJets_tree.root' + for category in self.normalisation_systematics} + self.electron_QCD_MC_trees = {category: path_to_files + '/QCD_Electron_tree.root' + for category in self.normalisation_systematics} + self.muon_QCD_MC_trees = {category: path_to_files + '/QCD_Muon_tree.root' + for category in self.normalisation_systematics} + self.ttbar_generator_trees = {category: path_to_files + '/TTJets_' + category + '_tree.root' + for category in generator_mcsamples} + + + self.ttbar_amc_trees = path_to_files + '/TTJets_amc_tree.root' + self.ttbar_madgraph_trees = path_to_files + '/TTJets_madgraph_tree.root' + self.ttbar_powhegpythia8_trees = path_to_files + '/TTJets_powhegPythia8_tree.root' + 
self.ttbar_powhegherwigpp_trees = path_to_files + '/TTJets_powhegHerwigpp_tree.root' + self.ttbar_amcatnloherwigpp_trees = path_to_files + '/TTJets_amcatnloHerwigpp_tree.root' + self.ttbar_scaleup_trees = path_to_files + '/TTJets_PowhegPythia8_scaleup_tree.root' + self.ttbar_scaledown_trees = path_to_files + '/TTJets_PowhegPythia8_scaledown_tree.root' + self.ttbar_mtop1695_trees = path_to_files + '/TTJets_PowhegPythia8_mtop1695_tree.root' + self.ttbar_mtop1755_trees = path_to_files + '/TTJets_PowhegPythia8_mtop1755_tree.root' + self.ttbar_jesup_trees = path_to_files + '/TTJets_PowhegPythia8_plusJES_tree.root' + self.ttbar_jesdown_trees = path_to_files + '/TTJets_PowhegPythia8_minusJES_tree.root' + self.ttbar_jerup_trees = path_to_files + '/TTJets_PowhegPythia8_plusJER_tree.root' + self.ttbar_jerdown_trees = path_to_files + '/TTJets_PowhegPythia8_minusJER_tree.root' + + + # self.data_muon_category_templates = { + # 'central': self.data_file_muon, + # 'JES_up': self.data_file_muon, + # 'JES_down': self.data_file_muon + # } + # self.data_muon_category_templates_trees = self.data_file_muon + + # self.data_electron_category_templates = { + # 'central': self.data_file_electron, + # 'JES_up': self.data_file_electron, + # 'JES_down': self.data_file_electron, + # } + # self.data_electron_category_templates_trees = self.data_file_electron # Unfolding MC Different Generator Samples self.unfolding_powheg_pythia8_raw = path_to_unfolding_histograms + 'unfolding_TTJets_%dTeV.root' % self.centre_of_mass_energy @@ -497,16 +548,22 @@ def __fill_defaults__( self ): self.pdfWeightMax = 100 self.unfolding_pdfweights = {index : path_to_unfolding_histograms + 'unfolding_TTJets_%dTeV_asymmetric_pdfWeight_%d.root' % (self.centre_of_mass_energy, index) for index in range( self.pdfWeightMin, self.pdfWeightMax )} - self.tree_path_templates = { + self.tree_path = { 'electron' : 'TTbar_plus_X_analysis/EPlusJets/Ref selection/FitVariables', - 'muon' : 'TTbar_plus_X_analysis/MuPlusJets/Ref 
selection/FitVariables' + 'muon' : 'TTbar_plus_X_analysis/MuPlusJets/Ref selection/FitVariables', + } + + self.qcd_control_region = { + 'electron' : 'QCDConversions', + 'muon' : 'QCD non iso mu+jets 1p5to3', } - self.tree_path_control_templates = { - 'electron' : 'TTbar_plus_X_analysis/EPlusJets/QCD non iso e+jets/FitVariables', - 'muon' : 'TTbar_plus_X_analysis/MuPlusJets/QCD non iso mu+jets 3toInf/FitVariables' + self.qcd_shape_syst_region = { + 'electron' : 'QCD non iso e+jets', + 'muon' : 'QCD non iso mu+jets 3toInf', } + self.variable_path_templates = { 'MET' : 'TTbar_plus_X_analysis/{channel}/{selection}/FitVariables/MET', 'HT' : 'TTbar_plus_X_analysis/{channel}/{selection}/FitVariables/HT', From 4ca68fcb03a0a69759b0cdc55aa587a9b99434d3 Mon Sep 17 00:00:00 2001 From: Douglas Burns Date: Mon, 28 Nov 2016 13:44:43 +0000 Subject: [PATCH 29/90] Scaling will always be > 0 --- dps/analysis/xsection/create_measurement2p0.py | 2 ++ 1 file changed, 2 insertions(+) diff --git a/dps/analysis/xsection/create_measurement2p0.py b/dps/analysis/xsection/create_measurement2p0.py index 457c378c..191d08a2 100644 --- a/dps/analysis/xsection/create_measurement2p0.py +++ b/dps/analysis/xsection/create_measurement2p0.py @@ -161,6 +161,8 @@ def get_sample_info(options, xsec_config, sample): sample_info["scale"] = 1.0 + 1.0*generator_scale elif options['category'] == 'QCD_cross_section-': sample_info["scale"] = 1.0 - 1.0*generator_scale + # scaling will always have some non zero value + if sample_info["scale"] <= 0.0001: sample_info["scale"] = 0.0001 # Weight branches (Shape) weight_branches = [] From 18d13222c8e79fd77135abfdf8cb3d2abcece45f Mon Sep 17 00:00:00 2001 From: Douglas Burns Date: Mon, 28 Nov 2016 14:24:51 +0000 Subject: [PATCH 30/90] Add MET uncertainty branches --- .../xsection/create_measurement2p0.py | 449 +----------------- 1 file changed, 11 insertions(+), 438 deletions(-) diff --git a/dps/analysis/xsection/create_measurement2p0.py 
b/dps/analysis/xsection/create_measurement2p0.py index 191d08a2..e01fedc9 100644 --- a/dps/analysis/xsection/create_measurement2p0.py +++ b/dps/analysis/xsection/create_measurement2p0.py @@ -125,8 +125,14 @@ def get_sample_info(options, xsec_config, sample): # create sample info sample_info = {} - # Branch (variable) + # Branch (variable) sample_info["branch"] = options['variable'] + # MET Systematics + # Only Met Variables + if options['variable'] not in xsec_config.variables_no_met: + # Only MET Syst measurement + if options['category'] in xsec_config.met_specific_systematics: + sample_info["branch"] += '_METUncertainties[{index}]'.format(index = str(xsec_config.met_systematics[options['category']])) # Bin Edges if options['ps'] == 'VisiblePS': @@ -211,7 +217,6 @@ def get_sample_info(options, xsec_config, sample): sample_info["weight_branches"] = weight_branches # Input File - # Scale and Mass???? sample_info["input_file"] = get_file(xsec_config, sample, options) if sample != 'data': if options['category'] == 'JES_up': @@ -226,21 +231,11 @@ def get_sample_info(options, xsec_config, sample): # Input Trees # QCD Shape and QCD Control Regions sample_info["tree"], sample_info["qcd_control_region"] = get_tree(xsec_config, options) - - -# 'ElectronEnUp', 'ElectronEnDown', 'MuonEnUp', 'MuonEnDown', 'TauEnUp', 'TauEnDown', 'UnclusteredEnUp', 'UnclusteredEnDown', -# if category in config.met_systematics_suffixes and not variable in config.variables_no_met: -# branch = template.split('/')[-1] -# branch += '_METUncertainties[%s]' % config.met_systematics[ -# category] -# if 'JES_down' in category or 'JES_up' in category or 'JER_down' in category or 'JER_up' in category: -# tree += config.categories_and_prefixes[category] - + return sample_info - @cml.trace() def get_file(config, sample, options): ''' @@ -286,13 +281,14 @@ def write_measurement(options, measurement, norm_method): ''' Write the config ''' - base_path = 
'TESTING/config/measurements/{norm_method}/{energy}TeV/{variable}/{phase_space}/' + base_path = 'TESTING/config/measurements/{norm_method}/{energy}TeV/{channel}/{variable}/{phase_space}/' path = base_path + '{category}.json' - pp.pprint(measurement) + # pp.pprint(measurement) path = path.format( norm_method = norm_method, energy = options['com'], + channel = options['channel'], variable = options['variable'], phase_space = options['ps'], category = options['category'], @@ -300,428 +296,5 @@ def write_measurement(options, measurement, norm_method): write_data_to_JSON(measurement, path, indent = True) return -# @cml.trace() -# def create_measurement(com, category, variable, channel, phase_space, norm_method): -# if com == 13: -# # exclude non existing systematics -# if 'VJets' in category and 'scale' in category: -# print('Excluding {0} for now'.format(category)) -# return -# config = XSectionConfig(com) -# met_type = get_met_type(category, config) -# should_not_run_systematic = category in config.met_systematics_suffixes and variable in config.variables_no_met and not 'JES' in category and not 'JER' in category -# if should_not_run_systematic: -# # no MET uncertainty on HT (but JES and JER of course) -# return - -# m = None -# if category == 'central': -# m = Measurement(category) -# else: -# vjet_systematics = [config.vjets_theory_systematic_prefix + -# systematic for systematic in config.generator_systematics] -# if category in config.categories_and_prefixes.keys() or \ -# category in config.met_systematics_suffixes or \ -# category in vjet_systematics: -# m = Systematic(category, -# stype=Systematic.SHAPE, -# affected_samples=config.samples) -# elif category in config.rate_changing_systematics_names: -# m = config.rate_changing_systematics_values[category] - -# elif category == 'QCD_shape': -# m = Systematic(category, -# stype=Systematic.SHAPE, -# affected_samples=['QCD'], -# ) - -# m.setVariable(variable) -# m.setCentreOfMassEnergy(com) -# m.setChannel(channel) 
-# m.setMETType(met_type) - -# inputs = { -# 'channel': config.analysis_types[channel], -# 'met_type': met_type, -# 'selection': 'Ref selection', -# 'btag': config.translate_options['2m'], # 2 or more -# 'energy': com, -# 'variable': variable, -# 'category': category, -# 'phase_space': phase_space, -# 'norm_method': norm_method, -# 'lepton': channel.title(), -# } -# variable_template = config.variable_path_templates[ -# variable].format(**inputs) - -# template_category = category -# if category == 'QCD_shape' or category in config.rate_changing_systematics_names: -# template_category = 'central' -# if category in [config.vjets_theory_systematic_prefix + systematic for systematic in config.generator_systematics]: -# template_category = 'central' - -# m.addSample( -# 'TTJet', -# False, -# input=create_input( -# config, 'TTJet', variable, template_category, channel, -# variable_template, phase_space=phase_space, measurement=m, -# ), -# ) -# m.addSample( -# 'V+Jets', -# False, -# input=create_input( -# config, 'V+Jets', variable, template_category, channel, -# variable_template, phase_space=phase_space, measurement=m, -# ), -# ) -# m.addSample( -# 'SingleTop', -# False, -# input=create_input( -# config, 'SingleTop', variable, template_category, channel, -# variable_template, phase_space=phase_space, measurement=m, -# ), -# ) -# m.addSample( -# 'QCD', -# False, -# input=create_input( -# config, 'QCD', variable, template_category, channel, -# variable_template, phase_space=phase_space, measurement=m, -# ), -# ) -# variable_template_data = variable_template.replace( -# met_type, config.translate_options['type1']) - -# m.addSample( -# 'data', -# False, -# input=create_input( -# config, 'data', variable, template_category, channel, -# variable_template_data, phase_space=phase_space, measurement=m, -# ), -# ) - -# m_qcd = Measurement(category) -# m_qcd.setVariable(variable) -# m_qcd.setCentreOfMassEnergy(com) - -# qcd_template = get_qcd_template(config, variable, category, 
channel) - -# # we want "measurement = m" here since all rate systematics should apply -# # to the control regions as well -# m_qcd.addSample( -# 'TTJet', -# False, -# input=create_input( -# config, 'TTJet', variable, template_category, channel, -# qcd_template, phase_space=phase_space, measurement=m, -# ), -# ) -# m_qcd.addSample( -# 'V+Jets', -# False, -# input=create_input( -# config, 'V+Jets', variable, template_category, channel, -# qcd_template, phase_space=phase_space, measurement=m, -# ), -# ) -# m_qcd.addSample( -# 'SingleTop', -# False, -# input=create_input( -# config, 'SingleTop', variable, template_category, channel, -# qcd_template, phase_space=phase_space, measurement=m, -# ), -# ) -# m_qcd.addSample( -# 'QCD', -# False, -# input=create_input( -# config, 'QCD', variable, template_category, channel, -# qcd_template, phase_space=phase_space, measurement=m, -# ), -# ) -# m_qcd.addSample( -# 'data', -# False, -# input=create_input( -# config, 'data', variable, template_category, channel, -# qcd_template, phase_space=phase_space, measurement=m, -# ), -# ) - -# m.addShapeForSample('QCD', m_qcd, False) -# norm_qcd = deepcopy(m_qcd) -# # we want QCD shape and normalisation to be separate -# if category == 'QCD_shape': -# for sample in norm_qcd.samples.keys(): -# tree = norm_qcd.samples[sample]['input'].tree_name -# if channel == 'electron': -# tree = tree.replace(config.electron_control_region_systematic, -# config.electron_control_region) -# else: -# tree = tree.replace(config.muon_control_region_systematic, -# config.muon_control_region) -# norm_qcd.samples[sample]['input'].tree_name = tree -# if 'QCD_cross_section' in category: -# for sample in norm_qcd.samples.keys(): -# tree = norm_qcd.samples[sample]['input'].tree_name -# if channel == 'electron': -# tree = tree.replace(config.electron_control_region, -# config.electron_control_region_systematic) -# else: -# tree = tree.replace(config.muon_control_region, -# config.muon_control_region_systematic) -# 
norm_qcd.samples[sample]['input'].tree_name = tree - -# m.addNormForSample('QCD', norm_qcd, False) - -# if category in [config.vjets_theory_systematic_prefix + systematic for systematic in config.generator_systematics]: -# v_template_category = category.replace( -# config.vjets_theory_systematic_prefix, '') -# m_vjets = Measurement(category) -# m_vjets.setVariable(variable) -# m_vjets.setCentreOfMassEnergy(com) -# m_vjets.addSample( -# 'V+Jets', -# False, -# input=create_input( -# config, 'V+Jets', variable, v_template_category, -# channel, -# variable_template, -# config.generator_systematic_vjets_templates[ -# v_template_category]), -# phase_space=phase_space, measurement=m, -# ) -# m.addShapeForSample('V+Jets', m_vjets, False) - -# inputs['channel'] = channel -# base_path = 'config/measurements/{norm_method}/{energy}TeV/' -# base_path += '{channel}/{variable}/{phase_space}/' -# if category == 'central': -# path = base_path + '{category}.json' -# m.toJSON(path.format(**inputs)) -# else: -# if m.type == Systematic.SHAPE: -# inputs['type'] = 'shape_systematic' -# else: -# inputs['type'] = 'rate_systematic' -# if category in config.met_systematics_suffixes and category not in ['JES_up', 'JES_down', 'JER_up', 'JER_down']: -# inputs['category'] = met_type -# path = base_path + '{category}_{type}.json' -# m.toJSON(path.format(**inputs)) - - -# @cml.trace() -# def get_met_type(category, config): -# met_type = config.translate_options['type1'] -# if category == 'JES_up': -# met_type += 'JetEnUp' -# elif category == 'JES_down': -# met_type += 'JetEnDown' -# elif category == 'JER_up': -# met_type += 'JetResUp' -# elif category == 'JER_down': -# met_type += 'JetResDown' - -# isJetSystematic = 'JetEn' in category or 'JetRes' in category -# isJetSystematic = isJetSystematic or 'JES' in category -# isJetSystematic = isJetSystematic or 'JER' in category - -# if category in config.met_systematics_suffixes: -# # already done them -# if not isJetSystematic: -# met_type = met_type 
+ category - -# return met_type - - -# @cml.trace() -# def get_file(config, sample, category, channel): -# use_trees = True if config.centre_of_mass_energy == 13 else False -# if channel == 'electron': -# qcd_template = config.electron_QCD_MC_category_templates[category] -# data_template = config.data_file_electron -# qcd_template_tree = config.electron_QCD_MC_category_templates_trees[ -# category] -# data_template_tree = config.data_file_electron_trees -# else: -# qcd_template = config.muon_QCD_MC_category_templates[category] -# data_template = config.data_file_muon -# qcd_template_tree = config.muon_QCD_MC_category_templates_trees[ -# category] -# data_template_tree = config.data_file_muon_trees - -# tree_files = { -# 'TTJet': config.ttbar_category_templates_trees[category], -# 'V+Jets': config.VJets_category_templates_trees[category], -# 'SingleTop': config.SingleTop_category_templates_trees[category], -# 'QCD': qcd_template_tree, -# 'data': data_template_tree -# } -# files = { -# 'TTJet': config.ttbar_category_templates[category], -# 'V+Jets': config.VJets_category_templates[category], -# 'SingleTop': config.SingleTop_category_templates[category], -# 'QCD': qcd_template, -# 'data': data_template, -# } - -# if use_trees: -# return tree_files[sample] -# else: -# return files[sample] - - -# @cml.trace() -# def get_qcd_template(config, variable, category, channel): -# qcd_inputs = { -# 'channel': config.analysis_types[channel], -# 'met_type': config.translate_options['type1'], # always central MET -# 'selection': 'Ref selection', -# 'btag': config.translate_options['2m'], # 2 or more -# 'energy': config.centre_of_mass_energy, -# 'variable': variable, -# 'category': 'central', # always central -# 'lepton': channel.title(), -# } - -# qcd_template = config.variable_path_templates[ -# variable].format(**qcd_inputs) -# if channel == 'electron': -# qcd_template = qcd_template.replace( -# 'Ref selection', config.electron_control_region) -# if category == 'QCD_shape': -# 
qcd_template = qcd_template.replace( -# config.electron_control_region, -# config.electron_control_region_systematic) -# else: -# qcd_template = qcd_template.replace( -# 'Ref selection', config.muon_control_region) -# if category == 'QCD_shape': -# qcd_template = qcd_template.replace( -# config.muon_control_region, -# config.muon_control_region_systematic) - -# return qcd_template - - -# @cml.trace() -# def create_input(config, sample, variable, category, channel, template, -# input_file=None, phase_space=None, **kwargs): -# tree, branch, hist = None, None, None -# selection = '1' -# if not input_file: -# input_file = get_file(config, sample, category, channel) - -# if config.centre_of_mass_energy == 13: -# branch = template.split('/')[-1] -# tree = template.replace('/' + branch, '') - -# if 'absolute_eta' in branch: -# branch = 'abs(lepton_eta)' - -# if sample != 'data': -# if category in config.met_systematics_suffixes and not variable in config.variables_no_met: -# branch = template.split('/')[-1] -# branch += '_METUncertainties[%s]' % config.met_systematics[ -# category] - -# if 'JES_down' in category or 'JES_up' in category or 'JER_down' in category or 'JER_up' in category: -# tree += config.categories_and_prefixes[category] - -# if not sample == 'data': -# if 'JES_down' in category: -# input_file = input_file.replace('tree', 'minusJES_tree') -# elif 'JES_up' in category: -# input_file = input_file.replace('tree', 'plusJES_tree') -# elif 'JER_up' in category: -# input_file = input_file.replace('tree', 'plusJER_tree') -# elif 'JER_down' in category: -# input_file = input_file.replace('tree', 'minusJER_tree') - -# selection = '{0} >= 0'.format(branch) -# if variable == 'abs_lepton_eta': -# selection += ' && {0} <= 3'.format(branch) -# else: -# hist = template - -# lumi_scale = config.luminosity_scale -# scale = 1. 
- -# m = kwargs['measurement'] -# if m.type == Systematic.RATE: -# if 'luminosity' in m.name: -# lumi_scale = lumi_scale * m.scale -# else: -# if sample in m.affected_samples: -# scale = m.scale -# if sample == 'data': # data is not scaled in any way -# lumi_scale = 1. -# scale = 1. - -# edges = variable_binning.reco_bin_edges_full[variable] -# if phase_space == 'VisiblePS': -# edges = variable_binning.reco_bin_edges_vis[variable] - -# weight_branches = [] -# if sample == 'data': -# weight_branches.append('1') -# else: -# weight_branches.append('EventWeight') - -# if 'PileUp' not in category: -# weight_branches.append('PUWeight') -# elif category == 'PileUp_up': -# weight_branches.append('PUWeight_up') -# elif category == 'PileUp_down': -# weight_branches.append('PUWeight_down') -# else: -# weight_branches.append('1') - -# if category == 'BJet_down': -# weight_branches.append('BJetDownWeight') -# elif category == 'BJet_up': -# weight_branches.append('BJetUpWeight') -# elif category == 'LightJet_down': -# weight_branches.append('LightJetDownWeight') -# elif category == 'LightJet_up': -# weight_branches.append('LightJetUpWeight') -# else: -# weight_branches.append('BJetWeight') - - # if not 'QCD' in tree: - # if channel == 'muon': - # if category == 'Muon_down': - # weight_branches.append('MuonDown') - # elif category == 'Muon_up': - # weight_branches.append('MuonUp') - # else: - # weight_branches.append('MuonEfficiencyCorrection') - # elif channel == 'electron': - # if category == 'Electron_down': - # weight_branches.append('ElectronDown') - # elif category == 'Electron_up': - # weight_branches.append('ElectronUp') - # else: - # weight_branches.append('ElectronEfficiencyCorrection') - -# i = Input( -# input_file=input_file, -# hist=hist, -# tree=tree, -# branch=branch, -# selection=selection, -# bin_edges=edges, -# lumi_scale=lumi_scale, -# scale=scale, -# weight_branches=weight_branches, -# ) -# return i - if __name__ == '__main__': main() From 
daab8ea0ff9f3ce56ca41b64cd53fb40e35fdee0 Mon Sep 17 00:00:00 2001 From: Douglas Burns Date: Mon, 28 Nov 2016 14:40:15 +0000 Subject: [PATCH 31/90] Normalisatoin script now reads all configs --- .../xsection/01_get_ttjet_normalisation2.py | 47 ++++++++++++------- 1 file changed, 31 insertions(+), 16 deletions(-) diff --git a/dps/analysis/xsection/01_get_ttjet_normalisation2.py b/dps/analysis/xsection/01_get_ttjet_normalisation2.py index f4e27fd2..babdae26 100644 --- a/dps/analysis/xsection/01_get_ttjet_normalisation2.py +++ b/dps/analysis/xsection/01_get_ttjet_normalisation2.py @@ -91,26 +91,41 @@ def main(): results = {} # construct categories from files: - input_template = 'TESTING/' + input_template = 'TESTING/config/measurements/background_subtraction/{com}TeV/{ch}/{var}/{ps}/' # Create measuremewnt_filepath measurement_filepath = input_template - # Loop over channels - measurement_files = get_files_in_path(measurement_filepath, file_ending='.json') - print measurement_files - - for f in sorted(measurement_files): - print('Processing file ' + f) - # Read in Measurement JSON - config = read_data_from_JSON(f) - # print config - # Create Measurement Class using JSON - measurement = Measurement(config) - # measurement.qcd_from_data() - measurement.calculate_normalisation() - measurement.save() - break + if args.visiblePS: + ps = 'VisiblePS' + else: + ps = 'FullPS' + + for ch in ['electron', 'muon']: + for var in measurement_config.variables: + + # Create measurement_filepath + measurement_filepath = input_template.format( + com = args.CoM, + ch = ch, + var = var, + ps = ps, + ) + print measurement_filepath + + # Loop over channels + measurement_files = get_files_in_path(measurement_filepath, file_ending='.json') + + for f in sorted(measurement_files): + print('Processing file ' + f) + # Read in Measurement JSON + config = read_data_from_JSON(f) + # print config + # Create Measurement Class using JSON + measurement = Measurement(config) + 
measurement.calculate_normalisation() + measurement.save() + return def parse_arguments(): parser = ArgumentParser(__doc__) From 06972d9db25f48727dd2146cd8d1fb3babd6b43e Mon Sep 17 00:00:00 2001 From: Douglas Burns Date: Tue, 29 Nov 2016 09:14:48 +0000 Subject: [PATCH 32/90] Add in running over single var. Now saves wrt to PS too --- dps/analysis/xsection/01_get_ttjet_normalisation2.py | 7 ++++--- 1 file changed, 4 insertions(+), 3 deletions(-) diff --git a/dps/analysis/xsection/01_get_ttjet_normalisation2.py b/dps/analysis/xsection/01_get_ttjet_normalisation2.py index babdae26..95ecced2 100644 --- a/dps/analysis/xsection/01_get_ttjet_normalisation2.py +++ b/dps/analysis/xsection/01_get_ttjet_normalisation2.py @@ -103,6 +103,7 @@ def main(): for ch in ['electron', 'muon']: for var in measurement_config.variables: + if args.variable not in var: continue # Create measurement_filepath measurement_filepath = input_template.format( @@ -111,8 +112,7 @@ def main(): var = var, ps = ps, ) - print measurement_filepath - + # Loop over channels measurement_files = get_files_in_path(measurement_filepath, file_ending='.json') @@ -124,7 +124,8 @@ def main(): # Create Measurement Class using JSON measurement = Measurement(config) measurement.calculate_normalisation() - measurement.save() + measurement.save(ps) + # break return def parse_arguments(): From 0bf716d5d2c7369fabbe59e00d61ed53a9c85afa Mon Sep 17 00:00:00 2001 From: Douglas Burns Date: Tue, 29 Nov 2016 09:15:34 +0000 Subject: [PATCH 33/90] JER and JES samples can now access their trees --- .../xsection/create_measurement2p0.py | 24 +++++++++++++------ 1 file changed, 17 insertions(+), 7 deletions(-) diff --git a/dps/analysis/xsection/create_measurement2p0.py b/dps/analysis/xsection/create_measurement2p0.py index e01fedc9..a7452f7d 100644 --- a/dps/analysis/xsection/create_measurement2p0.py +++ b/dps/analysis/xsection/create_measurement2p0.py @@ -87,6 +87,7 @@ def main(): ) +@cml.trace() def create_measurement(options, 
norm_method): ''' Create the config file @@ -107,6 +108,7 @@ def create_measurement(options, norm_method): return measurement +@cml.trace() def get_samples(options, xsec_config): ''' Return the dictionary of all sample information @@ -118,6 +120,8 @@ def get_samples(options, xsec_config): return samples + +@cml.trace() def get_sample_info(options, xsec_config, sample): ''' Generate each measurements information @@ -216,26 +220,31 @@ def get_sample_info(options, xsec_config, sample): weight_branches.append('ElectronEfficiencyCorrection') sample_info["weight_branches"] = weight_branches - # Input File + # Input File and Tree + # QCD Contorol Regions (Shape) JES and JER sample_info["input_file"] = get_file(xsec_config, sample, options) + sample_info["tree"], sample_info["qcd_control_region"] = get_tree(xsec_config, options) if sample != 'data': if options['category'] == 'JES_up': sample_info["input_file"] = sample_info["input_file"].replace('tree', 'plusJES_tree') + sample_info["tree"] = sample_info["tree"].replace('FitVariables', 'FitVariables_JESUp') + sample_info["qcd_control_region"] = sample_info["qcd_control_region"].replace('FitVariables', 'FitVariables_JESUp') elif options['category'] == 'JES_down': sample_info["input_file"] = sample_info["input_file"].replace('tree', 'minusJES_tree') + sample_info["tree"] = sample_info["tree"].replace('FitVariables', 'FitVariables_JESDown') + sample_info["qcd_control_region"] = sample_info["qcd_control_region"].replace('FitVariables', 'FitVariables_JESDown') elif options['category'] == 'JER_up': sample_info["input_file"] = sample_info["input_file"].replace('tree', 'plusJER_tree') + sample_info["tree"] = sample_info["tree"].replace('FitVariables', 'FitVariables_JERUp') + sample_info["qcd_control_region"] = sample_info["qcd_control_region"].replace('FitVariables', 'FitVariables_JERUp') elif options['category'] == 'JER_down': sample_info["input_file"] = sample_info["input_file"].replace('tree', 'minusJER_tree') + 
sample_info["tree"] = sample_info["tree"].replace('FitVariables', 'FitVariables_JERDown') + sample_info["qcd_control_region"] = sample_info["qcd_control_region"].replace('FitVariables', 'FitVariables_JERDown') - # Input Trees - # QCD Shape and QCD Control Regions - sample_info["tree"], sample_info["qcd_control_region"] = get_tree(xsec_config, options) - return sample_info - @cml.trace() def get_file(config, sample, options): ''' @@ -257,6 +266,7 @@ def get_file(config, sample, options): } return files[sample] + @cml.trace() def get_tree(config, options): ''' @@ -276,7 +286,7 @@ def get_tree(config, options): return tree, qcd_tree - +@cml.trace() def write_measurement(options, measurement, norm_method): ''' Write the config From 3fdb1d830949b5113688bed89ebc54c3c71d2c86 Mon Sep 17 00:00:00 2001 From: Douglas Burns Date: Tue, 29 Nov 2016 09:16:36 +0000 Subject: [PATCH 34/90] Outputs to correct Testing folder structure --- dps/utils/measurement2.py | 29 +++++++++++++++++++++++------ 1 file changed, 23 insertions(+), 6 deletions(-) diff --git a/dps/utils/measurement2.py b/dps/utils/measurement2.py index 549adba9..90cfa2ce 100644 --- a/dps/utils/measurement2.py +++ b/dps/utils/measurement2.py @@ -108,10 +108,11 @@ def __return_histogram(self, d_hist_info, ignoreUnderflow=True, useQCDControl=Fa if useQCDControl: tree = qcd_tree # Remove the Lepton reweighting for the datadriven qcd (SF not derived for unisolated leptons) - weights = [x if not 'Electron' in x and not 'Muon' in x for x in weights]: + for weight in weights: + if 'Electron' in weight: weights.remove(weight) + elif 'Muon' in weight: weights.remove(weight) weights = "*".join(weights) - print weights scale *= lumi_scale root_file = File( f ) @@ -134,12 +135,18 @@ def __return_histogram(self, d_hist_info, ignoreUnderflow=True, useQCDControl=Fa def __background_subtraction(self, histograms): from dps.utils.hist_utilities import clean_control_region + print histograms ttjet_hist = clean_control_region( histograms, 
subtract=['QCD', 'V+Jets', 'SingleTop'] ) self.normalisation['TTJet'] = hist_to_value_error_tuplelist(ttjet_hist) + self.normalisation['data'] = hist_to_value_error_tuplelist(histograms['data']) + # self.normalisation['TTBar'] = hist_to_value_error_tuplelist(histograms['TTBar']) + self.normalisation['SingleTop'] = hist_to_value_error_tuplelist(histograms['SingleTop']) + self.normalisation['V+Jets'] = hist_to_value_error_tuplelist(histograms['V+Jets']) + self.normalisation['QCD'] = hist_to_value_error_tuplelist(histograms['QCD']) return def calculate_normalisation(self): @@ -155,21 +162,31 @@ def calculate_normalisation(self): for sample, values in self.normalisation.items(): new_values = [(round(v, 0), round(e, 0)) for v, e in values] self.normalisation[sample] = new_values - print self.normalisation self.is_normalised = True return - def save(self): + def save(self, phase_space): from dps.utils.file_utilities import write_data_to_JSON # If normalisation hasnt been calculated - then go calculate it! if not self.is_normalised: self.calculate_normalisation() + output_folder = 'TESTING/data/normalisation/background_subtraction/{com}TeV/{var}/{ps}/{cat}/' + output_folder = output_folder.format( + com = self.com, + var = self.variable, + ps = phase_space, + cat = self.name, + ) + file_template = '{type}_{channel}.txt' - output_folder = '' + f = file_template.format( + type='normalisation', + channel=self.channel + ) write_data_to_JSON( self.normalisation, - output_folder + file_template.format(type='normalisation', channel=self.channel) + output_folder + f ) return From 18cab24c272d5be32ac1e04606d3779bbab8ed41 Mon Sep 17 00:00:00 2001 From: Douglas Burns Date: Wed, 30 Nov 2016 12:35:02 +0000 Subject: [PATCH 35/90] Remove Normalisation class. 
--- .../xsection/01_get_ttjet_normalisation2.py | 78 +------------------ 1 file changed, 3 insertions(+), 75 deletions(-) diff --git a/dps/analysis/xsection/01_get_ttjet_normalisation2.py b/dps/analysis/xsection/01_get_ttjet_normalisation2.py index 95ecced2..36b31799 100644 --- a/dps/analysis/xsection/01_get_ttjet_normalisation2.py +++ b/dps/analysis/xsection/01_get_ttjet_normalisation2.py @@ -11,75 +11,6 @@ # define logger for this module mylog = log["01b_get_ttjet_normalisation"] -class TTJetNormalisation(object): - ''' - Determines the normalisation for top quark pair production. - Unless stated otherwise all templates and (initial) normalisations - are taken from simulation, except for QCD where the template is - extracted from data. - - Subtracts the known backgrounds from data to obtain TTJet template - and normalisation - ''' - @mylog.trace() - def __init__(self, measurement_config): - self.config = measurement_config - # self.variable = measurement.variable - # self.category = measurement.name - # self.channel = measurement.channel - # self.phase_space = phase_space - - self.have_normalisation = False - # normalisation for current config - self.normalisation = {} - - # @mylog.trace() - # def calculate_normalisation(self): - # ''' - # ''' - # # normalisation already calculated - # if self.have_normalisation: return - - - # histograms = self.measurement.histograms - - # for sample, hist in histograms.items(): - # hist = fix_overflow(hist) - # histograms[sample] = hist - # self.normalisation[sample] = self.initial_normalisation[sample] - - # self.background_subtraction(histograms) - - # # next, let's round all numbers (they are event numbers after all) - # for sample, values in self.normalisation.items(): - # new_values = [(round(v, 1), round(e, 1)) for v, e in values] - # self.normalisation[sample] = new_values - - # self.have_normalisation = True - - # @mylog.trace() - # def background_subtraction(self, histograms): - # ttjet_hist = clean_control_region( - # 
histograms, - # subtract=['QCD', 'V+Jets', 'SingleTop'] - # ) - # self.normalisation['TTJet'] = hist_to_value_error_tuplelist(ttjet_hist) - - # @mylog.trace() - # def save(self): - # # If normalisation hasnt been calculated - then go calculate it! - # if not self.have_normalisation: - # self.calculate_normalisation() - - # file_template = '{type}_{channel}.txt' - # output_folder = '' - - # write_data_to_JSON( - # self.normalisation, - # output_folder + file_template.format(type='normalisation', channel=self.channel) - # ) - # return - def main(): ''' 1 - Create config file reading in templates @@ -90,12 +21,9 @@ def main(): ''' results = {} - # construct categories from files: + # config file template input_template = 'TESTING/config/measurements/background_subtraction/{com}TeV/{ch}/{var}/{ps}/' - # Create measuremewnt_filepath - measurement_filepath = input_template - if args.visiblePS: ps = 'VisiblePS' else: @@ -113,14 +41,14 @@ def main(): ps = ps, ) - # Loop over channels + # Get all config files in measurement_filepath measurement_files = get_files_in_path(measurement_filepath, file_ending='.json') for f in sorted(measurement_files): print('Processing file ' + f) # Read in Measurement JSON config = read_data_from_JSON(f) - # print config + # Create Measurement Class using JSON measurement = Measurement(config) measurement.calculate_normalisation() From 4b8907a85f332e43729ebdf7b513abd5d6c439c0 Mon Sep 17 00:00:00 2001 From: Douglas Burns Date: Wed, 30 Nov 2016 12:36:04 +0000 Subject: [PATCH 36/90] Add selection to config such that abs(lepton_eta) histogram can be drawn --- .../xsection/create_measurement2p0.py | 20 +++++++++++++++---- 1 file changed, 16 insertions(+), 4 deletions(-) diff --git a/dps/analysis/xsection/create_measurement2p0.py b/dps/analysis/xsection/create_measurement2p0.py index a7452f7d..766a9c06 100644 --- a/dps/analysis/xsection/create_measurement2p0.py +++ b/dps/analysis/xsection/create_measurement2p0.py @@ -13,10 +13,7 @@ from 
dps.config.xsection import XSectionConfig from dps.config import variable_binning from dps.utils.logger import log -from copy import deepcopy from dps.utils.file_utilities import write_data_to_JSON -import pprint -pp = pprint.PrettyPrinter(indent=4) # define logger for this module create_measurement_log = log["01b_get_ttjet_normalisation"] @@ -131,6 +128,12 @@ def get_sample_info(options, xsec_config, sample): # Branch (variable) sample_info["branch"] = options['variable'] + if 'abs_lepton_eta' in options['variable']: + sample_info["branch"] = 'abs(lepton_eta)' + + # Selections + sample_info["selection"] = get_selection(options['variable']) + # MET Systematics # Only Met Variables if options['variable'] not in xsec_config.variables_no_met: @@ -244,6 +247,16 @@ def get_sample_info(options, xsec_config, sample): return sample_info +@cml.trace() +def get_selection(var): + ''' + Return a selection for the branch used by ROOT.Tree.Draw() + ''' + sel = str(var)+" >= 0" + if 'abs_lepton_eta' in var: + sel = "abs(lepton_eta) >= 0 && abs(lepton_eta) <= 3" + return sel + @cml.trace() def get_file(config, sample, options): @@ -293,7 +306,6 @@ def write_measurement(options, measurement, norm_method): ''' base_path = 'TESTING/config/measurements/{norm_method}/{energy}TeV/{channel}/{variable}/{phase_space}/' path = base_path + '{category}.json' - # pp.pprint(measurement) path = path.format( norm_method = norm_method, From 1d19b787b74ec6cde7c1aaa83d000a3c718b5202 Mon Sep 17 00:00:00 2001 From: Douglas Burns Date: Wed, 30 Nov 2016 12:37:16 +0000 Subject: [PATCH 37/90] Add selection to tree.Draw(), add some comments --- dps/utils/measurement2.py | 50 ++++++++++++++++++++++++++------------- 1 file changed, 33 insertions(+), 17 deletions(-) diff --git a/dps/utils/measurement2.py b/dps/utils/measurement2.py index 90cfa2ce..b39dae81 100644 --- a/dps/utils/measurement2.py +++ b/dps/utils/measurement2.py @@ -3,10 +3,8 @@ ''' from __future__ import division from . 
import log -import copy -from dps.utils.file_utilities import make_folder_if_not_exists, read_data_from_JSON -from dps.utils.input import Input -from dps.utils.hist_utilities import hist_to_value_error_tuplelist +from dps.utils.hist_utilities import hist_to_value_error_tuplelist, clean_control_region +from dps.utils.file_utilities import write_data_to_JSON # define logger for this module meas_log = log["dps.utils.measurement"] @@ -38,20 +36,23 @@ def __setFromConfig(self): self.samples = self.measurement["samples"] self.name = self.measurement["name"] data_driven_qcd = self.measurement["data_driven_qcd"] + # Is this central or a systematic? if "central" in self.name: self.central = True + # Retrieve histograms from files for SR and CR for sample, histogram_info in self.samples.iteritems(): self.histograms[sample] = self.__return_histogram(histogram_info) if data_driven_qcd: self.cr_histograms[sample] = self.__return_histogram(histogram_info, useQCDControl=True) - # print hist_to_value_error_tuplelist(self.histograms[sample]) - # print hist_to_value_error_tuplelist(self.cr_histograms[sample]) + # print(hist_to_value_error_tuplelist(self.histograms[sample])) + # print(hist_to_value_error_tuplelist(self.cr_histograms[sample])) + + # Replace QCD MC with data-driven MC if data_driven_qcd: self.__qcd_from_data() - return def __qcd_from_data(self): @@ -65,32 +66,34 @@ def __qcd_from_data(self): Shape normalise to scale from SR mc qcd mc qcd to dd qcd ''' - from dps.utils.hist_utilities import clean_control_region - # Get the shape of the data driven qcd in the control region qcd_shape = clean_control_region( self.cr_histograms, subtract=['TTBar', 'V+Jets', 'SingleTop'] ) + # print(hist_to_value_error_tuplelist(qcd_shape)) # Now to normalise the qcd shape to the MC in the Signal Region # n_dd_cr= Number of datadriven qcd from Control Region n_mc_sr = self.histograms['QCD'].Integral() n_dd_cr = qcd_shape.Integral() qcd_shape.Scale( n_mc_sr/n_dd_cr ) + # print "scaling to 
normalisation in SR MC : ", n_mc_sr/n_dd_cr # Now to scale from mc qcd to datadriven qcd n_mc_cr = self.cr_histograms['QCD'].Integral() qcd_shape.Scale( n_dd_cr/n_mc_cr ) + # print "scaling from MC to datadriven : ", n_dd_cr/n_mc_cr + # print "Total scaling : ", n_mc_sr/n_mc_cr # Replace QCD histogram with datadriven one self.histograms['QCD'] = qcd_shape return - def __return_histogram(self, d_hist_info, ignoreUnderflow=True, useQCDControl=False): ''' Takes basic histogram info and returns histo. + Maybe this can move to ROOT_utilities? ''' from rootpy.io.file import File from rootpy.plotting import Hist @@ -104,8 +107,10 @@ def __return_histogram(self, d_hist_info, ignoreUnderflow=True, useQCDControl=Fa lumi_scale = d_hist_info['lumi_scale'] scale = d_hist_info['scale'] weights = d_hist_info['weight_branches'] + selection = d_hist_info['selection'] if useQCDControl: + # replace SR tree with CR tree tree = qcd_tree # Remove the Lepton reweighting for the datadriven qcd (SF not derived for unisolated leptons) for weight in weights: @@ -113,13 +118,17 @@ def __return_histogram(self, d_hist_info, ignoreUnderflow=True, useQCDControl=Fa elif 'Muon' in weight: weights.remove(weight) weights = "*".join(weights) + # Selection will return a weight 0 or 1 depending on whether event passes selection + weights_and_selection = '( {0} ) * ( {1} )'.format(weights, selection) + scale *= lumi_scale root_file = File( f ) root_tree = root_file.Get( tree ) root_histogram = Hist( bins ) - root_tree.Draw(var, weights, hist = root_histogram) + # Draw histogram of var for selection into root_histogram + root_tree.Draw(var, selection = weights_and_selection, hist = root_histogram) root_histogram.Scale(scale) # When a tree is filled with a dummy variable, it will end up in the underflow, so ignore it @@ -127,16 +136,18 @@ def __return_histogram(self, d_hist_info, ignoreUnderflow=True, useQCDControl=Fa root_histogram.SetBinContent(0, 0) root_histogram.SetBinError(0,0) - # Fix overflow 
(Moves entries from overflow bin into last bin i.e. last bin not | | but |--> ) + # Fix overflow (Moves entries from overflow bin into last bin i.e. last bin not |..| but |--> ) root_histogram = fix_overflow(root_histogram) root_file.Close() return root_histogram - def __background_subtraction(self, histograms): - from dps.utils.hist_utilities import clean_control_region - print histograms + def __background_subtraction(self, histograms): + ''' + Subtracts the backgrounds from data to give amount of ttbar in data. + Also adds all backgrounds to normalisation output + ''' ttjet_hist = clean_control_region( histograms, subtract=['QCD', 'V+Jets', 'SingleTop'] @@ -151,6 +162,7 @@ def __background_subtraction(self, histograms): def calculate_normalisation(self): ''' + Calls the normalisation of the ttbar samples ''' # normalisation already calculated if self.is_normalised: return @@ -160,14 +172,18 @@ def calculate_normalisation(self): # next, let's round all numbers (they are event numbers after all) for sample, values in self.normalisation.items(): - new_values = [(round(v, 0), round(e, 0)) for v, e in values] + new_values = [(round(v, 1), round(e, 1)) for v, e in values] self.normalisation[sample] = new_values self.is_normalised = True return def save(self, phase_space): - from dps.utils.file_utilities import write_data_to_JSON + ''' + Saves the normalisation output into a JSON. + I would like to change this to a pandas Dataframe at somepoint after + a few issues have been worked out + ''' # If normalisation hasnt been calculated - then go calculate it! if not self.is_normalised: self.calculate_normalisation() From 058531f8301cb762176b0184b11a2bccc78ccb15 Mon Sep 17 00:00:00 2001 From: Douglas Burns Date: Wed, 30 Nov 2016 12:40:07 +0000 Subject: [PATCH 38/90] WIP what is actually needed in the xsection config? 
--- dps/config/xsection.py | 133 ++++++++++++++++++----------------------- 1 file changed, 57 insertions(+), 76 deletions(-) diff --git a/dps/config/xsection.py b/dps/config/xsection.py index c2cbc21c..96cb6bc4 100644 --- a/dps/config/xsection.py +++ b/dps/config/xsection.py @@ -49,6 +49,7 @@ class XSectionConfig(): 'vjets_theory_systematic_prefix' ] + # Used in 01 samples = [ 'data', 'TTBar', @@ -57,6 +58,7 @@ class XSectionConfig(): 'QCD' ] + # Used in 01 variables = [ 'HT', 'MET', @@ -67,6 +69,7 @@ class XSectionConfig(): 'abs_lepton_eta' ] + # Used in 01 variables_no_met = [ 'HT', 'NJets', @@ -87,18 +90,16 @@ def __init__( self, centre_of_mass_energy ): def __fill_defaults__( self ): self.met_type = 'patType1CorrectedPFMet' - # self.path_to_files = self.current_analysis_path + str( self.centre_of_mass_energy ) + 'TeV/2016/' self.path_to_files = self.current_analysis_path + path_to_files = self.path_to_files # self.path_to_unfolding_histograms = '/hdfs/TopQuarkGroup/run2/unfolding/13TeV/2016/' self.path_to_unfolding_histograms = 'unfolding/13TeV/' - - path_to_files = self.path_to_files path_to_unfolding_histograms = self.path_to_unfolding_histograms self.luminosity = self.luminosities[self.centre_of_mass_energy] - # general + # Used in 01 self.met_systematics = { 'JER_up' : 0, 'JER_down' : 1, @@ -112,22 +113,31 @@ def __fill_defaults__( self ): 'TauEnDown' : 9, 'UnclusteredEnUp' : 10, 'UnclusteredEnDown' : 11, - # 'ElectronEn_up' : 6, - # 'ElectronEn_down' : 7, - # 'MuonEn_up' : 4, - # 'MuonEn_down' : 5, - # 'TauEn_up' : 8, - # 'TauEn_down' : 9, - # 'UnclusteredEn_up' : 10, - # 'UnclusteredEn_down' : 11, } + # Remove? + self.met_systematics_suffixes = self.met_systematics.keys() + + # Used in 01 - combine with self.met_systematics? 
+ self.met_specific_systematics = [ + 'ElectronEnUp', + 'ElectronEnDown', + 'MuonEnUp', + 'MuonEnDown', + 'TauEnUp', + 'TauEnDown', + 'UnclusteredEnUp', + 'UnclusteredEnDown', + ] + + self.analysis_types = { - 'electron':'EPlusJets', - 'muon':'MuPlusJets', - 'combined':'combined' + 'electron' :'EPlusJets', + 'muon' :'MuPlusJets', + 'combined' : 'combined' } + # Needed? Where? # measurement script options self.translate_options = { 'all':'', @@ -145,6 +155,7 @@ def __fill_defaults__( self ): 'type1':'patType1CorrectedPFMet', } + # Needed? self.ttbar_theory_systematic_prefix = 'TTJets_' self.vjets_theory_systematic_prefix = 'VJets_' # files @@ -156,24 +167,6 @@ def __fill_defaults__( self ): self.data_file_muon = '/hdfs/TopQuarkGroup/db0268/data_muon_tree.root' self.data_file_electron = '/hdfs/TopQuarkGroup/db0268/data_electron_tree.root' - - - -# -# -# - self.muon_QCD_file = path_to_files + 'QCD_data_mu.root' - self.SingleTop_file = path_to_files + 'SingleTop.root' - self.electron_QCD_MC_file = path_to_files + 'QCD_Electron.root' - self.muon_QCD_MC_file = path_to_files + 'QCD_data_mu.root' - - self.SingleTop_tree_file = path_to_files + 'SingleTop_tree.root' - self.muon_QCD_tree_file = path_to_files + 'QCD_Muon_tree.root' - self.electron_QCD_MC_tree_file = path_to_files + 'QCD_Electron_tree.root' - self.muon_QCD_MC_tree_file = path_to_files + 'QCD_Muon_tree.root' -# -# -# self.higgs_file = path_to_files + 'central/TTH_Inclusive_M-125' + middle + '.root' self.categories_and_prefixes = { @@ -256,7 +249,7 @@ def __fill_defaults__( self ): 'TTJets_alphaSdown' : '', } - + # Used in 01 self.normalisation_systematics = [ 'central', @@ -343,24 +336,10 @@ def __fill_defaults__( self ): # 'Top_eta_reweight' : ['Top_eta_reweight_up', 'Top_eta_reweight_down'], } - self.met_specific_systematics = [ - 'ElectronEnUp', - 'ElectronEnDown', - 'MuonEnUp', - 'MuonEnDown', - 'TauEnUp', - 'TauEnDown', - 'UnclusteredEnUp', - 'UnclusteredEnDown', - ] - - - - self.met_systematics_suffixes = 
self.met_systematics.keys() - # now fill in the centre of mass dependent values self.__fill_defaults_13TeV__() + # Needed? self.generator_systematics = [ 'scaleup', 'scaledown', 'massup', 'massdown', @@ -437,22 +416,23 @@ def __fill_defaults__( self ): categories_and_prefixes = self.categories_and_prefixes generator_mcsamples = self.generator_mcsamples + # Used in 01 self.general_trees = {category: path_to_files + category + '/%s' + middle + prefix + '.root' for category, prefix in categories_and_prefixes.iteritems()} - self.ttbar_trees = {category: path_to_files + '/TTJets_PowhegPythia8_tree.root' + self.ttbar_trees = {category: path_to_files + 'TTJets_PowhegPythia8_tree.root' for category in self.normalisation_systematics} - self.SingleTop_trees = {category: path_to_files + '/SingleTop_tree.root' + self.SingleTop_trees = {category: path_to_files + 'SingleTop_tree.root' for category in self.normalisation_systematics} - self.VJets_trees = {category: path_to_files + '/VJets_tree.root' + self.VJets_trees = {category: path_to_files + 'VJets_tree.root' for category in self.normalisation_systematics} - self.electron_QCD_MC_trees = {category: path_to_files + '/QCD_Electron_tree.root' + self.electron_QCD_MC_trees = {category: path_to_files + 'QCD_Electron_tree.root' for category in self.normalisation_systematics} - self.muon_QCD_MC_trees = {category: path_to_files + '/QCD_Muon_tree.root' + self.muon_QCD_MC_trees = {category: path_to_files + 'QCD_Muon_tree.root' for category in self.normalisation_systematics} - self.ttbar_generator_trees = {category: path_to_files + '/TTJets_' + category + '_tree.root' + self.ttbar_generator_trees = {category: path_to_files + 'TTJets_' + category + '_tree.root' for category in generator_mcsamples} - + # Need with generator_mcsamples???? 
self.ttbar_amc_trees = path_to_files + '/TTJets_amc_tree.root' self.ttbar_madgraph_trees = path_to_files + '/TTJets_madgraph_tree.root' self.ttbar_powhegpythia8_trees = path_to_files + '/TTJets_powhegPythia8_tree.root' @@ -467,20 +447,21 @@ def __fill_defaults__( self ): self.ttbar_jerup_trees = path_to_files + '/TTJets_PowhegPythia8_plusJER_tree.root' self.ttbar_jerdown_trees = path_to_files + '/TTJets_PowhegPythia8_minusJER_tree.root' + # Needed? + self.data_muon_category_templates = { + 'central': self.data_file_muon, + 'JES_up': self.data_file_muon, + 'JES_down': self.data_file_muon + } + self.data_muon_category_templates_trees = self.data_file_muon - # self.data_muon_category_templates = { - # 'central': self.data_file_muon, - # 'JES_up': self.data_file_muon, - # 'JES_down': self.data_file_muon - # } - # self.data_muon_category_templates_trees = self.data_file_muon + self.data_electron_category_templates = { + 'central': self.data_file_electron, + 'JES_up': self.data_file_electron, + 'JES_down': self.data_file_electron, + } + self.data_electron_category_templates_trees = self.data_file_electron - # self.data_electron_category_templates = { - # 'central': self.data_file_electron, - # 'JES_up': self.data_file_electron, - # 'JES_down': self.data_file_electron, - # } - # self.data_electron_category_templates_trees = self.data_file_electron # Unfolding MC Different Generator Samples self.unfolding_powheg_pythia8_raw = path_to_unfolding_histograms + 'unfolding_TTJets_%dTeV.root' % self.centre_of_mass_energy @@ -548,22 +529,21 @@ def __fill_defaults__( self ): self.pdfWeightMax = 100 self.unfolding_pdfweights = {index : path_to_unfolding_histograms + 'unfolding_TTJets_%dTeV_asymmetric_pdfWeight_%d.root' % (self.centre_of_mass_energy, index) for index in range( self.pdfWeightMin, self.pdfWeightMax )} + # Used in 01 self.tree_path = { 'electron' : 'TTbar_plus_X_analysis/EPlusJets/Ref selection/FitVariables', 'muon' : 'TTbar_plus_X_analysis/MuPlusJets/Ref 
selection/FitVariables', } - self.qcd_control_region = { - 'electron' : 'QCDConversions', - 'muon' : 'QCD non iso mu+jets 1p5to3', + 'electron' : 'QCDConversions', + 'muon' : 'QCD non iso mu+jets 1p5to3', } - self.qcd_shape_syst_region = { - 'electron' : 'QCD non iso e+jets', - 'muon' : 'QCD non iso mu+jets 3toInf', + 'electron' : 'QCD non iso e+jets', + 'muon' : 'QCD non iso mu+jets 3toInf', } - + # Needed? self.variable_path_templates = { 'MET' : 'TTbar_plus_X_analysis/{channel}/{selection}/FitVariables/MET', 'HT' : 'TTbar_plus_X_analysis/{channel}/{selection}/FitVariables/HT', @@ -579,9 +559,9 @@ def __fill_defaults__( self ): 'abs_bjets_eta': 'TTbar_plus_X_analysis/{channel}/{selection}/Jets/abs(bjet_eta)', } + # Needed? self.electron_control_region = 'QCDConversions' self.electron_control_region_systematic = 'QCD non iso e+jets' - self.muon_control_region = 'QCD non iso mu+jets 1p5to3' self.muon_control_region_systematic = 'QCD non iso mu+jets 3toInf' @@ -589,6 +569,7 @@ def __fill_defaults__( self ): self.luminosity_scale = self.new_luminosity / self.luminosity + # Needed? 
# structure # { summary_name : [(Electron_down, Electron_up)), (TTJets_hadronisation, TTJets_hadronisation) self.typical_systematics_summary = { From 99f73bc06349307940cdbc5f66680899a00a797e Mon Sep 17 00:00:00 2001 From: Douglas Burns Date: Mon, 5 Dec 2016 14:12:51 +0000 Subject: [PATCH 39/90] Ouput to pandas --- .../xsection/01_get_ttjet_normalisation2.py | 35 +++++--- dps/utils/measurement2.py | 85 +++++++++++-------- dps/utils/pandas_utilities.py | 26 +++++- 3 files changed, 97 insertions(+), 49 deletions(-) diff --git a/dps/analysis/xsection/01_get_ttjet_normalisation2.py b/dps/analysis/xsection/01_get_ttjet_normalisation2.py index 36b31799..c98970bf 100644 --- a/dps/analysis/xsection/01_get_ttjet_normalisation2.py +++ b/dps/analysis/xsection/01_get_ttjet_normalisation2.py @@ -2,9 +2,7 @@ from argparse import ArgumentParser from dps.utils.logger import log from dps.config.xsection import XSectionConfig -from dps.utils.file_utilities import write_data_to_JSON, get_files_in_path, make_folder_if_not_exists, read_data_from_JSON -from dps.utils.hist_utilities import clean_control_region, hist_to_value_error_tuplelist, fix_overflow -from dps.utils.Calculation import combine_complex_results +from dps.utils.file_utilities import get_files_in_path, read_data_from_JSON from dps.utils.measurement2 import Measurement from dps.utils.ROOT_utils import set_root_defaults @@ -13,11 +11,9 @@ def main(): ''' - 1 - Create config file reading in templates - 2 - Create 'jobs' for each config - 3 - Read in config - 4 - Differentiate between Syst and Central - 5 - Work in QCD from data + 1 - Read Config file for normalisation measurement + 2 - Run measurement + 3 - Combine measurement before unfolding ''' results = {} @@ -45,15 +41,28 @@ def main(): measurement_files = get_files_in_path(measurement_filepath, file_ending='.json') for f in sorted(measurement_files): + if args.test: + if 'central' not in f: continue print('Processing file ' + f) # Read in Measurement JSON config = 
read_data_from_JSON(f) - # Create Measurement Class using JSON - measurement = Measurement(config) - measurement.calculate_normalisation() - measurement.save(ps) + if 'electron' in ch: + # Create Measurement Class using JSON + electron_measurement = Measurement(config) + electron_measurement.calculate_normalisation() + electron_measurement.save(ps) + elif 'muon' in ch: + # Create Measurement Class using JSON + muon_measurement = Measurement(config) + muon_measurement.calculate_normalisation() + muon_measurement.save(ps) # break + + # Combining the channels before unfolding + combined_measurement = electron_measurement + combined_measurement.combine(muon_measurement) + combined_measurement.save(ps) return def parse_arguments(): @@ -64,6 +73,8 @@ def parse_arguments(): help="set the centre of mass energy for analysis. Default = 13 [TeV]") parser.add_argument('--visiblePS', dest="visiblePS", action="store_true", help="Unfold to visible phase space") + parser.add_argument('--test', dest="test", action="store_true", + help="Unfold to visible phase space") args = parser.parse_args() return args diff --git a/dps/utils/measurement2.py b/dps/utils/measurement2.py index b39dae81..5d9be9bd 100644 --- a/dps/utils/measurement2.py +++ b/dps/utils/measurement2.py @@ -4,7 +4,7 @@ from __future__ import division from . 
import log from dps.utils.hist_utilities import hist_to_value_error_tuplelist, clean_control_region -from dps.utils.file_utilities import write_data_to_JSON + # define logger for this module meas_log = log["dps.utils.measurement"] @@ -16,25 +16,25 @@ class Measurement(): ''' @meas_log.trace() def __init__(self, measurement): - self.measurement = measurement - self.histograms = {} - self.cr_histograms = {} - self.normalisation = {} - self.variable = None - self.com = None - self.channel = None - self.name = None - self.is_normalised = False - self.central = False - self.samples = {} + self.measurement = measurement + self.histograms = {} + self.cr_histograms = {} + self.normalisation = {} + self.variable = None + self.com = None + self.channel = None + self.name = None + self.is_normalised = False + self.central = False + self.samples = {} self.__setFromConfig() def __setFromConfig(self): - self.variable = self.measurement["variable"] - self.com = self.measurement["com"] - self.channel = self.measurement["channel"] - self.samples = self.measurement["samples"] - self.name = self.measurement["name"] + self.variable = self.measurement["variable"] + self.com = self.measurement["com"] + self.channel = self.measurement["channel"] + self.samples = self.measurement["samples"] + self.name = self.measurement["name"] data_driven_qcd = self.measurement["data_driven_qcd"] # Is this central or a systematic? 
@@ -99,15 +99,15 @@ def __return_histogram(self, d_hist_info, ignoreUnderflow=True, useQCDControl=Fa from rootpy.plotting import Hist from dps.utils.hist_utilities import fix_overflow - f = d_hist_info['input_file'] - tree = d_hist_info['tree'] - qcd_tree = d_hist_info["qcd_control_region"] - var = d_hist_info['branch'] - bins = d_hist_info['bin_edges'] - lumi_scale = d_hist_info['lumi_scale'] - scale = d_hist_info['scale'] - weights = d_hist_info['weight_branches'] - selection = d_hist_info['selection'] + f = d_hist_info['input_file'] + tree = d_hist_info['tree'] + qcd_tree = d_hist_info["qcd_control_region"] + var = d_hist_info['branch'] + bins = d_hist_info['bin_edges'] + lumi_scale = d_hist_info['lumi_scale'] + scale = d_hist_info['scale'] + weights = d_hist_info['weight_branches'] + selection = d_hist_info['selection'] if useQCDControl: # replace SR tree with CR tree @@ -115,7 +115,7 @@ def __return_histogram(self, d_hist_info, ignoreUnderflow=True, useQCDControl=Fa # Remove the Lepton reweighting for the datadriven qcd (SF not derived for unisolated leptons) for weight in weights: if 'Electron' in weight: weights.remove(weight) - elif 'Muon' in weight: weights.remove(weight) + elif 'Muon' in weight: weights.remove(weight) weights = "*".join(weights) # Selection will return a weight 0 or 1 depending on whether event passes selection @@ -152,12 +152,12 @@ def __background_subtraction(self, histograms): histograms, subtract=['QCD', 'V+Jets', 'SingleTop'] ) - self.normalisation['TTJet'] = hist_to_value_error_tuplelist(ttjet_hist) - self.normalisation['data'] = hist_to_value_error_tuplelist(histograms['data']) - # self.normalisation['TTBar'] = hist_to_value_error_tuplelist(histograms['TTBar']) + self.normalisation['TTJet'] = hist_to_value_error_tuplelist(ttjet_hist) + self.normalisation['data'] = hist_to_value_error_tuplelist(histograms['data']) + # self.normalisation['TTBar'] = hist_to_value_error_tuplelist(histograms['TTBar']) self.normalisation['SingleTop'] = 
hist_to_value_error_tuplelist(histograms['SingleTop']) - self.normalisation['V+Jets'] = hist_to_value_error_tuplelist(histograms['V+Jets']) - self.normalisation['QCD'] = hist_to_value_error_tuplelist(histograms['QCD']) + self.normalisation['V+Jets'] = hist_to_value_error_tuplelist(histograms['V+Jets']) + self.normalisation['QCD'] = hist_to_value_error_tuplelist(histograms['QCD']) return def calculate_normalisation(self): @@ -174,7 +174,6 @@ def calculate_normalisation(self): for sample, values in self.normalisation.items(): new_values = [(round(v, 1), round(e, 1)) for v, e in values] self.normalisation[sample] = new_values - self.is_normalised = True return @@ -184,6 +183,8 @@ def save(self, phase_space): I would like to change this to a pandas Dataframe at somepoint after a few issues have been worked out ''' + from dps.utils.pandas_utilities import write_normalisation_to_df + from dps.utils.file_utilities import make_folder_if_not_exists # If normalisation hasnt been calculated - then go calculate it! 
if not self.is_normalised: self.calculate_normalisation() @@ -194,6 +195,7 @@ def save(self, phase_space): ps = phase_space, cat = self.name, ) + make_folder_if_not_exists(output_folder) file_template = '{type}_{channel}.txt' f = file_template.format( @@ -201,8 +203,23 @@ def save(self, phase_space): channel=self.channel ) - write_data_to_JSON( + write_normalisation_to_df( self.normalisation, output_folder + f ) return + + def combine(self, other): + ''' + Combines the electron and muon measurements + ''' + from dps.utils.Calculation import combine_complex_results + if not self.is_normalised or not other.is_normalised: + meas_log.warn( + 'One of the TTJetNormalisations does not have a normalisation, aborting.') + return + + self.normalisation = combine_complex_results( + self.normalisation, other.normalisation) + self.channel = 'combined' + return diff --git a/dps/utils/pandas_utilities.py b/dps/utils/pandas_utilities.py index 291ab140..f196f850 100644 --- a/dps/utils/pandas_utilities.py +++ b/dps/utils/pandas_utilities.py @@ -20,13 +20,13 @@ def list_to_series(l): s = pd.Series( l ) return s -def df_to_file(filename, df): +def df_to_file(filename, df, index=True): ''' Save a dataframe to an output text file Nicely human readable ''' with open(filename,'w') as f: - df.to_string(f, index=True) + df.to_string(f, index=index) f.write('\n') print('DataFrame written to {}'.format(f)) f.close() @@ -64,4 +64,24 @@ def divide_by_series(s1, s2): Divide one series by another ''' s = s1.div(s2) - return s \ No newline at end of file + return s + +def write_normalisation_to_df( d_norm, filename ): + ''' + Writing the output of 01 to dataframe + ''' + # First create the dataframe + df = dict_to_df(d_norm) + + # pandas really cant handle reading in tuples. 
Have to split here + for col in df.columns: + df[[col, col+'_Unc']] = df[col].apply(pd.Series) + # Make columns alphabetical for easy reading + l=df.columns.tolist() + l.sort() + print l + df = df[l] + + # Write dataframe + df_to_file(filename, df, index=False) + return \ No newline at end of file From 23ed2b9c9501fcbeed3bff9b79b721d038fc15be Mon Sep 17 00:00:00 2001 From: Douglas Burns Date: Mon, 5 Dec 2016 14:13:24 +0000 Subject: [PATCH 40/90] Adds MC lumi scaling to config creation --- dps/analysis/xsection/create_measurement2p0.py | 11 ++++++++--- 1 file changed, 8 insertions(+), 3 deletions(-) diff --git a/dps/analysis/xsection/create_measurement2p0.py b/dps/analysis/xsection/create_measurement2p0.py index 766a9c06..dc5e0eb6 100644 --- a/dps/analysis/xsection/create_measurement2p0.py +++ b/dps/analysis/xsection/create_measurement2p0.py @@ -150,12 +150,17 @@ def get_sample_info(options, xsec_config, sample): sample_info["bin_edges"] = None # Lumi Scale (Rate) - sample_info["lumi_scale"]=1.0 + # Normal lumi scale + ls = 1.0 + # If want to rescale MC to new lumi + if 'data' not in sample: + ls = xsec_config.luminosity_scale + sample_info["lumi_scale"]=ls lumi_scale = xsec_config.rate_changing_systematics['luminosity'] if options['category'] == 'luminosity+': - sample_info["lumi_scale"]= 1.0 + 1.0*lumi_scale + sample_info["lumi_scale"]= ls*(1+lumi_scale) elif options['category'] == 'luminosity-': - sample_info["lumi_scale"]= 1.0 - 1.0*lumi_scale + sample_info["lumi_scale"]= ls*(1-lumi_scale) # Generator Scale (Rate) sample_info["scale"]=1.0 From 3f59bde61443f4b3ae91b0e4c2939ec81d237e5c Mon Sep 17 00:00:00 2001 From: Douglas Burns Date: Mon, 5 Dec 2016 14:25:00 +0000 Subject: [PATCH 41/90] correctly find files, update lumi scalin --- dps/config/xsection.py | 106 +++++++++++++++++++++-------------------- 1 file changed, 54 insertions(+), 52 deletions(-) diff --git a/dps/config/xsection.py b/dps/config/xsection.py index 96cb6bc4..716a211e 100644 --- 
a/dps/config/xsection.py +++ b/dps/config/xsection.py @@ -134,7 +134,7 @@ def __fill_defaults__( self ): self.analysis_types = { 'electron' :'EPlusJets', 'muon' :'MuPlusJets', - 'combined' : 'combined' + 'combined' : 'combined', } # Needed? Where? @@ -164,8 +164,8 @@ def __fill_defaults__( self ): # self.data_file_muon = path_to_files + 'data_muon_tree.root' # self.data_file_electron = path_to_files + 'data_electron_tree.root' - self.data_file_muon = '/hdfs/TopQuarkGroup/db0268/data_muon_tree.root' - self.data_file_electron = '/hdfs/TopQuarkGroup/db0268/data_electron_tree.root' + self.data_file_muon = '/hdfs/TopQuarkGroup/db0268/0.1.2/atOutput/combined/data_muon_tree.root' + self.data_file_electron = '/hdfs/TopQuarkGroup/db0268/0.1.2/atOutput/combined/data_electron_tree.root' self.higgs_file = path_to_files + 'central/TTH_Inclusive_M-125' + middle + '.root' @@ -361,39 +361,39 @@ def __fill_defaults__( self ): 'madgraph' ] - # self.rate_changing_systematics_values = {} - # for systematic in self.rate_changing_systematics.keys(): - # affected_samples = XSectionConfig.samples # all samples - # if 'SingleTop' in systematic: - # affected_samples = ['SingleTop'] - # if 'TTJet' in systematic: - # affected_samples = ['TTJet'] - # if 'VJets' in systematic: - # affected_samples = ['V+Jets'] - # if 'QCD' in systematic: - # affected_samples = ['QCD'] - - # sp = dps.utils.measurement.Systematic( - # systematic + '+', - # # systematic + '_up', - # stype = dps.utils.measurement.Systematic.RATE, - # affected_samples = affected_samples, - # scale = 1 + self.rate_changing_systematics[systematic], - # ) - # scale = 1 - self.rate_changing_systematics[systematic] - # if scale <= 0: scale = 10e-5 - - # sm = dps.utils.measurement.Systematic( - # systematic + '-', - # # systematic + '_down', - # stype = dps.utils.measurement.Systematic.RATE, - # affected_samples = affected_samples, - # scale = scale, - # ) - # self.rate_changing_systematics_values[sp.name] = sp - # 
self.rate_changing_systematics_values[sm.name] = sm - - # self.rate_changing_systematics_names = self.rate_changing_systematics_values.keys() + self.rate_changing_systematics_values = {} + for systematic in self.rate_changing_systematics.keys(): + affected_samples = XSectionConfig.samples # all samples + if 'SingleTop' in systematic: + affected_samples = ['SingleTop'] + if 'TTJet' in systematic: + affected_samples = ['TTJet'] + if 'VJets' in systematic: + affected_samples = ['V+Jets'] + if 'QCD' in systematic: + affected_samples = ['QCD'] + + sp = dps.utils.measurement.Systematic( + systematic + '+', + # systematic + '_up', + stype = dps.utils.measurement.Systematic.RATE, + affected_samples = affected_samples, + scale = 1 + self.rate_changing_systematics[systematic], + ) + scale = 1 - self.rate_changing_systematics[systematic] + if scale <= 0: scale = 10e-5 + + sm = dps.utils.measurement.Systematic( + systematic + '-', + # systematic + '_down', + stype = dps.utils.measurement.Systematic.RATE, + affected_samples = affected_samples, + scale = scale, + ) + self.rate_changing_systematics_values[sp.name] = sp + self.rate_changing_systematics_values[sm.name] = sm + + self.rate_changing_systematics_names = self.rate_changing_systematics_values.keys() self.topMass_systematics = [ 'TTJets_massup', 'TTJets_massdown'] @@ -404,6 +404,7 @@ def __fill_defaults__( self ): ] self.topMassUncertainty = 1.0 # GeV from https://twiki.cern.ch/twiki/bin/view/LHCPhysics/TtbarNNLO + # Needed? 
self.central_general_template = path_to_files + 'central/%s' + middle + '.root' self.generator_systematic_vjets_templates = {} for systematic in self.generator_systematics: @@ -417,21 +418,21 @@ def __fill_defaults__( self ): generator_mcsamples = self.generator_mcsamples # Used in 01 - self.general_trees = {category: path_to_files + category + '/%s' + middle + prefix + '.root' - for category, prefix in categories_and_prefixes.iteritems()} - self.ttbar_trees = {category: path_to_files + 'TTJets_PowhegPythia8_tree.root' - for category in self.normalisation_systematics} - self.SingleTop_trees = {category: path_to_files + 'SingleTop_tree.root' - for category in self.normalisation_systematics} - self.VJets_trees = {category: path_to_files + 'VJets_tree.root' - for category in self.normalisation_systematics} - self.electron_QCD_MC_trees = {category: path_to_files + 'QCD_Electron_tree.root' - for category in self.normalisation_systematics} - self.muon_QCD_MC_trees = {category: path_to_files + 'QCD_Muon_tree.root' - for category in self.normalisation_systematics} - self.ttbar_generator_trees = {category: path_to_files + 'TTJets_' + category + '_tree.root' - for category in generator_mcsamples} - + self.general_trees = { + category: path_to_files + category + '/%s' + middle + prefix + '.root' for category, prefix in categories_and_prefixes.iteritems()} + self.ttbar_trees = { + category: path_to_files + 'TTJets_PowhegPythia8_tree.root' for category in self.normalisation_systematics} + self.SingleTop_trees = { + category: path_to_files + 'SingleTop_tree.root' for category in self.normalisation_systematics} + self.VJets_trees = { + category: path_to_files + 'VJets_tree.root' for category in self.normalisation_systematics} + self.electron_QCD_MC_trees = { + category: path_to_files + 'QCD_Electron_tree.root' for category in self.normalisation_systematics} + self.muon_QCD_MC_trees = { + category: path_to_files + 'QCD_Muon_tree.root' for category in 
self.normalisation_systematics} + self.ttbar_generator_trees = { + category: path_to_files + 'TTJets_' + category + '_tree.root' for category in generator_mcsamples} + # Need with generator_mcsamples???? self.ttbar_amc_trees = path_to_files + '/TTJets_amc_tree.root' self.ttbar_madgraph_trees = path_to_files + '/TTJets_madgraph_tree.root' @@ -447,6 +448,7 @@ def __fill_defaults__( self ): self.ttbar_jerup_trees = path_to_files + '/TTJets_PowhegPythia8_plusJER_tree.root' self.ttbar_jerdown_trees = path_to_files + '/TTJets_PowhegPythia8_minusJER_tree.root' + # Needed? self.data_muon_category_templates = { 'central': self.data_file_muon, @@ -631,7 +633,7 @@ def __fill_defaults_13TeV__( self ): middle = self.middle path_to_files = self.path_to_files - self.new_luminosity = 36459 + self.new_luminosity = 36260 self.ttbar_xsection = 831.76 # pb self.rate_changing_systematics = {#TODO check where this is used From 2ea133571e765b94627b55edd2d0cb55d2833b1c Mon Sep 17 00:00:00 2001 From: Douglas Burns Date: Mon, 5 Dec 2016 14:51:14 +0000 Subject: [PATCH 42/90] 02 options->args --- .../xsection/02_unfold_and_measure.py | 70 +++++++++++-------- 1 file changed, 39 insertions(+), 31 deletions(-) diff --git a/dps/analysis/xsection/02_unfold_and_measure.py b/dps/analysis/xsection/02_unfold_and_measure.py index 93a72b8b..ef07e981 100644 --- a/dps/analysis/xsection/02_unfold_and_measure.py +++ b/dps/analysis/xsection/02_unfold_and_measure.py @@ -1,6 +1,6 @@ # general from __future__ import division -from optparse import OptionParser +from argparse import ArgumentParser # from array import array # rootpy from rootpy.io import File @@ -20,7 +20,7 @@ # from ROOT import TGraph, TSpline3, TUnfoldDensity def unfold_results( results, category, channel, tau_value, h_truth, h_measured, h_response, h_fakes, method, visiblePS ): - global variable, path_to_JSON, options + global variable, path_to_JSON, args edges = reco_bin_edges_full[variable] if visiblePS: edges = reco_bin_edges_vis[variable] 
@@ -35,7 +35,7 @@ def unfold_results( results, category, channel, tau_value, h_truth, h_measured, if not category == 'central': unfoldCfg.error_treatment = 0 else: - unfoldCfg.error_treatment = options.error_treatment + unfoldCfg.error_treatment = args.error_treatment h_unfolded_data = unfolding.unfold() @@ -367,45 +367,53 @@ def calculate_normalised_xsections( normalisation, category, channel, normalise_ filename = filename.replace( 'xsection_normalised', 'xsection_normalised_to_one' ) write_data_to_JSON( normalised_xsection, filename ) -if __name__ == '__main__': - set_root_defaults( msg_ignore_level = 3001 ) - # setup - parser = OptionParser() - parser.add_option( "-p", "--path", dest = "path", default = 'data/normalisation/background_subtraction/', + +def parse_arguments(): + parser = ArgumentParser(__doc__) + parser.add_argument( "-p", "--path", dest = "path", default = 'data/normalisation/background_subtraction/', help = "set path to JSON files" ) - parser.add_option( "-v", "--variable", dest = "variable", default = 'MET', + parser.add_argument( "-v", "--variable", dest = "variable", default = 'MET', help = "set the variable to analyse (MET, HT, ST, MT)" ) - parser.add_option( "-b", "--bjetbin", dest = "bjetbin", default = '2m', + parser.add_argument( "-b", "--bjetbin", dest = "bjetbin", default = '2m', help = "set b-jet multiplicity for analysis. 
Options: exclusive: 0-3, inclusive (N or more): 0m, 1m, 2m, 3m, 4m" ) - parser.add_option( "-m", "--metType", dest = "metType", default = 'type1', + parser.add_argument( "-m", "--metType", dest = "metType", default = 'type1', help = "set MET type for analysis of MET, ST or MT" ) - parser.add_option( "-u", "--unfolding_method", dest = "unfolding_method", default = 'TUnfold', + parser.add_argument( "-u", "--unfolding_method", dest = "unfolding_method", default = 'TUnfold', help = "Unfolding method: RooUnfoldSvd (default), TSVDUnfold, RooUnfoldTUnfold, RooUnfoldInvert, RooUnfoldBinByBin, RooUnfoldBayes" ) - parser.add_option( "-e", "--error_treatment", type = 'int', + parser.add_argument( "-e", "--error_treatment", type = 'int', dest = "error_treatment", default = unfoldCfg.error_treatment, help = "parameter for error treatment in RooUnfold" ) - parser.add_option( "-c", "--centre-of-mass-energy", dest = "CoM", default = 13, + parser.add_argument( "-c", "--centre-of-mass-energy", dest = "CoM", default = 13, help = "set the centre of mass energy for analysis. 
Default = 13 [TeV]", type = int ) - parser.add_option( "-C", "--combine-before-unfolding", dest = "combine_before_unfolding", action = "store_true", + parser.add_argument( "-C", "--combine-before-unfolding", dest = "combine_before_unfolding", action = "store_true", help = "Perform combination of channels before unfolding" ) - parser.add_option( "-w", "--write-unfolding-objects", dest = "write_unfolding_objects", action = "store_true", + parser.add_argument( "-w", "--write-unfolding-objects", dest = "write_unfolding_objects", action = "store_true", help = "Write out the unfolding objects (D, SV)" ) - parser.add_option( '--test', dest = "test", action = "store_true", + parser.add_argument( '--test', dest = "test", action = "store_true", help = "Just run the central measurement" ) - parser.add_option( '--ptreweight', dest = "ptreweight", action = "store_true", + parser.add_argument( '--ptreweight', dest = "ptreweight", action = "store_true", help = "Use pt-reweighted MadGraph for the measurement" ) - parser.add_option( '--visiblePS', dest = "visiblePS", action = "store_true", + parser.add_argument( '--visiblePS', dest = "visiblePS", action = "store_true", help = "Unfold to visible phase space" ) + args = parser.parse_args() + return args + + + + +if __name__ == '__main__': + set_root_defaults( msg_ignore_level = 3001 ) + # setup + args = parse_arguments() - ( options, args ) = parser.parse_args() - measurement_config = XSectionConfig( options.CoM ) - run_just_central = options.test - use_ptreweight = options.ptreweight + measurement_config = XSectionConfig( args.CoM ) + run_just_central = args.test + use_ptreweight = args.ptreweight # caching of variables for faster access translate_options = measurement_config.translate_options ttbar_theory_systematic_prefix = measurement_config.ttbar_theory_systematic_prefix - centre_of_mass = options.CoM + centre_of_mass = args.CoM luminosity = measurement_config.luminosity * measurement_config.luminosity_scale ttbar_xsection = 
measurement_config.ttbar_xsection path_to_files = measurement_config.path_to_files @@ -464,23 +472,23 @@ def calculate_normalised_xsections( normalisation, category, channel, normalise_ # file_for_madgraphMLM = File( measurement_config.unfolding_madgraphMLM, 'read') file_for_powheg_herwig = File( measurement_config.unfolding_powheg_herwig, 'read' ) - variable = options.variable + variable = args.variable tau_value_electron = measurement_config.tau_values_electron[variable] tau_value_muon = measurement_config.tau_values_muon[variable] tau_value_combined = measurement_config.tau_values_combined[variable] - visiblePS = options.visiblePS + visiblePS = args.visiblePS phase_space = 'FullPS' if visiblePS: phase_space = "VisiblePS" - unfoldCfg.error_treatment = options.error_treatment - method = options.unfolding_method - combine_before_unfolding = options.combine_before_unfolding - b_tag_bin = translate_options[options.bjetbin] + unfoldCfg.error_treatment = args.error_treatment + method = args.unfolding_method + combine_before_unfolding = args.combine_before_unfolding + b_tag_bin = translate_options[args.bjetbin] path_to_JSON = '{path}/{com}TeV/{variable}/{phase_space}/'.format( - path = options.path, + path = args.path, com = measurement_config.centre_of_mass_energy, variable = variable, phase_space = phase_space, From 7fd4f98c489f9025892b02a61af729614f07bf39 Mon Sep 17 00:00:00 2001 From: Douglas Burns Date: Tue, 6 Dec 2016 14:02:33 +0000 Subject: [PATCH 43/90] rearrange02. 
Muon channel only for now - Need to deal with combined BeforeUnfolding channel --- .../xsection/02_unfold_and_measure.py | 761 +++++++++--------- 1 file changed, 377 insertions(+), 384 deletions(-) diff --git a/dps/analysis/xsection/02_unfold_and_measure.py b/dps/analysis/xsection/02_unfold_and_measure.py index ef07e981..34226dc7 100644 --- a/dps/analysis/xsection/02_unfold_and_measure.py +++ b/dps/analysis/xsection/02_unfold_and_measure.py @@ -1,7 +1,6 @@ # general from __future__ import division -from optparse import ArgumentParser -# from array import array +from argparse import ArgumentParser # rootpy from rootpy.io import File from rootpy.plotting import Hist2D @@ -14,22 +13,82 @@ from dps.utils.hist_utilities import hist_to_value_error_tuplelist, \ value_error_tuplelist_to_hist from dps.utils.Unfolding import Unfolding, get_unfold_histogram_tuple, removeFakes -from dps.utils.file_utilities import read_data_from_JSON, write_data_to_JSON -from copy import deepcopy from dps.utils.ROOT_utils import set_root_defaults -# from ROOT import TGraph, TSpline3, TUnfoldDensity +from dps.utils.pandas_utilities import read_tuple_from_file, write_tuple_to_df +from copy import deepcopy + +def get_unfolding_files(measurement_config): + ''' + Return the set of unfolding files to use + ''' + unfolding_files = {} + + unfolding_files['file_for_unfolding'] = File( measurement_config.unfolding_central, 'read' ) + + unfolding_files['files_for_pdfs'] = { + 'PDFWeights_%d' % (index) : File ( measurement_config.unfolding_pdfweights[index] ) for index in range( 0, 100 ) + } + + unfolding_files['file_for_scaledown'] = File( measurement_config.unfolding_scale_down, 'read' ) + unfolding_files['file_for_scaleup'] = File( measurement_config.unfolding_scale_up, 'read' ) + + unfolding_files['file_for_renormalisationdown'] = File( measurement_config.unfolding_renormalisation_down, 'read' ) + unfolding_files['file_for_renormalisationup'] = File( measurement_config.unfolding_renormalisation_up, 
'read' ) + unfolding_files['file_for_factorisationdown'] = File( measurement_config.unfolding_factorisation_down, 'read' ) + unfolding_files['file_for_factorisationup'] = File( measurement_config.unfolding_factorisation_up, 'read' ) + unfolding_files['file_for_combineddown'] = File( measurement_config.unfolding_combined_down, 'read' ) + unfolding_files['file_for_combinedup'] = File( measurement_config.unfolding_combined_up, 'read' ) + unfolding_files['file_for_alphaSdown'] = File( measurement_config.unfolding_alphaS_down, 'read' ) + unfolding_files['file_for_alphaSup'] = File( measurement_config.unfolding_alphaS_up, 'read' ) + + unfolding_files['file_for_massdown'] = File( measurement_config.unfolding_mass_down, 'read' ) + unfolding_files['file_for_massup'] = File( measurement_config.unfolding_mass_up, 'read' ) + + unfolding_files['file_for_jesdown'] = File( measurement_config.unfolding_jes_down, 'read' ) + unfolding_files['file_for_jesup'] = File( measurement_config.unfolding_jes_up, 'read' ) + unfolding_files['file_for_jerdown'] = File( measurement_config.unfolding_jer_down, 'read' ) + unfolding_files['file_for_jerup'] = File( measurement_config.unfolding_jer_up, 'read' ) + + unfolding_files['file_for_bjetdown'] = File( measurement_config.unfolding_bjet_down, 'read' ) + unfolding_files['file_for_bjetup'] = File( measurement_config.unfolding_bjet_up, 'read' ) + unfolding_files['file_for_lightjetdown'] = File( measurement_config.unfolding_lightjet_down, 'read' ) + unfolding_files['file_for_lightjetup'] = File( measurement_config.unfolding_lightjet_up, 'read' ) + + unfolding_files['file_for_LeptonDown'] = File( measurement_config.unfolding_Lepton_down, 'read' ) + unfolding_files['file_for_LeptonUp'] = File( measurement_config.unfolding_Lepton_up, 'read' ) + + unfolding_files['file_for_ElectronEnDown'] = File( measurement_config.unfolding_ElectronEn_down, 'read' ) + unfolding_files['file_for_ElectronEnUp'] = File( measurement_config.unfolding_ElectronEn_up, 'read' ) 
+ unfolding_files['file_for_MuonEnDown'] = File( measurement_config.unfolding_MuonEn_down, 'read' ) + unfolding_files['file_for_MuonEnUp'] = File( measurement_config.unfolding_MuonEn_up, 'read' ) + unfolding_files['file_for_TauEnDown'] = File( measurement_config.unfolding_TauEn_down, 'read' ) + unfolding_files['file_for_TauEnUp'] = File( measurement_config.unfolding_TauEn_up, 'read' ) + unfolding_files['file_for_UnclusteredEnDown'] = File( measurement_config.unfolding_UnclusteredEn_down, 'read' ) + unfolding_files['file_for_UnclusteredEnUp'] = File( measurement_config.unfolding_UnclusteredEn_up, 'read' ) + + unfolding_files['file_for_PUUp'] = File( measurement_config.unfolding_PUSystematic_up, 'read') + unfolding_files['file_for_PUDown'] = File( measurement_config.unfolding_PUSystematic_down, 'read') + + unfolding_files['file_for_powhegPythia8'] = File( measurement_config.unfolding_powheg_pythia8, 'read') + unfolding_files['file_for_amcatnlo'] = File( measurement_config.unfolding_amcatnlo, 'read') + unfolding_files['file_for_amcatnlo_herwig'] = File( measurement_config.unfolding_amcatnlo_herwig, 'read') + unfolding_files['file_for_madgraphMLM'] = File( measurement_config.unfolding_madgraphMLM, 'read') + unfolding_files['file_for_powheg_herwig'] = File( measurement_config.unfolding_powheg_herwig, 'read' ) + return unfolding_files + def unfold_results( results, category, channel, tau_value, h_truth, h_measured, h_response, h_fakes, method, visiblePS ): - global variable, path_to_JSON, args + global variable, path_to_DF, args edges = reco_bin_edges_full[variable] if visiblePS: edges = reco_bin_edges_vis[variable] h_data = value_error_tuplelist_to_hist( results, edges ) # Remove fakes before unfolding - h_data = removeFakes( h_measured, h_fakes, h_data ) + h_data_no_fakes = removeFakes( h_measured, h_fakes, h_data ) - unfolding = Unfolding( h_data, h_truth, h_measured, h_response, h_fakes, method = method, tau = tau_value ) + # unfold + unfolding = Unfolding( 
h_data_no_fakes, h_truth, h_measured, h_response, h_fakes, method = method, tau = tau_value ) # turning off the unfolding errors for systematic samples if not category == 'central': @@ -41,99 +100,98 @@ def unfold_results( results, category, channel, tau_value, h_truth, h_measured, # print "h_response bin edges : ", h_response # print "h_unfolded_data bin edges : ", h_unfolded_data + h_data_no_fakes = h_data_no_fakes.rebinned(2) + h_data = h_data.rebinned(2) del unfolding - return hist_to_value_error_tuplelist( h_unfolded_data ), hist_to_value_error_tuplelist( h_data ) + return hist_to_value_error_tuplelist( h_data ), hist_to_value_error_tuplelist( h_unfolded_data ), hist_to_value_error_tuplelist( h_data_no_fakes ) -def data_covariance_matrix( data ): - values = list( data ) - get_bin_error = data.GetBinError - cov_matrix = Hist2D( len( values ), -10, 10, len( values ), -10, 10, type = 'D' ) - for bin_i in range( len( values ) ): - error = get_bin_error( bin_i + 1 ) - cov_matrix.SetBinContent( bin_i + 1, bin_i + 1, error * error ) - return cov_matrix +# def data_covariance_matrix( data ): +# values = list( data ) +# get_bin_error = data.GetBinError +# cov_matrix = Hist2D( len( values ), -10, 10, len( values ), -10, 10, type = 'D' ) +# for bin_i in range( len( values ) ): +# error = get_bin_error( bin_i + 1 ) +# cov_matrix.SetBinContent( bin_i + 1, bin_i + 1, error * error ) +# return cov_matrix def get_unfolded_normalisation( TTJet_normalisation_results, category, channel, tau_value, visiblePS ): - global centre_of_mass, luminosity, ttbar_xsection, method, variable, path_to_JSON - global file_for_unfolding, file_for_ptreweight, files_for_pdfs - global file_for_powhegPythia8, file_for_powheg_herwig, file_for_madgraphMLM, file_for_amcatnlo, file_for_amcatnlo_herwig - global file_for_scaledown, file_for_scaleup - global file_for_massdown, file_for_massup - global pdf_uncertainties + global com, luminosity, ttbar_xsection, method, variable, path_to_DF + global 
unfolding_files, file_for_ptreweight + # Add in this option? global use_ptreweight - files_for_systematics = { - 'TTJets_scaledown' : file_for_scaledown, - 'TTJets_scaleup' : file_for_scaleup, - 'TTJets_massdown' : file_for_massdown, - 'TTJets_massup' : file_for_massup, + 'TTJets_scaledown' : unfolding_files['file_for_scaledown'], + 'TTJets_scaleup' : unfolding_files['file_for_scaleup'], + 'TTJets_massdown' : unfolding_files['file_for_massdown'], + 'TTJets_massup' : unfolding_files['file_for_massup'], - 'TTJets_factorisationdown' : file_for_factorisationdown, - 'TTJets_factorisationup' : file_for_factorisationup, - 'TTJets_renormalisationdown' : file_for_renormalisationdown, - 'TTJets_renormalisationup' : file_for_renormalisationup, - 'TTJets_combineddown' : file_for_combineddown, - 'TTJets_combinedup' : file_for_combinedup, - 'TTJets_alphaSdown' : file_for_alphaSdown, - 'TTJets_alphaSup' : file_for_alphaSup, - - 'JES_down' : file_for_jesdown, - 'JES_up' : file_for_jesup, - - 'JER_down' : file_for_jerdown, - 'JER_up' : file_for_jerup, - - 'BJet_up' : file_for_bjetup, - 'BJet_down' : file_for_bjetdown, - - 'LightJet_up' : file_for_lightjetup, - 'LightJet_down' : file_for_lightjetdown, - - 'TTJets_hadronisation' : file_for_powheg_herwig, - 'TTJets_NLOgenerator' : file_for_amcatnlo, - - 'ElectronEnUp' : file_for_ElectronEnUp, - 'ElectronEnDown' : file_for_ElectronEnDown, - 'MuonEnUp' : file_for_MuonEnUp, - 'MuonEnDown' : file_for_MuonEnDown, - 'TauEnUp' : file_for_TauEnUp, - 'TauEnDown' : file_for_TauEnDown, - 'UnclusteredEnUp' : file_for_UnclusteredEnUp, - 'UnclusteredEnDown' : file_for_UnclusteredEnDown, - - 'Muon_up' : file_for_LeptonUp, - 'Muon_down' : file_for_LeptonDown, - 'Electron_up' : file_for_LeptonUp, - 'Electron_down' : file_for_LeptonDown, - - 'PileUp_up' : file_for_PUUp, - 'PileUp_down' : file_for_PUDown, + 'TTJets_factorisationdown' : unfolding_files['file_for_factorisationdown'], + 'TTJets_factorisationup' : 
unfolding_files['file_for_factorisationup'], + 'TTJets_renormalisationdown' : unfolding_files['file_for_renormalisationdown'], + 'TTJets_renormalisationup' : unfolding_files['file_for_renormalisationup'], + 'TTJets_combineddown' : unfolding_files['file_for_combineddown'], + 'TTJets_combinedup' : unfolding_files['file_for_combinedup'], + 'TTJets_alphaSdown' : unfolding_files['file_for_alphaSdown'], + 'TTJets_alphaSup' : unfolding_files['file_for_alphaSup'], + + 'JES_down' : unfolding_files['file_for_jesdown'], + 'JES_up' : unfolding_files['file_for_jesup'], + + 'JER_down' : unfolding_files['file_for_jerdown'], + 'JER_up' : unfolding_files['file_for_jerup'], + + 'BJet_up' : unfolding_files['file_for_bjetup'], + 'BJet_down' : unfolding_files['file_for_bjetdown'], + + 'LightJet_up' : unfolding_files['file_for_lightjetup'], + 'LightJet_down' : unfolding_files['file_for_lightjetdown'], + + 'TTJets_hadronisation' : unfolding_files['file_for_powheg_herwig'], + 'TTJets_NLOgenerator' : unfolding_files['file_for_amcatnlo'], + + 'ElectronEnUp' : unfolding_files['file_for_ElectronEnUp'], + 'ElectronEnDown' : unfolding_files['file_for_ElectronEnDown'], + 'MuonEnUp' : unfolding_files['file_for_MuonEnUp'], + 'MuonEnDown' : unfolding_files['file_for_MuonEnDown'], + 'TauEnUp' : unfolding_files['file_for_TauEnUp'], + 'TauEnDown' : unfolding_files['file_for_TauEnDown'], + 'UnclusteredEnUp' : unfolding_files['file_for_UnclusteredEnUp'], + 'UnclusteredEnDown' : unfolding_files['file_for_UnclusteredEnDown'], + + 'Muon_up' : unfolding_files['file_for_LeptonUp'], + 'Muon_down' : unfolding_files['file_for_LeptonDown'], + 'Electron_up' : unfolding_files['file_for_LeptonUp'], + 'Electron_down' : unfolding_files['file_for_LeptonDown'], + + 'PileUp_up' : unfolding_files['file_for_PUUp'], + 'PileUp_down' : unfolding_files['file_for_PUDown'], } h_truth, h_measured, h_response, h_fakes = None, None, None, None - # Systematics where you change the response matrix + # Uncertainties by changing the 
response matrix if category in files_for_systematics : print 'Doing category',category,'by changing response matrix' h_truth, h_measured, h_response, h_fakes = get_unfold_histogram_tuple( inputfile = files_for_systematics[category], variable = variable, channel = channel, - centre_of_mass = centre_of_mass, + com = com, ttbar_xsection = ttbar_xsection, luminosity = luminosity, load_fakes = True, visiblePS = visiblePS, ) + # PDF Uncertainties elif category in pdf_uncertainties: print 'Doing category',category,'by changing response matrix' h_truth, h_measured, h_response, h_fakes = get_unfold_histogram_tuple( - inputfile = files_for_pdfs[category], + inputfile = unfolding_files['files_for_pdfs'][category], variable = variable, channel = channel, - centre_of_mass = centre_of_mass, + com = com, ttbar_xsection = ttbar_xsection, luminosity = luminosity, load_fakes = True, @@ -142,10 +200,10 @@ def get_unfolded_normalisation( TTJet_normalisation_results, category, channel, # Central and systematics where you just change input MC else: h_truth, h_measured, h_response, h_fakes = get_unfold_histogram_tuple( - inputfile = file_for_unfolding, + inputfile = unfolding_files['file_for_unfolding'], variable = variable, channel = channel, - centre_of_mass = centre_of_mass, + com = com, ttbar_xsection = ttbar_xsection, luminosity = luminosity, load_fakes = True, @@ -153,7 +211,7 @@ def get_unfolded_normalisation( TTJet_normalisation_results, category, channel, ) # Unfold current normalisation measurements - TTJet_normalisation_results_unfolded, TTJet_normalisation_results_withoutFakes = unfold_results( + TTJet_normalisation_results, TTJet_normalisation_results_unfolded, TTJet_normalisation_results_withoutFakes = unfold_results( TTJet_normalisation_results, category, channel, @@ -166,11 +224,11 @@ def get_unfolded_normalisation( TTJet_normalisation_results, category, channel, visiblePS, ) - # Store measurements + # Store TTJet yields after background subtraction, after background 
subtraction without fakes and after Unfolding normalisation_unfolded = { - 'TTJet_measured' : TTJet_normalisation_results, - 'TTJet_measured_withoutFakes' : TTJet_normalisation_results_withoutFakes, - 'TTJet_unfolded' : TTJet_normalisation_results_unfolded + 'TTJet_measured' : TTJet_normalisation_results, + 'TTJet_measured_withoutFakes' : TTJet_normalisation_results_withoutFakes, + 'TTJet_unfolded' : TTJet_normalisation_results_unfolded, } # Return truth of different generators for comparison to data in 04 @@ -179,7 +237,7 @@ def get_unfolded_normalisation( TTJet_normalisation_results, category, channel, inputfile = file_for_massdown, variable = variable, channel = channel, - centre_of_mass = centre_of_mass, + com = com, ttbar_xsection = ttbar_xsection, luminosity = luminosity, load_fakes = True, @@ -189,7 +247,7 @@ def get_unfolded_normalisation( TTJet_normalisation_results, category, channel, inputfile = file_for_massup, variable = variable, channel = channel, - centre_of_mass = centre_of_mass, + com = com, ttbar_xsection = ttbar_xsection, luminosity = luminosity, load_fakes = True, @@ -199,7 +257,7 @@ def get_unfolded_normalisation( TTJet_normalisation_results, category, channel, inputfile = file_for_powhegPythia8, variable = variable, channel = channel, - centre_of_mass = centre_of_mass, + com = com, ttbar_xsection = ttbar_xsection, luminosity = luminosity, load_fakes = True, @@ -209,7 +267,7 @@ def get_unfolded_normalisation( TTJet_normalisation_results, category, channel, inputfile = file_for_amcatnlo, variable = variable, channel = channel, - centre_of_mass = centre_of_mass, + com = com, ttbar_xsection = ttbar_xsection, luminosity = luminosity, load_fakes = True, @@ -219,7 +277,7 @@ def get_unfolded_normalisation( TTJet_normalisation_results, category, channel, inputfile = file_for_madgraphMLM, variable = variable, channel = channel, - centre_of_mass = centre_of_mass, + com = com, ttbar_xsection = ttbar_xsection, luminosity = luminosity, load_fakes = True, 
@@ -229,144 +287,167 @@ def get_unfolded_normalisation( TTJet_normalisation_results, category, channel, inputfile = file_for_powheg_herwig, variable = variable, channel = channel, - centre_of_mass = centre_of_mass, + com = com, ttbar_xsection = ttbar_xsection, luminosity = luminosity, load_fakes = True, visiblePS = visiblePS, ) - # h_truth_amcatnlo_herwig, _, _, _ = get_unfold_histogram_tuple( - # inputfile = file_for_amcatnlo_herwig, - # variable = variable, - # channel = channel, - # centre_of_mass = centre_of_mass, - # ttbar_xsection = ttbar_xsection, - # luminosity = luminosity, - # load_fakes = True, - # visiblePS = visiblePS, - # ) - powhegPythia8_results = hist_to_value_error_tuplelist( h_truth_powhegPythia8 ) - # madgraphMLM_results = hist_to_value_error_tuplelist( h_truth_madgraphMLM ) - # amcatnloPythia8_results = hist_to_value_error_tuplelist( h_truth_amcatnlo ) - powheg_herwig_results = hist_to_value_error_tuplelist( h_truth_powheg_herwig ) - # amcatnlo_herwig_results = hist_to_value_error_tuplelist( h_truth_amcatnlo_herwig ) + normalisation_unfolded['powhegPythia8'] = hist_to_value_error_tuplelist( h_truth_powhegPythia8 ) + normalisation_unfolded['amcatnlo'] = hist_to_value_error_tuplelist( h_truth_madgraphMLM ) + normalisation_unfolded['madgraphMLM'] = hist_to_value_error_tuplelist( h_truth_amcatnlo ) + normalisation_unfolded['powhegHerwig'] = hist_to_value_error_tuplelist( h_truth_powheg_herwig ) - massdown_results = hist_to_value_error_tuplelist( h_truth_massdown ) - massup_results = hist_to_value_error_tuplelist( h_truth_massup ) - - normalisation_unfolded['powhegPythia8'] = powhegPythia8_results - # normalisation_unfolded['amcatnlo'] = amcatnloPythia8_results - # normalisation_unfolded['madgraphMLM'] = madgraphMLM_results - normalisation_unfolded['powhegHerwig'] = powheg_herwig_results - # normalisation_unfolded['amcatnloHerwig'] = amcatnlo_herwig_results - - normalisation_unfolded['massdown'] = massdown_results - normalisation_unfolded['massup'] 
= massup_results + normalisation_unfolded['massdown'] = hist_to_value_error_tuplelist( h_truth_massdown ) + normalisation_unfolded['massup'] = hist_to_value_error_tuplelist( h_truth_massup ) + # Write all normalisations in unfolded binning scheme to dataframes + file_template = '{path_to_DF}/{category}/unfolded_normalisation_{channel}_{method}.txt' + write_02(normalisation_unfolded, file_template, path_to_DF, category, channel, method) return normalisation_unfolded + def calculate_xsections( normalisation, category, channel ): - global variable, path_to_JSON + ''' + Calculate the xsection + ''' + global variable, path_to_DF # calculate the x-sections branching_ratio = 0.15 if 'combined' in channel: branching_ratio = branching_ratio * 2 - TTJet_xsection = calculate_xsection( normalisation['TTJet_measured'], luminosity, branching_ratio ) # L in pb1 - TTJet_withoutFakes_xsection = calculate_xsection( normalisation['TTJet_measured_withoutFakes'], luminosity, branching_ratio ) # L in pb1 - TTJet_xsection_unfolded = calculate_xsection( normalisation['TTJet_unfolded'], luminosity, branching_ratio ) # L in pb1 - - xsection_unfolded = { - 'TTJet_measured' : TTJet_xsection, - 'TTJet_measured_withoutFakes' : TTJet_withoutFakes_xsection, - 'TTJet_unfolded' : TTJet_xsection_unfolded, - } - if category == 'central': - powhegPythia8_xsection = calculate_xsection( normalisation['powhegPythia8'], luminosity, branching_ratio ) # L in pb1 - # amcatnlo_xsection = calculate_xsection( normalisation['amcatnlo'], luminosity, branching_ratio ) # L in pb1 - powhegHerwig_xsection = calculate_xsection( normalisation['powhegHerwig'], luminosity, branching_ratio ) # L in pb1 - # amcatnloHerwig_xsection = calculate_xsection( normalisation['amcatnloHerwig'], luminosity, branching_ratio ) # L in pb1 - madgraphMLM_xsection = calculate_xsection( normalisation['madgraphMLM'], luminosity, branching_ratio ) - - massdown_xsection = calculate_xsection( normalisation['massdown'], luminosity, 
branching_ratio ) # L in pb1 - massup_xsection = calculate_xsection( normalisation['massup'], luminosity, branching_ratio ) # L in pb1 - - xsection_unfolded['powhegPythia8'] = powhegPythia8_xsection - # xsection_unfolded['amcatnlo'] = amcatnlo_xsection - # xsection_unfolded['madgraphMLM'] = madgraphMLM_xsection - xsection_unfolded['powhegHerwig'] = powhegHerwig_xsection - # xsection_unfolded['amcatnloHerwig'] = amcatnloHerwig_xsection - - xsection_unfolded['massdown'] = massdown_xsection - xsection_unfolded['massup'] = massup_xsection - - file_template = '{path_to_JSON}/{category}/xsection_{channel}_{method}.txt' - filename = file_template.format( - path_to_JSON = path_to_JSON, - category = category, - channel = channel, - method = method, + xsection_unfolded = {} + xsection_unfolded['TTJet_measured'] = calculate_xsection( + normalisation['TTJet_measured'], + luminosity, # L in pb1 + branching_ratio + ) + xsection_unfolded['TTJet_measured_withoutFakes'] = calculate_xsection( + normalisation['TTJet_measured_withoutFakes'], + luminosity, + branching_ratio + ) + xsection_unfolded['TTJet_unfolded'] = calculate_xsection( + normalisation['TTJet_unfolded'], + luminosity, + branching_ratio ) - write_data_to_JSON( xsection_unfolded, filename ) + + if category == 'central': + xsection_unfolded['powhegPythia8'] = calculate_xsection( + normalisation['powhegPythia8'], + luminosity, + branching_ratio + ) + xsection_unfolded['amcatnlo'] = calculate_xsection( + normalisation['amcatnlo'], + luminosity, + branching_ratio + ) + xsection_unfolded['madgraphMLM'] = calculate_xsection( + normalisation['powhegHerwig'], + luminosity, + branching_ratio + ) + xsection_unfolded['powhegHerwig'] = calculate_xsection( + normalisation['madgraphMLM'], + luminosity, + branching_ratio + ) + + xsection_unfolded['massdown'] = calculate_xsection( + normalisation['massdown'], + luminosity, + branching_ratio + ) + xsection_unfolded['massup'] = calculate_xsection( + normalisation['massup'], + luminosity, 
+ branching_ratio + ) + + file_template = '{path_to_DF}/{category}/xsection_{channel}_{method}.txt' + write_02(xsection_unfolded, file_template, path_to_DF, category, channel, method) return def calculate_normalised_xsections( normalisation, category, channel, normalise_to_one = False ): - global variable, path_to_JSON, phase_space + ''' + Calculate the normalised cross sections + ''' + global variable, path_to_DF, phase_space binWidths = None if phase_space == 'VisiblePS': binWidths = bin_widths_visiblePS elif phase_space == 'FullPS': binWidths = bin_widths + + normalised_xsection = {} + normalised_xsection['TTJet_measured'] = calculate_normalised_xsection( + normalisation['TTJet_measured'], + binWidths[variable], + normalise_to_one + ) + normalised_xsection['TTJet_measured_withoutFakes'] = calculate_normalised_xsection( + normalisation['TTJet_measured_withoutFakes'], + binWidths[variable], + normalise_to_one + ) + normalised_xsection['TTJet_unfolded'] = calculate_normalised_xsection( + normalisation['TTJet_unfolded'], + binWidths[variable], + normalise_to_one + ) - TTJet_normalised_xsection = calculate_normalised_xsection( normalisation['TTJet_measured'], binWidths[variable], normalise_to_one ) - TTJet_withoutFakes_normalised_xsection = calculate_normalised_xsection( normalisation['TTJet_measured_withoutFakes'], binWidths[variable], normalise_to_one ) - TTJet_normalised_xsection_unfolded = calculate_normalised_xsection( normalisation['TTJet_unfolded'], binWidths[variable], normalise_to_one ) + if category == 'central': + normalised_xsection['massdown'] = calculate_normalised_xsection( + normalisation['powhegPythia8'], + binWidths[variable], + normalise_to_one, + ) + normalised_xsection['massdown'] = calculate_normalised_xsection( + normalisation['amcatnlo'], + binWidths[variable], + normalise_to_one, + ) + normalised_xsection['massdown'] = calculate_normalised_xsection( + normalisation['powhegHerwig'], + binWidths[variable], + normalise_to_one, + ) + 
normalised_xsection['massdown'] = calculate_normalised_xsection( + normalisation['madgraphMLM'], + binWidths[variable], + normalise_to_one, + ) - normalised_xsection = {'TTJet_measured' : TTJet_normalised_xsection, - 'TTJet_measured_withoutFakes' : TTJet_withoutFakes_normalised_xsection, - 'TTJet_unfolded' : TTJet_normalised_xsection_unfolded - } + normalised_xsection['massdown'] = calculate_normalised_xsection( + normalisation['massdown'], + binWidths[variable], + normalise_to_one, + ) + normalised_xsection['massdown'] = calculate_normalised_xsection( + normalisation['massup'], + binWidths[variable], + normalise_to_one, + ) - if category == 'central': - powhegPythia8_normalised_xsection = calculate_normalised_xsection( normalisation['powhegPythia8'], binWidths[variable], normalise_to_one ) - # amcatnlo_normalised_xsection = calculate_normalised_xsection( normalisation['amcatnlo'], binWidths[variable], normalise_to_one ) - powhegHerwig_normalised_xsection = calculate_normalised_xsection( normalisation['powhegHerwig'], binWidths[variable], normalise_to_one ) - # amcatnloHerwig_normalised_xsection = calculate_normalised_xsection( normalisation['amcatnloHerwig'], binWidths[variable], normalise_to_one ) - # madgraphMLM_normalised_xsection = calculate_normalised_xsection( normalisation['madgraphMLM'], binWidths[variable], normalise_to_one ) - - fsrdown_normalised_xsection = calculate_normalised_xsection( normalisation['fsrdown'], binWidths[variable], normalise_to_one ) - fsrup_normalised_xsection = calculate_normalised_xsection( normalisation['fsrup'], binWidths[variable], normalise_to_one ) - isrdown_normalised_xsection = calculate_normalised_xsection( normalisation['isrdown'], binWidths[variable], normalise_to_one ) - isrup_normalised_xsection = calculate_normalised_xsection( normalisation['isrup'], binWidths[variable], normalise_to_one ) - uedown_normalised_xsection = calculate_normalised_xsection( normalisation['uedown'], binWidths[variable], normalise_to_one ) - 
ueup_normalised_xsection = calculate_normalised_xsection( normalisation['ueup'], binWidths[variable], normalise_to_one ) - - massdown_normalised_xsection = calculate_normalised_xsection( normalisation['massdown'], binWidths[variable], normalise_to_one ) - massup_normalised_xsection = calculate_normalised_xsection( normalisation['massup'], binWidths[variable], normalise_to_one ) - - normalised_xsection['powhegPythia8'] = powhegPythia8_normalised_xsection - # normalised_xsection['amcatnlo'] = amcatnlo_normalised_xsection - # normalised_xsection['madgraphMLM' ] = madgraphMLM_normalised_xsection - normalised_xsection['powhegHerwig'] = powhegHerwig_normalised_xsection - # normalised_xsection['amcatnloHerwig'] = amcatnloHerwig_normalised_xsection - - normalised_xsection['massdown'] = massdown_normalised_xsection - normalised_xsection['massup'] = massup_normalised_xsection - - file_template = '{path_to_JSON}/{category}/xsection_normalised_{channel}_{method}.txt' - filename = file_template.format( - path_to_JSON = path_to_JSON, + file_template = '{path_to_DF}/{category}/xsection_normalised_{channel}_{method}.txt' + if normalise_to_one: + file_template = file_template.replace( 'xsection_normalised', 'xsection_normalised_to_one' ) + write_02(normalised_xsection, file_template, path_to_DF, category, channel, method) + +def write_02(tuple_out, f_temp, path_to_DF, category, channel, method): + f = f_temp.format( + path_to_DF = path_to_DF, category = category, channel = channel, method = method, ) - - if normalise_to_one: - filename = filename.replace( 'xsection_normalised', 'xsection_normalised_to_one' ) - write_data_to_JSON( normalised_xsection, filename ) - + write_tuple_to_df( tuple_out, f ) + return f def parse_arguments(): parser = ArgumentParser(__doc__) @@ -374,21 +455,15 @@ def parse_arguments(): help = "set path to JSON files" ) parser.add_argument( "-v", "--variable", dest = "variable", default = 'MET', help = "set the variable to analyse (MET, HT, ST, MT)" ) - 
parser.add_argument( "-b", "--bjetbin", dest = "bjetbin", default = '2m', - help = "set b-jet multiplicity for analysis. Options: exclusive: 0-3, inclusive (N or more): 0m, 1m, 2m, 3m, 4m" ) - parser.add_argument( "-m", "--metType", dest = "metType", default = 'type1', - help = "set MET type for analysis of MET, ST or MT" ) parser.add_argument( "-u", "--unfolding_method", dest = "unfolding_method", default = 'TUnfold', - help = "Unfolding method: RooUnfoldSvd (default), TSVDUnfold, RooUnfoldTUnfold, RooUnfoldInvert, RooUnfoldBinByBin, RooUnfoldBayes" ) - parser.add_argument( "-e", "--error_treatment", type = 'int', + help = "Unfolding method: TUnfold" ) + parser.add_argument( "-e", "--error_treatment", type = int, dest = "error_treatment", default = unfoldCfg.error_treatment, - help = "parameter for error treatment in RooUnfold" ) - parser.add_argument( "-c", "--centre-of-mass-energy", dest = "CoM", default = 13, + help = "parameter for error treatment in RooUnfold") + parser.add_argument( "-c", "--centre-of-mass-energy", dest = "com", default = 13, help = "set the centre of mass energy for analysis. 
Default = 13 [TeV]", type = int ) parser.add_argument( "-C", "--combine-before-unfolding", dest = "combine_before_unfolding", action = "store_true", help = "Perform combination of channels before unfolding" ) - parser.add_argument( "-w", "--write-unfolding-objects", dest = "write_unfolding_objects", action = "store_true", - help = "Write out the unfolding objects (D, SV)" ) parser.add_argument( '--test', dest = "test", action = "store_true", help = "Just run the central measurement" ) parser.add_argument( '--ptreweight', dest = "ptreweight", action = "store_true", @@ -399,179 +474,117 @@ def parse_arguments(): return args - - if __name__ == '__main__': set_root_defaults( msg_ignore_level = 3001 ) # setup args = parse_arguments() - measurement_config = XSectionConfig( args.CoM ) - run_just_central = args.test - use_ptreweight = args.ptreweight - # caching of variables for faster access - translate_options = measurement_config.translate_options - ttbar_theory_systematic_prefix = measurement_config.ttbar_theory_systematic_prefix - - centre_of_mass = args.CoM - luminosity = measurement_config.luminosity * measurement_config.luminosity_scale - ttbar_xsection = measurement_config.ttbar_xsection - path_to_files = measurement_config.path_to_files - file_for_unfolding = File( measurement_config.unfolding_central, 'read' ) - - files_for_pdfs = { 'PDFWeights_%d' % (index) : File ( measurement_config.unfolding_pdfweights[index] ) for index in range( 0, 100 ) } - - ### - file_for_scaledown = File( measurement_config.unfolding_scale_down, 'read' ) - file_for_scaleup = File( measurement_config.unfolding_scale_up, 'read' ) - ### - file_for_renormalisationdown = File( measurement_config.unfolding_renormalisation_down, 'read' ) - file_for_renormalisationup = File( measurement_config.unfolding_renormalisation_up, 'read' ) - file_for_factorisationdown = File( measurement_config.unfolding_factorisation_down, 'read' ) - file_for_factorisationup = File( 
measurement_config.unfolding_factorisation_up, 'read' ) - file_for_combineddown = File( measurement_config.unfolding_combined_down, 'read' ) - file_for_combinedup = File( measurement_config.unfolding_combined_up, 'read' ) - # file_for_alphaSdown = File( measurement_config.unfolding_alphaS_down, 'read' ) - # file_for_alphaSup = File( measurement_config.unfolding_alphaS_up, 'read' ) - ### - file_for_massdown = File( measurement_config.unfolding_mass_down, 'read' ) - file_for_massup = File( measurement_config.unfolding_mass_up, 'read' ) - file_for_jesdown = File( measurement_config.unfolding_jes_down, 'read' ) - file_for_jesup = File( measurement_config.unfolding_jes_up, 'read' ) - ### - file_for_jerdown = File( measurement_config.unfolding_jer_down, 'read' ) - file_for_jerup = File( measurement_config.unfolding_jer_up, 'read' ) - ### - file_for_bjetdown = File( measurement_config.unfolding_bjet_down, 'read' ) - file_for_bjetup = File( measurement_config.unfolding_bjet_up, 'read' ) - ### - file_for_lightjetdown = File( measurement_config.unfolding_lightjet_down, 'read' ) - file_for_lightjetup = File( measurement_config.unfolding_lightjet_up, 'read' ) - ### - file_for_LeptonDown = File( measurement_config.unfolding_Lepton_down, 'read' ) - file_for_LeptonUp = File( measurement_config.unfolding_Lepton_up, 'read' ) - ### - file_for_ElectronEnDown = File( measurement_config.unfolding_ElectronEn_down, 'read' ) - file_for_ElectronEnUp = File( measurement_config.unfolding_ElectronEn_up, 'read' ) - ### - file_for_MuonEnDown = File( measurement_config.unfolding_MuonEn_down, 'read' ) - file_for_MuonEnUp = File( measurement_config.unfolding_MuonEn_up, 'read' ) - ### - file_for_TauEnDown = File( measurement_config.unfolding_TauEn_down, 'read' ) - file_for_TauEnUp = File( measurement_config.unfolding_TauEn_up, 'read' ) - ### - file_for_UnclusteredEnDown = File( measurement_config.unfolding_UnclusteredEn_down, 'read' ) - file_for_UnclusteredEnUp = File( 
measurement_config.unfolding_UnclusteredEn_up, 'read' ) - ### - file_for_PUUp = File( measurement_config.unfolding_PUSystematic_up, 'read') - file_for_PUDown = File( measurement_config.unfolding_PUSystematic_down, 'read') - - file_for_powhegPythia8 = File( measurement_config.unfolding_powheg_pythia8, 'read') - # file_for_amcatnlo = File( measurement_config.unfolding_amcatnlo, 'read') - # file_for_amcatnlo_herwig = File( measurement_config.unfolding_amcatnlo_herwig, 'read') - # file_for_madgraphMLM = File( measurement_config.unfolding_madgraphMLM, 'read') - file_for_powheg_herwig = File( measurement_config.unfolding_powheg_herwig, 'read' ) - - variable = args.variable - - tau_value_electron = measurement_config.tau_values_electron[variable] - tau_value_muon = measurement_config.tau_values_muon[variable] - tau_value_combined = measurement_config.tau_values_combined[variable] - - visiblePS = args.visiblePS + # Cache arguments + run_just_central = args.test + use_ptreweight = args.ptreweight + variable = args.variable + com = args.com + unfoldCfg.error_treatment = args.error_treatment + method = args.unfolding_method + combine_before_unfolding = args.combine_before_unfolding + visiblePS = args.visiblePS + + # Cache arguments from xsection config + measurement_config = XSectionConfig( com ) + luminosity = measurement_config.luminosity * measurement_config.luminosity_scale + ttbar_xsection = measurement_config.ttbar_xsection + tau_value_electron = measurement_config.tau_values_electron[variable] + tau_value_muon = measurement_config.tau_values_muon[variable] + tau_value_combined = measurement_config.tau_values_combined[variable] + phase_space = 'FullPS' if visiblePS: phase_space = "VisiblePS" - unfoldCfg.error_treatment = args.error_treatment - method = args.unfolding_method - combine_before_unfolding = args.combine_before_unfolding - b_tag_bin = translate_args[args.bjetbin] - path_to_JSON = '{path}/{com}TeV/{variable}/{phase_space}/'.format( - path = args.path, - com = 
measurement_config.centre_of_mass_energy, - variable = variable, - phase_space = phase_space, - ) - - all_measurements = deepcopy( measurement_config.measurements_and_prefixes.keys() ) - - # ### ttbar pt reweightingng systematic - # ttbar_theory_systematics = [] #[ 'TTJets_ptreweight' ] - # all_measurements.extend( ttbar_theory_systematics ) + unfolding_files = get_unfolding_files(measurement_config) + path_to_DF = 'TESTING/{path}/{com}TeV/{variable}/{phase_space}/'.format( + path = args.path, + com = measurement_config.com_energy, + variable = variable, + phase_space = phase_space, + ) - pdf_uncertainties = ['PDFWeights_%d' % index for index in range( 0, 100 )] + # Core Systematics + all_measurements = deepcopy( measurement_config.measurements_and_prefixes.keys() ) + # Adding PDF Systematics + pdf_uncertainties = ['PDFWeights_%d' % index for index in range( 0, 100 )] all_measurements.extend( pdf_uncertainties ) - + # # TTBar Reweighting Systematics + # ttbar_theory_systematics = [ 'TTJets_ptreweight', 'TTJets_etareweight' ] + # all_measurements.extend( ttbar_theory_systematics ) print 'Performing unfolding for variable', variable for category in all_measurements: - if run_just_central and not category == 'central': + if run_just_central and not category == 'central': continue - # Don't need to consider MET uncertainties for HT, abs_lepton_eta, lepton_pt and NJets if ( variable in measurement_config.variables_no_met ) and (category in measurement_config.met_specific_systematics): continue print 'Unfolding category "%s"' % category # read normalisation results from JSON - electron_file = path_to_JSON + '/' + category + '/normalisation_electron.txt' - muon_file = path_to_JSON + '/' + category + '/normalisation_muon.txt' + electron_file = path_to_DF + '/' + category + '/normalisation_electron.txt' + muon_file = path_to_DF + '/' + category + '/normalisation_muon.txt' + combined_file = path_to_DF + '/' + category + '/normalisation_combined.txt' # don't change 
normalisation input for ttbar generator/theory systematics and PDF weights # For systematics not run in 01 [PDF and TTJet_] then use the central normalisations - if ttbar_theory_systematic_prefix in category or category in pdf_uncertainties: - electron_file = path_to_JSON + '/central/normalisation_electron.txt' - muon_file = path_to_JSON + '/central/normalisation_muon.txt' - # combined_file = path_to_JSON + '/central/normalisation_combined.txt' + if category not in measurement_config.normalisation_systematics: + electron_file = path_to_DF + '/central/normalisation_electron.txt' + muon_file = path_to_DF + '/central/normalisation_muon.txt' + combined_file = path_to_DF + '/central/normalisation_combined.txt' # Read the normalisations - normalisation_results_electron = None - normalisation_results_muon = None + normalisation_results_electron = None + normalisation_results_muon = None + normalisation_results_combined = None + + # Read the normalisation files + # For LeptonUp/Down return other lepton type to central normailsation + # THINK HOW TO READ MUON:ELECTRON/UP:DOWN WITH COMBINEDBEFOREUNFOLDING if category == 'Muon_up' or category == 'Muon_down': - normalisation_results_electron = read_data_from_JSON( path_to_JSON + '/central/normalisation_electron.txt' ) - normalisation_results_muon = read_data_from_JSON( muon_file ) + normalisation_results_electron = read_tuple_from_file( path_to_DF + '/central/normalisation_electron.txt' ) + normalisation_results_muon = read_tuple_from_file( muon_file ) + # normalisation_results_combined = read_tuple_from_file( combined_file ) elif category == 'Electron_up' or category == 'Electron_down': - normalisation_results_electron = read_data_from_JSON( electron_file ) - normalisation_results_muon = read_data_from_JSON( path_to_JSON + '/central/normalisation_muon.txt' ) + normalisation_results_electron = read_tuple_from_file( electron_file ) + normalisation_results_muon = read_tuple_from_file( path_to_DF + 
'/central/normalisation_muon.txt' ) + # normalisation_results_combined = read_tuple_from_file( combined_file ) else: - normalisation_results_electron = read_data_from_JSON( electron_file ) - normalisation_results_muon = read_data_from_JSON( muon_file ) + normalisation_results_electron = read_tuple_from_file( electron_file ) + normalisation_results_muon = read_tuple_from_file( muon_file ) + # normalisation_results_combined = read_tuple_from_file( combined_file ) # Combine the normalisations (beforeUnfolding) - normalisation_results_combined = combine_complex_results(normalisation_results_electron, normalisation_results_muon) - TTJet_normalisation_results_electron = normalisation_results_electron['TTJet'] + # normalisation_results_combined = combine_complex_results(normalisation_results_electron, normalisation_results_muon) + # TTJet_normalisation_results_electron = normalisation_results_electron['TTJet'] TTJet_normalisation_results_muon = normalisation_results_muon['TTJet'] - TTJet_normalisation_results_combined = normalisation_results_combined['TTJet'] - - file_template = '{path_to_JSON}/{category}/unfolded_normalisation_{channel}_{method}.txt' - filename = '' - - # get unfolded normalisations and xsections - unfolded_normalisation_electron = {} - unfolded_normalisation_muon = {} - - - # Electron channel - channel = 'electron' - unfolded_normalisation_electron = get_unfolded_normalisation( - TTJet_normalisation_results_electron, - category, - channel, - tau_value_electron, - visiblePS = visiblePS - ) - filename = file_template.format( - path_to_JSON = path_to_JSON, - category = category, - channel = channel, - method = method, - ) - write_data_to_JSON( unfolded_normalisation_electron, filename ) - # measure xsection - calculate_xsections( unfolded_normalisation_electron, category, channel ) - calculate_normalised_xsections( unfolded_normalisation_electron, category, channel ) - calculate_normalised_xsections( unfolded_normalisation_electron, category, channel , 
True ) - + # TTJet_normalisation_results_combined = normalisation_results_combined['TTJet'] + + # # get unfolded normalisations and xsections + unfolded_normalisation_electron = {} + unfolded_normalisation_muon = {} + unfolded_normalisation_combined = {} + unfolded_normalisation_combinedBeforeUnfolding = {} + + + # # Electron channel + # channel = 'electron' + # unfolded_normalisation_electron = get_unfolded_normalisation( + # TTJet_normalisation_results_electron, + # category, + # channel, + # tau_value_electron, + # visiblePS = visiblePS + # ) + # # measure xsection + # calculate_xsections( unfolded_normalisation_electron, category, channel ) + # calculate_normalised_xsections( unfolded_normalisation_electron, category, channel ) + # calculate_normalised_xsections( unfolded_normalisation_electron, category, channel , True ) # Muon channel channel = 'muon' @@ -582,52 +595,32 @@ def parse_arguments(): tau_value_muon, visiblePS = visiblePS ) - filename = file_template.format( - path_to_JSON = path_to_JSON, - category = category, - channel = channel, - method = method, - ) - write_data_to_JSON( unfolded_normalisation_muon, filename ) # measure xsection calculate_xsections( unfolded_normalisation_muon, category, channel ) calculate_normalised_xsections( unfolded_normalisation_muon, category, channel ) calculate_normalised_xsections( unfolded_normalisation_muon, category, channel , True ) + # # Results where the channels are combined before unfolding (the 'combined in the response matrix') + # channel = 'combinedBeforeUnfolding' + # unfolded_normalisation_combinedBeforeUnfolding = get_unfolded_normalisation( + # TTJet_normalisation_results_combined, + # category, + # 'combined', + # tau_value=tau_value_combined, + # visiblePS=visiblePS, + # ) + # # measure xsection + # calculate_xsections( unfolded_normalisation_combinedBeforeUnfolding, category, channel ) + # calculate_normalised_xsections( unfolded_normalisation_combinedBeforeUnfolding, category, channel ) + # 
calculate_normalised_xsections( unfolded_normalisation_combinedBeforeUnfolding, category, channel , True ) + + # # Results where the channels are combined after unfolding + # channel = 'combined' + # unfolded_normalisation_combined = combine_complex_results( unfolded_normalisation_electron, unfolded_normalisation_muon ) + # # measure xsection + # calculate_xsections( unfolded_normalisation_combined, category, channel ) + # calculate_normalised_xsections( unfolded_normalisation_combined, category, channel ) + # calculate_normalised_xsections( unfolded_normalisation_combined, category, channel , True ) + + - # Results where the channels are combined after unfolding - unfolded_normalisation_combined = combine_complex_results( unfolded_normalisation_electron, unfolded_normalisation_muon ) - channel = 'combined' - filename = file_template.format( - path_to_JSON = path_to_JSON, - category = category, - channel = channel, - method = method, - ) - write_data_to_JSON( unfolded_normalisation_combined, filename ) - # measure xsection - calculate_xsections( unfolded_normalisation_combined, category, channel ) - calculate_normalised_xsections( unfolded_normalisation_combined, category, channel ) - calculate_normalised_xsections( unfolded_normalisation_combined, category, channel , True ) - - - # Results where the channels are combined before unfolding (the 'combined in the response matrix') - unfolded_normalisation_combinedBeforeUnfolding = get_unfolded_normalisation( - TTJet_normalisation_results_combined, - category, - 'combined', - tau_value=tau_value_combined, - visiblePS=visiblePS, - ) - channel = 'combinedBeforeUnfolding' - filename = file_template.format( - path_to_JSON = path_to_JSON, - category = category, - channel = channel, - method = method, - ) - write_data_to_JSON( unfolded_normalisation_combinedBeforeUnfolding, filename ) - # measure xsection - calculate_xsections( unfolded_normalisation_combinedBeforeUnfolding, category, channel ) - 
calculate_normalised_xsections( unfolded_normalisation_combinedBeforeUnfolding, category, channel ) - calculate_normalised_xsections( unfolded_normalisation_combinedBeforeUnfolding, category, channel , True ) From 7216cf4a0f1552d113f725a57d58b0926a49f2e0 Mon Sep 17 00:00:00 2001 From: Douglas Burns Date: Tue, 6 Dec 2016 14:03:46 +0000 Subject: [PATCH 44/90] Make pandas tuple writer/reader more generic --- dps/utils/measurement2.py | 4 +-- dps/utils/pandas_utilities.py | 57 ++++++++++++++++++++++++++++++++--- 2 files changed, 55 insertions(+), 6 deletions(-) diff --git a/dps/utils/measurement2.py b/dps/utils/measurement2.py index 5d9be9bd..7b7a396d 100644 --- a/dps/utils/measurement2.py +++ b/dps/utils/measurement2.py @@ -183,7 +183,7 @@ def save(self, phase_space): I would like to change this to a pandas Dataframe at somepoint after a few issues have been worked out ''' - from dps.utils.pandas_utilities import write_normalisation_to_df + from dps.utils.pandas_utilities import write_tuple_to_df from dps.utils.file_utilities import make_folder_if_not_exists # If normalisation hasnt been calculated - then go calculate it! 
if not self.is_normalised: self.calculate_normalisation() @@ -203,7 +203,7 @@ def save(self, phase_space): channel=self.channel ) - write_normalisation_to_df( + write_tuple_to_df( self.normalisation, output_folder + f ) diff --git a/dps/utils/pandas_utilities.py b/dps/utils/pandas_utilities.py index f196f850..dfa70f85 100644 --- a/dps/utils/pandas_utilities.py +++ b/dps/utils/pandas_utilities.py @@ -25,6 +25,11 @@ def df_to_file(filename, df, index=True): Save a dataframe to an output text file Nicely human readable ''' + # Make the folder if it doesnt exist + import os + from dps.utils.file_utilities import make_folder_if_not_exists + make_folder_if_not_exists(os.path.dirname(filename)) + with open(filename,'w') as f: df.to_string(f, index=index) f.write('\n') @@ -66,9 +71,25 @@ def divide_by_series(s1, s2): s = s1.div(s2) return s -def write_normalisation_to_df( d_norm, filename ): +def tupleise_cols(vals, errs): + ''' + tupleising two cols in pandas ''' - Writing the output of 01 to dataframe + vals_errs = [ (v, e) for v,e in zip(vals, errs)] + return vals_errs + +def write_tuple_to_df( d_norm, filename ): + ''' + Writing tuples to a dataframe + + Takes a pandas dataframe of tuples of the form: + A | B + (v,e) | (v,e) + + Write a pandas output file of the form: + A | A_Unc | B | B_Unc + (v) | (e) | (v) | (e) + ''' # First create the dataframe df = dict_to_df(d_norm) @@ -79,9 +100,37 @@ def write_normalisation_to_df( d_norm, filename ): # Make columns alphabetical for easy reading l=df.columns.tolist() l.sort() - print l df = df[l] # Write dataframe df_to_file(filename, df, index=False) - return \ No newline at end of file + return + +def read_tuple_from_file( filename ): + ''' + Reading the output of 01 to a dataframe + + Reads a pandas output file of the form: + A | A_Unc | B | B_Unc + (v) | (e) | (v) | (e) + + Returns a pandas dataframe of the form: + A | B + (v,e) | (v,e) + + ''' + from dps.config.xsection import XSectionConfig + config = XSectionConfig(13) + 
+ # First read the dataframe + df = file_to_df(filename) + l=df.columns.tolist() + + # Now to retupleise the columns + for sample in l: + if '_Unc' in sample: continue + vals = df[sample] + errs = df[sample+'_Unc'] + df[sample] = tupleise_cols(vals, errs) + del df[sample+'_Unc'] + return df \ No newline at end of file From 4c4d5554748307999ed2fd8481f9e3b121206bf8 Mon Sep 17 00:00:00 2001 From: Douglas Burns Date: Fri, 9 Dec 2016 10:52:26 +0000 Subject: [PATCH 45/90] path_to_JSON -> path_to_DF --- .../xsection/03_calculate_systematics.py | 172 +++++++++++------- dps/utils/systematic.py | 42 +++-- 2 files changed, 129 insertions(+), 85 deletions(-) diff --git a/dps/analysis/xsection/03_calculate_systematics.py b/dps/analysis/xsection/03_calculate_systematics.py index a5e7005b..a4972253 100644 --- a/dps/analysis/xsection/03_calculate_systematics.py +++ b/dps/analysis/xsection/03_calculate_systematics.py @@ -15,10 +15,9 @@ 2) can be used to compare systematics (both in tables and plots) 3) + 4) for more fine-grained analysis ''' -from optparse import OptionParser +from argparse import ArgumentParser from dps.config.xsection import XSectionConfig from dps.config.variable_binning import bin_edges_vis -from dps.utils.file_utilities import make_folder_if_not_exists from dps.utils.systematic import append_PDF_uncertainties, print_dictionary,\ get_normalised_cross_sections, get_symmetrised_systematic_uncertainty,\ generate_covariance_matrices,\ @@ -26,65 +25,79 @@ write_normalised_xsection_measurement,\ write_systematic_xsection_measurement +def parse_arguments(): + parser = ArgumentParser() + parser.add_argument( "-p", "--path", + dest = "path", + default = 'data/normalisation/background_subtraction/', + help = "set path to JSON files" ) + parser.add_argument( "-v", "--variable", + dest = "variable", + default = 'MET', + help = "set variable to plot (MET, HT, ST, MT)" ) + parser.add_argument( "-c", "--centre-of-mass-energy", + dest = "CoM", + default = 13, type = int, + 
help = "set the centre of mass energy for analysis. Default = 13 [TeV]" ) + parser.add_argument( "-s", "--symmetrise_errors", + action = "store_true", + dest = "symmetrise_errors", + help = "Makes the errors symmetric" ) + parser.add_argument( '--visiblePS', + dest = "visiblePS", + action = "store_true", + help = "Unfold to visible phase space" ) + parser.add_argument( "-u", "--unfolding_method", + dest = "unfolding_method", + default = 'TUnfold', + help = "Unfolding method: TUnfold (default)" ) + args = parser.parse_args() + return args + if __name__ == '__main__': ''' 1) read all background subtraction results (group by MET, PDF, other) 2) calculate the difference to central measurement 3) ''' - parser = OptionParser() - parser.add_option( "-p", "--path", dest = "path", default = 'data/normalisation/background_subtraction/', - help = "set path to JSON files" ) - parser.add_option( "-v", "--variable", dest = "variable", default = 'MET', - help = "set variable to plot (MET, HT, ST, MT)" ) - parser.add_option( "-b", "--bjetbin", dest = "bjetbin", default = '2m', - help = "set b-jet multiplicity for analysis. Options: exclusive: 0-3, inclusive (N or more): 0m, 1m, 2m, 3m, 4m" ) - parser.add_option( "-c", "--centre-of-mass-energy", dest = "CoM", default = 13, type = int, - help = "set the centre of mass energy for analysis. 
Default = 13 [TeV]" ) - parser.add_option( "-s", "--symmetrise_errors", action = "store_true", dest = "symmetrise_errors", - help = "Makes the errors symmetric" ) - parser.add_option( '--visiblePS', dest = "visiblePS", action = "store_true", - help = "Unfold to visible phase space" ) - parser.add_option( "-u", "--unfolding_method", dest = "unfolding_method", default = 'TUnfold', - help = "Unfolding method: TUnfold (default)" ) - - ( options, args ) = parser.parse_args() - measurement_config = XSectionConfig( options.CoM ) + args = parse_arguments() + measurement_config = XSectionConfig( args.CoM ) # caching of variables for shorter access - translate_options = measurement_config.translate_options - met_specific_systematics = measurement_config.met_specific_systematics - variables_no_met = measurement_config.variables_no_met - method = options.unfolding_method - symmetrise_errors = options.symmetrise_errors - variable = options.variable - topMasses = measurement_config.topMasses - topMassUncertainty = measurement_config.topMassUncertainty - visiblePS = options.visiblePS + method = args.unfolding_method + symmetrise_errors = args.symmetrise_errors + variable = args.variable + visiblePS = args.visiblePS + met_specific_systematics = measurement_config.met_specific_systematics + variables_no_met = measurement_config.variables_no_met + topMasses = measurement_config.topMasses + topMassUncertainty = measurement_config.topMassUncertainty + phase_space = 'VisiblePS' if not visiblePS: phase_space = 'FullPS' - path_to_JSON = '{path}/{com}TeV/{variable}/{phase_space}' - path_to_JSON = path_to_JSON.format( - path = options.path, - com = options.CoM, + path_to_DF = 'TESTING/{path}/{com}TeV/{variable}/{phase_space}' + path_to_DF = path_to_DF.format( + path = args.path, + com = args.CoM, variable = variable, phase_space = phase_space, - ) + ) number_of_bins=len(bin_edges_vis[variable])-1 - # List of options to pass to systematic functions - opts={ - 'met_specific_systematics' : 
met_specific_systematics, - 'variables_no_met' : variables_no_met, - 'symmetrise_errors' : symmetrise_errors, - 'path_to_JSON' : path_to_JSON, - 'method' : method, - 'variable' : variable, - 'number_of_bins' : number_of_bins, - 'topMasses' : topMasses, - 'topMassUncertainty' : topMassUncertainty + # List of args to pass to systematic functions + args={ + 'met_specific_systematics' : met_specific_systematics, + 'variables_no_met' : variables_no_met, + 'symmetrise_errors' : symmetrise_errors, + 'path_to_DF' : path_to_DF, + 'method' : method, + 'variable' : variable, + 'number_of_bins' : number_of_bins, + 'topMasses' : topMasses, + 'topMassUncertainty' : topMassUncertainty, + 'phase_space' : phase_space } # Get list of all systematics @@ -95,47 +108,68 @@ list_of_systematics = all_systematics # If you want different lists of systematics can just do some manipulation here - for channel in ['electron', 'muon', 'combined', 'combinedBeforeUnfolding']: - # for channel in ['muon']: - print("Channel in use is {0} : ".format(channel)) - - # Output folder of covariance matrices - covariance_matrix_output_path = 'plots/covariance_matrices/{phase_space}/{channel}/{variable}/' - covariance_matrix_output_path = covariance_matrix_output_path.format( - variable = variable, - channel = channel, - phase_space = phase_space, - ) - make_folder_if_not_exists(covariance_matrix_output_path) + # for channel in ['electron', 'muon', 'combined', 'combinedBeforeUnfolding']: + for channel in ['muon']: + print("Calculating {0} channel systematic uncertainties : ".format(channel)) - # Add channel specific options to list of options - opts['channel'] = channel - opts['covariance_matrix_output_path'] = covariance_matrix_output_path + # Add channel specific args to list of args + args['channel'] = channel # Retreive the normalised cross sections, for all groups in list_of_systematics. 
- systematic_normalised_uncertainty, unfolded_systematic_normalised_uncertainty = get_normalised_cross_sections(opts, list_of_systematics) + systematic_normalised_uncertainty, unfolded_systematic_normalised_uncertainty = get_normalised_cross_sections( + args, + list_of_systematics + ) # print_dictionary("Normalised cross sections of the systematics in use", systematic_normalised_uncertainty) # print_dictionary("Unfolded normalised cross sections of the systematics in use", unfolded_systematic_normalised_uncertainty) # Get and symmetrise the uncertainties - x_sec_with_symmetrised_systematics = get_symmetrised_systematic_uncertainty(systematic_normalised_uncertainty, opts) - unfolded_x_sec_with_symmetrised_systematics = get_symmetrised_systematic_uncertainty(unfolded_systematic_normalised_uncertainty, opts) + x_sec_with_symmetrised_systematics = get_symmetrised_systematic_uncertainty( + args, + systematic_normalised_uncertainty, + ) + unfolded_x_sec_with_symmetrised_systematics = get_symmetrised_systematic_uncertainty( + args, + unfolded_systematic_normalised_uncertainty + ) # print_dictionary("Normalised cross sections of the systematics with symmetrised uncertainties", x_sec_with_symmetrised_systematics) # print_dictionary("Unfolded normalised cross sections of the systematics with symmetrised uncertainties", unfolded_x_sec_with_symmetrised_systematics) # Create covariance matrices - generate_covariance_matrices(opts, x_sec_with_symmetrised_systematics) - generate_covariance_matrices(opts, unfolded_x_sec_with_symmetrised_systematics) + generate_covariance_matrices( + args, + x_sec_with_symmetrised_systematics + ) + generate_covariance_matrices( + args, + unfolded_x_sec_with_symmetrised_systematics + ) # Combine all systematic uncertainties for each of the groups of systematics # Currently returns (Value, SysUp, SysDown) - Need to include stat? 
- full_measurement = get_measurement_with_total_systematic_uncertainty(opts, x_sec_with_symmetrised_systematics) - full_unfolded_measurement = get_measurement_with_total_systematic_uncertainty(opts, unfolded_x_sec_with_symmetrised_systematics) + full_measurement = get_measurement_with_total_systematic_uncertainty( + args, + x_sec_with_symmetrised_systematics + ) + full_unfolded_measurement = get_measurement_with_total_systematic_uncertainty( + args, + unfolded_x_sec_with_symmetrised_systematics + ) # print_dictionary("Measurement with total systematic error for each systematic group", full_measurement) # print_dictionary("Unfolded measurement with total systematic error for each systematic group", full_unfolded_measurement) # Write central +- error to JSON. Group of systematics in question is included in outputfile name. # Summary if you want to specify specific list. e.g. GeneratorOnly etc - write_normalised_xsection_measurement(opts, full_measurement, full_unfolded_measurement, summary = '' ) - write_systematic_xsection_measurement(opts, unfolded_x_sec_with_symmetrised_systematics, full_unfolded_measurement, summary = '' ) + write_normalised_xsection_measurement( + args, + full_measurement, + full_unfolded_measurement, + summary = '' + ) + write_systematic_xsection_measurement( + args, + unfolded_x_sec_with_symmetrised_systematics, + full_unfolded_measurement, + summary = '' + ) diff --git a/dps/utils/systematic.py b/dps/utils/systematic.py index e2344dae..d1cb0c20 100644 --- a/dps/utils/systematic.py +++ b/dps/utils/systematic.py @@ -1,7 +1,7 @@ from __future__ import division, print_function -from dps.utils.file_utilities import read_data_from_JSON, write_data_to_JSON, deprecated +from dps.utils.file_utilities import write_data_to_JSON, deprecated from dps.utils.Calculation import combine_errors_in_quadrature -from dps.utils.pandas_utilities import dict_to_df, list_to_series, df_to_file, divide_by_series +from dps.utils.pandas_utilities import 
read_tuple_from_file, dict_to_df, list_to_series, df_to_file, divide_by_series from copy import deepcopy from math import sqrt import numpy as np @@ -12,13 +12,13 @@ def write_normalised_xsection_measurement(options, measurement, measurement_unfo [Central Value, Lower Systemtic, Upper Systematic] to a json. Different combinations of systematic uncertainty are stored as different json by appending different 'summary' ''' - path_to_JSON=options['path_to_JSON'] + path_to_DF=options['path_to_DF'] method=options['method'] channel=options['channel'] - output_file = '{path_to_JSON}/central/xsection_normalised_{channel}_{method}_with_errors.txt' + output_file = '{path_to_DF}/central/xsection_normalised_{channel}_{method}_with_errors.txt' output_file = output_file.format( - path_to_JSON = path_to_JSON, + path_to_DF = path_to_DF, channel = channel, method = method, ) @@ -36,13 +36,13 @@ def write_systematic_xsection_measurement(options, systematic, total_syst, summa ''' Write systematics to a df. 
''' - path_to_JSON=options['path_to_JSON'] + path_to_DF=options['path_to_DF'] method=options['method'] channel=options['channel'] - output_file = '{path_to_JSON}/central/xsection_normalised_{channel}_{method}_summary_absolute.txt' + output_file = '{path_to_DF}/central/xsection_normalised_{channel}_{method}_summary_absolute.txt' output_file = output_file.format( - path_to_JSON = path_to_JSON, + path_to_DF = path_to_DF, channel = channel, method = method, ) @@ -66,7 +66,7 @@ def write_systematic_xsection_measurement(options, systematic, total_syst, summa d_abs = dict_to_df(all_uncertainties) df_to_file(output_file, d_abs) - # Create Relative Paths + # Create Relative Uncertainties output_file = output_file.replace('absolute', 'relative') for uncertainty, vals in all_uncertainties.iteritems(): if uncertainty == 'central': continue @@ -97,27 +97,27 @@ def read_normalised_xsection_measurement(options, category): variable=options['variable'] variables_no_met=options['variables_no_met'] met_specific_systematics=options['met_specific_systematics'] - path_to_JSON=options['path_to_JSON'] + path_to_DF=options['path_to_DF'] method=options['method'] channel=options['channel'] filename = '{path}/{category}/xsection_normalised_{channel}_{method}.txt' # Disregarding Met Uncertainties if variable does not use MET if (category in met_specific_systematics) and (variable in variables_no_met): filename = filename.format( - path = path_to_JSON, + path = path_to_DF, channel = channel, category = 'central', method = method, ) else: filename = filename.format( - path = path_to_JSON, + path = path_to_DF, channel = channel, category = category, method = method ) - normalised_xsection = read_data_from_JSON( filename ) - measurement = normalised_xsection['TTJet_measured_withoutFakes']#should this be measured without fakes??? 
+ normalised_xsection = read_tuple_from_file( filename ) + measurement = normalised_xsection['TTJet_measured_withoutFakes'] measurement_unfolded = normalised_xsection['TTJet_unfolded'] return measurement, measurement_unfolded @@ -288,7 +288,7 @@ def calculate_total_PDFuncertainty(options, central_measurement, pdf_uncertainty return pdf_sym -def get_symmetrised_systematic_uncertainty(norm_syst_unc_x_secs, options): +def get_symmetrised_systematic_uncertainty(options, norm_syst_unc_x_secs ): ''' Returns the symmetrised uncertainties on the normalised cross sections. @@ -511,6 +511,7 @@ def make_covariance_plot( options, systematic, matrix, label='Covariance' ): Saves to plots/covariance_matrices/{PhaseSpace}/{Channel}/{Variable}/ ''' from dps.config.variable_binning import bin_edges_vis + from dps.utils.file_utilities import make_folder_if_not_exists from ROOT import TH2F, TCanvas, TPad, gROOT, gStyle from array import array gROOT.SetBatch(True) @@ -518,7 +519,16 @@ def make_covariance_plot( options, systematic, matrix, label='Covariance' ): variable = options['variable'] channel = options['channel'] - covariance_matrix_output_path = options['covariance_matrix_output_path'] + phase_space = options['phase_space'] + + # Output folder of covariance matrices + covariance_matrix_output_path = 'plots/covariance_matrices/{phase_space}/{channel}/{variable}/' + covariance_matrix_output_path = covariance_matrix_output_path.format( + variable = variable, + channel = channel, + phase_space = phase_space, + ) + make_folder_if_not_exists(covariance_matrix_output_path) x_binning = array ( 'f' , bin_edges_vis[variable] ) y_binning = array ( 'f', bin_edges_vis[variable] ) From 92487069dcc4b6bbad874fec6bc7112006a2e2d8 Mon Sep 17 00:00:00 2001 From: Douglas Burns Date: Fri, 9 Dec 2016 10:58:35 +0000 Subject: [PATCH 46/90] measurementsandprefixes->measurements --- .../xsection/02_unfold_and_measure.py | 2 +- dps/config/xsection.py | 150 +++++++----------- 2 files changed, 54 
insertions(+), 98 deletions(-) diff --git a/dps/analysis/xsection/02_unfold_and_measure.py b/dps/analysis/xsection/02_unfold_and_measure.py index 34226dc7..a7eab7f7 100644 --- a/dps/analysis/xsection/02_unfold_and_measure.py +++ b/dps/analysis/xsection/02_unfold_and_measure.py @@ -510,7 +510,7 @@ def parse_arguments(): ) # Core Systematics - all_measurements = deepcopy( measurement_config.measurements_and_prefixes.keys() ) + all_measurements = deepcopy( measurement_config.measurements ) # Adding PDF Systematics pdf_uncertainties = ['PDFWeights_%d' % index for index in range( 0, 100 )] all_measurements.extend( pdf_uncertainties ) diff --git a/dps/config/xsection.py b/dps/config/xsection.py index 716a211e..50a11829 100644 --- a/dps/config/xsection.py +++ b/dps/config/xsection.py @@ -169,87 +169,39 @@ def __fill_defaults__( self ): self.higgs_file = path_to_files + 'central/TTH_Inclusive_M-125' + middle + '.root' - self.categories_and_prefixes = { - 'central':'', - 'Electron_down':'ElectronDown', - 'Electron_up':'ElectronUp', - 'Muon_down':'MuonDown', - 'Muon_up':'MuonUp', - 'BJet_down':'BJetDown', - 'BJet_up':'BJetUp', - 'LightJet_down':'LightJetDown', - 'LightJet_up':'LightJetUp', - 'JES_down':'_JESDown', - 'JES_up':'_JESUp', - # 'JES_down_alphaCorr':'_JESDown_alphaCorr', - # 'JES_up_alphaCorr':'_JESUp_alphaCorr', - 'JER_down':'_JERDown', - 'JER_up':'_JERUp', - - 'PileUp_up' : '', - 'PileUp_down' : '', - - # Other MET uncertainties not already included - 'ElectronEnUp' : '', - 'ElectronEnDown' : '', - 'MuonEnUp' : '', - 'MuonEnDown' : '', - 'TauEnUp' : '', - 'TauEnDown' : '', - 'UnclusteredEnUp' : '', - 'UnclusteredEnDown' : '', - } - - self.measurements_and_prefixes = { - 'central' : '', - 'JER_up' : '', - 'JER_down' : '', - 'JES_up' : '', - 'JES_down' : '', - 'ElectronEnUp' : '', - 'ElectronEnDown' : '', - 'MuonEnUp' : '', - 'MuonEnDown' : '', - 'TauEnUp' : '', - 'TauEnDown' : '', - 'UnclusteredEnUp' : '', - 'UnclusteredEnDown' : '', - 'PileUp_up' : '', - 
'PileUp_down' : '', - 'LightJet_up' : '', - 'LightJet_down' : '', - 'BJet_up' : '', - 'BJet_down' : '', - 'Electron_up' : '', - 'Electron_down' : '', - 'Muon_up' : '', - 'Muon_down' : '', - 'luminosity+' : '', - 'luminosity-' : '', - 'SingleTop_cross_section+' : '', - 'SingleTop_cross_section-' : '', - 'V+Jets_cross_section+' : '', - 'V+Jets_cross_section-' : '', - 'QCD_cross_section+' : '', - 'QCD_cross_section-' : '', - 'QCD_shape' : '', - 'TTJets_scaleup' : '', - 'TTJets_scaledown' : '', - 'TTJets_massup' : '', - 'TTJets_massdown' : '', - 'TTJets_hadronisation' : '', - 'TTJets_NLOgenerator' : '', - 'TTJets_factorisationup' : '', - 'TTJets_factorisationdown' : '', - 'TTJets_renormalisationup' : '', - 'TTJets_renormalisationdown' : '', - 'TTJets_combinedup' : '', - 'TTJets_combineddown' : '', - 'TTJets_alphaSup' : '', - 'TTJets_alphaSdown' : '', - } + # self.categories_and_prefixes = { + # 'central':'', + # 'Electron_down':'ElectronDown', + # 'Electron_up':'ElectronUp', + # 'Muon_down':'MuonDown', + # 'Muon_up':'MuonUp', + # 'BJet_down':'BJetDown', + # 'BJet_up':'BJetUp', + # 'LightJet_down':'LightJetDown', + # 'LightJet_up':'LightJetUp', + # 'JES_down':'_JESDown', + # 'JES_up':'_JESUp', + # # 'JES_down_alphaCorr':'_JESDown_alphaCorr', + # # 'JES_up_alphaCorr':'_JESUp_alphaCorr', + # 'JER_down':'_JERDown', + # 'JER_up':'_JERUp', + + # 'PileUp_up' : '', + # 'PileUp_down' : '', + + # # Other MET uncertainties not already included + # 'ElectronEnUp' : '', + # 'ElectronEnDown' : '', + # 'MuonEnUp' : '', + # 'MuonEnDown' : '', + # 'TauEnUp' : '', + # 'TauEnDown' : '', + # 'UnclusteredEnUp' : '', + # 'UnclusteredEnDown' : '', + # } # Used in 01 + # Rename to normalisation_measurements? self.normalisation_systematics = [ 'central', @@ -293,8 +245,25 @@ def __fill_defaults__( self ): 'QCD_shape', ] - - + # Rename to generator_measurements? 
+ self.generator_systematics = [ + 'TTJets_scaleup', + 'TTJets_scaledown', + 'TTJets_massup', + 'TTJets_massdown', + 'TTJets_hadronisation', + 'TTJets_NLOgenerator', + 'TTJets_factorisationup', + 'TTJets_factorisationdown', + 'TTJets_renormalisationup', + 'TTJets_renormalisationdown', + 'TTJets_combinedup', + 'TTJets_combineddown', + 'TTJets_alphaSup', + 'TTJets_alphaSdown', + ] + + self.measurements = self.normalisation_systematics + self.generator_systematics self.list_of_systematics = { # Theoretical Uncertainties (Rate Changing) @@ -339,19 +308,6 @@ def __fill_defaults__( self ): # now fill in the centre of mass dependent values self.__fill_defaults_13TeV__() - # Needed? - self.generator_systematics = [ - 'scaleup', 'scaledown', - 'massup', 'massdown', - 'hadronisation', - 'NLOgenerator', - 'factorisationup', 'factorisationdown', - 'renormalisationup', 'renormalisationdown', - 'combinedup', 'combineddown', - 'fsrup', 'fsrdown', - 'isrup', 'isrdown', - 'ueup', 'uedown' - ] self.generator_mcsamples = [ 'PowhegPythia8', @@ -414,12 +370,12 @@ def __fill_defaults__( self ): tmp = tmp.format(systematic, self.luminosity) self.generator_systematic_vjets_templates[systematic] = tmp - categories_and_prefixes = self.categories_and_prefixes + # categories_and_prefixes = self.categories_and_prefixes generator_mcsamples = self.generator_mcsamples # Used in 01 - self.general_trees = { - category: path_to_files + category + '/%s' + middle + prefix + '.root' for category, prefix in categories_and_prefixes.iteritems()} + # self.general_trees = { + # category: path_to_files + category + '/%s' + middle + prefix + '.root' for category, prefix in categories_and_prefixes.iteritems()} self.ttbar_trees = { category: path_to_files + 'TTJets_PowhegPythia8_tree.root' for category in self.normalisation_systematics} self.SingleTop_trees = { From 69e28a31d72af5a1f80ea6b98ef45f6c96f3a3b1 Mon Sep 17 00:00:00 2001 From: Douglas Burns Date: Fri, 9 Dec 2016 12:13:52 +0000 Subject: [PATCH 47/90] 
Move a few more scripts to legacy/ --- TESTING/test_config.json | 35 --- Testing.json | 212 ------------------ dps/analysis/search/test.root | Bin 3552230 -> 0 bytes .../__init__.py | 0 .../make_HLT_plots_Sergey.py | 0 .../make_jet_response_plot_pt_bins.py | 0 ...ke_jet_response_plots_correction_levels.py | 0 dps/{analysis => legacy}/check_CRAB_jobs.py | 0 dps/{analysis => legacy}/estimate_QCD_rate.py | 0 .../generate_QCD_template_from_data.py | 0 dps/{analysis => legacy}/grid.py | 0 .../make_CRAB_configuration.py | 0 dps/{analysis => legacy}/make_HLT_plots.py | 0 .../make_ntuples_CRAB_configurations.sh | 0 .../make_unfolding_CRAB_configurations.sh | 0 .../read_processed_events.py | 0 .../read_skim_information.py | 0 dps/{analysis => legacy}/search/__init__.py | 0 .../search/translate_results_to_theta.py | 0 .../search/validate_systematic_method.py | 0 dps/{analysis => legacy}/trash/README | 0 .../xsection/06_compare_energies.py | 0 .../xsection/98_fit_cross_checks.py | 0 .../xsection/98b_fit_cross_checks.py | 0 .../xsection/98c_fit_cross_checks.py | 0 .../xsection/make_control_plots.py | 0 .../xsection/make_cutflow_8TeV.py | 0 .../xsection/make_fit_variable_plots.py | 0 .../xsection/make_new_physics_plots_8TeV.py | 0 .../zprime_analysis/__init__.py | 0 .../zprime_analysis/estimate_QCD_rate.py | 0 .../zprime_analysis/make_control_plots.py | 0 .../make_control_region_plots.py | 0 .../make_control_region_plots_2.py | 0 34 files changed, 247 deletions(-) delete mode 100644 TESTING/test_config.json delete mode 100644 Testing.json delete mode 100644 dps/analysis/search/test.root rename dps/{analysis => legacy}/HLT_scripts_for_Sergeys_thesis/__init__.py (100%) rename dps/{analysis => legacy}/HLT_scripts_for_Sergeys_thesis/make_HLT_plots_Sergey.py (100%) rename dps/{analysis => legacy}/HLT_scripts_for_Sergeys_thesis/make_jet_response_plot_pt_bins.py (100%) rename dps/{analysis => legacy}/HLT_scripts_for_Sergeys_thesis/make_jet_response_plots_correction_levels.py (100%) 
rename dps/{analysis => legacy}/check_CRAB_jobs.py (100%) rename dps/{analysis => legacy}/estimate_QCD_rate.py (100%) rename dps/{analysis => legacy}/generate_QCD_template_from_data.py (100%) rename dps/{analysis => legacy}/grid.py (100%) rename dps/{analysis => legacy}/make_CRAB_configuration.py (100%) rename dps/{analysis => legacy}/make_HLT_plots.py (100%) rename dps/{analysis => legacy}/make_ntuples_CRAB_configurations.sh (100%) rename dps/{analysis => legacy}/make_unfolding_CRAB_configurations.sh (100%) rename dps/{analysis => legacy}/read_processed_events.py (100%) rename dps/{analysis => legacy}/read_skim_information.py (100%) rename dps/{analysis => legacy}/search/__init__.py (100%) rename dps/{analysis => legacy}/search/translate_results_to_theta.py (100%) rename dps/{analysis => legacy}/search/validate_systematic_method.py (100%) rename dps/{analysis => legacy}/trash/README (100%) rename dps/{analysis => legacy}/xsection/06_compare_energies.py (100%) rename dps/{analysis => legacy}/xsection/98_fit_cross_checks.py (100%) rename dps/{analysis => legacy}/xsection/98b_fit_cross_checks.py (100%) rename dps/{analysis => legacy}/xsection/98c_fit_cross_checks.py (100%) rename dps/{analysis => legacy}/xsection/make_control_plots.py (100%) rename dps/{analysis => legacy}/xsection/make_cutflow_8TeV.py (100%) rename dps/{analysis => legacy}/xsection/make_fit_variable_plots.py (100%) rename dps/{analysis => legacy}/xsection/make_new_physics_plots_8TeV.py (100%) rename dps/{analysis => legacy}/zprime_analysis/__init__.py (100%) rename dps/{analysis => legacy}/zprime_analysis/estimate_QCD_rate.py (100%) rename dps/{analysis => legacy}/zprime_analysis/make_control_plots.py (100%) rename dps/{analysis => legacy}/zprime_analysis/make_control_region_plots.py (100%) rename dps/{analysis => legacy}/zprime_analysis/make_control_region_plots_2.py (100%) diff --git a/TESTING/test_config.json b/TESTING/test_config.json deleted file mode 100644 index 693067da..00000000 --- 
a/TESTING/test_config.json +++ /dev/null @@ -1,35 +0,0 @@ -{ - "com": 13, - "channel": "electron", - "variable": "HT", - "name": "central", - "samples": { - "QCD": { - "bin_edges": [0.0, 17.0, 34.0, 49.0, 64.0, 91.5, 119.0, 151.0, 183.0, 343.0, 503.0], - "branch": "HT", - "input_file": "/hdfs/TopQuarkGroup/ec6821/0.0.10/atOutput/combined//QCD_Electron_tree.root", - "lumi_scale": 1.0, - "scale": 1.0, - "tree": "TTbar_plus_X_analysis/EPlusJets/Ref selection/FitVariables", - "weight_branches": ["EventWeight", "PUWeight", "BJetWeight"] - }, - "TTBar": { - "bin_edges": [0.0, 17.0, 34.0, 49.0, 64.0, 91.5, 119.0, 151.0, 183.0, 343.0, 503.0], - "branch": "HT", - "input_file": "/hdfs/TopQuarkGroup/ec6821/0.0.10/atOutput/combined/TTJets_PowhegPythia8_tree.root", - "lumi_scale": 1.0, - "scale": 1.0, - "tree": "TTbar_plus_X_analysis/EPlusJets/Ref selection/FitVariables", - "weight_branches": ["EventWeight", "PUWeight", "BJetWeight"] - }, - "data": { - "bin_edges": [0.0, 17.0, 34.0, 49.0, 64.0, 91.5, 119.0, 151.0, 183.0, 343.0, 503.0], - "branch": "HT", - "input_file": "/hdfs/TopQuarkGroup/ec6821/0.0.10/atOutput/combined/data_electron_tree.root", - "lumi_scale": 1.0, - "scale": 1.0, - "tree": "TTbar_plus_X_analysis/EPlusJets/Ref selection/FitVariables", - "weight_branches": ["E1"] - } - } -} \ No newline at end of file diff --git a/Testing.json b/Testing.json deleted file mode 100644 index 2015e942..00000000 --- a/Testing.json +++ /dev/null @@ -1,212 +0,0 @@ -{ - "QCD": [ - [ - 3429.2999999999997, - 88.07190244340133 - ], - [ - 6243.7, - 131.69153351677548 - ], - [ - 4392.6, - 138.64270626325788 - ], - [ - 2654.3, - 124.37105772646625 - ], - [ - 1680.3, - 144.71672329071023 - ], - [ - 326.0, - 98.60182554090974 - ], - [ - 174.79999999999998, - 59.501344522624024 - ], - [ - 57.5, - 38.200523556621576 - ], - [ - 42.2, - 57.20489489545453 - ], - [ - 2.5, - 13.6 - ] - ], - "SingleTop": [ - [ - 395.5, - 13.152946437965905 - ], - [ - 1036.4, - 21.286850401127925 - ], - [ - 1039.0, - 
21.286850401127925 - ], - [ - 1019.6, - 21.156795598577776 - ], - [ - 1341.9, - 24.26211037811839 - ], - [ - 753.1, - 18.179383927955314 - ], - [ - 462.8, - 14.289856542317002 - ], - [ - 242.60000000000002, - 10.339245620450265 - ], - [ - 344.3, - 12.37457069962429 - ], - [ - 49.8, - 6.296824596572465 - ] - ], - "TTJet": [ - [ - 17820.4, - 317.0337836887419 - ], - [ - 44894.9, - 489.2253672899638 - ], - [ - 49550.2, - 508.31590571218607 - ], - [ - 46563.7, - 450.07077221254883 - ], - [ - 59673.8, - 485.85701600367986 - ], - [ - 29404.7, - 347.3057010761557 - ], - [ - 15403.2, - 245.13771639631466 - ], - [ - 7162.3, - 164.60938612363512 - ], - [ - 6297.299999999999, - 200.89544544364367 - ], - [ - 348.9, - 82.56791144264217 - ] - ], - "V+Jets": [ - [ - 1049.7, - 264.28577335906675 - ], - [ - 3681.9, - 407.02122794763426 - ], - [ - 3277.2, - 424.84757266577384 - ], - [ - 2485.4, - 365.9343247086832 - ], - [ - 1732.0, - 387.4737926621619 - ], - [ - 1404.2, - 280.5469122981039 - ], - [ - 863.2, - 198.64684744541 - ], - [ - 202.4, - 133.6726224774542 - ], - [ - 787.2, - 171.55395652680238 - ], - [ - 137.8, - 75.45130880243232 - ] - ], - "data": [ - [ - 22695.0, - 150.66947268773458 - ], - [ - 55857.0, - 236.30971203063152 - ], - [ - 58259.0, - 241.3578256448297 - ], - [ - 52723.0, - 229.6385420612141 - ], - [ - 64428.0, - 253.807998297926 - ], - [ - 31888.0, - 178.55220525101336 - ], - [ - 16904.0, - 129.97557462846623 - ], - [ - 7665.0, - 87.54164723147491 - ], - [ - 7471.0, - 86.4916759000541 - ], - [ - 539.0, - 30.076070222022025 - ] - ] -} \ No newline at end of file diff --git a/dps/analysis/search/test.root b/dps/analysis/search/test.root deleted file mode 100644 index 51645386d697dc86642bd93cbe0514523b4c6a39..0000000000000000000000000000000000000000 GIT binary patch literal 0 HcmV?d00001 literal 3552230 zcmeEP30RG5*M4`qNh8sK24sk&6e>c-j0s6ZgNjNj&7=`0V}=q9N~V;g0gW0|L@AU) zg+fs@C^9ts&)({N_uC#CwDWcS&$%w=+n1w#&Ude8t#z-p?q@yLmX@{%@+1U75F=!k 
zLM0;OeH1~s^AUtF1^nJDR`53hg7E$TziEvSqJFPro%40ryR*Us0^*ud2mWb~{<20$ zU!=AovXv8&CVe6Jw}HRdG?$v18xOHHwy~XNP5LjP_=vEP{xtaYAHv~$2K?b!oj-+4 zyR){t{}jR%(!Y@<4gCBNWHtHUSVa1HR?TJ8mrgY{H#W4jwzSaOsG+H+r>VKZ*j8_s zv4M@9wXu<&-YU}_Cbp#C(z7zRvq3hm;#@;O(pV5jf{niZtBkadX=yVKyO+7Uryp@2 z#_8_n=kC5}(#%OS6D~{5mz}3FX8gQ)<1?jikDsSFGdlj3)XWL@Y2)&c* zH|HXSj|oM*GYHw+_n#vg?Vc`2cscGnFVC2?^0c(5lkUTpaqZtgJm_XWJxXi5mXQbV zS_6Fpef<^=-@+K(edlw0=Pxa&tO;>BNf~a}n1s<)ja;sk-GGn3aVuBCrqwU*v8%+rFV)dMmz3Ml=hIh0E-wT=R7Y`e!y>%%c zmdsb2o`NX%|NexBUVr(Qki|v-?MzoD2-X|la%#f9tKWV}sG?K}_ zX9;{fJRUp|X`E~<8dK5`eQ6a(g*AH|E!y9Q=-w)7&AnPsJG1Ut>9;z!nFvx=y;cEW zj&}?e1R;{V7&^xPlw+8a9V3|>y_S=r7n=`iB&l6?_{hb`DMk^HUKsFj2y^C)jUwb) z9XjxJxagP`O{wEgxYr^R+++N-MK>)EmrO6`lrLx_78*_UA=>AJn`|2?7q6$ZOn*0< zPzZ6jVcJExQLBe=j0(1mBog}+g zfAIm+w(zXuMV4o*z6u;7s4LbS0O}5hpu(C_mRQwmtb+smwYO~c8H89LtFy}4nfjt3 z_zGXUZ{*Lr=8E1K-j~>37Wj`BjZ|(+DR+7Bu>7Kq(U)U)Ch!TL%Myt34LtCPcxB`Q zb}jt_ECeY*eI!cIV%E;{Wk)8d+wV+r$bFol{ABgG+Q^!-Q>;c15q0YwWxzT%T~h>S z5Ne&NhL*dwnOcB!K#|88d4pK@DMER^;$#I`kPfgs{wGDSNeOjvh)Se}-5)WEU64zN zi5EHNWOHziG_RMuv`-y2MtP&Yr3N_GBGmJUuEygm2nV-$nYwbLB&ae3+!9`XfwZREY3J zWCe?{LbOuMX-zec3<0C7tsK`LYD;l^_TBI_Q(oUH)#_wg1=rMt924)mEistkFTgir z-lr`b?Cdzm#qZ`AsdyDxeAU*C%aDw3yUk&9_~`Ge&SybZ_Pe`Qiz-8a;KMbHG31P# zq6gH82IPOJ6X)QAT;xr8(&yrvHWIqlBU(h8R&BKmm0W-Iu2<7~e$Sg_g85!{SxWnc zZn;y%A8;tcsU#;%JitD!$^F?eT-1tto_eHoo?edKj-k#qci!B3e#7;RdR%gp!q!$r*yosJZWuj6}t#Ny&q|9WeN2(x;JT;8zgolhh@LnWUW3CHxWp z{An2e7^j+n34g%MSFk#yUp9FzT8O@E#8=kkZ{%0z`j7MS!?=#MZ;Q|4h zq6{AXCpCKBXx50K0@AZMXDypb6bN+Evr7ms*yZ5FB9I>*9$CAwuGFE{xw*_KXV2cY zoj#X%LX3YNxHDmmUu^qQ>D>QPOf^@v2kzW+z;cp_O^}I5*$4M&fl1su9_+a0+H^el z-9w&V=BFmf3f8IrN{RLe{is(c_)SgNg2iXj-jghT@+{s-XT!LwY6_1xmWv!Yl!)AR zYWa9;>)J;9p?hso)~;Qma%F|rFxHRJ7BxZsfphok-rX}Uc4?DHae#-Sw4?0!hP~YK z2mKZ#Nae5deK*oTO?70h$y=$D7H<>vK8`*zT~#9gu#kdYo7>`R?o$&}fbEOVYBKFoP9mvBhTR}+KRM%JHIHwE3~`S3OFk@V}CD??^yJE&=z zczW&Ob3pD z3p@+Ur5{X_@t^nE?(p&|wWQs*%oA)Z9OvW?XPX=nb;Ll%++I=YXychsyE4PAGkw44 
ze+;iUIwfGi!#LrE`I9bg+}*;bmg6hEsv<;XZP?PX*!W0-&1(I~);NPm?%Z$uY>isS z6U7Z?ZFp4id5G!FiO*l`SUs|BLJn81ReO3vbaeFEH5PmiH_MDlneuqFi-=UjmIr~2 z+MX{zrd`=nbN+nuz8hJxi&V*Cu6|+0+Sun@vi=haf&zrsydI`KljS<+i$J%#fvh#p^@lsHGf8iqqB8tef`Pu$ z!su(KkvAjW`bZvgI~#Y!?b{5so9jF;Ck_joVCBGCYhoP~(4=y9lKqUV8qo%q118+t zmJfaK?2vGjy!7)&wI9QUctgz|d2F&umb+D!pdR{?@7fnL4QFTe3pLgymRp-z9vxfj zFlWo8yCi29_uQgG{2AI&)arO+MO~{PE}dO_@BX*nStqAxz0wJTrHwH^kJy+d{QR!7enXCY>!#oC%0r#C2o`dV{9fM|9<2*C zQN3k(HfY-d*%rg)LVG3C$`l3Gm1-t$jktC(n{C*As|e+?$KU1}j+pD@txz#kWlfXC zdv`ORtTN(SwJA}rx2=9zYi@F2$36+)=ryDFzmDFuk?jZDfmahg%U`W?a=CD*=tS~= zZZ2QnkK{RSFjrh;)~P+(-p9r^s>=U&>Ks3-$J%{s+FNRNN+YVOL6a{ip8hX)Pg+*U zIG5qJv!a!49go%Qs=l{LZMUh8tVyh>!0l@}XQv5X)O0^|)hgoU>4ysvmbR)jE)EWz znrF#*JS<|$j@>TuBPNWqA9>`K%;=RNu}kbG-AkCFbe2smOT|F$^Y6eip2e!Ct8G2f z&6G3?%w9;Fy%6H(KkX!W|KnGQs{7NYHL0$7BDVK}>x}BLNBKlf`2TG4iMG5G8UMXC ze2>N69pfD~)NVKuS|#_&e`jRXin&3amBzc?T%B5P$9{(EgmqN4+zYK+i`Zn8jHBO~ zUGP;&dVcV(q zCfOy+iVHdanyN0h#>uOySvkU#58ysn+1neX3~Yf63v%rSs8~4dzZ-=d|~0z+JtuQX*@% zWDGaIbW<@#Gj44mPv+92rDD1#8c#?zq%EGI|7nGL{S46!ya&b`CmGv}-1;i+vh@Mq zrpU=^x@yB?*vH-C^Sj4$WLTcvqYCx2j_Y)j=lctKvbNfZhz)yhB6jwaxBRH4Q@_`) z27mMVXt;CL=%$dB#HK5EPfVOH{>V#N;(W-F(z2x+crFl^{%)UqeD_nslWRsth({m4 zQ8U^p;^>x9uVVZS8fDIH%h_6RES|G2Mz|Lgbadn{afc1j~$6c79-aPa+D2dRLY!U5}lkE`CCTbw;&;T-n;k?}#t zXI_7?-~RL-?WfyTY#+s|t&scb(1t3n{Vh^uELy+0q}7(mlr74e&42Gs(WVlqyu=TO z#1BuIc!y(ROqP7^So1^Dj*nwXcD|gob(72v=ehnxiyl=TPLZ1TJvKTvZ7A{3w<(pk z)HNQ=Z5DZCYR7V_G9+*7bh~PCRrduZqpls$;aZ|u5VtZ}>c8D-HMcD0t`DkOmK7$n zBq#?2;-BMq?KxYOYQps{`<3U;Rgv3upz^5433-(irHi>Ayv;smMmHQboTPrO;js4u zy?M&_KXLy~uwTF8oMtKgyT#^odThI}0NtWhTLeZv;91UA;7&2!dQO9#&M*t4AP zsd8M>Rp%zFqwyZCB9FaNzOFvT{h+W_vB@)_w$0{JK&=|bQdbevMuPEfVJ_E#0(t8ajjP>7O(SJaztlb*2?9Re@zYY z`~0fpn?0}4qGZiess(R1ImFm3G8C9Jex|QAOY3Gs79|nleY2dUCh;>YcQ4+#blJ|# zUE4-0Tzki}S!p>}65+yj@i`lOM4zmGcChxuM*)XNLdCp-dw2Xe9xyd&JL}bj661;H z??g|Tmse!|cWTNo_ld@hfeMZG0pTuRS!B!RSSH$(w;YlbW?xk6x@JSY{UMo4-eEIW 
za<9LW-Nu^`G11!S%(6QRTLo?|%Cy^196HP|w|ur={s!M`OWKQGv?;PIl{*&vJag`|HCc0O56$JA<^4EX@=E#VHEpbd4Me>xVxYs_ zvX2tRztj2UY$q4p+IQFA`%zich+A4O#L6;`ZjBtWZuAY^>kp%+rm~LET~%;tdxK&f za_ei2XN%cvPxUuTb_liH)$)+NlwoMAePLRl%qGQPuOVYw3>O#_CG$_1;~2(S)wZ^D z!|FJrQI38YVNOkV{R3woe_eCum*#<@H$_dFE)&-O)aK#V8m<3{|BX{E3xANSkWIC0 zg!;$f*Co!ZP+a|wXCs3&90|FY`@2X=iJ*x~0AAQ*{HMU6z&D`yml;9R1Y za|wX8#5wv{Gjawvmtgu_0$6A4EYv!1E-?Ue2>|)HFvgHGatb(?V9s3P5}D)5V;pz9 zI>|`1=MrFK!QDn~J)ftZP>jD0rW6-R|5RyV2N%YtAJD!g{TX_*3YdQWYAlZDz-$OI zQ5eOVrhtkO-mvcIzV2vmZ~I7go=V_60)J$3lYkhnZ_Wd}ZhSa-&=-ih5hrk^1*G3o z1101#C(2iL)bpDmFoo7vsPu z0_z0uE0Od`o&zB$%p!fVJPCe%Abm=dLBj&6 zGTXv>?62!FP&7*?qmBXVF=pz-fJ4Yg(Z`yRGr)R`>Gc@!glYFt>%e+!0O~OSxtua% z$Qd~WtjCy8k5Ng60F9si8XBpG`pJP2^HK3zb_7w_j~R*9WffR!VV70w#{9prIJYBF4gB%uqjJFi@OW_AFUs%r= z^S_Kmu<-lq!Vef}F=++JpZWx{Rc5kPfb~)^=B0-MZC0-zkru>42m=J4qK5%dhq7xQJ#b}^2LJ=-q! zA@BQz33K=I@%zpx8&cu)&gzZ*nNPRU;`q+amwCHV=i7_&RmOFvO!DvY?TAcy)m&F; zc5a@A|1cwcwxv=--PRG<5uNghb2Fab7=Q7_jgJrQ6=MU(D}J9mX{%o88y2wIXWnFU zF<>bBu7*-CVQeT`6)kEgi9Q*MdcoMBF%<0rt9YWL(jbNX07cJWpPG8CeYuWuC zfz>1*N;O2N+yl@JjkSzXhwjm3`Q8AzhXvU^?vmr!3Q`{& z{!qD<{Jc&a%{n=}WpgUGCkDs_u!CTvesuLT;1(CUqP-p{X**Io71|74Vs)?4rd~8Q zXwU|1*YJVY%OsDgIW0Na&*xR_s;hT=e#K@@-9)7=8;;xXv_?NH_pq$KJtx#rG_UQY zW85!yj#Uy}mr=&vkg=04FRTy{J}=b(*OP*~u}A0CtV>u+ZN%mcyJ~W5nTo-8JU??2h<@ z_Z}*~sqq!V_T9|gzo%WL`SBMA)o{Y~oVELb;ysH5I)N30AfZ2scak9eFBR`3(R|0n z5n#ctiua8X|2vBJrEfbG?_!UD>isH7`~F)IVQbk4K5_6qLO6!8hO0#Et@{oMQnH4@G0 zjA0yyq@TQ2xLeYXM;$aQw=9LIlQ(Yns3z{0QVXEI6}$gI^Q_YfNGOYkod~9NBJiaa z2uga!sC!V6e}(}GmRh|nwE)`s1@zHo=n}BhVt%Ow?6Zw@&GXN67nWLmTI<|RsRejZ zD6YN*pc{T<3_3%{(CAZrOT}IXO??aQf{dbqd9Sv|Jb83#cXi3^^`}%l zWw-K+@A{HdULX0?EOD-@e@4l%u%q^t8gA4J&vz!)COS{&v)yqm+qxt*EPIznl$VZ+ z>6~RV5q=>PYhWgPn+T+I;}fWvs4@jZ6T#Tk@3HHzWb$|i1nt%xrl@6UMm>hsg$~Fh z2gFk~(4I0KikgYa0U=dm`1(foR1IG@0*!&03yL<)3Wg!#uv+g~wT_=)1JrjmF-DzX zD1vq40M(7aP`kjUBzi+5hOPnY#_od=o_hhbExGBV&Cn$v_v&}<1-LPOLoJT$XXqG^wlayfV$}r~qK<&rW%B@A+kv|Md&a0UjA4-PGL7$QkeietKo7D2SxAzkk(Lp5 
z@ZGUwN!n`k#a_%aG&W#w^|BF=kPa!DKUq$znMabMUC2kJXy-^$w7SvM4_-02&9Moh8PlL4zh5};mk-MzQR-W(=zbH zC2^SBY4Ct6c;j;8^6+)lJcy)CbSTey(lKOsM5LWMR%mw@Fp^UG`C4Q^bhH^B0GK*q8F zbaz+Apfhv~SY9)!yvABj9Tmj>iHyyJvC~ziJ4|fJ@3^8$E7*GKQR@j`ECapmfdCA2 zSS<5v^kqRtw^B}QI7B+?0QLW{_rD=~?Y3r+OUW4U`&(Sd3X-8b6H)h|;!_Oc5gZQn zhP`&6eOQV<+6^cqS0=I7KZEUL&{pL_ zmXq45$Fcsm6fGi!nQ6t6b$WUzaaAWE$(byK1@7&(epp+8|;1BUqGU6P}yh3yiZ#=*x47bYv~_gAxH`8BcQBE-ZWD@X!u0OYf1BL z=EpYqJKcD*!-05n7P#qKAf#@LN^CpP21@9YUm2qfyC>ov#?#+RfcFFu0utXDsqT>@RTeo?WulSlH2swXlwOcUC}Jxd0-{tCgWefSL_;<8BeJ&&vaI8@9;E71%WMk2ok_Kkg zrREklY^C!`YK!t(UOkpH^A1@2L%8_nW_@L3&JYttV3u8EvvdY0aQM$)`hO|LYY6ZT z0BXAKQgvW(fREo<5caWD{U;T$j5ykC&3oiT$MuZr_ec$EH9OeJv7NzAj=9Y7f%3lk zz=_v0zly8nj92j$p4P;fclT|2>6MQuFHH#GvW3^^)oEb;KpBHON7#|?+Q0)=!qhfH zK#%0Fy0%o`_E%SPsJ;!HpuXQT0N;km;YnpL5UiY&K*I!VGB9sM)`7MkrhlM9oBjy} z%imy=f%#1apt1{)LA?Ps83v@u076h8sF@gD7iH)eu*txrCIh~R1z9X;Icg?Y#QJV{ znklbum1=b|t%7UnLXL^|-If@T%BnsVv3Rrr0|}0UXkTsZQwZv9%K)GqGJ!GLSO~(i za@as)N&^kwG61pXB2YheqzKqDU{1>bpnMtIe?^5dJr95_1Ln32fLX5GLk~Y}8HD%Z zwS?|k2B01e#`R|a*rBqF!KUX2pg`+Gf96m7jCHW}XO7j?pT9n)>BXsuYUw$%0+Rza zi~8~S=xmEDv;7gCa>})``6XZ79H+{Wb2OEb=MT2TIDv1j9R+-9H0oc*l{Xd#u)AIw!R++(8@oftbqGGZ4uc$&4Oay&YI^fAC z7~0&bz#V1LI=+nnhV*gvC~hz?)60Sb)<(da z2ZW3SL1>;fVxiGgAEG^7(Zhh52OrVFL^$ylW*!*P`+3xeZzlk>lgBYe8;kL9_%#6P zaXgCz_PLAo{~?Q{>Us8&MdJ0c0OeHd{udR>^gIBz6PVjh0GT>16$?&KFU!o~7x005 zQyGIz&ktZbff?-tymlOzCnJZkdFU0oP&?j@b{rK(cMA2v?dvQh3~b0jnaV){+V`(A zMw?#bfg`N}7-<3fWG_YSL&c8hg&jE3>eWaKp!^WLW=T)xM$ZEv2W2h?1!l=vgqnqB zGmoAFz`Wai-o>}(0NA-vjKQYo2atm@gM(r_^tr)(G0@n~TZ5WsGA&w!nP(w>NfxkA zcaMrQ-Zj8laldMNjrW{Nw3RY_DtL`dF>Ww}9= z*@dA`#<3S2$vmnD#emp4Uc3So2s;OQP&EJ5+DB1?uV!AuN66d^#Jj{L;RNZAj6A0Ze{EbTQLksS!+(r)bFL}@GO(f1Dfc%;+Pq%b$J`MJ{;$8@0=1#=X-n9x$r=T+L|N zfCJ0!cQy)*D~^tI+pL_n+5MHf`K&`@RfU9!!%xa=dy|H+h=NV&+#{9TFUx7q)ZjE3 zM%=k%+meYdh6=I^v9sTI)n6`ClU1Bx{?l;At`zZVj*-YVqVX-a*)QBgctpR7U6{&l zFOreYcF&sA98nM*7t+GY!8u=Wg=Evh5vt2Cm`@Z^(>h(f%u3ZG_f|VHb0qf%x3ZWi 
z{@Uz3wZeP?+FRV0d`!y}iIO_Dbp1W|`;*Ssq@|6)CEiPGcAtJQy=JwJfW7iKhH{^+7Q2_2vgZ*oyzpjmFh;-1ue zp;^{+U)0OEt*$@%Qr2v#aMAAo@t_Q$Y3Uga2z%EtT{2R7X`m>b+`Vr^J{X`0y^D!260-9jc zb%Ygt>RX{p7zR_{itujEzE;`Hb7rS1q|U{@ytli0p4$Duxvu9T)`_KzsDBtS<=t+_ zv!lfGQlp-h-zbjW_TknB>Iu`X$3{!yWapkJ9qE5I;miCj?`JLenyBL<{XzuE*k=e{ zk?+#%1$nw!12qm+9-wO&7@_(*LgCpeDBWs63z*R;3f2aHQ5)ddDnR>-7Gt#OdIDst z%x9}0O4WWx?L!r%=z0Pi{Pkw=2fUzu3w@O7dH~D?{hbT&gFkTk8$jmn%mr}p*N=lg zDz*y1ZZu#FHeE-6Y_;FnD!yw0B2-fwY8;pc`m1Y!=bm76_T30Y8TO^=%AjcWh`R4f z}a!-(PTd@6K zG@EyHt1l=zV=OwO;+_EPR&4(lO|a=Y0_2|k&OPyMY!IQ^_o5LB8i&rzV4&LAc*@F- z5C}lY+*u;iy2nB{Ou>5+y``)GZI*+K(WV=xAZ6`$$_m8GW+0dR@9C290hE9cSd*|* zL#vHN_te1b*6-O3-(v(W&92SZICKLj19%a-Es8FWs0kFZs+y)D<3-43ZE;P$Z zn0WhmtBI{<#~5t7jsPiWzf;gZ6-yHNcK0%HyPGhy1U1eiTC4~&&Wa`L_4KUF z?Q9TmX$$O(O6t7dokzakokPCg{Rq9^9YkZ(0mbd(52HjS^iHndoQo7@6N)$sdBAh> z$B>wKk#lb5!+x`~4}EK;udkh*He~$4^ly_8tF-u!t{?j=B4g1R^T&;${7PHS zH@vOLPujy*b>d;EZ+yT9E0_Eu`LDg}GMe0<9UDWGmRsnK_;AS~FSrzy-;9e37k*~( z+(N zLQW8&ZV@+a;aR16=a4gz$Z4e{CpXpTE$iuf`p;FzUJ&Go5IijCr@u&3L|a5eq}_eO z?FeDLs+bQap5FSo_nL$ea){UGDND6Jahse~wu4)Y$O{7=4q?umu~CFvt3wCA4i_EM zqA7L!3HMrLf_sdgw&#_bH~|4K}=+ z)VguQNi#(I%F?bkG$Dx4tx%5H4b}{ZX z4|m1cI?ldg^-uR)r>vn<(Q=ieFvDy90y46s{@=E5-unn zntO9^gq_NVVL6vSr2qVK`ghO@s}V%R-eYw#fcj6nv@-{QATzR>G1PRNU{#->MlVea zDyYGe2CY7Muf(?86g&8Jz3$5Q6P4wEFLRxD-{MQX%t6Iclj@D0{PShy_I^Ka{{zst zwe~k@JzSZp_wq~gs(B7V!7Jj1U$nL;c$Jc{fkT`P?B(JdVVDMtgC|2EL67#tj6;j7 z((waGhPo$1c&v#CL9`KK-5nX~7vc%97T05pHJzXXYcQtNV0hvQ!j1%{f2ATtbV3d! zp1mQS0OPUSP#2)Wn9lKs#FM$i6If;357a72tscTLU}Z5zQF)MMphPB2@Q9H1n4E8s4cmPeT>4YapQJG9pI|9^{ z5Y#+W0SZ!7+ejjDKW-G$tKKLHV-RkpveCzw&hdv7m3b7E6qrD2mI*+u0x2q!C~61P zvh0kZrsD*VqB4P^Voi#U=p=UuH4e-GT_|c_reomQ?k5&N>wpdkI(IAe`Nj~{H}(16 zJJmM=9CcMTF$eR@u(KAVf zHB>*;P#{s`M=1c~`77yTOeYV(E*f*YXdpwYmZMgonW3?24VR&z!mt+#LlPMR4zO?) 
zW2osk0VG3AAVZkaw!=7!F#R+1gMRHM<9R2z6}ChZ^8?5`naDc<)=SeEV@)UK!0GJ( zOmBgCmSOr&$UB+AI|0VaGf)?x!kEtShrE-yyc1Yuh2cyh{1FV`|JEq3XPV!_Kx)% zTB^oR6ZYiD4Qgm-6<6N!bZ&Ki>x$>)5iMV=+TP@d<*92wvwR(a6mYCi@ZgC^<78ve zn39I*OP5|&h$&mab+7cuEV~3h)AtF{!c&q`O!lyVH=yO+jedfj#h*1zq)TBP)>+ez zxlVUfmBKoreXxoZ-UhaT#r z8+dqE07B@s=8VCnj8ha~LswviE8t}&AW*I6LajqZqbQq16qfnRyD+E={LfzyrYSQ? zc~T|rHOEP%F#lV^n<*tVub+M^C|lJ$3tacz{Al$;sun50Skr<&#&k~qUsBb z$6oSOe*aU^({*>moE0ZztsN^WMm>9&UT!s0<5y90i@GOwF<)XsrHX@6zWqcE&9?%p zB%~g5%15zCj^I=cLx^rStYYmpeb!QG_0UOrG+Xh+%gq)7g0Wju*>92J(i(wnZ-G_t z{R&_eEg<~qR6uuk37g{d_u%qrH^Ln-pj*DN$61IdTS$r zd7rg}HdWOd>!m+lzviIm>j%r6rO&w3)aUK|)pY%vDUG-5R%<7f2n!iFuqOpu{2rbk z|IIa_&bQf28iXf-QsXFK96TEW33?MR<_M4t^`ddt12%*Q8_1hC_hSDKMbv@Rvj@9896NIjWLJ%Ls9 zU9pS{sb|lxVfUJP0;~;c7-LPx3n2Am0`LoO52Fegdfi+a54uB#Rn8&&SH4noeMq!%2HkZnI(RT1u(2B^NbShbZfCH#L@Ps6GF6Jx--C7q6r`f}b2@T>2Z1rx zbi4qvAtrQO@s0pe!2Te{#(|E&K`RrS=z%;2oAr|%0o)=yh~597*|(#EHJtVLYS!NY z_J0o8!KOv?=|mmKJekitfh!ymM6E-`is-ljWS%{p^^=$dp;{}j;GJ$!9kjsK(a50=0Sx&mP?lwKySc#uej7>8r#~bV4JqUW zj}zxIcs|{UInSqO^zixgnELs|^yB$rwW%i^t!MqZoKdzxCw3j5u62CV$&lAhkBOcI zT)x*|*XY?O*=@aaJ$}6a^~1S`@BXUuEo*xyQ*-omfbP8|itI^8Bjd_~Jiw(*zC6>V zpmSl-)j%f_7#oN(Qc)TR@YOyTsLM=$&=`p3P(M(*a<9^SjV29w0f+e&R)urhf3i9} zzrG1Pp#JdVYOqa*fOiL=oj)~+G1`=|iV|(;4$N_fKMVQe$?<9lDPEno4OHaBBiiD^lE7(L3^Me5g*@JZ=&!p;jZ}1Da8+S7xt;#7fKgG?CfLK{= z1NWsm?k!U!>oZxXS){t_mr_P2KwG=Z1>H1Ey-wsm&Xor5s8_#7YFMjD@2GEQ@Q(Ui z=J-H)Uwz<2WO$i4$JO(V+T$BCGo)i5wC*V3XcoNwNHv@gAEE(fk%T|`E();E$}4hH1 zLI;9{g5Yyr9W@YSp*?y~y4Nfepd56IKFai50J2c#vQS`_3)uSykcEbu^kU~}Z&@e+ zJ0u?U0h(ab^8?62nZZKwg)+$0mo}p2p-KewOantye}^bMT?M-A%eAOkXm+(>aqt(# z0iLb`w6AVqj5a+-fOK^r=qiX(p_u-cDoW9F1=uw0Rnru>L6{-x22?21a{-tO`a2i= z(R+3F@xb8^|Am@`CWE5q0I+G=Te=Fsj@ZE%YwfLR2I>Y96W(Mb8gl zBIqU&;Mpg*7JC!h{|?!w?*=++IQwN}573T!g`R1eXwwThkbMpW`veAx!Sr7+=rE^Q z3Q&&4-oKbn(9Ne&!IOs7#KxGdwW|$A6^3iLJVxp0%;EtDEa!b%8lxKz@(!uYm2GAr;jgh zfbYTI69cC;3Df_;dt$xe@V&k#2Ea}ZMneNluqh)q#Z6!kxZ&$@{r-9!ejy;RPRd-= 
zI`E!YKP&_U;Re5<0AQRNLLXx~rytPwxK3SjE?L+7kgRLYMs>}BU3ASj#Vy=Wz~IR& z(%mJ%DAN^DtH66=y-+&z;9@pLBs#FZcZo69bi4rG6Jx?XF>H#$2kwc14F#F1sBz#F zg&udOxuc-_DGJs=CV>s^3}NXH>}b3mEDeaE2&zW^jrZ>t-6_7VN$jZlpl?wk1{M?8g&PAE3!{==lL`ATy(Z97w*Fng}lc@*!%Z%fETp{l|%nGquJBJv}S)KdTBr zfsoi)Cf_HQ$rXtY?k`3A*mUym#*r#md>(?7pVRI`)-<=XYA%*zjdd?D+Qi!E2wTv zd_(=H?&`!vIX4#1etbj9;(CCe?Df%3$1c`zPf$N;hHMNs)&u2x1#gKa4oOtzsZcu3y4O+_V6)9IS$+4$g};nli2LqA2j#<7B%kI4Mh(*e?W zF9415r#Xy~#)1?j($EW-;04>rUT}vTp%#%MQ~{=c8%KXV0i|7p2*{nz=#)v0PU+<6 zl!itppDs;2nx{X5P(&=`0FlX-!D<3t=3L7j+t+K^{3#2a`6pGeiB~JmN_rEMV^=3- zW2^Y?Au)^{@%*>~Jg`CV?uu~XL-h1hAx!(^tMwXTtW8Q0U;gJ225SivE?AbldXvnT zdN!Ohi9Kw#iov5-6{31ytn8lpjBfiH#3y`5=lSP^_2pK&PmC}5?wz|zLh3Q6d=!i1 z2u{^7gy?p|D)#x_I<52>4{w~0D|&osZ)m{chw{ZYOWzJ`WY=H;O9JkLNku$um72($B2l$d!EH z6Y?jZ|Pg5TAfU*;F`LS zW8!_cB?hD`bRuuc2d3V_Gffb0%CP+>R6K~b2SBFT3#JJWexrb%ekz1%CtAofnaMPP zN#1tDe`p_k$TWM&Gy&2Ta_HfQOj9mik4m>cU`!KV0fVgk4&y&CF0l!P5QiJ4U6dQO zdI-nAsepk@=z}iiEwqSIpW_mZ#x%|L&;NX5x?dui1nidYVLu_Q8jgw3n0=ApIF^ z?Cy{O(L|cI1Hk5I|2IFeW&}U72=u54wHW`Qic7Q|07ia>ksoV%@&oAuxE@fKh8=#$ zFln{o{+eNSAl)#9G19cd3}l#0WtgNm1H9lX1O5Z!43W5h3{hHcp*!NkC5OD=>eYC? 
z1K}nq^z>6ja@r>!GE8PMOj1As$@N_iyWu~y^K1_)VK`<7%fMJjH|ZY^QC{7COG^&6!R}ybcS~5hfUD_Z-QbQviQL^G6KTFi}9aswj`1++M(}X zgS7ltTM|i^AUmJH&L$Lb7V?05rpFlEfZxfS8}R#kdISEazp!U!qnP-B3j}eYUfX)OH`n#d<;0jgY1l zzx+W};u+TGNrH_^Nqz~gR)+k|_RbB05h4iN9OHa&0nD6yH!q1ynaIZB--ad)n#RFL zDAYLMnUOvi2QLC0G{&JlqhBJeq9P&FtfBX$%B6DOEvFkhJ(3pKvo4#7NL8-}WQSts zEXfgA5SF2NjIqYYC=_ex2uyGUY+EQlvX~U51a_jvVX!R}7omiEy~&cc17s44SPMBp zbh^dh7Q7&HZUQgp>rG((d6ARFy}VrK0N3bkKKL&AT$dv@ zvZZRSvaI5p^5wH?YVx=@^KCp4nt8ph+$P@Y{BL3IOQ9JFIdyk@>df|2FVVVl%(Im1 z<-O8Z`*$SmG2iok;?u(NU$-Smt1X9T8nytd;8Osw%J{3ORl0d@0~eNhkZ!vh^W;Cy zl?J=y%-$nASQs#)&i`9aQ$k4^+W;2Skv)>9z3~318dwexj}<9*zvJz`J-|VPjjz@>yuvb znfNNpAO0S6t~B~p`Sk;B0jf8{`05s2=2x4{s+1gS`K!%zxSUH!n!EdglkS2dA*#p< zHW9@9V9BWcBQ1i9j1m;ym)ex*o&8kQUU^0*rECgl@FhAM*HOhMkf0|;q1K_w0dy<_ z;}dh^6Dz_pc`0fYnzRX)0|I>gGUEYQPl;iSH61U26qV@|6@;g$Yf$4*g(o^r0B1Z5 zW;|3W062h5EPagW9DbMrn41E?$(LP@S_M+n-nmb=XuYdT&4DJnB4Dt^un z;*DUEJRSwWe(vYe$m#TsUz1SGF3Z$ycq^iIJmSFGy(8QXK z7eK1Y45~^+hSV&fCv-zqY26~|iI4@4HMp?6tOUav>PDHU8wFUe01tq6;{m8x5jRK5UFQbn!ox=}_Cv%A>u!{O?)G9Q|5S`Nx zb)&r<$^)!5Rxrkzju$}U$qeG@L{{~Nfh`n6ky)h6_iI0*))B;$Asa&jb7MOzWb-Oc z6#^30@#g&IN2E9BH6N4SoWGFU`R4psf80hiy}-b8-kUe4A)5CITI`x!;DY@O2J+~T zKl13TF^e=sv_(Wj+TADIju6(XiurKj>8+o8uSpmohj@LSvQ+C6x5-&$JGjM&yfEP5 z5a!Go8%4;qI&|RcaM3X>no`G~aIZxsxX1Wui*8yTE}8DmE?>|_?1okztINK%&}M9Y z!Y8jS(rv3go^LNZwd?Mxy9ur)L>wb=3J;Lj%@5uFD=oqX^gkPrTS^x&Vu9sJ+`?bCs$ z{Lbo<&i+G!)(}lzaQ2%Roc#nQo&EA2p8cp$<%NaUo}OyGpZ7Cvn}FD+h&|g~W?!hS zZ!Ecd+Jl%QHgDM{p06RRJ}?p1q! 
zlIq%HWyu_zm1sq2#a4*n{^W^+cnX1+3lzrq-VsP^MkFQ^5z7?Nd zJQ`e{8JV_a2C#uA*#=Q$8_XryVB&Y*A(L!{ zEYu3#I4ej9RGir?(7ANw9-{e}pv7BAAUOjAJG^9)9SnMA2a)HW95bXpT{IMF&N@S= zoG+G9eN5MpqfjZa=23}Qfuhxn!uHEyBg8&l%d>hk|Mq&3tNbI%Ja{6~IN4YN^!sHj{XGlD8}R$t*_1ZRu63}~j!rLo z_?&c)uUcvAA^`m!vJYG%hoYIJP_)GYg?zCu ziH4xl_z?6~qPDy`57EpbXt8My0imam!ICFGW`&-f<>C44kV<5uTtVk3mpixCAmy#6 zXOpdZ^E=s5WhHaI$2%1_xPvpVqoWJ@|Ac*H;KRwlE0TcUT7?2Hga>~0g2ig<7HBN% zIP~G2(BCIRpFxKH9tyoL4*I&9j}_wYMc$?(nhywC9Gcvu{L6p@g)s^IkRF2Hc;YGh zgxwp|t{(O}uN%0TuYI&1mr(w`FZB~DQUcSit9Taw-d~sM-SVk^hSkRLI}eG`0N=&w zi@}~(#f$DJXSUu_3wPRhG{!00Neu*kn@w9r0PJ1J*k31OuSCLL599yBc*{N20?lt$Ir9axT#=ooJHMd(8+X=K?;{uGoG*M7GSX*T zDmBz?9f2LuDNldlQke0zZ4`gP6UXL)eCyJr$WaL#mISrQi0d8Qc7VP$8T~6{^yNwD z>zAV^f7mWNOAEb?8k%}~6h*Hh0q|SIkzk{*zd4(Xdmah*G5IL&=XCsog(bv?3P@lD z?>&MRizYijd<27FR5@1FM?ZVpE^+4r-_6sziWiS}rO{~Eesn1R^-5Q9>yWbAP_vz7a|CM2S|gfC zL4KQu-WUM9bJrkmn2R2DA!^_$;!Jlzo(lSoB5N=N`Ruoex9Wnn9*}hUa5eazmztPg zs;%YLjWTuhHoD#q?v&_=&Qn*e`Bm5uno*o`gA^bTXCy2)= z1eUFtyfo3FOx=b*)sY2I8~&YoK+r~TC4*1VYB0v`e~Dn=Q&(6Z%y-vnK;fsVt_DM^ zK}5#8HR8?vjOp84GTvD}Eba>gO?xnd}lhE|J#H1GRqC|Ne;JWB?e4(mmG}@S)ii;;HT>JY|lo|J{A0wl~oTq zJze0ZsHz47{8q8sT~D?gOyX;Qgp%{Qie8;c!^cFt{bh_^b{yz=dt z4SZEDt92!#i-f46eyRCE#dyJB3!9zATR&G$-*F>g@uy+MH~G$wd5$=+Y$J6!ZMx=J z6YTs`GPRjbqBf?<=$Rjig&eGsnnge^cQTtaGPAi)W;PE{W^)od>z!3pIzfw| z$wsQ8Sm-y>_ArUql(Y4ykyd}fq$v5zYGuCsqvo4ZYA!3Ov^~iR@Nv#6uRKt_pSMOv zY}#IDe3Q}a%s8k z>Ez^dBk6?iI6!O`M`)`9ylu&NUnS$MK*D?HR1|Lk3~$N;3Y_t)orq_W5l<&`_%sx8 z9~5zl;wgimMbzW~m}@d1lZTmv`PiP3$@-(|Go!-YN=})kWDMt`PQ`BnkN-cppjPJ#l?whB^Rg|O-e`#ULjyt_aY&E?)xwH zpm@6TbyFq4+@6eiC>iq^B+Sh(pqLNGFvk^6$^;~|6ZBj%=nu)DXQQA8qM%b2O_>BO zR!!1UXdecspJEc~3wnlnw$!fFU1|oCKYG2!@8vgt_;cuo# z?}-SnzE*Lr#Aaej-rBDH5OKKdqm&sRoEZ}WYi zl4U2ZE2As^c}-n>@C{|s{;T7Q^+*i^s*%-hZ2#3L+6L&Xq*S{fS1FBhxwJ+)fqIern7gRl%mEf#E zRa6azk(IWc>GJ!pgyY0=h*!MwUtOry)@=xAeY*dH*C&T}y3>_kD6z9!6v!snWjuA( zUDs5%D2%#!_SDHW;lWdd&Eh$VpZ2icJpNTbC_2+QcHD4~2%_>0-q13HtpsC3P?lDG zGXzC>H5d$0zpKJa@wwt1C9WWUXZdyExu+WP<4l*b-4soU2_5ph?e+Wm*Coyi3&%Qn 
zI?Rin6fZh~dUCSsxZ#Bo;iaw)?8Q|UzmM{i{R$3E`Y>0E1q{-?zv}-4hM>yGcGplt zbjipRC01`UGFD~{hKyXlE3SU6w5x;Kf2Xx8`Goj9^;SEnU;P9R*|OTY)0Yg%j|7|m$ZBMq$_ z$P|=fXyqV@LO%i@jIdG*aM13CR`?*)S#AxAex^otYW)`V3zAL`=baJX&+nBhyZ)f} zk`7rjC)wZ!pB4PYYh{BSqlAm% zgHu?(vm!~~H?0P+Q*|>P{ZZJ3@UT->T%euST{ja4JVn(t7`mBKKl6W;9J3vwc4Z`= z^TJa)q8~=5&TE^PQ72X5yeT=uG)B_lcmB_xUc0>_+JeQ_oSo}NC0RRh-_laWK4tRP zb0v0p88zWIII5C2)^2?L+zqsn-+tU$48W(FS?v`@!57AZk1M@;Ftfs;Pg#BqhMCnh zsaC`2CH3}d$G^u#&d*Yu9jE%8*s6tyq%5ABdfuz_gop3hJU=$- z8sPZxZQ^s1vQ#P`3hw#A*|f*%=x3YME#5-_1zo*n^Lg-!kEHpZXM6(4k%#o!Dj#@l zm52~NXP{P?h_}KY8q}G6_4G(?y44GTi3ko)_u0gnn5-|x%U;J1l@+H#*Y`ZZLy z5%B+q(D%8ZQl7a=d0-V6PSh$;DbLI+ngDB8TgF(^@dBunXNFQ9U;l&Ug6@bhcmyQT?AiAd*#lwFCC;cwqH^7l~f)`j0sH2Uq`bfNm?``;P$n1H;hZ z1SRtWE|~`gICvU00F=xRO#H!1=0Ohd#O5DRGCzdlSlvI6%mXtVI*+;nO*3>7B`BF^ zreq!%;t00?u1h}dUH1`}j|W%x@t;WM0rW>hP#=Jj`2my61K^L3LV<^p`GJ(o1MIyp z{tqSd11p&a=${1F7&;^f6jDBl$_q;7`%^Lx5I^-Fia3g0Dl@tKst^(l*|vP zWFCO;vkwKIO0!LQDH%%U`(H8-;6L-5KKzuGRS%SAsJ`-#xQ0U0F&LC)|3oqm3~&}Z z{}g$aqN0M5`F@qm1IW*TM-@BbJ(SEdTQU#u_SZx4hLZV#kjw+j0}4>gp=5qwB=Z3E z^Vt2TE&@G@!l}0cy}>oI`X`ck0DTbl{sWZE54dC=7~sMZG~z(X{J)UQ0~>@qL~Q^i z^Q4j0KatD>LtI+N*btPZ6_m^~OEM1(aT(kH-6bPalvusZ$b)NW^-m=80Q#$1s1HEN z`~XSj0qmjRns{e76R#JBl6mGz<^lL&*#2WEnP;kG9$Z+!D|UctDjeJYK~pM1*-wT` zF8gcAC0@7*UiyhxgGQh(LXqB8!8oDF;HrZEiEtA@f8!AfJrr&ZxNs8~;O0uy08qI3 zFNB-G22t4iZ!it~BjF~<#xWTGr)h>>*1))IKs90<4A;j0$*yByh*)r|zoR1Pl8<{= z1LG_(xN6{kBHRSf-`zhte|j{8M}^w0n)MaPmyOSDk>=4>{sC?fcze|{|pK@nJwG|cxQs; zq#ca|DBK(f;U>U5>llhT6mAZTa1)^ZfCWXpi%f)~aO$m0WN?kF{)uoCK%ec5LJx(T z11{VI26)(p8UPA6|AlZ9*dWIPwE+}v{xjhwFvMeS#)hCQt)Os|S;9?Vh&&(E5M44d zMTynhj6Ar8R{un}37~(%k3tWHn*$`=1h7BF?*GGG#{;_S7=T|uM9(}FZZccAiGS(= zD0T|nQS_lyghYS-iByCgVJYH54bep^(z`ksClwi7b?`rtZUX3w52DaR>E?h-H-Q0O zvZDro(#?M%-2^sxbp*8m%|rkd{2A59bR9Vgl@eq+{d3#kr+WWVH29&uymJhG ztWPiWf}g{`@5X|#G&Gw-N|joACQUHZBd)qW4uO}M@;oU?{Hd8!?E zI$0@t(hPC9t$T{<>;veVzQD7;nxrwvjtW&CfU}Q(TNMPK?-r;5;GFee%vpgAn!lnp zpgG`!b5`chS%DdT?qO^O%5Fdp)W*1M0OzbdowEW%{HD17gMaUtqASqbd<^HT4wl-{ 
z>17X}vtAyBR4Z*=1oH77gcU(puC0h{9g56E0Q4=`{U11I9k4ko0KWA*8hqfKbs*=g z0Q+`~|HC=!z|L7&5F$de|4UJM!8vPx&RGHCEZfjn1Lv#*HfIIE6S4jGaLziQb5;O8 zn>Kpt;hc4V=d1vJ_G0?*Q&v`R&dQ8AD=+{jcK+d<)$FKJtW(ZRPwu5_vRos6Ra-Sx zR3Bg8uX9!a`H&p+%)>b=v*)Y;Z|*rL-f+%35OY?5InPZLb2w*Z%A6JSY49T}fI8m< z6!k6_@+k_Z9<;~;g9Cj0&iV^Thz}Kz7+fQ(f3hM6K+k^xg&wZR8SoW3zyQO>q6UB~ za{k4N9AE>1E2s_NikyG8A_o{^#5Bf+pe(I=FtozugF!j8s^3-NrTAR&juKapzq9s_QE*ZIZLo1vG2G`K)pRC9M&(8Cou1GFLs zz%CMo!VXvD4Csm+0RE_nDEM$i4zpL};L9&|ggE*#ihdWxVDExGP9rk7g8cy91jj?q zfe^(c=t1AR$`=Ry;HrH8$x<8uy*Rf201A}{T&xKUFlID*_MuqwUx+n<4aNqeHh^MH zQspc1{F7sb^rwr4BF$N62$l22m@n1@W{|}A4^1=lvhKxYgF#vM{*xtjz!2lH{y(Hb zM;ydjtZo#RYuz1}Q?)!!blSO3$bb!!kVD{pJe`zVNG^n zO;EqZ7WCy)AQDRDg<;LAgf&6&R+RbQ_&viJ*5A)GUi$6JKX%cD2Hnq8f0tGiTvU4a zT2;Si2*S4}=RaXslT}y~#JAQ$`wWIPS%x)1ewzid<)>Cw7}jJ0)&v9Ek>5Wh&QcDS z@lc#9g*8F)_QB|z$FL^TuqNnj`yAF6Ab8d1sQ;0O;x~};6az+$O8;( zstncyBf66JKcyN$Ev+!D$rP*!Ms)W^D?%b7Q%WplBeHt&J1T)SL3+oBNO}xwvIJ{_ z>^+^4>=@Q$71jjtdy(^BF|5fntVy3~1cXBGVd&GxZE^blkH7lK~3DWoLjikq@ChMps7|{PbG618Rsz5ctgMkB(2T~b9JXe!>R1<6%lqcT? zC97UqG{C5)qH{IDh#_N;5fbrOxvH1ufO=KEEZth6=b#|{&>Kj4oP#o$gU%KWG~Fi} zXlh45@;__{5+0+PY@(W={ctk>$EYT|s3xe->xaI43PM7uyfCU+m8d2tKJp||9M9Ec zZLTH=KdL0j}itPl>aXiVCBe zRf=kYWAz6)o>(-8%hl!E+t{@7ex$kPtj7(`QELfxHEJ1O&V9%CfVJ- zpT{&U)9qYiFToC*dZKp`U1s?7KYzDQl0^=8_ln%R+g`Z1z1j35FXGvX;-DvPZ5?jg zYis*mw@#UnkvGR}`1_Jx@iud|Jva6C(CTAVGi*3Vn{X}23!RsemDhfZWsY5Oh~}Z? 
zZ#}jiJ~!v+AIAMU5Q({-0HjI0RSQN;{71eK)Y9trjELP7<1%(kl)4z9lDV2-gb#WD zSt24+N-Sj~vU)YGRDx=P^plsMHGok~mQYQQ-Is%8$EYT&s3wR%buJPgqnb>kn)Gfh zw8{OxBlU5&mf3DCoz|>IaQWo@X9+q)xzd+Lhp1QStCGQ*ApNv==(ESPChN2&7!VMM z48XLeDrimcAdo!&!L+7|25W*1Gv}eDAk_vX3tw6^z_g~Kv?dr4tc#40h{wtmzBC8a ztMF9`tqIbHL?G!gt;rg#3Bu39`Tw$#(swKKw3|*F$!z{VS47J07m>2|1k_%$$^0MF zn(We=p#GeXXv?8cB$Ubv)0$ODYl7nQCLzT!t;rg#3BrdJAmK5s$ttZ0;?H+O;$vEq zWm*&Dk9Z(kerjcfX-y_*O)y{~`TbMkETy8tv}TpknjrZ<PuT9)g{;vO5vm|8=_t@*~C!((TG2_G=KqoB$7xF;@i71p;1<;I%-o?V><;furD( zK+tF_jZqL4U@>dlw`LK%Y>u-oPam$R93y03Q8~=e6_s>tg|4V%TOFOY`C~l;8s(-GuNd3% z=b-0X${wtqdV0|5o=f&;#0Iz*>W*wP^YhpVp2xhs5;8iU@4Mk2P0;+>L6&ZlpzUx1K$w?acxH{F52ijX$iMuz5K@*ePLn@~a1~8@o9L z>a|PMXl$bE7!Ro0fzn9>uf4U;|MaidmBq%E9|z72u^GGR+1xm1tzGp9+js80AQ=i0 zp|o)#l(vH~8QPnH)&M?=W9cXkWRLHJWXDHwtRBUI`1_KO`1mM}>7%&mq8(>9>^QZE zcEW-Z}>P_@CuLL^a$>Oy-0C<{Kn$(8%TfX4tz5ShCsk zt7Bg4Y(e#9P7R6O8v4YEvx7U8taV+SvddMG>(b(QeB2HhArX(2D|cxQs26#{`gQ4A zLtp#Mh-+xh`FcuoDWOO!CioToPN?rG@YVXrDsn|M+}g zRuc)2uS>IbU0O`~oLP><$JeD(b(L<+J4XPl>aXiV9zs zuCnXWwTQOD{r(($@`Sf0uV33f__{Qc*QIL_?S*=0@{!*7x-`?*rE3wkLe0-VMVjO5 z(#&6%78B#yl=q*#{HKod@f+_=;q_1pzqU6^V}-O{N)5 zdItr1RyXb-^>GJ0k6pKpAu&&6%|G` zs}$7)$^RqgzhhLBX;c&RE~$^=D@HZhKs7=0(v?VajB2ujYJ%!-wUFu(Uqaj%tDdA2`SWjB2U^)dUYdE<_$+R8wWB zCK&PAK)w;w(h8%ROre@!#251ZvqVIulvv6}Wc6xVvHr4+c)rLtBeVuEs>u?n319s8 z#YlFHYO;!Ig7`nRk@y(ZWE$0^ALK&^rECFGA0Om1e2`DaHER%>95Vlxz(bU)eQAIW z#x*N_*aoE6n2o-BjBB!vYk~pXAIJcVYpMd*1P`>rkp~#p6xO~fn5+pl=;+F~LCMOO z77Z}2sVJ@qM(D0aMo7eC{ zgUtUiuE{R03F;fnL%Tf%BcW7Y7}u;yToV*Ge1jCnxF&13CJ0|^G7=u2@-TeLGf%|I zCWu&B;muG@qaq}J1zwPPAr4-TPAGr7AYJr&be8Dtg7fI@g4M1uv2{f6g;sdC-~z|F z8m|UCV*FmtBgQEVJz`v-*dxYft=Ilw8P^2)O(w{epITXAT$2f06AY+Le*cs>OR1=iF6i&** zA?nq#inF!*aG~IaoyGC>C3;<2Cfyn~weZ01S^>G8E{;9<-{;wlkM88p|Nc+rz-aT3 z2B%}39v(2buCv6ihqklF0u7E;Z4V-`w&map%Hf(Iy@fTB9^;y<F~#$r`XJK5FQ)5qo{5!Z-_u%-zZFpAu&&6%__Hs|?sQf~U|y@*aBVo5#Q= zlfb4CJcSN=_Y6gPV_=hMVABYmLI=%zl_1SAu*p2IDLf(r)q9Ues!QM@l)_0_JVd=( zR%`&9Wo2dK;EaGvXv7adg3D}>!-F+*MQP7|xF%E+ey2fj>oxn0-*E_yS!B8-@9#y5 
z{$8T!T9WXMYR$fL(ciX_F$ zTTp$OQ$u36hCXrPY(#7Sn8@o*JPo5~1l<2*J*?8Vx?n3BR1HawdkL)c5`Mi=tvPr; z5+3&w*y<(xdZJo$$WNp_?j^9-OQ;2B9MuR-CvyH9<&1-p660P%RrL~tvu#1~VXx8m zj(Z8L^b&-dJP1ExA`%|=5?Jdc2%iN9@p(^?__&wAVlP1~zq6NY`Kgr^?j=-pFF_n< zN0Hw@CC=hrLgu`LuP^xTAHP^WqRM&+wV;;(Th8cF=$psA1SWe4wV;;(dXLFMdgERK z)4haR&`SW#$96`V<6Z*uy#(PA8L0mEVWc|lC9pXB0i<_tg`~&BA6Os$00wv*KnCF9 z52_gc03M8Mi#))?A5=E{0gUiEDc=ZcX{A8ZiuQJ+S~jgpr#@a`cf;<4ef6y|bJ`8x zcWL#T+$5hqT8GTjj_hS>_yZU*z85k=A|g{tEM>d<>U9A|CBq*;`Uz=BdOZArrQr`C z`=ky?c0Bxn)!`2yz7OU8iGS@!Z`-ofB{WAU|TvAYGVbyT5#r?G@1^ z+Z@p(+ly$D?N-{BFT#mp*0^uYB6!&xXI-8?46;2YXOQh-h6dTDD>leB&gzHP;oeLS zf1rcWaQ|frdH-cYj9?aj<;%&lK+d`2x}IpN@9_by6+Z`YQnBaGdpch7bS(Iittw z4{|)QXbzXF%eS|&Y32P$bInc?VrF+2R4HzHjPMs#-gF_^iyO1Gr;he~YOyt5^ znhO;s%3tO<*U0AxC(6t5;SNJScq--t%kOmw$3K7#)00sOijhs>a^Qbi;(aSdFV@0x z_N&RdtG_(wmmRzL&2@d6!6+mDz)Ksql<=JoruRsnw=?68*5j`WOEy>J1_yU4S?jtu zWtXcW$fl5O2O|P|AR{oc$r7?DycrDA&p3so$H*ot$fodSFbE$+sekD)U~rGW`VQ~m z)K8p*?knH@&xP%jb_RpPD^NCwN&LXC0oNF#gIZz~UL&_qQun zfR(;mnWx=!+DNvLP2u@)dX z*?E36L;H-UV*5-s-={1!KB>gv$U}ElkWKL>Kc^lN9wVEqA)CVa1F+A`-GIceaLh)n zu<(0bJ3Zg{>fE|)Xx`z~=aGLozs-0bzQc7@cx|)oj>G^hGs1VFPHg|hkxzW;8-B3~yYYsMf<3hS_rHr<@>vn5k^}@hN8!@1c*us9Qldr*wT^HK&{b zqKl!s;bN$G%u+b{{ZryBrJsn$KU5jADIEU*C;JiA(KnBgO(u~|;rIv8d%-NEH%2y@ zMmB}xA3*bkX5|S$~i39 z&QKB`u2>S!#`uQ{i41)3iyk8BF|x@jvMHWezQh9=fRRnMkxlWa?4{)WAB=3O1lbfD zv3x383{s7tmR1 z`dJzrW=1_hpFTcIo4nNiXlPD{NWDHotM2+U9n!2xXs&lgl1o4#%2m9y{y2s-_l8FH zT^_os(0tWoBIvl=>IxMvNWUQuNsr&?Vja>11EPl`1MnMNs(>`XgP5Dh1F3W%wS|Zw zO^fSxeB1o8UGXLs&*QG}FLpmZ!fcPB$%Fa&2?AX$4W||Y!lPxE;8eem;H~RQo%|OC zbbn`c>vYH2^HamWH-NopuBIzIfXrEp#+wT3U9e&E-|}rxvfiad1GT)Os1isMjM!3$ zjF5=O%GJ9x2h^+HRmmG&AbqSSk{&~vtU;O}{Ii%^?hNgLD}8VqGG}(YOLGwdvk>(iEWCzj& z)en>JpGY7fl)_0_Bt*ShRw{usLHZ*L&*|0u#?V6ag1Wz62inz@1>3{xiog|AK?(P-2celN8aeK4rM_$CU6~#eM z+}b+aw%6A7yKbE_BO`B)+wk`#z2a@=YF{H^7qzPM3>T)DIhBR4)G(r5+XFemnC5RB^%3T@}qF&{$O2%k{ z^yvXedQ51tPH2JwXWt_OFrldmLK8eV7mPf>gr@LLq6)@nf(@Bv@@-JE*ri2-3cW;i 
zICW47p$SH0k@=5AJXWsQr8%Hp#jZ*SO_2Tq7p(zIXtG9Vg76n-BjGWj$tIx*+UJn@ zKPEKUB{V_(%QMlJPa#Jrl@}&7tCG+J#jlX}pE04y8lefoUnTE9U_z5sLKDQ#`xosq zn9yXI&;xBpZxwQah6h1VM4P?2~Cjv<}kG7;Dh|CKgbUh zHBR=38Yk_AjgwpHNN-GNvO#Eq=6A^VA26ZG4xtIE-`$VCbO|1WQaCBggQ!=_N+pCQ zNPoW>k{%P9tP`4Gz=OTW08D7Ag3tsH9=1dtU_w)6V>H2tN5|wFK`pH?p~=)3O)%nd zS7d}lM5dHj%0^`MYFep;&;;q9knbO2LX#yz6J&ozzJG-YO;!m_5Wg@1Z8wDaQ<}ywT>+$j zSrO=F_7(12Gl$N)@fvQ23k!vII{;I$?408^SOpftrcl5{RcAt$CM^}l%_E}b`I))GDlxNg&v_)UYOFXDoRtl$$wsf6vvb%E0m^q zlmDuNgvXR7Ym}z=sNoxB{?XJK{(kPGoOtf;Jl=ruf$r3?FZi+Zs(tKSEdLMk{tu=! zS)HRPj@1GK9DHRo_G^>o#G=|5{A+;&{g!X$(X);M^3Y#ZJgcfHa(i=BVm}#EO z6p^uefs8eV$IfdJS{nC|=9toCp3)Q^k%8)(B>yeJgis16WtkB5YFV*PX$lh=kY4K& z`rt98$ttBOY*~Q;I)jk`n9^jM(iHYezyn?K{XS`1Q+P_k*YId)T2 z^oAIt29`CA8mMK{iuE~~;)twAzJDtbktrpXvJqLmnpUh+n!@u;*o=eDmgqAV+{+k3B zqFm8S<3iM{=v4`)3DTR8_n$GR$vUSA2AK9lI}YYFRl#Y32X)RP4=|^xB2E)*s7s#z zNVP%9l9v_@)UxDdeVV4QEm#A_34-+Fw3@I~H_6+RILMl?x7yAp0~tZ^wcFg1vD9~?Sl&zIQ1&mPy_9N^DC znQ`6t!^#Ppm-B<25{4(gdf>XTn^T}(yF`t~Cc2LC1UKwJ>7;?z-rDDX`q%5qVq?pX z1Lua=jNSBXZk)5$u6l&+JNI57eUK<3$B80x2ch)M9FX+5wZTGb!w6a%AbayfBs*?x zu-4izf-D=v|Fbm`UuM=jW{@s$+WR$#U`~LC$CxXE=K=vQNAOx8*mhB%yTDO!Ng!yn zmA2)J4titOxNprOc-b6hU7o)8(H_T$(PQ-oIi6TFhs)LF+uPW*@_wYb=B&pJ$41)@ z>nGG+Tsu0mVK1JUiV=uuDn|gzVhbwVbKfDh2X1cXOZua7$z6GBD#}Zjam)?ZX zYDJcN66YznzmX9e;9jUZvdzrTV<&hX^Y%)}=zPBKhMT$j7Z!7SCpF}@wCNd6L{+yT zZdW(l^P!WabL*B}pSJ1B@BU@x-q6Ut%R^Tcny;En1RZx<4RMyNe1%XSNZ)!p+H-J! 
zgZ2If7+_7_f5rU`RrEK&1Dic)5lB5)r&hi_I7DK3^vw`P-~4l;=$nb6?^@ci?yrM& zHLHACel1Lh^nnfS$oxmD4NBI%QfyGOr?FMi-vA@ppFrzDA|B)ZhIyNf#iy>FY*EY) zoM-0MsK*WtuL-)C8#Umf#WXA7z?b81NJ)({>!M6~vgiM-y#(=d8Q!2M6w!-D)> z4yQl3a6=R7-WT?N3+vtxRQC#zK9IgcTO>X1Z!p#00O4nggx@E+&SNJOzT-Y5Jf1_r z<{S#p-k!|=@f-?v=TLz94!h8oPq`mUsl4zUimIGL0g87vMT+A&6s*mm0O7l=M8e}a z6s*pn0P(x&Bk?N?LZDVycn(F?&!GVMyDyb3Kee*LuZ30hYhl7U6ktFP^82U6SxQBP z=TKDX914)U=REYy<2e*e&!GUldzB!)@f->^=1_peK($FL@=u%>Vh1sKquy#I<}O}1f8VXp){7&sa&0;!B3 zwV=YVrdwNw+xFVpe%Gy2W@O~eaU1@=q*uJnoNdody*;%0Sk(+0&e0}Z3-UterDWx` zA7hzgR~({wNItA7He%3Y`9@GntKTz1&6-v!nL`09V#pL^ghWK9lvv6}Wc9j~qmnrk zApOunBt3>TS%5W#b0|ReVdVSo7}jJB))da60P%<4Mc+JzHJOGr=`{e9UpyD2KCS^+ zr~&B6W^F=iB&Gfx?>pWraC-S*Y31r)8l$61eO2qpw_WYhW=7&4)e zjzYA)Dv(X^U<{@HrF&3uusudLEw0<~ZS%`^#hX|>kGsOZ*!}njvpt3;59aG92z0eH zoLUG7kCt76Q~g4Mx2`L7@?R9t{higV(;a8ePYwUx0Pyj-ny!6-np!NPrV>TeR96CQ zaD9a0Fh(|omseDRY=RMg`yeAQvbiM4;yeF9eu4ApKXQutCozd^g7j`rk@OhZWDVH_ z;XNqzFFghf?(tXO;XR!C31cuE`Rpr?!RJNSuE8#t9WD~UaB=diaY_f}Ng8E*g(3V3%Nhp;U zMmDPw*#yPCFCfJ+vdJ2<3Bpex=l@}3lT~CB#GiNuef1dGOn7)cqwTo48BhE#++0y> z-L=2l>bP~9HOS`c>Qk?FRXwr^^80j?EkCudsK7~&YfNk%>9g(C;iQLubizE=pAG{9 zCX?SkCC*a%i5S_eQe+b(_icy1d5ml_jckJ6Q+FV}F|x@9vI&~|llOlxvdIpz399p# zp)Xwm5TO)K$^s(l)v{6vvI){pC*S|Y?OGPvwF^Y&^an-f^qqxwPXZR9ZyzI@sz5fu zgBj%gAB=3O?CCHtV&*!u7^E6OEv+!JS?$Kpf1j5Adv8w9{6?XXLER2FPd0B_aoB}Y z^EHJd*T9HiOJsyZM5dHj%0^_2Y|5RlDV{SD5{;zC$RN`p*BJK@apLE z$AdOmr#8WW`Q-cyJZMuD)Fybazz{70skk8Z)*_}hnV~kphK1|p+n{6tOp6BiA+fxz ze7p9aruw~FJ@%mWn4kzAmvDUH=?nO5g?Jp<02v_>kCiKcX%47Y0j!dT#9$39ib2w2 zYLhi;6NF!4jD*L7Hrb>$LHng-{*S3mcBxHJe|Zh`d`xY!Ol^YvYu?M2pITXAYLkgUn_xf``TbMkETy8t z)Mk}Zn;`kR=jfZq)F#u^Cg{C^oPU9-O*W`a(0t=1^o?U`lO1XkRF5I=|41+*l)_0_ zMnt_@Rw|)3LHbRn(Fc#IP1dPRFklOL{|C3s*=di1wlHG3P* zWrk1x^LOhcS>$kcugJZ-?S+fmn@vCRBA%@%4tm1WpiSUMwhfeT1husKJtOd-P5G~) z3#m;oVtYO^LLwqlN-Sj~vU)YGR5EB2q~AFdNsp;bmZ(jTeb;#;JEk^Sr8YtQJ@8H< z;jPK**R~I)Hkqb2>FE!opnKDi`Z)bz@zM$%6{$mL#ScJ6NT4Fh)xk7WM7`=@m7tp- 
z{k}{jJw`WKM>oNM1eiT24gnb5R0X;T9vrxcJizFtFc7HVjdZZ#&@lNnC|L#5q5(!X z6-76}h(vP!yF@%zt_r3(pk7t5O3+P^{>VtQ1~9tG8oCL>AH9Nv$LJ=T=q6}?oXr0* zy2&oO3F@E7MqfS!DWOze7~QN&bQ2U$?t~P_=q79ECJ3K$6bX;fO;*uO5I@xhiI34u zmeEa+|I{Jb@>44-jBYZ4Zh`@6_P>I`6Qs`%L(*e(lXY|x47kZb24Hkk z73d~-aBCs*0Hd2KLN~#NJCyf7=+71Zda_>0cub217~ND9-2@}=#AD^EV44H! zRRyaA-300HlkXp3bdxo76NG=T7{vgLZnBAPg7y!|{2!y6?4p~X{-X%=72O2!pT0!mV|0^cbQ9z+oGe>@YGsAdO(xJy zFrbM1{wZ;mQc+=avr5rTki6Ipee)RIWE$NBy`-!Qt#2D%BFzar;9Vsw)obQ4tn zFB$DD5}*jBa8ec&QLmPjO3+P^zN9^p9;2JAqnltr=`mygMmJS~Zh{AI?U4sk*#$gx zlZ~mHV8pvh=6_L3D~xV31(5*F^MSnoFAZD zpSz$KfYD7>(M=HlO9~PnqnnJPn`LD`AA0)-36JhMa|um?(AYoV;E$SwJ)uQ#_a#+s zLxtaI5*pXJzwtXRp+T84Dbw}}e@}zp)@$|~e~&|G%&Mc%?`Dg1*$2896Ly3#q4k}7 z{{*+Q*=%Qn_CLt^-?*L4ZaW*)FB^_l4CR=eaw>z{*;U!jHYT)*b4YRA&StHh4Z>@5 zMZ)8DHmmJy5TAPziI3aaEVr{kek}*t@>9=Ta66lcb~YHGO@9BBI7>Ne!R_oSZD)hz zx~LI~z3DtA#Yj?QC}1*+zu6J~{tM;_#eOI4OI0j@#L} z5p!Jke9L^^Zq;OB)F3xAI2j5Tozx_VPHG$oklsK8eek%Q&3Zc<3@{}5cihfaMLQcj zsHKAz0d8ljY!VR|VYEuV5!BKOx3igQXM+(Yb&(Mg5t&kADI1Y-J3GW??51aPEMc2L+6v)i8fI(2k& z8YOZr@fR(4Yi!=|JI`wQRlQ+E)9@qFF6N{A?`xc%uHEkXPj0? 
zx9{gMP0MsU*Vs$2!=~O~Vc3nc`r&oBH>2%r+KyvFbRg;y+V#l!e|X1XY{wy=j5L9V zn(GnT^~w2{5_1let9og34%DmaWqt0D3E?1=-uyG#Z}8k9R_6|x5S@hvScV`2@Z2G` z=MI?=U4sL2|rDL!gw?S;h0peBo; zrda+qk7dhGt*kJpS=FGXIL_LT-#;bJQYtD8YE~JjX+pFWrZ#PFqHi99noNS4CPaIo z-tGG!y)mfCG^lAp*a|hbCHZd*YBCRMijT-TlJEaYU?G&kNm(pJy;@eR&m9scGIsmX zCWApuRzXd1%gUh@G5~{`Y=fHOUP-4!AcSwAm*}W^09%ECCVN-L^Y!TsUCeUh*Oa2$u6!5 z>JQ(Cwj2sZLaDqku3441CMeD`M~Y)ylQmougda)1e~ocXR&h-bf0PmW>M^d#GOh{o zyR4QiKee*LxF!?0CKxb={QfC%mQqn+T(e4XO_1DmA^PSquE{j633`tu-@nGVCL6dW zXzu2NzHy9evV&`a>h2Gb>Jo4WrEpRf4pFa`l}d0;klu3)k{;ultmB$sz_@$J0E}y@ z0@nl&yxoxpQYk_FOeE9KL<(_DFk<|3`9@GnD~xM0g=>Nl6Q>~~BqB1U#8NgQt5?%X zCAcO?Kj{^c9^;xU;hG@(Wb*zeX4V*H*2wn>#Un-QYl)2;6?G~L=$N1^G~W;cq1Gg=n!@G|?9xsFrV-6G?~GPhuzeEX9xFW$4bZpXLH zFWVJwV(~og3jbpF<0H)W7@9noub&{$)zWZkAs{?jb_q`P3klx3uGGnYQ9$>1R<}-f zoIO7^{Cfj<;AXC-D~w^_EEZiCN`!Ix!qOLP2z(;n1|>^hDK^Azig6h`CQ4mw;D1@- zeJe&U*1~f3tI4{nzdYxc9lQC>b$yz_C?o&COB=V8@SP8)_eh_&GvkidBu8UK4xhl$Qf)O))kr5K{Sh>=d=74&YzAE80LHeK~ zBt7OeS>rW9_z({yJmxjo5-bq&Ma@+2A!n^F`$QpP1KVht~wv7q3KLx&$3U zDV&t0L)5Efr4n8fq+ePcNsoC=)_F}ZVA)b+0OmDS!E1sCD-4kbnAcR392e9UVy&1-HIp(3Y5s7QZ;LuhX>KV?3X4;Og(cd$1^0t~+EwyhhIk#j`Vxt!N zt@QPml?m32xw-UEz@4{QlkU#$ns3;SIH!Ga5@$vtr>&X&)6R~g^7ew==!SNgY4Z_OQO$eQWb3 ziOCvEuZAyf>oLr1`JM&b^Fhfw?zz1Rx}RU*d9~=zyVdTQTLza**z)jmUctpp3)6Qj z?PkCEw9iM^5noqjq|}&P=e2+K{9^9zx;?a=Jr-zitZI7@iM9Q%9Xxt}z+WToFWEDc z-`;psfY*kTPrvr`ZP2d+kySPxI$0cFNq06T$akllr;>E1-fy{6&Q__HJIjZ5x)mPY zV^w%g}A1_$G_>%W}qI0VmheCpL|1LW>7oaJ#@S%|4aOf*R zgV2sKldm;i2`sJzZ_!Ff7VT5Lh5OW|6Udz=Q)kfkC|Rq(ltUz!?@?z(dsL=qk2;6; zs5P!Jv2_|))--BBrweH-U|QG=&I}FMpME%+*5MkjYCZ0ufY2~E(AU?iYahPL#-)Rk z?j#-Vl-}cdH@jW4wzjFZ_W5}p{ae4xyp`hn#mjbTn&k_fy2Lo{_+w$6Zikuao4>W# z)=GP(Sw_0n+3C8zM0@i;wtv^u(d}M)K;!p4O&t4g^KE9@*Lls0etfg}myVSY9qQ^m z3wv;+MU0EK{_8phMlNH+dp{SPGfT8u)@Q`o@bk^Lyc7r;(rC4aPlbkteTy4gzHn&s zIw_&^gU-{V?+-3o8o}cdyR8P!gcYz(QsXW4WNR$z>+h?y#^vasT4~IA()s?9H7|yq z=os+XdWQG9zC>ik+A}v^a?9>?9=mkj#kxmc?aauGdl$W9;J*%IJ$QHH*4;O_t}~!L 
zjpe-WYdUyZ%dcIpANcS6pLT{%iu{Z``yX_le8X`cC$`ud-k0D^kqiM_$@$k-Uj9?Z z`S^|ZrUqzPAAjA-L%`d`_c`PSL@;n$WmqD*OYb|RL;PjO7^c1$I#2n;Kh15ybD!_-LN*s#9 z3OHgdTVq*YKMny^8v^KuqVVA#J%>z@nls7z_;GrwTzU#pA1A+msZz^Y12{dc^7NE` zJOO*t3B3PTbyJLsINM}A+XQz~p+8A@ zYx4TF?Sr#TCJsYE<5OE?Yb@*Q$Jr*6*(QY9w06i8sVNZ7Hl-pcO1I!a>NDi`FI8$; zYXE1PRi15Ol(fuA))s}&M3p!b6>EKt zoc|ydkC3%T;VhNWEVVY#TeuQ3$@gDymRi+Ws#xQ!UTC+eps}p4A7`mdWvSxuoK4QZ zl^UL9g#dges`#0xI09TGzkjJx%UT0CORe%Om23@&y~!c>f2rQ!W$?Sj8hJD-lv;<- zzB~Yh0jY_QEU9tQsbta#T3@Mzf0ni7;G~oBq!Zk^%0nxmLP8{K72rm^f{l1#(g_;p zCCk=W*4K}dP9~F1Fy;DSWD1V9QqP2bH`>yV-$3eo^81%6wX8LOlg=tnIuDC(j%C8_ z)Y`;w;qBC$*%{}nW>Xb{hATsz0$Z* znF0eZ`(#HKgYX(2|MY4S75j)N*xS=*f!iVtt7=h~&ppl_Fx;Y=fzzt!j}5jp^V)8n z=<3|tI43Oq$JrhlOIs%DT5L4qnl8+nG{D|tcI3dI)CK{igQf>h+Wg^W6Qbyq)mz3y&@*PXJodhFx$tv&1? zke|L@fO)^OK)(EVy{LA*1c}y5x@dzNBHZBak@H_$%f5auW34pg5b5Rj@N-0)TeN6% ziz(mSdf6q}(l$2v>tu2fF-51I@Z}$UjU&ls8^e5Sebd&i`H1h~;gT(=)?{J&zVM!o zIc6>`jas%#|0m8S(ZGSS`Q}=A5W__!jTKVbP=R6**Dj~+V zP0TPeXj5~2FAwgqq9DYv;i~!(SzMT)kCKoo465dVAW&7#Y}hD;}$(txcQ%VR`?}r>?K|%nY>(vQ2jX za`Vx@&SnJ%OAgoCRgbvV-f0P}lXywl@00UCD#%{x8X2y%?DFHu|9#ob9v1YN<=$>* zdY{O$4MR7co_Ree_yljlJ=+mABKwZk{u;Y7HZb|H@9JJX2S#Mv*mXN&#HfB1uP58- zO&ypt{YRGH<$G=^4ht?#H}3xV;)jv92SYqD7%)i~PkudnNJ#+x9Z7Cz@^9(OSR;)q zk0%b@?fz_U4Y7kv0Di3#Ei2Oo+D}ctHCx|gqIco$tgK&yb(TLO>T_(hvnNEDX?ss= zYs0zo$HyA_4Fv@PLE@@#<9^j6MsgR1nYK%GZsP1czs4|64^Iz|?>g)AkGRd=lC!>h zpX-G$x6fV0Y1q-Jk6v6vjYa8#Mh`g`$9Y7yH6PosRir@MCZmzj#i9JL(h=Kjds_d+ ziPF3{VcazDrtvE@b*!=neKT}gd)Ud~cI*8+7LOM^np@B?JhQgT0)s)zI7`n?vTbTU zY4)`g7^6X(ki*mr98?$YhpS$!@Pe7gLaXRyi6 z!2M%sm#*iNt`-a&k=1q6r=B)*w%InO4H}vHEG^eB*1bFbo0rYysY!*?le&4I{J&AH{HFZ$*7D11h& z_@RE4q{$$ENt}H7E8W*{n#^LF3>({<(a5k08yilOnK>hdb@G;s{}p7fbdBINnWZ!t z*2nu0v_5c}%AQqn8jreM8%AA;5k-li(A=Kp6fLM;1_ zOxdzmx<+u)%RT1D$NyCU#0I~@E(k65?2a)RwgDeVj-~aOwu~>dBa{e98mK85nSi|n=MLnz+o!Mx; z;B1+tY#C(NeJ5M?N<#|HmRZV{wFsT+mBjx_556!)tl`u`KzOuN3L_SW6MgdiFPwcT zT&u8_eUZ?;*smHF(Y7YlFT7f$Wy`+<-ItY>IkfE~^Lb`;BVs??7^+Fw6I?O+)lM{M!8*mqG(mgGl$mp+ok3 
zi4FYhaqZ0k{``{}*Ns1{oUnO0KiDZ@c=D?Ut{b~K1?sg+)M#v?>ljaP!w!^A8hGuk zeg3C^y{;@aw){A7Zivm;P0!}WIcve~Cfj%JFffNhm%c8cQw#1qi3iHordLk^UY@?= zXYhp&O?TlC@#4#4V=f9WkMS-GA93Awsr(VwMlrO9rrkGh-ne$m7y-dM&vDk~Rfk(* zw#4YM`hy%#ESkgR>hkSvY+89g(p+=a!Go!PJ#&&G6a* z?nOtQt;{?2Wma4RZ(>o6s634q9^7#@{Oq8xBWAZf^>ym#<}^y=T;eZU@YdM8;dh?Z z@~e8oh^FC3qFu~K_utnzJzTrp^`G2)?^aRVnK}C=jjL;u>~7!BW15!fcCN9PV24e; z!PRt@m1H058PF&Dl~^17(vI~R)h56 zqOU$d^wm2MAiXhp|J95x{Z#SYWiNF=82y9-iDs}ZbX0PJ9d-Hn$4@D zH@FE6xWI9)#;XAayq9A@3PT1IC}=?9UWcCfae1qMoWJrVX`GS8=!B37b31K&UHb0c zt~K+yITqdey!!jn=a}cy!`&&##Vot!Y7+;Oo67GauYL zns9&4$E{lN+QfYici~&R3q%GS6dBN2Xn+aaixq$S^`sl%?H}YPTmX(7;$XQ2nW9&W z1)>FzjuyZ=+5#xIVCx$9tyu&whvTfrGXxU?IkH#51%^y$q@&m>cr?=^<4WM@z90WO zq+RVDPF-2o>dMKy>eqFa_|b1y>gRm! z(#rW=X!eptCu2{Ic>eF5bB`?B-l`A#PhG)STks%K!Mn_Hu8}Xiv-)0+4|f>y!Ba6G%x)A1 zXS6Kd;brzIa~-F!yG6!}Wp2Sb`SvGYUc6^<-HvaYU$!gW#Nv6}75>HU$48j$F*JEV zzr7cn>K78cbzP~G|Du5I@2qZ}?l^mXYWVjCuounMbnOE+ES9vP4(w3kr6ARY@(NXG zL$BYmq5R2dHL-#JWr_E#7`<2v%h|6c>#qLtoL_eA<~P^%X$GT=`~xp-+)~1KKA7Gk zecsNDJ6eywE-cwxaZeiDsbsC|;*?#kd0Y8*?LSTRd$oG(LF+L=!uP)&UwHb05f$Qb zU9$d>h{uC__23N|H1v1kammY9W{-=WJF z->#te?^fn%H=Q=J@1DggwzzE`V^CHY=ZM6=h{WGY6JJyT#Qe&BvN_JWJbh6Am>l&FGo*gH zV(MpG9i6u1)}G}n%tthSmC*f@{gG2|^lJP!rlj=c%gw%risl%^YV5Xd5%GKac*K?=ePd!*KMaQ4h_fKNp z3AZv(f4b<)-z`$VolyM-y6DTVMUM2KR9SCw}1fCGji9=G5EO!u)~xoiU~sUN5KdKL`A` zYUZTqwHXtw3%C;_k~*L2de$U3qlI3@L}%l0@M}MMU z2cq_U7kz1)e8X8taT8K;dcmc@CXYlf&-6vbmO5W zw=e(LMHd=$KU4i(T2XLO>EUaS${L(_W^icD)LVZwnJ1;%ov^f~;r_yTmeaRCc)fS> zln0%SUOxDd@JYDIkDKq_5QLv2xyd&o`yZwig{SN@m2L7Bq!$-i>afXYKRS7=Wb5Ax z8izjH6MJ?=UyJB7f&PDwYF$zq;I=mF#64H@ZbKYie$9WgH{+#Cnfsvbbzs!i4%b!T zwavCW5(Bi%2;YS|8N6zSInSF~9%;Y%!O%bZoD6y}B*5@Yum+?yhEv?ff%w0OQp!<7 zV?O%o%}DVpR#>pjS9a8ZfE|)Xx`z~=aGLozs;zqe{BE7kxzW;8-B z3~yYYsMf=^J;-0-q^C)VZ275`l>#R{67o~3Ds_;be{{mURo@q;8kF4)O`7Lm@}l&d z?UW+kLmz`tw}Ng?>H0n>nZNAQz(?yw8WhbP?AG}3P16dy{(awtcFDD4{Q{=k`ukO3 z+ldD@{Z(A+)**u}4Q~)Z8smgVZ2^*Twi)^TQ{pV8qOw;a&XU!XI>cH2)U3BJEatB^ 
zFdXnR^!B|Zhx?P4+8+(g=@6;cXK3cUgs(67?;pQdKH_D!PUdlsYcG2^T%+Q_j_>x+ zuA5H|4bATI^>Ny~;?bY_tTzqXd*Ccc-ow!CGDyxBNxn;z+O!p>HqD=)Z@wnE<&c$B zM-H*8JhKsqGRpI!%q9zEHmk^Qy}XJ_=QwkCTEZ$yLv|Gv!cbyUprz13TIse)H&6XH z*ufxosqg5tm%Hq{{JfgEDQZq;LE(ZI5j9@6wrDeJPK>wDhYJN!$p@~^cX6wDlJ9Z5 z#UoF*_*acGo)^#kempWXH0nU=%JqLhV)Ivm3*gS2K#|^iM0&Ru>iy>kq_+{NH?@F* zZ+uU=;^#z)r;BoU0a82)DNZS#GC0m$o(^cvlaov?W=Qi!iY1e!OVc|f?hCuO+9xf; z*r5A}$g;uKJJVv;JIp@Z`M}riC0RP3y57aR zc}i;R0HYZtw^P!)KK*Yd6i+AKjxGevXNol6E7IInsCkPlq`5JvIjwMV;1GMuMZY8x z{enpJY$W<-Bs#TdI>&L=L<{=GE$2dJaU{G;W zAAF&=WoomK;7dn;J}bx!$c>oey60Qw^LFsTj~e7=2GUnZWLlH+kIm>)M(7ok0*MSw zeOeWz4v9=0fA-Jfmxj5W_dRTTI3!`;i@3)d?*%kVyE5$U%iTXFmZf&8K4JFjxCc$2 zX4e+9JpJ{}*ovS2q*+@E54Rk*KC4US3`@7xZkDfxmL$g>aL^uJgINBTyYRrELd(jU zeE+SUbOXvODFs?qQY}!UmQ~!N39amYy!7q+;r^IuC4yTkE*|>otoiyTKWg*qHuX+@ z+P%}`bRqY2b-V72p&}EmDrp{`;R%@gNg-}0=*KN3l)kgb!b_+%<$=d{%)NliyZFm6}fk}y>M}R zv*|}(#IqH}K~LP;I^4F`*7mz@oiZaMZ;spW_a(jJD;}QZh0aUK%4^+I?vb;jA zW3yhhkFk7h9UGij=Vo$jn#M;>BK6~FcwIx{G!ta+n2x@9Q@ZTbiVKd@Dm%@j2~VlI z)ZsMK>Pzm|d&>f=_1#_9U{24~uIA6`pXv6qSw@M~<2j>GW%wLv9P};s%a;|?RwVq4 zwHUggYgh$tYUO!<|1LAr{(QUf-i#|5FZUhLDLOUk^{9VuguzkrqvwC$0`ciRD-hql z8xmh8o(R2K1$tIA^{M5TI`pjKtbTYM?p->w@A8jH2fJt5b=ucuP0-w+jI+aDjCwlq z_4mNO+s)Gc>Uw6&iu)_)uitPbTB~9WuzdMA%g(8pT?#MM4*9J6K7_yYRlu3CYpX#D zI(X<9xCqZVCMtT*L(ywWj379Kj>9RWemi={@K^d%9(CQ_CB|$|_3@h#AnfG2aESBb znz)nbAi_36G>bmGd=`C%l@bqaJIJIP=bytGk6_DdU68G31Bm|`z& zOgXdFZ^@<^cM8H!6?C7HXKY~iV!GwCttrL)e>?d88=2bP;!1;n$7lRE7x7by-A5;6 zX+hZ#Q;XJSY!)(Oi|d6^bCTBdI{Kj1*^<=5AEpM(nOS5CnUdBv|0dwsuey!mS!eS6 zQ>tg>l$C7Hn*Nq&vfdC-8_%TQlFOd@skFtaw^zrR@VlQGcKvkWk2XQ4|D5Nzk2C1v zuw@k%jRv99 zm6vun<)L@f%#ul?pZbkySpy=>Rg)nvVX-8LA`C@%M{@p61>q}?E1&+53hPN(lJXC!|)-{)aL$kW0vuKBT@?%!UQ99(3m6`9j&O3zzO6J{pg4=wbZ zytx_RLzi`__Xqd^F#+ceJpX2qbTfOZpJl?Q`%QeWq|W_lL_q1+dDN8I;8mDpHqhfD zS|si0iLjyz1zvHgEE`b#)s3PFh3ibYH9Q^>n>e;Q@a)D$InQo%Wain8$lrc;qnUZ~ zlWD$(BcNmFw{lg#riO)`bUqE<+y&Zf9dd`kj6u#3N&S0~^`BJzD_<`#t>`!Om+|g~ 
z+UO5A=89sfujsnc&7u1G2Y25L^4i+iE8pkF-|cr>bUQOFZuRCH1*L6Y#^0OX-!3xO zBEH-Dg(*@V-q^Bx_q`(j1Ab*e?NcqTrWW0doprnQsALm3K6LImIT0MA=VFiw{Pmq& zG1Bs9oQuhLsqQy(F}eXz}K;?TDt{@q%AnD04>{|Vg zAkQk~WR>q&I2o=jdVxdPWEg}W*iyFel`j;W46~LDgI9y#xKiAZlA4TFzEE&7tW`cd zLfK>(^dB-xzW$Z37n}?;nhcYrejQ>cOeoNCs)ih^5LPO83-4|W?rvn>Tn7&RH3^+z z@Qz^l?uH8&@e|vMoMHaYd9&Dn5gp|lQ2Bzvc{6Kyvv`B!y+EE-*x)Kp2XNl3eBLY+ z9(F5dFWJIZzEE&vVKB0ghseT>4YB|ia{dp_n}5CC%+4XQSpU&0(OQwJf92~1=go}f z&E#>6cz1K{fgHnOg~8q=Jqd>CkYi6G;W0^Zj_o8E3~=i#-+;>ZDx3tfngoMq?&SSf zyjwB2ThWhKKzPqdXt7ifzVd~FlVH}8U?^C;U_MLv@d_p>yoIxG*-L^!fA1*y`d7YQ za1zXD5=^o$!mb<5ku7}X3k7G-tYy!T3-}F3UP;Xb zDqkoVz4$HkLLAxsugTZH^7VqVXGXJUdea4Vw`mLHJBC-x;_(W<-BCHLL_bN^A#?&X zN&Rsy%yupe1_UmYZ$RZ+7S4rP&4t0UndJN%oC~vdk_^HJ$>F~%Unn>iW-S*6uR`3& zjSZh9vwV^a`p^1TzW$Z37n}<-nhWn2-J%rUI2Km=aMgSED&$xjX}Oq$VG&^@5`|{rI z^$uC*3Qiq=zdU=BzCqTd$Dhic_nTqwd_3gu35RtnUN~O+rS$%R`?FJAqsN7BTG(Z8 z{h|%u4*HKX6h4qLd*VcExIP&yY1Le^|1Z@liLyn)s)oO16@83_nptJrt$3`Cwl@8x zrrBrSb(tMFri<;;vgFu9!vp+-+h#quZDhWK$Xh#1s5^a$z}<>@NAVJAf7t%W8eZS| z3Qa>`ZHY`1Es-?QZZ=T3n}w6{zeOd3DH=s{6j7Hn28!N(w$d7p;v7g3AvbLzNbsKCD8`1YcIdf3SAm zy6qo5Bg`Nvp}^!DvG3OW_t`iyCwHXV3+p-0_89Y5x>+ zch{xuUVby%wDD^i+Wh2tqx>23Nv<@S6VcNl_b55^8_kEJ#dLiZhgEZacFAsS4ys{xWQgo-yrcg88 zmz9+6?i12zQ~b7_j!Sy~GcF}HW99k~>ob0nr+-eKw&BRn8>{fFFt;_Wgb-6E+mN?`sIGJW?ceW&i2!TsXGOACko#Pp(j=l z;3G-?1t(U&eY8?(Vnx>-EB*m4lfT{9jzRUvA~cL{G|OmAXlzb)m{FUgfm&TGFez6+dtqvi5)CK7z9A@ofBjWE`MF%OqPvI7K)ACl=-Ll6!-d1 zZoYS`DDKRheUrx3wMllj@8>a1%e0bkt#l>}y6*``OG9b}A%A(`OqQ`smIO$|0X~jA z|G-WuQ0u7xAQ3tR7YgIywf6Fklr)GMr@(^8)m!z{X?8xWx2J84k-`5;=8ARS4=v=am8(gefoxe$WT4jS7OxUl%w9W+MfLDkYJ^zu^>^ zr4$&J#$gKoCQWzw%LAvtjHSTzvqf;~XdASNNlk(O=fp}1L;b5uu4QG~K>MkwnlaSU z&U6(h66w0bJoamCmd6BlFI+^H6l~Ql&&RN#+U@TG0K3CHcKe z(_Q|~gcDk}5?Zh-=>oED44E_7PFAw`Ya7cZaNs{@7q!Gq3ID ziLTBv9zXV&9oc#4yHDp9mjwim&bFVpVaCRcrOWiU?DWsD@CzQ2YTJ3qje?u6zDKt1 zeb^vqcY~C!TOR9Iycl@yJO8bBR}EcjrkebNSLs-_eltc%3k~P69;hC2cORU(XA?E+TkX`!PuD)@ z@n}^EF}`hLhM7T|n(KRca5omztoE$Az2oP2r+VA#>>vhcj4vOJ5n=SZb>gd08eLo2 
zIfq(lZwWkO6nypc7Wd2t@6B5F>p=MG4uO6xXSbx}=laT)yppxyq_pJn;>Z7e$zd8v z|1EoS{yB8OpzLYcJeTP`(hH7%Ju@^sV4wNxt``mtYabPu{4>{O+|jSzkw=$a^m_W` zmB&{{pFXR?YnyF%BnD`i5xxsIMNdd|&AYeIZ}9a?_cyP9p1ip4@zlBh@ipVMiL%`j z!HtUXBl8t9$y(Yu;`V9pI z0zu-caN~Z}BSvx;hTUwN=-kBFdwz{!o*tea9^ZA==O1yKy(MRT_deGPUv8hfiqo*8 zQy;y!h#HI11&tnZE{^kvY->KYVXH`iwoOJOql-iNVWlIs+xE2nixZ`Jal*J?^Z9j} zXAk;j=(P5*lfmuQ`*$cYVYJiavf#-vx$a^nAenMlcr|G!ML=ov0x z?H5;I>8nLuE`QBUk#eL;GF*`U63M^eO0b%NBGR)~3DPrM*rG0#vjdKPrTeN_@h587q4kfblnC$OVPV5QP%5D#?XG`dhW- zOJB(v!Kb~fo%X_JcKa+ctHNehNxa9Wy@|CgmCc1=ecbIJTk=ZQ296)B#1B{(_wFK- za4yW^X)j3sfaD*fN?*wu!MQMVxiI}G1@^Ls9JEYu_+aEHg?`El_Y58dBFUv5HC7U} zF~_it7se4d2P?}lfb<0=@};k2XTq5+Ynd&W^@N;%g|{X~wkG7SGHzoklF$r{0#EpwSI$(+;$wiouY;(PL! z3HGXCcxG*v1qOqbah9H)WZTqS1?F?sSR_Uip>`Cj2=BAF8ZE+0gf9`1yG^dY6k87_Mn zE=d3BseI`x*`aWT%UXsDW_?+W%)%Ki!-u}Y3>PH-rYT$UO4bI>a9PQ4!KCl;$RwQM zvV=1TvnG)KXA}9-SF%QMhRa-rdr)){_dMKvu1ky%-hD2+io9wkEzKgm;_(KI4-6RW z8R#|1$8Vy$JKy)$Oa~ahabA1{y3;w)73c`j73lEtE6^EMQtq#n-^tawX#H_;tNvHA zFAZF|(mMOk*4p(1Fw%iL$zTmb~dn@hy^%;9Jq7ST|-zlc4!NlFm3NNpS(5Y^9S+`xHMq?9Q$9RGp zc7VV3#F!_})4h+lPOH0r(*@g!b{#E8+$u^dXn2D#d@&J_QqCL+w{$sk<-0}JBeL9T z^jmI~-!@cBx1>_OrIsnwI99(Ge3RZ@Zk0NghvqCoBu!d&OENB{p;iV zG?xkyp%fXSD1>AvW28tWQgl$66O|%Vrpy_VdAjByGL}Na5m9l?Q=tqYDq{#eYoE^9 zyVKcwpL4qQxwq%{{ln|k^E~HuU(f5a_FC)xUEj6V_sp~nPX`GtDCOyRli*HV}w}P2BxwuLY-&NB1fo&)RHZ>(O|F@@K>Okivu;Wb9^Yt`) z9mHJ37kgWP6{>FGz*+|f1^TrPkJ(x4(6`zTb1Qf?2uXN!@$26SlitpcHM((O-olkh z5AT@4+LMb1oFH#-4pbFgrPBVts-jDF57=`iqv*6*q$Y?Ciyii-=O3`{e%-pG-e_<0 zN3jJZEeaf#&Dy(}cUSk`sgIk!>$jh}H1y*8yMrPFqK*xoeWlmJH;Vfj^;U7?E>hr_ z=yI_*UB50%QvHT&X68P6niKq~j&?x3;x_@m&UR3O?E=@_Cql>)Pa~@7WF%u=>_F!q zQlt@v0~@GWsqSuv zAA1Ty^hfk8II-Yqj&aGJHL9QX^(k#Pp`Gnv&ZkDs<3RklsySSptp9eT5?`{PK!HUP zaZ!jb-0n~##kaV~i+^VEiWi`xzxdkO{x%P0Ut4?i_mu@h?{77@xUGKKsh2rTB7@6^ zww!cyNI*zg!8(_R!{%-3<+P=)H&=OpNu6+Kjtb_HcXm!;SbXuIfZRsy!vY^SoOv&{ z*kPT=lX54BX{#Ll9mJdk(!Rz=(pjho88H_{+OH;ai2|=8rlvU^iLYQ%X~lyy$CGM` zFABUMz81Ov2b~R_4b-)z5&z!-UWik*$@xzNyw%!wEi3SX=yl2dUop`oCvpV5@&H~i 
zlMcE6gp#@bNDil9F5#+&8p&LgSGlqRFNj|sswsk9W|ZPf_7eoWECR3SAs*7c?sLLk zD5gnDB>&POUI|lcxSTK(C7u0|m_|{#N3~!OzmY1l_>yB9x;A0$+C;P#3=ysI8Nym( z5lyn2pjxnQQ(4g%TtvSuz37tN1EH}z&=`Ef;3i=vN;;G5AE*}GHU;CF_K+2gL43m@ z%;HP-6NJVrqA{6B8pHcm5Yvo)6ZS%hL=qZPK9Vark&rdu#?YdojHxvbAVf!?D{UF_ zAJ>2d;v1JRi!V7sA<&fr=;HAST&3j>!dfUku@s-kdWk^v*5v+oG0`Qv2LfHTK^Ko? zkh@J13Ex1VduX+ie3~8v;Yb=#ot;g0yd1fb3QI^%tp}<=<#h!R@7j zskPfk+6y(QNO@KNM;IBzH?PkuzU1JB7+Fq?4A!zpBdjHcYm$pys2D6;F&JlL5WORr z|HVX?>>h}b<-y2se(dyw@D0Ssa%E)jk;!E}u#KsHSdsAre{%5(j8r$qT=9|W-aiR@ zb)sS}`nWc2l3&HL2soi!5iX0!;xCI>&tDd?mRJ^HMbUCfIp%vKJIl14FV;AX(?w}w zoi(s6LYsbDL@Z0&BAlh%7I7$|Zq(}Y{vogT)(f~fWi{`YZjYFp?yamm*IL!F3GTCz z=bjq)Vcr!z{p)TSSwAd_n~Oh9y*}sfLbrQ$i`?pS3w%%hj%bqfq|5z=OTu8(Gu$u@ZG-8S4Wp^*1BF46fvl% zRY3RNhdCn(ocDqJqW%Jd5B<%U=pTHU%Y@!+-*fbad+jw@?;7IeHHKm`Z zJu)oy51pCbH|Y-V*Vq27O0TyJ$X#`@Dd&(1_H?>)BXE?%RdpX&>HVv!x=Z0C!(`T- zF`@kDbq5F8dPx1L>vji&-$N4)Of5CNpSLVy*@OARW<(yDvbn{%1fD_rZFQ4MXO4^I zo$L8m*TNAE(>_PsT4|(cYSAYGi`KBf9%yvkdTxEL{c7{zDcu9_WRHL8(e=Z=0MBHz zA6m{@SggzBE?}wfsu^WaIpHClsFoK<;U&hBGm5m6bi!AY^ROlgmTIs&LQj=`cZ3@| zyCYQFRC{;C<83{MbQ$wuG#CxjmxB=npsZU5oek{d&KX!Q#0OisrzmwsXep&e}+kB{0(Q{Vr`uq=?@ zgY5sJlwS&Gks`7oSpMg*EQoBQH3@$ai)>OHEK#`_-Xxa<%jM8=Ai4Cp{rZ6rOy zjbb-FQI1K63V#F48J;Fk8%OfJ*Xndojz;Y*3Kjty=O6%l$P~ekk}44o)%* zC~=q>GI86N<|S^&dB0{Au30oGQ~y{-!Oy9mH^1>8*0*KLugwqeUcUSgw&TdG6!C)p zn_Zp#lrxJHb?0gacls6oa8=8ecfVZC_k;4~(fyO*a-S2-ulJ|&{bdO5FJp@dgSD%A zGed?r8ecKuV5!3UuuQ(MZ07sQ7NM{7?MibWMs29c1=tkT#`qp}O@=+{ne6OQSFH9P z^_&l-J>Tu06>1V}nc(&P-m@oedU?l+Pt=74-M2b9!4@yhp{lZv_obIzqK`mtrHryO z`^SG@c38sJdyZGfJ*fjC%Qp_)bYa%*;E=PfzK<_b1-}ksSf$>8rR%V2uTr0* z|8%x~mt~(u9^Uo3?%#=*7M?iwc%DI7Ur=9h&aj=Je#Mk(CG~w*F{>}pRiKzt4fTb0 zVKr2JxD31T`sMoPeHR%l)@k{&sG*g0$4^-gcFq}aHLoT^2pq!&nz?4Ri}8vPWHR2YZlziY2rBWSFY@+y>ExhYt2E_Dzb$DKOa7 zmxRGkoMJCdiI$2%cfZl}x=Z0Cs8q~WshI2o6VxjG$^EyKw=g94(uOqD2j(Q0ew3vT zOrZaaNWxPn^_Rk1q(~_gCl4X}ifpt;ePC7lz{pM*++Yye0<4h1%==YdsUy|3@xRcx zhZ_v0y93{l24k)i(?&SdxF>bvo~XZo{Il?v>%5il(m%Qklm0I9UznZ1&-P%j5a{d| 
z7z{D9yqFnupS_k|cPS_UF|&M_84w^8X7q$1jp{ZNF|$v~v-x>O5$U=k7il}JbbN!6HwY3#U@ixV>=PUbJis23x347z`n>ybu_4UldKRyA)1> z5LiA443=6#_J2{9Em21QqZwVassj2)UL-t)QhzDD1tBnt2wc$!hHs?6t12+>y=*#R zFmoydt7zYtn4VkRFY}><`;X1J*?7 z>g~40?3MMfsf)Lso^#4$tL4%3`6=FjDh%t%phu{iZ`L?7ED>~s(>)bZZ`t6Gl|jceDv$K2Y=p|2-Q_YX^{ zpZDa`!Svi2eJuxe#(M4_8VG)|yQ-VYx zgWpYh4IAeklUH&nY{Qoo!Xpu zEA~O_l-OSvc5i*W&C4%;Oxm%h`wxUe;4--213mQO31jUb!a(NIRD-MdgcJ#b;`UzH zHw?G;s;yhU+I#s*!xzyFI$>JSGK^U&;Vr|;^c!^Ub%=9o;pV+SYuH%sST6UcYRsLJ zW9DwpjIkMT`&HrY`D-{vU7QBgI=E14X=+Ndr<`l!xRD+7J&nvFQ&cQ?&2+8}^$#y` z*=^aU%0+PN>Tb~4-z&FhMzfgVjUyH?+qHGFcTJio-J zbWh5PmfG6IJG|X2-X%OQ`8vk1{B6NLif-XA7bf}b)XtqbVdR7Sv%zhWJf410^*h(e z4R?{{j?=e_xd`O;XtMrG3BQP`L~?s|nM#ypVPbp33(`fX4joAMKcc%R%YyD3J(+cv z5Sq|okEO$&D9b`%+Dz`hM=bj;X86rZ9l8F={%cb5HO%^`wz?eNf2`#e9c{`7fMsP_ zkb4UduHfQaOl}E2fmoJxmIVWC(Mp@K5X-X0vZCj4$n87H{$CWs=u^hObRLJ<#9a@`7>1y?dYkWLF5rUhe+^^ST|x{) z&?^_{g$TCi2Vp3&2qxh~hzhte7I48Y_D!OfTY^s@=w%&x!5_3zl zVL|uU`pmjZh)4)w<$|zasACrhL!o%|$K#P`%>(3)GozPVf=?iXWgTH5KcC1Z41^F? 
zUUgg0{ZxNu-6c2&LRi)imTVW%hX!1TVW&%wsb*BH`In|m=U-_T!A%9@f@h(LL^y$t zD!6i};0kn4sK>0kglLBFmv#Kr#~g46Id`5g6vAH?@mC;sk}1915_|&TFRS<~$nEE| z2m>Mfl?VO`62`@T%(_c(4urp~;V)TI6qqWR-2aYTggqA#)g1;kQzwuvf>>5gEDO4) zReJxBN_PpN39+nPSQZR*X({0#h-F!3S&%!ulwNKLK7m-4b(RGKUD-q!2qkLv618Z3 z5OlvQ^oLSHIqm=f(~^nxWL>ue6l3l$39^%?|lo z_-4JaUD(R~+4uYYGE%gix@%g2ScTJ^$Q;sZ*tvaMPO#P9wE_L6FwT<%ljR3uI&-4r`ud$C)fM!YwY8<_;GT=X2<+l&;5@D%)36ZwzC#?_QM3Y zJLk;i4{261RTlaF5v8eUdaDRiL333#OeMZfp~jesa`rE7+r2MwXXd|ccI5B8i^Ydl zgnqrhy{N6r9(bTF``R#=KNd{K3WbM>f^3q1DTSvAXN2&`N7(X_8GIi};fFIvJe=Kt zm%{|t9cEH4R3OaMgoC9BH!AS>p==pHlr0yAvc9b)Zd4HX#@km|p~@C~r9u|NN`)kL zRx0qSy;5P@(AJ?NM@$LLP?>Pf(0qL0bhU2<$FB6^z~uO#14DwrH}+H&{O>Y)!6o^H zqs)TS=8gZp;4sAT>f@u~`9b9djar5J>Q6fr7;?)$ZFESgbL5fG;zpYT61sUN_`7+&^(NR*<&9fA z2oqVzuvCHY3n}sj#dN>;Rt3R>2#(LNt(c`UtX0^=uvXzKJ8KmprMa)AT(0}bSf|I= ze!(GGXMVoPJDQa7Yint{(hc){jP7FF?>j^BmQ(R^i!g6MFuK!_S$IkQ0fiGu37bNA z@wEyyMtHa=j~`Ur^?bvxLG`WQ&HTQs)Yz`oq4Q0je9G7M+Go16z1i(qAv>SHTdRNm zPTuG@*~X3f*obecdb}muD|i0UuQMhr{Sxrl=IOm@eO`Bn|Jjw>4ZHPX(nm2Dfee3_ z+wnHKui?^}~DLUsH=!v7iJMbCX8{DV`>!b^%xh-u}+v=E#g zjUh~hnASv9#=5SoObdcPzCkazB)>pR%RbYBiE?KWCPGXrH>L&QpA<3+FUda;)3U_0 zq9=36@Xt07mJ&-FlB|TH82eESTHf=?{TJxiAx-i9k5qI)_~$1G-=Gv;l7Arjl@I+w z7<)?hbgR24SJsLy2>*tB z|CI9BA;~`w{jx;A2Z#Ij@`i6Fy&^+J~N9aph% zL8=BG`S+3FmV#mOZ|bC_P&~$34+Mjcl#%-n5d^ai z!65igvj0;|a7liFAeem!hJ)en-elfD1zmX*bOm{%Jds&=N&bN#m?a1ntuLxz>Kt!o zOQCaOM7r+CMcNK4IV;j8TDH`$W*57t@T!=)LM|yhVp@4IEeNmV#w@&~*o2r?K1>Uy zQsEG$5<@IWfeAG!NWN_%o7?chE(oql_J4{AF3B$t)3VRBU?O$${WDb9l}n2*&a@!B z#%dxMQ3@}~KM>Qh#Iy&9;aNP2aWHjFvj5YJKr6@ou3ldJC*QqDOF2xMFfq{HC(z5w ze`-(wHp)Q_vb&^dW#N7Kllz@p^y<~(lIfWi!U5Wry`Hr0)N_wgi~3$`Tt;PKcd|M6 zv@ALJP^KdNTi3IyHI!-A%uF>Nf9txAydtAV<8&(`qFKEa#-@XUT3DI& z_I(rw;SiP1*QXMRW*|qIsZ>- zCNjf|o5`qJW)ikF)Ic-YK3{)5h|8_x%~c*?QYYM*qk?(l&7Y9jsdi*$S&trb6LM^h z9tk$j{QaVfX?jB#TfTbExihFP>KqD~dhOvv7^74jImf@@9DaP~I1d{a46s3X46E~# 
zFp~*?<3gqQ#pD?J9PGSs+d?Y7ZQ;(9l%kXr?&TD|DQYi;iLO`pr9rDd;RW&uG)m7km2xD^z-`a`K^(FXry4>0oZy5DcbV>HZeLfuk$QT1F-o_Dt>_H1LnPo=+A52;`d;ZFFvg?y`i^b{s9Ujpr$9MY0yt6sJ z8$Aww?sq?ay1VC@J=wO0IpeNP@dNt=R!tT4$@kC9h(fNE9??{Cst|TERLbbWPKE|m zoPxy-Br$bj7J4Xd%B!Qc&vsniMP%+ckb2Hk1*M~s`))p3O9pe;?>#Nxw!W6?}OVlQ{ z`D|rU^lpZ4o@rFuo1>d*#?;I4QN9bBkN7y$5H#OkRde0>gilav&g2)sx{@f0LUZAY zg_@^%%D2_qMqHW|Gu?j13(xcIrf+xu0C)$I7+$8XKk zK47zH^qP;|6mP5AcUqu%G(}BW!KrNu#x>1x+iah(V&eRujRB)y&T=?;y`BBR$3dK@ zN^2%#L3+N+z)GSJ1?jwD3(`tdtjm-u5JX7{QKG6VV(NMTw8DKzNOcJ(j6Z{}pn79+ z{~slU>COR;c7c60@`4O-Hx&6J`My2=4v!9GpX z3HwlD6jOpm=vobQ1B7`7YJQhLRZm4H>C4sWc^PpPcT^sMJ`q%mnh^aVd}Mg!7tIp&Z=<l zP^w@hRfyJS!5i8fCcFVbt~~0qpt)&NX3d%W0zocIkW1!?rf`o3k<087VI{H1h4O@A zgT*%|+FMbLD@jufA(6Eye-d21eMdrV$_gQq;;0%fWi^~^DG;b`evnyprbvX4m3?Hz zLyQJ_uRaMpl95oo_O0&g2&e zSy@6>vhhQZ&bva3l<;z!s@VoviEOs{7aBisE5YDiHv`g2s5;AfbylFdb*24xRH`%i z20~r=pe{H^5AyvJgu3jaE~wp;JpYNQ&EyaWb>)D%aPYGkNTv#^gw9kIK`jU}wjc-+ zh3$D}&6)fHp)O0POWs=utkh>5vz3r%kf>YDzH-qTEzD8&y+>+Jg-}eIqZ)0sYP6tw ze+OpOnc@)wSNVV|m}$T#!c1Z)#pD>Mtw5%>0=z~GY7ZPsuQroIAaIofxPpD`EA9V4 z;L7sY7mh@r`QRYJCn)RUOn!mDl_hXJ#2?N}h4J8~*bsa?*s%#=rFK*hMP()8ttAH*QZ700 z{h`vp^WBV)E$82sSr*>cj_O$3ciN5Q_x^eNpKg4)qEKHuTCKQ~wtG=r+R5z?Z{5Zy zmKI+W3NX=qW%aNq&|TC)UR<`Os)PP)tvVQ3?NtX_=U?Y}H!P|fvNvtvpnZ2P z1g+ouc!5eUm=_-4Hw2cSa&}jh+r^b$ZbqK~=9N)y@l^*kM{a!A!S$WfRl^PihUCut zyz+w6YF?)!^ZT#={#JX$ylvYW95xTOn%Tp;v-!&ohBxGl<%afT;e`k zQ;hj^h~w#LOR?-=Q?-{Jtg7?DD&ft-EAAhLHys|isHa|+M~Ru&lR3^>Som$H_aOVh zsHTA zn0052OX#4-+Cfj0V<9kk#uA1?92@5$ThoDz91C)f0eBKLsfo$W=o5%z<-oCEATP52 znG&cN6E)&k{?aa4ITmCeYf1P8rR8*#|Fl5 zgz!}k_=?9PaFChg{5Oh6^apV2)hsgLt3d96EW$6u)umvh*nk@^l1w|4k`7qi-PYWs7^sc65QA7A`{W zQStU3o9*bLKGN4ur8X zV=TzMYz5&L2xIB1<#KCR6V20LpcQ6>fhc*3(I-&(wp!&|kbUJIX4x6@1;SXiF!nG0 zcpv_px*=?OfhptFg9$sCQ1KLLC;R@+US7mNC2X{i^ql&Z^;G%9L-~rCq$TY9G286yn=H_Vdikvm&o}zO%V| ztkl{w`pNm;2AAuj9roNmRP>#Bx(Fr!958xyl1f>V!M<$!q_Jfcvop z3wAwry_@d$D&NjC>dez%-S2*CFxXcX>kE&?aQv%UDC#}4g~$LVY9UxqSPctRdi1_# zS%_*2n}2dZHa(e?J!-fL=$#Thlm~i?dISs#tR?Tikw?(K 
zrY=8^n85dl1b)owkH@Ta<%CDHpx=xj>J5!JSb}gVLK=T5LOOpb!X;uU!aDk;2$ek{ zopVq5jylAu&x-Dznhv&Sm(EjERP(p8urTv}rnD}Nd)KzvE(5kUBJ{2HMg;erMR^IY z7TWEKKBD@3P^(h61D5gMqZYu1mVP@O(!dcmSCu*1mtJPNv@lp^nQ2nOe_v)CX~*i8 z2J3GtDA`|dRCD3gx{(F5Zk*!HJEwo_>W{?b9m4{w=KR$s&+)ZsWKjI|E^Mr0XsDCFVU$iqMo_4~qI9O#@+iS5 z{@WK=q0(CwSPbIJuo$E#TW(-iO*aTxq@SBV@w7h__sC*A~ckQUrA7>pKp8Ljs&dozXV{9IY?*)0>cCk-U^VtqXGtaK`yW+4R z_b<;yo(-+SbrrF7r^mJd^N8XRr0dO^M3kaT*L2Q+@F-RqJQCF#+RC@gMS4qdVE8jQ z3VLsS$E-J9_(2CWmJVp5I0_MH`)a~M2uFXV^;TG68mDHkNssXjLm6=tWZpr}KZ?mr z=L85x*~U>Y%r5f#rvxOr42*D8W*h~zcmE`!4W-(2u7Gfq4ICv8%|=*%4AS+Ut%P|{ zloC${ayvAW+-g+u-ta!M{}r7R=*xQloLdEY@83%}0;S$`aSCxO>)a{`RR;$U77`0p zbU7Q9QKc`V;xfbh(4km*ndzJWaVwkLibp6&)knzhpOU2Ld;oE)Ox!9^`zSg8OQ|-U zD{+6w=zTeeS#P@FgJ6^m7%eX^ zKLGaNoZycuT!G=J#+WM%N2v`CBh1u^sBnyMa`p0Zb+z{i^dil~|DOXK#|Dm{ILU8< zaJCcvn}_-TO&ph_THSTd%3FazsUg6FZ(8JMd_RQj@>ZQqodVzSTB6+yNTMwx}b=~e@@Ox;&fvKgY_w$xzEPF72*o?>{Q#Q9a zm%uYCADlymLMO>RLFWVcO@2TPuweO)dIFV9^>D*aMBOThFb}wO?%>Jf(Z! zo$T>1J-U9_7vPy}_CpJX7h_!}cL7U<6V`76)`vUoYmEs@QCgo&LDbd^gWHtB2!c>c+GUK4fPK%aoKIz$GjgWTIrhaxEbCp53N>GZIj_pR@-U) z2`B9b?T-Al%qQj9{5+$Gbls7Qv>jG*R-{d|Y^gtS?yYm$nwkqVkEW<8D>$`H!MLV9 zemvgRb4ZslA4cvwantzH*tM#IAI&VyZoM(WRS`3ab2%>JeFFV6w=nB3$@@Tm*J|l6 zd^5FX>Thw8*R^>&O87!Xruy{#mfpy1TCp6~7e?Q6FsA^+%K5DVm`BIIgKYIln z<8>btT6!^biN(BKmc|(=8Y`WQuzd=e*x9scXA;97-+x)2`#X6~{@Ll-0r9`h9OKU) zoea}mc5YLbl$ZZLg*yzVIfOPN%oLv`&F0U)E4vPuG}T8v&?g|U{mkjp1K~FqR_iSM z`;GslwH^MI;(s-o4F7iEf3+9_{|@7SwT2ZSCV%07S*gLl-uPd3J27lvDE^llJeM>g z9{+2CIsBW5{}r+d!)E8;f5kvI@1~~sUuT^#EItVT>;6Oxd(a&I;wZI*e~I{C9aS+- zr!M$k)7QYim-t_a#uz851N@~>?*^t&AOEi?gl@nT=FY(XD!G6ud?m$Jb%Ou6ZP{I9aEnDWmN@Rw@sftYF?ZTL%V%p&-A5&lxoXbAtN<9}V> zgw+Zd2!Clc8yws@8a{K*E`IdEpDZ_moqiR+5gyaqP$3!l_4mP35xGqO4k(h|&}QBy zpp<`E5NyuO3_qiAM)Bd)I(@^<+N-oO^0;&{f}>=mucz0bqrbwSv2l}k)%m-*%Dv^6 zmOI2XYpZFdskyQ|W!yREzCJBuX? 
zTP}L}>>lQ8rPSxmz=I*TI7_V{*eKXrTUdAtqfHAbq78iQqi?T_3bqU~*zv>QskO(Y zYk9*mIu1V*)Ihu8*M^sO4!J(d%4zP0BL%55BXxVEPMEuVe~Ng1n75>T{FnxA@rUj@ z`@L$k!ZWI8@~gIOHCB(qG_t(oz&_+t69W7EZAC;UO8b!6p^AOrxLytWRN5s_qwGVK z4RVTO0z<6c&ih=FaU`I8;*sZLc9h(jc5m?>qq|tM#ZEKC+yPqkZ{mdJt>ike!vIRT zkvs5{ue{u$qUY%xKU@vK!_{p&!agnO_Xt#R1w9USP6)Ht_=^Ox`HKXyh(!Wh=obl8 za)TM+XE_qhEr zB=h^PDBkd`$ts`9x6iVg*{|sQw%{$iM{gf}3>um~_J{4H0c#?3^>*80_R4zL)Wusz z<b2>KZ;gHA6Y#}Z(wKaaSNY2RO#Rz`Te&cj*5=`iKF5_Ne4`e z1cDW2JP+vf$$mwn@KN1TMnyb|nA&A&Gus9Y73-~OH|}W)rl1cnUaOH+#9oV$ot@Ms z)>WL+e_a1o59;WuG*D5w9L62gt}y-nxvAgBTTZ*!^sQ2T`@;d+|-9}RYy zsm#*jrRt>3^q+!t(pT6W*uu5P8?2xH?uN}dEe-VaI-amPxz5#YfufH0{$HvGuZ}cP zeHSzQ=B18Y|78C)Dft>^eNF#>- zXY2R)l=l3ncdXsr`Kq;-e6cP%Y5EF38@xT~q$8FNAl%#so)o47D!FNWK?*vekek>f zP_yLLd%E^{*`!x{pN+B8{^DY4Z92}teWm%!Zom9yef#W|G~~tFWZs^l5y9@SrgYo( z$UCvygOG0GTQxk+{N9z6@u5NRxt@_Zt7EqpHwgL^S-7C(6^K&{vL_mYeVqC9`WK`F zYpgGx4j#G@_Gw;`4k$dqzQ2o?m;cnD01S?5Dje)DK`z*G1c$-Bl z&dNMIaa<#dO{hHdKIJcq#*XUU8}AG`rl`2k$ReNnM#IJU!Nk%p>!uc& zABhir7dq<2#qWN*JwCs%T;q&wA2~1`3R3uL)+*2m6!eIkf1sqGe-dRV4h8>X&?(CJ zPyx!(C%$4Nw`>(3nHeAC&V|FjFsP#=FKvPC-(h@+K2J>v`yj@buJD;7E8~OA&(_h) zOv^Qh@mXekFibxA{UgR_nejpH=Vbk#Qf^vbK#b1@T2mNaV=u73{%iqZl2Mmoi8@|1vW)# zGQKx&K;^x8;bxmi)}Qi+SwB@q)?ZeO&-yJt4k#);-gx%Azny}-x}Un=&dfq;?zrB{6tq_!mBXR+CRjp!oGxk#NrffMFQ3SWvcyy8@xMAFEgzRpme};I)LxL zi2VL3L6z1Aq__&EC`J=YglkqY`p?wC5${)Rjh5K@CK`3X9ZJdl7bw90nE(&Loh<+0c@$*+M%F*YWTtfi#QQAr zJ|3SS>;53We@XyFy#Hrw`=7W2 zh1}?hfO%R*lx4v_isbuuh-Jl(4F5vkdPN@5P$J*ILoCZ;Pm3T5>%!!R#X2QZ>0+7Wx18m{qZ1afP@r2>=Oh?@STp-Q|02=>w3M%YJ;nrK}C zHC9O7SRs&ETajL7S{FctR+bB`@cq{&zkdX$EW;_tUAGqD2b6Nt`U0vcu~AbJg;NDg zL+1!#9|Wf|?P!w%PG4%MuY|64jrx|t-7M|83NW=|wRWXb0{lkEO_YWp9Yy4nFr-?i zgi5uh8g)uY`KDc4Q~S%ic#k(F=iS2+OHc39jw&A-6}Wm>V1nx|!zUAdJ;+;QAKB!{ zSG&mJ>FyoFtduEc@jHA{j#n>#kmGxzJor?6Y~Y0#&M(^D7^?w`yDSHe+Xok?qUHfV z(Y=k#^WMhR@p7MT^p6wD%{)H*xsiDO8M`0uVGU;z<}s#U6(Zs}z_jtg^EH2FYv>Nv1w5{XN27F*sRgv}X(~C?Ko?ywdj3QHqrvJXk_{*vHm+!nd@#KtgBYht~XbCfJvQj 
zXO0Tyk+;7?&ui0uUv`c^GiQ{adgt=dVF#Yf=$v~>V{c=uj@u+yYQ~w$w@ane(Reyx zm)2ATK=N28;s8w~rw)qNRiab4bf(toD8{L&Dn8JeEgu+AO&-tHfQI&+T7# zje7N^ZBza8lZxJca0VY3xz!QsMpg0v1ezv~nYE_z11Zuph1NuyaLv*h-oH4O{wsNW zjCs%td+Xxd*zUZw*&#nn^Wqz6do8{`boA_!DBgOdi*I+?n@65+p+y^`??qJkDh(@;N$N}UHm4ExI218%a;pbhfBX?pM_u^5@(x3tyKxrFqlJ{ z2i00-Svf2P&OR34ESwoM>UYA^uA!C?Y|UE>z?L)#UjA0(|ArB4X`&MI#>VqFB|?PPK9Gc^TvdE5U2K5SYaBcX0S<5vB6@x&GRyIDu`@M zuGkh6ndbXPoXR$*f?ZlRBtCw`sr~C^hNnvRB9F|R3R<@UJQ7^PQEE-&2Z&Qy;Z(9- z(H!fCbE-*m!aQOjiUx>Ky@IJ;0j&TTzt7@Ksy-L~Rg_&D2{{#x1saB$`2V#{vXEFi zG!zgkS?9ow0}KVf5&DjpacKMo<&Qs~Kgb%a;BaS3_P?SMIo(~wzreNvw=w%l?=~o= zecKxVn{AaEjf|+Pp|s02oRi`v6Wat zQiBy}-NB1lYZ^a5Y|9GUil%90Orzs(WFEs^t;X-xkMnS&Ip)c!L=IH^O{^}9dl8t) z67IQ$Cqz^#|C6P1k-Drv?Jo70)vkWbLUq}yPc2Ap?+`tYgEMp`_y33;`Dju%I*&7* z$H50g&($DuH?saGCNjaqf@do*Fzn#KBY?F2E@kd|*Hxv(_|zfGCy~imljOgExd5gLwd) z28NU7p@O2y=5Z#U%Nr`WAzV_Lt}lEuuSPThmTHu@{^UJ%R>+S#zE^K2CmS?1h<@

s#LJ)Q&6uu;-Xk@Aw^^`uMq=+MJu^IDT}L zKBG%uB*=5YI}QGuriGPysyZt=2J4 z!MMJf?IBDtJXW@DSv-z`+Jg@tQFDwWjd{De?fC>iQ>B zT{zD|GqZ~mX&x$`tz6rv#It{3gs2AbEF}~rpPIHml2;tuv}w2FIW|4M=a2asmpbj< ztzsB)yf?{tHPGoHv2Izas;uR9qAJ@8s;+#Vg=2<>y){m@!*zvZYjs16Yz2oB_>I(L zi;P&AY$X{H?pj5UfZ#H2mHPjwT&8kFtE4tM0H?1$X-s&7 z7&#{EVaZE*4EHD0%mB=OFW>MEzs) z)x48J-yi1nQ?ms*W3ErEM4IjO9_KKGR zAGbYsx2Spcg`$~X^8Buh+nCUQUzc<3mu{&Ip3rr&_Y1KI1%2aV$oId+@&JuxP=xx! z5egvqYDL2VfwuNWLbQdmhsN;wc*^$rA%tA(WJ<^dbHZ;BLMR+G8g&t4-rT z2y*2Cxo`*>uSVEMEVv=arJM+p6>>pkZ*u>;n94LBfKtdGP9b8D3wH5UA-;Y}@S^bm z1i6c3gj~>i0=fT>QfnGNK#ad;4Y{=Rfhcsm8vEPcjbY*a2%Od zN_YbbbJDdd$cnq*1=B~{$yYThg(9LuaVjUOP~ zWdnD~ihB!aSAb6R0P_7iN>4!6vA2>>#T_s?iK3=U;;t7@h`Vq`(U|-i2j8H4A^7&% zDh9p+6T@#1__7ba0=0uB^#9N}6arti!52T@L%<6uA-n;B@1F->fy#5-=vAii00h2l zf-fGO;6M^uMA(Hg-6Qa|x1#Fk36TYS1zOJ=#jG`rA0Y5$1ANK09^5)G7C8SKvvrUs zRL>ItW5cA}o(J1ZD?;LFL8JL7rurR4nwrPeflfDo7s1m4Z>wmuJY9WAgyI5{mVBRs)G(%cP9luw)F7ZiYva!`Xr zeO~w)WopXnl$1MH_^(l3CSIdlD|w`R=hn?5MrC1lu5#|F-_e36DYLMX33{8tr`9b~ zQK|FHpUWMYk)qyWaq8iSKDMEHBTaQoJEkr&VewmW@ML%Mi`RWJ4fjm?^u@_0 z1FO@-WRLQlRFzBIXKRWvpAK<6J?)q}F$+BuH|5n)+h@Exe~_wCzS@O4>LzbeQr0X> zU%O$iwT}L3qfXMi6dJsI`%sO!UWFws)&&f8pOVDmseirNN&7~SmCeHmBW`qP68XzM zSjYQVziwt*=NYX_HrL)?JK9%tf%+uBb1&}g)_>C_^?a72`KzSD_NOKs^GrVn?eH4& zeLDa*9~P`$Ao_W!YQH1`2f>(I1p>oaMkD+MZqgMWg)rPFm=cCzfE0csOiYwd6VRM| z(1fDDOntw22n4lPahcVo34sWP2p&mt&FRQ|wDyuXvt&+x=GV4-YNd!TMSv#UBE z0Qj5e;S>CA6S@DDQtrxBP}$$m;gf~KCm3iVKOiRXs{{RUpV`ugNLMYU1x@$!a_sNw z<>l&X?-S@n7YrM7umoXcAdO!cNat4uE)kW1bsk%GHY_e|tYc`XlfPk<4ox6T=iF1i zqYhPp&x-Dznhv&Sm(EjERP(p8urTv}rnD}Nd)KzvE(6V@y32GobNjmLjnvcAE01V- z=4eB&f|D`gUlljoIPHAd`>wpZ~4{B2fB0|t{qu6vgpeEZO`HoM|bNsepy?J%?}z+UU$iT zzV2~aOPAd9L!QpK;5GflVT%b`13O~_Z6`Q_Wh!2SuT&q~PQHI&BE?%1M14$OG7?a% zBdG8Mh59Q>XKJmEx{;k_+Rhhy`vNOedaH8sp^+~}?+88Fas<0-Izot&<*$+l&-dle zxO4M{aY3m;Y3)`Mauy%92O^lidgw+_yJ8DaCAD{u`%fsJF7;@#+*$MVBTW?HjdjF6?4=6?s`sxi4sY;xJKJHy@ zP@H0XB~31I3RpS{lCw=K;bJps8z!g#dBrv%Aj9cLW1||L_W!aJqVj+-l&B1!0{jM{ 
zvgC_{KZ432_ntk>a??ddgv#qi&}q4KXZ=~^meMP<-=|519K>6`$eGV7=e zmN__punZ-<(H$5OD$9(@p!T8D%xcs50zze0P+7Ee0$uh;TnYQ2eDH_z0f4Uy;HkPC zdW)G+IVM<$%5Wmm_^T5R&9Re&&^&NBB{aiF0lz`eEGK9NxsN?ymYXg{B50NeG{dPT zZZ=^c6eRypkQA%ifFqp9qt}_v2@o{14$a^Jr$PzKAZYe*qiG4Cx@eIZnnCT;j0pT+%_)In{wS-|l7&lJM zV73qPhU$6)z;OR+zdQG5+B2x|`RI8eKEugLBjErJ(!38skap-!3DWSPz;B4KNj3FJ z_o1Lh_gMdA=0j0f96tAYHK$IT7YwT4HuMLuJ<`?9+;hk4XDC z4fpc$pIWgK1FVxMoP4>=pM2r*CtuQt$(I<$ofw978XJ18altoYT;bePy;BQjUmO@V zVtBB%5o5V@8!<|i>c4kOI@i6&yp!ke75``zX{S0qtKhvuaKdYspZN}Ui+K@yTw>kg zT3R1{aQoD&kOJRLy_~kx_2w!MFsT#n%u&HS^7hAnSnhFSfo1%m9Si(kjk$H~%EzZk z%eSd198$qnSot0V@Sf7@&-OVpgLDEkbX0&eoTfB*CHUuXqn{9|hjbJ}YW(a4lfCd8 zgw%4VO$+4y`xLX>q=(w~N27x*@2!C|5840L66=rq!!6YRwwqy8gz83Hw$VX>&bQAK zj)0I_{OIy;j1CGy)LnA_H9~6kkXoSjy-S22P^wLr5D-$cg47kemW4mo#IIALkr=!?VkquoNdc4{MlYtfIV;x|ILMjubB7|z17 z#S4`hjNsO_w&{@2x!NS8Lnv2-wCEHqIdpG>)mN@zdJ$ z8dHD$_A+Zs?&7FX8sj)^=DBlLgXr$(^ETzpdzIBCazf};B2&vrMbeJ!5um)Si(x6kuF zo==s3`rnAkZxtaxKn{4ekFZKB=1m-xzkb+8BMBfVHbW45IBKh{ ziZah=gKyz@juk4tRfa7btr)g&v}4Nw`d8BdLKd~15~6=P^Q+aVH>>T2?@dzedwFQ^ z!-I|2Ovcv7I`#pjw^da-pZxwQm8SLqxTmRx(!>sq8l^PefqQyHrpD~H`rV4sn_f@! zd7E)E=+}mQ>uc%v3V!{w!I<4IN?uR+w%yRTz4ig#jSee%{!$U&z~T03;JFpw-ki(y zdFS7~!MLIZ8=T%H8og}=`)GD_^@i@tDzOPV*q<}Re`>p+*z{*(69D&64a>bay8h`6 z9MV>%NsPXJT!_(dzS4NK3;+M>C9(h6S%(s+;WL8YAfR@<+^3tYOYS6`#=vDoff}?f z7{jbJwc8?~mIF}3ap(0B48OA@`t)L%bBYnIU(zWmZ*gjFbk51}>HWsS_W zYEb$c`TbKWP3;2+tyw^8(R&1#DfmIge`K4?@KmbqG~8?^r2lv!))5Q`vN4v-DjDJT1X5oEIg*~(Zg zjuGJjVj5F_{RpyIhHNFQmcm)WDwKGEAp4I%HYlw`e*ct8Q~LmdY!)C}^cF`2tEEiF ze@fdRFQ|?e05U88zo}o7kX^zRLN=VuG=81{+^(h8Oo-bf(kXEpJ~H?X!fkorHfXKB zidk#wV2f~D4!8~0sbxx7M+^n2LoMpRmZ|?5G}gr5WA|2a9oS)jn8ws!Kf-O6aT^Xn z+ARsIAkd(!=2ZtYpfN|ZZVKOjZSwo4RGQic5N@-8+Z7wClVJX)6*ds3rMhJNZ!LK< z2=N>!Z1PGH4&hwq58-6;hj6YDLpVDmt_Bgl*P6k(r*=mZCUI6W>;{>_)^3oMQtk$s zTk6{8e>1t>uyopUmt&;3z)N! 
z`@?q9fHe`idb@2gdu2Ur>f)_W+StYKc>2W8CDSrD>u$7XRJ-!GZLe{aU`k}p)QK(t z=^a?#P|ZBXNYySvh1+GHgt!gowOVRM_@`8Fix9lsk5PiRD*rdaREP#2*&KWXt#wt% z1Si?G(b6CGmi|Ce6z$Uk>onL)SVs&B5qJj}=;+cvtMU7J|0| zR!g6Z|CHb@>IDRN!^YxjfVb!Zk(vYE)Jq_ygxuoY^j{un_gb^Gc3JPWAaqcPIQG>e2X@(6Xx@;|A=u^!F3<*2k zXQlE!Jh6#NS=OkjgN93v$AK;h+2zHxApD*>Fa-4EY^9VF&4U3%P#>8L$@-O&R>JjWN7c>2D*_{On~_6WnFqo0J$Sf5Zvx7KSH zZgU(nYw{W1=ZIS?jTB8S`b1#S8Wz|CjjqGr>-q&mr{~0&d%kFpI3V-O=r0~4-Y*XB zeVEh2a{M?jOT`*0A%noj-;#X)q_yM~B!mMrk<1{Hc97i2&N781B$HoYQe>xin{Ddra6iy{cF40e3HvYb z3KCSEzs?UfXgbeo#IURT-}yf(^x5?+`d`Ksg4lK;nY{F?I|44 zy9^fsx>qnIpu@)qzY!t{WhIy9gG3iL{>iceI;h>1$E-F@TtojZs>A89R_}5g+8xgbp?&;N zN@#}<6n=xCT|Nykp!QH>X0>TzHG+0|Ks(sSiM;+X)8=ptol_i`YoJB#=(2u@yMgjo>2vxV;ht+|EFTGO}z!gm(%eE}cekMi-o z8wSG#N@%G*g?SX*$CeWe0K;J;yauoYq(uac` z6GrWH{vMbVe$-ASqV@*LsJ&yI*N!WgYbM97mTPT@+kqT+PfZ8gvrFeGDysQgSy-5P zKT}$l#=UFXY}ZxR;&wOb;@V7W_%(;3`10go=%x=rp$~p6N(yV^_56v)o4i>Gp`3x8vD3O^ z6~GG1`BsSITcIazg-L0I74$`|;Oc7c6X+#8L*aJl$HC%+cHr^taEWh+bixknMeJax zlfPkB_6V7iSANa;>zDPJGDo)D+$s(^xI;_l;?ASecDtzxv*!C=Bty!zJ0!O z;oY0j@$SFkZ4YxiUwgx42xnPUOH6TKwuBHcaZB{AmL-IHduoa$B12W4meud}+p5Pe zz3mqscTCT_xps`>i3jJj8+LxKad!9tYkjNq?~A58C-Rmn&se*{xwSaUY<==u*RSUU z~}DPXkG@Fn0l8mM3ubk*tfUq5Qm}FsRb7o zzSGMucz&a%)Ph$$bskdRbJRQkuvgt99~~+)XsUGRcg*U#{s}sILted@u|F* z?~HCox5s{86|h*6_H z$JX)Y?Y7Zg5SeSxzWp#RyN*z9@X+n3}{)*YveJQ{m_OL58gU*ngZ8P`X9y6c=A&x@0+J&$b)ExD)D zjY56bvKzdq7gtBl%jr^DtZ9=QdNww=yX)_IdmCf^T|JM2`r&-_kMPyE!quNKm{7l= zsQUaHC?N~kaIhmn`T6ewxboiv*hRbt&`k6_0RA1+mI@Hc;JB%|;`{$kGw6SUHT}Ct zr@zhlIbFw{@v@$o&}!Mw3-fZ;x|BIBXlT6Ac;St^Q(qkkGw9WB`Li+I;xFaxPu+I; zW24$*f7sq^myx2e(#Z(hr=W?QP5ZUn^+@FR7Y0pdX?4#@zR)x8iRrMSvtEfhp#P<% zVuA>_7Qbv<-%z-@ zFP(Ew`3@cyKQn|ygNpBh`^PUQ`%3XyP~E}9oafG>yo6T^?RG^UQGGtBRjJzn%lPk6 z3xKiu?Q{UHN&45umjb6r7~$g)$9 zKd$K&|M#v|_k%UtAA4j_01Fade)PErhN;+uHaZL zwTT^xHB4=oC%mrJDKy%8mi5dzpH~?8CO*$-d%c}&*ub>BfU>B34|R9#sL>y19UGqe z#(&PuLqTI~9*Mt2^SJF|pQ7fo9g1e2UFUbjVMFd;o{KyiT7~N>V(U(i#ZQ8wl@XZ4 zo$W@L2USMqM#`Zw0u}8Gs2PRdK~7e)M-C8Hv*CKBmQN3yQfI=_MgCA|t+|Gn)`a%( 
zzpEXTPg&uH4;g-gD%|qus)8sYTHyxq=WQdzM-^`QSGeJVC#)%90TjpO634+0L@V52 zg>dry4^-ivXm7=x`>#~E!4eDF6aGMH2_bf(3OAb-ZZO0`lK)YKTdoyu5Pz{H;RUF| zEyoHsNWbJbAw8;a%eTS}>Mw0asE;b#ELXSzgaY`*&wczGhK_iX+_G3g|GH$2xzc=# z6$k3)SO_P8%N>=$zuLKuU+oy(Ag*>c5d~#(wk3_@rr@dq)#LgM)#C`(ZWSz~sGH%l zE!#YG4UP92oU8vxbJbZp>zS(GlP}b(Uy$+O^RCYu)7wYhh(4f|Mra3r z3prB$qG_gYWTDG9SGVrQQ-itOI^JC60VZ|AojEF)N8bF|Z&Yi4da$_ZSHr^Gjrtdq47?7F=fj8Zrj%V5(eZg7Pc35Xv_YS6+CV1`}P9E;Gfn7jzZXjLS@~UyqYk zn}40@y|K;w`(?cBwSE5{G|%zyT3*=g7Hds%JtL#MoX;1eAGXd((7!nSrb0g_Xp0i9 z<|ik7jgEBRy*qMy;L@`Awg#n7a##CBKK+VMboI#@a~srO$p7vu_2aJABh=R;)few5 z>Mgk+m(X9Vo2W+g<6e1fGv-s#(E0E5?!D;z>-rnJ)s1dvze;}eSKcPQlyB)ry{_JL zFV(hr>|$e=8t~|yx%+x3Ay>W5%sRKB>FULIe0A>WeyLp3Acu1Jo#YIxUWW`fNg zkFP(L8)(-~(>~yJdfqGlmUBY~Y)(ARJFzKt#K=(xtd3r|*!ts{oyOvtdriv!6ldM# zKJTV)a#`f?vRscGmz*x&Od=ri{2zPg9oAI#t^X53Q>rLMMX^g&6boVjbrgGL5JgB7 z?24$U*g-|?qM|7Fv3IQ44WNQz866Ai*c~ijnbEPK@>?e+`<#Rjh+odhS0)K&@0hBn`^$I6QCvaBc)Gc0#6FAAUY}ofN&)@c)+-LNl0rS&L z{2PrtWRYzD+NIIG6-{g2fAsj`yL(;tBzO%UX?-Dh>j2Y&HID6o$Yz~xR+tnpt41%uDO3we`@c8zEHml#z_U=A{E?&7Cc@^1h--YXA?v4nQxIxenu*Jij)|V z5Xs|- zx1W0PLb3d*bXz;l!EW!80&73~{%_{UkFTfN zCVvZD28E^f_`Wfq`UI)!TcxTupjBTsfv8@VQC)!)g1U06Tz2W}nSRpOGuM;XGpjPM zXR;}wGXj5oKl-)Ics05&=1lkc#nb&k*9Wh12c3ACu^}?&)tot{7Ou|zzOMhcuHm7^ z`$HqMU7x=la{v6Yfq`p1|1^zwe6+=+7AoeKf$RS2_aWd++UuFW<&FRF?e)n{JHqlr zCxqJ#ss4gsXApj}RCsx#01$rpKSX$IMRbT2qySJXs!@OfmB554ooE!`+hlRvh=itH zn+{zW8=Jn`JpB5Hdl_E;9$c{TaqfUY{rifVlwS9B+=o_%2hv8qzO#Pdp5X-$xwpp0 zCZ>ku#V@Oz_p4X)3tw(VhPzFC7PjiRAs|}L*ModO`vN7+3a0<5ouc-NiK-YSje`6N zsY)kG8s8x;FTZWF>X%A4b_O>x_FnVXwyTGqhQHfl6U_?XJ!$>DeOAXVv)39bkZD3S^A z^{;$OzWxfT9gruhV%*tK>47fY+3?*SbEWa=$AK@l)v0msP_M9K8x7~R9d>BVwv8vk zeIHh9Q_nZ$W3d;F zdGx69_dU#D9CYp*^e?zj;6!WHIO2l6q6-BRRx$2uDELqyW$DJ9jk#ORrtLo9Y2R_! 
zCvV#>dAV7q<}|E7Yr@Wuw8?j$1v`1zmrb4$yxh7XJ9<60V8A}sY)lCoO@8ADO6>Bdf17&8=Y^ggld$u}*rqmPGTe5&-<>mE zoK%lmeo|Bmd?>IguVwE4Dy_<_xTWc;%tS7oSe0WJdG}d5ZCbf;pS=GVxMoAPpYN== zB~3hX{k5j5Y_~tSrF{072M-ouTRE6ImOwCNnOR*<5c|i<;hDo_Bcg%O}zXby!YBa zuXCLV=l8zONLk{3_H?&vk50#*{JPTMihYaA6CQkTCrNuW@vmc|voO4 z*>9VC8Wj9{p8+c7{tv4j?|YTeanZNo=PM8R^>nky_^C&m&i$9(X=t;r&t*`5nsn}$ zU)X@T|F^G1eOpEK6=E33U(|&S1^L-gOeZdE_`Xafh)NCT54wBu_xy+3hGdP} z{NVQQ-BV5n?VsNDa`(Y0Gv6(3KHYw**NNU=KSkRWNMe!tUCX`A3R{!CZ)E$0zPI;Q zuYBUf)Yl(6*Mf!vCMyG@Kz_v{2zp>|nndJR36sc)aT+EmMiHcNXGuqkARaER0w|k| z9otM)L9(Hx3qrC$a+^%9OoOvqyU20bNq?)vW#N;C-^i;;B`k-vw}4Fu|Mw<@FrO%H zE`a#k;7+8hlt)~ae_R$E*wL9dfVix7a9O~{cfx_LJkl!OL;b1E1zcAR?5N=}d73ta z04H__sds|CxquBJxM~OiM(oKZMkuYztlb4o*JWMXUC>HH2oOJsdH)u1Ssrm&kbd8N z^4%jY%P%eq>L2J!)JI&FZ(J7iKlmrnpM~$Rk{IH$C5p>}C;3lXa$!A2Tm~k4_WIi zD`6onTf(?3h<-AKeD;XTa*oS_;HT>o!4a3`2bTraCEJMVh|BVZ%YyK!Rf+IQU=Axt zX$s~bE?YD%3)-jAbXussU%Ps35trp3mjwsTGX0Mbmz}H?Tozn7x0#Fsm5`=}q=*8o zqobsVHgH*RBBQQ)C)hy?aaoRVS@7XJtN)84y2grIny$*acA%vVTo$y?^dMsZaasOw zSy28W)Bgf-S$=U@Q2(+E`R);yM7yK?P6Sc(L z)O2NleXsUAo=936)&JwW@Or3gyl`CRj=}xQyRQ&=&K8d{u-PCi8zTA5y+Y?wO)IR6 z+#?jusN()uT*{prd2E47|MFT>zW~@vk%Ye-NDIADjgTZp)JkpR=`;h|1Q)2 z9l=>1!C8?0o-LVx5S--~oCWpoFC*$BILkLU3;I8_Ci=5L9##@VaJEFjS&;k@)Bgg& zS^mITQ2q(Lsw}HD5uD{0oCWotMiKQ9oYew23l?NE{T~Zhzz$jn&T`Uc794m69U^5@ z5No|vTImKR*v7Wg)b(&-@$H|3c|AXC2&Xis~Rjd-TD{VYgZ& zHy3T8pG%x_QQTVrp>?F1&~r>atN)8)&rOXVwo$+ab?;%D*a!3ZMQ9zVrr>}n#q>Wy zgqDAVmcFC{F6c4+zYwAQvEjf&Pj{G+6t4?`6T;hM;Z^Abd&2>*3zgh;A;t33fHF1c zPmEAnmsvXwny$-;(6(&((Prb_upGCpGhkg7riiA)D3~NIKk4VY9jS(JY0lK}0TCY& zTAmPEN2)O`y)kqCg$ONw2(2UKNvmHfi+uQq(DI4UI#Ts%{Y{wjFBa0nN@9r6mKZ`S zo9j)P_dgJ!=(j1FZFym9P+@ zEkT6Vk*Y_}Jms1b(Gj8L6rpvbJZQnK_YlDmq2(T-b)*{5s@v8esv|`2SE$>O*X2~E2;TyJlL6>3^*f++-)LV!Tp}y=N(5)b6*&6ULr@BED8(kw^09<~#P^pXeT6|9IxC zlSbPODajV`HQ39aE#1pclJ4cx4e(4W?MuzZ-|(rr{p9jd&sJ&eXBS{+XO|yc z^Uy9^(X;)3ExLN}^Tcfqq10@%_^Wy^#lrsX;Thwj_S7_jhcpE=rlt2_@-M=){KB-Le#4sN!$+8wZ8h-22U^;I zX+ir=Uy1ez)A9$?g7RI)5#LX0cIZUf~mTOFzb{|9J$Fp3n&vF&p 
zv6Z4sd;Cq*S0YGQF<4WAM8{$awLD_AfRhJ*C(>gS<2s5(Ne|$6N)O;Y>3d${$wYm` zYWc=$LH~Zt{wHF!qm4eFK3TThvy80w>)$7wtrL4C_HM&vC5qL8Hyb@2sI^NQhC(*IvEv-`KLU=f$+=30i-;&LwSM=qy2~rNO}I~TQYa6*|gmUJncIU`{ZrgB`-JY)SQO( zXHD1{k~aD7vtTC=`?ASXf|vVQ|1~)BM8v#H%?wtw=oVp8KS{58Ib)ygKSz0j6JzeH zcYGB97L@7ayTkz{sDTxQHH8{_m*n zAy>#D7%_7KF+yo|W<_F6S7%*|#9BcPLHw8xM0`XJc|;CD`dFs_2_lF5B8R}*&VEBa zd_)fUMh-#$xeWitLLgXq3_E$2C~^pr&tvU>ju<;KOw_rJpGY*KUqr~@Az_2YhEshy z88;J9+vHD!r%Im&KP!D2{0w;-dYM=y|AZkd}h=AxA;R2LHPx&{m+8RhmIT{Mr#i>&JMZu=cL-FNwrTU z+AlAl{n{m>wds^!y1?Ja&ji$upbA%iJ7?-QES~zJZ+DL`tjjNQ2dhBlUEg*+rL0k}7 zZ3|ey4qAvDme3yPSm`eGX6Y4+`V=^@_yKW%wcfH479xlLGjgc-WEY6Ol;Iz+lPBk$ zJVEgIv*asB#*-h$6I5T`l&Fr3CvS`=2%m6}2(Ltwu!5APG>MKKY-xq@1npP46YY`l zAmgb$#uJ=ald9eccF;n`lOx6xd|20k_@J~Zv*MPf ztFo>gXlaA-1nt+KBibY5$sgkh%Kzp=lt;#sU&a&E-*}3skBlehjHhA)6FAG=+?


vN%U7@NLWEwQ-(yxf-ovV1k8CGMzppcq9@$QQ*-lV@e@PHx@a zOT}M&uDWGNO3&|Bh1#rVN*zx9cJb)4$_s0cjMP6jYm)oai{nSSC4>i7G3r#y)RbCg z9<;T<_77NatV!V(u!9z|ot&_p;K1?Y!~xcN%Su?tc9t;P38J55_#b3DIcGaT@KYPe zSB`8aKWrzcF0m!5BiqRv+X=#_#1Y|@C=ynX(v%|6u_G<5u$`cNnkms9*-rl1PH^DN zLgE0jo!VhL!G&|>hzrPeYLD#%C(_rdcY+O6W7(H~oRjWX3*uiZMZ`y(lV6u?w|4&{R$F3bNzP4etuf1@;I}mf1W*PseGI@HzYb2%^|0sgU;&XFaZY}4PI{^b4EbMk|8%Jw+_%^_bs;+*{9ob+)P ztim6M5cLt~lYgoc9Qf9ceEvvvYKQ6s7rs9uE+EyZJ*v}`GNYLKU!@c542)DK zS5zk$A^1Xugwndq3c;GL%eoeVwL*1*_`>l-e55*gq&h)*{eOt`*rbzRsuR>VWX^xF zNhhC8I%&!T^f!7-zJ3;E!b)Ozt?_@n)~LAk0FsvqA(G?P1D9J5Q>B|eJEWUFG{A3G z`W{gpsZM^WPEg-8kf@JTrxvJAu)yqo;TEui7E+y@P@Uj_g_tgXlaA$1nn#Pk}-f(Cx28YDDRL$lt-$QU#b(-uj)PVJDL;DRGl|ES~%+0Oyu^XuK+Zo03@eg5#{o`I>q8u+FNjS@bkuS|U({aavM zw6)y^AF6|a9W^{AFX-@t%I2qYnm4Pq@c6V(@6%7_z6eV0{ViG5^Xv5OMsUKJdH+?V z6YLy}WG7c-Cm7*!olHzh>oO|{Yq~D$S`gL>*$LvibtmE@*~uf>3DVcONTf%ylV7qE z)OYVd)JL+DZ?Y5guXlmy&mv7&Nes!(5+yr9a*yUjawI$XBRfI)2FXNu-1y;o<7bAH ztKB8#YMamyc_W5@K(bQ{WG7hA_y`#Q1uS3(EhIZRAv?i=CSJq=)_TiISV(r3Fxd&B zd+jBnBiYG0*$IL-t3m`vvXdXO6I5@$oT!duCvRjY2=8q{gjZrnSV2lthD67Xw6sEY zg7!Yl`)5dY@=tbx1Fg)+*N0~zme?Zoa|KG zVulU=PAkZ?Q(%9Sjr6n>P`mV~NWmDfxKE1YxkTcZE_p4HtUo6)p6MgGD3MfM9GJMy z_DxO&D+dRw$IJRyi4qf*uUon#F_rQ=FYqtrXBK*(<$J1kU(-%Lhi6X_3XQ`(-Q8V7 zAL=hj72oiwy8fu?F6%kgE6-MG?PnKYXJ?lmUGvZ`ThX)qe=WLt@bkoN4x!X+v-qoe zFU7+C?%^5ZqxRcn^)ofam-TvMrv>Y`vc(#6ggjk$$=5a=c9+gU& zH;!BR!C`%^kPY@pf&OjEW=5raOKqt)&n3yY(h57FP0ZCF#m{qZUbdJO@lJEftV=~= zff7c-%EFprBs!LbwS#wp_+44wzvAncBl<;z3?33TXlywB`e-rs_Z40rUAHP=$?C-` z6N39lbmaSweK4PYyb~Pg&b)t#c&B#oPH>^edNK*AgbLYf0pgvU;ho@wpR;-=*kKs) zPOk7yFyhw(#0aHznU#h$U6*w&4QmDO1o3;h6Y&x6v8W=@IYb7w-i1`#2Hx z5%1(1?*#n=w-fzYxCtwXA>LV{cqd36WKSeVypun?6O<2T_y@#0`NcayeQ`PR-Q(UH zFMDs(6$o3{{_K}fxCQK>RgCQ)rR~pks}ckH_KDY-?a$aNEf@3}X!3PjWXQg$|Mtyy zj2*Ps)xqUT>F^_8BIkVj!U^694umlK|E%?vm9P-+EMdG8L=Rm^K6}JFOB(OY&;S0r zNYw3Dk!Zw_u(4x6X~7!lymK7L{#sN!_}ipC^Dag0kdY($4+$F(D(c+EPbBj5>o6cf zB%gcK{&$3wNpt2Z-p~Jc^eB`1F64vw`Tvf9GN~2+Q~#YYQhhdqKDD8JzJM|tRG0jB 
zShE6F;p^WgoUIdkCH8K^WhGXH*QOfMk`MlkNRCxFKUH{bsxhtnP$vIi70zE3UYlw{ zt3ND}eD_#|^IV0?EEvJ?e+4X%uY|=sgvBb{|H$^KbH=PnyYcw&od&#Oc)s+7F$B!moKVGup|B_X0XjTU$(jC)hy?FIjPL$x5~=PcBV-P+FB)aZA%xS=X>KzAstH_Bf}^BHH66 zD}F9n$@Vy>f$&tQzF)g~ZSj&7f0wLkQ=arH95t1wkC&`CzhuSiuF;P{TTo`v--!IU zyJl{YYQ9x;a7uP~&63<*Q@mDXNtw-H>VGACo0Wbw#kX}V{ql0HDnCE}Nj@Ce)}d#b z7*opP$OQPy%3$jFhV*H$KoPou;Q+spuLamrpZ_~o;J_?~|G+y}+PQNDF3e{3|MAY% zk8J$A_d4?cx3^%N^>uN6?RCzZvpZMd#GJKcMpEenI~(JjE3WQb zff4g+6C;!sZB{tebkWweaIBR(S0MiUO+$_#ak2X2mqTb5&xrrxVbgAo-Fw^2sCZ$q()61hgk8AJ6RnBkjo_?db%x zC#b({Hu>%gpoZ)o7ufGebpC4ZkLKQYdwTn(jJcCrxA#)<7oV$c8IscTyH%k!>zPuA zQ@>q2x~%fT+9MW_S6pTDRW{|xOykpK?`Y54rou=s=S%u zpOsc+R@~BbRo1lwExu__dXE!kqpi&S7o)VVsJ~-4 zQ6Fhf&S}q4(p#pRq_4KtqX0bInM35qjUEm*dK5{|GL+fwzC>^(XoQt~HHAiWEcx<9 zdfouWa{!?|0cE7z4~(Q(`124?h^(E{6@YZhukOs+$T7Y#N2;D?o&J5C%CYm z;h$B~hU{}l?6uDs_X$oMVEUh`bb_6dk^AI|`vfBnG5nj-63&Xrnl9nG7L&EYeS-Lh zSCe%axlbOsPmunIEs-9%Pky;iQ2*#MqCRq;d~=_m|8Z-gKZ`?Qg*N0qOO*Qr$xkr+ z8*-ogai5_4DTe<+?vr2c6VyK)MW!0$K6&o)NkbA~L5i?&3)n#mxlc}d?SliUO#egH zddo^!$bFVD_X)&#+86TK<8c57#{o>2Pn!D#!OspOf+P3I5BCYGpL;}9NA8n1?h}O1 z=tG28B1~98N>jo_$4aaLf~ zbRE~Vz^oMl6vThX^uI*{lt%&-q<>tVeD_Fz@=Jh%`cD#w`bdECO@M;_e=+e&_V)~ z69N<*$c-fqu-02v!a@SHgb7d({nb|@Iuf9q6QIC3zZpRU$CDVYPGTlV`<`r-_C0Ap z^Zjp`^DiVoc_TnU_-KzMN35iy3aw=FkI9NSi*2hYen6P}^ z(j|$hl;0(Rzu9#`=z*5+1-<*4cJetqdx}tK9Pa7v?i%_~e@UwNhELV?w@r6h&#_*4 zwn}S1y8t^oyZq>yhj!VDp6&l@(ba>WCvI~HrDmJOU)6gl7WQ`!&ln%I-!7}4sZmGc z%#_`Ni{ZvIuPyHO_a>p`sF}gD_2VPN;*2woGdlFBRLZ<*+{zCQ>uZHhOTBq6Nye2{*a>Z7u4<0}1t&fYRqq5lXdwa05djK5eEx^{fCT6#?>`2v*^uq$ zJ1cHU6OUYfugP;<-yZI8FZZqMofo6pwBDF@d(G{b$(N3~Pg(!mCRA+D*3mMmhro~u zd{m}KwKw~6t{X0zQ97owL7*ryDa-jtj>8wj9%hu#EGQA47?~(7MYl;y(S~#>niod2 zM*@^T0u+@0`kW|_1Sr1*D5(E^AW61z1pLXa`{> z{SC69+F?PR;mr(iKtG<08e~EFXF;9e%?xnCumW)bS!ecCI|>ag8mlOiT*4$ zg%#kC1uZcaR5sUJGX2ky1?7hYl}+_lmgHkc7L-30RJQj}b|Fz8Sx}x?P?-hRWeT@| z9kh@IEpZl9w%*z-B@VFGTUNqC7PJIeP-l1r9*P`0VPEjgL)C4atjNRypriujUuN+>8u&oA@kj)p5g3 
z6)5NHK|Zi^HeIUyE~)lSXzeRKB_BPqp!~9+^obTY;K1+?$b$0Eg3{+G;6jyLG7gXh z)eZ|PbE4Wf^-i#Z7P6omu%NP4x%zkFgVL(Zid&kl%DQ%-#dkMQdXE!kqnZEkR=-*GfiA6Zb&Sy06w21ugQ-$Z_;DIbH_f{l-J2Mp@p zSJb3*Ndz%P9JCx|?!w&vRpLij+H1;>AP#!;!O3B_S|m3YZHWSYWXeTx@BIAyRLV>h z3z~IP#)8sNw7IYhO@+ES%Bj#wn^jVwFl^vANQG*T3Izx1F#Hcvq1vHB!3B4BG7eO- zh#KCfOkA4Sw@3UsTV1|SsSPR=oTzt9y%X#VjZ`RCR45qX(V7^cw5qd0w5F@Nu7zmc zJA~3H8pLlPA>tzy$|DsD(l=^Kq(>^0Un&&TZ+w)fk5nk%R4C}*qzTcVMX9iI98#er zN`->tUb~3oNQLr8g@W?UY7ym;3gwpy1@)UV_rH(|)dCd?7I@bt1E7Ef?4X5IC?`}X zH~{>Md;^ZP-m(%FQlTYGg@Wj3gw&%1;JY{B7!3o$`2I^s<&bKKO+^&8x;x= zbla)qbH~HTp(|r!(^s2^U;l6~!|UIJC43mENQHv-zF&y;NQLrGg@Oa^Cld#d3e^r3 z3NCciBQ78nsy!+coai)Py%X%9g;Xdxy<6;~TC@^vBRs}3sy?A9pa32ZfcV6IM z%Fm2`Uw|t7eSxE#y)V$XSnmt){(?Mx#}XU}xIi30K2$q=D7et48*u^oQ0;XM1t$XU zsCR;$rjZZjiVp=Nf4t`06&N{1i$5;^DF7f$$4aA4?p;s9&CWhE@+Lra(s1<{AMAfh85$~hkjf{)ls z1V=uUA3hXRAIZFbfqW=$d?*MXzLtFMN+b#^NNGx<=-8nc?=Q&HjnDy#9$kTGk4M6M z90|{o_HErG?c3Uvc3{kE;sElY+TlaNg|XF$3&@9Rj}HYW#xeJQRXV{ATF8fT#D{_p zkqyX*P+FB)aZA%xS=SDGeTk`#rZ1iwL2RC}F7!GY;a|8FEkwL^-63o|E@ae$<#_DE51VwS#o zC)l|fNl~syQ7|HQ0Wm^peP;!0P1ko_3)WicBnslsVebDSDas=$3ewMwA>TccqWqGg zp#FRzQ6EWBzDZHgf58l*KZ{&pB|0QUOOzA^$rpYgk|Qa~A1MmT$AuH+krd^Z6b1Dc zzb5JDY z6PWv7NQ&}9ih}AZyOPfwNm1TNQ4l^cg9xw0q_BdNrc8>C9gS&)6b0>9|3b7!Qj~vE z6gcTM=ZFJ%JkQth{9Gxon`a?S5-KVeI)Q;%gNac_e>szGMIW ziSF_Bk7v$0X|&ytl57!QqbR{_Whd6(RPO{kXdx-e5h)5I;a|5ulyg#4u?PZ2aa(sH zKNdlJUYAj%MQthb?RSY2N;C;85^GA6=vXA?ix%Bane7z<0cxhIlPE)q_A-={q5xEz zM~|YJ(Vbi5%xK*UDw$CjRPY=5oQB;+dp+6v**+q)?&*8U*PgAZ?z{Q;zjrD&uJrv@ zhxf+ACp~@pca0gl`Y%aJ`?u8Y8x@=MN_ijsTVPzYwcQ3Es)KkC3 zC^(Vison{8!bWD4D`pgo*q1_#P+H|#5nIz$Ue_YFR+v!`{{Ykf2ANSFnNg7b;8F73 zBQwe`GYaY-ZcNliW|VJc6!cF{Ci=4&7FMo9X0$|^QIPy7bN?HeQT~`wQ2rS6{y8$E z{4%4U{t2f44KkxzU`D}$lbgwGU%&!(&_ZUE6J`_~IL-7wV6C^TgoVs#2{WT0x`gR} zhRi7E%qR$+YDVT6WJdX6MnUy7=KdElqr5SrApDuHStQ#wV*4#>2^jDq&( zB8m3MjPlQnf&=Mahy%!sYKIvGcKUo2aRG4&K5+>}W)z&rWcoj-bb=kUkQwEO83i9M zE+HdAX;o&$ElpQtT|3ay1~UrUUp6P&BQwe$GYZOInN5^OW|UuM6x6?JNYqDWlyhcO 
zap?t8@fy?r2bW%H5#)P$%ax!vuQz#){@UW^u2CHp5LtswOP-UI-L6L;?ZT57uFsbsef+PB=@No z$B%SN2oJ1c)Tx%KDYeWzXe(S@oi1&LvrF2Rs0jrYJbO~O1?-@O^e89vC^+z90C9k| z-m(%F(xW9zkAmngnf^COk8)0rg5bFw$ybi_C_nTlsQ!w%|BLh}Z}cb#|Hg}a?n;~r zD@bX|sp!~YnO5jg(EjaVqCL{1{L`c0z&}j?2c$=}Lyv+B?@p0%pc0&5CoJqZ%2~%z zpsD|Dr``#6&_a5NLr(y+T-8g?5|;KPUCi4RJvGAnLrx+?41ftEJtQPBPq)BhOh zQU2&rQ2sO1{~PI1e(6zAKac5ukMt<#^e8jsyHJfNpccQT;glcC&y>#)qqO$rDf92% zjP?VD4+x1EJu*zxxs9Jl#0tTh@*_GHg83pw<0!M4Zz!`E6W~70{ug?5XMmg@1@PMZ z#uNDOSx~NWjrgIt+*qu~Rp4q+D^NV1%JB84}2mhEL0G8hdAZUZu+V+rKYgSSI|4)$SKbC;r}% zwlCvdev5&ln(SJ$`}2Y^F-Dyn9FG~44~Q;PT7SJ?ltAKtMAY8nZIajcthB{#ZhuQ_ z+47^!#=BuTZe3^i2`OQUXgVD5Pm=C=Y?JPJG=u`!!srYUA30JUIZ}|m6jT2qN6Ifp z3hI|WNj`k!NcrYSL4VWcM1L07!b*O~k(MY&3X+>85y_Dw<⪻>0ugH<|%aMZm zmfOg8j~uBMx{!heRxX8Gzz$l-k#fS3f&*oD69-u9E$d(tInom5NI`UKrvEW=q?~i4 zAh^v^@|7b;$`3~hs@s(!sv}3r8%GMlmxnza*?W~r#0o1&X-cf<*b$mmI8xBQ!gr!Q za-{roq~L&k6mbAKQtfc0;6i1l{~2GZIlQHmfd-SMC!5Fc)&soWHiR8LO@>(KU zpCU1y=_9!)kyKr_dflq{l?jPfZ*nSFIjmb1uw?b(l?iK?h}Nc4ewhM)6F-a411%#2 zz5AMW@;N+vicn}A?&m3f-}c;BV(Yt!#vy49?M%l*i=mq(8pf8WCl z#zE)4LGYN2V6K$mN`jsQvU8Mn0mKEB93yrn&CNPBr(ykB6LyBAP3ELCsmzJ?kJUTD z4q8Z*azK>IR^^UEi4RJvGAnLrx+?41ffhe)>J-r~psG7%6YUZ0;u7teDqRP5NY{bJ zbZ?q2Vxl|}rTh`4^l=ub-}OFGABj@ViBjg{cZIWD;2OIJ5&7}s=lbJks8UzxOG)w zM7ETFwiF!j?@Js&wp2T8DY(#!mH$-r$kZ^2CoWCw+arFRt#n7k!Rqm{K31Z{gyrj& zE=f$K{4NRn&8`bzPvpItJ(1g-?TIuk)}BcE@80Xo1Ki$@HQoN*eblfg6VngQtJc@W z`L)+M@9u6l-PhzkfB13Fz|>z2d{cx*37^teroNBj!CwdPj<3T0m%TD6RmU6|G zf)Ra~_m7n-16CB*v@+1OD6SQ@6vXd4nv4QuOL=5VLHeLqM0#XP`DIH%ebF$YKC-2J zv!$TFIEU!ZVqI8S5ZTfaWlKTw{=r0YWJ~#DOF{XNETTNJrTns`p#A`VqJ9Cek)5~j z)%%iv^_~VXz=DCd3%7tBw2&?3ge?UJ1~dE@YrSP9EM!Ydm@Ng-hx|@Hdt^&FXG=lw zVJ(T^$d>ZMmV)ZTlZoocmh#4yg79JPM0h2Ng%zYUrC4uI)rLvM{y?#JmPg+<8{-dG{a6Q*(+_8GSQ2uk8COD zY$+qZE1Z=@^|nQPxgJW?%pHLK3(|qKVX&1W!l&LEmGr%x}-ymfw%^iLU+N|N8ZO_SccJtMt$dzRe0U5b>c=9H<- zi5blOXO&K{GdWVGTu`R6`zJBX`KMBezzXG>mI%5Q%6X?uU7?vj%tx~r{sk#ho+wlL z)*DDayC+!+kTT_uGL_}RIai7LNSX3UnYuy;Q~3Jlbtn3>s25f!M9Q?pC{x*7KmQDo 
z94S+NC{x)~zmPfqLCTar%2c-Zu!!0JM#_}u4yG~-7JHMar+@|QpoNraiBqPs^>#@L zae%envJw_jrX@(3xHCH zlqoNisch?P#d0FN63@a4QkwECI(E9oJ7p>>X%el7_I9fKXO0c0yJr@oq*2*DGq{h0 z@;fi^FXd-O_lcki@11#+vpx}xi`6H>cSuWqDO1^r)+*-xW28*^r%Yw%C~GQ^ae$Pm zb|_Ps6KglAcY+*>crL-qaI)oZJ?DzoC2rmM299cb}QnacJ!H*6!? zBW21DWh&d_+*pk$kCZ8Ylqr3j1$6bM^+bK7OgYCK*GnPa69DFc4ec}_X z9Ny?ZK4<$h-o}F(J34Bt-dN%7;g%jzu5}D+RS7(EDq5iLX>Vs|Q$Jjn(q; zYksydsxnwrkfmn`XXbKc%DxBp2Wd}ng=>lXE?Glmxi3&ti1>e+cdZR*q8 zG}nFBjG8n5dU%r>=&9f8MZ0a0*95aYTh;wVuY;Gnduabrp*6QJ)Q_#6(J9ZO+tPjA z%(L8f{x)ZTSYN3_|ktxJHtPt=)ZObz(1Sm zztmHvegSgI6hLl^Y2NT}SZq&{^QL|^Rr0359m8+rH5hg^<)1eN2X@vc4j^x;9o`gN z*mZ=spptTwPki)ZguE#yT}|n94o>W0?!T#YLS7=!P9Sf}6>kbg?9C)Y0(sN$Y1vI< z?`+SjR9S!f_vH)Agdefm{UYha-#gOwWxUI8F>q9qU2ArKUN9!csFQ=^F@y2}(Pc{O zulI`*Nc@k8+IzfB@*1C&wz$phZ)q)Ceze(mH!R1k>kL03B}@@bheB+Uv=G}SEtDJ5 zqhKGyzawwTBX0`Q?@uG&J@Tgf@}{8vL8ku^@}_+Arl9{JmHZ3qm=t-_|AjZr&;Nc5 zcA^X^)2{EyG*jqK6s!!)MO{bu_Y5f08VtHoynnAY{CoP8$>&fF|M$`Vo{%!R?D7-; zo*rfL+t;7?_XLz#)YM}Ad*h@l<`!5nod9VugQPo={CkMBaF4V&0n!4hAAL(yN2G-p zq{RtdBLd-%4lXB4s=o4=SBtZ<-;}!U57ETa{Dui4RJvG9oShUX$mz zzCGOGUhZ4hJ1<7HX}vM+_L|!KS+xc+|UN)&ob{{A=1Jh(&7XG9n?=BL%#b$ zHw56U1Uf_Ply^zFOd}M2nbIyP7vu;<)wK$IwKNMPE)lIwr~EPm{>FZ$fL9+>gIDk4 z46klfJiMBh&RvSg1ax`I_=m`^5^0g293nEIDKb$2P!UoV7sRM`kpR)p0xFG7ovbKR zCmVoAOs59GKSA)9*?@e?Y)~EgFFqdSWg4e3NXyhia-;?LF^lbcX}Bf1fedc3xR^j$ z`PG2`S-I~iGR#GhJA*+Azma1uDhH{&K2{#2$enS*oh3*&Z;wciJKNB1UT#eMsWD>g z$S_goHhvkItJKlq!zd4Xy$umH{F=R>ezD(R zTTM{2lAT@seV+T5`1wvxvkcwx&1l>C07s+T<-cA%Q(qi@Iy_GDxU_34qgv;p+VpFX zXk~8q;L*FMJHFKk?qNQ%Qe)=?`~IUMPYsMczVo3;?69THD)+W;7~wfF(tpwRyKb># z7d?4z6ZUJ&3*qMSEe-v}GxY?H)?zBjI&6o>sy_#I^&1ly*5ZMiUu0s=-L4;l4wXFu zFU+p84ebQ8?Qp3rEL`)-Wnzn?;@zzu))cn3RVW$Su>)Dx4@9p;sf z@@gXSiar)SywU9PYTU8l%AGRBaHm0w7k!Nk4HdMmTs8WSX69cXJRUVrknFo`X1vji zf5(pRy+tH)Cb>o;7tuWxWT( z6@G*D-hbzw!2pGN59;x(+Qgp%(`jL2)>h+sPv!lB|9-uvs-k#3XvLmL&kB{^8-MEe zA|*0>-=v)zkKX+{V}F$!)a-hlZWmayK;+x=3m3VtY66RDZi{NL zY7L8O(E86s>a{Lx7+_J&K~b%^3PAOC-;KC~tAKJ*{gbPJVu@@(Sv*)mL{_==RoHZc 
zC2}!JazPOK#~u_|r}!60~=Q@F^5)fHG?b6Z}+x1SC7sbpPcu)HqhL_t+;q4AzI zX#MQ9daVl^23THmP+l`93U1JB11i54O#W9n63`v`Y>lTA1*R^i-7%yra-5m5gIm{y zv_kn=k^$D)|G=I#Q(oV=Coiud^IAOmi!QWZb(I&|`oF0xv|$LtZ^&Y%>LV#^2qU+^ z3AdoQg8}}$enb4hLYtF98$^EVSGdT94FD{(xh=FI|NPD5AC=`rVH-=hkFDW8Hfa5> z7a0O7wJvNJV4=-Hq0L}YvQq9n!@sL^2SRC2^;#D;46x+ppyWOv1qjc= zg%LNZ2dw0l)L`OHJym5Oa6Vch*t)Ed!)gd=zG!8 zDQlz47)M8~jE+uoZ&4h-W~n0P`?2@vPQW;up@GTf0)!?CS4Z)mXntC~RHJ#@Bzz%XZ;W(=Q&(ry7(@;v{j z-^)vrs*ZYX-s^(R+TD%EM0oyraY8k(V}JBJAh`AAi^U@G7oQ<*U+zJZ{ztY zi*F<8Z?O3OuNL3nkHG^nhOzkOr1%DrjeaRyWM*Acd>{dfZ*Gfku*LW;u?357E{ku_ zy0ou)tqU6lSbTF(d@ELRdX%LJ)BjFo@r{+7>f-ySD!Brsw2-ni8%Tx+8lXY?`6&aS zl&%Um$$#lGWO0k{k5}ZyH{@kY{T6f?ZgEathL@fpE5jAjH4Je0jl2w3`F%0dZy`|) z(>2HGdV(}vr${$M=%WqGG82epZps}riky$D38)l#D~(L)X2?qEW=NuZGo*FnpVp4WQxAMuf3vjV=sh=`y zn|qSGyZd)x$DzIJp1M3B(_!O~cmH`L{4nkfHK1Nnnw|M_%jGiy zg)1Ie8gH(?;cjM^RU{btpxe$(<##`bEo zC@(qp#_NVolfw>NUl#?P0g|V69}aWzPG#Y(nEp=%gfDWO_-QP>`W+}85MJ`hTok-! zN5CJ)Qyz`*Jvlu2U*Fi|7}1qA=DDMeOndEe>(B{nb6bazSy$c-Je0QOM$^N?n>4iD zVi7-3U*T1aA%|ZazcfGb>EXylr&f(zz947gn|phP+E0ZGv8KO|fG3XyQOY||<<^if zQcrz5lR~DonnmwGHSuoj)ub?Pb~gw zwEX)1;K^$;mwUJUBRglq)Hp$vM%~()Y@1;=J4I6Uf#5=aacn(%*Q%~Y$%gJ}RV_RC zwmQ4i+~-W0iB+w-U5k#cT6SJ_m-b_8c>1&)cxPHv;{2G=lCa*^on3{7A5Dx(x$cov zixBrW4Jg-kqthdwCrdAn6^ktuUY9F#aBf;i{>SQ-w|;#+q1@T=N2je?^SX}HKNRc$ z=I!eVTS0>T%KF<>Qm=oJ!==|x>HlNM80)P5QGZuz^33Det`*a+FRt#>bINz~QNH8G zecv5(?sTQ8!P!T5zU_GZQN#54-yim}u&;A6=zF`&b~XhE%|Eq<$D02qYkm;EGIRgAfbc~YF%a z7<)4%v*QnA#rbLB!WUW2VD-;i^$%mjmFfS7)j!|WKj>e_f(#Xv`WHD|u=?k=`d2)o zfg-eS3Oe=^XJq`H(J00SL&~xq^ZpZV{PO?XUl!`0v*E7#uaGCN{vpm=)=q~1TP!^!@*Gg6s>&Py!ykTwIY5VU09e+L z8UL6Ac*_AGe4|`4RtgATWMP9jfVUg~kxw4uFVn>2aD)1m4%=iJ)vxm5Bf*0{9Ik0v$>OU|Gu; zVp)O0w8(iIxuBoqf2PTq6pjP#n> z4CytqX!$j>G)E13?p(Qa(Z$WMf9_CumpOxD=N+r|Ci|D6p7xO$C#p~Qqh9RUY=?0p&CAU!_=5A7 z4Tr*-2i7^-`QGl_fx+FgC*~%s-g)F)CwNu3vHfrtxWF+(*|hd?WSFTmt3x+R?j2vtvw`)T0@*+oQsFn44Rjm5X&$d@N!xp6TWMO z!i6t%=?owKW97tbz)LoeO>5oG600y9@JS8H^zYF|z5YcG7t99SW&_1C6K;9*{74+b 
zGLtmqEKWA~sWP)br4Ojxugv{lw19gH^7FsHxJ{Yu71HMhSvM)O+XnELx!zsMTv&$w z%k?E??pB-rtCu1C<4gagJJgV>Y|!F@JR87PfMv&5@c)rzKsk9zustnH35pvBFgd_) zqijQ|x|1#IWiDDplT4C|$bK6C>T} znI+%pXBdAAQQaU48vd$+s(k zLq29t^Z3)|S$Z^d6uRHC#{rnhcPUFB$n^g#Abrte1)_q6(ihfJNOz=%wj1lF*f-3! zJ?u3(yjSSnRfn%XI-gzMD8zi~m?OJyUD>-P^1-7c!?H71?C26}J-+TgfBObU7u;Xl zk>`Kx(wBqhMr=O1AW%FYF{kOPw!OZMxzcin;APz*hrp}p%KK}5-!gNXMlFTNP=QN( zMQw@}zo*c%f!WJ!gl6K0)G9#(BWNnLLQPYlMqD)&`sJsa3Vl7A@#LYO+aiy@S2eq^ zW6iyM+sfg+9$pb{H>7O*dctK6!9isQf=8=&py&|;&FX%_fkL_p>5c<5vVPNe^0bJd zC#!#KyRgDJ*Qvf+*0e1jSnE#H$y4?>s~VEmH8y=+=?iE4CwbX_J0bqqf2HY)7Tprd zC^UjPyrW}Q(5HW{bZ~%T zbhwCkhKozFf`jU=7ey~Fia7v)$q{#m^q2#9IbXPB7GnaX^8Y8d5*P-FIcH#kga0>q zaDcr7%g|1Agb0t5M+o~S1tNqjy~1xWLg-9{kZli*X3qa{mgjVqXU-mE3+xyP87~E- zFS;PY2*FE)pyxK&9vKVoQBa}!e(mbDRaw9mowo7pp)GTXL6frwnFHe({#B&|MUNPa z5FAGc=DdjZ4C>g(-_(y9^sJC)K=?Z^Qp_H3S!kj^V6c#DDkcyN)o7`}h~ zzk({MiVz{c$|D5qCs>9rqQk`Gd-5<*d9=zf0aFM3hAgDHU{10UbrG}wq0)h(M-0Xaj$?)5 zDJ)bCi`$Y>Q%6-%tK?bFkbc7lh(xSr5k=hqE3(a!d6zC1+b>vDH^a$ICobQ?{fLCV z%S>8LUhDbi1gqwmHDAw4JGo|6qs#C8$9Qh{9yX<2+ZAbH4?Z2K-D;G1=Bi0cMHOvR z-$!4YUq$HR-YS}kFYQicm2UhbX>-^&-^qV&Y0~g`?=Fo-H6QS6w)4dB=It6%n|%gB z*b^usY2ek8f7E-W6hM@`D#rez3a=EqE9sh7Dv`8l&gF6+HLISU_T=c{yk1ROe@wi+ z^Io=HR{>o8+FG%@Gu*e%`oDUeTKyRX6Dua&md6U%cd(q}K?jUw%j5y0+H;iw12QuF z1_Opp1q?7Pp#w1u0|t))1EgQU^#3U!z0%U#xnD%c;2~jyL?Uvb7Xt=w0Ry~BWbOaq zJqlj$QP2Sc99Z>^OjH;!G>Lfp0X2$6E8Klqvz2(JGGJiQIz!*4m$;0Vdp$$f(i^YF zhyPU3S|Bd~wYjbuN-v31H;5nUdEMiTHxVc&3l!qC~P` zZD8U$+cy=h92~43FY99^N=#V3Zt0RlD~C5rSFY`@XEpanjvo4y8m2$gaR2QpZK>XU zO*{D&pzab8lYMoh|%=vF?%ir2#Cn&6E1VO*&Yfm@88QB&izl(HRSeO|P4 zT=2WFOwRE$7*8HdK>yLXYOvwGc+gMT41Gn5<-OV`@W! 
z>F(|t`cQvKs`!Ra)%BfCcUjM|UMX(vXBS{+XO|yc^Uy9^(X;)3ExLN}^Tcfqq10@% z_^Wy^#lrsX;Thwj_S-{f4BFc7i_eg@anaUx8+@n^26oi&n7fI2 ze@3h-uym-m3Fk46 z<}oJYx&OE;zjGe(NM$L;l)U~>?v7EK$M9f620j-RG~i(31S}i>r~AEXen1fmFG~D4 zpokv<2N3b}!U~%XmK*ZvzxH&HA6nFIt1?Nz6a&A(B%x7G)>4vy)VKEo@v6XbpnjUc zB=Ivz0wmuzq;Sb`1!jjgEhP!?=m6{f3nmG^k^~5Ua26RHDkGfw0fI?_yClJELeWVA zZoM3?N36o7gOg1t#b5wTX7VN?IR*nxPaqV(c98z4n*Bd4l-R5GPi?uv(ZqYo+G$XM zOd*TJ$JVO<^3|^kcsju$SE)EuApf0U@*ge?oGc8?>4a=8K6#Q1j{=gb9~!u;$I-4H zb2>p!VK6LC_ah#uEN#`VB6vC>J-gHBbV4S)gz0~&Qh4X*_ z7(uI)&bN6xNK(uqaF_3lJ7blqn=b07Gd!dy{vpNQ|Ff^rmCWe`z+tk}iQVvB?rBe- zQ^4kg<*_yNU+2EcPb^$!s!SI!?Z9vF#NxksVxd^-f>#;ah*wzZs!bPK*glt~^$VSd zM=C2W^=ku`x<6Oy!k%m<>;8+Xa|^7Wv{gST#z9!8E`{S5At)z@u$p4w3b$S^v-*Ev z;aZ$0?0)tJh2qX2NdG(2|D(X#t$s~F+MpP;0c@gYpUb0*-m8Dhi(a=4DvMs2fZ#V+ z^!_)CUd4d}Uj4z`f5CyHw&?w-)29NUEJ%KX>3>r|a`j~tZmILKr4Cu}CR_uM-&s;s z?y6r?FlYQ+&Va#@<)&VE^#cUA)VbSIPmCr6u6y5tg5T3+*NHA>50%w zK&8nqj9rml7+WU2Ft(gr81pXf#Xi}q^c5t6M$};AxJ!ohN@qOoFV@@NCby!UPgGiT zbib;?+HvI!cN&%!><$s{ZnrB#zv6GHl2nQ0lZET{0Vh9sCAdwSzva@c*59fKeTJ`} zZ_~}FqUDU})~AH=XC>w#o&l`_%)`vYvszb)UpRAa?MhLz3p5W2pMLRRKGm?i$zM_T z3VZ(-9${@}=0C{f`h-VshkW}S*j%rn`I}$647mSr!o%=MFP)Zs7}U5@mk*WK9B%)9 zd)nhw{ik+}+>%zXDbenEMt^!;?{%be@Von$YE0dcb869+J^d>lo(>ny=6el$0_)Bc zWxwvuR_~YUIgR*L&o7o-u5wvmTG9w*?{o-47w^s>Ih+Pes@qjfduF zirehq)k6#76_&qh6Ni?z*}Xxzd?=@~{ZRZ(LLqr`xH#G4XT={Y z4HBoN!LJkDGw0b=BEC!UnuL%LQAI$VmWRGe($Ke98v2%y(ATs$T|#65VrVus%&-C# zdZ6We!Tuw5E24&%$ul&xd>Aek2V{`;IXjEjKIeLhA>BXc4K<)%QktFla?9m20);Cc zSsHJyzTt6uqbiS$Pg<68d?8^P6~$Nac;)TW`CO|e;^bX(Yb_Vd$crgfk0|fIKHW{b z=65OWRnJp->*mDX3!JY1XZHBWXh?5CHM&iK?e;scjs8}avMulTr~i5}T)y>Q?}=>d zT``k`HSimG%uqQgsa`*bJ($V4&E$$h2|UfQCw}2j;$kR4THK;Ae}U+^iwhUMhyl`F zlJwa(Jy^l zJZRIhYwXx(3A?T9e>MEWsDsCJ(_Io{1HEp}5=!jy=k-OOj&*)FdQ4#W=3AFW?md;0 z^Y!kYKB9>}I|T7d2e$^7CM$1Oy>3igs$2Xmcruj%2U^tjVlkWUJR6w347%=^J)|m9 z4fHbx&ag8G9aYa=aJ$+EGt(-PM-oX=Ty)uXrDyyqjETBgFUh}#f9Q0x?)}95#Nw|; z%dhVbp1dY=xp&(?vU4^}jT2O9)UB<_wi#x#QzTU%2rl#&$JVoVt?Fu&Z0Md=)v|+c 
ztFueZea@7bSk?*qQ3XanC~5K1Y2Brbo#8#3f;Y0k$%((Vn;sv1;0%E zR@DAQT=qMz_d1XLy&?)145-=bAV+KDSZ_)=>xenx?H?bw#6l^zhe$9K3TN^PXXJ^c zBixoi0o^AR!b-GchK`G}J1&x#C0HV#!wK=rEfERAqZ5l_=#fAvPAuO$M}o*4>X8J( z)0aPl_Oy;KAw;1giF}VFR62$uWXUnYUgg$`1Y4o)L|)qom5#wm--i)aq9rs5Q4P^? z=unOd=8<1vItJ~3kohlK?Io-dM8|TZW9rrq-1T!Fu~5o|GYN|XZT-@0{jld#VHe8h zev$7#(C+yXgh!<&#@HSO4J;w;m*i3t61eb_28jQM?Ejz@UqX07z$HiEg3Nw7PA)qj zYI9l=feM-JO}{{CuC%JLoQ~vV{7k8YX)BQTzvVU-2}=xdvivz2UsZ~jw$fa3RiP}h zMfGHn1==frXV+fBDnXnqM@}YF{$~w@@wL2? z$sm4>{|NERZTu31M<+^YPLu%2Z#6~;i(M>+3yNK+M|)*V8@>@1yC6fzy?6r0UJ$)j z7PIIQHgtr&)q}loIMJF)Scx`8DIrdw^l)X;!-9i+?P7NAC9D#Jy>i4}@*V{@7HsA^ zWdA!_D9z-0KBQ8tDyChx0l82R#Y&g)s%#NAHA@WQ>uzKhUqX~ZSz`HSiQm?D7Xq;5 z>~huKg5U+;2mu(SjQ{qOF>E#sY7q7+cVLmQ-Vi%i4?DvNr(qmnC4>a>MFMIT5>|*l z+5bqZy@XYQ*jbM39M7*3zYKj~9WiHUNYXaAPb`#9bo@pdi~pc{u#?Z!(c{Jkj3u8@ zAI8Bh3);afFY?>L!ujoBVZwGWUMrRvAj(%ee04JEpOuk0VD)PJIdzqWDKG*=ah&EF z7y(j?c?5{Rubf7JERk*mNbbWX&Hg#7K9`zD#Bhe3sD!q&-j{#seYUn`4+ zYo$*IhSte)vio6gUSfr+TR6};>8QZmIr>os*TG4QU-UBb|3;$Zei=(XP^%m^Vf5 zegp&z7(EZ~oZJyGv>l|5pYRX$+Vw}^k~i5Qr0DLuXw%4W5`u4p6kU$W$Q2>_ATH5l zg!1{)_4as|SU_O3jlAE79VZgyn(K$dyeagZ3>Y^gl{i zB?yJ(h{9B|QoyuZJtr0lEeewhg=u?7WF;(ll}uJt`Ju|I4LSb{$?v)jQeo1ZL@Eom z)Po?%-}WiH_miQD$@|ekP;Li7K;lP&)4Fz(82qPpr-V@4^O^vPsV0Sh2Hyywn07@W zld5QxuQ7sp2Lq#5t0$)9m>0Hw#@mvX8&&b(++GKd;8(@xUWb2uO2_J%yjM!^QIXoHC7B3 zwr4oSFS_EdX&?4*S+QE;n$lyQk9+)ndvM}B<=qXj>CGo9(j{R!lJgHcu~#HB1xqPV zfn~IXN`C#)78{$5^)1+wo?icXWIe@N+$UHx$6OUgTWnTg9&OR3lbl9d^sVe@3+*#@ zcb}#B_FC=S+q(PJKi(g>?X8|4J^8jXBw_BRyC{JC)W8P8uhS}GozTiJd96sXSJB8X zoNn=N$`7Tk-Hxn0yJ_3eRVDjOt|;9xLaa#-eRBiyj@z$w{;lU zCD5W>@uS1zcbYjGwYsj>DNdnjT{Xd*bRHklGrW>6WOx-#PI!1DVtC%pqFKr9_peui z$+@@nC)Ndnk%6rf|8-Msb8Et2D6mPKH}<~>Y@Ogb0BX)Uzak7piwx)tM28HOg>W*v zELbO9#}e|FTPKpGN7-e3%7x_Y@C_Hwbhe8&akJgX}&~A4U|I=;_ zg^=A#O#s=X$?yvSfNc0i2-(PF5!Z6R zu|8;7O>$U6s4U1pR~VH+{vM~<<(Irx5Guj{Lx$^|ybYXu>I6bL{$ z(c|C?1!Fe(gzN}>t1_*@vL}&r54J?6%a=m!@mre|Y~nROJ4X4hJM> z;83=@GTCZ;cY)-m3wi%ZdwM}M_LtF^s~)kR&>9S_7dnI4JX@bNjmxd=%T=~(Ra^S? 
zLWN=IFBrawl%KA9Qzr7$q)q?-YzCwr9zp&Q@bHo#D~4eBFNEO)JRjYJDC{7!)fSJ4 zY_&FItOw%XK_)dCVKcSUh@>)T8XnE$L6Zowk=`dHo40FNv93LNIy_nIF9J9fQ zM)naLEL}K=Ba=T|U_F1hz*^yO0iFd7%F*_K2;Mern8xX%G_Lj}$L4K#}*4)chOsmacq}nylY`O50(1cQZpQ0)5l9c7189@0yvM zlJLl&p{h-uriKRfdDE$%o8_K)>=hDyX2|91CSQ{ZRM+7^t(#}agJ?%y7&Mnc7{ zHo88$)tLF4uo~nGDq%HR0RMNaMqe=fHN1o2szX1!pVb(Xd%@P>^xbn7@48qi>u_cYRnRlJQNz;NccSkjZ)YXI3V5sp62}98`9rI=*G#1KDOP^~im+ZkiL50a4 zx~l=nYXu>-Jdv6TX8;G2P7(&Ag#+gGg5az$CB>N1CH+exoKf|MQnUbbr2l8JS`BI7 zOeSy)`cIbN|I7i4@PBlqlt@ z5Uw3N3gCJs|D?*}=NASGz7g(T9mETGr3i$^9E>L%T$s(zEo|WD7B&iV3w^Am8C)nl zp~8euY@FH}$Sc%f$t#SO(}YiHUL~YogpPhIB2N72yn08pM~}^}T}~%ndXRiQc#|Tu zz9)Vd0@FJsCi~yY2`|M899m)FSra;czwl5hHRtJwKa=0Q^VdAa8}s#N%*fZp zZ`aQszA)?@?{?d(saDQ~`m1IIdJq{JSo-*ym~7sn?o~KqAZ)E z+=?<+;-z1~hI(dLt$eJJg1H*Z_0Usgp6lT%uel!mDm~X@=fV7-{jM3$JSQ%;f7f}Q z*53M?a>pDk;=o$T_jT?E1~@KmfDm&2jTxN*q*yP&*p)B08#{hUa?t;k!4tmy| z@;oD9j-~D3yK7&Z+!+-+cq{MaW`Cm-1-Zd9K7UEC(K-H<=DziJ-HJB64tuS*r$H|j zSMEXuPAgq57N;9Py;F=mkU zA1Jy>m#(g+=tjkHsI8jGBcmJL4TBWX4dtqZL!%@ghp4$~(0_IxcKxLYZV1QajpJ8w zRwfzS&k54fe8z@kf_6{}{-<+02x)uZG+Nq*)B}8jXj^a}QGK)x!q3~!EW8v84AFKq z(KeVWlzjh|7WSmrj}UDW#iJFaZ7@Js4zX5f4IssOL9{JT+NLIP03pH`6Ba8++fpnS zl*G}%64HK2PU2A68LHV9{vm`%R|yrUGoa6+NxVv+vNPzvcs{%SQiLkR&hlpGRX80> zTWw93(I#m-Xo8ToA;F+Mw>|!EUb0Wf-GfilayO(S;2Xr<)z00pGc7Y_7G8?QiMYF( zxEt2U@@s^tXt7TUkU-pBY3>FCtaM~IfE4Qmako6Vn|dz+__69Yve>M~I)iV9g*C3T zyeYq(L7I!9YP*-90z33StRed!5U^J_u!H{VOvx2PciA9Cj6=XKZ(u(ss3f7|?m6Fu z+zm+w?InA00*`DhB=Di%X$c%cC47SjyxIvIgx|1}S$HW{8zS&(B5*h?ZE_+^g$TR? 
zI6*%)p`w{@Fu>+4b^}PUUJ!xHlfbE^S-=xpLfI{b4y{fm(oY(Zykw|A4b3N8IivyT zfKCbnpz4vzR*!)G+rrrOmm*{#pq4jKFA%)VSm9c3b^>40SxDfJh|rE~g|m1J@&4nm zZnP{8K@+|~EMDy_4#LMmv$&vDtlT}V6bluxcr~#&9GiBM^M4VGS38TtTG{PRtQA@V zNU>fJi_4S6%XPHug-UQ?bfZH-af>;R_L(%s+gsR#EJbu|7?vvbRRJlp=4mm9>)v((@0>K*gZ_J+@* zE&VoJiSnm;r`G;P&XI`WY5vPzX?HzwE!cB)&!VZij@z`Lqorlgcz98RLoFBrO4ygk zZYptV7c-TyMEV?dVg++NB1C0c%k;l zwxQq3^+iB!-2pQH-2ttU%#(kx&JAk52+cd*W{Vrs(yB&f$)bfG7B%h zXF2&y9X)P*z*ui@q8D0^Au?!*LB9i2>=iWUmR_bnQ8p&FG+*QkI2 zjuo>TK#KK(s9c^@PNJSUHW+`0I9}xbkB&IRmkdI^mfOltIgyNdRNqH}BsnDVx z65h=-%=T5vMZLIIE9{)p==XEU!jtGv_>)o`1>Xqy zysacY7jr3n4t7B}CFMGQO3GUPl$3SEloTs5L}WTEPbk19sx-uWPxM;BIi94qIdqbC zsftSNCxKk9TSkg{vqh-~!+P5s(sOI8)3#IU;{B#^rlzL96$eZh(cyg7m}^G6#((-U z*g6BN-Kfe`Vwd~7wPnkK-IvqrKaa!* zqCCMGVdhHsKon)>fhc!6#<@0g^<7YFh!59?%l)akG3WTmIoqynw6?odP#8OJ4ad04 zV7r?8!)q-`O=~HOjj?~26Ir>!zL|y`0>|?yhW>zhHUB_Qxx)f@Gg(%dk!7_G}1r`qS*16 zN&1v1W{x@HG;*#9VJj5Hx^WW^FKe-p6JEL~T&do0uC}FVM-`JAyKY1VR$COKf;v=v zofpafpc7JM6K@p-br3$K2eCA0Pe>BN7y@98<_H4k(rAPK7jk+}RwoJ`>0Q4DY+<1k^XawINceV_!66bT$E zSVIlnRRcnBTE3GYI;t|^kEN+7ScCA_HnIyZAu1uTmK#`GaEvi+P79oL|E5ehN#Y6C zLK26>g?8Fh{QopdUC87kwP~3g(j4%Oa3e&)l_kwV`#g^n!IjL83wE>YSByK=Hkc|x`Y=0;_j#mk3SCGj;_nRX3zlh2G9?TEjvSQJclznl? zGVt>gkV=jX`x=oD$Qr?g6LfCbPj?)B;UU%C%A;=is)Sa zbPn0T+%v?IKpFGus9nK{4|IRjiCuRIs{;|A{D_aN@4{^b;PLnoyRFa(RwlPfRMLhn zi+op7cSPFNMcN?z(`-~wjx^C!B`gs{YjQ7{Qjhs{Qjg>!u}+lu~aQ@f@hq! 
z!0Z*yxEgEV8RweJ&p5x6*E7yDq*ONvS20n2ea(ir#IuW`pCum%~dtqVur%}(`hz-b_xGQh(^`Ec(dUm_<<=YS4 zqfHvuFusk|8aN5+R5`Q7jrGoqSXi{iVmMS0#%f$CW0h|x`*(~*-x<1f@}7tT_orzc z8n7wXDBUP9JAX$0ont>$b~nT>E#tobVwWGWQy~yYq{M=-6@tJ(X^6eD5J;6C z%EP{$K+-GrX(5<@EKljF{0-9oAm<;`dObsgB91=joGPbtD)6_Vh8xb`_3hx-2>d5m zzu3t>D^c3I7c#R>68rO6LSlzRi1r6KbYt2-&k5Px?|^{agZU@%6n@&oN$`!3-A$yN z=t8)n0S8MFj&$MiQzlFKDU)Twlu4hK5=XiSPT+3}utJqB_)M2fmYFWe@|x+gv9k1T z-B|m%&z{pfp3fDplHp@!6?N7$3^>tfY`BXeX0A23H`rmXxa7ad`KRS1m$do^S6K4$ zLtXwI$?v5N_t7;B^Y&b9bjPs5BaYYwTCQmx1 zf5%p7kax)XMtij){gfKGg=yDN+Tjw)NpVd&ka##a`rM<7{(Fj>CH|+|$m_AUPtffF zPZeQiMe(bN5RuELsQ%m`w@A7QY8j-cq#Eq<(yw3xOm=a4f<MAGo)s;#np3njIQOmE zYToGBr{=4?(hUaQ-7{M`^igJ7#J zt8caY@uc(JMK_*8Cigja?AUU_OcB#j_(j+X1+&Y&S?VhPXTeMf(^1+*Scw)oWf070 zE=N$l9i&$=WS3r2FhjgAKi;<0iMLLL@i&w^UYFiVMBQ}@oH3&5q57KLs{ZF*gONva0&E?1D3V3$< zZ=VmtUPVLZS6tOL;sy*sER> zf<0tYbn5!zh;Q6ai1_}UXb~SmG<<^)zZwxAByXI|EV-l|5g~rHAU+(#n)(n{LWp0D zh!4^?%Vn2d(jq~KFE_+5hm9@B_YcrAk?S&{O0SCPv{EFO2_p9DtEUI)TZ`m>=%kkv zp->KAZaMtlU;kgIr>D!^lZanutPt@b!=htskHfx6XCdrQ_M(M-IB3E*2==QH_CfM? 
z`iqEBQ@ChoEn(3q0fm zdrT*h_y0)jaq;$cad8+k)tfYxwHJrCjMNOfxpr{FSk)$2 za|1oSPVp8;*SVyJh1d4o`%88I6}MKZZ#NFRaj_HE^g>|m)TcE}daJg$9BS)hzC|ZQ z?{WUogG)x|QNrEr+HH zQkH}_H%+qG%IWgpy6>R)=#K~c2RyuIo2&4)R$jB=@01Q;nC$}3+t36tO57Oc3dF)` zOJfW&UtiuBwJKu_?=71)#;jlW?-;|p=Lb(!6_sAxi>W$#d+p+G6qWi;KmGo0zfW!k zdC%wV9q_2#AGb#RT4QGe9aTVKs=GrCVP{`fg^|0<%iaI{`F&{=1f1Hh?cmo${HHJ9 z;MZ^XNt^S}JmgfBLLEBTH<~GgeaO1#n3&@s*}_DK{Xr{eu^&#W@D0NLYQ%n!yju*j z<8()r?5*eX^|l8mmBs|NfS7z z=On^bh&1W2zoMi`RTzreth129tOu)#Doka0P`DR4|ANkk$#7Htw1keXRXn=-P>!w; zPe;x_rEjkB> zc2=x~9lvjU85`lL;b@bvWUiv3TBu3Xl@#@6i&77U^|m=A-Sn56oQvEiYt%<`+AUI0 zwNm4y)^UiidEKwo%KsF#L)U3)XhgZ`5AT2QSxPUpPPcw5=J=XKDu!G;IBrw})6L!% zOSnPG^lX>Ke{X?~T*ybGnn(pEG9L7poqdaj(Y6 z;MtkKYVEFzjV~M!1|x(%G=rmnZg8^T0I-O!_db5!rXPMt=-ZCayJgwD9YSk1$2aF- z`-C~0Ogv}fa*dy}xk}`0wtH-mDqr&)o2WEF1%$Sv3cZ;WdZLVlPF0mc=oRmakBtA) z$S1FTMCtLZKD~7eHH}JcyPtJgIW?(H%lL^wKIeJLyk+k$4=MC{(`$U++Im587nSQr zn5Eby-#<6=S;5(%m)>3NVwL#ql2NYG_F3UB81~;x&*hL!pgKJm!S*;|gto}(tLXF~ z@Cr&)0fFu7unNrP>0K)mRm!rHO8-n$S@AyO+#auklX;yZylv;U*WVVc|9$oygDdLU zA>U3l>Dv3(fKxoxPqtgHEHg0uvctTI+6oIdh4AIP2HahFAtUcUNtpS_6&aYRGA93u{uDxr?bp&7X#P==T$U0*e-1ce8X z^ZyZ5(w&Utl_7?hGn60ycLL>DVtp{6N+&Y@HxZ*8k|-T)oykin6%Mvj061IH`GAwk z2b>fFa3ZOIBXS4wqM^Q#_ZnySOe}}Txuck)3S$JPDvDxc=5~^98p)V$8cRbrA{hC) z29A*>U)D`3>ToD=_S@O+&(D7ki1qmV+B|Jd-zo&rJ-1)N-e~*LJ3MI=; z+5UU8_`2+e9nl2qjOoM0U~}TXSC^c#>+dsl^tkZ>W2XgT?v85RIoN*TAwP}32VTPW zz#SKbdtl>r{5`O+p^i~ZwA;F^Yoj)ebWg!tE^%CyUDRRM%Mso60xe8UO?;mytxMzH zwrRX0O5?EZQr)Oa#(iA$-1PMH{)9C@b-2Fw%i}Lr-#Gbw)_x;jY)P%i8wzi@ila;e zGp2_g*SqJVp*leATIvx_W}w85f^sI1DvV*ECxb_0?!c zi_x+AaUQO=b*_b`{z~hmu)J-Yn!#p0#d?cwjB~Ky&WU(jW0iCL$ZJ-|@6PkqX*sSi zqvN74yt5u5ntxh;wyrr`bNaU#hyFY_yyh2C=={ya)vD>F>0EAYU#_xUtJXDAGJ<#3Qg;(*&M(WLyrVrNctKXF(NiLV%yXurHO3}N*?8{3W#|8H57&&_s$)8 z3)BzitACiUem7kG;Rb}?>yhf06)6c(5cmCF9PF@A|5U#I>3sb!6Z)?u^e?MO5&~cv z=5mGOs_s$?Eby7d0;lC>fv!?nApMo~$WKLs=DpRs^Stw~tk?Fd8r-^GaN)tu{LOkP z-!2{YzH(zkiKg{KXKVY^;0JFzj9Ab8VRNHhMoNtpgN?C0Wi73lr#Bd`YB_m^(&fdo 
zP3L7!pWJR!pog)lV+#!pY=fp31YgbqaTB=hU^an}R&W#8RM-T8 z`7>n3==gbGJpZ(c7<|j|Ths~o{o_@!=N$JbW5=xE)pYGL`IJ_o zeo@;ld7G`8Kgxfa`6+ME(M^_-A~*<_i4?EhnlW&qp)L-J>e3A6T%mKFJEvoTyQTnLU0{YaHn2TWQZW}E$5qhhVki}i|jx~i`~FY{{MdtKjc;1$-NsJy$4nST4iO_R4B zeXm$v^x@A>bE2tnZeG8o3~VlPT$NnZK=()%-Pg)Z_fFF3 z{&>*icW=j@eUlM8qww9*rRuA;7W|GG6*4H$SM9j(i~^Ha#p54c+2H4~-6B_G>eJIb zXZNIuFMcsEe7?F}y-Nsp z*QGh>lp%c&4t7B3@mKjCf1NLVCL#S6;*J#edcOSbIMZL|xT?6+0P)RPm?lYhH%smy;`ZJ$*Z7>?lv~ZZ%?lhJ5XcK~D(%Pj$|~*_-Phpup^;tGthc?`e)tE4hJmpnZ*+KhkN|m1!c<%xPDiac?$UZI<}G z$HC)|W|>BIO1`%EoGR=FDSNp$!4gZvE#U_Yiuo$J^y_FxYYEYV7OaR$SVH8m^-oyB zK5K8b*ZZQ~7E^xqymL6{O>)y~txp{r-e8wO&rKQo?oPbct^1Fg9mWKBy$Q5T+I@LL z|Jr-C>gFxbIGm!UtT4Du3dU`l>uTFEdAaX};7x~KPqV5QoYC&!=Ocv+xN#k^jpw{u zfE~*1%41g&b`aZ@MUGn)-Id8B*FUinAxBRxn7P#0iUbC*7*;`l3 zIZI876CCp17MnbH9c0^Q7w`VI`-|pWKSL9ZhBkR0Uznq^I=sdrLuC)|8FBa8Bo!L{ zP=P%`v6j~s(7s%bbG$mCeO+oaB2QgXwyl&HM|I=YB>hXY2ROn)zZ72ntQTjz6VLYyeXVGlNO^^4waw*Eh90gS zGvChJ_hyLx@F%G=kEZ3#x!)UJOllI~1tW{g=Qt;X6Ux`4DldBck|M`R(O%>T_D|$E z?T37ivp>A=Pq#KRp9f5+Q#8$G_Yb$k@HSlE&GCJ8f@d;p*UFN&cg)mE?^g)M81E` zcqahIFp!_xcUe}$w}YtRYrLHzzvxjcgX5~^q5*sFNtV+i8i_OD~|@;4bw zHR!xz?89^a{dux~{Jo$(kMI9CB=v&V@dbmjhKx&H_-TEQ1^V+W&yM){EljW6No4Hlp<~=1epj&dq6=A?5>`1D zn^EhWpoM(025Ada=OptwCr~&LzjrMIO;qPp&2>)heCiigbAvI0@H(9`V~E!u2rsw1 zz00>TsFzE~NvP%)-gTAhzW-aRxkc)ome%CgJ2n4kS5$JW;mo)H4W8z0b>eP26H~lw ze$>?d^P%49>xyDQ#XMDGsPPE)MjA_MO^~cHY@F0!4IAgw(}c2gp@hwY(bxjXV+%mx z5XhaOVEwk1 z$ypI(tRZs%M0XimG)J9$;+=fu&Y2+a>{qM;vw1pdYN}dO6WPJn6~(9A+}*=262=fG z7&cc&2YK;>d@A1KZV10j@NReL1JfBe9{YD;_FKNVN) z?h%|6`(v_0YPxa6r|9J^XFMLd;DG8LZG7(MP(4rmEUeKxzmL&(oxUI5^)O+_m;lc5 zYkAF#&f~LH7Yy|_0}%BUH^>?&(Rzc>Ng`8%X3Myb zKEHx}^alj@O0&-=Ss(6HeDA>Bcjv$U@=A=`wA6SHwr!x}2Dn!$)LEG(gv_-{DEUi4!eI@0|3N1(U6dD&j7N}J zHOQn=4eYq<%n4_wgG@v_=WBFtbS<%KyByWozCIxZEJnzg=P|&c6O*Zdm+nyNH;Z*GKNXv`K%aQRYnj zZ(n!vvbTR%`B_w;`F`bvMK$kjNy`bpxdb0X+*+f(=Gf4cArHp{<@qmvH+*{6#?Fb2 zKGi$z) zY4H;d3GfZt2I!~#R~vqU8FrKPKXh!wx(y)wl#HJs@g8VX761>O#H_mj!p|z=Cp9?% 
zd-A?kgf-A6P=!qZ(4NYaP@%JbA)zfD@300FbW1M9Ejeg>fUN(f)0j1yAf}WpQ-V1T zO($F)g_A0T6SyVcBB^GRF6T+&=cboJ{DcgH&dy^vh#u)C1kp`hXh9S<1o%d{J&RkA zbupoQL{;(;)I2C0&W@%KW}u@a*1ZFPsALcYiQ~xgFP+4!-i|=DiXcj6r7fVs6LyaH zcvgo;*07ReR+>at+!|2(e8QTv22H&!YxqIvDgj*u8lT+7sxhm_BXpG=x>`ViC%Ak< zC&C=!;KSf-f zM&V>|8rsEJ5_hbN1R<$pBn64jB(O@%x+Neats;_=XE4DAeYP*FHIT!LKEy0lEee%0 zp}zba`7#w<`i@m%#FV|}QVvc9#|HRDNJZl2WL;S(Un-k?sUUsCD<;f9m&{|` zND!Q|!>Is=F0u1JtG6RKtsfwF^zL==IXnk)({`A z510E>bz{!)k#n|P+h}cftDrD;-WrZ^m%(;5cPlPQO=(=`vVwaJWzCSd_IJ8@p8GWZrqj0Xn#bC@ z-w%E<&muR;eEyzV*YqQ5D!iWOJbSm#n_lDl*47J(yQo}0!YsuuIjVc)bmyYvmkCaz zw|j(UdUm~%mu%Apv%TvT1$X>q^%Ejy$QZ|J1~TqY%}}#4W)SHN{uj(3(#NDzf3IEK zjiOS=;JlI^k)PbQH7%TX`QuTK-!tb!2YhjN2M3}wTBt2l3SAMI|ASn-hDmSL7MDY9 zeayG;JKyEy;$^P~@8U%C`r8TG$CzjW4nmi z&h0(5Z+RCK$3J_rXN2w*&FOXPXTBbIW17XEcgIf_M_w@fvctTI+6oJ|v<1s~4Y<4V z-sa9fbNZg)^yI6xU(~zsbyf6^p_cuWvBbe1L*d&03|+pIgww=TLO6x&gihKx+(6fB z3o&)uSXxYlQwMy5F!g_!L&X^i%#gK-FayF=NdvT{N-0qaV6fFI<0#DC?b$6M<7Yj)V!_E7i211657tTO^)uk)iOj&aH-o zMLN<@6-lg2R;kOu;)PbZ#R00hGVJL;Pvr?HSe`Mv~%g06z;ry;{}&=-4<#(uHYb@A~jZ0T-sv3$oaBJ~Hpq zbFIy@1~=4JveP$-O1+e?5qBeC@0G53F0OZz{Fht3FPPuumQ$N?I9QDxG738IIb+2` z(;c$@OFT45UIeu(8k!_5fWJ00iA=cZ&u=#_Sv0$DUba!zr+Ys0F3*iPead6S)5dU$ zyBF`62mE@4kEEGb0Xek*PIc#m3*fYYc1x^59lQ?Jp!eD`Oof#p_=n@cw~p}e2T*%A z75+%@iVJHaIQ8TK)fi&WDwli3g{=pCO08w91?~6~{+VZ7v=mo&*ZQLsyM3|I`5E^d zor7i{w$g%UTy_-o-wTdk_DrmZwDpBE}EVj{B2lnvG*8W=LoIC*$L_vM-~fbDC*QNo1vHx?6CS_!PwNI zqs|j@k6ND!ir&&e4ff$#E4`q%Pb_l;*^LJ~Nh`4K!&EDv%u)Hw5j86Z)cNo?()Py1 zPINDERV^zA+UB{lYReQoP~y2tiDwu7{wU1KLHKw?=KsaBawNS_Lx*ZbvvMS}y#E)n zaw4drUN(o)_J(qi*6)%TZ(H?valXKLqla5bBS4juIEOF@$<$0IX!@ix;o9PwGPyO% zbXG3Y3G!2s59H5>Ik*V<=th2ucy`&Vauq^8koK7ttF%nsjgYS@$VZJYuysD)Kv)6A zmz|~!oz$VJMLstv<%kDwH7=LWFr$ zLYP53!Z5ivqCz$*1lN9p?!T9CsfC(yjM`Kx1eY$nO1N}Oln7*fBD6wO2-3c5&MGZa z&_+~P6)L2j}QLfY37d{tuxkLcOPy(y^)~072Ui9<16j zMGb^{GDJNv!$FQ*9k5$Y++4)s9V z60-i2PFg1KMyOX6)T7?KLwxzRk=4PG736>O4wF89h}1uzn|B$)A!u7VgY14xk*MAln0a@9I9Y- 
zIPF-aW%6zWhgE??GSS@)I(oqh3MW{tz!bMp#cR2WS27)lORI|2QPL!(rJ)igY0=Fu z+s&^)TjdF?+A;+XM0_$LzHV4w+{INs5M~gk6DIdYH@{3bzXDCwY#B9Wa%)6lJd)ub*jvg9^b(iW!ar?OFx#{WY{RwM+>TrGUm&aeMzH##Vto=s5 z*pgb2HxyoDqf7%c7RHBe>>UJrZ$VTdljZ;(_d9(v7^bVL(OgnJt-!7hWiMqi3&=D&zgQf|q8OZHZ%nVXY{w;3?k(&B{zziz~b-x=H z)(anyle6f6_N0@g=lA;;mHKb%;o9NU6zfmtt{+L6S-)RvY;f=ZZ_t+N;tE(DEwcX` zxp=j?cv+3fZoZ4#;*mz1tp9E%)`rjM-YOgn9_pRJ3FdO$Q}|Kl5=YD7TvEch3)5krS(<4W#6LwMM>bS6}SVuhz-!wOy0kJ8mnrVT>3vV_aY^meW z^s|l9j@^oieOlB@AGYp|Mc}=#%#(kx&JAk52+cd*W{aDOMnQeeCT^Z#wy#ny>czEM zVdtE->T=8Ng()e<_+*x=X*p_L9?U)P#I_`2;+P&Gme$j~Th@MF@9UJB;Mxkq`r~1v zEFUPMsm^F3Y=~>h7`Z!ER8#t>{g-O0`OIpL^UqleIs`}Xw%slLaqR8v!eSq@m{ou5 zdPI2C@4BR+v z(%T$5NxM`din8vv_bM;+NYlsimhs*t`x-sYY$T{1tZM3$# zRZti^Zw<$|%V4{jyA_wDrZlc|S;4)AvSvu_#o;X@HN$SM9o#ThwF%bTKu@nzyv5OV zF6m)?w0!seQr&;Wt(EHAjl&pTG1GgT|LC>1Sx>XB-8@%E8MjTgxtD*v-T|lJ)k&7Q z%Mwjv_BDPL=u_}$z}I$~!~^UP1P`!J`q3os|DVnXBz9Ab02jp-H-bn9+Q9RyHV8ASu-gnXv&v+#n0B3 zzPQ*v@#dl5#(S{Otp-3i;ZWThijD`6{qJ4V1@r#3{4X(HAd z%wPZ@n4N<7vL&4#UsCz;C54DDk<`Xu`q;wb+)>O?h1fDxafGLaqfNq+xr&Nvp_$vS zq^LJrlzK3%w@o7%KgQiywbQNNiaEX}k%}SL4vrf|`vN^YZjra=)0NvES_XzE_x4W= zX*ca#PH^DzqSp;Hr=JK(@c(=zIH`7V$*u(lbRB!NTB)YcRCD9eCr<-a?v3kO^3^-w z(S+pOqNF2117aGzX>s+0E_6|E!@L&33K$_CTL$hU#Bagc#7c=RaG;kITUc6HOQzIX zwYoz_>vn$Fn`c;|;u~dn5WNLPu|W)J0}mP7AXXY1h&%u=p8u4eR-@!}!yzTVXKuN0 z_haC8@0VXk4X6vv*)h)?dV#Rv;=&FFT?L0uI$;?++pIFe5)G{Xg0L{&BzVy34`(to zA|C(oefassCj;em8g1Xz)G(O$*n8uLN$b|~B8Fzy@n7duZr{`lnt1x%(Z_F#nx47) z?)NOS#HeYyuy39~0=FmK(tfa?nrBaqk zB%rKIS5{>i!Uh8FssJ~YNFZ>tnt}C!}g=6j0Xl zE#cNEjbb9AtQ@n1Ln&z#h%F<72qTEc7KS|&+A1rSMxoCR{;g>gkt6);8qQ7cjVt-G zZcE)OJGFl+E6CseQu^lMum%*wmL%2W*sj4KTr~w0vFwTiE0ty)Txv^?&2a1l5VN{gC zr4if7hV5Ygn@H||bO^$*2ci@odx{T)oivT`YC2&VJR7lH<=KwB_qBqGDF`h7-;fDR zH+x$w;RcC6Ya#c0U)%)HrZAbD|4NsRVhAs&UW%n&N}y~YIscMQS%&z6U{E$NXodB` z-Fu2V5n&JvRt*LPiUuEMRFuJ`5e!xx1_c}DwAO_9bj-pKtq}||!=NBohb&+fmcg?T z3|1Zn$?8gh31&b&H%k#9^6jTwsgl@az7H1`ddX&vB!xx!KAGhE1j^1zVpWzQY9Pdu 
z9pZs|&mrf(AjGRW;(?-bR}*e6t|)^`BgB&p;(_?{H3;z$;>qMLR}id2$FK^^;MoZA zDvx+%!8UFJ=w1qYMVf$yL_|$6RF)l*b^n15$yG&5OB*B%GB_Gi*m8;YthdDvA}`Gu z?U=w@S$N#JUiRnUq!ia9*G@Gy+G`kTrxd2GI=~|^$U=LxT6cXB^(i-DX8Y+i zTg`vdV&*Tke#+R!-ky!1Qm1S%3ErkEFhaOLs}V#wTf_)8B{$oYZ&RhzK>hVbD1ZGs z`>%69ssR@5@t%GaxP1zoBY)q4f8^BGOi!2mg9$p|($6 z{!QNh;t47UGK;0V{}nMn)oS|)@&BFLzDsQr&RBnI|04d`rkGOqFUJCW4CA5m_wa!J zEhx2p0!1UFyZ;qYv}!!~*DDH5r|l;1ope4bir3?m`c+r^-UdB?I6V4sCTI`u-8VC> z8IRgcx45vP|BccUM&+7LlV29*dY<(=n%Z`r@sbQ=E4)$n)r=t1rv|5M8b)9>yZ70~tVBSN>^99AZcYIEfw=X+EvF`rA5aWIUHlCmF$3;6h+~>vf-M{_u(-G z_R4M2-~Wmj0D&Mg2nvMVF8%$lh_DC*s{n%3SPhXS<~6}0;vmStAt+W;Vl|cSpoJ#Z zicl2Mo#;4`D%|Gc0RyDnxsg>`hLa1TJK5147-2WmFbJxGQN*CH?vwU57pFT=bPrko zA+9KcOC!3I4c);8xc3d=&M3{tTHPn@ZLZ)bzP}HvuneA!=&lNMN8YFlHp~O*$Oa7U zyz;+M$87)&kB6jx|3KuFgR*^+vV9=!;k~4Lqv}&`xTD z^~2lk&YdM}(2|BlL~Q`Cp8AguD0}v_3kTaLeD*Yx|19A({HX=@4DLo`SC4ugiOPeeLX=cc+6}LBU$nRHvzl zb)c*FVV)PvfZ}+Go8bbP|E4p8C=!aAL8CHeptSYs8~yEO5V;A~sn37;w9u$!@?`gq z*GFF&WLL6l*4W(R+kYuTU$3^JV+e>#^>L^nPT7HcocV2EdHJ{t-^X|HLrq^i)TFf} z%+OTq@lz)MR^?#3gs-1w^TW&)ewfK1!pv6c>!%F9eUWoVDMt-L%t^}gry3$pZ?={3 z)0@32`1Gba|LM(=@fRkac|5LrVSLc*@g;kfdgLi6l-f9OvTi%f1lOJ?t|^b4|0%91 zQ|JaVteB<@Vf$~@6uNsme_VZJQemBlhmWt{n|m*-+nm}H&5O^!eI9sg*?X^qx$Ak| z<}PjbYu`4a0dk?B0W#`i`)MaOtekFDc>ZBKPmipXV~=tmYBw|<4v9WN|D(v}d6DdY zY$^FgSz_~q@vD+HPf1V8W?#>23vbF!c#cg}nxF!2%Gy!hlr`B*z9~CV#*Up@LC0QN z_NMHMpEC`Qu3z8%)UR9J9ee2K7G(t~Ld;%Uqn{;6yI6cfyiE2#(n-tY+;EbwxU{1E zy{lIL?b1Sn@BNf|CI*L#ihsov-8g(|wZY7K@0{vQxmQ@+XL$cZ0bxx`10rN2EO}akp+HSKETM`ErNURp_fL_yRTP(GKHPIb+^dnSp34+J(7C2c=Ngjv zdSQL>0Fp`8|B3SnVm@vKnNOhU^~r>5i)+f{)`6FH@$iB|wshxsD(XS; za30G44P&^Ltiqiy9yUPSM=1ZtBrf!O$p6k4#}Y8Z6O{iGe;ra}N32Bt4>Zk3`9E<@ z<##|YE8fQMeDS~nJKGh>m4HLgv~UIC+TxlrrAkm9P&Ro$K=XGs2=(dqPNt}hE{7#t4ufMCk@NrPq-An$ zln1Oz9*}za0V}*OL=G;S(+`#Spd0Fg9Vsm$zN*z)Qp*8B+>g6h#bt^f=;l|3H@{$p z&us`Zh|>wv-iU5~*=~M8(=YQFHDz*ZM0~O#K2ZPbPeOe@^+X;RjtfZsH~=<+7T9j4`4nfH^!BHv!o``45;t^%3}5--cA@n_~FmPQp!4^Upw2%0DRJ2`v?U8$}O%?Urr_dk$ptG2itYU^XZ 
zMJEKG2Ph}kp3QgdSblWrjYpT?R)lLe7V8P7k1VDfELPYNe2pJjvhc{VBb$gUo2eba zWcR9wOQ&(}DCDR>R57RY1FzUlwoTTQaoZ$Enzl)}$9$1~=(X~umsYuXA8r&4SiGNS ze`Hyrb<20tlIDKBoNX!XKh{;jVfxAqY6vws+QVH!Mn5*o?#OeCn!VX zstToXb%jVr_5c}MAWRwyh$LY9mo={PmU|igz7{aEVNk!B_Y(2~XPe}y!n?FjZ}i&+ zvMvylwJxU?tE}{1ZB`jsiMG{$KvuXq+tVoP%J-*-dD*`s9)3AG-SVSTW{;TE;?(?2 zH)|;`oo{6D=)$X>U1v}0MELc|GQUpr;@l31&K__&x&4oqA-nPhd;2ne|RWK8Le^@hIGx*Cuh5j5|K$7p)>QB0|1Od-jyH)aQ0W!+l&g!G80$wW!z6Ho$O zwYstDO7GT){A5CYf~ZoPoPQ}ERS@|#y*I8zTJpp7gixVFzJDsNC%rqPC|$`YO(yj4 zXaXB#od`0TpoCu4$xk3_-AAml(t9-`zsi#zc{&%wlzLpk0^%_R#c0Vfnu>GESl#+@ zgq{fJWQB8}YXdb_UFjnQ!a14Y9GIXX`ThqA73x-%PvqgD7xZlOkZ@^nJ?Y&U;haox z4oKR-m5?4K@??<66P&pXkFd&0@6`zBDvxvI4LNQBh$+S$kOjOg%D1pr!worZ0Trxn zW3vAd0idh^5Oi&-PI@-Y4LN;mK>#Q-00a{>Bl{o4fd!ovC7%_kw=SS(^IXEE#r33j zX9R#U0YKO%Taxwf2moaSfZ)-sjuIYCCo8>IBLJ*C08$gxV1YK|{7*Uy(8p<1c`8b=m))f=;iR<63{DuiRRKfR9k6A^FWLAUN+VW;Rtem@tF-zl0#^mA>L)F~=_z%-q6 zM>VG=G<7*rT1BHoT169N+$!2f+E&r%5q&?Fc8@p|Ve#POpWNv){EPixp4d4wqS2AN zkGscMJ<_Sg@SefV(Tt@tsL-6kfZF8=*6K|3zm6>Sjq80lLvEZUkyY``>N^ky^c*WzC|cisrc`Ef0Tn z>k*V(IwJOa!1u&;&@Ac@?l2aVrMfp10C#zT+`ALk^g>|m)N&K!R>{e|o%!y)lOJRH z;4#Kx0%3$EV(p>~t_|Zub_)AN)A{iwl^9T0#@9;EmHa&#*$pH+UCr3rZJmrbrjAhm74iwt{Xw zUeJXbsb$Eyy1d8A9N+Gf8cfSIZ22M-y!uH~dw3s$vrt^v?(12FW$jBkY*= zzX%(k$btuJIi4S>AP6A%;{`<|EVq*kIT3WDUhYPntnL#iJCet$ zEJK7r=|0({`*3&x_vVuCe~ZHlgKMMeKBnqEfuugsgkOtG%HY$82W7*9aK0I>M~IJj zP-Z*`fx=g${t2;2wXg_Tx+>T)$9zG~ZMwPq92b$1TT&G@P?LVp$uxE#DJ)HDieV>2 zkS781K-qEoS(Rnj5E10b4)VYV6UhEYaUx-GZ3KC2kOz`Z3?=+pTv7&~Mvx~P$OG{w zk@Zgq@?-{iAZ!3R|By~t2G2%GzbYjCM)BKdBB9@|57rs)x10Qch%9v}ED}80XVREy zftb6aS~Cs?jW{zn!CdZ^6uiSO@)G|=#*4xZyTj*-j{2pW?&*K2t}J8y_&X%#B-euuVcY&g8rCHNG2P zCER$ss2j&byRF;0HfqyI+>gjy-@}~{)yy@SgzlJz>vD})J7UY4Z5!HwE8DbD;HHWBTyM;K`HtI!Ez!w#0RwO4ffgpw-!T^7H^a zV6^67Q9`A!@|Di!E1g9s9Yv?~_I2Ld(=nH;9M>8ynxM5gXN0GQqfNq+xr&Nvp({kR z4wtpo92M!JhJF8C-4A!k%pa8fyF=K7TLzc%-e&uaYi%(ndC$EWuV0OhsB>Xg%#`@~ ziw>yT9y2z5%DtlAUUO{d%8&~$uRs4CSU)lS>&p&#iT9kQ1&^4bv|9mNsX4+Ew4Th@ zI-0L_cU26r7{ 
z6LcEplF4z^$iZ;mrs%D)jr8qZvevtobb7nJ$(et2;i>B#+kSaGWPU>3^ny?C+xN6Q z@N#qGD^vDNi;ZrYK677#Y@>Q9IpI5p*VYS)Q&O%UVU}W-{QgtjkjKZe-o>BEO8J!; ze>dQ}QIt&^Ebg+~AUe52$oG%vnDrmFHu2_NsYOzkT3++t1b$77N{5y+)aPo1MP@dhgj=xBR0Mc6!*HUl9FV zTYXIpY(8(;J&@Z+oH}NZ{a;k-aO%^`WuW6AT<4}0qmK85Z}pzOe3t!JQ%B%|md{EC z9qS%3z$kmkfOkWKXB=zO;_^4I-26FduCC4FJtMyFIkEcD3|Dn$tz{#7aWAG0)hTPu zooAKw;1_3kWV&YF>>{f}zPX2UYW8Y_JwEOX)2=xq`2w%!hYxc+e9R>0|JI>|kHLe6 z_p`Of^=-_-)(gVMo3Y%fT<%q0?sP79urF7sw=vg``=s{3>;0YDx4xRayrkCB)mp_` zC4tk%Vr`ycZ57%mK}Y%3Fq8HwEsQ-bo)69`$TD78uzwM;~i@?4(l$}jk;vq z$3@RgPfzbpSo2ed>wCXE{$llwliz3UH}b`n)QY^J@ERLs8kn&#K6GR6uyBKg`A^jM z^w}}BS)|>oFg2V0sw#^eO;#Ohc01_F4uz=ke>f95)w{|`9y825UPCXlgMQz*B!%Vw zg)QyK9iq2#??Qvi)03L^c(LQl!YL6}V>)i}${HusTrKn{}B3{ z1I_mjJh+o5DGD8UozQ_Vd^l1%`qU>wtANXACJ$K>6g=h^2YSoD4|jbE8js;?ypgYQ zS6t&c!Gy;3C>rw*CPLBxkv9rO<_{N&z@jE?C$fz;c`=Y zn2?urn2@xzNw5UtdbQQC$ts>T*>U8L4Ucimj2740^>IRt9 z+UD&W1fvm(^0%E@()_L6nTuaH-RrYty-r3>yLma`p&xsyL)BN)!Tko16&GKCBU%x% z)d^W{JpvnC84GZh3~siYRnM%n%w8>k@6Wsde+r%C^7ekOWZj5+PbQ_;-XCn>{m-8f zfulF1gb#Rq@7`1^-9~$N@9mWMcO_Db)R)#G4pzIqr1S}^ZMSXmiJ*yJA3m|Yr~~=2 z%4<(Gc}sBQa>Cc}2fnP;p{ylXHX2A)?MZ8ppLq%X5+eR1A#6@c{QR1s>91baIv?8T zd%IR1<3BY&<^TJKrn=G9!}oMf{XIKM7Q;Pdo7}@4PrivW9G9#gYnFd!ywS<_XR6RK z+}hit4KV(pu8#Uh7#kDDKh_`RsH|z_Jt1HKNegn<{lC^?$elcr!iJaMmN5ituFSwjpMaNIf`OTgQgNLQ< z>84v~FnGn+gxFf0|Gqi4(xJWK$4~teqD_M7VI02n|*e5yE#8AueAMalGCP@t%NG z?`XRZ%|g3%Ft(jG$Yok#%4Yy22)?Y$4KteV^ zMz&g}jBMe=2_aR->%4&kB9Ce+YN85k=X2OPve-uP%{H=pY(E)w9P%>2e`m_E=bCrg ze%d$Dph?b^cOQ-~Olfp^a>t1i6Lh1i=6JLoQoWKaW|zxi_Wr<|UzernEvc2`kw0Wx zV0h^DDfH~BKX<#w0o!2`w#y}KZD_W0uM)QUge|AG0JzI#aD62OvRG0eiwWFx8C*&7 z>vpk4wl9^CN9__N&~y_xbVsu2&f}Zz(sI(ZYi506WQI-j8Kq?-N0GAr~$^ zz81PvpR$_o4sR_K>v;g&`ON%(1CGAMsx8XVRbtP$`z5+cmJx%~T}Nmv*fMikcIuHT zv5P&P7Z}fYyFWB$XrtHtJA9p&QqyPF9^FA1q;PX7D%?6fSe*6VdD`a3qu)+Wh<CRx+~@u=XIQ}Gzs4NG+!~`CIRhKAEDQeGdmfgDz6Crx82ir5zxJf? 
zlV492jOrY}X}X91mMzCUw)DOcP&3+i(AfzSjwXW?3Zh6Q9{qwgxEK?5m7%|@K zk1eiOpIg;gxo7c{Wqqm+%*uIY`SIn9gL9)+rYvZ;A)&#NCn>Tc_(w9-C+1D6AD24W zE9UDP>niIXhMilX2^C$BTz5D=6NN}nvpOD&{Rrxs9MnHn99Cs_JlL>*s5*Y)j)(1# z_<;{T)t;Q1KYe@GTSuR^Z{BlXu-1Z%@K^I^_%+^U@Mwom!2MHoD%CKy9|m1-NjuMS zO?#~xV-H9B{IMzeyN!9DdU+YOkM8XdxFp9YDu$x8*7wf_;K9X#FS%YSaCYg@v?9P+ z<;PD9JgwG|b92ML42zDB-Vk%&rDf=&JC>`iJl%gIZ}qL&XWIYvZ>@XrN3O#6mAN%q3o(gnJEm+3F`!nLg{7jBkhjc_SiBb=s<-{*AY?tl$OYo~Q|-JE!Lu}e%Oq;>kU2TTLTKXg4-jwXza zIE*=!NjbV6Y{=pJZXq|4fpF{^LKq{NzewhqZ?Iy}cPynx z$7sF@X}$sLbz=!@c`6o~Z#>g7>7R@)l%s1e=mzn>B<{O{p8?fDXiNq|LZ@ijs0QW!> z0+-w5SFGppL;LP$pQrFYGT(skK_>r2^GzW04KN>iM*S04HuFfYE&8*?a@MC%zNB zXub(yz5(MueF}hp)qw(E=oDPvLytv}2~Ldt)*UA6!CNtBS`*gXPQPN6 zAiL9#_8X-_-<$4E+0ulQRuRHzzX@T#(W?eFtL6*^@MVU6~i zfOFrZxX;}#6*s#QMEgyNxo`BI2+@6kdH;pGCn{D5viC%^-&E);P~EB4G~iSi=OSII z(SGC4e(O%PrRiSOBy`b!8tf%cnp z7P6vq-=v$}l~V*Wv)Qo<4h1$locl(p&~x9qQ`YoCam|>JMf;5p`>i|GispJfh;T*w zjW7GHJ7q(&y^%rKqW#93{U$x%xn)VECZjAfx>zko_h-g1-yz-;^R_vG4oi+&87>zDa@SF#b2%Z$i#}lWvOlJCOkAZi=iL z1nswq*l*INNA3m!80|M6?KkO>TORZNDcWy*+i%jY$3vJ{FWm&ue&fl0=i^>9=fK`_$-~^xm#|Ush z_I*=A76mxI1vudQVFBTb0-O*695DV^Kp3L{CzJpOoIedJku$sSKmkq(0S+*KK20z) zn;om*P++q|0ge&ifb16&LKX!$z63bn`d={NiUJ&;0vxdYnn~EA0LQBU2XMbPCUCjE zepYRP0$ha!I5oH638gP2`In;YSm!`wLBjS1vuUwKBZes09=iE z|4klncIkoR-;^2uRx}C6P@^*maPFqaszFeItEd160j(KN0HXlMvj7KpwV3%gD8LCJ zzyae*BS;9N04In5r%oAGP9>1B+22`rP~q`mzAcZ0Pij^#?q8QFTT&LCj^FH1`LJ2@ z;M+akE8Oho3UC^fVHJG;bZEY@)%?Z%clI%;m6a3v>2rujx>FYIFD7Z%|2;V&s;m7LQ_rUQOPl)oNO9QNJQOC(d1}?JH{byNafrR!VYpmuUOv z&X$Y*Qq_xGuB)RH>uENqXTrlI2aQ%&zo}mHvx-p-%SaeC!qj?=x9wbUh`P}x)#|=U zo2_fOY1=I<@O*JCV(;uD0t;|}TOZ#)1>C~Fe@fSakb)Svkd*`lxC@_#-OZbJ*2AO0 zfuYf#w;oz@Z<2@h0BN(Yzy46OEUM<~8Ft72{wi7!ld79DHQ#QR-`(BUDmyfx?jIbm z2e!oFaG=5X7Xa9B7Xgd{9KQk_&^6X3bWwm4LVyFtCiwp84Bd!62gYXP&d3u)fCI>; z`2K0c+Ebkl-OLNPv6`RvPwCsJK(?k8St3w?<6D3OzGe#vUlibk5a57uEqwpofCjP=MoAfCIP^}3*eda1}nv`K{*8L~{vHrdj#s>v@ySaOND^~uo=iA`SH)iG=5N^Y| z|HL6Y&S!$}sL_D|6N4!)SFJ`OYPAYANfas;$7LoxOG=8#NXk!2+L)fCHQPQZGbyRg 
z1~%mQwVun@EsI?}+$)K4PZN1)x$Dqc5XI4g)-hU;FW*{FvU<6+pf>@%(<=+FyaDI7 ztolzbXTPBEK$>)85fv*Voh~7rEg_vnNXN<}y8T^Crl<|tSTS*&OA z&017G1?!Dd&n^FhM^AyZtv5;EP~<-S@)kfq3!PO;y=NgegCOKhGG2LIM1P8 zF+aIs_`gx!0Jj}${x=7=V$~me5y6>nWab-Wbsaa5~Q8AGxH6|_S7L{apoIe z${TQX@grPu<{O{N8?fz_MA)M8#;fuMaNTMVxZLhOtG2+IZxvSFfN*an|3&3ZAmt4( z_jy7B8E3u;sJsE{e*Fk)RNf}1=1dn>}5yD7425S;l|(V1@mc;E^G7?n4kl{dia!OTBJn8@V6=(`EvyU{HV2;{IEB#_Z}6UcX? zkKh6IBnN^TeK$dUH!1MR%>55}z}ck-`fkejZqiM0YF!fG+)a^HgP`wLPT%eO_iuMZ zlwRFP_!CdTPt8`qpTqP|w)ZH#_NMet-Syy4Z~CV`C*V&i{gYQJr8lG+{nJP{_~T3e z6fy?>B+)-j`%LLgZ%+R-w+;N+M*p2@{vBl)*HnnLM4mi%vdGlbH63-9t!dSBq*^538ElfGsK zrLVPs{;7HZrElOz|71Ry(zkG;e`?y4(zj|&|J3#^rElAm{;7Ly_+v%?bQQJHf0#~nK0(| z@)fJ~*u8x8+ZewM2+tZq2&3O7l-~xNXQvR(=(h>!w*l)pEeLD$+XVL80CzMq|4JTi zSyDo0dFZz(>9+yyd93qKoae^p6Q;CY0f6TRlH~*aHh%p!pt~TG(8aWwpS0OuQYNmF zIE3wJLvxWeVT^v8AbuM_Ud+^gOv|5Dmsp6XrP33P7e3+%VzG;_I9x1V>MK_3Y9Sse z&aIZ#>{9lE!YcEYpx?&$Z9sNO0SRC9+xYg|fbX(CgfIGSLilaKc=;j182vV({5Iem zQ?o?Q?7{>6HYNNvz`SxM!OU!Stlp9Wn;rUXjNb-iSKTFK(Qo6+Zv(DtS`)75xAEz> z0o&N6gf04Qy!vebH|{lo%WdVeY76w+D(trb;dM;@i+-Cxej8w3f0P6=`fURGZGbwy z4nZwXw~~n$^btHhVXD*~oLzdL-=?hJ1_8cVh4AKXimVz0{kC%YZD6x_ zOOA|JNREspm-@Fb??2)^Hy-D?(N{_#e*ZX2;uq(+@jcItzSR$mw>2b;ah@Ab^W3rhD0)IZ?&>=LO*>D@kn=Sq#bROFy`yH$)iEud}gTT zXlB(Q`Fx(v{>%A=!-Ik&TH9RkO+7TSa8EDq#6&B1)twZztg?qAYY)J*9pQIwKpNPH(8n4HzdT5XPvz38lRO=ac#+a%LAEsJ$tn zy#eMwClk!fX2&Wx6xi%gdtUAfxssp!No+FPIS2sJ#h3)(wEC z&MOr-yYxWqOW{MLz^*49g%yxlb0+$ibV ztj&X}nS|_DQ#X#bNpd_{@XjFY{=Tf&2To)qea$>@Gw`!{tbG$|{{>ID8z~BsH2v2~ zn*JRrz`v_fh-S~Fh9O1GPFuqfrFh(dd zK;)vZGwse?{JQC0pC#*cGIH9@%Lxzt*i#+u-_&$)hgWVyVshP-UVx#V&bdO!qU*+| z>qalZ!1sPD!WUgPzFjv*c)}DI=fbl9(xez&Hz8a%Dd)UzC30pL9_YGN&~>9XI|%28 z%>55$vttz;3T$@hx)r%@j_`yjkbQK91Tea8e7J6o@PsLFePT?wqU*+&>*fegm;&3U zp@c2EZoIi}jzxDFpZ!JPqU$!}?f%f1p^aYm@9=eAN==_xdvph7_`Gnq+_XZjn>1&5 z!Q{W_x(VRAIl>dBuu8m)C4r2tn?SCcBRpXWs9!VppV4&_)OC{r&!0$QTOM$B>4C1B zGOn9+Q!IE)cyl*JRt6zPG#{WjwjYro_dgS)cRuaGHy7BG0(OnPN1m9~D z#^}27HXYO2yi4IbuAkFsj6(a>VZxzkZ1HTyZ2{BqTn 
zpA}!Xud{KP&ThkbhOy@@y1AQpnwXe;k7{sax3TxrgO8V7KKyyg4s$7B4@9o&8a1od?7Up4dnSF6Rl>R%7Z-KY{f`nzaME2DJL zN&kVZ59pX=wlM3y|G3J6zoO=~6!$Y(v~6~+3*nzaC%L@6-z!-+;@*=<>9zL<8+iZo zXGEa#XY-8lMsIXFl;OuL62xe{@jKm(uKs}VCu>3&jW;2TH@f@-&Y#y1&S<;|WxUbl z2(bRbs{c68K62kyukb6IINgm>p^Y~w?*EwcPc+^->kcYBKFqh}k?={)>c#!*GG$B3 zqSNu49V#C-YaV>N$9pA=H|d`E^#NH)xO<{v#UC1P_YV$$`=_Ge9N_|%kMG?GU^L$N zHQsb6BWe?&i^iJ}#v3qJQ74SicoW2U1IVgl2xLYAK;z9fD0-)6k9`)_55yHR-hiyy zIYJhVH|48pKgkBSQj)OI(y@_7eZm)wHzABSV5~WpFh=7|DB}${YccmfigRYSozQqw z!gvGBl?IV;W;Q!k!GXqGf4!k*Syau}Gw%2w`0_RWY;C)PA5RwS&Yf_`^^RJ95w$&a zU>Z~#o|46QiDWUhrWfPN2MAd--uN=!fNK?||AEFEpT--o)n)uwG~ReM-T-b@=KU*f zbDvdPl;f8Fy7;v*v8&_s3u@|{{}Q+SEh7e}yN=LUuw~}7?9?MwVi$WnFEHlccmu+E zO#X|;n?S}JVAf~eze3|pK;sQiSN}+o2zde)`;I?8YO2(urgYB(fExys3Y=Yfpz)@x z@dmt&ZW7+yO_5cDpz&5w;|&0t*b=~Kyzy+j0bbM91TPwILKttrxW*U47>zfcj5qGt zF`Uhti3wypJLc!?m}|XNp^R!V{SR)3U$F|1-QoWwD!@12MG^Zh=1+3cSU)u1*lPab z{yX~^@Nc~VVe^J0iqU!#%6bFN7K;gIwBCfY-hg$Te+g@O8Wy_-tkB>wwB8u&4RF^R zT`F#N1y~{6_@-&mo2In&2Dt0rBDlGGqGAOgdr!nUZ)DCJ0B+cx07mPLU+WF%He%I( z93|P!p}SLmhrZ6VjNCv(t(0CBiAzg*mXs88DJefGX=7%R)@=Kv%%r3`aoqTQ_MrA7 zliXy={es9t!@ZKNP*B7}cS7maO`Z!&s6w`vrvuN+GY3 zK|UveoFajImOx&`hAc^(E(MUwCmaV~=k~KI(vIeJ~W41b9z8B%-@-4!*-n@8q{bJ(g#fg>T3O)`J%6bFN zEuNIfnO%6G^`?aN2AEsA5zNeH$0|4!*z9o58#Ct($hO)}$fEVem-Pl*ZN3w(Xua`i zy#d=cLkL^6-gvd%0B+mk1TMG1&#EoZdaLl9Hy~`w-bAdvA~lfQFKuKA`*8FfERICH!FidBI> zH1NEai*Kp8*;OE# zZ%WR41Khos_usgCqGClLdrw63jhJr$*v*E-Gn#Mwnr}e2_cB5k=e-GGz5(Mt9|&Wd z_a=z>29W!C5y*@Tz^Y3WkO9zqqu2{@|HFhVns0oYZxFfe281t~Z$g-Fz8tf##bM<{M!4bRw9U&5l)YD6rX~`No)UKz1-Q{}|0TzRWk^ z>h+mKEShh8ns2~%=s?02%{N}nH-J0rAc4zm^0R6SG~X(0z5!uxCjUkAO(63PFpH;> zKt}UTK=Tby`!M%E(0mizd;{RV%=}|{z}ck-ns3UQZ@}9xm8>P)O_5cDp!rr&^9=xx zv?PGheB;@C1H1vz1TUIzLYQyBc+^Y67|l1H%s1|N8=Py7=}RCNKi6b$dU)QZ(0LnI ze5*QgmLi$A{_ZP@JWY*`=*Y5 zzxk=TYbVPo>QQ={yHQ z7gbW{2V%BfbII$T&h4 z#W%jhH{cre58;a98=vAEu$|SLuto8WSMd$t&fZ1fa(n!&+5*M53X5+*cn*{QqWC6| z_y(AxCy+o!@l8PS4N%X$Pf*MAu-Fv`K6I+|L#F_IKGXk~2b^7cp!lY&_y)WeB#{8; zZi=iL1jVcllaCx2!ivrWjzUG 
zJP3+U4>)0)Sv7lxcimlD1EeQx{q=^LWl=R>&$#1%;LF$ev$gFGemq&QJ9ok%*E?$c zMb!4xfoX7($#vbTQAW$R5VG99zG8JByRVO~8{@hG;TUy77+p7^TsPpnavb4|L*9gR z-GKEfrvHJio4~Fc;9lcQLR%hgcHM`ro06^@;EpZH|5mK*WABORx)Ik60IxM6@rjsS1|4kU9>n4cn29V>s5Xel`$Er&dX#b<@#<*@kc4I6di>@2r zt{d>(^qKHQ*G&l54H$1>-oHWDO{gJn!1<5!B$kVFW)~jlx+&qh0p@MY{SRidV-*|< zY&C0=25@(+CUCiJeO7IO zu3Lp&Hz2&5$$!yx6UcP~%zMQokkNG$&~*dU`Y*>jr=iGyXTaZalkgfcMB)62IuW z3E{c{1JMbGoawTQj5j|gbGNpYWPRVr?F)mI_h@oVEehkC{Q=r)sAL-?i^u^rW^03 zn-kTNhJ2whfy^}jS=}fFn*V6J6`5{Mlw<);okhr^>Bgt&CS8Kl9umH2y76tgN$cu# z=KaUw1{S;iXe6Rm%Vv0IN@jRxNM?AalNsI{hQ!4i7u3`@H`jl#(o3IJd!&gxwA^** z+Jj1dE1s?|qVY*2a%JKG~Fs>y3siU5WdRf zzi7G%V7k#Q55Rogg{%^2x(Q^u(MRxr`oKtjHVlprW<|a26*p!5WHx*@ol=%T@PTK zbAm8N(~T$7jayd3am9Ue0vXF{<;rTV?WRu|G{P!>q#sZ=gclV&~{V8c7t$!uSPI4n;om*P++q|+l{f^fb56ige=-_eA#Zm_2W^( z6_4eWJC+|NF$tGTOhOyl(EOxN*rM&mtL+AGKL-)G+{Qkuwm{ph!nPX_{+G#r(RLHa zb_2}+*^)p;+f6{*4N!mGLQtdaHaRta`u483jy`SQyyw1Ptpyq3ujcbM*G+oE|GPn{ zz}ck-+HT6)ZhDk4HJk9}Zi=iL1Z}s9+HL??^q2rf+l^=24e+XVCwS3z6T)@_#%enV zW3=6Pvfa3)pFU-*!OVZe(vR2DuR3L{Ifg_qx3#ZW>BnyEqwdD&Za`QolMqJTO(@+B zI9F;-IHT?+r0xc+D{myMQFjwqcLUs2s+NkIUHYN!rljr$xOHa`+}u4;vC5CVC!+2~ zbT$CS*}}<6CzFzD7xeFY0bW=x)H+gqeR+T*E@$ZQzp*2lq`K{eJUPb5~__H{fi# zki>Fv&g{YibvGq+H^5xu9l^|OcC3O!fz1weH%4~@vSx0CEb4B2>2AQa)>gt5bvHhB zH(+ZnB5bke!E4V08cG(7|H|#`vuX>}-72iR0pU7HBz#eK6G(Rh%=H=(%&5BwsJj8` z`pXGw)ZGNv-2iw))lz}8OAplDl-1pUccW0ko4YBpY7o@jDyq8yV9RR+FzRkR>u!Lz zi4DPvx|IufBs|reJ6|$3iNh!_x4tdFuT8x&Ku*r0pS)&B!6Uuo5&MhqoXLR0#bl!k< zs~Ex>oi~AVE)Z`xT8j|K;Z zMt|OVXvw`v9@+z>{k{JBL(Q_Nny+Wr9sm2QXhBS>ZqC$vyIp>FcVDaQ(1eovpeX=u zdzAo2=Z#5 zosdQ6jc?}-_;xx>_@eVBg!2ZByO`p{I&Z+X#|Od|oi|>cH-PKZpTNbg z(u}wJLt}7+<61wuFU(7@_@5T4|Lv?b>4t??*J0u+)a^HgP`+PQRfW+_empw(Rt(9 zc>}!t82=lcHzAxiVBCKJ31M{JcyivjxieHJ1DN-(Fn8uHcjk(3hLrKZK_r&BO@3B` zsT>!dP<*47RrYYCDA6cR5eS4m5(!}x--Ht1fU{>E!WqRkA;mXfJ$MmejpCcY;v3-h zV*GD;xYAaZ-9H~ZzQ_8d!k}BAbU^5VQ*yE8vq{0%s)l(jbHH%=z2FH;fvy% z5aJs!7B41@QG63bd;`cn^5);L>JkN902JRC@eRoOGV^axeB)bu1HOKH$nt>Vn-Jm~ 
zFdnH#7^C?1b^KWW!?#cLo0jOCwfItm9be)ba1NMUB4>8tf#RDI;u~Ndl}j+A_!gfY zaKbjTYW588y1TT@G;K!2%yK^TTa=oM0Uqo$B9SFT8k*DMU zZi&Rgu%>|Qm=1(2if??0Z@~38X8sL|Z+wbxz;3*S9K__m*u3Md zdH0)SXN#5WY#k`*k%kzMKt}ORK=BPwkB=az<#|}_t2S05YOM-2Nfas;$7LlwOGQj}_3Wj?T>rsJFMaR0)vMxHERQ=&xo3(zbgof;ha3)z1`pM7 zwLdy{x~i&Hq|W+_N!s;)Pfm#HYQM$Qv#Ea5Rtp^F?6BT%ZEgKcwbz)zEly|oXP9pp z{q95GE@@P?nwDGCuZYfxbC+uSikkhdqG_jgdFJnholi z@G!|iqt(@Ks@MFiVpPL25=Ml$v_b_)w^hr|zj@Ts=o z;u`=DW%~c}fU`>v6yKB;-+=c-F$r+)rl?rcgB{twMAPHV&M$vo{Ody%-T2Pw|9SWv zf0nlIQr-RUFSL#>eEaA9-EF_vz3TJZgG%=vYmz!ddfQ*`bxMKW_n^4#zb}c8_TRen zPS(S&7KYTCUHzfGA@Y_i)+;27b!&RD4ofD0QGDZBd;`3bEC^l{--Hm~fbnGJ{bLm0 zcoN^ZNgd?QQ=XGR#-xtdq>d}U8Bxa5nEU_SM!#Z(AiL53OB8}{zKbIEUCf{4q_KWz zzOmK(#r=2oF{qW56Z+|Mh)23pBkgFLgfU;oO&)FiVdFDHHAge62Fd61boO7)FB~2e z9MRh5f^X`fk%fDDc_$`XxvTD^ zbztz$Mh^@7?CTia%RGB-uh;!UC+%y}koT!?0C@IA0vP2te&siyJBM}ui8CY8&7r$f ze}}%#l$Wbk0}-`Sc0!j%pU}lzlAO?Gk`uZ(Zu~xbQ2UWdZZhS5LFA#~UJ1_TT2t(^ zxrJ+)v$<}3pUp+7m+NfqO|w>816KcCH9F|d&9LQIx^ba1}`2XSJ~K^%~s@4_s=<0eYye2?>);5%w`V8Fy+I(FfP?+R(` zz6cOc5R23O#NlFbUq7*0R}1k-ac;G&W|y)T6jqtHB-_oZXI5HfuNKY3RFel(QROI z+t~Z*!N*H3AO1XLhq)g$w@S=qmFLt5>)^E62O?K>jha{tbd0-GI9ePgD+0oi5k2w9Zh_>$j%>+&UpE6Q(t z%5T7S#Y@5#t-g+oz=i{O?#~xV-H9B{IMze zyN!9DdU+YOkM8XdxFp9YDu$x8*7whbBZ}bSz~f$*3Y=Yfp!}w+{00HOZW!Us-4t0h z2u^*g=+rj=y#6!+jPe`L@*CiduSM{p{3e9_28=gG5ymLL@g%=-GYv@UHa#JbG1Cx0 zv*#*s#+30Eoc|`I0tbZu!1-^6U#jRvG=I6_)@Gp;IN-bu=f64i+|qG(+swQ^*+MFC zz4W|`&~07WlxvJ1irIa#Q{Q7W?f8*uNy`EPLf?0=)c0qz8x|0bXU2Y`3s{5PNM zOtaIwOqyQ*lV1f6=aqzeTJ))#=d9ynq|44fZA; z)D1AHwawc%D0-)6k9`)_55yHx;DGD_H?l^k35qy@zn9ThxtndZN8mL z_l)3PuiA$4s=xukn`Vw@F^z$MFmbE1r9Ku!TE0~hkJ||@AbzP*Q)|5aDX}q z=fC|A=f8mokj&hFlxG5<0(WcnnfAZ^TkBr@k*jgaDsaF%MNH}sRN#1@{|121;rzEX zLF*q`?tOKARSTXKIKX?sg2XQ>a6%|>z&I7>zqvZye)x9WHq(xac~amQ_uHB3NFM~H zG4tPW;~N$oAH5;wzDvu{M|Uh&U3t3yM&4@WHa^CHbEeu*CX{jdV6t#<2i_=lA?k-& z7G7RSl>2#+hq}8qOtQG9%AObE%9>zzJo*(W^AD&SLKW;t)6ihrmg3XMZE1Ee|)l8my3Th(q8gl^AG7N%zFd z;|OlE+`}76FVy;P^A(oT;`n-D_5aE*fxr8*t84JDTzJ 
zrGzoQfXe$2IA^LQ4f)0g0-2ElSapd4b8+wm)S?$qr3>(_K7=e9aC{nY(k1xzKEf9b zIKB-yXn_bQWf|=RuSOtdyn;i~;D;fgl zOj*-u#QlzhEDnL=!+>+9TG3o{VT23ir{?b5(h-Ni@nyg{Q#Lf)Jf{DTL*RHb;H2j} z51IQP+-5(kw!k596&eC3%^4ms`7au90vK@8md6t#vI64}IDrf}=@I->X8w)5zBK#J zKMsLYY6zSZ__KdVY|8`AEj$Xw;k zlrnznN}`zC-dC*rW4HJ5-BU_swM3w>rGW51dkJAw-h@)#fb)A+{l^(ZCVS6G_8M7Z z5fv+YjVxXA8d7ScJ+p$2g%Y!9+lj6>1W?WY>ji_I_BB8 zS$u1pB8~Dr`zHL*H@Y4H)*oh)n3m^Yu?xToIaoOKt!U_*6!*usrQ&9ne-*-w${VA+ zL3Do-6WrWAQL*Zey@;UlMwB-I{F!zCiL+Gq?K8-^hZFtmUY#LeF|L;fu~U>fhBTg7apj*DWSXpX0_`CGqc&T3JwJ}J5=5n;?rdE-lY1Fo9Ugexj< zd@66iR_h^Qi^?0X${WC~WKZC7n@X(O0+qK4D{nx!GL!$J@+Oe-2AHd8lR!r0O+e)h zQ0q=0sO1@0?2ZR2Z%Qj~0KDqsQh~Ee4^-ZiRo;NN-ax{eyD7425LDhOs=S#{Ci+JS zU{v0CR^9-wfg!<*%9{|%8!$Fx=KrJe#*^~qEm7W9NPMBz(7rP*T*9oQ939j%~ zgEBGcMDTK(`-&BR?B+fSZ;bE;giYfKVHDnk65fDwO-;fXXS@kH;|*Awg%Z{%ya_zx z4RF_bP%3VA@rS}&moal>kHBeHYOWg)xhTvg@%Hm=e_p*hZtc;{L+nq_T>D5*dub&q zB4yw$cyNeo*#mC#egrpnPgJb-WABM5yb<9I09za+fN{nfzcb!|Zry5xE(&i#2yei+ z9&`SQ!kZw%8$hmqn*=iB0I=#31snhr-WcHx$TqYgWKnqITX+M$jaCr8D7*g*Qfc1G3F52w4=~_!8cL zYx7BjD++IX3U9!+#Wlhfg*RS>H-Ou+C4tNB?XzkN6y7QO#X|)n?S-FV7B>} z1TqS50t#<{x{W_Ujl$dH)con&yWTqbw0-lQ`+~I=WQ4z(Kf|x_HiJhyd;;#Ds-v{< z2Eg0iC>1!n^g!WFS>X+M+d2~7+)a^HgP`zMQQ-{$x8F_xqsXGP$l@pQgjPyCA^MuX zi5+wQ5rsD)gg0Q^aSRDz6yA6e-nfAb>qMt)0vQ9DpFrkXZ#5~CE=>P}+u>KN0%Uji z(RyR7Hy~`kl0-3DZ$epbz}ev|;f&Uskk%Wp?lzjRM(a&r>kV*s2RTdnhKM}e>nmry)saqz@ePUqC9|a%A3kbxo~K1Q0nIyx)FU2jLpcMktc}t z29SF(^&cYwu<8;8L;$qj80!tlx;17N;Bga!0=?bby}f1QkS7My3j~~Su8_v=ivaNi zu{hmN94;33^%JXgwGfXK=T^&VcByE_X11GE&#bh}UM-r5f9g4#pF$_OyuIHmSvTU| zlS%2d_Xitz|MO==Am7#-@a;X1@I~uQ26H-W4-!0g4me~i|ffYuwJ9y)}q8ECx;ZoL8U zVVR`@XO|vmy(w$G0dMb)gg19nWYr*Oy;anD1Hj_-1Tb1}JX>#o*XKWi7iA%SWFdcv zS-47K7Pg~6$nq5v#%R6qWW8}ALm>N|N61dMAHE&8&9viUo*;9zH#5p)WD^3J+u~QO z1pJ|U=jD||xt|w#sJm-J@9vr^yF=>A>fPD!-MjNCXYcO2C}Q8m{7Ft4>xbqWTg_kG ze`gt!*y& zrXCtuxTlwQVxpD1>Q0JUR@no5dC@4zS|NMg6bJ__B!p3W6H0pn&ZFKF&ZxZ!sl5T~ zF=E16o`=Pr^<5zk3tu;7UN;5YzcKxPdAQjnV1;nw>!!@>rht1~XA<4qJyEd=ki93O 
z_C~Zf03687KSk}0U+oR(1~L9O4ton#e=TT?fuD4s+aNopl$JwCn$#oDkL3-k0z5i{$d1 zU)*~0;??zwiJKQER*Eb5I7le%4LDD7Es-<3@IdWN3GEFqPd-F2Gn*Z&;80+*L+y>x z-hk}XDugU*Z+vNQz;zlk{|2=;KD9SsJ3X1iENXAOYHt8HqBeoc?eMc|3)J2!ti1u@ z$VfsMwKsvZH^3bAlwd~fO+f7pP|xa5P^0!Hxb_CXXQz}3oLzdL_NJ`%2E6C6=HK`R zjT-Ft$Tli%RBw;1Rxi@3xkf{aA?Hp< ziPY?kGW~MZmY)@0x39CYjm~bvd4{p#Ztfc==fi_)_1si`r?)oSss`qx8pb5&zU ze;18uWt1*D=|8ab0UeXf7G~Y|A6HrMSJb?g;(jKJw#}|}A^iAneg7JF^WUp^UVV)R zjrrU*`esDuf4%P=OUi5b<)4F%cY5g>pB?<}Md(@a3y0C&tC@uCKc{XSZIk49GWON0 z@dGxj%|H4fe&LYFEm3P9Wmd5;q*RM)I8hxE*+$ex1y+{V_D064)t5FKaX*Ptu}BGM6|B;ZF9erZo9>eg; zl~<`ZR;S*0J8MEz&nQlGSuly};zUb|=$48SmAh8|q(pTe+D)^2f4^|)Jr7<`;5xqgq&M1ss8RZfL)!!W!NphW<*?F7r@}SjlrE*mv{u=x^}6`A zF|n)T^ow@~RSf2btAsqPJlAaKdIRZ%f2R9b=a@gEjzSSVtc`ET;TmK56MNJV}GTNK`E zzXDi@?(t_%q^=*_B`5D@`mm$nH?t=`@_zZ@!>7bEx@SI`zgV^YL`L4MJKr95IywXV z@z!znt@54{3K6GbS*RgsW{P+A{vuC<&!+=KDh2GiD!AaNJ%k`NnVLb zo)qi`t5U(16xx{NRXoXK>PLEMfekZ$Klui@v_M5JX#L1d1l1^$P0a~?JndtjxGI+j z0@=-TOC_ts+JcE-xe`HGTmH}?-=O%~Qqrc0iD1czAVl@n;Uy9+DWWkEx#5yxXGP5WwNU>A&SWz zf5{z?-Oc#_^2jQ&USM)puH+8B!Jg+Nx^caz$a=vo#ewKPyAp|(6w#R6@sr%a{RV>(#zbdb?j>2wgj!KtmKegh0{a3;&6t#Xh2G%1tQ z-x#o%We91n=5F#bnzq9sK%=6L+%ctHQ$ zZe~rv+Z%E(C6#L%pe>g_C|_RDVdrqK89u3eDayGxbbyYpFE+RkaN@yO^)!&UVI z{GM!0O)a|L>y}HcrCO_}jhwYc-7lo2wdi);k5zOmsDq;$sy13`ps`D%lIXRsc#re$ zG_{(W&nBHsO8QjQYN!9HPaRe_i&(hr(#>vPEmZBtZd_>OtXXYVRJVo;)ejp~GJ9pA zquIx@mde(8n_73R+MROgU~TO;B4lWTor_hY>!rDVsoHnNfxf!8n(f*=&p$~#BhO;y zc|*_Hx~>aF3(k&e-Qbn=>6BRuX1oqb4Qqe5-;`sWI-JYN`}|wgm#Mvu+6~QUxwpmJ zfg#8HM!eXz*|Os{`=a~3?ix0l4zm`8*{te0en5x+u8zxi;ua`cQjpj)^Ppy95qt#t zIULcr;@{;WQ~${OE==xS!gr}&k?+FX^!=s2%cyA&5_NTSW+u&@wY8P&BDlg-sQ&z9 zL7OA*J^d#peLcHj?6GN|)h)JCRaD&e0c`Fs0bE5$UrZJvd9d-9;D7T=Q1rmgZpq^0 zMlVj84as-0D)An03BLeUMbvKD!jvpon9fNSrt`9eY14|5E(FU9{)wTH6II=)gSRc7 za7L|;jb%oyo;2}ns^7F#@|+#k`>n05zp3^bGq}a+EdLDiEu-Ik=-VZYs#eo-i~1GO zIdSe%ZC_Ed-&Hj2v{I6*yF}YRceY&gm#SXmatP#DPqRTi6CNh{XtlcfP4$|eRg7v_ zM#88Orq*k`ZRd(Z)QvW&R`*TXY+b`m+pb3Zq`j)Dvn;G1h#Tt+sCR2r;mVV{a`yE( 
z{Lih;&t9A!|GDll`}k}K{&Oy_uORrBN>&;cg1oTc&FdAD z2NpgY8l5`g$<776Z`n?Z8+2l7d}`}x%^Oc2FM2%Cthv8`=lDOnX>F?=HQg*B z@b$}H`G36JTD0=Rt>LPw;}=Y?-fOu{t9Mr(O79Oh432c8;#6SFaHh(^<0fk&$Lf92 z(5Ob;ZoML^x^tyj166;I{Md58M|}^!nN|Avh<(K3|1?)!J2-sW`ixaw9IrlkxoO5y zkwx3Sj+OsVotvCgr}_mIuS)8=Vb$i$Y80azb!A!K8k;ohQVnXEn6x@zyLY*JYE+7z z-?p!sJ1%-!YQ9=E@bbAZJ5i1jt|}(2re`>ZaeCh>O|Rv z`E==pdAlI9kUM|1yq+`Kcg>yaH?9Y+Fbtmw^QP=a4+mG7mHxOuMg%+Z6$|FW$Vp2v z{bB9Mil#qIu1UiNVfpGJgk9bmKvCFCt|^6m?cY*im$~F%!Np6##pIeUlpDR`+*nS& z3+9@<=bBQ;Hw#OITxQ^7uE}GrDcxpom!AKWxy@p(S&m#&%KC3+{-eCSzRdW?T$7(% zlPQ4dMG4ZMy9;qqD(jR%zyg??j)Dn%&y0Kzd8ct@E=qXxS)QX$dQ}40_woGyXE^`o zrlYVZAv!20pb zQdyT7|Co;Qla6v%epr;AGUxxr7o{?P0kG*Mzv;!zF~Qq@7Rf|Eo-dcKJt%kN39z5P zE){l}OAQ`*3UK5J-{s}+MlqQSZPK8Csa-pr5RtM8ii%Xjtt(jOlT`(Lc2yE^Ao|Hp)MBA%w zO-6NaR;L2`jex7CqM+hmMvL)3<$akF_hs^B3@Y+vO1k_0vws;@cfNs5(dBoe{m0E) z-_1Sn_wy59c8+m4HuKOSdl!+FVQ&~@BI5ocs8FWb%>7?^xbYXE_zVc4eh~sBzX*FI zOH^-qiK^Uy?9yjWdt}SgMalA%DOsK_$(E;0Ys+{4UHS`fBsB7f>LU6A zyoPA-P#stMqjRULs%k~g}6Iq(axyf|hZ#{Vylx$NM#QxtPaH{gGE%y0u>+@kaM$CvHo?f-Zt3<@$us-52)%=kDkWSpyhGfb)vR^U;4Ol>)IT% zA;+2?`Xgugu6v2vZbK{e7g1F*#MQ$bc>KRe%-tJ>-W*jpZVdA_(I+^35&Pd&v8Z}=Z4eh`E z&V}>gIm3Q)$q8(JIc%**ea-$V)V25#J>k=EQz6)!^%^n12y&8COocdmvqGuR_wV1H zga-m%QIUbW3ijRZR6DRE3|smV>PDR9C)C$xtiNB7ynjrMGORR0@qbK}GI;2t*gw_{ z{zs}%27fB}Ka&2(M3lkG(dGGX^Omd;D*#Cksx@84R%hP-tj!1kC$9N{CUa_b3bZsm6*#ajC!D+J(M6T)@HLKR_yj<li)2>}EZ+-Y=7`&DuPunn}q1*=okoHc5_CyLXxX zLNB~q#?_eZ=Tja%H#>GU!?Sx!->=jyl=&DC+5Hh!PemAeBr7_FVw(RFj;-Ni03k?s_w zKYPG5&Tn-Ohp9u6F8JJS(FFxO?gu}DWAMJ05VguxKh0# zs7;M;5y-`J>9RK~ELln@S(v=Q6%Mb|DO1xf1hc%Fy=>HYL{hFJ5-IK)drQS#c7}{+ zh5Vcqa@QO9MKE(FUk2Bk^2`V<`-tTG_wO;3{u*n@7WFqrz@L5ePrI*D`g?5YpN=ht zKN<8-$sZ~Gv!m#rGSeyjOI7KgF3+X(ZzjP{hSO$IhSRshPp0~-Ac3U+EvB_=Qnk7+ zg`et7S`gj_=6@AGj)5%%{tiX?skuA1bS(Z!%l<{f;~2h=V^k?q^F8E?$U7h?d$+;k znDQRS0PQ+V{L7?%=9K)QHN%MfZM@#)hz)!;-&iOw&DWgA)24z~mAMC>W4g+J}QCI1^y1^xn+(w|DUuS=|v4Hk0vmWTUvR@ z%e>0|^}#&1JbA8^wiOfq@@SX6g-cC)KYPmz3yP@Dn#lEU#68I`7)Tg@^+XTU4c{F((~L}`($}89Eg}UR-rRq 
zn-8*#cUpnmj2B81_&c)vs`&RU`?n5HcKJNnr8j!0LE13$zvbm%nDMd?v{*MT6%_5H zX;oV${^ijw`*#2{UY;^u#sQ{_aag`=moiI}oE)s!rx`$dV9XncQp>C;Pq}b9_?2G7?ZDs%XMYuOWevL1}di_n>*`3u&H(#^S`VXFVPb0r1 z9$9jCzR_J#V|D6+Z|~Ebb}P*p6jQtTrEtoL?ZCvpJle&!89B5u_vI<~sie|JPvuacNvxY+EslR9s{7I&NTK%-*&aUFQ zugx@B?h6Merk&r=S#ZxnSr!~MM4AQ9mlXA^(gyyHEDJ6k2&Mm8wo|mU;a;=;HA)+} za296wulM5*TG4jPuC5C68W%n(ZhH7{&U5CZJic*k(V?LI9oF3$HDTks^vPt%#$@S` zjgK=z0{7Z=x|{Fy^u*TuW?|JQC7S%9LIsEV(3gHk7C&C;6-B;8GtP}OcXt>63Erh| z`Y`#btl0wWOH$_g4_12VbDkKVwW4}xf7$!uf45KiJIaxQT5}es*1W;0HT`+6HSHCw zHS5ozYt85o;}uRzx5RDl@n7|-C+|%StXliK1{|(aE^K1jW_7LDE{8t4n8z2qnH+s+>FX;Gb%UO!MQf|I zOujbj%3KTU2jZSO&2 z5ab5ECy@_=3(yZck$?*jHyZ}os;>(95||AOl??;ueq&1IT>27(*)ZSPFswoS|07?d z_!?CDwu|Sxa?W=H6}z9NOs?B`sw~%qBNNk}AL*>u{k1IXoi$Nz)(hni{2k1COB=KB zzsY*xOL&BnFM(NaNpmk)^=JjMUZ6Y(#%#%MaLTDIO5buY>s31I<(|Jl4ltOWb5nGs z7I&NqJo>sf<@(ZiQvUlR;W}KI1g=8D(BMI6-;IMPG#YX3`-?n1Q!XcS_oRO8=e3 zQ(rz$eWCR9!Th)Exo@m|paS_XP#%H#Z`qUuJoROc;oXX^+WIm1ue`Fa^hF2@5~T|g zCS#^Q2;2nm|4w!U%$SA9n5CQp0!rmvdf;Qm%xA{zM!C?DKk762BA78NoiY2Ab1E4> zr@l=7>pn!5|H9FVskbB(9z*3RL+>*{_pOt z(djLGkx*v-H>S5rr?*x=|4A^D341q`Wx{aWVmd}*M}$?D*%9+j{*V((D!6tnvm2&Bu79K__eHa-dk+k>1eQP(W58c9=!rXsrF6aK6 z{0~BIfp!#8du5ld&(oK#ubq`#x=taNu2=s!YC!Jo1nCQKA3`HPs9vHU+H1&pXzvy4 zp}jjiKeV^0JP+-`D8@Q>9G5)3U=;22ZiDvqZ}E9=K4+%)Rln~S9ld8~XG84{x`R4j z`fu94s@v9yVjR`z$69=~v`s3HJ-pVTph~+28>Ss|ySeu6e-EAI0zmFvP6C^8AFM36aVBSA$ zQmzRQB=W0>s07(+bV0HjB}rDJWU?BqEXN24S=5h@{GjrIo&hm{b1x^1buY(_=X*KT z6ug)7UOEG!(e(T!vl~qsJLy8f={MWAuDyS>&|)h!Pu=w$h!q~CzgZ#N=`fQMkMWe{ z#Bc;-8diz^%u$VH+3|wwa;-50h+eD8U+Q*?CY z%)gt4-Dv-x*5YR$jN6Q#S#K&^bkC8STu|Jy$4 z->8xFbcq$5D)u9*iXF#u728+ADzdMEGZziLClbcx_Z zS9sJ%R5Y}f#d%2&67%vnml5}nEGy1g-gt?ho%4logH;(F3hx}bxvXbk&g@Ptrq}v^ z?41Wx6x-78dl;f3Afgfl6DAZ?6vTw#U;^bRA_@v9W(Aa>M^V9qIR}gw5d{@65ey(k zKrvtj6%{jJP6&L}!*n-2boUHlM)~f3Yq=Kh-C6hD^{=klzp7nT`=a*#`W@%qnQ zyXGcK?d%V&2ku{e?Ur6x1EOqM^2YR|Z$9pxdiO%&JI5}MANf1(-qg`Hx_uQeo{9@} z((~w12*6^^J<=j53NajFQj9`$oQB?`bz?|dpu~mmFG(9lMM}t^^m=Okkx6MWaR}jb 
zmE$x-qK)q4*Do50#GJpVgv?YzwvyHwVQQ6HWnejQ9s$cxuhCgyk7M)ZB?LCF(GZrG z=?4~MUGN)(&AbGz0<&ze8EmnoRQ(5GGpk8TH6>R7n?dKT0pvH2u$k-AmlihDYd-)G z+to;mpu>s*n@fH2;%h&Um(W=l=1z8akpeRjniwKL?GzHWQ-IPtA8;y-SXkEW6kwBG zQ%Rek@KV|E0__wEfBk+6No&j$HZS)fuo-GNI-7%WfZlVJ0O)mX837uOF8B=s=)VD= zA?Mu(4@n8vTrq*2m{Shboh7O}gUBcabncMQKc;$kk%aCMcM+P zG<%J-RAH_HwP?^eTA5R4G2cBx=}Mz?$?gWCsRoX&n5~=^5d&^0iDXG4>3|H4{>Q2J zKTt_pdhHpY^oc3d7tN50h>0->kgFV!!6v7`B*IdaDC>xcFsMC)p*^GM2SDau@ETs# zv~WflfvH<5tqO_Ld+8jdsg4cmt%#_MZ8FM^Ig9sNO7?&WMJA_JQ#FH${~YEa_SctZ)!5$Ywh87xNqbYdA&X76VE3m{!lkLIQGmBtIe(F ztq8kzuUDZF-+Icf74`ZmX)FlpWxPV+xK<7Q_eQEpgPI%2?Qgu>yr+6^tiP3+naAj9 zE=>=v;wv?|Vq2hYzdqVt?SAWsJxj+Xy3Nftnt!pb(?T`d6};u=Czv;VZ+14>pw`{) zI#yrRhIMx?D4t|8ZKmI)+pj;|lMB`}jnRoXRbaW@^w%e+%m!NLG@d3V8Y!=^bZWOi zrSXKQo1Vra?s~TLQOG)Zy|r)6%n^CU|K`;iEQi&~bMt_hr-(B-8(iFP*XC3!@ zGPhGw>-Sq5_icfNxZ56v?vN%)-!RSd6F^+ev=mJ9nmYcMY*%bZTA!w%`mi+t)lmP@ ziNpWb>s+`(;C09fM!beR4t|61`fq6e09#ywkwnz~QOujvu3id ztu>vQLF+VQPOYg(!v<)GnX83S}`u`9!>zIYOS}t*$rciG;zs{Jl%YYtP{iuM*AE z;5P)^iGFk9!vMS^SiU@sSn*Mv@-$e_Y8TP+Wrp*>;Dp)BMjz4pWEqgwHPN6>^tJ47 zRyp%?3|%d^-#h2?&y;(^LT1_P)|R)?H(8o=FuV@xOcpI1-8@HHqb=vCB3odFdvbAwBlaa9^FJ(~Q)j@s_U%63dj7oT z^#03r0&TnlS7N$+k7GSl`7yEDA%RnB7bq%fV8c7C5319*#)6^zMS=C&ML9Nc^q61k zUw1cmH@DwPn=)cX1?))O)ZOOx%XhoyuHhMVw703bkG~`-v038evThw6)tsTOL{Ga5;#t=@C|_6n@pIxzhVhLvLPa=Vo@!TO;XN4U?Wq z#+L&BaW~(p>2Kccql=QFVh^_qN$YgNY@%*9Fy7d#>uRdz!tg<@0Y7M)Dwg zj#z02Yg2feM5Vsnl2dB&A3r?fReq_7Js;IcYV9X$oqx|(TmNumaOk!Rt>PZr&3N(X z!aE-B{4T5jicd^q)Tqnf9#wF-+%ShOWR zEA8p@ZKFNq2lKFlFGqC)tI)#{kV!@&X%!TX7>__Hh9f$YLl@Oul>bP0y--Oe2d(dM z=Re}(5n}QxXL1Ng52*cTq5(;K`j6_^40UX!RnT;uic@&?E@)H4o&264ZYUV(+`w}U zokxcW_TK)Sk-Z_`gWn+bHrc?@q$kD0j?Ug-jjYb3H4uACk-b6c$6=gOi$CiSdsm*l z=~uQuAWyrHRzXJ{BYT(HkSxCZ1nCVOsG(CWJCu|dT_|w&>r@h`LF?z#`)5pAiw`mg zsH+^P!7MNKkS>6tOy#3YadFYD|CJ7Hs+oj|z0-{d_J$&o&Z9`2!e6x~C_Ge$k;38F zgWn(u|2rrg^6l42q%{zQOOe7s>bEAGQj0$z5rtQt!s+)Ca6Y~}kE|lu6_6C~CFtM| zqYt@GlvNlS?ZuY@QAf9A9o?XH-UUvr#m6NC?o|%%V3rTgq*)NSR|&Wm{6DOKQxyL2 
zI6>i1Skigb4(IVt$pnw@9>B=sZ~($@5Rd;IJPz5>7wY~C@wgOu9HjnA^?x%-E&kj? zJYIPor>=zt!H6Xsd*26>;fTp9;>i!SWU;hlQIL&G4FeEAsQy=`imiC55oC*pY@yb_ zT2P-gLqa1y*dWNRQpg^J4a8lbAdxf+g6t}R>`Qp3$^TjpIMtED#N#i4vP*azicLD7 z&f{cWG?F0my>l4J9F9Wx4I=ZugUrE##qUUKATW_4nJZ$tJV#Ec#UGZ4%qvgkTLm5b zF|cA_5H{$phry(EFee}fqP&og>Sl|{EEtgI5QPC<+=B}NxYt@?{o zYQ*T|=&Tg`Cg>yxaH6(`vsnbgx&G~OB+-qt)5eiUf7smbSO&c)_^n})2(}y zp!EZ9gtWd!G(&{n5PT^5sY(z)V3TdL3%d-<^ z{J3%^f86kON0W1}D4xsv=Y-m;!@0xsJB?Bx)=%tjTe5!Qn&ZphZO2k4o=SaZ5qB|k z-kVFspB4HlV8c6(31TW-)$dN)qg}c7uaH4qg2I)A3s`*Gf9}6LNYTtY@EW!i-@sx^ z+raXm)CQK0RlI?vS2;JZ+$!0?a_N}iO!>B{Gq-A;{vPF(^~&GtP)*ptk}!85UL_0> ztrBWSa*8d11Hc`$xGG^;u^Cse{Ow``v%l;4F;44J-{8>H3tK!3+xxz#X*nbO{nS0l zrrCa%5>`$(c)YFkp_&t-<{wnrUmM?xb4$lX|2n3Zd$QM$qmN!LPRMJs?L?-hhgPgz zQEN5W!Q$-Vz8UO74@y8by1hxeprFKZe#$H;$z!^D8KgZ>4pqfD6dmh9Y=a)0VoMN| z5Y|fp>me*Pctl!8G%QJQp@gdGOx5(;|05mfOm%gN*_A&L%ns!$-MU9`dT%(Dp!Y+@ zjPwqPCj17`dxnR&YP)Rc9qiGlRQ(IlyTtnwD?slcc9T-|F9xwCI4BXlR{_0K)h~Ov zbOwMiF7^H+x^%9B>KCm}U=OWF3Yc!Q@nqyjNG>%b7mD4SJO7a&(jX+SVkF074Y)x| zPtpw#lCx9_O6`miUulO}Gg43{I|N-s_SgWNL|fe^MP`4}v}7N&@J$fnaW@qnc8F+i zyd-FEOcXrsmPE8S?%v4SL8R2I*~@*Ggs-}NogVmA?koSn6pbN)CT$drjhru>4dTgL z`=@z4m0x$>?T&TBU3RLGb(hxN;?~PS+euqnyEv%niOBk{uVQjm-#q?vR=A-DwxrgE zn{sck(PrLP7Dfkd>KU}aU}5%CmHoYT`8L^L^Eyb`x}TEbVq23{5l!w)d#WM7Wny{W zT=}x-VC^Z#3sQdG$uspl{N;Y=h8Jfe9$anR$HV}-hyL{#kPmMt6@Zi?1ur-B<+E!@RILAuK>q`*kLh5Oog6sm1 z0Z4QeCB@qjs>ueo3(SJFA5GbXVd_AFQltb|gk`4={>OUTg<_khbBZlNa6;%W1@s3u z=t#YPF3L<2xB#lgGgad+!=!3;eA=@z-R(?Oc&&*<72e1is_awnEC|Y4x~!;`lh_g*nuzGDfauGR zW7qA-2_z-@QqOxyl-AP4R>E|VR{u+=;>FkK%~9=D3K+IH6m4d334IMasa(jUw!6$_YQH&QnxkvdOvDxTtSB& zhMuj@1Y7s#?Oic&9#rbT-(h5X$UJ*Lr_>g?;FciJplW@URqOHK50`*8KBPUEa8rV6 zfCL4xvTfWY1u;1-+Dn>$dmDFov4&n1hL69`Q%)o@yNDbC5(K5PM?k4zp6YP~tjPDCy)5lne0qYxk%7wT9as2IpLt?K>UEp9N0L`w zYZYmnd)3?W``1S?PMzJ!hm=SkD6W+9#Y=v;?|XR zrHk7GN<{;dkv;fg3qVq{#}d&d2HOzQ9->VQVtm8AyiE+Oe(|?9FQ5{Gtr14m2X|(1xLFQ}nPFiBl4w zD(6AHV0|5*!Nk+zzd9xU3L1w&Q^|T-Q^~hdO(j#SxT(axoJ}R>i}9wC;Gb#y1`Qwm 
z=-N8gxl5Nu9$9m*{ZJfS1M4!ywjr#fISmJZ=DlU`FA@H#8?XSYJLdrWrK7nC)2VY5 z|I6Sgrqe(d|EqZtrqe>zK}-EBIfBtudV= zb@0DrQLTrbC1JJR;yP6972*G1T_}I)_I`%{Wq*~Z;2+auq_nuuOL^C#PrgP7giq|U733YEwQDy#`L23%)c;o0tdvX ze#mErnn$Z@r2y|Uyap)O)gn*t?-a_apQ%`{3a6u0;1st}ezIXA}c=+DwsV>m} z_&S^-S3bBSvg9I5dT@tc@CkcKd!XQ6rNJGVaqy)q!bW?9i&DWw5P9-gPLV4gQ4lUl zj*DQADPKu@pomh*h(Z<5a25w~J*oF^Ma32Kp`+qiC5mTPO0LQWEh5O$Wd}i)P@2&l zdl+X-Z%u+R zAg=keME2lKYF96XmoS#S&?&WoGnEVD{sEoMzVVbj7&>Sxp9`0Ts{i+Q>-3KvMD{o2 z6uI(2sI187+Lvlmrsu+7j~Uea52A;&m=7LZ`?6pA;^YeA{-;WQ?4sg|`Op!$R)SnV zt^qV{m~f;_(NlQYGK;GJFoBSm?;h2k%haD~MNgr~0j1_27>fL2zJElUm88uwbTfyV ze?T8S%e8uWEB$nlp>1|9wf`FxKc!Oq1d#)&{bx)fS3a^Jgp|Da3HAv3#7a40j#7k> z+z1KcE*Qiru9y!UA!H>G@^smv=h!7g(bEeq+r4Y!vz9HguD6w|}Z<*$repsnCBt*sgFL8EWARE@Bh z3C8?pYqS(26l?Gfk8wNPH}Z8&P@1 zee4~|D{tANzA&KtwerG)orgES+R@)*S@M}1hdZWT@7OuOBp|+Ed(gt!Q#0W!!g;?sTipB7$@AXb@Ytpg;^S0CT;mKs=d(2(*#}O6MIohu5tm?SXiwb;O>f zV-wxxW*g1FSl4Nxn(YeSa=|%VZr-8m3~l}Ee<7X4lYY5%rgJ6eypgJZGJVf1@rC4f zlu=bSqoQ*qM3_xq$S+?Md=OW5t{hi_ydgta@m6sEIE_lpTvHNy-E(JB0(+ zf_dk7GSn$i>8JhXhWkU7)C_feklbA5^DrjRiybivsVqjdE<_=rO<6 zzwU1CZf?JoHf6+&3fPglsk_bXm+yAZUBff#Xm3+5&EO`R~CsGALb{odB4%koh(-`3di z*=$byt=ubh#;zHRIkmG>EK`L5Q*2wqDYG~SlHjx`D|6Yo_PzsZ(JvW((<=?t#C zhkeNpUQ}LjA3MU$%HZbalI@b@U_4WgC{v}BBGWBd1erovNO#|PoKcUCCm8k8NJd75 z1P*?K7*)!Q3NlAs=agBTcOgctZ1uFv;U43LEF%8eK#KKjI@ZFJ>oIEng9*39)ss*a zRa_Mn$6C<�cukN1fDceJnERC&jr7EOPQaX%SKG68F`kDk?`6RVZ)l09JX$9lQu@ zD}%L(IE6AX>b}(kqe7`j_vkqMcky)zTD=lfhF0;=1%)N}4Wd;k(yCDAgaA&N#W@Y4 z)ymQ;)#M^Pv(Kn;T14C_ftp+xn_TD!3N6=X{V16k&`6xj2tg%5P|*2Y7N^ct5MWSc zwH%dIut<^@X%U2=Ra{vGdCybzA5nS5ee4K9D}$iRi6!X9OXR-t^P8?S|P z>xF9sx2Ei8T^$_ykM(t>EMhxx`~+Y8|H+jYVDsQ%w}q3kR_XYp?C~{0SwTTb>w;=42L)~k z3hLFlL$eNX`=;lS9QS;0@*TVWtvvXzEHgujE{}e8#80h$>jPV~HvRUAonYuN zc6#2ahWoqtVGsCbuv6Cr#-^~vyNbU7G%WLfP*gZ`R2S(Ru!-EFk0leW#?gW0{G zuTS@LbJOr}Q?N192pp`Sh&kun{MP4s;hk2szF%JLku`qN*Mxf`-ThbpIFp!rI;2@& zC9Hw+&|G+h@viFD2RV&KIfND#iFt)0LtBDn7h#_6hhX$&ZOZGg-U@! 
zA!VCQ%a=j(`_%gft;i{K$>`r$e7@YNyG4ukrX8A6g^rR8K%u!-{`xID|CQ5RW_qPX z>v7nmm)K-QT?|hEcx(xPgiFCHngAFnR4@U!`q4J4aVwLHjYs*d3yys^`g`|gTQj{j zP4WKTW}DnC9%ddt^gE1ZrIQ4E!HFPPC^PA%ufkdOp)F!I} zG5J>;4;a?kFV?tlYrg4|+QG@M+V@;N>iOa~O8Xo3P;_*Y=kuBmb;F{DdM=xz>FMbi z^$SU|H#tnnd>rsGjF{bpO5)zOzm05wZNpA)c!-{S{2K!C2=i~{v<7` zf%I96NKavsp37U@D-zX8JVv>iMm2DlPIvN~J@{SI`-kHz4WCZI3<6^q<|^_bXqO;BC!Bx|gHU2Sa2 z#z))7tnlod-!U%Bu3PrWZ?8t3+F!ek=P}o1@k=L8m3xC>v(0>=h{&VIHvqRM?B_q1 z#ZY{cNqnOxIndPh^eyFND9I6fM^dQ_lU`~E@t;xiFHGq@m%pGS=Z_^h5aeDMkzOMT zaa?5qRBD&7)Lw?fK=Ygpq~=UGz@uH{E}ehdj@5?Iq<6lIQro?1S^Yr60#~a zF#N|W(D;c3=BlxgG#689!(}qW*Rta4SV0$i22ABUV0O48s#5zuO&B1L$!g?8?PwmB zAso$*PZVq+St1zCUq+7RH!&(;cZf-tH?$T{=8yw_5${p67l}pB01sUiXi`zj$ILlSuU6qVlFoT%Jx`br5beE8PsPGb!$7dcF zetnsrHz-CYbNtks(~mwmd!cKJ_4(YM2X$cyN$(p&VOpbXsM0(_s%|3T_$R5n1ffcT z`OoxgX{}%Y^zBRRo#Iw}05pwt05n0m;+s`?0QAk;{I!0rj~k!O4-cN%J-Fb>>jvMQ zZBjGmJX4G){DvTHCUj#uOo&Zn z=$b(CYl1RWX^_5dDyQ^JM+qT4qGnl9Go8Kw0O|*kRzvh92{l)OzUbofF+J@fB)%%q z10ZGb>F5jM>(1sBpDA9EhdIg@WtK0}(HA0?etXhx2z|@He@`;>jix0Wpt`|9R@FH@ z#kIT%N7;26)hpq^i@)4TlUL=N1P_RI z9Uu9$;Ei|A>5r*#Ij8j1M$S26nf=f-WUjrgCT(G2*v#xL+d2wNZ7TZ_~gpEJLN zeT#2#sr9rSLo5~c@v&a7r}qL}P60yJE!;;|rOy%63}rDrtwA_H8?GVv`4RD$HQmf5 zzajWp)VYwEOE0(l6J{<`zyX6b>_Pe^Q%cHdGzm_zvMFhq4CB-{$t(sl$Kgn;# zkB6I2^e}(9F**HAtNgr?o8NzEF}qE#DS?NRFJJX(`baI@D&XO}t40=M&nEYpbMt`b zfb(u;w|BUFoPKI#?S!nnQJvnLJaYfx)Hkb!dHPuO%Dj;0r3l+Qa&P(gf!*ki2K3SB zCuuk2Xk4e*A96H0tIK10O%9O;Ly3}1SslbTrsm(6#IJ0WLw7q>bGJihb%=7!o{&x> z8s(Tuab$jl%G82g*%e`R@Rk-sSykus6vXPXVf8Oq!>qo-3O4tMbnIP2sb4=XXv3y z0n;-*L#ob1Wt9z8WvL8xZV20*K>W5=oZ?qDRFxHJ$%~ZwKYDHGDM?fWeHD1q#5}IDC+W21`ClQtH zC(evq4XG&nhNx^apxX zD6$*z@uYG50BGsl_fQlmJ%Z+aNR=}A4V2z=2GG# zOqO6|v=Ytl^xP7}?*r4In1{SW-?lbPw+EGtSO}A)(bEBT>vxQ_8^UDSU^1vafZBg2 zsye5qAWW7GChq|@X6(Oas$92%yhm8M)`7~jfUu#|rq?4CXKg!6g0-Lj$H>}Hn1kPt z!4plQieGMBb*v4h8+d|Kd`^EvtSvLvF1Z9u7Zm3D;*MoMm9$$k2Hd0U*8iWuuC8P= zqMP^^VL4$VwL|l6-7MN4Js!dfU(1iI9j*hLL9}@u)7qNx9yIz^OVudxO=4n{rfrP{ 
zL-~sW@0vw9HgWWrU+Z6YH+MI;-%6V@Vnzk*NZr)k=Jv~XyXUUq8FjR`sTsZ?tD$^f zxu9O>_#uj#`rgr9maOUJ&JW49kHLCwHVnMcp@v4hN=ly)UcKPjYO3UzU6P9U=iTvD zyR(Lz-e=k&BYV=V3wkpiANG#@_d$Mv!^fE9e||>P8K^MZ%H{Z|fP%xXErOTF%<`(+ zq<*LO#pf))ChOHIJHuk!(Eq;OjNDEZp+;jZ_U}KJdU1rXaOABFpQ<4xiA`<6e(C(Kw^) zr&pS%>ZnHau)@NAI<6;26A0Cz32q4W{u^==+lRb_u|v(HRkh!nGVKES18(9en9WNS z#I@ddTpRii=_ZX?mx0h-q#+MWCH8<^7wiF9FW3XJhTH?viFFT1Y4=#lk5mb-4e@M( z65B)Mt)#n$VpoNifcQ_{T@X=cao%dLv>Bmy+GLN)_rKS5`hruB{(F!Q`--&tI=le) z2p3h`k;;EW)n+?EoXe|rsT-Z@r#4)Ic01R#(?6qw&Ln^R@qP7vXA^_GcD>vD%sBkg zJLk=jmca_&4castx^J=Rtr@YO{&Rn@TF*|c&r7d2?OZm<-&XJ6Yrbk^qOzi#{h>t6 zEh1B^wR~o#rbS#%i_`wc0?d=_zE9b)ZBN{WcT?7O20P8v@@fqOkcYt$BxUU$#|YMj z@|&KW1@1&nM+oZ9S;0u%kc7c+5Ox2N3O9&9q7|q3l<(P296!Og|5)Er2#2U!X4FlG zIKT;KYX2`2P9TW8pdDql!e&TvWgw2OI&@r(98P)*Qk@IpDx*3*!vxhwrE{uI#V9(B zp$t<-873W_Avn1Ok_HonIkv!r&^hO3MbH`4cF$u~o6R*4I!hm&Ct$kQTVT5BtAK-b zAGU;lBk;dosl&gLR~F0No6S_=rl{NYAwk_xlGAfKjPtig8-l-Ig$nuGPmp>Mm5Gwy z5d7VYv+7L`uP{xVP9(e61j+7ZL9)AnOm@3+-IM3&Y8etcXs+Hm0%@(S<$KGBGpfHy!Urhn5pu{O@2S>Z7%eDop z)w8!JGo~JXv^`|BMccIzkoE>o@q9!pPYlI+V}Kyz2b1O!J<-@qg<`CfV=UN-0`#GR zS84So{37jy0A2ckE>wMzH>c`s9^xkc1VL9oRdEGG0lF}pO=(CPOf;M!K)2a@Fq`pa zyWD_|s|{VGQ>pz2qH43b20A>Xe0V@@dkrk}BNzIWzsODDwv#h~+fbF$^JtCpo!0>Z zyx**51aC;(;5P`oD|*pXA@By#r|ENw&gPH^yk!F3^pgr==X7fSxoGS}Csk!nDmp$x z1DtOs$~hQ%dDs+3tsym3v6Cr#%i9`)<1?u4ca&3gwvdGIS?2X^9O%GcGu%jnp`fIw z`mMfNyAbj3%vK1WLG78>S=DB94TR6q$7kCMPMGe6G=SYdR_CU`d(0*RyrEL3=M{z{ z`0SPhg1?(r2Ep;r3w1sC4MK34A-GWV0BZjQs(t6Ozk&N_YTwjVg%D5X4&?L^wit{u zCYfYRbcTmHig|^Uxe&v1G#Xb;j|%Q2pn8xGr|N7@f*AgfFgzG+K@-woh~cHn@Syg> zb*yT$xdvi*=`;L-k|!V;)&NS{YQluzUKRv`L-kH?b_*QF7oQ?vJa;l9j67~ z$oj6YVscjBJpOZ5xSq87+A5l;?!B4eeg21<#dxc0PBZ#k&Q4kWWUJmH7lR|N69T?ndXkZS-rV!!-a{+Z z>VIpU9W%u_+dF%W>$U8wi0id=oz2-E;AtBE560CpRrfF zbnvP3WZE6S(UDzX2g*w$uWMwVcqw~rZp3LXDvc7cmjqAfm9ffXi0*I zwpf=Uw`6j#M|(CTJ-p-+&=M{+4-QLhIN*YY1Plq#L}e4;)nAB zbyoK4>~<=0neisxg2F`$RKpraE!7Xv=GR+%bHXIcn?GLo!2cw99jBn$s`9%Wf_RCJ z{~b5IHf`RVe=isIt8=U%a!kv-uj9QE`gEF>Ffv5#n+7)V@Q}yg90x?j-bU>|6BV1) 
zF@~2{>{2UGs-4*G0m11?qo3ECwcpZZj_LR-MdvSM`#Y^zx;eu+xYhP!_ZzR;Xz4ON zV@v03mA%OuyVkC?Jk{AiZK`+ortePh!VEVorkN?O#mDvuaS>7RIgiH14_EmR`=<4P z8AH!tC;5wovQ? z)clvI*sP9$Fk9M~-Qut7BWJ3cQ-Hp9B?0JA`P18xhokzztpuunwh*GauV|eRenX&o zbIx5WBB8mF_&QytU>kc+hyw&sw7CnVNKL^Y34s>0M zIkju0+G*|{&}UVmW7zDNfoiVDMjrP!J2L&pgENDtZOZN$Jm^RZP3Kkf{C+siJ{T~5 z%9-Q?D-ZmtC1_^tfwVyff_H zcfP*^JJn>GDypImx~Z0%oFC1gRc~6M z20kSP^&Kt*s6#DK@8A~vf7J;)0@=T|DTD07$|3v)A-k-QU8woVBu>p)-5DXf43M3= zCKd+XSTE9AqJbBka+N&g=->_AcyXDe*a*C31Kyx|0=55>Np;qMguwd`fH&Cd4E6pw z3Q0^=gevJ=!Q(%OeU?4`vpNO>Z)tO$c&Y`#`vNupi>$@b>iI`*N9gnq zV<8tGQt^yo&VV)DLVfEM>RZQs1XRCF&Hp0$=gPVM%yb96AOZHe3Qi&%l1KC}Rr&|9 zQ>gc!Ma5=4>Jj}*oBn%M@n-YN)Y~a|KfZ&2cc>ZaC8^*TpLUbL_@CO07!OxF@EgLR znIMJr*h6hIFppB+wi*1;1I@2f?;kU1&g$C;<7I&HRAZSCQPMvn50P?X86C!91mxy; zN^=C`97DfV*8>Bp-^$`toi#3@Qv4q%#e=;vR+9D-mB)Jwa81O5Fy1ke}MWCVJsI>K)d(95ze4~hE2 zdYqcG#s>uSG64EW!PLTa!N6)C+>0JvAgyK00GE`t`u{V;)zy27pAR;|RvBz{o!Gu` zQLud>Sg?IzA-R2_b=mzSrIrGucChSr*66_T5MKCdex!PM{VU@ZgkT=i+M4kmH2PLc z)hO{zVq%o0ZH)y(`HKQinngJ_arBs9>tA;_cQ?1+N}DobMg{Ch-PGOY_RDv>=dR%y zb+oss8NMK^p?qJtpkC+rA&Q#%-qBr_tm);>56QNV!Fp~s47|~yhDN+fN}mv3z2Mqv zs;XO@^qu=18k4kJrCml5KWzfn#zB?moipFme*Wgik9*kA%skWnO2){C z+3PNi8`a{oRq%jk4|@JAvfk?T97%u78NXUeoqruKQy#WViP>Zv&+cH*}C z2`vWAeP%ptZo1#gw*6oyfWy)$!O#p}l6;l2RTlOBC6lcv%%Rx|YVzf`RjFMv)y`JT z3)d%4-&gQx$D>Tc!}-3U2JWvOM<&19xu?~8|KX9f_wg~0$wQzSkVp3q7@BZ z>K{`Cu`ERp%X;Cl?CBcPR*hKq$k2Slh=-*RaqN;Hjx7+xu|P78wP4*MQ_3%b`H@QD z>JY&+GoZ z1k(v^rg_zRHGR9yR zr;|;_7xWI+`x#*T?TNO2-j1i{=X@jYWWPPvAz?wxKuedk=d^w_>YI{1!u#2#zMTgJ zkD2o8_VLXw^ULlZn?Bns?&smo&m8@Y9$w4N@~~)sG~ILSx1UD_=x^;|h21vt42LZ; zP4{8Ce_tOwQ(c|{`ZO&9=%Kc#cYinT$S>v)XkRGDi1v_z!fz1T%M$HD^PG>Inp2K1 zGFFVxUIu7S$3;MwSJeAg2p4$?V~3hYt7^YD)%P$n2(o*TctJbLY=zB`Cz23L(0r51Lj4(Lw86 zD*lUV&EgXX(WQ;(MbEVWaQ`3n%QMyLDYU=0n?QT0H|o6zz#;#`Xae$!ehMMqN3=Q# zzac|A=N=ncXm2dO;>ESaCP!D_&~?p_5ouV)W!_z-jG7+Dde~xYiM_LQ6Z;z#P3*5u zJa)eExoHV~W_{6De{jm8tCRlh%utVZ3Bkv@^WI?C&sl@v1xMatQOQ5v;gp;ua*V8q zA_L;fe&@{TGM@QT4&1RnxLUT|4 
z_%_|*zj5863bU`VcV}7^W*F8B-(vH5K4~lz;>#YVf1nBjR-%%EVOSqh{ zDyrL8J`tmm0-@&LwsUIE62?$cAk(Bk7{z{2``<;Q7>jS9x_!Cp_PEv%i+&y_{X$f0 z7N0;#fwYr?;+kXdq@b``SMMprf4+x6e5gR`eQ?KN|Mwsv>^H)mF~UCNtMD5H`;&@! zOv`h=5>U7O+p%&V-@6nA(3J8oJRjc`&>8^Y!U| zZf+VLZVEPL8i9iq@TXWMJSOSLu7K%d)cdyx*`-~_$B`X0mn#+jQ7anjebt|6MWe?e zuob_6JUCD+DqDD{Oe~^WZSc4cS}RcVKcZT*9MA~arH$-GmjT%0?qIs{|Ks(13ik8= zAz&Y>l6t=f0`}`G+Ys0RvM`Pr`*Lo z32PYiyaBYXjrxDMwEhE^!^Oo#Mb)`OpQ*-AVSm90A@=J*rBYuRf7)4Js}GR?$VV|I z0P=$0paeiH;4qdH%3b%znGz(g8;_XsZ)HlV2@W?F#PRx6{f8-AW2yb4CO9bnOXweT zj90?+wWzE7c+r0H|K-YuVtbfYD$g6;RyBmd!G*!2I@J3(ka1tnG|zaSdGSA$f5o-Pdi2!zSo?=|zrzo`>%GMnrZXP* zm{<-jlSJDBH4l zN9FPe(le@a9!OF3?i79udy{u8{$$)erKvMkAJ6vgJnxMLc*d=TLx#|1#e`b7 zIKrwmi&s=GkEmu^3$6qDMQ54}d*wZ~c6x!*?0TIuw`j&pe0RbuxK*8|%kOSzX60fv zCoT9v;_d9WmplA>(Q8#F{efLuj_^FG5@xtzvAm8`P;FKDT@FFKM8~)m1LJaDUN25* zJ;Q6|wv8D&7H72PStzLgcdyf^;YJ3SWh>8`a2=5Q*S%UR>9!sT?*dAt0(Jf&QUR!F z>Kpxw2LO}tL{gwUmoX`T1Q>pUk^&hf1t59rQuR;NAjCF`@uz!EaLfecG3D-mU^z$; zW~yQ*Q}#-RnbZXVt~oRkdm}m-#-zYdl}HMp%BgQM4o?p( z7Zd40jqzpD17Vd7ej{mmQ0geIbbEx>|A!qxZ<;?{+re*jpDxblf@ii1EpUC)InUyI zruX+EA8lBQ{i@!Ob7hKbV3tllIVER_ITD=2W!Ey5-Vs#IB*)W(xm_|=&)J*zVP#O( zKPjde*_-nV4dZl%8Fb72op8b7SZ4O~D+B+KIH83kzG4Oae3 zxH+`c(IG(`6Z7~5SsJZAk6+0CI*oo=0CCKk%Kt>;7>jqH%YvH~$tb`xdQkfhM72g| z#=-1Ab6Ifn|K*Y(?*1X_(tyejS~(E;0hB)VEf?XLLhmRdQ&6>M%oHF^hTo9ko}sGE z5=2m@fHzWCBvSy%Z5ne*&Js%`IAKwyQ29)Oe!@c3??b(REgJP%oCBS(5}vSh-iKDl zeqAZgU})uH(Hq?$RQLt~H18kHsd>et5#s&Ix6jd|5m?LCm9!R$M$)`N0Ils(S+!>I z3REMKdX0#h1jI|-a6S*3Mp{YSBw(p}1p-*f0hTT~6dmkKz5j@E2DVNZ#(}2_Y>%_Fg#D+V!uHWi1ctdFY{Mwp^q1&9+ZC|@(^QaMt zn8OvGqq2i4yuxuD8}6cNYkhLb96n#!+se!=Qhxn;w>#DicSWm4)?He6%SEGJ4%$xI z+SXZJ+LLUHr$kZ>o!A~x6bRFzCJm#^_fETNibJNV1MhqSe_dT!c(^r@NioIEQHg;{nqM{*r zU7LQ*TePloR^vVMvyO~g7U+v%V?rm+f{TRZ0)w0u7^DN3V8kbV_#m%_$0JuKlAHTGZ{tY2A-cvJ0nm4=(E9rT=(l%Mn>wg?F-# zB>nPy{-t$UQ?zZEy}qF9*qYzh^Pc4$mb;z#y#KlQgy``29XCf}w|Lm{M-!pW!K3>I zm||E%(iF%y{(x_66!=Dxz&EV$VC57;8bpWY8$_`3@S5R?hpWLnEQxTCRDpw}3moJ+ 
z=^$H5I|vc3cCK^Xc^Pv^;W^fDPy;V9=dpQ7l9as!n=etol1snq@~h?B7atN{`ke4S z9GIMb_T8;Un@{s;>NB&|oalQmi&TRPeDc0`w>l{oW~fy- zUo|pOSy9gZP$K3Q(PhN82bo@z-o}S^@p$%le}_k1i%&$&)=lqpDsbVO_bOy0?0*FB9pk?~w8pj#d#6Tx_vx5hbCqJm z+&1nxBc6v(^L;#CJ?zY?nAI;5PTYB55Epg`E{;P~ZmTbw+e6XOO`gwdKGY418roo_ zsao8*fXtk^5wpVzw4U(uhMqbS-{)hlf9E6%9cc47xMX5Sm_<1I|M3Ze%G@qkm=d=O zy@KHO;3(WP7+7u~?inl%OYRxq;}!lNk!dtY*hV&j|Hwn$p>JCori2ZlthUWw!EM@~ zXq2J113(dE)D2`U$+jZ>fwm@k0UZX{-R4r(;y5;3UHku2su+R_=oK!YgXZp^I5n?$ zJS%%@qxp040Qc0aEy?^jy?_qZ8ncVE7Al}ivw#j-k5y&Wn#C(n0bRxgbU5M1QTZPe zj}fVc$eDo(=#m%E=}-lZF`+5x7)+A0I0vfc%A}f0zwQ8QO}x!%E#x4Agrzdq9dwR` zS?kF*l;ntGS@#}R-If^8d`dj0<`oZ0h-0g|5d^H|If1km;#g^NENJbO!KyWjS0Ij+ zF~@?Hroqi0M_mye)0IA^=^P7^PtWC)oW(g1$I66bj|%RT(&0|Y3G0iG1Nd$ut<{VH zs!CgHh^y<=iJpEw`27-?D_wZMWF_cQFyD;dFC}dt@0YrYIpHn4b+c%H^mqs_d@Vn+ zcDN4QHEHuarnNQWJ!tf;ma0+Wo5aK@P1_m^hVmB$-ZhJIY~tuKzt+F*ZtiYwzm+y+ z#Ec5qk-Dk7&Fz=(cF$eIGwNt>Q!{))Rzvx|azVY$@k10f^}VCJELqdbogb2IAA|MW zY#4Z>Lk*31m6Sdqyn4a4)l^ltIO#k0J2WOKNu^yz5kGB$?L+nz-uSC9kwZT^-SNGe zz07LJ-j(Yjl21e`E^4szj#_<7zp%nF58rs7wLW*|*~CrBKR<8(S$pLWgKzDAg&g%R zkh`tkzZdNaFw?_!Rl>o9R`H#d&z-h@(;J^U?#HG-&wBXm;m+xLaMLtXW8xjS0`wO( zmw$gwbCr%~rOnl}BIY8l1DkexoZ6wfnTwf%zu#*8ef)acEYpE${`sBqb9egeOUt@^ z?DEuMtvbU-xLtNbBSCe#lfcF6j6_b=k&|4|E*U!gBTnKeaFSF(OzVxuwEw92$Hr6r zygXg|I=H&h@5u4ORcQxl$iq^Jcy?WIU%p-t&(@IftkWs;?xv3$H|f@`$%R%Yny@&> zQhuaLcy0XNT#4=8yp^`#-aJW4*XFw}j=E2*%V*EKKfBLYtG{Z(OZ^vXr^lZEXc~|;vS#pt?$hcpLvTZ1b5*c4Hq{SVYHlI#Q=*W9S~yo8)y3?BpkUbObCS z?bJ+6gKlZhfO|l3Cwa*rv<6fiJ^?7|^uKL+?le3=OIT#xnz3rXntD**QUh7XB@}i*S-rW~!uv;T^1JCa^l|GNXBbw1tUa;ek>mc!o}1e=iEG`d(Yub#rYwHc zEPnFk3xFLl}(Dh*V8& z7grlX(waC{wb>j4VX!PQ7!P7_#IJKF?SwE`>KH5(ef?cd(b*gXVX#avm@1$MW7$UP z{sWyHOc-35lYBGvyozW@Q4ipnAwePSqj4F|Dkb7VLCPgE~6UD4Mjpc7-R$ zaq9jXF)c?T@n=egk%Do}YcRw-9J9fPm`|FMKGKw-WJtv``=PF`-cw4}GK}P5*N8FZ ztAa7+ZGti85OR#U1J^O%&B(mlS)&8^QihfM$Xelb@udtH+fs&2(p}0BB;itqPcK6* z_>EhTd}qqlixJ)5bM5tKsFS|5tzfXW{I;UT&a0iRn7el0{dE4mIW#uY%kP?B=;*FD&rF)M 
zZuX0A!%sb5?HJ^XVYi$dJAmYL_kbCMxFMX9BllQgGb9;b#~^$6m?Q{gR|TQ0Cmzb; zi%DBGWZlP5+BFP$*i|Bor3k{<4#DdMp=20q%eIZ7lv^y~M=FHZ!gnz!vW=V9l|*G(dgyTQihZI=pQieM zM3rXth~ec`n%cxr?UaUtz1xWnO&7->^^WyFZCbQ;VN9~Yv9W{x={VNX<-zYsjZ-c9 z1rHeZ`N#P&-#fc>2zWao{ZwLh)Y~ok>6aUooiVq2(D&}OsV9S-A79e4NFCVCqH9n~ zi$%fRN51k7;J?8z*Bz5B!A8?W5B9UQNN;G$3BRR1qLKK+qU=Hdatni@hOVLOnj!cO zhD*G=iWxOv2ZJi>4u%2Jb&AA_I>ptA-LEyC{GitAnGr1-Jo7BhEY`m}<6PW_QwiVK zj|Ha~-e9OonWF;2&pB%TiAiu~$B=-e$_h?yV5oM2gKs=IaQ&c@*M}$B+kS-Jy)!;Q zf3A+X?~WOTuRcul>XEnl)JNMRxjJs z@_DpB;C8O(*O90E`nzUsvZK`b#8#~lIT2=eKslJDqL6F^xL}JCr-vF-= zkuIJl-9r?qn4=Mb*NVU^D1Djg{}5H0`HV*JDqDC38(m2uy@83Pm^}i)t8-3;;1vW< z>C7oOvtuB5l?A-gvu`-oQ(uv`LIF&zUC63L;6FYO{ZPxbHBEnv=I|} zF$XYIs>M#G?5$9#79_u3#3^}&Lm0ZrsPs(+y;KV!?8YI|R-z${*)h<0U5yWkDCV@Vq!+HJYjN#YUB3eheIey0Ve;LMJJXjc}rOFt?gfZe6~KhaSk zO1q4mwi4dz(U}!S0PaJnT-a%gn3b(uScMf@ko*C)|C34b3I`&@tX0gcV5>)?NcTX2 zs44>yy}$uVXI^1dn%N@|v&xoP;aGk=g|rc3R;e>92>yh6|Aa|!X2(FxDhp<%E?!4K zQ!NCr?4_K}fouf}B>#9T2yQEw!F&#VpNM+Jyh>L^)GL(yML4JA6?P9qy;VrPBe0&h zd%ScYZH1^;I@Bvv`qf!hrI|efQLpT%S9q|$8BN-V3Bs7~E>K@W1^N<%N5$LQoPsku z2BKbBP%qVSF0|GAd7QRF&cU`eUBYV&s$h!?4)d+KA1J|57c)zLXBDC=NS;52Q}PPO zBZRJ1jILm-kLskY5V}eaT|wzj|FJ5~>=6iEWs9z0qb~-ejS#v@wO}heoxiT)6r9;H z5W30&U5^NcC9lKi@Cd9AK05r}inNt614+?sHFctA$r=RM!*-pRAHFD<9}X6*L0Cx6 z54RR~u6MI&e-z(?u$mvK9$p`Yh(lQSAe@x$9)#Tz?m;-z$ziV3=g{P#&P(~BVW$4E z+0MQlFaL~6{^!!NJ?V1S)anl}-af0tolDt!cFkM4f64LWqLHHx<#ykc_+`PZHAeha zX7huv4K2;E`zSZk!@;z~?Aa(Znyi?=SXN0cX`+U^l2e}RE&U%o$ zFhv)3Ph_4P+8i{edk72&{Bq&c9C=8T8L#QMHEzp!J%tl)^r zbBbHHT;^@*a&g+lcj@XObM19CJ=_#*%rpWAD=1>lIbl~iKDah@@+#-YgGO4UUb~(7 zrRC$e7A{@iS``!;LjapSXnHa^@?GkPFQ)j{SnNIo(k&W`KQzkD=hGG%p}jl@SiFn7rqXg+oymtWh+~)e)4PJT=u{<#8OU{`ti`~s}Sy_`Tm`R12|7OA}mO}%PEF=#VB3Y2U<_OX=m?UTM4n(pt zAz6Ax4kvsqWzt%rX9kOmRK`^mY)Pm<85cCyXw0cO zi+e~A$PoNiHT*&#t9_HSmS`Yj@eWkRtxy>kJfjX2MTBzzqFS?f1v)gOe`u8Syb)zw za0#s!q?wpVjKwVw{8kixLGpUTI3;KC4hhZ-RK~4*8J8-UIAguwYRW)cfwY!r9Aj|~ zbY@6+W>5s{3~eI{n1RkTQfox8yo7S@OeiP83N_cw;nck1(FhT2RT3;7jle_n*OS&l 
z(MVe5TcOqla0!bDHYR&j+#d`oKoGh$#FFfe$A33vZ9 zi+7+Zu1s1OsA~=(Rkcnf;}~)fsW=F|n5%>tn4gC8y*p-hn_? zCLp_9FdcguCXrpR4ls#qV7Y;`mKLo%=rhKB%2+>dY=o_{9S=J#oIQS+gwGylTujVM zOx$%LQF){GA+&yeZB0Yh?b}1vj~J1NIb7s9DmbXXOl1atxQnW- z^~oi3_Ji5`Ss`B?pQb6wM8|u?$WwjE*kZ6(00<+)-DcedLpvE>#LZY)i;m- zoE2{9fi0=E;ilYMw;9U3b=FUBWyC5-^a#TZZHYFk)vefyHakU=loB# z#|2(=U&!b8GOl;oaot-d@B2ZjCyKH5y6gQDJ%`t|GvO;1)Kt>3?c-LblUZ9u6V<&p zGrZ6LSk>sW`JDM14;d7B<#au@FFGYV{pJ*fGuQr63oYt);$Ilja( z?0MKZ!ZEH193xfW7%8M<>@4jVq1&9+ZC|@(^C;XmuJ9a{9aO@T5iMok_#l~Y z%-2oIA8q+tzsUBX`}kQtLCHt$X7}G2;bRr>J7H(%4*joXXDwLQB2}xN<>|*`9{UV< zu{t?7&3tLZ;L&A$DRnP>pEvO-ZMYz$hc*Jz8E$pbmA=V zjg10>Bnb>+g&U+R_5P&}qd`2pW_aSUYA_E=B21DhFiE<=B-crkY+*9V&ULOkFJlfV zJjWUiYG4#|9-C2;q-+#yzC-~_F8!{{ua<9Ld`NidbHe*@U~>A|cefgCnrzU`@$7%6 z7UnNbPPh@W*R+kP&&*bHqVK&dQVlNf$@|{j>ZDwlp;qC1)yPC;MLGLJiI`hNml4|@ zWO_|{8z0)muS|QZZhXbSS8&^%&dq%VA62Yv)9L_ z8t$F=`Kx{RE0~5}^SufgNqHJ_O@b14KjGQ(Rn%gg8uY%hJBpX)-F!H2617B;Fc&v<+l2=xjhse-Q@Ya=0n}E zsG$u;nyST}3&_lw8!*hPjb%M-ukU~hyKrZ{|2V2FQ@>zbgt+DX<}|GL`Pl#SaK4&1%X^Ijv1(2z%-{SPE$ zdaUa1S0}XN6TdwZbYU~n?DR=Li;Ih=dB{8TZEMq(+-G6u;Oa_u4p&!4@D(3B;kn}K zTC&;#th!C$ZqeXw1F*s1ZaV5KNUJjaHxgE5|8Ml7{Wn7AaL={WeB?)54(;#*JIwW3~MC5;6^y^L2V9M`I$|K3Pd zX;5#ViEzttai?T_bu-sgEg@3Wr`ICFHHbu7m-M|s2TF6Nu0Om?z%#M^19 zzO!5VDq2cXazpIajjBUTd^pE4%esa~DQRV7UwBb;wfvOB$jA*BFB(l`sdt%j761lT zf8Ar?ZC4%(531#L{{|quZo|gB?%<$6bBnqDE$($|-b_e2{-w)|du^l+WZsyu>Srtc z>w&93tu#5eIU!~COQ#qQ&*i!YZu)0_%4;96dRn)RqT>|Q560cjkql3B3caV3X=?T~ z%5jH&$?Nq)A5QLPr#HS8VLN>heE64T%O`V=A6iog*m{o*L}Ui==P@-T2s*Hcb5%ib zkH6(e<_AGaRS<*(%?pImNYDVd?BaEq?L$O7R~Do0LcGo%ycgTL*FVWFNw85g|6|?a zWbbahm^EOth`7E)LY(wOZ=<5|eS99UJ9_PfKjwqx3bQAi&W+_t41V~N{m4q)pY5J> z+HR4YW`L#cR?hNPT|?N?i(`*!cAMN=tn-e(Ja6YKmEALlXBSK|;Wh8A{+f+hb-w1`|MBZL^fmwH55L*I=A9*_jy|`u z0;L_LH1_vDGyLtfoYJEC2Q_mKYI!div3}*)Ki(@&8IoEO9PHS3X8Vaf`3usZDBVM> zp)XRt9yt+|sW30H|Hb2(d(XU#Zavwe4eW{AC2?8<5hvP`1b_JBN&mODoKm%gt0Skt zk|h26CFxLbkpIC0ft{)cfJo=SS9*M#jkDOBy}Aw#H|u3)wV~bgg`9;P&fntuA4M!!`$y7#!*LH^zYX!+ 
z%~BpZZCu;q?5%OJD$;jE=C)}i<=$b-#@_x?-2d&F)-6Q52cfDcD?9jtZrEPayIgiB ztI!JZllRQ~h`-xEEA7@`j%JK=hqza5)JKV{-SHZ`P%E&LyOzNR3vn^Ak(^s`d*wpM zg{O7@@o>}d;irF$Xjdk-N_X7p=n?%b#b>l3lD|wAg||)5;PAF5l7IiTz3tz}fKg-K z_CIkT+S1;(Y}TGLX|pc;aPaH7vHZrPH46O|zOOI77Wv6|TS9y3cCT+tSg-iUg4TAFZy$?J$QP+-zKVxVGA9)zIa=A=-Ylk|=lpqO$Up}-w6#yesFWZGl|1TR<& zEm-u97-W&@uMia65tAMF&u0XM-4Xls>t__vts)wFCh`(7@XO~tb?y(psk|qpx$xV? zd+IL+zmB}8p>puE=RJ+Q2)`uWlgSH0ezFel$$TFC&hefW?A)|P_q5ub zkYCf6_q0V1eph%;dwdA_y_0!Qe^|h;nD=xtfsj9SkoR=y4?_O33GXSkD;G3R+trtlTa`6}?FI12WqqjvM2OcRJ6 zy4~Tax5*zK+PrhYYAs**m+)_@S{SwcfmiKdac8k9JBt8@zEuJ+W)=txeXBHi^hnbw zCR2Gt8hDOd^7ftVtAgCr7Icm)l^XN>Y0QIM*C^AV0=EK-^A_7+>8PaWD7*Y*P>nDUAEQ=E56-% z7JdHjJVS4d9ToQ#o}VZvRl761(B+nmw~Vxz<#nqr#ij4!-v3Ax|04A?_1?uGleCRR z;$vfIUxnDU|MdJ~k4Ju)sgJuxmxZq%y!dvj;&?*v$;0^%J)NAtEm~QuQLrghN-5^;+Q+{h zy)GSeBsBfccb5_pJS>0SGEMfXiDUSbzGXq5Tr$tU|50F&`*~g2*ZxJtDWeRE-xVAh zsYgi9bqW9+oA@}Q_&5gfa9H>t939Xn2q@AI^s1t&K*aNxs5?9m%6Acn-TbaLP56mp znXyb;0fI*&b)BEDYGMRYf+6T}*>;m{$+b77>j(WB{c={A)cS6>_N8Bbr(kd+$UU@| z{g#CC$hqP-w~OjspJNpmG^G4w!XJ}0ZW^x(tYv+wkp7feGG&sB;mwsXYm!74lzr=A z>9K3mL8so1ksD!sTK&R4sHgzzlyd3aoG(gOwp#agRB+NUx10Rz zs$?xDgWV;ylhn1`QqtMQZvI+c8EdFUZ3jNtF}yG8H7l5sX?M@ zZ4w|;RA*7?m9(5pArjnf@uuh76+9!Cy|PK%<@zmp zr`wOnjDMAQ*V3k9^=gBfJK9V7MbR-|wcXbrjd;G!?9-64)ddsF67R`m-dCE_7RMoXLMwp87s=GROPs^Kl z_Y^MB?nz3i``Fe z^~C@FwhsmXoB05u`2Ys<0N74J06HQ7)WQySu+dcr{^BFx%Sp;ePErY|LO?0%;#p=Q zreXl0bDbqNw}~Od3IxH|;(JM?`I$)lwd*`$zZd;I!&^hYht2smZQWM-6;|Y}@Y<2E z!|9Lre-3V|czAw*Vc%h|_a%I>zG0bret0caMAsANiX;2$LEmVCOFwaDUTo3fzp-HV?N^|+cE&^Pbp zg}n(k`u;es|K~+1&Nv^am+r@`h>Y-}E2nx+ElMspn-s6`bFGEy z9_LHaH8S(?@9Vp*y!CQeNzU}|FE-|n+FgD_V|(#kpI4bTu7vTLf8}Fr;c6LcH$Nb% zZVo^|EFtIL%Fs~I%Q|Rt{O;xel?JV}YdJRuw!D%0bbds!tN+18drJFHeK7sa*$Xek z_gngoI;s)BtDCFd#}O+}%8W>VxToB=aPyW<2Hsh!wuT3nN!08*O?`48b;K*50mDTG zyc)jpaZ-k1o6Xm6n#R1^q2)n4tT0*W?9#zF1Y+iqN zaiBILE$-rg*s_<}f0jPcov735vEjbc%l!`(Y3W<%_co66edg$3tE2mImgqJ6jIM5Z zvu;}V8!_ot&iYwb791Jhu4XwA;q*MOqg+sK&g~~fab0Y)f9AYYDYN~berEqRli}Kc 
z!+a~S%UA(?Bvsc20Le0OghZZ(q-KdX^Y_;VYL0kok=F(a^8XrhGyIR@fJ2JQ-9p#x zTl{WL4Eylx&kAhoWVsAWN7v688P2mlMIJe}ZsW=qu5KR2iX{;;_jlMN)Ywq<{?l%= z@spjpyS_GJJPUSZ<2JMT-A2DtyYDLqRHT2_t+Jw3N4+?9FXRc*^CsbC)m zRcW(T`oj8VvNPY*0E zQ!x1WL)IinDZj+1l?ORqh$dK&FzD`i;nRAH`qbZk!UTJP^uu1!*W`DOXq z`5Bj=4Y17Myp)x(Gk8E`?>AlaFL#}5=N&)*k0#8TK&BIvgfSL#^+!QLIooq1{iiN85Y!S7Hod<)+oPhOlz}a$ zV>`D?V1LP*j&7YRLgJ6;axjK2BPEw6Ch;y#h@e0rw)jF4HY zzeDk6Nt^BSK2IlVM#Y@J-B*sWEc{rS?vk*vY`jTu3M#e{C%Io)zj4Wmfh$neg4h9cfCt z;qM1_?-bN;xb8dmm70zPD@qEV@wU7j-!3i&8gvN_%)phlh(>LWPL*Ezpmc0L2=(23B>%M3WKdWx4<-8dMrB zv`lAQX9gha6c7-sGdByJ8K8JxKu~ZdVNhp|04W!%%yp>D^RjEI%y64a(KMbXHFE>1 zq~;mBYn7V8+J}Erl?-b8GjCwyqvnRGMK(T!JuU%uvh)oTfY}Hocx9HRZW&A{TwLBs+}&5TLS0ODv51OiLVOiInb;#e4Bfu&|_`Hxs{W=?Mg5*JlbP++~8QN0;pT#7(2u-?qD-V8Xdj6^s{ zLyA&oW4*bF^=4oYc^fgndNXr+Gmwa$ib!C+nNhtNU|c27zp>uTxZVsvt~;XWs1Af$ z@nOALC^MS@$PM!Q7i99LRC-u%Zd$z=5X5j%OkllP=z22%fC-ybtthNFGom*Ggm`ZR z0qf1o=*>XkR$cxRrJfU@`G@sp2{Dmrr(+4una0g*1{Al+`5$;@Gt)DhfyZ6)`!85; zZU%ZYAW6K2az&?Q_n}N$;GwbC-t@H#q%*J~2f1@`8n51_I4Az^OoY@R4 zQdS}sSZ`)bZw3&lECd4U%}naez~X@qVuAH$Ve8HGNp~Qn{ZAgzz$%DPKi^G9AxDmrFuM!x5nHKFC5v)IM6BEzm4e|Yb%AEzS+A5=QE)qW`WolE(j z#YlpWBXWo_JURDyDFV|IeH@Ygkt<8fQ4_6QEiIjwy1DQ^$e_d`k}AKJsCS?D%}Oml zZYGx-x0~Bhg6nmV%N;!|Qp-@|WzSv)2EA_ezuc<|j;g`S*0+2#68jFYAsk*2OGF$* zBc3b2;HnQ4Q&XOI>n4{as;ej~Yqnv+!f=Np9Uin1d$Rr3c9U?Gev;~vk~@FJ?w$K& z$9b3MAMe_|SUciT`@zI5vA^tCZsGcyhzZKhg&xF1qYdnV`^7u1=(Cgw}e*InJ!xylLO6Q>WU0WbDuo+S_5U}Xn7||In?`3>P zVDRo%sJq+kwC`5|ikF)Z3M@J^CprU(Y$-$ni_VOS&Hy8a^#8Hw%&_PTICA@;*dSdg zO8t#R=Oz}Nfk7TQ{{V~5%!$rG;} zj3-Q6&Ii637o7n}kr|4P>OiO!UxR_*InIrl;|xIF6d)jE^2VZbk@LG-!Cl|09+vWM zTG1I0ymLSZu;?sw(HQ`|e~SQM(U}p^86cF9-@m}3Gc%$yP$>P1LV<2VQR+D?I{(uB zm=%!`UUcPDVp1!t4u6@IR@P~GEvNxJ4|y1%dQw({1?VI?`!zrWa+KWcaR4UO%^ zcYR)E-nbIRDk6yTF}5zyAm7bz{>8w7gax5X0E&;~{TD1cGc7s;kIx-ZXkgK~8Hmn+ z6}8ZY(^V0J&l#Y!GDJ{n(>tlBuL6{SHTBLam3A}hWB~iD z>`7Ll%|<(RM3iiR*V2yVVqzV#T{#@{)@qyMPIE^Y9g{Qf->LuL9V54#)C||u)cnbw zLA8QTIL9>+ywbWl=eYUo0CfwfA0`5&Z(q#U#JVNDU6~ 
zja6I0StMy*RhkQXa5J{$lsNA>HF%FN>;?ycK&H1Z#W(jti49r z4@!touBf<$dix%E)1DDFy8h~35Awy8U42A0Ic@mZ(Z=ic!cA=UXtl0)&Gx=AcXr+& zdHEMHq?@06tkaw>lhxT`&pDPadm7iP5D(m|qIP=WVVONa}-n#00KI#f?d8+EHW8gb~*Y<~Z%sR+_ z^$Glr?r=h!=q^FI|6R+gZ(v)b=Ih4Fb7Pce0MnXs|C|{MG6Pf3+G)0Eu>#```4S7&*px#>i>oc~@YdS$Lx|ExfqLOAC)u zVvA#Ak0i%RY%+>Xj*V3bq(?>JtL}NS*>QyF9hRAxX&bAHBem*G10nyUf!oSWln1i8 zzt02ky-JIAeRnacJOd0Vvi`%$GsDU=;OL-;Hc_NCMJdIx^4!GAGcf2Ff*4@snK|Vd zNJ#fbB(U<#sPYUjI!7TG)eT!}(cNIf7Aw#CMbR-|wcXbrjd;G!?9-64)ddsF67R`m z-dCE__J1s*?=*W&sQRjZ=&gHK^84S_flw>H1_Qy$b7Pceh!B|2OO{w(oZ0Dv` zo&kZ}e1rfi&q7z80YJBx2mn@|8Bv}Ag2G}10V~hUD9=Da5gc5?O4M}xpmA7ve&FtJ znPA%~AS1iaMZ5I9ME)+{QCUkJCpk2|@(d`HToDSaJTt940}mC_|G>&~Gf^@(f&hY1GSwTImUJ-i?*#|3-NRFsg|N3_ZP5Dn6_{3qg4X7QIao3#>dd zraS|PKA8vvR-Tzuo`FR_d&B}O&%#!o$y4w7#83jfwf<#@1|IY!n*BTiWVI%2=u3p9Yt&cl(f)Q?cC8`=|CYg_kS)&CSfL zc$^a!_rodo^8n3?BZV?noL2wBdY%urkpYE9dlVPgdS=vmo=@oUfM^~;K(O`9xb-}r z7|w$-SQ|mX)^k;L&VcpoQr&tUavy=|O}qG7zxCYX_ttaGQvDw_y;YyiC8DqH$mIEV z9YScY@X{91WvG&Q0i@>--N7#J?XcLRHu)JgR@&1Lf!ac&fejKYKY!kM)N(hsi)@?` zF|ms*v2!nnWCDmbinyL*kPk{O*FBR>O>F;5W?GI>*u!m<3^wDL0`7&jLo!1A-u5WXZoS zVNiYs6ei0N3M@Y}Ek6SfkOHgj1z`EP8OYCoWU4Qa`v|&1&!U^)!!k)#thH9{Z!x z_56&TCZ$VVO&OHxD!t^MD4gF7^RTyryT?2Eo%8Gb&bcwH3or>~iRCTY-A5Pmgi zwamkbCxc975jU%5e%K6fL+f%Rtw^=Cj~A%#$2{h4w78Gy_WMnJIs+${8G zfMThIpw#AgQjcH-r~_*{o_Of9hE#OS+Khb9GiyT2J7=+rXGMlxHU9A4T|Z7o4h9gP z+G;-(>Ok^C4)X~u0>E%m5Ev{L3R5g3XFylp5Lh%Dk*F<18rUkS86tQFbVD`h%DZNZ zOA#3QW{c7)5#S_*y7R>|pc|+`^HwV;L#)>z6nM1~=4wU14GSb}TOkrygJw*F=2ho# zJHT!)f`K(?p;zad_`UTg=&jEu2Jvt>s307qNkus}$1|XtP=n@W3ShA01Y&?SXl69% z%DD-Ser=Y46?0QVm^rDq96gRV^P>yr@}dU~f+d{~2SVhy^oWy>{3EU*U6 zhz4ERvE7h^Kwu4;DGj>vXm67vVu3YiVQbI{{1W6jlpu?U88F1K`{r^)V-QV){%2TZ zRUWsrG@EGc>N0tu>tZ{5CtFKPM|&qX7oH()$0D9r8q!z);u+GH4skQN+_=NsjuKq2 zgIsQcX5HqFs)2!I;()J{)hFCfOrErBS87Uf{{c2cw>Z{N5j)Al}7judvLF z)~Vm zdN_r*9`0z15Dsv3Tjpdr-qg~v`rzEU*1~=);&Ihln8;rX1NduUVAWbUS|?J=P~&CK zUIqreZuP(1>o-B zUYQa70?oxFzsg7r4)BdtTftc*Y0j@m$K?8@x^XxiXv5djGW0Sl<{Z6Peyn}R;vvh1 
zUVR!h+o)O%$9m$M5?4`}Vsz@nNY+udigf z>pm#R&xtOSfeBdcS`Q8v=_xB=bV`Knb#BGXXBlr_TbUN^R|HE}c~5^>z^|D1bTWaEKXs7z zbm}ql-wD94|q>cLkRh2A-t#DqlA3kTHaI1SVF#ZA@8YtE+PLn zFGLl@+Yt&9V|h=~V+n=M<9JU>l7xctbly{cQ$j)AhWDhSK`7{M<~@z(5(;C^^Pc8q z6AJTH;7M_m2>f>Qo@TBg6iwrKPhJxVMeYIK(>WzVG0dI!lx_h(OL*!oVFEw?rb&Aw z%y^y(|GyqX2NFGW`O}~)4*fF?8vc4K|Bpo%qCrjH1s9hqKfj2D=&ELx(+P)pnTH0v zan_!gd-&Uv$5J{LhId>`QeC1-RqiY-c{r6#%czD6F#P4vf`v6)hBaJJ)c2i4I7pL+a%_t=+$Pp=fx!WF!~ko!%xSnlBH#)l zfi+x4HC%uZI2ysg8ZP4+E&vI7R40(CI-1wM#Tu?qG+Y4khcyC1CU452EY@(FR>K7Z zhYJw`tl!vz3G$nU>m4VMuO7a)WbqbR@{E;AY~P&np_D9}9}N{pNIkhOc;A~R7!q2r9s(YL-N!OSh**FasP#ph-P+$$0X$=>6gziE-u!h?VG+aP( zvIjzfHQcI`3nr%#SB}^@b+ukD)JhL)xI)u#Az;qTL}2LYol@~(4Oa*nF0eS8g;-z> zmoW_&K!lO!-&n(CQo{un=PC1VX z>xB(yi6WbWlu8hv?EU8_d-M^+ZHev|#SjC!1VZ5;Kt)cSiq{gq57gt6*18S*7#u+i zC@$?mDDVhk#zznXkSk>Uher@MixI>CB{C94No_h|mH5v)<`ba!t9eVK%1++QIvKz| zD|?caXtU9dU6+V_OpY%i?-Ns20nAkx#!xBX(o;O8DsNzl$AfPh zG58ixTqn;z@ZekK2HygS8$VG{VB?li;}&4V>_9NEam%oA3pnDsAso0apA{yxzT2&R z>6hOr7~BYQ5A9{YC87L3ughzE&>{XXVt|cX=8RkDQr+rZx05)zJ zF>V1u3I{>J#w|0(El{}s8d0DtISo7Ct8v~vcWx|UdYfg|%Ct31>UhMSVCWwfT5>%O2vnbTOk;?5G2p0BNo`WWz4t*5YJ&^WaWW3Hg1_T zZh=L*4Pt=@-wNBfrJpLn$zBHOKjTv+VV)|{y<175`%6z07j!A7VRbq7ZpUeM@S4#| zjF^`{NF~Rwe1vt|GC$U<;&UPoGjb=!T)lSTgo9LfDD@z|58K4vEifqViWuMyi7-1P^qJ_;t13ByNMP@lQSTODd>DXWVDFZ3 z?-qc3B)@-E9SF7J!``h>yjuYBc`Aw!GI>)fJ?z~!t#=CuzWfg%z}~IUy;}fKM$Z4j z-Yp~EEkOA83`GIAq11B%H2<)7i@p@vxJkEwqMV$6fW2F$y<6Z> zu?mF-_HLVjcMC{SpB=;@tO@nP>)2;MEQVBbS5uy@OtcMBj|%|IZq zcgv)A3oOK5A{N-Y6}ERvKg*CLlq8lS8u%buK^^Skv%qk<~fl>MeljKz{$V zHZTn=&ufCgBX2iN2?=s~+CwKeAl+%=pVu<1d=1^CgNJ6h9kuH=UYuBy>Hx`{RU^fB z%s>%CPw|xUT!8L7HO1qRw~ZKi3o#;Xi%?+omO1qnNOb;yNMQArQS}yJbS3ZqVfB__ z^%ii*wnMQ&+B=kb5UaOMtlk0xIr94_SiNOVy#*5T@+cZ`yGqFIs_NI1l@v}Q7}ZTr zYUS5p)6+8W!2V#ry@BzBX(G!^!c-FGxLqRZn&Wn%iRZYL3O2`$arG9Ox!s4=34~hl zH5f>6P@uWRT>lmYM9T78yA3fCvvn=zebOp?Av)7rKW&k9iZNRuZ0QK0sJ8$_=`RF? 
zOx~1A539FLtKI?vmB|PJR&Rx_-U5Ig_YnZB-ZG-z0)$?q|BTgJX4G4tp!ytz0^QA_ z)N=wf|FC-dFV$N>(c224!0Ij2>Mii-L(c!g>TNSnZvja^ZxkX}y{($7!?bz}T-257 zp)m%r!0Ihy>MelKOhF*9ddsAG3oHiDK`gL(D{S?a-ad!3j3EVx25z4- z&_1WTIZ}j@_DTeV?&dVCJg2%jmWK`n__eSv()9SGwQj?{(~3Wr<&TIwe&Mg2)U>6o zZlsKr3jS$u`FghxSveKU4ZVMAFH?BAvftdy%!ym8i^n0xr!lgCmz7KV3ROHy5;N>%PGEO|QEdxv`5)FbhYa)&YczBxkmrBZflwQh7P!nlTrU@Dr6<6lE7ot7I#+Cv zJR6I0O2+H!X&&jS&vV^9_D7}b`58M+N|(BtGAPwmddWRen3z7y!`==KU3c;iU9a;G zU9|}SGe-x3p{I9B#fSAs9eNk%t+%6JxGk zyKwUAA8GT6|FMW2>GnNgD@8xa02GThAQV`?Wn8}nAXd@{2-a_#g?5e83t((VqG+iNOan`Ftl$1O`YnL5dx5~vQ#_?a7oY=AP4QU2ZG?UcC>*R23asBU zr{4mJB_)Ui)^8crZvlqmS_A{@w+!pIfMZ!(go8AADD@!LZ<|=Z1qRN$5Cg2=GN<1H zi4}5)1lDgE)o%fYODKX-y?;o(c-dgv6HmX@FN%)&s_nl1XvFh%W}k+XtuB~YmUvGl z^S;uYw$QZdJI!9Rwx*REdHz=&2({vCFc3Wbc6N-5%e?7t_Ir$Yma^jTzM#Wz6?8Vs z-wC^DXuHvsAYyjgYXOjr`~%r2{(&&)CW%&g1H%$|!73asBUt=|HV z^&b%rtlu^R{T7gLHzOoizpXmlV|w~6aM`F>FBfX1hxJ>bO}_;&o30=*^z=@t_^^H} z1pO9RY#D`EVEvXc{T4uYCm|46zhzRt1s2<8BNkY{6}Em$HU}3Fg9vD!Z_h_GaC30r zdBuSrN1UBL%dp7#-L2rR?^O>=dH>hVK~lI~Kxh*k2&J8HjfSuiH61@_9NorgSV2y; zajJ zUrnzQ&%15tyxU4Jdm~V^)CQ)3MR`pyc;4-%DIq~lPkZPD2c$bq{PS9dm9L?jbnwtD zx1)C5#)}hcQXRfSqqjzi_b1Q)=qa93lnc;vr>1x;+%{0ST|j8@7O#CzP>f*VmKou8 z0Wp+E;s6Jcz``wK!tDY=hX*6zJ%WLSTLy(&URe$}0!jawEXyhNAQo<$P`Itk6o()p zD(iAA+%hBFR%VJnwj&Z)xMfVZt?ZEm%ODul{Z4A-*I>UB3%3mwZYzNtA@9Fd2STm* z8Vm#rw+$6;E0g!pVJJe#Gh`7H)+v-15%%fWpb~C=}>M4yB$Gp!tV|+kYY4^0rEV z;?zTg0t>fH3b(xDJ>YR>G2(%RTZV;O-Wd`gIr|kM!NP5G5N<2Eg!$IXg<9zeaOjHX z-TwP|x0UHVoc#U|J-t&ZJ}lfev2e?4*}~#=;UWqWEZj08-10iM0OI0s1Of}UObNHV zqdj18iM;=fg_yQ$pxW(OsO5gi<8s{;Ty;>&31smN{9DH?_2E zSUFC0aj2M;Wm8%vlb zvdkn*C9N)wyhO~gkTfy6vUMGsE#a)Esd-k^FP`(*Nae@{$+KOybvbxjd9p{M_va65CCZ`P_JKA{NUbuI7kW1Z%7nXJwhD{m_< zXJX8e9;P`+OkMI=+9O`+$GC0_K(3PhGuCaJg>DN_u5U#dqc*FPdgv-Z9)o4J^rPg2B3NV{}^p6LS@T!Mbg(Ym{kFfm?yb#e}iNj#=lIy?E{XZo>o7 z%>S8tC-hAU3b0bsS2$~6wGNYb66vE zTR;&v7NNkpEpxgpkcdx3B(QGFsBQ}|Zdo80ShrUJl!@)Mv(GA}S#rleyhC8bWU_RZ~t zb=$`1wg4o#ZvUBD@iiC-)@>W3+X9dj(tjqCH|6jZ>$Xj++X8~r{%DcHhZ;gS)Y#1T 
zoum1_^I+b6u?J@n0Ib_GqT2$5{|!PAux`tYZVMD1Mj{GyABR%UVcqtZ?#HZ%jPRl> zr+Q8;N-j8?6tD1et%d3y=S$L|)zzqm&$q3-ne>>Pe}{Ejrgd8gjVJ#@p@DVVW}w>w zl4s=i->`1mTy$ID^1K3tN^M-Im7V~Hu2{FNo>MYjUr+N$SACxA?y)~AUC+ z)s#W0uF^~HiNehEVIKB&FiK}9f2Vt$ztiPU%S+#nz|hk>rQ*Z7tq^ou2$IYmhy~Vd z8Pjb6#LF`X1lDbt)NO%9whm%}bz5QUw)Dmbbf$7{BO16dBBaI$UAgT5a>Qm z!%A|hz20(9M}DIQO_ZG>_QDBeXO6j-@sPPqjV#pL-1R&E(pZUIIK z=|5xTmSN=fjq11y|xou+Q78rbdg&1JvmO14XNPJ$3NMPlbQRNn3d?Ed3 ztlauotO-3eX5eyH$MT8;A0BHP>!%{{QPOTb+td5Z*wlUU zQrh|oU$Y7h>)q~NL+r`u+!*B+fP9xn%N3ctDU}{pZktxQ1q441BLrBv6}oZ@0RC1& z0Px9@kWQAi@NJN*d>dp40mpk4#}Ndq+%luw0)=1w5CyuEL#gKkX#U{|xBqg&ZAU_x zxQtL><(6sX7I=t|^AE6c+YFRjK*D~8LZmjelX|OiCW}ZAAy%?nIGh)$v4yd*T6?&e zT<%CeZbu2O*Fi3Kv|dfMvqjF+Daik8%+2sWiUSTQE_VxEw{P*gIWg?Rvp*}at&`<4 zEFE1xXJk0f`V@KO*t(4?U%0w?7%P@U$lTvylThObsrR3Dn~k6B)ZO(>Lc9Hkok_*( zzdz1gw!qb;%rlxe&L(=Cwh;%j^B_N$C-8ImaLDD#t(@!SLap?$aw`?=yUy{Tr|0&B zyE5;)s%;r173|}nDs8q(fB2!ksW0_|+tsZ08l~I<81dE!3_ZP5Dn6{-3PHIA783gq z3#{BSrrZLEHr){jtlToG+yaZXrx6RR+zMN{rPt)02<3LVhz737nW@R831z99hyq>6 zX;@886>_k1+X&|tP;{7pP+;elapx9*bbNt;VCS}3IJW>rnwf8bh6-NZ4y5dPakZrKyiO1YF#<>L`J$}{+gj(@67ziG7+Zg8-fb`mhfRM?XQt4sm zwrQPPK%mwMA;8Y9(4AWV(Ay6Iz|Jir&MiRbD~llD^F3zH_u!PZAL;+or5s8Tr)~=N7nV=GDuETIpfu zR%p&GfEm0FfuW~&O2vnrTOl~Nz(R}kpRsexm~#stwD+Q*z|Jj`&MmOeA^m6U+zQ*d zrB7Ienp~Is{trD};A(I$k-y7#RMt|*Ne)f%(Gxm~&V;faIsch%$cUK;2o`Ufg?I~43QW!=&8(9F?6a~bS&23q?bs1fvH@O8JC=)yb;x$*aLik)ZH_z5 z9c6S(&b)u8{)2an+;UPgTvJo?Cwr2^9F3dzt&^0F*?;;n%`lPZAm8U$t4DFg9YlI@ zV?(l7ChXQyQRdypB!+~ulAZg^AwrVc8x?kIC6Otb-!qxmksu*1w#7u<_gt^X7qXQ_ zXUWX@|>q{U1zVK-r2iiSy}5)Ez{WlM!W?uqq-q5 z^b}7i&IRbbQ&T(^ZyO=r0*cWm5DF~bGAG^wiLn}p1Qu@@6>kB?xa$Z87H=6AZvn^n zNeBn&?ojGMEZ#P;cnb_Bkn^9hc*~r43nV7ZLD7K4TSmoOfMN0)!NB4zrQ*Zltq{apU}3odvB2Uj zW8y7&31smN{9DH?_2MT_4l>_+XwSg|3+BcWrim;w2~$a{iz6=)b1WoHjIL~5$7V}7>uGA974?hf zJT_7}azXNJmu+1R-c}xMDr+t)EBlM9diku3WkE##uCz!8 zDOd9HFJeeHKlfOtIb9~Jv&G8Wip!Z8bEJo94iZzBJeKx|R|zs|-&zuSJRo*y2ne=s z8MkjOiQzmb4mJo1wr{HnbVlvlN-#@))eEM96*{(W|Gj-%nc^J-5Ey!jr&Q<-O!3&h 
zZJ>Q?NoesFuVpfrnBLV*9oU?c2&s;WimD!1gUO_HAXRaDR$OVEdLa`?j)2veF8{!1gVp_H8AQ zRbT1^Laq1&Xj;t+43;VB*hxvL)2n@RJK?dn4IO)1nY`C*K|sjlO{w&-ecN>QttFw! z+dr)%=O1DFR;c!^B{7%>z;io_0c_t2-M+OXhVUSGwMP)JeJgzXwzB!hCBJ`6*K;WK zoB+)~JoffqjJ>VgDs5Co0fCRcg@5$z$G4&5z=q}>?}3W2DID>@_ASHqE$<8okZd8p ze^s01Ngb^-voV%uiyW=Pqr(p#;Rr$mMZX*O-K(RLtp}^yB85eK?i2n=( z1Pi#$Lcj$m`w9>gEZ{a90T;j=@TwO~1B-Ml;QluPE`T{GhrrNNJf%n%pa)M)@mRoZ zgn$bu0*@mUSiogYzy%VA1|kwzz-3gx1sH!^Lol#_%dmh8ID#i49HhfTsRyxu+r$Dc zFgS7_F~9;Ya{?}qIBJeaU;&p=0T*B#%SSMtH;Nz(ts0nTmW#IoPUG`Tt)<3fN*9vT8OZK%Zz{v6wY-; z6zGNyrJfU@`G*DEe<|Psim(ub0t>iI3%I}|d=TP+Z3ISagaE!1oxpdZ`O|eS+(Jld z6Fsqj>lV6h-{N<3V%Udge^y{yC(C76I=X(&$Z($ZDe}m%bsJZ{aCP%ARxF8-xxd3E zp~ewX??3G}8$a2pyX%{TcKZ)IlZw}Wf1J5&fvZcIXEbq~P4qZz1LXtjV0A7RE$Zb$ zt@KPABSz!#x7X7=(p8`5x_j)8O4su4#4c>gSpNR z82AbBw!GJRAsTppRV7ae|AG_U!j&PEuan<@r7JoOtJ0~84z_R`Vc`Obo5m- zS143!<3g?Uu!SqMF}MIGdkq3ZPw$k94_mlGuyBFJEAsv;ws0AE)ZYryLLKV81v$tz5pcISO*(g61#H>GDa zVIHSDIR7}5Z!}y0Q>um{hMwXnrMdu}cxsBr8g3&rTtM;R z0z!c`T;?=fAn{2bk-!=*qZ%&2_?(DfU=5dH4Hs~HosDpiCJ&_^#2RiBYq-FmjQsu| z)^M5AaDl{kYZMJw!(~*%1sFd*A{bc1Wn9ArAb)SH69~29!y2woG+Y4kvoiuhCT~im zhc(=$)o=m9uLB4H)^LTc;mQ&!ELj8qYq*SPxBx-q4+H^gxXfs{KtZ%8qCod_DD|8G z%|EQ+{!0xPP_zm|D6odhw1x{j#0?M+tl>5T4Hu9|q#-1=X(D(SE)&CWflC|vdbv<5 zJ*?q2p}s>OhATs;NQxjZ^z=@t_^^g61PvEhwDUzQu!hT+h6^Ct%ODU~!(~##1r{BS zAr@G}6}E;;PZyBKJCWai!0F=I+tO33T%I0ST&7^~@rSHQkXAy@^MhY6l{HDapqsaH zgo^YH6d80ar(u;jRm;KVZ6nNEK+$D9LV?X&#?4y*()Bq4g3a4zVcr51StkUgHo=ox zoC{C{))YK%YH18G?5(Q3nRPONeOC4)E74}79XldQHo$9X$8s^T4%x08j(Kaf&2gu> zql}KpnfLG1fAEfxTTW_*Yier#WKVLKqjB@Tb&}FC`%hn{872}P7$@G;cQMEr97p-v6r&OarU)nqXSwr)r!| zILymDG~kW1_Qc%7-<~{{(y=hS<64sH5>={lXJN_H!QMO6t?Tn(x#UkImagn74pJu|GnA&0FToTOiT>G9rP^ zTSm=WfT27J!NBG%!{#mEP$ADhNO^}+4`TDSiOpMJ&~p|F32fdnXWjydURj6)Hg6d< zZvlqdQUn8=w~U*&0Hk+Coj|A+A2x4=V%`FfzU28InY<~L9yV{A*1QD-{X3$Vz~-&c z&07GVz83+&<}D-UEkGDZo_}NWmKpOFC}@!WGySZUQqKv{{KMw$zcgi z)8;Mk7#xLoVDq*an74pLYZ5|&&D*NOJ*EfX0vGMvdbv<5Jpm3~hZ*Yh9ipMF`m$#) 
z1A|_-`d{wVBG2|17@S8}sQ`@5S_Fok-YFFyHgB6mrP@Y_5`@ULiD4C%gcbo>+b{{l z0-LvtnYRE!Zw~^2&08kTTVOF<5wXDLt+35o`Vs}{V#FCl120iRT%zdOZ8t*2Pz!;e zJ2?$2%c)Kd)@~c2-2w`u7=!|Aw~TAI0A$oO1O#ih%|g2cD5LWcl-lf0tlb8;3GzSY zc&5x{==-+5VrM)DF3g{sdvdw!OWXFLH+&;@kJFJm&L&Q6wI50lEc#4) z05jIJUN8+T&9Qd--)Of0#+W?+pr?3BX)ZwLotomYcH0Q;7Ep{oiedz7x6Em`Kw_dg zB7wDAMzvdjF)13sz}hXt+AZLiOy2(^%^gZTh_%}$)^35plw=eVSi5CTy9E-{%n%8z z-7>1(0*vY8`5)G98P{$B$V^uh9o2zQD?Y5<3PrmGAhTK_AY}5URC-vuZCdRX5SVR6 z2(Wf5bnO-Zn72j%uy)Icb_)>Zkp4f`Zkf?;fxEaMRptlc&j?H0H!oLes!YNdy@TcHiS1u%;~Au#mx zPO12?b}Iz!7FbxVM=Y?@CPb(09~b=w-+vxTfLv%p`p;OqWm3BZ7PkHQ$3EjM{1mpaz zUN8-;&arv>@6FrF6u*MJ|3^>plAA14IIww~U#$l|7Pm^AU{dq9^svWrIae%fJKsgZ=gf#uKKAEHepHNvn$^ zFA;MrBu$L2Y+c7@OE~LkYMvGKi|0HxQaN%#@@$uFT@Kz>9&IXXE-Nehi>rG1tc+zr zME%G-*|nHY1VhiMKH zQ5e`E7j_~vb8^KT33KhxD5Nm*B&D*NOJtha++eMc;?dCQ1-TiLPQvjKs?exY#vLSOzA=$l|c^N#kw_4JcM zEUrLzqTo^Zds)Vbh?yw+e*H{(guCUCe1%ZB7+)uzTAG_ZCnbAisZ% z-CM@pTL5y9Jpab-ZL@H10ZL#xTClKtTUDMj?%o2JL*)FU+Q2lhKF99uf8*W)n8WQ* z#L!bbr9Ky+{Z38s*u8CpdkZLn0}u-A-ZJOj0*MgH{Xgra){9+NEOW9PZ)#~ddE^-5 z86&5SC+3<+K-cPq2(gmo!r{EQ7h4z``zR&0I5zf3a;(HAquAuwSd~C}R207Io+q0f zN0{DWnTeUU0f9%WPJt(+i3Of)!2}+Qc71m->fQp3qvugrVE2|`_ZDy*H$XT@eTPyH zV)wR*-CJOAg7lxUd&``A3nczDLD7KSTSnbmfN}CUf`Q#z#@$;0a@wIzAk>NvySGAd zZvn`eas-4--jqrYySGj2-U5Pi0$R)Xzne5i7r9`Lr?FNiVwTDLU3<^#Wgp?0=u`2xwin~`cDJ` zySGfbx4`1&cEkd^x5BnP!}un2D&K@2Pp}A;SXo4)H|^Bh)O4b?t7TOlx3nC^`=6H1 zOWj;}=iu{ML~7+ZxLykH99-)FH963VP`va< z)Z*xbu`z}+`<)z@M-?vnOSbciJm*34#~q%UeIbFnb;~BDwtvQb-uS=W%IyA{8@Pmj zKTYCUKb;R6t(>lo?VYi9XI`einudqp^CNH777R*ri@!ZVS&?X$>O2d+BTlC!La4;u z!L*t!trPs%O5(@X7+!28kp5dwN4H9wr2e=vXAwzNadn>`SAP7s+Ku9BSfrMr#><|) z3=DeR>VLVHfsXcts7rmcX)*PRSR&#e3NiJ9t3FUnO?lp}n_QNtuA;20*@g)V!yS%v zc+f`d$@W{@O~O_BNvcaq?)(+IckYuN=UtwEyleMj?TAP12NSo%{<34ah3ju3CMY`> zdJqqdHn0co7w@>D&r)KD!o;pdg&)}+u8w1k(i!<(Vo2;O&3GiSG4DA`dO(8&*-E~WvorCVewcZ*X4G*5)Fi{~)#$a*E zpd~I*K}Yp+3OkPTJ|p*Pa$n=ux)%4X+=?9hS44%*`|{+per8Uo_u}n?5BX?T9MS5P z7~5tiN0~U$Mv}Pl*W^JCJA#6^m**|8zZP`jr>jn;jEz&9n|>Cy#;Z00h$}Mou@F+{ 
zsxN@I$@|Z}7cX;MWbb5c#n*o72W9d51#p<5PTwIK+N#xIN%ZMlBKqo%OrC$&A%yk{ zFKrQR_VcA3dV1Bxs$0{t(iMo6@%_Y0v0IWpNM&{XJ9YP?@}kn9`Dc8F1!v}5EV;f- z!%|*?DSE_|%ZfFNPMKqH z&#~b4xX5d92Qr@+94fi;=x(Cp*To}s2Bnq3`llMWJQ`rR(qO^*cc=dKuWHy*^=#Gi z_poYatD9OH0}Oks(j%*7WUHee?iIjfD2c)0B0Xg#j82KLz0Q3s)Kks=x#ECd`MH&> zvc4~ZGtUeicGcM6t*gH0qpruRA6n@~yAt2gK1cmB`htAF#(>3JUB`8{&} zJ>HeoH*JiGUD;Avo`+Q_$pS@4^*mo!bSO16&)2#dwTwKU_ASo^wjQ($uZFWR#XBu2_}ZQ6nOCbjI5A<-zZj zGM3rAj$U*3=?2)wx*Ru6sIiTOxJb=LaZ%eVuPZk2HdYARSo$Ic={?N{!K$7x>H>uq zu_j)`=(UC@q4IDrf>hfctgb~2*BXM?8uWcHyz@t)^}h4Ef&+IWh0%?q?|Wexdt!=c zRbR&HiVnQ*ZOpzGBI9Xno$tD?sK6xkQh(2B2+Rzpm&Y8 zuXVQ&|C_dLw{#QR@C#K!+_{|oQ@4c`9>^3LPGF~9!>TJP}ww>~|w);up@HBR6E z!n=Nt_rLYMYerO1vVB{b-%IfRR{#~X*0^ZY{ukn+6z_ipc;yS%GO?wi#|OarSpWVP zA8+ur_J0!}^e(PAq4Ft`EIDfFDAx7Lv4qeV34xae;GKWzU++7wt0u=1f^a1SUS|<# zeI@6=Rqrg;6&+YYXsCox`L4f>MUhecUDp*AxQi>)E-nPb_csUPE6{%Cw`d~EEu_Ufms zZ+z1`Fkt=8(3Gd6E0ozrOONdAI!*lAXo=mTBHo_jn(cb&W-nURLB%^>OfwQdES=qrCbg%3+y}Kcyi`B^r@qn~|OKkU~({_vGG@n`OCUW#d zf)csg25}b3`h;$gzT*+~@$T7iUv}?Fc)aT8nv_r_kEp)aBQKgAc%IWH>59V;+g09^ z#vNS}s;#sl%Jg(;-p~G?-W&A;Qn#Nj*-#{YLS>|w8Ap`O>NAT&gwJw%<=Z~P^~z+A zPp2oj_+@o`r*^E<8?|EBzbqt!Sj`6GJo|g*7^K6v-gSn9_LqnxHn>kX~O$w z7u?|{j`7Ml-2ekUjzowa#Eg0|;5ce#akQQvN00am;bd3{dx((zU-fF&(|;R7eObh# zszorCzX)#SFM`{v7QxZI>G#y@kDoWMiML{b1biJKnH3Qw8{oCHW4V}Ehiq34$Go-L z=D5?`QAXEf&HH!iKR9m7NzHIgP0gR|Ne*)~Zr-;}QaWb;>B}_3M52RypJS~a#T9oD z>BWr=$zqwXTT4ZmcOR1&63$9??lXr7NosFY*sYaBrf7c8WMW5xgt*ui6LsHny&hl4 z)@&u`@QY=mrS{${GooLhxtQcv8L7bmzOia6IEy6B`J=bSENi45xx!V3- zwpOoC+`aqH5P`|rL=d6XD$!xT9nK&)%6p zL;1FU{261*GD-_&Z6isDB8p1eue4fHDx*l7eXGdQrkx@wO8Z{4+LF<hyw3lA&vDM2_x+vsyyyD9-|PO|*K=LhO$-y1ly$m3VP}w$H8HGk z{taZxVFkbEV?}z$_j)=bj4&Xe4@peyh=r`>f0tG!7mAE|)ksWRZJbR=ur<@ZE++k1 zUDifJ&VNoZ<$-jx#@^{B)LoC1BP%ErMTWf%!PNWrhvt0_pK+^ZibS77s#S&}V!2Vj zMB)=J5Ap*8Z;!Oh-nKb@K=u-RfvW;X3vq#QZB_VdBe1}s2dmKQgZ_;sU!BBxY1xII zKYsSzE~+s8PtN!JIqx4&4Tx-Vls|OfmPg&l$iuD;ny0C6jTbzTuk;Q$ADL9JEAO#T zro=3xEn-(F=TWoGDs<636O;hosj_H zSD{Y)DunTOa{`nuLh`7{nt+1H 
zkWqG+10jdEtBv6wntB+01Y}UF!j+*jd;~eX%Qzees2+t1$P9uUSpt?5+FMRw{F(~F zuQ{4+_&It9a(H)fH~@$-WvFnh0D_};Acy}mhXea)WADGPQh}p|Acymo!?6*EW{ew$ zW-CfW{|M$g=XJCkH2{Ho=amgV;Xu?xs z@%aw38pD6h9{NnnUGQJC6d6mK1=Gd)zxpw7d#hLD=Ma#)%`;!w$+VfTR}U#w6v~MQ z6ya@?C}|YR;Dbklk1kyp5-C;PM^d4>;{6%CTb}dhy|f6Rs-%X8*(mADUNKm`?)`)E=#%N;SI@3cGd3LYTcGtDmm7av zODOSKx9@EIlQoG|_Ve!Nn(XZ6xf4RcK{x%z(T&tX5C7Y?_LJmjE(g>KhOP_Lx5}MtwB-_5$~Op zd`guQ)ND!he=;&ks|&z{%5EQqIb-?sMPhkTa!F-ApXLU5m7O-K`ei<#$~n8i!SSI1 z?M*EawRP%1iG6e>66s2eqbZ@UjVjTfL5Vq&r_NY7dHzhA7IO(mVv80}=&neoYmtU( zakRA-)fFF$CnKhh2|!uJ3%OEKv zRd~N{dR6*G^TfHF^Pg`~sh(Di4@}ei+R8Cuu1anAch|hA%HL%MAAW4E{i#}Bl|IFw z>T_AdWId$s5{ED#$2Phgv2-~!X>u6MN9E{^4iD3%aCf2!F^ho2wg^GrlJM#gO$f!u zs1QMxNB|zOGxszbXZM$uD)d`=z_-Zy8<(pk2 z`3`z;DChQPS%buIk8{J;?4Z`iED^f5n_utlVwvM2|Ts_HS7me;{)IX3sh;_ZiiP&U?xFOf2|0dFta=g~7)QMe~IW zNY(PckH;#0d0bedaA&8jvc0T>j>YOZ@8ZSIDfDY#m}Q)>#IP>Dpf<)esqXzvuj;E4 zYNuR_s#@?SBkx-J9zk$s`)1gK9g%HMipk4SCHl8fq9r$RaZ)gAar|#;>NzI`U!?fT zZCSfzePSGoR;2{G)x~YtI`(bjRg%K_Wh!f5EUWzD;PKf=^YRCh<86f|i&2A1oD8?P zR^8aHWBq&b3w4Ic;L|mW@7Aa9x|}y}W1egE(G6*dUl#g%9epDiQ!VO3OHIAkMguVp z&{IGY!_m3~w_MKjd#7C96oiw-T3 zv^sh?G&1<$(ff#L3c+01R174lTWrsY>s_3vzHpbQ_AT`~cX8@yXE*!$hQnW9kDa0a z;!v8Lqq6>>pPR|EHvN9GU|o5kqikW`ei4&P5)H!63b@UeFqZw(h28LG5|Egd?$ zpPH_v_4%6bu_7nuquvWqAuO!+x&LG9DJ#ZS zwLPb<)!$B1sZvh*>|Q-V@Abs$Td`huZNhVMe5Zr^#LGWxKyNK!J6(V{x&UKn0{qtp z^*|p~fYui~K(KKw5{JN%Kt13P}d)_DoRAueM9iahGG}LJ=8r8$12Z{5tTc>EX+`4eCZ)- zjcuaUho}iGZV>~{Uag8z)rpO)Us;{+l|fC^{&UT1_x0c$nSvxB!XDvuUO1WQFwZ+@(0z% zbW-3gQR!=GmssSYrT0W?!{z0x-j4H(&j?c~&cA+;nyB*ojQ;tXB(RC2tY8HRsL-bP zXDo}V(2t>p*y=vVMe#2pGCbtS;iHnY;vaUW>oLW@{Yr@iGNskeJwmLgc9LPa??>ER zopTT=3EV%WV7>k9wN1L;3U1^^m*2TKbY6K{*_G7$vW?zLhaYgd)wj*bJmHt`pbZc5 z$5$6F_?5l2WJ*wdqWbQt$NP%%67O82760m|+JSm5A&8k>=bT5SkYbR6@mdGU9Guv- zwHgdpyPm_YJ3f)VM@_793pujlz!%kd&lh~Uay?rp)N22fQ|k8v2f68eo4DbE)Wn?3 z1NHkqZr>qk5Ku7O&hUt%aNDZWyf=q4CKl~eo4}`5G-1Q*)LcW6?Fj`xLfs`+<`V$A zrv0+<0eS?}^+={CC_P$&nx}*6F_5JmEtflRn@DcSQ>pYkl|j!_&(J(|gq0%A7rdtg 
za}iS#NLShfc7>1RE>2g-7H%r!`vo2I$DaT4Mt7Euz-z-pmz+Z)%C+@vOO$6M?|)~% z)=o$Fn;Cz?n%n{I#b)09acOzxt!bc+0(*V2JW=CYEf zQ}0C|s|t&db9O(s`OwPGi<5}Q7yXbW+NKT6wX%2nk(=ve_C4mr>CIa=WV^Y0&XB9V zA@yv}Dry^`Qa-vf*vayQpnpEv~Uvm0`0l@QS72(vB= zSK#W>`le^v5nWnV*!3t~#$WpxwkfpHweC^z$CUYxD!-(vdHlZD*Ng;8R`y4He0EbG zOH~d~+A)O`xo_?8zUCYCCq$@ZeXCOYgq$%o(2b!NW5&Hb*abhQsU}Qhd5* zhFjltPx!%^GTw8|Y9P#Bx-hN2?U~f0!Z051t!{jvHpS`NUhDLQ-THdY@&4KBrl*a; zyNWLtH@dz(ys}o-;M;F$<8W+IuaDjP{st18}I zDHz+UjTRF&E?W9^=kMsJR9EXzp`3k|Rr3}_x*AIPX5~FxsRdBWyx3k7Xt9f~MXQf{ zW&x-ceHoG$cJ(vn{Bf_f6g}_b-hMvM7*qS%x*xwJys~*4=VwPUtbQxIWKz^(|ffJ@ZG5k`Y=4(Su-GyjL^m}tJC(!!H2@)5x!4>par z^M`(ZF-0P3?=<;{MQ*tTT34NtVlJdkj9DiMglNjK3qeiM>f7F;-l!1$8H8wk1q9H` z>DyjwDOz0vdrrl6N&lm&V)yLb`!$@re?r$kZ-Gta*>_t#`tP6MEaYW$uhG5u{LJ`;y~I-C8@-um0`G@wLkTiB!dT ztAc{9N{KD)7Py~oidLWYEFw@X1~6#Ba1#V6iqogP)?&224fdQ*d%t(ByQ^-n$R#A} zS8{3DuemdheAGXAV||6}G;P<@1NjW}L^SO-ZifUj17 zjm}^d(2|j;8k!6rS{eDu%8F^d_TL^AP*_(dAX&}S6;Cgm+=`&@20q?GT~v*>&BHC! zHQa!4zjj7?nyT6=WKc3;9N%iOAh01_jNOKGHJdKokWL)H#fEg2P29D5>a}n8Sj81A z_e|M5>(+L!*G<7y3k*&3=0?N{10^Dhmx7oxDFHys@)lHr)vo$3SAt55|y;v6U|06YrOwtIK;dr!<>i zr`Mk4Z+qQQ28YbaX>QrTOn(dRujy%RE-kHD5l{)$&{89}DQzSHdEJuIQfL>+TVeE+ zcC;m>8R>Ah?z}mjtt~^=2@N8E3%?j2yPIWd*X}!?)Zu;Sxq5jGi%;FGKP}OxO3QIv z{M*3IgJ)kX|eS1W2HJsFeWZEo=qg>>r!<3c$fTJwyQ7j`OT3aB0n`Yv0GZfs)Hj0XK!FrpAbI zYe1nzX7Ww&mrDC(oQ=rLp><)&SXhF;tF&J$LlGGpHQFyn9q@OK_G^;|BIBh(`?W(4 z{N17bIN^dQ?KhqFOF;~g zRir)mBCBeO$c|b?`=z6f$m(vV{W9`LWT!^aep$XpWS0#FzvQOyfxjTyFB30B&h$R* zm)|Tz&i^p&*Hr~X?wSYfSI$!KX9a%A3mb#Kg|uH|CxgFvv|ron5&1wn@N0<9Eznmh zPy4m(D>XM`LB(M@`O0JvgM; z2*BAt@F3#VK?H6*RCs|wFEIOOw7tN9n(&a7Cumn0r}ZsYnJ7J$R~b-h0sll;E7q;Dw}HFC;+14clNgYyISGS7$Tq6AbM*0Rn6iU@HJ; z|G*0guP!8@GR8;(V5N>1g{^d)c^Y2k+kKhGzR%DKVxX$Lc`_;hywqUt{_^hoOtTI? 
zPjGeMoX6m$hPRg*>KFrqR3vl-LU1Mu(qiLIO32qew&Bdq3@##xzDK2NG^Sib}{_DNzNb^H*+&~%YouBRfX49<0o(s4-a6S>>^@g|C z8*DS4JEDn*AVc=8MHT$JT*t)J%Bs~S1+dYAo6UF%vaPo$bnbGUoqQsQzflq~Oi)tR z>H13{vSz@nogqg1__kS9BPYX8{COkP+u**KhQf4R4Go(W&g+L?+%FIylQO?vLgTdN z8llgjW-lI$Ax9@U1tq)jk2^r@yOMHWW6&IN<-OG-HmRnBJ}Evt)W{<3s#~HOsX#Qz zd4fgk(?X+j4FY#u90DX3aU1fi?I`kN<7BC-dH3pL z)vqrpIUZkg$m{6`v#LS97P%X%ztR>QU2w4*4yt9?$g4Y8XDf;f*$*b0nz0yLXUn7m z7bCANt0Eb-=-iRlj%gvsoff~3ulp3aZid{zG1JuL3^qL~tUo*ObBXc;S%YDDwPNAF z(h7}?r@LelJOavu<}6k%9<4WF&8D9ti>gU&CoRLq+uf&v>=KnrXgn>(UQ(;{dSs)ZDKS)^&fxYB})b!@HW=)#p29a2O{ zfjdP^U88<)OkEI=`z`fUZ*+N*SWM~Td0JJaYKY_xuyp6*v~}QW zK{!P3aHj3Fa7XlLf*ce@1sTXvkd~bm3pww!U?oYH?6l~ZD2m*Pawbgk_49^EYNXzW zfS3u_#(R&=3%^=v@S<>8Y0@F_>It&>TAn{H7+mz&(5O7)?1PBp!tyImzNuvnW3jZb zU}Iwa)_EDZhY!m-sgH6v+uePAPTlU!5x`E6D9ZrDvAt) zD2(m@v^{xTY)@#JuQbpdW?(Svf$HsHE%TL~zdb>qAHo;lBcvfdO!xhYvl5gQq(i+` z0tN{PC*>2S=uG}~RYK8w+d#6xUMDg+sPVMrkUA7dm zZ=vGWa^dwugp?lnZMW7wI%uwR!S&w@XYc!7U%!9<+dr#PLq#ct0ZklV4-t)lAuGL$?_v)VY(}Y?EBM>4i~kMwDr8OxEpK9W!rt z$CL@h4v)MGj;XGe0b@%;GuCbf!eHx0nuR$u9#@zaAfpMx#k!HU!n6!3^(bKq^8@75 z&kw&!S`>3xMJ=SCbF68sky6o=IO;&DCxNdc?vSJ^#Kn&l$h+!(-#mKW#rMtunRl$s zU1}@a3Nt5Z=$rKDkD=lDgU0LlIGXAFI=0wBHu?B9&nW_6p07#W4gt&YQ~tJ}Vb`0T zdEnBb0IoO6Ws=R;o6#qZ{=MD+#hU-sYQCenVs^L}_>b7n*|g}%zB3jAHg5r&vFZW} zW?`wg3UaPK!}h|Cv==gd1rS1p95umoK##1M0PGl#EcQ-z8Eqkx3RPM{Jpe+A&2&|rT@yZbvBKf?e4A_ub-fU|$#XP9pL3cH7X5UqKO)>sm>0S%(yOVJr6R6#Y)AYnp#dB@7iX+3zG z3|xP?NP;cb-~6F1b%~bi@82Caq`7m`{@WL*o}vPFKJ}a(9is~(-z~09N-Ny*R;_wy zwYlz6P4CV6c3;HTuT=FZO-h! 
z6kC2+&F%Z;LQj*8k;O`0?_SLr>2|TApUFH>W-W7az6Q*PHHbD#ceW5$IwpubH}ykshC7+uyVc$S%aRx0O(_j?Ztki-Hdnv)Vr4_sZTF2;&i(u5J)2-9 z02W8Y4YdGE99!+Tp$Pz{t#@q$HDoxsL7`SuUSQ%rTp0XGKk?FKAneD zl7qpg+YS%&Had)(7@7McUo>82ePX#@(s~sjnVf?Qm=+=U(Ium;@oGM2uKb63MU8W# z2$O8!X>v|`)Y|UG`FqYZ?6%GD6Jg>njOS@M8n|^LXfhq0Nwo!~ZcmtmB_i6H0@xO}ozq5)z zzHL@?B>oJ$iF#uE@7L{Lzv$>QpX(cnzEly)_!C5rks_Bj{_LY)AyKTQpE6;QTipry zrB|%-KYa0UnH^8LwvyyoDWB#uvJcXB|6pmHbX?`cjOw33876Ri z;Jd%&z!yG8RX6Z~l?l)QA4Ij7@+fPw)A+Hd#ke+_}V&T648Y3Zt z37+djEOS8t{$-fDBZLeDxY26Bv8FSeV@Gs?l#$F~MK*~2<0q|26B;#%)V<6qrtEhA^ zX>lk7{7ceetiXW4r(ykn%z?-E3nb%iA!BgBr)QyFVMS*6hdit{|68>g#6M#(Zv63W z1Id_YWX#w|19II9NhlnVXrwgYvH8JI++8XE2O4R>{O3z-{u7cl&&e7H@X8o-4(#LL zu94<{q0<>i_eKbnjv4UqNdeZJ|Dom#0{_;JZQ$_{0g`pMkTn2=EG<+rNY>l|0?W&2 zhx#~xWwLML#vk7{kgRz|){G+pK)77&{V!;ot~U;2=^4!7=6%MD!-CK74|>>?@sCXz zK!5^!TmkSi7c4z@Tj?1{S2zNd4gxaIO3xthMJL$?9^Wqzkh=wtfn>#Crn9+$37+#^ zf6g;jFhTrFUg5?c-!>4Cc?QUTKTW@(&(O|8z`S|cK2*5TEC7wQaQ15){_fdWNkHB- z51q$9pnRlIZf&Cvo$o-0&ehv4YhXG5oROZU3KngADnlxSjuLEcPB!Md5i1?$x4uuk zpkVL!Ze+4WFQx6tm;9$p%8|BEl~f&*yyJpKw1$R8BXRcH#p?H-*`_L-Uh}@EQ%*(`(ws&tMoy+mO!^4+g68p;3{6A!P+LER zH(y}j+9tv(ZKX=Tyc;8rS_rZj1_ULvDnp+V1FO=@)YJ8=(zcu(LaMFJ^&%g9RO0mA zsA~DfhDiYl+h;9Q+TKX=ymzn2ImFprH~5FgWcTd`#)c!rd$pcfcj>F&yXf5aTe<0* zOK0eezoVz~#8Sjz$k)Nw@-)e(k&+_EXfVaOl|KI5O1koyxYDs^Yg9Vbj!4IH&qmLZ zj&)uR=Mg8^WPx9ZgA5;cKD}o91-^&*B0SOr01LiwEVfBCL zo~y&a7l8j&_ju9W^OBzUX44Yi=N+hfMsl@i19grv0eRW7Y2z_H>G{!<-WD|JsdBMt zqs28PNQJ;B+NzCnY*%f}>Ds*kJ3G2JV0G_W*QDCb+v}HQ*;Q$2jm)dq9Fc1z?h|{= zSw*NE>t!3LXFzucCs5#S+eX={(nDa~Mvbdq4KbazZbOOfx()lTwatZ&+Q!EA zznYn$f2#Xr1(gi`W|q=$`0CDjGXqUe&&r`-SH8(J( ziKJQuM!fFtrLKQ>{t>UZx-A3Sf?9auseJqIi#%dv4(r5Tz2xCxxlreYef*WP;nAPw zfQ@JZ{hecgb{MMJBJ*eqxxRfzGaf5WfT+fjvi}9u7$pE>13x^V1ns}*{EGw_Ez?{8 zB&hj?D*=ZL2Jf1@yK7>!Y>UjMC9v9CsC1B)Z#Q+M8r^PMZVvq~b+)1B;222Dyr<>n z1OIn1DjF+T<8a_ZS{{lvL${Zfn+5n|iz@&J=RjKKAuVI|0b0fboY08vzh^#dIOqm5 z9vw!=_7%IgFGnfD9G+gNB|JqH#FuAtnDSB?Ae> zd~qeG4*EW8QPChJ^PG}_ 
z0Fv1LXI27ma1Nwo9#S&aZ$rD`fQ;AgJg#}5cC2?ncYnW)ATo5oN=!RgjOMm4xAU4k zfCN&dxDs%%55(;5!fYVjKy3aEVm1#k8-!j)7Ih7C=s7qBVs`gnHXsGr7pQ2gD33!? z2R3{BTeAmnjr>Af0XR4ZVm1#koAFBy&`yzrs~t2CE6irO+=jbODaMkt5HfTy_Wcu< zq`979>pWHl5)8%mKeCd5L;ixS++C~;q#KrwrY~k-Q9s@V5WO%`QM zOgmH4S+;JjbFAG~u3qC{XJzHI{_nmHTLO~RvajPQeRMs7KDvIqWptgY!G2$dz*F^~ zb4LC5EG2DrV4!-63Ru@MD48&hZ?#xZx}c_5O3?~3-;HUXWuzzhW%~PD4-e}ncv(<{ zpk_n9I`c{jzs#xol=~D)oy4e%wuyCmht+)do=bmg)SyV5wEo220rQ0tjD&;u`2u`| zG{lGLzF)Di_uzp0f*RD5HiT`X4Uv=U`YF2g4GGjBjv@_}m6jfJUH8sG)5lwg18god z2!*`}yG8U!y|jA8K#g}+x+!EmzVH3!#R^nK(RdpogYW_nr_X_0Mh!mDQo zTw<^Pd~KTJ&olU5QI%3?d0Dp1EdlWE`o#}!d0-55 zJF|YHexu%Fr5^?i?DPX0@!GFn%hHY>tlz$Q2567 ziC)u_;9Ob~)M)ko7u&@h>EpV zNQ~{SqSUV4RWyll(tzRrQ8eLi?>8mXI2I5oq< z5qKw-Irf?fXaZAXialrF0P1dE>|2(1^xW8kJyG`r6GLxRia5vKsc8zdoE@s0`(h8t zeubEGtc&C9vmQC?17-)*JY1YKeEiay0ZWgotCu@{EIVTxF8Pew_S|`$EAHS>ARk*| zZW_0^VWDT{vbcgdh2?$U9*I=i0@il;PjoxM>fq6GMRn7}UEaaYH(VS%?Du!@7fRJD zg$}c`DLwo6u~cyg*x#}5Jt9NU7ED>SRa(%-SyO|MHRPwbm?EW|LW!0%7TIb>-0nAE zOtkqhbDJ&V3#`f3Wbz-O&{sE>dml>;HJqMVT5-}Rh@d!b{&dj`Tk{6<&*bwrN+N~{ zO3FH2pRhB?$eI{dIR6GR<*ed;9UeVbgkB%?UG7ExD5@zjBUxER&wQh0j?!A4dAq6Sb)u@3zQyW$=dLYz zp^=z+G+PtMuY~LOn(ciA2Y^rHV{PUg#vow@jY4%}1$>-+ z1F8ETsJmH&+1GJJ=#+y87Op$3aE+CFnv>rgXH-9U@O14LQ>vrWg|#Fww#>sD3bkb!`G;EZKnA zn_OcXd(OUrC7W(5*_f?Db%W&0^Og_V!2_eDmS#8%W>XL*D=#$k_WYtiX@6Z(!Z}AM4h@JS*0qiHns8 zoDBr&o0r`pjI~pspN$Hxe$Yg0==)!$78peU^TM`R{RbAXyQTF5h+wCMIf2DJrc*K* zMCdL;XrSNf@2Gyv7{@t_L4@u&LW9_^1(S?yt}mQ@0};A=5E|&VUK!Pm6}@rx4MgaF zL}(y_{S{mhI2#BeG%peQKKST>@MY7#e4EgQ6#>a^nV5e> zpP1i5pP2V=nV6@J;F26N($iGc1P4(lgmJ{xq#&>}M4a8u5c{s(8Dhlc&X5}N%E){3 z&Qo8fyV{>~s?l9)Rp_hbbTf*&UHP!9f&Oc~rFGv=+co@-AxRD3Z~QWDrJJte7l+i5 zCF3<3PG}Efm>QnrvhH|zm4UfV)S804ka4C7Pa>L1l$7dTxZl5{1Ex;ZD>sOOsneaz z3ObL*RgireL=_y>5d~Rxkn}JGSrxG#Ke#=ss(SH|-0D&J6$Xc#Z+lJ4-dO82d3=mX zYVB6gCAw5(@iw3Xqm#hsi3>LW1D%9!-}xVQk}us!Z|J#j9xWHTZ9ttgnyCkv3m1WP zeQ#Rw-~)Od+(yrXJ6iIfkvbQfL+B1#g?uVQDuj;GR)-j~T^*9swW~wscesxvs-U`Y z&5~2~t}ljHxvrH@(6PyLmPopjQt9?|Fj#-^>fz$kz(9%2;difP8-Df}fhIA)5$y{< 
z%kHb56@J?Gkcb>#U)L`(0qa!XE}!UD9zm7sUwZ!K~*R&4H5ijx~N1wPK0^$WQpF*CuQ1QmJYk>$VAb7is6Gyb|WcYgu z35;M^UQxEH^w0pJIQcyiPS6$tD@4NBt`Hg5wJSshcXWk_iK6=b)9=^sol$g5Twbz1 zFRv-h)j4VTA4*<$Isxpi5*cpsv&|s{QrJdp{*jdoT=<9!oNFrs%N-&;O9om;!raR_ z1-?(y3d5_S7C9E}mi5Wcy_kIV;)gG5)NXD)e?F1=__F<=`luQD-q{Zv`rfd7UTD2J z@m2WNdDg!tubIiPd@SeBh=DQ79Nsv1?rluCdh8F?eogkKn8gEfa;Mff&^GZTZgk%S z6vS}O4lzLhMr^|NU$VjoE)F7ybFG%Zaf@?|I)K3v@9kJZV{tKpIU`k?VA5=VeCu(15wewgU=&HkE_4;W)Tj_tpIK;B(I4suK} z1oEx|a-iTT?EPB^b6~RdR)18%(JXL|SwWMP*6VGpten>WT~b2kJROdU5TvJ_Ac(R~5nV(1)VMa!x%^ZsU^LN-Z=e5IfnPa&jHaU4`8b>?z zy;|W^xmk3sBy#hQO97Z?^+Biln{~ULj;k9pKrrd1)DhiUttRP-y0L@@&A}TJ>P0JT ztKQ~0#?I4Ksak$zex-c$<%gB;7DOu{mntnLgXlB52F%{Y%*KsAbWIy{>lW8|(p~eC zp6zDSvRxbz)omn)RU}M~8A?E2Hm@PkdrV(L;zwUYvISj3qRMs+38Py|kP3lMLN~G$ z&moJKiWyJ3vvV_%DD0%Cp>c&j_&)jdq`@cVh+XNwtN#(Qk*TzWw6t`S|M1&aq^!zr zlmx!G^=s1w1si0ic*qOBPh?MF!of-JpI0O2{P~5Jj*=KsJpUcCN;hPb-}4e7MYnx? z+Z?uh>$A%5q4hQ*(MV~)WAlTbESz?35xd=lXblSXpg1hg$s-@IOg-Of=t6Y2is1`G|g3zkU2KyrS{jiJSEe^MZ5j21GhVUGC>8 zQD*L;uXdU5B?0OG+Ux_c%XQ|k$AgjlW=voXI|r*QYj46s?JO#QubqwQ7h=Jju&6pn1VS71EPM^AQtQugah z?}alZW*Idv9+`iMf{-ughKw=F%e_?)UlO*|>89z159gGF%9WbPhbuliiGrOmefGJ6 zW*-99O4@w7+*L&d8_l9x%Hee3VxFNb)3sVTBBZQbdgjq%sp60UT1btnUkx!G0e(>; zu=9(3*ZSo`NBvSf`}Mb!x1mX_M7^4ryuqZ_^ z`hq)*NA15l?Dh4If2Zay`ixKvqJN)^8+{I03tlC8bd_X;Vvw^QNTa$j(;0_+1);d3 zPz=KUFpzE7IamckG4G)m2=;gqDj0-fC*DFa2!9Ip{hw9%Id}#_G0&iw@%j(I;OR!x zILrqO2i0Jv<3*-p!26`tR=GMxR5v zf;8MMGz@ghc0hH5$*Rke6=SUhggpn_|G*q}4pxCQ%zGLJhk9Np>Jv!AJgzE(@E5e| ze`V3>!of3;hIvNAj7>5i)4gx?{sB~uo6j--6HPK;lCx+RmhxbeOov~t=t!Feh<((_fKZB<4`h#<>ii+mqFOe#-YiIIqV#)0)d+MKn=LyBewq& z0yU3;8u;bYD%3Bm!q34o5U6Lf;8v`wtj zJFMor_gwm0qXtFdr1dBE4wx^LU?d#G&llh$q#-^`_x*~Ey$1)}7u2Ahv>|L8ZHSy) z*H6){Z%Cj9aTIB&tTehmcx!)~3k^bHFT!pS**{hN$TQZxF2`-)*G&PJ&+j)M6BYT< z>GV<+wVIcM14N}91f`V(^p?iDUZSoj(h3l*Kc#NJ{+pYw;-G8GV(&_xQwZ6?KhVOz zj~M?6Q-1=*Jnp|ybKEkIJt`@+8&ZDGG%jO~xm{fN1|+;CY@( z-B;9ftfXU;^HAwVbVRzgOEP+xbgY)t&DfJ=Q+wDoB-e4-Jm<*x&=-jYvkE3w)#=2n 
z+^sNK4@rG(0YXh+^bQ!=`0au#0Q8Q=J1gB3vL0V}ivKQ6vbFTSb36KWi+48Cy^~2# zd&ac1_eTwtPL;b&8K`fD5s=K5RT&xdlov!#c?Z#yr_X6sMzd$umLlr}2O%~$C+{WP zh?NfWTi>T%P_XxVH!|6xm(up+Oa4@EQ%*(`(ws&tMoy+mO!_!TKsb-T zWM~?)hbk6dq7AL_OHnQ@i2Fy z|HP-2*#gVJ>V>)27Nr2s+++@X!#TEr=j4~=?G1d}Wf?sy@L;KoLDKRYE0=}GQE$KY zD6mkTrJuWNvs}z1OKRa$+qkL6zl{z^T}o296nsnen!}d?Kk9Td3%?A%ExJrW-@47z ze|XUyWe10*YjGuNYdmk>P;=M%qF;P2HvK!zAoc$yiZcC#1W!RQLb+Uc*>>1_Jc1!WDp% zZ@5TzZ3Tc>-I4D$j93lQoh0`DH*>n<|xkM%!6tiH4|nYXoPAb^xP>KRr7aPkco=S^9VS{#~froHx`gjXhJvE(Ha|Qy)isA~;2}ujWapyEOF(xe_o!lc- zI+(P0R&xe{S1@E7cuszSaNG@W3=Vk3Z>VIf*o@O5&qV`ZTQ-9==a!l?5MYQat^k~T z1L2rwHD|{98Ax}-MnmcPki^ipHMzN1{x9^@2qMFVgI1~L&(aXDd5+gW0Ohr~0(3&! zgLvH)cnzde8IDQ^@tS9N4FW&%INQK;@(aZ4Zoq4R2%{FDl0m%YdF>f^M(ri80GxaS z@tS9N%~%2j(y8yml@4A_J9%b+Q2-%im?jAm05+cUx&{pd7=yik#_E>R3CRmWa#uhy zkWPCR>KzElJcDEq__0*Bf#>8G2+7?5$v`q44^%P;$vlT-Ab@Tyt^k~T10k7bkQ{@~ zLa#+=L+HcwZlKbsqKsx@YGq|=I?LA03X{%wjooTD+P`Jxv>x5N!D$G+1rH2#hZz_Q zd!TxISWExraD#kht%;}khxM}xGFg&_yhu(J-mMCjNSm*Gm1u)hLpXJ z&d5bunosMS>X+Pjf3jd^pLbJ9#(N2()YaN!BQ)+&8!VS6&AYzYJ@!#z<%NXk%ha={ z6C~#zG1H64iW*#3XLF1C&}iVp3iC(fBhq*NmNT7@S}0Z_{v`BxqDI3B?O`btk-cO^ ze4`&%6N(y&GB3Po6uZ~i3;LcToVoYyt?rn^47>&M;G zs68h@$ zna(f*wTv=--3FHB@azKGoikciZKTsyZD=2+ui6MfS8YtBt=hmc99yfT5Z(yC5~FR} zK-g{CSjYXQ4S{|Lwpi$_1Z4&3P;Zq0uwo-ApD;yd@~^8Bir(7>k`4Ack;y3!ir&py zC?`sq8+7ug0(tt%m!x-Umrw1orI39K6}OfPuOA|$^vG|!wf50LbF~eAEfo?JoGfTg z<|h(Hn30iaGx^|4zw?qTm&7Uv-|}!w_>r1e_{Y)HsKB!7?qLIeHw3x+ZP8rdl_=(* zPsaNHn1haYv;dNBU(neeD?KFW(glSQ^#Nler9O_&b-VIMDcLZ!@Y9=I$+L4E1NQ7X zH|s#Qj_hcKd-bnQ*~U?)`932C9*%U4rQCiWy!qFFwyOe46|cWbk9zhZ{ln&q;YCx% z>rm6na&?C$EVUYYl^+}{o1~n;s2ahG`B<4kK|P|1uG!$uCty1&r(>n1lY`kS0welW z2Ol}UfKPI_dWs6z!PUu2X*y(;g&nlY@gaf56K2hRIpIppybaODmEXx>+SJY8@6M~5 zxV{8zX~;ZhPHc0kfb=v~A2;-PZUOw<{?NBHIQP)dOBXCSu_bxYD7QV-+Fz~`TAxbB zdu!@D`BZGS45+Qhwk_=^qwk%aH2h1liD5vkl~mf^r1&R=a;b$!v`VY|%7xAjW=L=j zIWbS;Dt+yX?sz3cC{(Ia(Dq{jM00Yyo{wG#2fOoP$ z5NjWZF+wny3z*@6S%$^k1;(D~?$T)x3}Qd?6K?GPFiKWME;d{il$dIoJF(pa`TV{bI|d80*@7*94m~)W8e`B?EMqS%{=605c(w> 
zP^Yj8J)T>*I91x#?P2k__17_Oi^q)B7?9wWs-v?3_@><^?$PpJ)T=2{qn48+y(~hTD^b8qS*+_ z#@LtlKhg05#wu4AV~Gy(GY_lBAogp?xUv67%Rqka3VsH%t(QV&V@5eVE`VJZ9qzgS zLAT$_Ht2Xhf&APJ{0y!;j`;nrcs_yr{15yLoZ@r`bqeHX&V~DK0x`z#EkHIG?EMQ? zvO&i%@-wTs7A|j`j5TC1@ap;za|~o)9x^b9y}J!=?D5WaE^hmffxCi%foz^KsBDmd zc~V0LLEjL>Ht2XhfehRY3=HJj)N21Vi;5UL(L3!#XDmj7(0jFd|BXfH@!Z12sRBDF zI%oVYjy|yZ3Ji6xL*~$ix;Hzc32qb%5Mw2q{TheA`zuxwkXOx<-SHIKWcRI|^vUks z=w!DtH#86#eq<4Cfkisujkq=eEU*Y;yTHPuix*g&<8B&%;*%Nre6>mLu}8D+Gz9;$ z446Gj)#c{wwX$=qEHb@r*c_%#dpvXeBV+jht-ilR;}taZBi0_Tdo+5Zq_N=$@m^!1 z`G!ghPrArQ^t>KrI7G z$8Y6cy;j_i+LTK&6f{8Ymw*s zMI~P0o$ZJG(bg6Dt2LAJQZ@zX7br~(A|VltCZMK5h-QxZR;>SpIqJAp0UX`FsJC5X z(Ziw+_E^+r71vH{${4>i>DuYK2V0U0pAH)9aBkTIl2qs{4cD2DPO)zBCco;A2nUEB z*ypPBY4-RCSF58zy~(1P60?jLHuU7miX6-RbNYPzMy1kt6_uUw1IHV?e|TBxwz=Vo zEI3@;3)aU1y*4u+F53f8myBWobGU~K=<(xpxU{9YmKRCTWeOSTX{un6#ivrFLhvX; zGYf&;&XIo71&-ZZaxuZ(SKV3Ec5vF3Up z_d6_N$;;|3qsFxDLjew$|G9u|C1PoAW>r^MgFj~X*Re)^E7IdTTdyIkYe__}Xc zl(60Xc*T@}d84df&Zt`}o1Y#Mx@*7>*A0F3y$bp~`_Oiyn0-L8WDL>va_-NRoVaw| z)X#SgnxtIK3`kacct8$#X8M_Rmw{}IKn9RtX96Z03zEk53byux9|D?dPj;HRi;%qI)3WH86^ zB*!rVG?)Q6v&YPcp>?>?$8`(@=JmqK*{i`!@qmP^G zAV+r>M+4!afNqeZdD@-~q8^Ryzhb_<;93Q8bT@D`V1gT9BBrHG0Nay!*`5r{a`QB9 z@No?TIht1-jeUYy4+i~!Y`2%-$_DSA+&tsT<=qpjD${}whGXxv`u+uJm}fNH9Q{}` z?Em^KqK})gAPsj14X;Q3quC~&gbK&ZWw`kYHdl10xuQAh3ES949oH(5hIvfG&FL*s z8`TR|mAR>qv0P-;-eJAw;NQdgzgPty*D#QVc}2t5=>x7AU<`acGLtq2PFan5Mj00g zW=&gYWra?GlL<&h%M^GzeG2?AeF{7TodTbTyYV7*><9w`9j&NaVT20@h*$bt7zox~ zNE2+FH8luXLw<^jDN@QQlxRs~k*#LL?S5}GqRoey+iVeEU`@6rlm7^XzPhp8`&eqI z;q=VXijzJ;1jTXlr;A?Lnm1Tv1j0{hAdv^nbfP?*rQr8acI4oNq_HBseG-Y=9JYM3 zRG3sJtVQvbE~H>%ViDY}yJ+a8F26(8(KGh9hYRwyWHIURr42V$^jFY}_!_5rGTl9; zu;|*kAg}UeBs=#z&m^1{S2|_qeBWGhYG~W($~o1w({!jM;~X{xJH~z$El+r2c){tz zmo%rum1~ig1Z1p{6WD4^_}dNBO0fF_aRtM0fKjmF9TBY6ri&gZ80)#dio32&-nKXD z$DLNLnj;{SCo{`FH(mEnZru7yV9mvtQd98u6oJtuVD^F%`+Jr|oaiHh5 zRC->UO3P~xC!vB3WxL*jQLMoPB()`{J)`HeV0umqLUY=9wwoey 
zNBhw=w$Wy14a@dMyVXXE9vE$~&LYMx;^h$)9XW^f-)l4fb8(K{s7}hb^g*h1#reo?8+B}jMGuTR?Tfv0X1su^m7aI)k(izH*6pK8 zIUgh12k(~6e_WukWK9gUF!RiS#`67!a{21xkDs!6QubwB#i&eQwW&e68XCe38^v<& zn50xrayXJxWs>Khb!B0m`2>5Xin`Fxp}Dmru)yMAj?42lE&+#qsyC(=3nqq$WxSUl zN?olzHsT);ixF%=cYfM881(_N+}uR|A?l1^3!?rE-v9Kti@L@=YB${f7{L}Kr{~!H z4}z^Nh3s3%&G)Jffh~yk3vB-vbF|qV+YoGd4Yok1%vSH;La=S;Qt*y}Er|N7v)GZ% zBI@iM0>PFyV9R(-2Z#L|?E5dwW-QN+|AA{d=+J))YLLxsU95`o@B-2q7Z64i22sy) z#f>_%xe?|uE+N$M!$LriGV0sqKQOf5P@Zv5Go7hd`j^4bc8wfkSID zu?LO3Ae(*GM6(&x3(qCp-DE=33ydLF7GiqACX+5l;^wGVFT#yFyQ~J0nCD1rk4&Z6 z?M&dwnaiFt#>SkAlUK*Y(dn2Yv4wm zU2cM4+bv)V1gppHf6S=G?)Zky3+-=Spn)xjcEdc>9?a2ZXAKCpyarpKQ)4Zv69ikl zDpu!#tq?LC0dJM#w2ZKG2n1W+fGxJ}+#ak0A|S*0Ne&Wv?q231f#WWPAgBd@Y70&C2U)(Pcpqw(lUP9xv<{F{T zp=K{0j3GxSIRz!V@sB$|?7Nb3Ut`c5apk?$PJxAH$?nzT0xmB%{9};2&@=YIOUr7N zLxoP8_SHV<+xMLQQtj}cH48I!0<>O_n|8ABvE1B4(?-po6SckV*zNE05xHV^uj9%S zgFQbuUUmvj`t^F>Y27z9&qBq)L*^SkI6puK3ZK(!^K`w4>9~4f=RK2NT&!U~ z4LwhmFapGpw$ zqns=t7!!MuVlB#v(P2izr#?M7op3+O)9?A5`92jyVFhc!Fo~hE($ZtD>)ttN`gjXf z(&j>gP}qyGTSSl4ORHB5)Ocs5n?ly(3s3RirAfAy-gjN&Bh|9lh<=S(|rX4 zX3su7K1^(i+~{o@!^dq}@2-3P-iLbs-KX2``$;d{awpu^XG^%t?&9ap=T6L8-&@O%oj2sW*c*L2=#BHMpXmpV+4`US zn^c_*(GwiMkT=8^Y~HixfLF#TEpT2N_?h|m@PI(akVvzpZ|)o3E;y`F zGW9xDrk1k9>}z6Mi`WN59m(+zt#$ActI1t8VMFPig{QKwYQ2xQob_!O@W{_0Za0Bk ze=CkH9sviv_$W*+7B~#?3O(rm0k0UNPDF;2PQj>icM182s54&uK-7Cn<3^orYJyik zo?QJHp%&z(J}IbP5Nf*?YC*L7j$<2bw$6Z1%Uh@gGWE+tWn#rwY!7S*wcx_nA*cmG zAFvQN=xqG~p_Ug=%XmKr2fZ`_m5ce{U@H|&WnG=h7%7+^89wj=DifH| z?SiPwVEu2bqR!SJkb=8`fL$*`q@g41zuwd<HdF|ei#e6SgX3Sn z2{OX005W`-I~r|PSj9Ht5N10JvmoljD{!OEHYGur?FN_ydMSsadcmC3wIy8;ZIxbZ zqs`VC5N3G`v*4f}iS55)#Z_#N4p`FdXh|0YebjK&8?1uP)*ldNc>%MGZ6zR;sl9@7 zQMu{m`5$O25k!WoZ^h(-Z6$bJ)}7Yi0HUt(3peU)a~h=KZlGbH*O-f_UXX^nmWDyJ zwWZibo2@e-4fB?U!AU>%9x4+nFoQIV*U{dg(k}3Zjuvjv+4=+0FfV8r`~2YmhS-2y zx<$BhL63BTN3aSnE$Cp6!j0Q>&I9BsDFfK1Cfrfp7Qld$(cAk%hvyFzo&r(oZ|Sp}V~KOoccf@%Lw z=R5-=%nry5+6c4J9h7O6Sdv9fYn@9v5$F%~8hkY_Eknh)u-ngR4#&28wc 
zrdr!E|5l^5sbh5w?kS$X6-KxaNW3C(K?=M*ABNaCYiba(hWr#4Q>2trDAAI}B3sRf z+x_lnM4JyYx7i}Tz?y7LCjSu%eRX5G_p#Ja!|9o&6(@ay2#Vw8PZzzgHE*!U2!x;1 zKq3#C=|p)rOTq7*#Ic)i6sW7QdSc~E?Rt3q&>8TS>QP0b zN<+$C+*c03TdG^B5t(Gj5veFFpS5%;@?VX@#EySC$lPnIb#d)+*ZY z_%~`{?i9NNpFU)h?2h>wF8-aPa84m)2me3||2|^;Crteb6!W-&+YDW_ywoaIttx!4 z^sOp1{Ll6PFth}O{1 zXe7>FyIB3+Guu>!(`&xh%r{IyddrMBE$}M2s<%KtgfGBHNJD&>?)w#IB`7OMhkC07 z3=$Ad$|p?Gnf&XjgrfJhfn?vzsuvwoR zJtWxRfzH~1;D7|>!}_@^QWwm2zTgu3GdRmLYxk6ZJqd3HdQBW(?o_fmC9x^sn*P#1 zF5BZ~tUXQLUNJAM#x2wO_vAG*8MbGHr+@Dk6YVLH`^Rxl!kSyxV;4WmD^YszGrh*^ z%t)|3W8D9-cjjR=ZSMo$r!-J96H47oW$I)qNlB(l=1Rs63ZZ#IM5fGB(L|YLE<+-X z#}KJ*GF}yBrs)bH)$ct=XYIY~oOJT-Z2g|~$9330%V6@C61bgGQ8@bqGq*o%JjR${adVua`zRiFBqE=qC#Uv zJ%`4)I)+Bbp^Dkd?z8VK(+jlw^xVMd-?iH!(pH{xTvzHx7{e=n-+w3j!jhk@y5!?p z*0|&%q63al#9wl9Btyena`?Of=R=;tJ>6w0UZ?#6jgs?^ai<1<$%{W~ucS7tf2Xc` z?FO$1NqlD3eBYa*IZD2_8_#IrQ}V5F$Qz&H#7}GQ?WE0Kx@=e6E?U*II4z(3ro-iB z(I2xOE;#KR9R4Kb%z=Uq5FiPjjxe&XN@WE}etbMhOGqV*E33*R|2vh{&m(ogt^{)X zL>M7cDYpx2Kk%ae4;GzX&cdQkI$7hQi`bWN&nKatk6zBg&NO8@;)Tmu&Z->sweRGY zXvlS|X=Hxg%2`-!vs)q;TSQ>sa#qsiEMT&*N0@Lq`?&Y1Lz`;-sg*jFv#{h-$@$;H zmRv-1;Br9NNC8YgHGzH1dM65ys{5FT;J`_v zwn>9t#=`Rfn=-W6!YV2e3y!sfc!G1vpiX5hEcycH8W&x}euT?dM#@;=WvhXB;T@@7 z&v9X~7vb}d8Xb^7K}&!vy@Yc9fwhGCKgWe7w;2yoVveBrMH~I6%S2xpg_ttxOwBBWhf75z&E z(pqj+BP|>t>*w?j>M&mDSGH^CuhWZJcvS9rk6dzWM2F9?7||+ubwYWfv1~?*$HSFO zxBgG%T;BiZu;?pC)wt*)b|rkMEAc~JUVR0;yx=iGrNNGi*?KKzVX;?Ri(G6Gfq{!z zNf)z#X^jHHgo|0xXI8(FtLevtu;gpKYFu&=(SeIui4?PRTJS)5J^B4#VaJ42T9e7C z3$>v2i>V8m4R7i~*>an~wztxhrm6*mOwzma&hhr(t1nxRJD;8_p_y=6ktK z59#pIaqnN#3LpKL?UT`WW~u4tL!9bQXT36Xb-8CE?UyTj&ia~N;O<+I^6uuGNrlV1 zPmn$$O?1Dp_#nItbVcttc_kUp+VC$KTfeA-DYW1d6Tf=LS)CSb=0ZcGCG=(Bv64`a z3XPS^-tAdid+hpa1!c|-MyZ`NjUw70XoHHXIr8^_nU<~TygGhN9? 
z2EDi?c;t;od|5f;B!1UIzB=9vx2Q%_pl?WkgJxyZ5o zmyd&Hj(awLTEVrXoWxZxj?dK&R_)_CYu3x_MQJ&S33{!L-walI(=|UWd_oW_quCgf zHHxR>pXj#!e=%^uwx_|Eir&TH1EXFg+&h$$5dP1NmBpXC7Yu2v z07>J>Kr{Nlg~~*|ot%F!Y{^ALM=i>#>N75=0~Z?Fl7rcxKJw7H;YSm52H&!?pSIoI zC#{{w3JdL7?}|cR#1+kt)Kv;ilpcG2f#2Y3x>F5Z?tD4Josn?Mc}DgKAp;jw*Sxcr z(~rBFpPiC7kn3^g>1Ky}_Nr=kGj~_)mxdke^aw|omsq8&f)u|~hYS#0R*8AMSi7={ zUdn>PzUwGn^V*Di`=6Dvu;#w)Yh3fX?@HC%2il+{l(!qsh&`rDS=g2QM8E&1-Z4>9 zHZEoBR?5O+@9mF5qiWwEA}nwzE9p`eF!@vFAO1qprNjj;W%+VK9ZFeP@<7V@hma)~ z5goXcl}IT|Z^WA(p}(W_rD-}fCx=|K8)eU|*4oEg~#%)~Nri zK|jcam^kzVVG?$#C?YO!^;_5K7nb~pX^l%RB06f3H1I+0?;qrnNy8Ct|6m_G`W?~2 zNu$;ZT%iE41 zLQcYUY%Ppj%1ITjVa2auD;N9J7mTT!Zob8My{AAxRpzO zu4ey3M0DU9Rw5_3FiTbw_b+x2%{TbSZgf$N$qN5f`11kx?uOvk9pX{f>7vLR< z(Yy!AH@6{L-G!7@R9aK4tn6&<-CT(2rt&aGDNXPh_!RzU;QjfZf%ik7fgkdV9It0+ zsN3({#Q;{=F6q-P!rH^P;FVd{PC9JX6OAuT71iV4#m9%YG;OlhTzdWLhwN~RE*92n znogR-nZx1yklTCz{OlF`llK~p%y|9bpw~{8`oM`J6~fkL{nbR1Xrxr8#OW~FG5*FF zwbeINdi*t6X+V?aUNO?^?ALspp*HlZe82b=$~p1zJ2%~k*mFd`t;#-ilYFE2tv8R< za8J&TaqCIWU<^D$6 zhI(BL4ZB?Ld8vzt@Yp1Mx^Y+w*v}TqosUuuSiPi0sjO`C=dK)%W#ew^6Ha;$8`@LZ zvS-VleG@i>vBTMHcDeM}MbornQ*Dyf4%-%go@kUrH1DW+Smu60L35ebgp`k$9J^^3 zz2|d}HjAIDw5Pe2PX`(KxNO!iUE^OdJC6fWKSxzk@_p(`M5_BQ3Sw&d zfE&SIrH|_6+0Xcp(QIA#s)LGJXmH@v>350yNvwzR_Z6)#7;hy+g~#H04v+D543Ci9 z)5!)+_WK%aUHq>@=s&+BPg(S|k2Nm3s1UKL z%c4`~Ei|}ASAMpuA|b@??`d%=u^T*Q2DU6HEIMI5BiFz286}7JZM+O!EnL37{c=H1 zYd}&?O6t_|!S81+yF|b0i;*9usJ<6LScd{TbuaW9`x3wvW3-tbVlTAiwX@~*-E~$ z1yWDQ??2(n_MVcM?rvQwTUhje@1pP!w&4RKu|7ugLlL z!WLarh~R2iGSx8sChJAq zTy;@<60T?&sb~RMo*Dv%E86-!--XrAC;hjot1T)ta78QmiWZV}p)QIGVa1iGgpDiO zI#;x?=tZY%Ty#+(f-73dRJ3%{1h%t}Gchlv+UPxhf@uN|qCY8;ys-07lYadhrU_XS zJAw2LtNwXSjjJwdSHm^2#A{+FVmJ?2Nk0UvYLzA`@8d?jwv9S@Nm#ksU*Y49BF@xB zg$AyPC0i5oHZn-}-^U_SxF)W5=bz1yV9y5GkCq>nUfGhlf9Nb~o9;8W&wuh~UCjGKK9K{;S&*c;oCu zj3ih@H~GI2udYIBERxqmb1N&yB|j%Oz@Uv3!7Jz6{8!Gw{8!Ea=#}$;nkF|Gs`odH zRX@&~+ptagbkndlytxg8sJRV$>v?WNK<(x>#7v1ZJyp_k@XV}O9rb9#CpnYbFHZmL 
zF+gd-HUDpq)AV0CM!YV6SoEfqO6ts6Ir*KhB6YNzwL6>X2Qa7e+r2UTqxy+ZBJGsxRP@<2W{5|{kLSUq1E0Pt824&?LDK}Kf@7ZNuDZ0h zl7+X-ZYIlP^~{YSpuf zf6G?CX$?ENM%r9n~Tl!meO92cf_jEj&%w+%ZTZP&!$^#0?^!tc(?PhgL@zOs0`Qb{L= zZabBTN9@TvVZl#VUGNqGH7>aD(18B%R~fqcf}e;(u{14h?*1?TB;=g=&na}}@-ghP1O2irgb$E*?;R5#5 zp-r{E-l$Un3k$9?t;Pix9vZlSl|%tc@4+FpcalM@@D^4rTQJmv(`(*FL^rilh!Q@u zonbMeRr2bD@3TGZ6PkYkl(+ly41pB0@u8fu6ZFKy262bWoLqG-g>WjVZpnT^IwE5xbV=xHLoOU zUit|MY+;(;P;gY;!f>`2FLWMzEwcr^aFrpt^{_@ujSE)^6t1x3J<0PwVM{K2C&Gm* z1BENV>b(xd2i}URwco$9712*RV5$2Qid<^pF@XzLNf)k=o7sC1C1G`x@cfKz1DF+7 z7ux_9ysvDH3obk~aN#P6!j*1@z!ui;JZAOF?+N`0W{51&ZNO}j6*fca|Hu}Wd|-Nw zOD=pX!<8=sl`p{3^Fpw2! zRQW1jU10C;S6058Jau{tX%L>aWyWu@X)~Jt0c13uSO&o>h?Rwn~|_X`gF6fcJRD7 zK;(>s$a{Wyw`dwakCKe8z-(IdkWw zzMA!Px@zj3nW-T!kL5ZBopU|Gx#v{;8mY%Ra|2`)QF3I359UMkOc8@(T5~7FkHePnN1yZbyc}mPHc+S z-Ix;v2R%((c18zgboMxVw+y~b?v!fY6V{v_BJgOyl>GiL4iVLGk*)|kmdikh*zrT; z4u3~8;q7SS$@*8L#xV&fJYW#+9l>sPlfRp--ZqNlkfaA*NRx%m{M=I37Jnve9!DSATP72_NJ)fclk1KNL#vn3e>u+ z<5d@X!b!9rRb6a)nn#NbC9rslO`nv|z!n>ha0~mrIld*hWKd?9{prt_LY_OCPjh}+ zIQMTK1uL~JC$gWtbR6`;F8!X#d!=^S$^B3@TF#PC;%g1(eJ&$cKf3R6i z#js7pvv>bmzb}g2cyw0T+{4@%N3)MK?RTpB=&7gfZ~9!8?)1byx}x>flmOQoc3oc# zyF7lsul;Sg-p2aG?cNR?fJHB>AjO+Gk+6hR!MLnqDbz47p;l!Tea*p3Hz()c3M;Gd zn*aWq(@R-c^Qq+fS7B=|VpGE1o@BZ`dMOLr(lmLrEmf_qM1%(}W$RYT!eUQ%6S>$T z;sTelk}hQd(@YhF374{udy99TTZd8>mV6fJe+XN05z&E5SxJ<#^o|@-yp;ipkE%IC z1Sq_X)&Dj|uYN&p=e$6egjHf9(mOuLjVCy#435>RoOjN(H zlUbARKdSCzBEkb#zjdpAAvSDWP;69PY!PvR(?#fAtGX zzL5O>HLiX|Hyn%{jScFLE2hIU}fph!G04yZocqu<$Nm@o}`+xQ4CaV*Gz5 zBfy&5b*ypCMdWH+!!l9B0xtVI2v^mz3hzh1xgXJwK47sObww_=h`7KttfXsL$k9&O z2$QgaN<>^>8G+!9QmDfTE-blA&Hjgo=)fnqk~qQrIn$ok2`@yJr6OFo2Ij1) z<&(79_2gs?T)EhugGDa3h`7Ktuq12X${fEU7h%FRu;@cY;X@U~tbr@DhL?GbOD-Zh za1AVp8koH2sD!mzszzA2M-=_dpj!5b^hy@qjeC>+8?Iy}QOUxZuOa6j30rd!dlIf> znW$s|*E;h1m$;Ix*Gd)^djt9XpQ?*3A}(+xE9puWF!_-4k8mX`fl3yZd=usTLx=<- zB06v-D~U>WEB}S;HU4St0N!)t&392H+nRP*=)WjO{Jxp)xLcJzSF126J|G97BXgy;a%l@ z>1i_+O@~HqTq!Lr@2JmapKj!v$aygIuLFUKr`v356L>@2#7x;zSy{QlyX&RX?X2FM 
zf3+k1;QbDrJ4}lCUMdMvMp5{Bksv@bbQ8a^fTEewa%)LI2e53 z$3wa1hKY)7wUK&kw)I@6C0$Q!m+?_enpoP5eOSj?vidb zFFujoVO-PhTMIS6hb(>{x_Fdo`@SQzRSlQl%qczEKJQic>nes_vPu+#zunC-F&*ig z!E*O`BR6hZ&zF7l4Q!Wx>5*Hg1k)2fDfA7O>Z~k3^n{f3>N6h$1}%sz%`l&l<)2@X z7qYzX%^~J?+oFTKjts6?q66QrzF4&Q0X*}gvjCBOt0iW^5Q|;>xQOG&g&r?1w!K6w z+R@^|ZjS4`g|_w!eul6_ z5FHl@QPB}m&(R^LR=emhb2;~YS@I;GIL~FPHh1dh`nB8D=jnHE2did3Jyv4X;+^YK zwerua_bpt_z2MQ|#;V*ADxLau_h>iv+`4yiBjVJhd$QMf6Mj9|#N!@28h`mwpgs9& z=A+g#J+#Z6Z^yihZ{cycr&fVR%MCE*;>3u>K@c2!_&^f)K=gS)c98vFd!azA9X#xL zyVYbCksv^l%tw;ON0N$=1PVhEvfC=;7GZXWWzob;383_6iGq?)-%!+R2jxhB&Vzr- zLb;*(FMKW=oE@}cKvwXXe8cpdS+C>vG%Ykx$?oUzDa`PM`_l66qmEwRmyq-B^zDzm zGrGXTIS zM?dB#`}J|^U)Dq2r95K&;fKefSr-32QoVGk)v;aVA4$MCWxT(O=y- z)xgYKo^Dc-cFrvG@{>zZy5=j~9Fh!P+TUF9Y=6&%9Y@O%ozoVr1TXoNS*?DaX5>QJQ-BlvZ!Q)=p7oQ6}AThh5>eZ+CmggME=o?LIVZ zy8l@x7ro+@eI}fE?&OmZImg1eB)>YCadDj=ri8!U8=T!?kgk`5x$d|9)9h6eLe6>& zlYy7~71|2`K$R2^0pmyBf6&XVaVExQ<4q^jq1-C`kf)sc=0#dVDz}P0{{5uX0ch!14R$+#XlTyZLa7Rg4Vum#5|wExL4NaA$|XdCL~|inJJZcaG z6L#5wx9zMsG9%f|S5KCaC1$+RnX)FiEFoi9NmpOH%zoAd(F;CZk6Juy-@8;BD_Iy; z)!uU$V?uxh@Z&9cUOI9k=hl$MSq*-JwHF;fs%*2YL;?XYJuaW z*oXgI9G#kzc|K!9leATO`#hSK^>Qsc$^9?h^sII1M4!$NdS5wZHG6&a)SeyNqeSj_ zVbr;u`tL(l-AIbD9v>V%ZTzO_HP1Rj-CHo#78E$vc0Pz0K8S%l5dISph&D73qoQTX9)Zl{;Z&l3o1?hSDy|ybep>C ztu!3$niMZy0W@~ zpr0kyEtZ9x8E@K-t4JOv>Y0a6?RaR0O}bcam0<6)DrLU!w@dBxR~Yq*j6Ce&&Ap{u z@ZyF-=-?ISdyiik{^0`m!@QWnHLWhF&Q98wd?h$q;h0+MvNejQTtfmIK3h(MM&DJ9y&Z8LG~y*nxlbX zlgPS4cV3tmbXaAXxvcQN zwJ@M6%y~gqVW}ecBVy1BF`##XwMZ2L474t=;iL-9`==M@PCodu$m7xbO;@IQD?K~C z^<3oe9(vby{CNG}CHJ%3(%ti>_IdPO&p`K|*YBES7Zh2}ez0<7wzH5U61MIB(qf}C z1&gDVuK3?AaM#WsoNxazGig@;1D&AZFju!703fziO%?k+5r{U`KnV6GIIsFus;G*h zvioZ|sY3I}3ya8aCwFi|zr8tk^v6f{uZzmxxcVvjbE9R}2QQwHuafOk?qAZsDTKw6 zq}YM4&v7%NRC}Ivo+Tu2m)}m#REtQuqCKI!V7BFohcN~PKG9hx+j?xYZOL&PW6})LQ25ncmVW$bWCB@9v<)hIB^q`2I1?bJ73aN&mdp zE&Sn%Nu!FV-=321*=O*3=TUFZczw;7bdEj7B_nyn_S|y{mR5~z^fxaUfA*cBC6Uo= zvzM`~1*efTOVgYK8g(b%@hWZ=u|*q7z4w}yL~8fU>KN#m*>p!hbn&3JYharS?Xy5d 
z7?1;jC_1VFp>}*?0|^ZdvYa{HuVDcxDt*w^<>+-ipN{$KlU<@>RL;I%Vx8?9^C(Ay zyZQ0cighY|qStwK4RBX^T+Uc<@VMIRf7X1VXN zRr3F)A-(n6IF<2MJIt;vHBQSZx_0hSg{{pXmDzKXUJrJ=!VP)j{6Vc(y6fZYNzu8P zb{;}f#_d&)%jRs1F5P|UWI_Af?1J`r-AT~i#j`93vqrr3PGt4uRL|9u zTiD%`^6K`FvKpRmbzHsnI8gj9ZYE=V$e42DIu;`dY;_K+m`-I zSeOO`*7FHm;S=b~BXE?B2(&;1sJ$J8!4(06JA4MIdu5St|wW?v0ro_$;U;b{YkHgfg@|&LS`YSG;7@~T`Z3B19`tusE)*MyR&h9<^ ztIm|`Iq7@5n;I+ns&s1fIj`i$XQk>CzGqroF*-5U(Q2OZ%h|`CzndMnl$$l(Bx6?K z?KV(3DLRTb{1>-bLx-T@?FVH`zWkbDh zUHIMlOnG*euS%O-4|dUiHiI{7-&5EUw<=?j`Zl9IIiKf^@_w82)U#xWvvtLa>ydB2 z+l|^AaG^pkbxdNluI{*Xl<%lT>T7)#oe3??DBF@1dVW!~cIegA7>BejaO~79#3l() zR4ZRkkmrBxXej87T`j65Dh*n5*Kn@*7T%A)>Sj0g+~K@}!9H92epk71KXlv+T_@L< zC9^-wT5>oyDYWecJ=c$oM!9=EX*Tt+o@$_ss+z*zc|O&Or3v0!Jv+6RRqdp|DlKZ& zfB(2tob*auS)7$oFeP$0Zwz#2mi21@;>V8-$`#)!^8UNMP#}T^Z_=-?_=F*8a94Z_ zkGRkJvfXsuVbf<7)|cn1jhb{JI`>BUWz}!fw^{ESukHHs(UbWG3nDIbma917cHvUU z;PVgfo_+h{mRz--BIL)DQwA+h1smLVnVV^}!p^zk^}L+e30`MQ?`&!X`_*0B`9{E` z+6~|7JhWeRq%o=9=hfnduljK426@BhRggCAu7xB+He5#Dr6C_`3~X-3+3wd?D`tPQ2RaeU*GUm2h$*L_$I~MrW9Ju z|DX~(b~ao8)Aa0|;=gB=M9)%HDwHWH9(X;)>1*q>D>LupU*LKkzy3Kb>y=9OB30j> zu9Lm1ZDW`7Ck)C|dg~LF@74apz49vXum)6oBeH{?!0o>IexJ zt^A^;-`<;NwB)K57^Lg1@+h6)^TB4y*lk;4tDoE+KT_0h$MC?xoq8xJW?GHjbTv*h z_+9M#@&2BVatA_=|B}0C53s0q#}`4qe{DtEzQ_i!76-Rf6bPqWLAj5m+JSoj!4 zU+mZAQi^J<#fCQLdquq~vhTb2Q@=|CeM-){4{E#3K{doD`Ak;K<_9@{``mbN=lJRB zm#w#>pCon8`7ylNXDj`J!lnk9aTjf`ILw$-7|U7E6odr-dGo^oM74w43;oe{)rJOw z+6UI+;FgMl(hD}6gWIqvrLLP25B41uvEbhPzADqUuj$av;psCQmzM#a%M0SHI;srH zc25q=Uj8b{_5Hv9p2#TC?|a>4W8Y;j6RK6d-K}ERj5;~|fRFo=J|zzN!wQV@V=_I| ztUatUgJt06TRU?e%#>v9;O7p?72m}K1fwkt2CW~28`oc5@lk0|d%}iu#kVl5e@~-< zr9Oveemik?ZHBe#jmv`r?E=H&C$R@QoV)ex==Nb72Be4Dc&>Io(J|oHd zZ|oXsO5Qc3XKVu6E$f#9HvxUgGY zCFj3j5tsjVfN2pIpj`7sQG!KW!2lk{MO*+A+p9(})HWT9xRMfaflD0u{WE%s7q3aD zruc@~q%$w#0*eH){>36LV%1of){V?Y>x{%AF4H0|@VHTe zc#tv=r6UwW<{_WRHXNCUc@Y;F+#>x?EaEaI;sS`iYrPUK7cav>i!^z#ui@3lb z4W2hu*0Wf|mAr@x1pc{#2w)MH5fK+4+%rTFu!zf$hzl4pZXgVFONY|YVG+0fMO;Af 
zfIR=hA}-@1E&zE*-hX2emv_a-w1^8#9=oCa3X8ac8$QNGTmbX0QjK7!{T>!^B`4wn znWtfh3_ZnD8oyd-JXFbzZ2qu_TYC`~SY(m@Cl+xT6LA4Vb_@y%EaEaM;sT84V-O51 z;!0e^rK@e=8s?Dq-&k#9NNuCbxbShn7v%gSx~W5H)N7H&siqEj{n~J{I1>|!=`t>` zc=;XeS6IenT*d_;ueTr|c*kOB$D+TPg*5V39YMh|uAo(CT*d`3@5uWPVPL4OI+k%I zCF25@59IfM=_y{kMxC1C8)Bo*yo?Jh^0Lt8g=JjEWLyAIFdu=yGA^?+F3>3aglJ$H zmuVRnc>K2x@gRjBN=GP$&_i~E4M*rv(^4$HXpFXKuR-3jvi56ifW z%eVkUioE~EGVY%s;{uaLQfR-zGOpl;k8v3nz{tQSAS&OO2?Imz_pppBIT;to$PGYb z=qaAk_|-z=!7?u8duDZ-iVG~{$@!mH#$`;#1rSY0{}ao&jLNtGL*WtHp|FfAaT%Ai zs9XsH7+BoBDS7`bOxB?ewy4FI2k^JW>-ueRQpj~B1`sktcclsxCv;nf(yZ4alT&RS z@(LCUxfOXK%lqCOVs5uBI>_tD;EE+WFyH=S(c%ZR(b2Ai9*;$H(*MLlE~7%OE1}N= z(h@$*SlNJJA(wF>mzPQaN~;J21*Z~0yUwVPTbbkAkoB)HFw}M(3%QaIa(O8pYOl7r zC}8L*Uc6?Vn&Qcu+J=+jnH6$fiT*qm%H;Vk7IGO8a$SjmJP_^4`JY(GWlqR-C3JZ- zI+EW%!$K~TLar;J!{eda2Bi#A>Y;RmVn{t?H`s8b9%hBy%2d%Q5HY|)E+ay2WvWmo z=YL`$moXuicSZs`(_f)sUuk zd}0H^({aU}j$4_#yQCu^WbUT4dSc{mvdwEaxtm!b*Og%NcByXWhyfOIB`f5*5`B3D zx|8>RSjd&Qkn2kH<3Z4xiNXL2xeN%ol?Q-5ULXu~TZhuoVIjAEh1|+&sTXGM&_0jrm^D3z6M{@j(rv25IJeZooa zVM7nMx9r)nXWuQx8^YM(Y&N@GdhDWU+OerN$!dpfi$6~^N+Ozf)I2P6KcS$xOlv~Q z$4idgw2R*JxksDD&sEyfT+63}jC@=+YnZO_w=>PuSFCH#G2H6N;UrzpeLQBes)Ew6 zod>_DaYoL&r}Vh@sY9D=;yJ5w)Yra~U!oz`?dEF14IiUIZY3BU%J(mYfT8w#Sjd%( zkXxDJ`@*-#D@_G@il;Pwwa|F5ko!AAZsoykfAakk7IGO8aw`vR2RubVfrVVggxtz2 zKHY@~1{QK9F60J4txHI8$DA#zw5asR`)@4k2x_dHY@bdk$Ge_UntJ(^R&T%7PElr2 zCf#y}NlMm{CA#bHKw(0cbtt`hEfP6Z)*)|T8;-2Q?0{Tevkoi_IwBU>$7S5d1t3FC zBOutv{Udx_fMR5fpa{z(Vs`8JX-8e3cF&{Mp4 zy*f3;lNYrOC&e@G;{pqla>N4rxQzL@0Ahp>0)c&8W_?_sF^cp*v5(8Nj|)6TA4SnY zIz5z*Pz;@Z-)5E-v3k=3+AqLpTWz5F~5M$3H5ZK3M)W-!FiD4D`%wx+5UEcu?Clt^^U)*!(6jRCbKkVZ&?&AWGY2^Jk_Hq9N z9~YR+=#2I&VRKlieIE94*B>s)-{C(~+D>)l_`6^I$}FFx=PfOro6;&``}_zWquYgp zq%)dr_A-{W;53qEX_|A0aPyEu=lzF_*->5`KE78m=ZSmv+Br|1OHw98WqDp3K2)E0 z6|$%w^m;*6C7zkrY6L^=_ppyEIUg57X0`<)Lr?LP#;+C{5B70?*T)4GR^Y{eGV(gI@aX{89MOG# zD-;@Z7L@+H7Rj3`=#aOr4M)&n-oXVH3qlbK?BFu)-~td^HUfej+&{v>1t^PdASl?u z6};wPd?qe{Su(puFw_nmJGhc^aDj{67sQ30;>GLFsVSbkoNYKMo_PlsN?(UHhy`|V 
z8FO#}gp&dSfgN0C9bBN{OwRwr4ldIUF7R+A{ZG>4p>%{|csyh`*l;`^<{eyM;C2S> zP}sp`%)tc^9s>~w?BFu$-~x=LTm%Cpa!c zce+G5I$4!)O=Ohb3|K!QZ_%sO@8m|rsY~}{ukj}Qda#MdJ#_2h-~td&(*GoLH>K4R zBX^T+Uc<@V%saTiV5Kbz3hdxY-oXU|ULOzv?BFuu-~xozjtByFa2axN0fRR=|A;Q= zP&zv7;MTu`3nD`C4ea0wZul6Vi3?yh4y_Rk zwco=IuH+nCAhU^_e?(95l*X?X8V`1Gf7ihU7F)>gzheiNF$WhwY%N6F6?SkLb#MX3 z_7w;Qc5o%`;BMkSr-|mz#O+Uj6ta`N|HdMM1Vse8fGbaQ-{ptmf^O$fn)6y@ZmOL_ zUb!|LJBN7z7g+eJBNkY|Wn91oAbt@D2o`Yv2mu$M>>Y`qU;$Uqo-;1s0vP{iHG-kG z=UBj%lz;4&-V0*#Okhz1sLnHF$?N9ak!gOqqE9ibQ!57`Yi9Epc{0T&n??1LC!0hciW z7eE}kj6h%kmr(&1U>q5aU|<24aRC>A9DN`X2({zG0K4RBX^T+ zUc<@V%nP`{AnZK~3M}ACUcdzcCzl`sSiogOzy%1W{zDM3fXk483mDEUMHuLI4yB{R z0&e{axPamudH#n5T*d`l01{5#e`5jnPY`f{$$3q*Uts}PaKp#AfD2$QB-99o+V5cj zS8@U_khwGik)fw}O5;}xjRy<3zboJZi^w;K1r~4_6L0}U6#4!S3%HC5xB%ly8Hx%l z;7VM;rQd&ph!BnMzdMZM)w@X<8pV>n|E62Gjfw6thHFzJ7;2A>EnG=i zxWMH)S^v^gym%ctHN}(HvkfQ3GjHJni<_HJj9?3wF$)(!B(+2!u!Sq>E@UU)iH_wv z(YgfmgtreM8rZ^R+QJ1M$>jGRNRx-s5sG2*klkRzF?pD`aDltZs7uubn^bMDiCVNhb>%*ShxV>ULJ}NGIvv2Juz}O+2%Ez z+|9g&3k>d)_kY;JmAr)u1Ri`uQGhL6Ml4)_@Q8f>jV)Y;EL_0wxE#d-UD2U*blAeJ ze+w5-{7atyVGEaW3m1T7lK0=(!u=C0Tws!Q7VTHq!WG=`F>c`km}jGE1Vioju!So* z3m3>d&p~A9DW1~!)k5RJ7Vhs_xWFRU4za)%E@KuhfOtWE{}o%fj9Rz=y~S|o6)q(k1kHXKQZ zSr6BZ(BrXqPtHHW9xkIEt{b7x1M(pb1qb$U8TWAA2m>CJeDeF3*uxcc>5O`~m0$|; zQM3pHL+#SBhbsvWw=%^Stw&twDPFuDotomw3)+U0;+geu-H86Y&FfqDD2B^J zc7qMa@YMh@z#c9m9xg9c0Elll5D4tyGUnm(&PV`8>0|_>s`lw-Ki^@A zwT(3~-Bey|?b9mAe{ZPo?w~}%EQw_yZ>DH{!T2IEZKk5>(8!G|rKRN^_1WyxjeHY1 z4~G7AAVTqUn@w#3Z>XD?DO)NlD_3}Ty>z;r)tmFLcBEhYwmeMDn%LOXFJ0Z z6T;ghPC7ugrHW(3Jvok!&zOyVCflUeSrrlogAe?8DA(LDQIV}SQjg8Hp6j%v>xu0$ za%xEvOPjF|>)6WW?KMxo-j@>|=eRS$wb8&`(yivjC$fLOeKv)U<4DuK<$Q)u4Aw_7kR70HD^BnBv+S=P#Ta7levNAJ+e}k2v z8MLypsLj8SMQF&T|K7jQ2mKe$h5ter&r!CI{1@6Rv6<)U>|js%FBJUWB>cY+{uk@b zA|6W-OIa=)&eg>DxAE~8w|ZxJdnau7ZXxfzI?&sjt3IInfGbBdjkL2ncQG{Va=quJ zF0uW!dmDA{I-skep<+_+FXMXsotkua?2aAUNm{+<6G8E;fvkm1cBaVcC?w_1SuqTFai2ZNkc*$2l|lbW)>^hZ5rxvh5==BvJc?^Romr93hz zQmH|?VLeD1R5fB7WJvbhJXS?l^35eFByR(EaXw}UO+ 
zOR2MpvU1-Dy~y2Wx7KjmScjF#1*8XDly*-(xo~cK_G2r(B#ypRaFX{X9nKu(ZO1mW z+BU)Pk+-+;PH)Ps<)}ALmX@@;qg9~$=|NWaqp2R}mi~8p$Ye8a{^JPuol3;0rPDxp zq5O|hiJ_u%jTmbF9|bThs0#)|-2o9%7d0C=hMOKyA8(x=`FZCq?t^9Jo+-!FJfgI& zUA|!9|0JhLa^#|Z3zqqe9eHT+u>oq%QD!HLUY7Ut@>x4LAZ_!>!Zq*Yj{apVYr$zG z&C)dI5aH(bx!ap%x<-!kC_XvX#rIi@yl#hEzVBAxdgs!!7i~t%5lZ%sFeiewiXTV! zA=2j(;~~iU`i>7JErrIQ{ z9kwn0JkcnLXx>rtu+060g62|PyyFi(W0^`fR*JIhG(72GIP0#X<}~79aX{XUnu-RKc3xzgzz5tn)pBEM*nHv{ULEusyz;GiR=% zrSizbCvt|r@J(~$aGG0lWJa=^ubwO;OU!tc9%-3W7C6u{ z!+CJ_3h5)##L3QeI>v%dx_6z1V_3zr)&R;>r-gM9RD^o1FIrnpS6wmgw-%ven z-meL?cb~8?^SpbiOTT;XJu;T|e%r11{k9K&E2hB4*LALoJAm6zCI4tPQbFK^WyoLV zpI*P-_y!s2YW33|(6ws*kj@BQ<$SL{#T zYcw+B^@oFAJ6Y-jCyrDITbuP)qtQ}i;bX9Ir^y`YfSifviDCX~tN$6$r1{mxcSZ%U z+WNFmQdA7IRGIPj$$1GmP5Rz1m%h8u^j}uAjd}kFMddtg6>GMt_qdnw*OkNBTSrde zZo1h$^h12SdQyCotsHgYXcI*u@{TVzF|l)2Uht@kpNBPTv8QK&Q4__?bKja=jXu?3 zwJSl~>}UQ+Ncz!~A@4thQTSyD)gt{=r|>h3bet#IhHXV*pduRTKjUCMM0b03*by4@*%h0pQOxM!QPZpmdd8)LFY z@pQbrtklGl@dW2o=Ih0MoSrTpcq{**O1H~x)D*JXUUF~`?&|(@T#=Fyq@Pe-k0t<+ z{H`vqO;#W@<;eXHx5*N0lj$abBB3cyzW<_MGf>Wrut^a7?QV{V=}6}cmb=dzxpC8a zzU-rKV7vTFkK96~0I!Lk6#9lsbyk)idO}Ki^_h#W`8{^EI|6Sau^FL ztA)~{8k58A`HjgI<63kkGOY=+vp?D_ey-A<=2||`lEh`RhUprAJJU>k#k%$!!>x`S zPSW+<$73d|Dku%xdGL!GXXLzlN{@S=I<(0qp0g@PeeFB>B^q+wZm#}gOacJYjGTWb z42+;C3Y=SS^M6l+E~fnV-f8=7oHbZG`9>uEU<>an1%pB(fTh00&57&Y6w81%{fE^YY0qh z2*9JA5#oV0gqYq+&lKXE3@oGW3QF8|7omgb}&UCu!g{>h5%8~@d<)~H3Ug(2-Qx!0Ep@$ zkwB>JqS#(YXmF6_%;|m&%L^$gebCkA=yg4xj`{18U7})C&c0t_o$VX*C`W@^@8iID z3LyaLREmI*xtr4JiJ=gZwt2%*2$>xR=0>o24AfU426!BpWXFNI5q)_CG+2lL9tS4z zabRvlKOTh68xRCM4vc|uU_aj>X?AUjFwkd(P%a4ZII#L12ln%&v1T{&{11-j{{?599ZSSEn5bGz~jIe8wd9DrLm^YZUh651C#hTF#4qim_Gd|_uu@- zBU9jsm|f-bGHVA9d!FM186!p@U)|=(SNBum3*zGs+>MuCH#GikeEeTQGlD|fsj0M7 z3k|m1y>C~bf5>jDkXwY=O_qhMSrd2<(qH6LzJvAsl&@2*p7LoPd0`>pOAWfA)0EKc zPtL!k8#SzD1$f% ziZD?W8@%(A{rWidFYBT1QXaAX@WW%#EQ|jhsb0EN@>!)_*>laS?^l=aIFq3=qVqbF z=&x>^@5brcv}aZGmZzJPq@6R%y!_-+l&<*-H-{vHm-aVTJlo%MVMlnC(>ZO?N@zX; 
z__6|*FDn=j07m~?jbNxPIyQLg&)|)zX7BDzuWs+c6DrLX3OtY3;J(1)oiSgi#|tGJ z{0;Vaw-z!#P7GKWlKUU_cp39}0ff;z6cpIwW!B>b8ph=JpRmWvw8slPOyp2>kS-9V zsT9KnA{)wv;{q{1P7D|f-+~xmkC!oz7o_%)tq}<9@k;o}i~dj=V2nD5U|^4zagP^( z{M}b15NgwiJzj};yZ~ej`Tm2<-IVKGF>*KdcyF(ITsCK8bm{I(Ckxu=W*4+?{3!EM z$fGEi3ln6ZgLwbNCKygr*6>eLF7r=Q*t{7hrsVg}u*WNTj~57xyN$Lb?C~<<@dAVi z;}Hbx@iOG`0)~k&az^lwAC30p-P=++I_&Y*zsCzGCUX!9?C~=0@dA)3{~;jQy2Q}!b&#> zsSjJrlyJu31BA8V z?C~<@@dAh$DF_7icp3G00miK92nP0eCGPRk&(Xp6nN8k*<8yQd&e7>EuM(ljS&rfZ zyS$4>skZMsLR-~v`OTctqwVuvb-%7+*d?n(Q3BI=fyG=|!~(m#jJv!5WS&0)f?eJ} z!sP`hHoXuO?D7iQbjH8y3t;Ax-#-%uhT5iMm$&|0-pjNpyuv)*=4k?t7oJsV8f5T{ z-i5~mM(-SRp++whZ}2zhKp%Yxi+7+;x#kt~-4A)y+i>3fFmLn%i$$dWfQ??pj9vh- zI2r{7HhP&gdVz-B--re_dYLwQfrmXg|B|$VDEGf&SV7q6t;=LyVBk0l#RN8b88dnT zgwtyT0vo-I8ol7TyYRk8`|~ps_#~2nlSul}FaU9DBoYX)+@F6kg={A2xa!H+licD)RjkHhTXAqZgQXPeuEcFmY7Ou|Ijb5;RHk0??RnO6>X&fKh*3+>q-RNybXl^CH z|A>uVCXHTTvF$NR9N6e(+~@@$J8TgUZ1nySMlV3wS&pCx6Gz2t(+6}PaOH@mk#+-b z(-}8<0nBdFe-{RZn%}X}TYpCH88M7rgIxln7oJ*aE5JW7yUc#p1R>$OA0)Dj^ow z?PbjE1rYw^{)gROX5C((5!e-l1$KLxc6)(G&?Ur!^n)lj!D9G9*zK*0+Y1c#4MGgC z+sl~S3n2C+(rZXR;zy#J6fJIag0$M-7cJaNxnJLjo$Ny>z% zEYEAhhw2lrLKXo6R#4SXdY-*oBN%GGhuvPuxxElF;qwt0dWxqse%S4mz_eaq5%CeR zz-}*NZZCki;E6zBx0g}37hqhHM=-G4D{;4%eo6@bPb7K&jZX=`jZ+zKwZrV%QscCo zqHE_KRoL1LQkgwB>Gfc@E8LJb&L7lzrMo`Po)n#%Y3C8(HSv=|-*BnU%JM@`NJ+0g z^D&@y-}IvEyv+&CsE#Nk=wc3~FRw+?riwY_ZEM33b2Pt)EPfx#q|OU0u8{AavChl5 z&I>@I^-yqNo%fH>c>&5b^7{wEtWhyLbFA}zH%`}1if^)&qfQ)cqDVyEnYTRMq$KT} zS?1*@m!fpdSGYMO8N9T=x#HRWo(nsUmLoc+Em{ekc>t|52QaZ@|0fI#wKK;$Z~f`K zd*>YhD_6roo6vAv%=@)5i_qAt#`{(Cv|iHfT{uhN_QLZkO+Rm*E#Iw$%unzI773pa3vBr^X88gLE^L;S6P~c;%dF)KG;Sy$8rbq>+VTY+H_7>D z4xW@EWdZrNPWMCO}C&%PinM|a!y-%}A z**Y`);hS#sceEnmhhUjTCVrbr;v-V$5B60v;2gHN4`fRMSHa@i|J?#7mH?auGl@DE)s z^G$0u0SwZfA_my(Lg4E#E)E@&zVO$nPIu%UAHRPsS(s0+@eca!KW(xiB!) 
zeh*u|lCyk)OeSUi-#iDqxwiH;)>h(wr4vu^{oM(^z#@yB|A#GK#w=d|@oWy-uCV3H zsO1YVo);h(*z%RQrA4*x^aHr@vQ{&eoT#EsC_wh zeCyBg-AriwFD+j`ZGq(rPqH-Q7w~-FJWGM^>(Wc8?+c|J{Egt0hF(}uE`h~-JVagu zH=M^q%=^B;BL5{~fqh@bd|v=jXoo;x-)9 zuS9%b0P=kl0z&3)N~-om@6~1xZmq(y%93p^yUq*aifKYx9LBPH*L%we_LW`J&Fks&|%U7jM zt_Qp5KbyguweKlxiCdL1Nqw8qo}BvkeE|iFJpaSKFXO&10Fib>v4MTxKf(6}CNhl? z6Jc7ZnD>f!s_*}1sxN?%JyatYYQKklU&;BtKt|pGk)fw}O5<0HJ3j3D{;ux}EE*>v z7TEV?%=ZNl3X>2B?E5n6`vMHbYy<=Qz7qF+$@39+qA$2xT1w>oH$ESc(D?{m`E5yP zH6!Q0)6E>pRckG>HXiFs9kD!UL1bx$`HU?8{EEDg<$Z4sF}K?m9prUnaK#dxol3;T zqQwtjF6J)&fngjR81j@~V9~-0?N3;?O5y~$>X?+Am308guPjMr@ zPaeCixU_Wp_D4S$QGNkT+aWcAp|<8&`K>?YckTahsxRsL#+3_vUwEdanKu>8UoGXy z0`qs7ay9e!=aVAd-(d5X=}EutM1Nkv-Ja}!u=&e~`Rh&$iIaZiq;<1=QXpUF0-!| z>3U+jj9jOriKWfhhjnb_^7fjiU+>Eak8|9a;M!>5F6mbD;uF~&#{FzPUsy0^{{DQE zt<{;Fe_d5HrQQgOEt*;d`R@(&-5r!jm?g0+$jH7B&RNdFz*Y)R;5%ZGoP za+QCZ(vJtB2l@SPZ2mG}{_+k00Yk4TXg8v3L6nXTPx`H2^OsjG0gB$_`5!ia88v@- zBZUBnwgZX{Z2mHB{_+Ob023W)!~~nag6l;_C;e7}>ASB+Fw}kzo4=AVe=Adb|AB}M zJ;hTRzgjp%*!=w+^OtvU3)|O#M8pD{zl@l_yn|Z+p*t3Vz~(Pw<}dGx4`Aq#^RKb_ zD{=Fe-kO6cq)*;|yk7>p;GYcMuZ6SVpQF5A!pz?fxdQVSo^EL= zPlCVG8g3zQfLBQib%3GVgTKKJ@SorS1B(&l_b;&n%$NfVAVw`lL4h4$W*uOlG5Q;# zfgNC`9bn)wW)tE;dPI~PV=+7;>;Tur0R{%9O%Vg^05j$Q1Bh{Z5ePi}mr(~8U`*(Y zU{qC3u>(A+c>3)r`JR0Szjq$>_Kerpj7jI%V_Y(lM{LhMr(kK-*hYWzg7IhH8Cnw6 z210f69k1e65nHsO)O)XKNu+kqtd4=6nN4>DL>CWgyT+9uLi;R$FECYqH9zs9NFda< z6Fa~XaeyI0CXYZs$lOi23Kk=GV+Xh{4lpp7avL$g4zT1MU?5;l-v43;m=OmURCtSj zP#EBgMu}ZCZsDug*Z3+n@4KQ}mQxW1x*$a9=&%D^{|+#qm`0xeVF#FT2N;0Na6z$w z9pFE~0R|?s;SjFm0z^< z+k5kjmR!{WgLJ)B9;Fj}KG;kdyKPG>_tV-#Ry)0a*8v6=b8aFQ*a2qD0R|BBCLj>l z0cO+z1{l_v2nKe5CGG&zdqOaV=9Bl|^n`(X!g~H94BZ26O=vCnf+7QZz)X6;z`}M7 zVu3wi#ywyFvbYrjf<53r!UF~;VfJbjCek0Ao+i{}Bd;+Mr_(xc)p~Hwiev z?H&jmV0g}@HQbzM13Ov^Y~Xc8LTz9u{NQh}4g4qAz`(+(2(iF6Fk?0_fN)ucKwuk~ zSsNH=xRK}o*al|W1_mDP{wO*~qlj`&Acj$dZQ#1tz`$T>N5lZzz>L|z0Ad;W{S#~h zGin0^3{M>t71#!5d?GLaSwYUfsS1SJd}13|A~rAp@gnCxlDV65DJ(|r#x`(WY+zup 
z>Ji$dunjDE8yE<9PeTN-4a|rQ3=r0k^G~r2%#aNXX8bxE6bty~H_KP0O|A#K=s%mm zo3-!N^UZIyFOIG!&;PIu%(x8Fb2?ikk-BEC0 zC-{$Wf&ogvH3UVNdP*J6WX2-6g74*~@xGVaf0uWbx3{sccMEy%)q&pLCi(-q54dtf z(@5J;uZy8!m+L(*b&2h--P@>p*8%HUVFA*omBUy`^VcqvJ|CqVuzE?0Qd!yN&s{kj z%f{W-C!F*iHuP|N%bqQJ_T6Hk#k%$!!>x`SPSW+< z$73d|Dku%xdGL!GXXLzlN{@S=I<(0qp0g@PeeFB>B^q+wZmt%z>5MzU04B)1MljSi z9XrAG=L9>|>qKDE1~y3-*ue0_OUrQ{&kNqSLf{2&&KBwgLkS3fgT3HC!3zc!A@Ya? z_JSGnf&oP6E(8L5!OVKWK;t0!{depIGwlTfk3;8BbdYWl_JZ%^m>esUiFCI2X*MZa zXNG?#=cNZ9-X1XZ$5n2t3mvDpYNS@5HF3+dE?zJ&IMNR>z+Nz8UNC?-O7=h43ue>{ z1{lZxMp02!L!~yNVr!^YLH>I~eRl^X5@ty(3wbj|>kGygiD@$xO@~HqTq!Lr@2Jma zpKj!v$aygIuLBW^r`v356L>@2#7x;zSy{QlyX&RX?X2FMf3+k1;QbDrJ4}lCUMdMvMp5{Bksv@bbQ8a^fTEewa%)LI2e53$3wa1hKY)7wUK&kw)I@6 zC0$Q!myuITnpoP5eOSj?vidbFFujoVI1#tfN?JvfQ0=k z5(u>c6&pxsaFFH9>3$6hNKxs7t}aKf>-ludU!Uv}6{B+Y{SxbJ-;=!BoAi3H+ZAre8|M#dz0zGDXHSaG&9w6f@S6BZp>McUXJz@JC#0lT zpZOS27cUqXoGL^?fxTeKd%-~9j1wY&y;+5Q3#Qk-;10!-_ushgWv1?>tHEsvt@y4eM6epnq#6t?5+V=_tOhf# z1_Kc8Fa!jv!GDAr3{b9<^G~oEEa=o3SAzk}O-B?h!oW~Fb*u*0pBlVqV-}&YSq)5G zjlJoFhQB=g>i**e(Y-7ferYyOBQ#qm@O~NWf`2l2zZTAde~$8g`Dw#H-n?IN<%DMZ z0^Y9=xrAokRQRPO%nNpSBJhIY*_YNnPu|}pg$NAcovA_%VJHUSZ?GZ!Cm6!O;!Y35 z0vp1N8NvV}`2qrg4Pj;tVW5#h&cDZoFw=%G@JLNZ(Lq{9l>1~cEF(NQxGsh;F!+aj z|Ah@<#tdNqkxtIP!-g=UhA_a$ApJ*d2s3U71CaYaP^zd3gxZHuglBw z3zZ{>Oc=JUZ2jSq{2l%?rR`K#j=%fWugvmEdfw8~xhbt8w$G37F}htiNIIk0W-nt| z3r-_xmZmv}2saNobl!i+m>uQC;p2N1bDp?oubuPMxg=#mRF>zp;Y0O_S0RgF1Q3g^ z3AZD(@=Q^jU`?1wO&D0@KSnIDCd{}d3_uEP5fH2i{}GxnKq)FmP=wj2Vx|&2LHN6I zx^_~0ldT+e;%E~^BJ$3><>@9RY3Ix`FF&~yrE9*z%^}I)rTxtn&-V9R*m1NR(K&4q z%z$78(9Tf+%tx~R6$Xadu47HO{xo451`Ofge1RbhPr{a@ZJQWt}v8^ z@Hf~M{u5kbVDZHQvB0h{W3DiOD0zlJU{{z~R~Tq~vqLnnE6lVj3_QMIwsl3bOyHs-{w#QnA%kJWY7Kid#i& z(S}m*y{09R+C8&626|>T-4PI7JgDs&SAq!bvp@wJQu^oW+7dljnkZFN1ww5`#Rh_> z3CrC|vG+Qcc6Hgw6t^kYp5@Q~Ixnc>n;hG-|Hs~ShDDVu?H;m#Bok3E10w=PL{~)A zHGmNn(=sBcAQA*c1qDRKfN53CIcLl$h5-y1(8UM}D&~ke3x3t(%sI?(CIG{Xe7Aq} z^VHot+k5D@s=BKB?drXA)u7ni_Ll!40Aw1kLXga3S~wyAMCB+22)%a~J2EcC-c7i| 
zW#I|~25Ns%7!a_rfchy40m2nl3Rf6F(CkQoK)Aw6;tB%{S{Er8go#AOGCIN) zu6V96K%q_7{|HxD>0Ds|MCU1G8-y$TCvb&pAls^u6efhnBIr@1bgnP}qhDPjFvasd z!WC8=R~V2n*h3*Byu}ww{0LWA0bF6g!pNDzf^dbE#uWx2s-;sv5U#LNxxxTOjb0QO zge$CYuJ8(qM-+kDqFTtdCVl?R6l0OC+`;+dpu71Wx(@x4GE^~Jc?4>KjFGMJM9NSI zSy)+Q;Rxh}u`pGqupne%rILjskTV9vERh0)kcE{_7RGxCfMUU>Kw;vf7C)Dj;chP> z3zwfP%m=3SX^FrT-{c8dxZ=pdLx8EP;BLY+S2#mUz!iq;VKyJn!?)PhK?2tBs>6(| zVF(c6H-t6(C$NSAi@FIE7KAmdG}bTxQNIQS1Yr#;mo*G%G}uU?L0H4eW(@-#jo|Vt z@>TQf;@FvH9u+%dE(P<5u!hUR8U_rkk5Cv8*09o8!vKVBdkP4`8dfT67{Fj>QecRZ zREuY)r6#Eo)^J%^!vKigKVktXo|Tpw5W*TR3u_nvY4U~wgxKf)STI%^mJak8LngRqAG1lBNM(wct%mx<9*JkOKi6q0lpE^GHYnrbv2j)#F_ z-=-U7EKI-@&-VyxSTR(i@jx{KWZFKWkP+VEizR-9HLL*EFksP+e*c%ShLy$|1|VF% zQAR;n!%Afh0~oF|C@=_XSmCT;x{)oPo9afNf0GU4X2bX^tHN}qyVM>#Ve+_Om6r*H zdh?%VPD!hf4MP|U6J*3ZQgq6+{20;EkNdZE5!hZx$7(nUT zmjZ?Gga!9{_kHiveeQ=JKjzK5^>f6<#Z5w+7us5d744h8@#dBkR{wXmEDjy|^2xkb z;eG34FJg-3@4jZsFx_5B1g3a~PI$r<#}n>|>V}NQT;Z8{sP1f~ zb{M8v!yoMhtYNq^W^45e^M^g!3;4quc9-A}^Fu}Wjera$9G;Y}wycAno?(GlHoUE#<72@pRuuE-G8s|p)I_s9}?y51X#_}^( zZl1U3KVjCRVRm=P7{>SsP4t|@o&C(amthzW$2m(tfI~;y)C+F&YT~HIhx>Q9G%WH$&4p;Kn?vEcXSIAe>YW`7V*03S zpuvwF`%b-Bn0l?lmj;W*^VYb^LMD-;_CrgsS-i`BDbNxu z0yW1MGl2{%{0ph0*B!kV1p-hn2}6jzEqWkG?ej zh2|g?J8don2Z`{9OUobTZxxUvLGOz-(B;Bm}z%}%GRRM`$wW6@$;2b68wsxV%Y2L(&PV>xUO!KO&%^SbOUvKHRx~d7^Jj{D|&-Xk&!Szc1+v9t0{qpx~ zZT^o>#*;SVj;619I`*Yy_F5dN@Y`NI*YDMsL*78C-6Kdf;6a0F_GLGV6Cfk61fO5hLk3xLC1DHw#$ zii>4*gg;!d{9%5ys80)jA!mCGasG^Wt^{|J*<*-T=< zBl-ko9W)!MSZZ4eHWHoIRvtEz^(1yi;`v8Q>$!Pd+8utvPVb)1snE7w$rtk7wiK!# z9vc*|-}|oP;H7YJdHv1vEe8BNmR{|ES>N!wm(xGSZSYLN>NAKOm4wOr=cM~RYS(wCt!OTL+C{Z=n3;hD;o!>e}DNJGjX3=$tz!cB-2$NWGOkzN0Nf3pM@D^Vz@soj%v}*0j z#mkqjFsiKGf-=6H_CMB1v5h5#NqTU9F7Z2s1tIAvkfb-4*S6^duWb{rThOvN3JAg^ zRw|PitYU=$1qNXfE1XFzY#YEjT3PJ;dwB2|AO9dfU+=!2-rgRr9slm*+N%p1n?G%rnTo@;&SV(5Z*DKJ*fT2_sod?!eaOVNd+A?(>*w*()xPom%m`!YkY*#m?Y=f|gmB}UsEY>7bSP(X`(%HlS z$hxi+AcRf)N3e+jl=bxe2f`*6r07a-MGU}fjG(NA37Fz3I$;x69GlqjkM2ZFGl|`& 
z37Euiwaj+nEX*n1d{@9J-m{pIQw-rF{DyFf{{&7kV6m0H|3o;&O5+p*5ZgXbMnO2m z%Hj)W0xU?2hB?=b^=`rUJ~IHmxWUd80=m`VL&*=O5+p*5PNG= zKoCx`QaQx{M$%3S48kc^I;R)_+22+yAjK0{!YNh=rx*Y^kVyeT@7=|YmP@gB6Haki zIK_a$q23e*gj1||PB9>G_!@-(;S?){Qw$&^dr=?|PO*|W#Q?+6dlU@9gri~^9pMyL zJf|3-NTKV0gj1|^PB8#-yjcGuA#TN(Vd29^`3H^k4UP!H{EFTzlq%p?T;%a9uJHI3 zmnnY5^5Xo8wJW{;UA1ue(l!5juQ`W2FR=P+d+K411xp;qBDJD97FIHGEJke_KlJlF z_h}p5@BQ*U;$!)D?*l0h&SV|2_%>*+@2Vd5;g4?RjA%P@-+_9XzqXG%aA-}N{WmY| zdG_<1W{DS1*8I%b+16<1%C^_Td~#i)#*F>-WO)9`o>TVxxG<|)duMdXe}oG>;kTT} z1xx2~!P+9gB9!YNij zcVfUIy$6K_;S?*4Qw&7=)3+!f2&Y)Vyvg~zP5mTqQ*VU;jI+KJ7=%--a85A|#{_5= z4cw!1^!Yb|L*a1Htl|mqx_~*dJwKJQ4#Fx{7OQvya>7_#)T6Klurn?Xw>59CTkb_c24!%wX1)ceW?D)=Js|jjT*F= z&f2z2b(irrR>p|9QL6isjhDp)8vfAGFt{Ji;doVPI6ZYoY=`z6O}yAf?51;EXKZsw za&T}cQtciv(Ej9Qzl)X|{Xcx_<&uF6%&a%6Ur#MCP_K?u;-_djRITrH-)FNv*GF%O zfla)ry4I8xid|pHTKO8;tct`^h8B#~So&xW<2*>VOq}8gsRB+h zTsO1boQHpRwXJ|%d@zoYT?~OF{D!cL{{(h1U~w&j!h*1imBuaxAa2m-|AbwvTy`;_ zag#p(B|n}G(F~Y=E>;M;7y!A?rU0S$?qY|`rP#X(ySOauV!$AO zFNFbN7b~7!3GN7b}2W3|PF`MPWhM#Y$rr0}$_8P(ToNu~ON^z`1`<-+v%K|k&pEGcO$<*zv1CygM)Y-i#p(M{)@^N@X4H93y)-DWO!o2xR-`wd#Za^26Hng;FCbR8@^J2^P` z`h*5H*gj8Pvyn{K?LvNSti9D^%c=AhyO(Ug`0<2!lfOc1HlCRKCge$$&%+~uEWKJ+ zLTP=~+d80Z+g6kJdI!xY(#-uh zHag{<&g+OpYX`qRJ8b;9PoLLTU!aF3NeKZP58zAchJo_d&6?xpBr^M>aXkiP0?wM3!_-$lG=1+smE~F z-($I}R?$oLdajzZ%&kGk3+p?x()LEg{O8eY%1c!(%V8SJ^y`@$8#mqObogJ-b5rAt ze76;9F3VbWP<8yp9ix0q9J0NgGC0mED>GteIdX;>&)qVk+T5P)ZpFsB3csbislNA@ zH}CxlGy~@yJz^0)F4!sUY0$GhuP#R>S!_~8|1Ij11(vZ{bgjQ9h+S)OR^e6JwPxHh zD#==(jriI($|_)v_15&v>g!+m^i_@D*)y$Zg705rle7Euefx93s`SSLQk|TWzX`z}EXizUkoL!h!uw7Iuqg`yxSmHJRYR6joqg%JrPtf|Xl`hQBGQsdklxUheO^`q!^tXg=oq)@^kV)pe?kC7-`g-Qnx76l{+wUB_aY zUA$Rez%GU>Xtr0|VE(TvhnU-7-u!{1t37cLKYQY`JZL>7C-W9iNi7~UY&)^dl@2TLtnVf4h( zyuVTH|NlJWEI$Fy7_Oz+7Hx#ToXxH&U>tKt@EOOuMXp8R-v}7TOnNKM!3>dpEvh8R z{r8Oot;$GpKO0fqYsJ}uh+&E8UsU()pZs^Y#mUoy9*3;zHg>|VXYYp|jo)kDZs>gd zFPD=dzH-0zxb%7Aj`TKJlhS>1;v5RsHLst+)lT48q0QR5=I4y#D8Q ze#`Wc`|r#?8XIKMs6pPp+z_9Bo_u9xrxE890 
z*iA-JATb59<+wId@>=Ns`b9(jlH1>rxa8uT7|9(Jn>+BmnBy!8Em1Ef&N#?L!_rjrrFZ%sk;*8|$jOZRB*zvySm?NMzwjP6uo~Tp+@?W$HpgpAOJD zflZbmCxEgtyOj;j4&1s3vV+r|OJoQ9Y!!Y(vV%V~JD`tu_~BDWy8grDf5dGR?e;g$ z6MBXS@`STZ8S?~4ec?AGPxwRggc6}=ZwrZF5l14v2e2$hkV5Tcap^G77wFH5u!YdwJW|DtOx&ML@Zf?D z8AB@X;z%|;7cvp`#()m(NrC0S5Wdp8Mt^wyD9mG6`0!EwK_lsBkVdjlHvjFAV`=#9 zkb?=lw?pO$-VRB(DoKi=-?V6_n_>6aqjA^E8JD`xpa1(oq!nCNOV%ju1(_O-IvEdq zI4b))y)rXyA9F1>c36XD)iri%XtQ1q=j`sXD?`m}LmD@Y%l)d~c)Q<;ug**Dq7$}c z-f}CnQf(i&E}>>G%@nuly^xmqcMERw1@~j-ENO@4wD#<<$vmLC)z3+qT6t=ZkF<+< zYd9k2n?qV`Y@KRzhO@hlYwR?{b;j-J7?ZhO+i;HTn%yvw6<0VtPIpVaq*><6j8$vS z$Ql*kG%PB9WzEo=1!Lap4S4f*$C@i)A<35xJ-L%Vh!sEQb;6n~o4a}gE?ha*>GQTW zLy~LP4G3$SlYdFWb@Z06drMv>@!0!n;mX*Kr&jLC?%%55$HJho2lgJ_*XZ8Z6VESd zz*`G%ngqiILRP#efZhfY0i;7LF#y_?1wi`3LS+Up>+7R1c%DYh#4FZNS8bhVEQ{NJ zqWj3)=SAn8awdP+Z1iP@k#is;Z#KKVGs?CbkOeRZ+Yyj0IZGwNKWpm;Ne&JUMXKEc z2HKyz?03;}qyL9by<9SoftmG2_3Nnx2I|$3O8gW}hpP3R?)z-k=lbX^F|dg@Ro6Oo zm(`(_>$lzdR#DSTIPDTbIGl{r53;-UvCz@$yI}oiOHSwESM{>(c5ayE$K^!lTg5%s z3aqQy@NCTQ7Fo^PeH)^wq2a30pQEPAvhK%0N&RZ~?N~E8y5+;Ld*7aKj9m1?<4eH9 zh=OAeUgovFmYP)+^>7&w4fb)gDHhLax$ElW?2{_DfoIf-p@`}{DU*!4ns{`VHw5~r~H#do*Yy|h=WTk9eFmfbCM zZm+V%a`6ly$9Ht%%o|51HVh66JKsEf(RlxmYpcW0&HXec;pN6zk>L0mjT#G$z)S8w zesulILE`gDVr)l-{ioy=)ZhCLyt8Df@u=pHhKcn@`aaow?%eMemptyHm8iLT#)!CO z>b{|l4y>HIpR4Lxai4R!Nk;D4as5=M$9#57>R-RV@6@WjhjE5+I6pNLukZgSYW2lL zm(JIoyj(wJ0n4goug*H#;_~WfTO&2SLOqUYmk{omZk91;%x&xR(QBb?Fy(~mw4kZ) z2V1uPs@04eWt`9DE|_&@-;zzvHO==}xxIGbCY;@5kulTo>h9udX{hwt!9|>%Z9AomYdib!tG5o32hYbjeqQ=x?RJ;k zR_>D@-hKDWtcxbv+j8(jcyg)r17!Vb0QfDNS~rl5F%IQy+_I2O$9`A}&OtUI!?CpT zHL_V1iKPrJ7^|`L(H_QmkZi4n!1#qF_i0G38AFf11sMaBq}hYl!+(Q4@~$9vxS+?F zJ3wv>zahE9ADTM|hn^7g_|KuhV#*yz=qWztDM#oj3oNf)>HnFS`6vZHHt4%E|x$ zxX^zo;7A6bs0^U!Q=`I_5LH_nTtU``I#_DE5XLnujT#DLGnN+H!-&PwsUl>}#qYFQ zzpOO5!=M&|+yTne?2&o!_3ZHaf-K@<5kHGa6RnBDZv?J{Nh77YlTp%Zq9t=usx4Aw z&Pg3k*864nDPfuA4Lz5o|5ShP{pk7ImuKhQac=y>;^gID|J^qZe)iU*;NyJuleT)6 z!Kd_JUs(3$ZEv@+6ARxpUpA$esgW;7y|aTsOdoX(H2Cr1J7ePRteJcvsLki-@hjWD 
z+kL0$yDmo#*Eo>pbFb0VaD*PenqVwc4w{5Ka4&^%PbdV*W*}QNPRZq5G)dP}bL|MrQ-vLf~N&zV90HxXxNk+<^ zjNs)5Hx@NQ?D9*s9g>VFBN@St4+4iNmK1PIp`TRSkzxNS8T$RtA4gbJ;?!bfEkSAl z^=x){J)C63%n~FSm+csn3`l$7Hzdg@)4`XLk_@`w$9M8myHf~~Jyy0PqZ~*6^tlCg z)o|-A?gF*^OfjlFTs27*BruowFieLY#mHC9vx{SACK#lKJ0w+5W~u;o6i=W35jR%h zZj5Fw;avolf968UengjFs_l?eL0PE+Ia&~4H~RSy{ZE}=&`Cy^ zi6F^<8aF#;CQdgJz6#Qf|Av%EH~5Jy{D#`58F+S5T@5+YQ1F=seJaHdwB{Wn2$qd0 zsicHaIAejEO$(xE1Q{;Nkc_252yZmg3L6r@@GpJ;n`A7N;Leh8;ROI(beggw(U4iH z4Uxl*@*Qr#@-J}{yZlna2$HcVD`SCFGw~+{9LZP|m9hLXsw`J6Xu1?rx)HrakZwTT zo1O3lCmzc~1&PPic*ev7(q#AzNjxgkIY)`Gb!9V&2uj^@$(d-WuV@L~l*3^w+-+J_ zEdL|fL#eLX%jKR1ABHuqv^6lmAT= zBWSUb?og8KBd*WKmCQ?QyAjAVmDN&ZlC0olWz8M8PMWZ!MS308%!i z$^rmfGweC{f?@nTO}!%tt2*hPx)he|*(>>2{;)0S9*MhKMmHG^o71WdlfvERrXP*F zGV;Kzx2;zV{JCIGv;8$o?nODZhdh76PQH}6vumcunw2@3>&ERi`!e4DRz&X~0Nra= zXh(o8HN^Fus@b$+ z`%Wd5x$ji}n-dq;ygtFJ&7+A)(;iK5@O2NpH}=ue$l!0T3-r*Y9$x36?Nn80AN6Op znUeMFgHAz~#R*ansE@NZf50wzf2<%GxtY(Hj6fO%P0W4<8=RLN0L#{oop*MqbSPw|J}DrG7*a@8QGL7 zNJgM$&fXt`(~?uhg0v*ZfS;C}7R`v^H-fZ;Nq?ogAOEss#J`IgTB41ZD_KU2?-%Sk zvNz&h=J>CZd>^E_?fYPH(t1zRZ=ugtr8m;_UU=5hL~m{P0_!$WM<2hvdBpkWJ;%QB zUxK$znRsWl>NWjtZbD6*a}OW56xPP-te16~D{?AVq{6Nm6W2;qy_%IkO{9h@w)|y?hDc#3n?*{y2L1)Xuuu7 zE3p220GPsC=^zjimdJ~Lr6ytFeHM`TZ?;4vq=!i)(NJ=t0U%zb-#;b(3_tv*FcKEt zPym2y|4~*X3V?JAB8i655)H@^Z_w|5kwl}|N@-4F`TSqj}jy+c~K>j6@C^CzoFtRhVww_zKNWtDEd5wJ`>|dXtx^^44Gah zmBm3RDd7~(ci`^VowqcG3^#{Jz9Z?0`tp$VXo?uNCO{%@l0+n=dqk4&C^_E&5bwEA zAc}_D(oKofZ%SRip)Z}_dLMJO1b_kLcsJOGU@xzVm$CiS$<=!5~v;}f<_zg*0D%Tl{@BuDt5l&p$rAh@{qe9!V|lc!c`THhX$t@*e%f z*>T|r?cEr<1CpMpqS$@nNW@Mg`4qv9T^8(w8+=tt>=>WLJkVovO7xW;5%=Lvq+@;{j(#ClcRkmUTf>a0o1qcXR571;kKahw3u^uibI^QbpxmI9Z&4y=VrVner z!n6mpB~pU*OyF5h7SHo_!=CT!8VWlb$vwS9ET;hrW%2uYIbX#6yqd4&_48Uq_48^b zsh^jy)jUR{)dMv5=UB}^1Bd9kw?8~{vZ|_9h_i#kF11BzoEz=ytm~w^tH!Jv%QWHVvK^+zqQ$ljDBE_vfArquUn_kc zO?h-MHge?1W08G#tw>patzHDHUs4-Z)|38S!DbGKuKV|zV%J@=y>Lpq?!s-nD&@K- zym>ov<@^5YTSS^)?h)3*|Ld3A`Kzw3`gZT>59d`+jvMbto)DMrACZ@^XI{_e9CIBjwtrq*woHM#qMwUKMb)###$Sepk-|Mjbg 
zm&%M9imXR%21jMR*d9hKmQEF+{}->f(+?{$H5{h9eaU+6LMJef;8NA^1O_Ve?05ID zqc7SiNM#eDLIMb=hDk&~a)3qBc}1r4!U+lZFLiqgJCcwnDj|V&*Vrm{-6h)# zNl28EkN{*_gDA*ILINp#MJ6OADvxTYah2Qj`ZH9GNvI{)Pz$;_=l)J*pjyu^+5t&T zW1Sd5QuD-}pVS-^N!#&n1W65(l}L7O@cvWj({`aAW0f*(Pk56N)+cf9hr{#IHG2*` zY+@9v_vXp6*WKD~fA#jk^NkvxZ~E))O;O*ywJi~^V3xqNR((BI*wj=t{RD|F@S+Aj`I`mTRO@6f{_6SL#(ol*4R z(AOnuWDv$xb*7LL4Fx2lMz$mcZArpM0pJq#07yZ%1qPQW88#AmD=+c}b2HdZVJB*C zlD9H)6j1h~0I(S&y8oN#x=Xef;^q|O=7f&|z*MSTr%Z(@rjTqd0sZ3#4K`H}v`^GxaIH@u2 zCrE0ZjbKb_AoYjeP;nN+StKz4hSPhcBsH|_is7}7X>6)bkSor+kfms&6y)f09gLg? zYHU{Q{GTDWk-X^>qFJ$<$fZ8l;Ui#C?EO0i1SESsl3gk%yNp5Iu=QAKQhrY~%aLq3 zB)cqIcFA9NE4u%s=(+Pr4pJAJk?5@a0 zU;2%nCIMil1tqZt%_#&#otEU)C!0lSHVfepR~n_<|NQkSH5EnLhyBZW^B*d-w5LdVg3G3fv ziNy7nY&PToML`EBLbs6kH86*s5|NV}9+RWT(jGn13>SWK=y-)*e}?2xvgMEfO?d$t zB=*hd{_i9}Q&fNk>)t|@vKi5JmuxR2KvPP929UKpOhHBhG(`t!u>MXhC9c0@vmpVR z;sUfd9^d>XbX15zJu!WwHQoQc2}6Lu9H5Qy5Bl9q2Fk}b`E3=BoyKhy4o>5>RhU7w zRj@DFOop)`$(77YZ0IKQ;wgHmxq{_;z|~!4f3oqim_WlH8X5-o!#NzUDh;Qn?uhNs z{*JL1+lbvXb;dS_BnJnFBGv8z1MN><_Pc1g(f`AzUM?BPz|4B1`t{TT1NG`iC4P#g zL)H3D_kA|&bA9xd7}&&{s%xFP%j(d|_1kWJtEg!voOVJN0Q)Ui|Jjn$dH7YmY`apu z1$u3gS>C-)%a83nHRjDf2d-W`@#0?hhnBQ9RbDZFDD8)Vn@F((pID$L9Qtb0njYl(eGap2owUr32HaNRsayR zZzs_TnBxFyH-UV!^t%aT!%9%TQ>C}~%rkJ(5Im!Fh%RcL2AKG>&WChqyBoDl+5DLVi?%0L7 z{-D6Km8rQB1zaN*x+>_dbdDDW#PGs^sZ9{h-N3kf_3JF1kW5Edh1l^-PMgg8AG#}tU>*aVCfHuF1_1$u}d$- zW`>n^=_T4rC0Tmtu$1yw>b%Ks+Qv5;Hnnft+&4~bPo}&&_4)Wty^(#{5B4XTMVqN;?4VIJL=iF;OB*FT7gD^YolJ=4_cxXsi&bOVc6GZXEvMf*IRod zc;&3W>vKQGU0op5H~7eyT9LWqo3>6F(=Q-=<>yOR#ynrHcI5ER0l5!)PR*-=`UQnI zgE&vPCI`%&J~aP>DbAC!A9&G8``mh1xhx1qk|HrOk)oghz_`kWgEZp|)^M z4l>++^C|F1v{uHJ=cSF-a5)t0W`MTXrI%teBwAB^w8lSx@H$0-Mxr&vMr(Wo2DwT^ zK#J**Xib69n(%G|E^lxlg&o;#DsH!ds_5Hw%b^(4;uDL{1fi22{^7#VnG_UEj*OTQ zz_=Ku#_!@gkj1H!vMT7=P3ITjRmCG4q5y{+1)duSM{98XH@pRn9f{U>uSiz#i_Z%H zf^o?bA&_D|BwABmvNno* zcjBw_QoHDcEt$963awPz2d+z~*-JCUt$Ht{W&YiQ+kCzw*1YT)=q=ECQ zDm$ut&7PE-zc_JOwJ?sYyNh+zDovAA>KSXb->#w>vwQ5dwV$qJa{k%9GWJrvRPxpANKH$(`Vy?BvaCMbuKuzid3a`x*?O%8 
z73AKI&fl2c@7u@m_L)8PYIPbJ9JDs{LjHcA**?LOcco7s+2e)l)+tV_?p)lpG3mZ_ zNvk>Vu<+rNd-V#QguaMdd37+WRoqpdR(s-l@3CL|S{1A&{C)V=5_=C=&8+sc*JFs@ z=)*U0mIG(k(!8Gdz94G(wSF{AtUuEC$>wwCYCT#EeL?2lM`o<%8W|(vj5T~i9UWLX zbw5|twcUgQqiv1U^a}Mjrd>j~XS!L&oH4hp(?_p`w!xGWs?&m|z8`Ga{;O6q zZj^C8m%CurnSDz(IoC8#vT}Rv!c924$s%cR|4!8|#-tXdxoTW9%tzkLu~_PSxGT)zVPuwS$W|JKL&T*8XUHY}cbM?_FeSsK9Bo%kN={(>!#&_Nb|d!EQ<_J)~deJ;69gY zmBH0c;8>x}+PdiQg)w)Q3_7B^;h&($+1|%8bAulpes!baksfoN232XpU4bI5Mp!{| z^o~b{|Fwpn647wRLMQ=gQv%)pohhJ`3!Ei0RtZkAvSh5v2_=N_19P0$k2Xh!iUYw< zT*W~-_|c(YEQEq!e+z0*_=yH1(l;~`3R?H$prn4aOC1XGSAQYh|3Gy0rCSaO1r;9( z@^`I8^!u-lBotJ5D9A@)aTevfm{5>zK_nDZSSVPc3<12L#Dx@oqTY|}JzP$7zE#|F zt-!jP4bR3*AJ%+@X^-aZz75gT&~Vi#&H_e~1rLY$+91f7Lb0hl#Gf^xdDt z&x((q@c|O-{A=jb@X;b#4NKHsFJnl`_|yn#E)1} zMR0}a5;sNxuCnZ*^B)pND?X0qqmVR#wgo~FDaEA}WtX_)Ad&+s+E2fKDjLa2_hTfE zE^8dkU;P8c-ai-NYDB~c*7?s6BqahM4Boyeo`BSE?>M}6ht z($NR4{MA1-SnTRcw;U3rDY*detVg&R}sAeX&|6uZS1yZL)h+H#2~NVgyoq$?~) zhX`&a??Ur!UhL2pUnf63kixHNsUo;B{z1Rn&NODB+k&<;XL)UBrt;d(#8Pc%tV;$D zj3L~W%u8%L4O1?c)lzko-U8@5Q&n8wnK2dIcgDA@eP=GMj=WvCwfnLkm$%v$WSTt9 zHS>9LbmbP0u}^>Ip{p!pd}k2!G!?-C4vo&x?>`e9=>9(yjYFD{Jpc~p{3Y6hcz;IO z!-%rj17k2<$?O4_ys!F1t}R#)p8S1$?^jiBTIwxzuet9_&($wO8a_C+{=c(r^lBa0 zGv)R16RG(LT`!#eXzd!`*SlY%=YL&^d9Jy+j?m?ZpznWQ$klDJIq##fTkcnv(aGm6 zvR|EDf9d|Duz2k*n&^vHSQ1!LG!H@iE^!>R+yswyo=qXxM5O2{J|>{hgPOOrcZp@_ zkikQ41bXolz0_QRTV%M4ZIN-Of?H&ix#MZGtF2Og)kqrWGHS_#yZd(EKH-0Ce#*Yp zd)q`LwT0#P^y|-s6agI>;zRN3iAB1A|O)OoxEGT>H-sYFRg)#p9 z8$sEdDcF+_#wCMp@xHS11l`IBGKAA6Krm|oZKWh_qS|#THe3g9dCgcN4#bkgw@p=! 
zB?*V?U`f|+QwWO2i!y{9BwT0lIVxod*J1f@()AzF<(I*DNVu+?a2~e=b{}c6qWCNPzCvz_o*)|~INE(jua_Rk_ zDHoCf0&)Q16>F%gw$3w_#qB>)#sdi9oD^`l>qGA+3>l4l92v<;E2{7f2;Q$xAxIpV zj-Emb-?04o8^tca48}tuY2`%H5CA+JLIKAV!pX1$k-~TJ?$Bi^d;<)R==X1!V35Iv zNF=SeNE!;?7a{#wPt(2W3ivLlf@k7d_>v@}Fry5J2?rUBNCv-20{4mv+`$FDr1{^XE>MQxgOtF_ zQvwGAdQHcFqRTIX@sJ%zQ9F<@V+HKy4c-5qDPoktc1Yk}-oPDTc)N<;fEb8FGT4v| z8xUcv$ceEcME#nE`r->n@BfjAATgv;zKwyj-Pl zJ`SJ8NW?)t4vj?A6%kQS0}eP?(ieLQLD7&-1}h>Fby*{7{_=m_BX;>^Fdh<7D?Xy; zd%o`@Dd0#%tprjDABGO+B@x3lkR8iWA`bGgBof0{L=2Db zaRP!WA1DM#46k513oO6dSh345gYl3UUhy$J-htGSod$jXnJI>lA^#;+@$y#1!G1LN zQ})9IgA6uAVtB>H@Iu6JT*Xj9c2&YD1e?eZ%Jm&SEYQ!}J0$RTTND|JNF~FEVr^0M zdi1!|b?$uo3>&-3bCaF1G1rI1JyN2-2fjRc==|*yO(xBcvx(kcS`^7O)~PsckZa# z^SXC@-`lO1)@$O-{TO$3ft4!TVQMT|Z0mrsZCm9Xs+FhPNj>-D>E*d!)l+gGZE2RW zZ_Vw@9#5o-dkkHb~ye=-eq$8JlH-v4c-^gj^+tv(A~<#%#&KF90i z6vOM}G*!^a$&D?x_lYoIkI_r@7n=KXthS(mLv-ESAD%f`RaGm**}-9#+M+bhjrMic zdFbw{F{{RMj+>{km$9+&uh<5McGdEJy8rR~tb^YsZL{=6GpjDnQhC9Nn5m+TV%<)u zF4HhF57~EBlj9iQZRTRPxhm7V-=Os?*ZsVyY0xfB*TJ&0lY@h=PiSC+?eo;*tuuQ4 z(0ADA=&$)Iv0v8drktcxAq!H&)mko8tu~yS=3wekwUO!Cs6DExaYD;JfBO;Lyi+!r zq4zBJUq3PV%8)NJHWn6WRI8Rd@nB*MYzaq(4A6yj-zU26`ZWJrbls&G%h1xUyI4c1 zBK$*^VqxEld4tXOth)Q&DQM`a^iDUwjy>FLw0V=T3D(V{9)~9< zZMy3fz9%JpVq{+P)UDTsmF#!oG4#;NQ{3vUj@XtI)sEKEzW1%(gj-T9MlqR&? 
zvi};?uEYidhGOVR@9zw}1<^oR#8_n5-%D~9ioIf~*{IQzO=r*5dbGG^GxRAe=96kB z0)uJD5@#@DD%fCb%V;n?&qt4|-C$^N+gzK=CQre0r&4N*{Kc7UodyT zHdub)4F;Cq=$gdkmtr$AY%nF4zgUx#O1b- zJoG-89{D*jy7!hLdTXx} z+PdiQg)uvqt+>Nl-#sYBeRA%Iwqauj-}QfVv)#S3cCMdWzy@PlG*%yI-qWFW-NgsS z2wcS;IsryJmIA_I%)nB#Ka5x`6?@`RGBKR4iYGzU`w9HJb~Q8w^>NeoCCK79DEa*x zL4BMlBrm_tkf1~^K?&XEfe&0wE^X`#(Pa63ZAsw%hjXbCzSh`9;_?%_iO#o*d#)8& zSF_>SnCZisuQ2WL2L$ur@Jzl^z=;NxQf!6<^N>T8EtrROHwzKF?oy0}1oMgy<^eE^ zIuu|{NH8C-&|n^xzxFzb%P+-dNHDLEU|y(NG##o%YRJxt?*BwKn2M_w(E;&v$YkJ@ zu1*$hFeD&WTy>kj{PlWCT>kR=tCIPM*rHt}X%emEfEfELu$u-|DBy^{syKf|SGW1= z-Y{0|x=S$@5)dmkAjTdJHg@Z#6ksGER##j=%wK++eG->nip`LKSRny1RJX(1grUZ5 zCgb9@B8uGA6QIzoflJx!&w_Gy?Wz27H|=!sQ7)Xp!*2xTZqdz7iUJX*t5~Ot%iQ2} z?Y2^YF`Z9HDcQ+zq*yYnBxPh?S|!d2L2=RRrBoP z*qJFUDZkqyaj{%zQ#6+wS6uhE2`QJ>;6};*maj;EF>;gY+Q`@cL2=A ziUN$p#R`mz`ODuPUKABe${au4IByZ6&v6~dfVAj;_{c@|B?W|0s?#h?(b|0I1=D1 zD!_+z?@IH(MAu!4v5){?@c}+W2Hjbdp^yMy!2v$li+ha3<(FbJB*0flfDeILEbmtO zEjSXNj(Xv406kh$z}d?ln2qrd`rXCEnuTuhTLK)r#A^w#nAZ{@k!lIxQZh6sNlCbp zd5H~eOia0~mS~jLf=mrZ?TiOL9F_f@UYQxUkGU2bJFLO7>KZ#Wv{}1{b9Q&xm7!*~ zA&r~H<$l#~yxs4_SLda6(Ft2JZ@Cp(skRSXmr%2pW{O+&UP#OQy9KxTg8MOZmb622 zT6=cbWFAo6>gOa)tvt2IN7}`_H5?K1%^@w8exJq5b;j-J7?ZhO+lYS}JL~JCFbiX2 zHIw4qq(6RncQks~tB8BI|9x}Uq1mMq?;blwe0bZcz&vx2t1}w8!3*wci2wj6c0G>~ z02B$paUlx;0D8BP2!I&ZMOnb`vRFV#R!AkY0Q@-og}#ffCX93*Qdo4?^XKuC>6>$= zrZ%{HS8rr?^47I^1D}TMd)oKK!To>hjr9H2@NIqjP$!FfOypsA zjpx1GvG?jj*T31a^)jJw17gTEK{JypJkPa&=eho+JXafO%}n^11w28cHB6E30oMf9 z{$%52F@c6ZG&BtEhjTbyRT@rD-4WZN{Z3;qwh_B&>Wpm;Ne&JUMXKEc2HKyz?03;} zqyL9by<9SoftmG2_3Nnx2I|$3O8gW}hpP3R?)z-k=lbX^F|dg@Ro6Oom(`(_>$lzd zR#DSTIPDTbIGl{r53;-UvCz@$vta#aOHSwESM{>(c5W!etGui8atiL%IG*<<-t5?s zX7NW`AK# zF)Q)I!i(lXyHZAnKm9i@z26PzMsXbm#LscI9Qd-KQ|lq~QW_iz=(8@p)ANlpE>Eed z_w?86eOjw(>0i_D<~CKgSi`y8z>zIe)L*$gdg6bvV9UWFxtXJ~>{}Ilxi<9Pnh)w= zEX@i+PnOtez*zdz@4wnJ6k)}jaZ?5(!&WcLA@|b}PnJMOldAWT8EUSUF(S@b-8aH1h;JJ_D(d}FJzZTP1lmrV^m)SK9ur;;TN0TX_ie~B z&A#t@d>ON}MsB40r1s9JR;E{X06=)B0RRSUrgs_!0Hhg?3_DFp0Dc>fmbF@bEq+Ur 
zN(KP*ZT{7B<=jiTWAyG7rq$5zaw6<&)ZmVvR(Ksxk9?jSQRB!O=P26|ku^6TzmtC= zIc>(&X?}XaPft9ZJgJw-7R$vmgrc;gW5O0hv~hYp>&_Z~GiYjZC1h~U$7DolYuV;obQH3{1UjZ5GX?B& zP@<9nyI2ogSps(Dge$_y7GO7ci$v_?;DJkG_rIKM2^$Z1;GvBq0ziz16<0VtPIpVa zq*><6j8$vS$Ql*kq#WZBj`hKK{-NK077aP284ii{IVh=L?XtxBu<#uE{Y%k>mu4^| z)>lTX519EF(2kBF)Rbm0B-YnZ>9vE4I6KP}>jMD87fJ*`n&FUGUqP|HP*rILa)&~P zou3DVov6c;W;kT0DaB4h$Hz0^Fai$oMx3P|z?k5%3*jq|Xd>GWcpJRe}sFt8Q&hEjl zj??Fo*cZcLKKw>d9T(jMrKuG0#Y*zUV~IDuj0$Fq#PZ{VUOhmu4_zr%}dE1JS`my8jbX z&?(JeWT-EdBw&aG&0DN0hw%tk$Kek9*SCD&mr-D=>bqSdmPuv0lT>*{#Wo# zQU1cuBk{lN3$N^(qWmaf0g3;WbW@aHYh6g<|I!1XlrT-_3cebzvk7_<%s^}$hA*0f zLu_@=&i;XZRl@zk!kdf@4i1Na4t{nRMiG{(u7>diOC|v@)?leVSY7=cSZV}QSZ~0R z6YyD_T(R^ww9xDliKYIN5gKp=OC#-JjKWglPlT4$#?s+l2pt)RrCXyA%B~9%ORWKn zV_0emnL@LcSPEVM<1vMNZV2uOE&Q^@P9+Q z`Dz%Ou;kJZMgW$&S;26|lIKaJ-LE>9ywhQP!&1O_7|*aY4*uP7`B;kah4BbWb5_B4 zho!^_q`g!NOY7m^-N5^TUB{93ZoKx|$u=;yVkx~P47^s_m-iy=D{ffI`44H|UWcW} zt}uA>z2zY7ckcL~{rG^if9?ZGN3$W)(Xzsl(M69q93lB))cUs!UFK{_4>vE@oS5vu`pj)7c5Q6MLJO!U!5)LNN1}D zmI}|nI1iGpjS-BRSeoJmV<<>f{eGdUBb#7p+8b1Lx(=51HwlG4mw9^a5W-Q&94dWq}ejP{pJk!y)RYCf;Sg-Uw zyjox)PP3&_AF z3rpRr!FY|O{)NcEs~VPu_d^E07)yiTp~xU)K9(YLkU>-~mf~TSSy*2T7LP#&OBQ2k z{TCP*H-p_8$Y4)bEFJq7#%e5`*@+C!{=ib!Ef`pv4DNcv@WIma&M-z}>C;yjMIafn ztY9?6l5Qs$URbKt8OA^?)s2C%7E5-EU|=mVbh?WSTbp31qah5uLWadFqF{W) zQoBeP7;}@Z9xy&*$#XD_S6K3H2!kgDc)`ZO`+vu^LMG$2@&6kWjZ9)O3MO;HkV!%> zEG4EPlcn)kT7MatY{-wtZHlFLCy~kfG%OX?LncLd)l9YCBU5dRmT7f=WLjenmaN(%(>j4zvU7le zkuhy`8JV`$z*5KCFdks3=X7M+3xAdAkkv4fvE(-s#%?SH!|y_{S23Nm7{&%HO}m9m zr{ncAoe%%+{8d<5o`+0V;J@5-(|j2C`%L!~A=AAhuyphWGCfuuOQ*eHT*A_28)SNA z9+qy~!We|5r}{8zVd;H07+CL33pc^Qp21A(6b$TD%&N0t^u&_oAQ+nGH`wX1;b<3T_1hTY%Z5@yIOF21_%XVI0QN z!nw%oUk@y;9s;8POWV?s+4kjFI=C8{9dgGKw-IEWz$-Zq*x0Y)@p~7_X#xD|=+_ zG#yKBvBTtU``I#_DE5XLnujT#DLGnN+H!-&PwsUl>}#s4+y zmk*Hjs{tU{)VhIejPYM?Ulg{95>wV^rBAYr(+tvBOLdJG>s2qPHM+%uFoJf%y`? 
zU};$#VlNND(xz0z-uwYe`{p6`{unHsxR2PUYGLWZ4#d8A3`@7FA$B%i7xp7n#D0uX zV85M$*zfLPsjw?z7wrJ4vCa?}(O9abhZ-AuVo7N)W-@hDVW^YG&w)C4BluFgwS!QT zb=ZztHhY3A=D&)HxPj2v5S-a=UdO8|5Y}^07icwB=gNH?*7M#-X++(q?&9g~?djRg zFWj51oOi(g(>sK59u}%|#-vR1r1HKe8@|T}^@by&M!)J&^Egm!5T@_x58o5!F)V!e zDF2|5^oI#gsDbL*707c!byT zY^;2(D-+7`qH0sPO5%j{(XJS^5uOm#rtwI5P6%pabx)oXg4)zrF0TpsYNJh?FcT=H zSVKG^sEyXY^87+jo1%;5oACDoUDtWu#sj>KHKNa+8uJ=0(w;`@pDR2WsEqwy>g&om z8K_Hrx}{G!Cj(WnM(gt1`(4=wX#okl;=^3sfG&8 z3l;q{=}l3LHxZr;RKu}}*e@&PWS|-iw#aKTPz_U#ye0$HsCHi-lkrtUPi~WeYN*n# zsMKiA5ZP?3)y%0s(}5volIl5xCj|9qNvnq(CIl5}`c$4@2x`*ETb>hws+bqsY|6KW z{LRLo*mhX{2|;DlyO-}z7LG(=vr*q5aYCt%KzKq>o91KWIU%TxE$vyD^stm)2x?>f zSe{=9YGVP%1%kkl0p(JBAzy8bEaf>NsEtNe`6f)~d7B)FQHDdTc%%diBEKNrA zyTdKye8Mw=-o)LK>x`f{<5T52BWO+l-N~T5f3**+BdE?$`ZZ8_%&3BD&*~z#zZ!od zREehlDCdl`MYU%Il!o~R!}M2+-K!|=5uOpWXDZ!BN**(U_CT#yp5F-CGm3U(<^4v! z_6$BKw{OHVLg+ilR<1LG_QXSf>~dip%hR44Jm1p;`<~V7s2S~K&5tO}5uOb+Cz30- z*+6S1Oq1JepfO=^hRt8Q^8TgZYes*N%WOWV2i6q0Fd&a_!?S^w%+V>wY$>7|vbK)c z*~D(fl!ge;1{yM*W>txzP}@#sJwtjE0o<+dxA`yp+pqd=24DmD_Bf zAqffPm~E!$4zwxvWzYR$zw*)l=yyf{YKE1*;aC$5j1A9tK4Sf z??54ea-9)^-{I@aIb)ir_H6K#IHPpS6P^*YXK59=&IsBQGe@p7g7(bWCf6B3d#0S1 z+l+ke3H>P789{seY|1$!y`T8PUI+2ZJvu|Slb$>E9X>43k3LEgo((kTx;yoK<(&<* zCgZ)_W&@2mLBFV3-fshK*_hUcso88K=RhLbAv_ak$4yPS%mkWoVYggn0+chJ!A0nF%yuyG0piI>3uVifO?EIO`rwGHpyis(1JtBa+wLVU`JvZXNsY+UH)$I$dj7MPI@jQq6xyY zfhOeA@ppM=18vA@Cbw?`jmXTA+iai}X{Y2i8)(Ln(Q=y&v}0G-a?JMoq!(Ar_!{!~ zy2RNeTe$FSpdq&p$Za;zkSrs)%?28BWuV+<0}VM%`+O!_D9yToh9tj~+qZ#+?51<4 z(tq3UEaE0;h#%?+@kG6PGzv`^VjbqcRHS4km`(%LtM!EXQ_DS_I;y7|Ex+m1P(2o{ zB<21((2`HzK~Y-IU*SpeP^5%WpbRl=mOxGaX-1p3?`1Wq=}*rzlr> z9;G|>DD^X`ud|gYL!p#Jcq&j56FST-*HoY&dd~9sD)1szV&yZHDypYiT|QHRQhYgB z{;7OXckDS{%_!?sheVa3A+1}Y2uj&ngr@>!Fr#@+<(dkVLI050Kd@9^1j@(#k&fkIg6$!|JP3Uit& zQ|_+=#V~{`$NW-8xu*k9q0v!((}99~ep)R{((QQ4^>utjc}J6L$}k<5 zx4mFC+6W#St=>qw%D}{|q1TRge~96Nj_{12D}&!s%O{T+L0x){mFtY4F`b;`HlrVp zjaIL9np|g8LG_x@nOk|+5q~2K)nn5UT-j!vE~-6#dnEqVbPgg;dxU2M?HNSZf|$Os 
zv@?SC^mr}TZv^deqpLXO{YJj_IQ=EJZ^Sc#_Ozs{t@8Lr(4NNM%lVB_Jng}?wGq72 z+Y@jh7#ZLajW_03gMEEa-4XQJJ##5fZ z)Q`tTtJkL3SzS544bKKz;!v#aSL)f4MKvTKSS$+CcP8Q4KtsG6%WXE$kiN9<6Gfr4 z-v%1eC0uUb1{&f@^N!2&ZF~)BTO^ln<7I|aeGgTU5uOs%XWaz(P6_(6n7%tGm#+i` znzdZMUkMsCrK@~BShfQCH*L+8$>`9q(hPJ?gnX)P#P3LK}u9aLEu|+@8Rro?z=Jd z`{VmpS!?#pTC?}mJ+FXjlo7OZK!NUS(n&nbiT{JWfUhJjIwx# zJ~|j>7Cp`M?5uEfFv_P_^+gAxjEmAo2cwLvr-#loO8@67bnfrv*viuSQx;tjzc)=j z*EUW{m}U7sy_7J_k^y=tVVc=@^-{t(Q)cR=gn35C*GFlVrGa$)EvdM3HH!uE#>Rf5 zQ12 z$m2e>V9qIYnO^@a8+A z&cZ1BZ|jQ=M%mXvA03Rcg&xF7batT2DC~V*7rf3wS^E8ZD>}~?M*+soNd=SKr8SKb z6%2BZ5~6cIQ>0+v&O1cs9f@U4S|2 zq=Qj@qx(E>$_mlJC_mcHpg6W49xEYdbTG;-+d55^4o10XTY;<6!6?V51S^jnjIw8? zin8us2VKHiqpRyrq-HNdd9EPG9g!)8b>ima3pcVy(xX^)zAz(u20BPk=LM)u$t8u{Rtf1WDwxxYbh!IfX?U@w!sSS^ds{RiW{eev3T zM(dedoLulq?i`x9;FFBsYT|-N($Y<#S|8z$WONs$#szOgG}6>Z_~O6((p>H@64*RZ z{E$yBA1etfe>l0|i98E5alsRrD`?_^CsMqli3^@cxKk4sJQ3%lCN6m5)lz9LXB|ME zC^^95qQ9ukpDUbv@I}GN8u{Rjd<`}7!5>-v(8vdmq@*(=_3?vG;?WOu)cN3*H}5O; z*SXGwUrNz+qPO@0z={PGUHEmY@&Us5;K*kAZ1{u_;a zK}L3i?#5;MyUn#BW;Lf9{%|$?(w|Oz^IH3y#$NvJP@S%wyYzGX?I9-@ywa_W)mJqx z_#`@uCN6j+%2sof`Urp2_)Sw>@J4w$EL0mW_@c~4>A2j#Q?Pj==AK_J+RMcDgp&)N z=t6I(z4eiw3!Z58ji$KZiAJ^>?Zp%PalsSS=>|ZpkMKlQs()%+@I<+;(&N?Nl_%KW zC+mj4Plh}(FulbU>3XR~Il|vqIeFlR?x{8Kzzc2YbX-0j_@HG$4e`JOO|EF*f&R6= z(!c}lE6kJPS?8(yAbO$bInGr3W9I=U4|MPGsn2-$d7yhcy7rNe2f8<_ry(Bb-hkFy z^6^0TI&}Ua&jZ~n(Vkm`XRznG;A7jK&pPbooAXXCXy2F4it;LPLH}O#P(_Uk9%%p4 z@_-r_e9)Y3pwzhFg}O&H#RWf9-Y?DNT;HJv47TmJz3Av~9y__LMCPjt4O z!6?NAPjsaHwl^L5LViE_kATMNQ)cPrT=U4*Bb&`^$Lj&$D&MAC7}3wl}fj z%I9Sr<&Dcav?8((K?YN-sjiC*uE5_wfBP8{#mHcbFaOa+2476tri%>57(p-36uSv$ z4DBNynfoD@%^F*2iRLXbZ_7r@8boGVW7ToLkqMB&8jD8iB7-$%&@%?bZo(R$l-3m) ztTADiE;3kScpiDO0j|}>cdo2alVy#abX%5-V>e;hf`~peYO;*64ndjU8d4yFB^Gbj zK?FN2q>EXFh+u_T8+1ej8%*x4g9sKF)mDyZy{G)U#`q-iTQG>olz%-{YtO+JAcFi$ z=vY#L2=dQ~)X_D_|HTO%M38^VFFJ@IzwOV}360TEm;AQEu&0<6Q9dv0HNUysNrnut zrG*YMR%Kv=jX8CZ!3s-n>LP<3=Guy>{Ft$B!V;f-sVg$rV(fT%GWWK?W{tg+^t@Ta 
z@1!O&bIq`6vERr9$Y70?$#jvy8p}V>MFwlkr{A|Jjv1`+d2(Hm!5ZU3<;mQyblC}H z5BCY=i$WGzq~D!Jt*VWpqpc-_lLraoNi#*B?uZA5xbvO{9(dsj-C)V}i^T&Qoav$= z9ys6-y?m382jcISi^sjhv+4d1ojQ1nhpL5BcP9^YfAo{z@d?ah>VEr-1|H~siB62< z`eo{Vif#<#d7%4AdKxRw1Kkg%l^UO(u2sTG*L;7E?Gav*UfF!^5uBPkIiUH|I94p; z9Nmrg*gU`cOpyax|I|j21M~Y@c0~?o{2gf{Hy+UUNJZf|mV0XZs=iMSs>)7nogC2i z4_fB-G8IQLb-mKy5Cx_As*;{ zaft>V=zjc^0uMXO>xm<{2qQ!ki9sdbrN=auoVk2T_=jBgX?B zM4?8G^prXjx)x&ONJ2lMR3HjAa)edU(X}8WM=cQ$aq<$X zB#<~xTX7Qit2|p0uSk2CTz=P*9x^+LFq2E&wxW?FV$}dr7vHOc2y*A4WtT!kkUaCx zIwFGXDQ@T>g7op|bVI0X?#FpH`72Y_^G4)zf)bHg!%G(R+ci!i$e;hB4kE~(kA54c z&^5@PwYiRnAb+YxI*1^Dd@8*{U2`wI&!C3)GODv0KD4?OQBJ=r_*zvP)1PH`(qQiQ zrH2qwG(8awr0?8UEgD7x;ajd!r-AGZSE|!M^lI1CX&`x(tui$3ENv5f@Oi&9wEA-h z?xca>eZtgeOu;+8Qm297A5b;*X1{=)V$neG2F2B*f#CJ_tJ6U6s-0wLdbpNwCtNeU z3Y+1>=uGY%pR06-*iH(F-H+b?2vhVls<4^em41gKMFEjJd}fV@6a@rsO?R(S6cD#D zy*ZGgfUq?tiFU-jq_&AVqJvKgDzpw!ofHtY|4exbQ`ByBUFbO$0i$3P5H*^%I#Lu6 z^?k)CAZpZ6wXuMxHR+WoXGdaOGu4)wN5!|I$mw?_?hw{V17SaoR;R&?9r&&qjaiGk z+fuVkr_9vcwwHP|5V~nkbsC6Wd!Y=?T2H}85A#doV-7iKAow79IOEMXTr?287oFxw z(U^kwpaUIg8qDf;w40Qsf#A*PCRv&Wg4a!^rc`hDYI`sLiW2WwdUx;7-iHK{;EV=> zZ~Bp<39xAQNdv)`^Dj)j(y%QAQuqQo-to5hvfIxm4Fvy!?x&?`AozrOYGcDX1;NMC zX{$gq{j3>o3ckmd{k$k;Uzpm)#z_OgH+J{iDJ~iazO1tvjVbuN#p*N=d>UP5c^l$@ zonp~I@XzS3PnrgTkGm? z)!*_IY%Cz^S4Gq)AnF|7!x^w6EDDJFWihoVAnK&9q7<}DHdFHK z8uB*IeDOf{oAe7Mc^>F~A&rK9LHBc1v*hD}?#Jk)Nrb1TYqP~(jrYMzTFm!g(#!J| ztgkV4azOLvbc-p@(cATEJjcV#*7(YCK+9+eeh6#N%Kb(DwE&#WVhzb z`!12a9yaTA82!4z+bdgNbJxiOeP8vqVv*-TzJ5_#k%!d}==`Ll1|DdAhvtu5zgRrb z`wAWK%JV?;GxYdCgoj>xo4SY3_B%el=A@Gcy1(47$YZY0{%EOz2fF{hS_2Psze`6_ z-o`ob_?Wt1J*6QY=zjJODV{a1Tz(q!y{}P?)#*xfk(pupN>XkS%Qq5acWBR$bG6|Q@UbC zzbus5?Mf4isdVwy$}CVi4_%pi8yB8fpmb(>S|rN?rBiHE?h%wuPQQ|pjRi`_rDH{Y zmJeNX`nYROS7vj%N_neCS$)1Tv8zlBeT>R%K9?G%!~li!k5OWPy1D3QC^Ef(vhSQ! 
ziUF#ori>&L0~Ae|N-&0{o@&0g+$V$g>`V-%nx*e6F+j}%Pm~y-W_G%K^BhT@UO>%E zVM;MT&9s%37@%gNk%A0;JnL;WI+e@eCk-a` zAUdj&qUnohpmmS*R-dG4Aa*<2pi0v~?PjEtG!5jgL&xLNG|;=|hcaVBPe4q;N764F zy+z|p;+Eh}8VEjs)~`}Drr;kg^V_L_(J&ea-iCfxBOMI{Z$bzD(lih}s*if7Ab9O` zGBorIBnG$SnBik6PxyWY!dBFl*iH(F{m~gK5@CwIhypXa=Lt0mh}^A=8U+OIKvh7h zBM|rfIBHQq*m|_b;f#WwftaF>qXnfWg^!f&q=2Y{%lhm{KnhdT-X+y2AZkbUGOs)F>co3rdPoQ9#uC^dys$g5GYJsd+rDjJ;h~`$&dP8VEZg*cuOM z8qC-sx7BFO)ZFK$It@hbY)gg#_a0Uzqp@MpKsm=Awb%t!SSpO#{Ij9hMoJ zPhI<}-B@k+Gm0B=jmSOZ;uMY(>zMX!yLar*?wx|1pg2b4wCPscoS+CJvQu(VP`D9U zh6anXr7$Bh(HoJLVlWU$`#<*-1;G;A=nXzcc&sHD;M3-!EkW*2=%&MGOVeE9?sJJ- ziix{-snt?yhqyi3_G{mzYtPPZeVsVy`$ZNB93<|%Pyz>)%h4u_KTZgpWRhTeF-9q- z^_>?IILN)DhXf9aZ*3rfgXoj*aN%aSbU)zIy$sWRKdpk&deyx{uYt_&K~5x;pT5Cr zQ3MI?yB3l{LjCfz$Pj50`X{5>$%~9J$}k7~Le3FE!UI2D5$-2z6E4_E)l8&K_+Tn6 z2K^&vyPR;y<%Dv~2?r=`q<1*M^+$N4-!9;o7o1pl;mc}PPsOlsL$@Q6SoonrB}ptC zk(~T17IUZ(nXr>YyD>&Nwl=tzMG^~N?EXR$3ujD=$Bmuk>5W4j{bJv)XYh}LoLG2c z<}FDqywSb2Bo^MNcvTV$Z`l5bs5fu;?k~KNvX(@Gf$4r6x z>s;X>ZybJQwd-7N)U8q}s$QM?_`k82g|i2Wg5ZW8bV|BCU9O@~j1`4%Ct3rM+))%7)NfX! zYIXe2ASVVBer|{$2GV{OUl0RjdyNvrK-el&So!(`UDMEVKQJc7D8^)cctapIhJmUF zXz9n-4n&=CG+;YZT$=82X%X22L5-D69rXweJYHCuJgoE zP__@bfj2^ETa{)yFAD0WeJ<3m7^4K!_c1L1c-mqpD149>guEzdJd=Fk8}*q><-IPI zOB&GY_#&%csT?Z1cI$6f^-dTBTR@w9ZWt8nJ6pUpNLD?iI1HMlA1elnF-kJgp8O>a zgKCF6i^Cw>teZTrF)rP+YV@b~=X@9lQi~Mi!$6ZvG(!Vp%qsRLT^I3Tpv=+10%O41fjD#j2#A^NsA8BW zXXqX(Wh{N(rZ*=F3VprJYKa#GjRw*iDqa*+s=ZOTE$Ecx9U+uikN%n_jDl9jLxoXL zYd$ILdo*Gl6rMd{wUye^EB4WX69>hiavB z1P=wfdRkl4zD zw3~MRd&sc-H-Uc-6^3>d`1cT@A*mKP9%zuW2`7JyQHbTY7j#zPpP&7EC~&r<06zp+ zyw>l(AL|#i#kD2X3Ze09ZnFm8Tnik1a+Fy(<{!FpidrpX~_XHPiWsX5(&?|xW8~j z6a+uFux8{ZejGH*M;muO zoO@jsY0wEQKMu3&`v#VB{5XiWf=*)n;ikLh+1m?v=y#nC+|qj0@9kdJiG+gd>1mA! 
z5*m)B_cbC&sA&7ltOyc1=BJl7yhwH#9%4iWlLJJM(DDa*b0dO;nycvHjelgUtMD7N z9!c-eGxlEBiG`vYb6MUK!$Q+>%OtT-^@D7ZSm;{Nb|uZ-U-xxqWN5HNy9^6$FZY+k zLfzF=+5?UA9M^n(`%D7!^;UJi?b`QlPAv4@bU+ddeJ50u#6sVeyCt#Ew{SiQtf_C< zTuChSy_!f83w_s)<;FU%v!U;glrx+~*tS=8f*|Y`Ivn8yLD7lyK!FnkDO)$Sy2c5D zhDE*=0lC*bkrBm3K~U}5a#0ZETAw*E$a&Tdoo>?uyX20%Lwji_1~P5^)oO?j1C=Jx zF$f<9B1PX8YzG<@qh)5`cHHOlk#Q)W@L{0PjbZ|^ndkExRt3a3Puii+&-7R$r9&V4 zUfYR+LfdJd#*2bRpFXkT<3&NGwjT@!YF8UbB{0z(`gea?0Yn( zy6T$kzWDBFtF2THy&QXSCk%q^cqI;lVpE2Q!ys9^f5c(XtYjB4nESpsGCm!#a19BA zLA4)AYi<~1+f0+$4~+J4{c(;Q*C?=Pj@37x=fN%N#Ay&@>5DICoC-mMx~wIcP*33u+oC zDnW-$l{Opiw@gr{;?@a6Lqg*8E>oz`>d-F^w)wtd)fb?^Db4^;_E^lS04^&fV-JK}ys)qPn)@)jC3 z(wLs7ka0Or$%vz;dk$N7cIQVcPxo)U`_`jQ|FoZz{@0(nu3a}dv|<^f+@-EKRSBYV z^ZvNYh9F<(wFHTMZVn}C1sU7lDsj-I#J5a|EJrORK6|UglLrse-2V0Fvqsc`pr%1l zVUXZrurCQd`X5U$K0TiVf5csQW5A5AiDv%$PROVK*3D2adTP|}L6y$_^!x60m)>;k zSUN-NcE^4x+keNQ1;6%vkU8JEewS}H+;#hUkAbs>HTWxjk@dmZ(!{B_A~J2Q@XKLZQHPp9=K(A1e^S zsX)z!oo*bQye`Azhx3*vY5*X`>!y!VyUL;pQ}{P9QS zpFYm-?UnIW53l+U=M|F}bw#%_G)?x-b+3f|1$;-A8e5C3%T%g8#F5`2*%Q|PmY z&;NUtD7FeOt~cFLVMXoU(e2aRX}$Ev`>kgWI&`B&l?$zAA5ZyBh>_-Ur!a^>2j%^7 zxC{}NZnZ@4c7zQTY6lrR->UGfON9fjvasJ$VftGYqUX(<^~IdeqZb@Cq7DT$4T*|? 
z3{gI1nD{@IVLYDk3?s+A!$zLr1qyir7=2NSPsx z9&LV8tk;G^>Bn7px^K$xzJuEKxp#Qdk7Z(~qb?6y-F{uC`n91k2g^SHW%828^9%g_ z_(0Q_ z&*v?iJAL-dImuX!c8SrV#eO5|NKn(TsJN^$_>^My|5}QSTr;A}iw@J#!t`MS08!r*tNmW%lGZ@X2{{K_g?g<{^hJqZ%Uo1 zb?~pzUq^p7GG&?r6^&Xc(-_@nEbFlSi|XC7j2_=@&BB)#UQN2OaAUV!1sCo-v!~bD z?4Knv5-;d@3{u2eEv>+RHGi?~^OP29aIBW>YAl@F{PTI!X3uQR1o{87TIzCf|IYqB zYOh=T@NwDkpB8zM;mE~>b$>3|r{A6Dtsk`NzWDaBg{d}`>i0)z^#Q}KByP62RL0q1 z88gSP`EY!!#Zvu|pAOEHHaugdGQ-bqAO7^am^Ukj9US`SjSG((Z+&kB>O{7n4!=MU zI>hde!(F7PRqtDZ_&I2WBn^X%18*nDN!J89>zW{EtO+vPnQToZFW1-xiZQ%&S+h--WB zEq5`d-SrAXdh~tsOZ%Vp)*rU^<%x;Ok*`j5?S#ipwqJAg$I&V@S^db;#P9JaREY{Q z4!u?7EK{Y}w2?PPj;!>>$fR*bj+i}iWR)_dE0oM#v|#ZZ@0QB`?z`C!=iQQhZ?S^; zD@5ifJ~n9itdP}dmIoc{G`dU3#_efljp&~AWq5d!AN#dw(>!j@u?JR;tXy$%+U9wa zOV@8tnxfX|AJn%S4SPCO}{IG~Z z3A2~_vE7oy``aa&ktFx{bYXG!-3+Q+veLh65@#7QCT*K{KkeP7&9TF`FV||6F@B=T zpMCXr<~G&ZpHFl-|EjMicHG})_)l3z-;dM%ors*@ju_jv*qn6r(%kAZ@_d4#(>iva za(r2jUtYg@^d!yJ^7SiEel>RS^G8!>whZZ!acI3$|4e_?{K~n9gI=~foqWTTP8-Ho zIR2z`NNi!g-Z-;-t*Nj7TwgEWpKY!TxH-D*wZ6|!*WZ3)@b32@%-sduib5DVKQ@I~ zJI=2#*4ZuuUP<0>Y{UB%>>RZQmDi;1-f zJRH!`FE*O7u&5b>ueE(C;@khQtv21mrSBvhY?$9QzTA?H(8ij=$ zAKX2@>&mfgKmGS!;%Cui9zRJ~?)tvXU3c{Qpy89fZ8|5c5n;@1-)TLBpu^n$__+&& z*hDu7J`N(?vtrL%DUQ3OIPH?+lqJO+yA-y);I~0d!=vJ{n(p&h>Z|{?7@4?g`trsv z`%OIfRgvl&I-l=UB*XhtMx{&L>)Nf3F?VJS{`m2}==2#%-yCpq`OS~-9_#np)vGHm zJTFt^aLj}vAKf_+YgR-??;TZr<$H_954cj`d9N~+A6Iy^=la0R9R@~UpBr{Q$S8Qd zeKUwb2etiiDhV+*AGBtLuj56iQ9sDo`&Nx3E;SCh)HrCV@ugi2`=0PiNY zPdQfnkL4K8Cr6j%#qw4t@pAm)mjA9;JNiP0j7PSWo!w*h`uzm&2DBbV!Ktu15t8!smjY*AvK4kB2hG~K`}Ld%B3L2sZXI2N4KmR~8HI>ofw1Xv5iRhPzZy$5?#~m(ipXEVa1ufO8kAxbh#-I2 z#yTQ`{P}a~AcFky=~<#e*C2mp9Q~>fHWb6Wl_GL~F3?sOnr*j;@>w_foEgAL1`AY- ztBVXas6}sH6(fTc%F(X@6v<$R0^jQDCM=P)gf24JBF+MNGUo~q2790GsJ)JvzwF>7 zgEeYT_8%EP8LUzGrY|xMMICBh}bok5;sfP-TNZ8o0KHt}hF# zvcVvAiztkn`!9mp95Mz^F3d{jH8yW6OUofnHaMg|z3x|GGab@xn3mWW8yxZx{f0y} zHaMhZHEprMAuYbs#s-JfqgRdMJ$3*6Q1*qPct$OjL&op4V$035@37qAWP&?p(bHgA zrnra+xueH?<(L=~T+ua&7A81i$bVXx;D+dqTA1L3hA$+U7P%^k%>@%_jUG!oZ)*a} 
z1x_ZoU^cBVWSLAC^osAhcbqYqE*REL3lm(>ajF(3xS-7iElhAh<1Ug+{XN$cpB%H8 z^7HOOEGIbG;DpgRwXq>5%+0Tb&8!x^=}1DQr;H7*7!glfY;Z=q7;S8DN0T@TZ0^71 zU|UaoO3$Obj+@_k>12aL=6&ryHUTy`B<83VHq#+}vuI<3Lq-nQ#s-J9zo(524(Z%g zfz6%&**RuB_c`XbOD(os{`Vxcsy4FyEf^;e%(43&9YpZO2W~uziwTh{(MNw8?*@l>e^LhQleplL+$f+^mBL@^8Mag9!4k?5KkX z@-N(`g9!33sHuYp@=qNjN7Tu+f?!`njAvA3yNP4fETZhbcN0$Oog|Qc&p}lZ$bRLJ zDhVXtM2G2;-GSU|sGdubKmr%A-7;H#CgZ8FEL<>FTKegU}*L+P-ClTa7 zXuD?h<|95L$iE|{j))-tP1;3z9V5PoApZtBZd4$G{L2dJ=o;kTHb9PuPH^J8P6UJE z7PW|S`(MFVcD-G)P5_)t;YLvKSS?I2!1ME3nBag%O|>w=0=JiIVS)$FwbH@_6P&3p z$wa5QrVApD_-CSpyoU>%OmIOEJuUL;Cr&1~;O`DvVuA}E&<OT^+{l(V}YGqWc=XR+k=wkq?PkDW|VJG{9T zrZ6L@1pWL#B_^o;AFaz(n4tVqdcmN=1oeNdre&PqfbU;QGR=1_@@+0i{H}i{pR?7; z1Q(S2S_>2A{hN=qFu?_{XnF2cJI>z01%I~H5))i-Z<7|LAR{Prmn4&WhsOS9R05+0 ztMAqJTTGGu*ZQ@pHm08`IC1D+P}D7*lARnRXQ73_}c(AUWWeXCF`@;uPE;!jqe66ZMxSv?7v?==7N(4x>v5Ffd{&muAqSjx(}pNSg+&5)i3B?FkCSnQ}^ucHSj?9 zw7*I5xPQgPbWiABmya7rJb~8bctz8@d;b^!f}A|ieK4I3d0jWy$Jr+jbl<&DF&?(x zfbN|pY2bnGAJ7^^u3s!3%=d=fHN*qm>(MSzBp&yPicR;i^pemkkA0nI)62;N-9P@! zf4{hRp!=>Oiae(79W!a*f$l8?*1=xK$JG7(gc{<3?)BeD@z9z&p?h6Efi5^>t#^EA z<>Y|oL#YPJaX|0A<*ok6abS-3q+4Hc4z^E$&Rytfha3myb}P;`waq6qrYsKV+nDAf zZydCyHnpAjk?%3}k$;^W&~_O4ELI#`9ME>3pS?~%4mKXpwky5v7mvf#w*3T!@n9U# zwl%F|<>G+0QS@sjUJg3{FzfWB#8zLtu48=6T_+Fp9lbyU4>TSzLjwM%i`MS+;4Ls1iDb9x9o(l%rN7?cMt*1@hKOO9!$H#nf@<8`dK^l0V`yktS zmA8Iz#RJ_B(wT}^-2?KNx_6(d*e_G}wgL8sT=78nW^^kg(yy;v`*8L$E|KeXd(hGJ zR=)zilg4?u^)!Zv5`rk)2u?|fRDlS}ufM7zB1pfKu1ggn!knMGMn^>iP9l#{@EeeoUN%9;TZTpLoYNiM`m^XE%|U5~rE2N&<;D(GhE`f#r(? 
z60e|hElCncyttWacOdZs`aO?iB#?NzpSj~}Ox$N9wilFLV$=_C>XI5JCDGN92g;QMy^fPqTX=^GjRa z(Ln_Hx8Bu31o>BY)?vI)o}wJ9A40hD5w9E^#)DoVBJZBx&M;vC`|+DpXO1ECJ{OX;eVmhClw8&f$Z1m z6`nK=L_c>@y;G3#*&|DEEYf#6T}tI?Q(-)*f<1HrEb zP%2QTSTqp)3f+Eq^$iaV1piKorkzXht*#kfh0X8?>eM^F-?-Stc2Yp>R}_g91%&?N zTT3A+3e4)qB(g9?BBKhM(f2yYN5Lo{?hV?8Nksu+FHkw*r1-*9)VTS5cZ71kT~sFp zM16fojRKJG0*GU7xg97-5hX%9x9}P4R{CN)lf(MGm6#OBjC$B@yLxY)pyO4S` z5d2aA-*}IP?l%&<-mwJNs%}N&^*w`=#yT!{@<8zlXEg9&ju+3SfhW`m&a+ViPlyqm z#dd!fcsJr>e1eVOBs#_?$OukIi)9hL=swfby~Z{Fv@X(EkpnvCtFOobjkB&+HTkA9>u#0c|Uv zRpfxSrLrq>K->JA6gi-6)o_Cyxx!dn7d9M=v%#) z1|DeqUPlc)(7E(Z4Ls1gU`-7?&^!B54Ls0%SR5%HdLL}o=?1m@^Z1z8P9Er9l@1BK z`iF}Lx|chwAs*;nHm3$2=w4`x1|I01Gnob+=svuk6whGSMr;Mn$Wq|Xy=5@Md(!Xr zQaaZSZ~x9QUXwdf>5cHJ^sBE0qy8e5!C;DUGluSA;H#W6yvoe+wUwH0B6WTe+lyltVT{18vLEmV-zwBqJ% z3-w|Y{ce~Sh3fDpt-N?qs0EGN3-=3|zu`R&)JRvJU+v2CrCFZuO;0z{IP-juw*A_7 z>Dse%|K8vYasrX+tC1B2fXMb4C=Un#k?@mK>gEI{H%hade{qC0G6F!P{grgsApk_? zA4$#>03!J}qo3Ui0FnRmeQ51rQi1v{IQ5PTh%%zhiPGAy*iUQH+ zPti~r5H+GkRT&`4M5Z}1KvarLV`YFS7Hg=S2@DSE#h9crEuv)n^+*PYs`2!x3=oAQ zinjKC`#8o`JJz~tM_E=o`dqeJOy|9Jbm%p(C;m;46N>6lgO(&hP}Gl1AIY_f3X)=+ zI5fFYmer6SXqCquFoU9stfob_5EOM})FiopQAwJAF9$^}DM+bAC}32RRF&mgMLl^) zlbgHMWEf#ol!JeXx5}WXDYI!EF4QWjO0P9?t)i~vUn>qxihxmBLg+a)cfbsa+H$p? 
zT&t)q>#0ZxwTk*OpH5@Bp-B)hDohL=W(Yx1V`_dY-dENts!W#ja!}NnE8%ibRGM|Q z<)ElFV?URJqS}m1CI>~m`I}{S_cK``aZquZM)N|)yK2sQSIsHUYEBHjGsxhqIqtum z{`MRi4FoyCs5&+2qC^mkx|5kEo*)>NClzf71i;CS@~rmQRxg5JR3F<4Mi7kpGbXVm zlpq)ts0BSx5d@qB%dvwv1}j zr?4y-^{8MG8E{gBjfxaDQWlJwbnUz>7*%Owq%1g|5n6qi3^)n;jLOuH4lx9DW?=%P!^2(boCQiFe=pgv$9~+sFlZL!KhN>Q^|r+r=I^I3r3}C_A@_tqGzq@ z7XbYB&I?ThIl-t^wTt?1**6%qD(e_oFltrmX)<85R$WOT3r4M4*H9LWS~ZqVXuVEJ z?*qodMy>kbN7{Wsv7|oG~OM?T1KtfL|Z-qlQ1x9Rn0J&u+3UEV7M$8wW`iJSuko< zww$tH)T--AWx=Rb7Xz$Gyi3If47F-wQ`weLt0v5s1*2B|OJ`dGCShTtR=wYizh&pm zJ@!=tx?5n6o2t51Dn-?+!!FHY-}l$qEsR2S%9cpCjQK&gZw+f?BY|{N8j6#)pT0jV8-uEA!AdEs?ng5G03I(NK zC1DgQNfA1%YRkOuA4H{*r!9s; zeW=$$xGfZh?CXS4;YLXMZ-h}O1vltinYUl40h?|Kw}s^Y2^BovwvhE-1`D@^bl-xm z-+Z_Ak!w$~+;x6doSmNy{MBkJh4cKZLH%Yms#eGU4RV687pX@!aI<=Um4_L_x^y3)_lyC<(&DIn_s_MM22v_37%6qfe|Qr1Yvn-eVwS^qhG_!$U&9 zl~NRhe7=S16lb51&L@2(+7hz)e?vtiH`A~9HUqa@4eHztb1kkvPx5r-kKf0A4r zhRoiECO?+|tZ&HeC02^JhV1@db8#5*`(N*f!IB|JWcU+9#9_$si>`{pkmUy!7l$Fw z7cU_OONzcB)5oXhKwP>p7;^njw0q))A=_`yD;^~Bece%Fup|f)8UN$|#9_$!Jtbhs z`VH%ew}!kQxmX;A%>UyQaTs#{)(he=WdF(O#9_$)ul^Q?p#rp`g}EPd4Rh6iRjwLP ziq(LRuUoC9cGdvb?=;>%z;fbH1RCa(z@Z96Qg!2R4`m=D-8}K(k{hL19k@j&pZqu! 
zf-SWK`^VZtC73i-0*6x2_KE}!wV)L382S5$V({vsM0=O$Fm*?V%tH+8}{LMJPc_YrZ&>qCJ#^gtWTh$Dt2Jo(tF4RUekP>O&D$ABNDrCn$j(0&+s6S?@QAe zCJzFc{taaq9t3jy^9XJs<=jIgCX2)Btburr-#z2Gf*20feanR zP6ynXj|YL=oM*le0$KUz2|@_u;~l4k5Xi)zr4T{{-~ zNX8wGi?)P(TsD;`C^-%;4~v43mH(m5D@UKqbM`a$L|S4XfemY( zvzPu_q$Ts5J@Hdf5c2ddl|?~F)!);mjAO`Ic*xefXNtCjgx&D52#B42BWDlqA__v< zZn9Gpgv^~Uu_y@1`_2nd5c2n~3ZfvS@M+^jLCE64?L_7JuYmLE>)pu+Xhas;|rBej%Ad%TS z(d6fbA-9*Kvm9<1vU_lO@xCFyzlacnndj`bZyIw2$zaIwOZSPlhAclcoj43D@v`)j zD6SyQbM~a A}Y+24#2haublx=S2}e7`q|I1CxTac40YJDx|*A3-Zgu90LgWc_AE z#alz(&rj=b?$(g`e~A%q4Y_~!c5xW8|L1SSVaWd>*Ti9{0PSgE?q^7byJ~>#I~gMm zTCJs_8W8(8XHFohK(k~rK-7W!bmAk>A}T?)-l8p<-^sZ1Kn93vu&a;^5cObMe;FVu zLT5^UoP)#qh??*&J@OC$qAG;Yx=R3vy6}>+87I*EPR6;LqJzVLs13`@$pBFuhSAbj zAXLrUns2gF;WkN;e=vb2v z==@X?&Voi&@(r~A5WxvV?Pxwt28h~G;I<49wIllzQJ`5nexapaEIGXQkpWRVcCV9Z z5w+v&)Q$swRvq5^$U;T!XzFJX z=pAU*j?p(nLuEkJjur)FfT$gX{H$|)hl<*9zn4sls2%$@%K%Y3X1tUEqIQH|k^!Q2 zbUMZdjPbm`9ZmZV+bxalH(5>uibf0CaPS~dEeeszcn~NPIeS~(34}23Z-1q|1rGv+ zVK3zc9t0}FblOeuAW#ar(Kdnyff`VWW-<=~$v>Pn4m=2C{ns>;0wK)%+l#bS;6Y%I zu!{BwJP0KC=|6L8&Z0tQA4%H@o;HxuE6_e75W;p+dq?zk<=t;w?`8_KcQa$DA4%xEhkhgF1R(8xFxDC>4gj)l z;U7c*NV<_U0Ryy<+$hZ6%-knWaR88V_buiPiM4=?JAJrM^0nRYb2e&YZj(N>Dj z+v%j}Ljof-bqf&y((I#EA^>FB!}#~+dz1iuU?CyNzTb=&kOTmny^lL50ziswl}!YI z3|n-N2mlH8VS5n(^6SCnA^@b-WgCNTuNG=9Jo}Eb*FJNC- zV92yRz7}f*xwZ;fg{u{0+qm>3j0=*?2n(W%GcE`+?!7$xeMyGyBj;W^AO=C!U7J}9 zg1kHCV=)LaZ{@-Kkfi7fa&MXwVi08CC+Wo?$iGKuPZv-Z78o*ci=+IlBmo3Dcmkb; z25f~vkcC@+A=V1=a4}k!aJ7O={OE#ME6Bx%(}+Qkjpsy*L6DE*mlcB`BY&7L5M+QW zCogm5gkeuTu6^sqpIq7$Tn50Hn@HGKUGK@9y z^A*}(1?~^SAVIIAM8JnZh8{}~^Y}1G(Y87m7?TvSAxEd9O9eg*lJxIW1;&lFgDm|m zohl!YJhKIaDKfBSiM@O-|xPA%9n+r^vi0{PrggZK8Qm$&By> zRL6Kx$l{@Ntigj~U-?2FzbY1?`S-at#1+zup^(eRcMwJ)n^)~9gmV9VuF&*!x5P6V z=J&e(pc7VJ6mt48I$PmIA*;9AAk;7OdtHT64#&Tj0ir-e=Mx2*Z$~~|Dg#8BX!nl{5S5~IT^S&X#iGJ8K-7z4 zRM9w{!v+T>V*$ITaQ|#p0EnuQc##Ycg`?LrKA`iDY@v31I>`d2^IkjN{&vX;MfGS$ zTY4cV>PKm^oe&fiq#(VMMZW!4e3)3Ps3Y@f*Dus6D#@_;a;>74 z#L#`BP^+jWH7S`0K~YZl~3A;_mu^V 
z%916QIMn_3xk59omxH3ZJPVS8qQ0DLDhEY{X|qNg%Kpd}&X=Y{%0W?OI(#GtMV%=_ zw{_f>U;(4je6>-oRn(f}ujQbqHjAptK~Zm#W|f1Y;`E^>gaL+WqU#>P_V>A_B(_@3 z;H){Ze=Fq#qv~{^+Yv!9>P{J&J%V6VojLbczQ1)~lnr7f6XpHYe8oRewU{QF$D=kL zWz?fdbm}kIGAdHm)iN!!zdV9_jV#4v!Kg}qkCg?ZE}gn63r1yXNBaVS0W<$T*Hk(I z5d@<;b$lfgHftI6sazvjFe=pI60%^_sFR~)!KhMS9hU{8P9<9+3r3~t`=tyx1x6eD zqp7p}gYCash*hgP(h~!(Ch-kMttv+w9YHW^RS|mFBmg#R)t><>hWD_|T6K<|>I$}u zT6H2^X0%z$s8x$<%Ysp>2F#EJqgK_qEel4iO7@v77_}-Mo!|4-?6&#YzCs&Dqmwv1YJnvPBcTSl!~9H7Md9x&9Z z6a{3%My=|XnIG)Dxks&w)0TPP|E!i!TMUH?baJXN3Z>_(i^3?>oI#m{Q7AU`J`_fw%A`muj6!*d|FaOv zyzjqVUKoXfvVE>F3YBEa-@+)AknDSfQ09GqHd<8i_|UxX|CfH@#*0F=IQvjILX?S) zRfJIHegEfF6L}(JC=`aSVzZK=P!THle4pVpM==yi!O|MS{Xz{mGhG;kwQ~16}85w!hEy`8*4i!g+p1zddt;kmkFPp*TUv@)c+%bAr+u;pu1* z&j~_~FHYH-1H``OiF2{PkBAI}`S-a_(`yONmXP2V*B5OG`Te8Wq9COA`oD;Rklj;~ z(>eQuEgzOE~((!b4)uNtYb=qakON9H*`9kH*PeeEQTrl&yp?3?41<7E@^1CLB!;t5v(IF{!kjV6rbd|>ibALB9 zG$-XnZWyxte{?g$4MV;^e^YEoSdhs0UFd|1t2Ohxp)&+$xB1=B?m=QfG8k6!l_{}u z2Z_wTqKtTJ$o=QWiNlcnmz@@eA^)dYAr3VCKMqyk!ym0M`Ee)%mFdEaABQ?n>RrM1*x%X0^8Vi)5;#tQa9E!qg+UWB2kF|%YaPb3)_D~kOZWP3sf1hh+J_#JQ z13iXG;7}Q=&~M`S#)ZX+(y)@wQ224E4HrTr;zV&+L1zy9?V&oP%PG+w%EQO0xo`_y z=RLdeyX%xjO`NoapV?~RaykEgqx-$o+v|wv=*qP^_KS}0-MxPd5J66&a3lP1B~_vn zh$zem-$7@!k`XbYP$PUDO<4t^5F`AXDLT3qY=qBAqk|~O2%kxpg+g8H?kWFivZ+_1 z{yjVO>fWJa2T%E(M3Da=Z3iTYO!>Fd20?)c@~@#Cq5=`*U;K`L)dG(Z>l)<$D!^Rh zBZB<1sAvjxt%FPc?XKBhmCgR|qO7iEr>`zJUrKsP?<9frN9d47f+U4emCgEHbhsu- z0?D_YROt?j1ahxeCV|vTsVqu%2Qtqkzi^Yx@Rax*9jeA6@s-@2B#`(htzIQaOo?~T z_pMIgNEitu-bm>|G7?C<+K)PXkwD^Qw6l_o1QO3rCq5=0dM5D;buE%y&OM(0nd~Hj z%qM9kD-dBeAG@p?ky*p{(qXA&M2rZMZ=#<~DG)*S6*+Wt4bm^56BVI|=6TA0k!HA8 zB43Vn5<&hGG|Lr;ApZeM=8{CF{QD?nC=fyZEtEDCh#>z;n)M1qkbe=)d?BI@F8Pn3 zhNm(bp@zq4Lnk1G{jcDgVg(;lw{5@nUAp${OiOtuW4I9!Mio#8BMk9^bW~+biC&`) zJfYid9gMKZ9XjLJ!3eKh8Dpgi9gHx|542vBV|4#{Ae(zy@ADrctx0{j$H@rygx1l@ z2>1M(NgJc-p2xYhGQvGS2Ww@7dwv?Nl@ac_9Hz+Ve)w^~m3!*3+|%}n#hBms{=tWH 
zoNRDT1U>IoWrJ(}tEVkC<*H%+z*)G@Ku)589DFFsu2WQ0o+p3uq|W<9F~T`Fn<&P(-gEWaiSEa|#u#uOI~n1g1hg>M!3g&R2XK#I zjG?H!5B-m$0Hf)iXV0}V!acuHK2#k?(>=H7?njPsv&%i-V*QfZXpB*e=r`8tbrGSx zQ;)7a-S;3)O4z3>O+zh|@K5WqdZUDa8kf*VnF?bWVnkG@n{nliGs<8iqTExxQ3e?i zrFW}Ry1#U0Gf_;ae@dTQD9c1nN|>ngKYA%)qLw@LQo=+H>2BMr;vyY4O;l~Dz9>x- zz4xD9N|>l@Z&k{VT$$*gD-+dcnP>nVnHAvPNm&MR(!oF<(%q*r9n2F=`#fbj80Q1J zD^{jM=85{vl0%6ul~JE%otpF%LYWR`d3T9QbgMm$GSE+SKFhvk6ek^w(tWA#=tSvY zl(t>;(ZMLTKR*~t3eo6blm>D2L}wbMc3ypSFiJ%_=91{_2+y_6N3E^siU{qiEVDQ% zVU|90ps9rthUrOr3@wx}P5TsjI}YPCr#-52lxA6~b4g#6j1mT_MEhRJC^veV=;NFI zqYOL=os=+9uk(5-VWLje^isk^9j5E0go)mts*ln%QN3n*DPf{2^diWsi2^IW$|_lD zP*Ab9*=kE{9J{Q>?b~+mNY_(NIv8jbU3V8yrGt4EQHPc3V4N8vEJ>8;Qe*63nolU@ zD$`+YGklZ2=wOy1bQ>xW-5^h+Y_=^%yeaFkq3|$@lMY5%L2uH$MkhiCqb#7rrA!B- zOpoJV3f|~UqfAVoj}Ar|Px()&vn)CoWf&b{NYM3h8D%eaOKFVSETe2YZgnY;)e7SqL7tPYAq1vAW|^J66{nBnt|>K!!AFnN(4Dwtu^ zSy?J)&PQh0dBdVAz_IJ`Focs1hS*dewzd5zBh9CgycC>w0+G;dOf(7`CnDQPQ52cs;dw@u1) zFv{F*{-xmUtZ9_bcIl%-HW_O>4iuMSjms#9u+~UxG+=9u=Xb2=3VK_Vdhe`th~ngf zQy$SvAvL}4~QJ7$3ZHDWyg}nB^2DDfRv$tsI~)cdPTkE_>c6_1FE)Gn-%j zrZt4u_`I*{Ex$PV;FkyE{l~}6XZq#Gej54Um+$9j^^N3UOG z^o?=lmwm4MQitW2f2UdT<>lMUSYB~*!7Go~Y2Zp@)M0t$XIgd2$Hlndky}BU;(|Xe z)7Jsj;(|9$(=Tk)xZsO}7p42S#M2WmLw$1vs5ee7c;a{Z`C6<#a&wuUxSLo*T&5?k z7T3fDPh6m#w^|?JiL<#i#RX3st}e~xtOLjs|E;jN3VK`CaMuC0u7NN9daRKT`QqtO zO?>7$#8~I8B;pfTevu7!Yn?o zuX(7yu3r^;`5}>$GTew$Foj;qFe6U3-}OlvJdAnaD{A6Xl<&ml7s=Cq^HoX`)lXIXSGODL^jCFquU8l`+L-<=IyuAFo* zN|BQK=wOscI(UfHS&``2*uf|n^5}`qG)l^!_0hp7Noi-O+*ueUJd;Xv&KiQ>DZYEw zqVxKwC2$&WQo<}HaEkv2`|+xq?r?5se$1797NNT)GQbKTQ~Zt2zIWSsC(O?q7;%jsP2qE?OH>%Z?D{&aG}OG9>P=7g7GN@?bV zm->v?%n2`@q(eTh8VQZ3>7}Oh;!qDKyi|L*a^Kxw6k>HOgL~a`&{k)8_G2CohMj!y z(RMoY^ZGi-b1Z%Gv2__dw1%$G)cN3_uNzupsLqEvHnXKhKCG8N{Y)btJTs9lQDpi{ z>sr$<2Ri$Y&vSp|p|6tn6zp|h2E2SjtvXB>qOo2k1&Gbb1+VPbZN(xZsI}w2o5iBRnyupQgCriK)w_xtw(XdE)3Si_7b4qX4HT zd!msK`C|7eO?)hG!yg+?YvhARmc`S^2cOJKsgVy}nHJ#nR{(w8KZ3WdTaPF4A76mE z@8pAD_P(c)4}RJFyCy!_P{D20&!&rx8G#X+Z5;wG| 
z#aBpRpJN@^xefD{5Qh1e22cYb9P@ant_U-r%SbUlZ_r5y&s?T=Ns3)&5yCX5Y4fRp z5Ux2vrx;R%?(c}bRT;JkrekBTgg(-iWg90UZ1YcC|DuQy!ZwfSrxJ=0nzp%3s|^i= zu+611y1H!I=6lKw8X|;kPSJ1Gr3m}DR=E3J*`_YbHsN%Vn9uX{IFQ0lGFav%Z7&qb zV3*(N$(SM;GR%W7EM*nQG8lE)P4-6CB=!_7yxtLmw(EZw*6{1YY(8xAb>sW;OtQ}F^s!`F=t{doR>T;c~ zoxAjN{Pj~O88S^7+e$!vhMU54bn2iOS+Ef|aT8sU z1sQP@(5hWHvH{le)wD*HAN)r~OW4@faFW3qr8DXxgEb1!vk|Y|svqCr zAjb;}u+-BUwfp;z%-^JTlEE4kck3d9HA)WDMFwjWr(b`09W%aen%2mhNk?Ri4A#g( zH}r~;!5S%t$wx+iwJtOMG91rlhV25$^rt&I)l=)GABTPD}vz>C|F zHXABEWo#%UAJEahDjWRKfR0*J*#n$ufL6%|-)a7aB`PK&d--+t|JIiwcL zArmrKJ@%D`DXG~c`X+>nc#w9*B)|k0#L&wiStd44a6#9E%DppP z(C)exCb*!rpLxeQPNoYQM@hy+e<#_j7L)Q>O!;{?4we&~Y;eNpG1}PRh7ldLvB44j zduU-Zt3|i9+SuTXXxezIs9;u$DBH?OSkvXM&4_J1@#$j!J@xBcCmS3xl4g_Fo(izR zA%ko++gncs*x-->ceM1>bVzqPaZu?gV}nE5+SXwTJvANDw7Wu2zjirL@+g-E2DrSHD7iP5*L6QVAfBr_iJF`6{?zi4I36)r<#7+`Oy!WXp z2_)WJRFwo0ub|yntbyh04kTVYS0xft;svx)lZb?oK;r52;*y($t`OZdJpKTh#kst# zFMOp;ClO>mG}9Uv1tLhjpHhMX5#-)NX+ePqlCPp!ElFf1@kKPV6^J1H44U0SL<>D? zc>IC3+fi@w`AOG_65kda#^kY%4UE?Ey{M&4oT3(AVqk_aPSJc=l{{}p^?*V`p)3GZYIH{u0-YxPZq2?lufg)&nX z_gT<=+QG`k#F$`#8@4ku$#G&#DDLN$Y3UtIaAuYy6P@OoE{HqXKNHme4;MI@-~ywW z7ACmhuk6Z9rVH-TTHdP*IC^Kg;CcovF~J2_X(gf(6I}2ey;&1uaxcYqxN<=?mJ1Th zvwD}q_j=yL0Ztawfv1g>S)l%NTJFilf(q~lovz8SWHG9-x&MIv1f47k6u*^MX3`_Sp9~8d7byL3rBaVfrGKHN zy=*K{`p3_eV}a5a>71EA7H2-joK8%0I=8nq5J&D~H3IYc6@A83g(=L47fNea6(*?u zg0h4xlbP8c(;Qb}g8H{s;?xVn+X5dbazNYM`xH1#ZEe3~5+75>0d3P<5{{$0OIusMZcx?Yi1fc^cj)Wn zfxhK;YT$v!Rq2*rJ|5^?W}Sw3pmo7N6?x1$orAn9*DuBc&C~UgipROGLcVTDpPKO+ zkB>R&&BAbT5})0}pgB9IeP>>Yj771|I01{+bkzdr!sYdv^D_ zd~zG&3A8T9E1I|KawiXTAJtd`4|E^cK#?aK`UTy)3#`k%_KWdA_qJU$#Dm)1)TX=C zIJ0=5dyNrN@z8qO)P2$){(0={a@+Vgd7%4zy>S3PbHxMQo1~PAr<1jI%Z@by^88fL7#%{RY80*(Yn|aE7AFTZA5Aq-jstoR z`du*&%<-;C6gaYBJTSj|JXhp^#_i)MazNka*@QW0O>JuX8J&s8(#2kPZS&d50d2>= z?>i1I4ru!^eW_184yFsV?L`j`)!OHNuCFKcM8`K4>bQEs}#=y zYd>b{{zW(cJU-^6lLxv_p!wu=e7Ja^dko#5$n!w=p)C~qW$NCSK4%~o595LEo#|Xe zo(HAMA<^X{ z2=Z@Vt|KDIzox4WBFO(WT~R1>4f4;R^_v0_zoW?z0hF 
z691m|L0%uq`kKj3BFy9i$E;{1iC8s&)F(o85JB#}biA(+5hULi zd^#}}B61&z+T{P?u>Y?4nxI5v*6@>kbPz%QtrK++LH_-@bPz%Q&G&T>LH-q+br3=R z1(IXr-f``8)$r=5;qg<_PBN$e6W<0+=wRJR6K=$}J&%^Af%LC4S>q#31L6PJp-uzY ze{HQ!1JSS3;|b|bLGsf{WukE(t=I%lKF2=|rF*;JP8tXvC#yOQ1pkj7K6o7)o@gNW z)4J->K=8YC-5?ze1i#!tJsJpp^(z^g4z4BKR@V%#!e)4?Z!DU3e7|wAi|wR<*zxIz zQi=jXzoCVs6a_?n{GVD!An;#w-zF6W#J#5&1%$mpub?=i_|j9Uf%Iqd?A zBB%dXEp}m@G!Qn7rkXSj#7&e#J(^G>elR`Gm5v4?KX0fW4TOF`58R}qf!IIN`%D3v zd7gr&r&;Wk#=Co+G!Q&7&17jBr05rv9HnU>cxV>?O7V0Gg8xH_K{^@;{-}+5G!XnY z&2E8c=>16~&hJr+TLgGN;2az4xSV)$f(MG%r+Zkh?+3VeFvqLVy`el0G%tC`8fAH& z5F>s*dc!Ku6KurKGD$-`K}P(fd8K&h{fVjj`~Cd$IG4GW?!;s2UU!QI9_U^fG-mDh zI?i12K=%?&H1I(8JhU&Cj|aMEptY_%4|LC*QL10`1|`z^sbsnF%D>5l#ci0;(*Sj+bhNajq@K=1CX1^UUwQY6J zcN{+QFL9XKHp;EY0d1>MZi&?wt~j7=nd6FaK-*k}6*-`7!CZeo=0?kk{sKBI(@!ALx`l*N zTw{e%X^lj$D+roXZj44!BjK8sLJ=}l5+mVgdf&y{R$?Q;5HY=2TZxQ> z_0kJRn9xX=y__&Afsx=vJ7H9OBf)o{3ZZhLU-66tOZNz);u;AWJrzd9F%m@4{+UNF z7GZ>uAUC~H<3(XD@#=(7gpp_q73BPQVHE1cvU#erZIP-juw*A_7>DsfiJJmaZNcF=jiURW*rCGL*x+()qYb4Gy zOca<0Ev7LN8?^S|>>~pr?SFqqrbT4_RZC@nNd6-!-3f$>{NJpGOp7Q1`Dvjj&|)$p zQAje;7IR}%Q3$Tk1v_Vp42W8=E>xz)#74rg31on%2=!;l08tcj(0#i=A5j-x-jZn% zrQv*BQD82NDyqZsGBQ9Eh+(vN;Orx75jCO-?I{F+C=+>TaVr2srFcVM#o+`-B2*NM zi^7&;K-7!nCq+k<0Z}rB&XWP6YP6dm14QBIL~p|U4$e4N?O5xo9c5YVSXIPoF`f6? z(V^EswjKy_LQy?N(2*54G_O&X)sLpMjue8Tg0MPZ9`bTS^PpAKkf614P*jmio#ddX zBWox*3k8fyGU_k6R#8iu)4EEiRaBD#^hq@#DC$WlO>S;zZis`5a+Rzv1Vv3*KTdp@ zSgWWiXc>)R<

&DJIi01Ecy}ekcn@{aH&@PcUp$pfMlIwu~CoqJb3MP>Oo3$!*uPhj~>goVlFlyC$y0P*qmG=Q-VWU=!qkX<0 z7`5tytFmFERu$eU3r4Mq2;gn+eKu>=4LaNs=(Aa?Hhn1*HUpzpO>!uj9PV; z&LIT6ZPu#GQr>1@)T%YDWWr`()T&VhWWlIaAJ*XqJ8$l>uUbdvl^(Z8b*og0s#k|C z9Ae-1kNv=Ek_Tnp_qU*LzVM<@mI}=lYRi4!pE#UOetFt5@B6R45pD|=Xag;TdD}wi z89!OLE!3P=^lp&1EfkxgRfO9@m5G~Q2xZ>)-^wbC!X9Xg&l^*(K4fj7piGJsZVQ#9 z34N=DXAD_eC?R<-3b%#Y5%fYBg`#meyAaB}?_W!Aka_yWP$&~)J`!#V^`T|BFbajC z&^lohZgF1xEQ|^>5`?uALa|o@*soj_^C3ea`LF#&xGiM;(c6SkNcY{p=Rgg1?Marq z&d-Xo^Ro?f1eU^ie%7FVvl>-dwhVHDuooG3*=mXdl-DTE&dXZTRW>IGN&f#>yYhe= zi!c0scG*AszGmOItl9T{NeCa=WzCY(LQ0Y)B2pq{DMd<(pPeWbEn2jQ5=s(LDdqRf zz0-T{T<^Vi&dmJ%ea~~w%sKCKX3os}-s*Wcmde8t`eW($aY5K?d~+`s1ciV7W-bT< z|L6@|5cIv@aV`k*UUL^01a-fGmP#6NDu*dYpX)fS;59+e_RZ9~&;&u+XVVr(69i?y zl^TGWAP9SK0)Pey=4R9z1 zH&MmXi@TUlRD&$fGwqcF4&~tc#RfRkgDr~;a3~1#mKoqs5qg(1z@a2mZEJu-O-MJ^ z0EeRR^-?CTEaHHw@EY}L_2MpYC=06^8rVZ!n0ej+hr-aGw%7W8qB7K?)rvk2r6D7= zDD-e;FfXVL$GG_^a3~J1_hHV9z@a+Kq+Y7NpC}LG+G*JvAF2nh?MJ{5t=@EC1_6Co;i)kRH+ct=zRfof2Do+Af>;KEmvH?Y(Uzmk} zmf!Xw3jr0cc94aDey69yQJs-PxsMfK*??vr&aaIqi`j)*Z=$C?kyio%ot`y@WdjO5 z;3^9NZLYnFg@7t&>Zy$=gKZGPFtp#62P1r9OLd zcdBE#NO#qD>QoSCUX`=((@fFQtiQgJ}C&z|K~E(kLG{V*;FO1z~! z7X&e$8`0}bJbPkH(BnbW3ecE60fHpgdztGVR5?4{D$uYb`|PJ?aV!ZCw0YZ1E(r3x z;BhVp3f;F57X*>6L8V-CBB0Y52XZYzs{cI01wpO9ox=giKKmOxxFBfuW8ZN>knMpa z^cu4#+=Fu0$;Y(>@y<+J4Gl}O&whfHd$P~Ixf91d0fLG@R*VaRkPlm#7^F7L3wg=6 zEcZcqt{0a7S}ZCIYCiX79!&Px2hvVP%bI{e(d*vJvnKoO*+^-%tO*!Y{UoKXHVnGH zl@9Z?VNmwRX_KuDgSHQ8$)6IadjmSj(zXVDPtMM>Cj0DX((qx>_#IThwVV{zpz;fE z;9G;v_oLdSZ4F9alQvY^Flc=V5>sthCNIkk-|)uE8JYYQGs_SwH@=A`Vizez96X@y(Bp#QU}gQg8b1sFj~mN>BKq4hj{ zCnNb*+P&tZ8gTtPXDSd?VBR)=E;WJjI~jv}m;g};>ixw9%I{=kUuyzHH8|DZ1c-XD zt%L~>6=D7_CP37Lp|r))42PH;RE36Ax)?yzg`9J_uFCIZ{P>^=5Vc|FR1+Yo!y;O| zX^v62iu%x>UOr<0Q6btdF>w_&qINA4AgV-WI(%oiiaPP#!(5Bva~EjlytUZ`h*~lC zOA{cf#gL&UK-7!+w1&}~95F^zjBIF71UF zKva%9sNlo}s-H^2{-NiJ*seytp`!v(JLc0RnCmPC17+J*EB5KFbpG<(L9Sxo^0it%~pl!P57-j7^#m-gvjis#zxvt9EG4E*; zAZo{`INOIv>6jRGew*BzR>%=2K@so5s0b8|1%W5Nfe3kiJB*e@iEIc2%0$C!{yZc? 
z$n)ErfoCp(UI_#W!;gbmHc%0ElFaIOg;KEaHgS;+ zDUl6%e*5zy+T)Ptw|i(ct78KKKkXlu4eSb*P$yl-1|)r8;7MuVT#0c&%j?!-c?A*A z)}MuddjBv>8zIkc|Mx!@0&D&G-?9)8>fs|<2q^T;foH3Mvn$2{X-DT`Gey;C{>*?bHwe^hvhUaxj5Q0swVgKxYOT0Ep{YYW@WRCWW+h zbx2!l2yN|4yP;&#*2s63R0z~{(HeiA5<)6?HH5z2(S!$q!Z!XvA5tDxps^8awSrbhz=~JP4FH_j-Lu8H@#5{1YQ#1O#flvpUZT^mrk4 zITB8?umVLMM~mgye6c{HH9YkyWe*#Cuy?s4v~t1C@-d_Mq*3_uclDtk#r?M z4+B|lO8tq%nDVd#UCx)0F&=?|FrTARsb>euymva&4y5@hy6&WB2iiR5My4HzbBnZ0 z41U|?+7C=j4liqg^9)Qm%r*4+SCaI^b09Df==~%FdKf75Ge0tYgG6unm5G5yH>4Xf zdcHxVbJEoSJq%R($5Ra7%3|Ii)4TFAG0^EJ>oYMB>XElHF;MEJbQqI(4#apM)p<)X z?34ipT78z2I|2i--nEu#2Wq{DRyT>qBkVw~@1$xK3v+Ksuj%)>l2iWheao%>GIqqf zJt_){y@aw<2PMDPHJUbBx+ti2vm-1I<@dVs)1?O;Tk?Bd=j*X;LA&?SMqbwz)O#uQ z#B@>6@3GIaJ%oanyTC?ah24^Bxvqy$@j}#^(Lsr?VL`_)_F;J_zt{CnZ#D{A{_O2+ z6x95G!`Ue4d82G>6cjx--Ad7&LumTh6qYUdy{_Glvr*9X#hchDDEp`n*(hjxGdfDt znGG>6sC&L;EL-w>UFUciCBN6Tr#RadH2z8Y4usCQge|E2gb5n9)GHuZmk*#FWMs)@3PItMT%fp+fm-m}7bZX~?BD&&1c-{T{Fn(4 zMPVClb2O7&j1hIA@na?yQ5y1WGy$SI{QMOcDDRH!&1?chjd+UIdzzCYETT+|p`$zo z5S60E5EEBXEDF$pv8F|NcjOm#%E`MUd#OAzETU>G8Od`XgJY9k*Zh zfdzu)@xOPMRA@mj+tUmAQ2F~@<0_j$Q9)XM%!kU~=PESS42mjpv5pxOb>skPto95G zhfzuHr^_lVC~C>F{bmlMnv8$O42pWv=3X->DoRm$ytT(Feq;+ZXj z7FJPLp6hQ0MP=DB)C`K+(v(hlSYt(X$ydY7D(cHGefd!FOIc_v?W0p8+ zW>!&U#vU?*qRzBDV+KW~Dfl!WDu18rSA#^9zt8n{Q8TNkH&0R7&<>8Uii&gBow`=l zpV&gp88pR*7E)`@_3uilU{sxDwAN*S_m|8|%I!`A=49nsNQ&5@q=Q6P@ zf1m3aS`acVqdMK$-PAJbQ)}8IFfF4(6{cnu11x{@t1*2A#Nbq0; zCyA_8e~vJ*EPtQt5FG_ECQR0<7si@eMy=X?mnj&vss-IgVY-c4Rj`?Tx6rL`8iCz2ptcFE$n($k;3SrP(t?5djz^D)Q*<4gw;i%XcT&n zJsYSN7pX6=V@sa*zgwDROQ6taTi%w9LSdLVgpEQ)Xg8IOLMbRltxKKRkmvoEco`+n z``>+&<)J`9^`D)|MnUfHr6p_ZajBmGh0YJBd#kzB{u#Y{rh*{z&#m3_7=^k{wH8n*vL6G$# zt+?(%)h~_XfaG^W53b~bpy?}UeWo!cVF{8x={v3^D0+LkA*pEzf?j+B$CCVR=G!W=tAds zTGr%uLnm+JSrag5ddL1eSopi4*-DYtYB?!j(Df^n@!Bvb`(Y~F+AwJQs_y);LEWd6 z=fj}yU1*)4?IaYw^d+7(@c~err(Zq8he72(rV_5@r2KuZwZHJKLFwP71+um^Xnh+x zqSb~$?ThZ_I|;qNe3l23-wl1g5FZB3U)h)sgX&N2!-qllJ5uXWYbM3mp!_Av@vO=3 
zhW^1!F8STi_g~>#!)pGy`}r_bfCqq*t)Dnk@<8Z}fqtJWd1R6g%d6G^<=r|JhaxbA zj?VOPr~)snVA>PE>xG5=J@pK5r~{p-g{bGJ_#LXeUbY)&Gwlf+D#71$0axE1O2J3z z4D6v6tp3;lhhlIqZOQcgL^bG2B}*TNa!{78Z0O;{?|Pvg{5ym>K7m6)I5Nlphn4?J zw9eD_6D8rmcmsQ=3GL{DsJ=ZEh2o72?4c_BL2sMr*^|G|^}$l6J%K}Acz%Zg4u#=v z+P~`iiOSH4mL2*yl!j7kn10IN=eokpGx_^mhbkD@Lv>h5J08993416HQ>m)N!>tI_ zhi!Q8x{}uh+p_FwD9_5Ozuy?XFZJIkN=i!8wu2|6q>LRoQ9KDrDp5Kw+n;;<0UHuk zghy$;Z09<;AWGw9J3(dL!lTq)w$C1M;ZZ6t+xv@M5G8rpRu?rR8XXn>Y`+hPPc(7# zkTD|%4IUI7ew7H}KTDgBz{yTbB*XtL9d}p|A^b z1mXUEhAjy~{Y4pD68z@k)&6{v#nB;7PYc%TLL5unRT6~w?*p+tNlYR`{42FJ3_KAe z2=PhU^%{~O#2|d31L4?pBK4eDpY;^cW|JU)#9(AdWXsOR8Trj5bvX^+v8B=-Lv+Z}vn1p`GFiXa1(b?0mO?*aLx5vmHVwBlajXEWPv1T$Ndv0vt z)JJ4b{(GDAIrt4kv*AyaOxwnCsjd9#_$)|c89d5qtKoX&-sj%V(%YfWDc=a z@;VCLX+qb#ZP}1DwI~m5*^n_M>7Jl18?vR`9nR(rnNoO^Gd5&Nj=L?`)`j#XB11YA zh|NaJfEXD#~;_1!PDv^)dKt zQ==t^OA?GJYY7hWQX+9Y^- z5{G9>NQw4@l&F(XqQz}}FDq;Br2GU@>5xFPeso8Nb>S}tEl7qbBRiK5aWQD)Qq25UMb%G4~e=Z@~1NMG}Gwnq7yHXI1}mCm^Gzb+YF~|h7(F8P@i9I=WYL$s{C}PzbG}x$hbyT9LbV!taRpWcdp+lm)`GPw-B+9zS z+|bEHSzf{&9TH{P8h3O^lm!=U=*EZ2(hlsFs(S5&qI^iJ*76B{?t4{PV~Y}%5GoxK;)Aj7=#UhjZE{D4#5maA9UYQmCtU`&nmf7Hcs-&Q zU=s?t)>us^@CM$6kCG!2<@<{9=PrKUsdPw`FDkjCL!w0dJbK_@Io=^r-iv4&1%-k` zheX*$y<{uzWTI@E;b!h+qO7IsLI&Qw8Y(Y)p(xe7&RA>Y$o-`6U6r6kY4F_nO%#+thM=u9iSbWHrf2gHn?;o_~w{(vDllgLrUK6nMPw*jMPJZst zKjh0-^oFd7e-lIbvNNPF?S;OSSmFCuL2obP=ZeaOT)E*xM_g&W9GQDL;zEvGrMDC9 z#t1#RG~Iz;RlW8iH%@=#hzogfwvaK`>gb#(l{+?9f_kHJAt&s|toUAuUWb%Pz?a~8Rr!!F zg=oVRIDafYVj(Z(}H5#CdV` z&|SD3z4G|u4ZM0o@d&+D81N#IN{N)|n9(gIvZiT&x0Fbn=AGP8Rtw#Q%TeJwx0I>8 z9F^X1OPS>5D16kGGW^+~h$OmoaePWDQn8Xqr9=|7pXHVkN!0j+TS_ERtuyW@WfGOA zW7EI~k~5cr5=m6{q+2hML`AK`b5}@-L~E{k=)5?`5IWtt?jlU$L928~pxbizgSMta z@^pH~9UT&<(Fk{Rs9HA-bwgJzbY7gJZUJ|6NS5MM5DY@`VsxSmrOUk6(IqZdDjgD~ z5AE!&>5wR08pWPFu6IZjan%0b(S3BEfJ=u&sp-f&nJD#Wr(-pDf)0sNWr)pS)f$2x z%kYPMy1=V9iB(0VM6&eA=av!))32IaN+eC!h_V!a)2LUEkvOehbmyfkOAYF|r9=W% zqeEfCh^wnvB+(tI<9nI-Ayg@mL@Bgq4)l^si6m+pv8W4*IF}Mh)SYUI6EBfOZOXcv 
zOPNFsx45N55>=mTOSw5zvJPNvQ^V^ny1mP$`II%{pC&~=kbc)!iMpHl#433ZZDQPTB9#Wv*}5rb_&wK%TzV!c{(Q!+YJM)MoXDle zNs$8mWOE{y9-?MY;J1QUY<+;iZ`H(M*=#_x$@{jEw@?};rhyEd7Ce!x8-aq8a{c9ci zhkO}NOL7zc!e0f7s8#F7`2N)hT9#^PjLM0eSvu7*C-P<`EuY=^iQHLCZ$`V}ME*SX zp5w72ho;g^6E}V$k0$(J?PvH44H3Dtflhh?{nU8ARZis63v?^X4JUGGE`6=W4JUGG z37x>Z;Y2RYp=+dWIFU=!j>L~UJ4VQ*Nz@cF^^@vO&G5SCyTiVpfwy_0{fxRWtnwj` zJ|FCm4>|On_U*&)elf0p$e&#e9r}md*|g9RpICmBi4CV{>gk< z^QM)5<3jx&al5Z3ZkVCpbAk5g*WGeTFn1~!a^;6kzHfG1$djYAU$Nstj=b}-BfpRz zhvd}|=2{t@6Bp?f+v~=dpx&rl$caW~jP@)6y>wVyxa$=h@mY{w_-$`;xSnDB*Pf{)0T%d_LpUxH^} zxoSxEyehnd<-&PAOgOR`j6at2x+Sdt+YE+_AkL!lXBSUHwrx$E2$&ebkzi>57q(-WLT#+F)GSX?hn>dYojy~+2m0_}*pr=)}qQc{So{qUi~CMZ9C zN?f(Ug^U?H+7TC$WfFDB?fgQXw4?oi9T!rh2}!*j7qX)kz2RZUg~X^H(dA{v@%HGP zxc}3*aijuteNGUU%!x6X9dRKiCeYp1p!w4G3pvrYlq0{86O9fz;zCZ`Jkk*ta-wD` zW3JwzoLCdmiPmB-GmTEb%7phazDC5GcjBq-wH6vN?r+~Ga~|ZvU9{#k=RqEfPjTQ6 z-o5`Otpgr>i($nw2Run$#vke#@jM(I_c?oF^2DnR#3SQAy{`iv#Qko{>FBuY`-8Z5 znc~17Jo~L_BWLapp8JmU0-QMyp84kV1uhQHfan@9ua!SP6=q5m#z2_kE;C=JO@Sh+Jq&od!ED&h%9K?=%52@g@n}DDvOb~ISQ}D}HexriYpG9GR%18er-e#{wAf4s z;)X;w;V0Ii7VGY^^+*sQ9oCd}L4;IzuA~bhq`{MqxgbIc%%SO+{vgKv5};6Vnw8`0@Co*x8%{|O_1!b`k}xTnb)mxro_8h7H6asP|% z#sw-KjNAak&Uiasuj1iB}eZ=_M#}0T9=gV(6 z;6bd<9(2Hic%RthfCn-Ee76zLjOe&$DjJt3*7HO>;X0i&DdGexAntlRi2I*UI`9W^ z|AU$pL3ya>58{6AMhE^N?%!u~z=OCSjaUyc=9zjh_0aG_oo*7*UW&V3Y#O>f6tyrH zO~JY#!UKQPQ~topniHkYYI0$huYTy}?AjE}w*bWCFP9Df739q$9>6Ieh;s>9kY`|I|rqZcIc#>~E zVoM~d0YY8>Hy1<*ceT%45FzAc&blB%*b9H^f(W6{Nu3E+)WS>di14>+9M_|m4=54I z8s3oJ3<>f`lL+Cj`KAkx5dInwhgJcZt?3cMU#_JKj}ZQ%Rb3Dv{JA3*Oq`*G?`E#X z(_i0fC2IIRbb?bl?!vomi*EFb^ePRW{jrqSMl|){4MIQU9e;j|X%P56KiktF?A__8 z%-90*4DD)+Xk@?#(`zHfGzj=@ z3*ydA!a)gd5b&%A}K^_0HIfV@B@a1+C2x2=E>B)EW&+O7nom_TC`S zx6&0YV{Z`bj&!}1K{Gu%;8O?39aM}5K&3&zCp~CSgMg3v(~d?4eB?xX8a&mvQ%5*x ziWAO_@CE_z8L=`5c&4;y5b(|zll-_5!y8&AH-<}u_GsrhuI{R z1_6J0HF=X@(H@fq0blu~Jq@1lCHh;4KySoO1OcDB!Oj~&gMdGzzlDhD4FW#xjEOhl zo`{fi8Spn~!xt3rNJ)?IMx{Z(BYvB}m<9oVu72D>X?cTyFYaqcBLhCCyFCp8{@4|J 
z8U%d$C=(i5DmTVr1W))|bYd92o{2HNDg}bQzJl+G5d{MMLP0wU1bO+7b`*G`m+I|2 zA`+kGqCTb*!F{}@xfg;0ft{hZ^N8?*mdY}yTW*i(MGVPSDG<~R_t{Y(sH-SLqw_+` z3k3CPy=_Cxxj;}C9XB6{4C?$Db`%KeBYHcJh=I^DSxWQPZ+$OM2kdDO@U_kCX%O&d^>-qHQ!E3%u#cTLGT^iHw-6ED(5>+%;alSu zpZC29x~&@-aP+pLE~kiFZGs08KeN#R4?=!yp#vVo{0O}sWIiqg{h+NsjWItc(z{kz zj!W?1Ip0oQ7|yu*h930);)ONX1v;(?;)zuD$hfOKi2G?;pqld_?%&Wkf;kW3{z?0| z^Q<>6#Qj|t{vhrLzO?j5#(gJg7-xRy#&;9k_(t3>Q422m7Hq7!Q8^IvpQuQfaUkBu zY4vT!fmna_y+5~R9EkI0wD~mSK#bqF8B1ft1<&=KGVHP3A9|+6t#KLK%k)*h=&{7o zag_tHJv%$5A9@^!?TIm#9Ek1b^a{G!SPo-8+RnAK~ssgkJxC z)#nJhWgF|+Re2EK^K?tYoCh&JcY_0e5a;jCIp9I8kJ5E8^Kl{Ghiv_kdcBW2E*yX8 zwzpiL{dsHL`HA&RsyvAMFSK|K@<)#casPpyDRUmg{bXwg<3ilOq&}OuKZyH>w*JVt z?~hmyCGMsB>*?@$#VTldQ?DiJbgqiD^S&`|`?~Wj)C5;q(s{W`X7p#-lqIc~D}M%S zmNZ_jY;>$-;!6|EOKLAy+D2Bs2o^;9Dz)QHeL>B>@{NHnJwsJHBGMHeiOWLD9vNws z1(7aJ`>&u8>98Qud0(~m1(D7|Z_%0fA|svpw3RR6NaspL-8@rY5b3}B8~D;W6zS(e z&vaAqOjo8BY4Mo1Od_MKG9b#OXmw`7fCv|$(=`(YL^lu3g$V?BvW4je zl_?9Nn=_(18O)1R;f(aOnfM}D5anx|4Px|Qbfl}(&N9fC;H+0!5b3h(gil&$g1h_q=n-K+q+qAPE zTq6nuw$2DH1ziJ?LA_&YOo|witx_PU1D>{{Ku~X`42_3~-3_s0@}*g&L0|`8_B}DCL2w7=v!_9Tdr@P| z$Qz-lcyc?Hv-3tubBkj3GzfP6dL}e<1w;mXG;KeE<|f87q|zYZgCfdS!2Uyv1_8e< zVx3}2gMjxQ5O;18dLskg`9XUcJlQSpwx>bB8$4k`6aFQeg{Z}?y;@!&FW(S4S#KP;M{pEo@7)lD#~~>D*Hjl*7xjg<1s~ zpq7~O>|UM)RO2*3*}Ob=yvfN3VJWMZr}+*pD2tb;@IEdmvzPmKI%(4wQwzA4$;*A1 zT8f&W8@$|Wf91F*EM@d^PrJeeW$<#}mY)M^4om61+%>5@YK%#M(s{YF)aF`3l{i(4 z3qnoUS(5{52KP|%m$2LuAn5(L23$)}_c3$l&X8Bp@=*gO z3>-Fm^w6-Jt2oGc<3+xQ`Zy?he!8lxkAtXRq%NF34!Zu{|CoNZ^6Cg_e_@LO4(fg% zbuRV%6!swSJxuIDWpw1~`cQ*7HnUOUw)OeqmYz93+1f>5pC< zggvN!%VGxhApAv38{nY*f6_UEo}Vq?C*=R5o0;|m4i(^~t_C<1f$6sy;7|wp(8@q> ze9hq}N1NFRr4kaY>uPhk%Q;q+ny9BRU@Cz-fru!pkn6xUCILuGi9)(U#_B5)`U zcRs7-=g3ffcq&vM>WTU=?2?bmA6Op-jk#+y{v}C8qCzyGJ^%-a8c{I4kxf*IOL>ft zs1qNQWFuR7^+csuUEBzXTJb;;BP6OtPcxgS7uCud*+j*-fmT1d@f0(Jn(=)LBb%ri z+i2UPYqKS6qHZi2#kMJss2roG86i#j zBx=aItwu;xk%#DjjpHrqNZ;?-Hk-p+RFc}i86iF=OQG?GSX7tG)Xd}gjQVnjZl~#ETX_vcg;_(55*`*cX6h`1@d~@B 
zGQCXgqRv!bXlNIeCesQ-ENab3x*f@zD^#29q)z(SmY6Hln9f#v$|2yx&KV4ssntx%4MLjC~ogo(W==TeTSk$A#w9W{MQSf+WJz9I!z^=fe z9z`^hd3I5cdYjrsJ*t`7&}Y=6EZGdPs7I%Y8Dde7c2+mQ%6hb9vxhebVlgnBYOEb3AJ9R~9( z>rq|mA?e!{Sk$BBuMF*`^>SxAV2DLMI!lKeJfBgI_QhYD1?PsWN6SpcE9=pu6WZe) z5ZWI_oUxC(*~hAfIUSlc>E5xuSdAp9AZ(0Uw&H?Nfr@tFf>3(??9T!CqO6~cfQB9glf_5V=f3~q7*H+G^Sj*hx+gjt+O;h@{IknSjXjo6Cps@p}a~v zBTY*v1&?OoOoXt68gP3h4oIG{*N=6jCU8vhjJ-PbmNdpBK#==fZMp72=g+m_f*|nw zX|bc>o;+h;&T>zHu*I0%Ik9{FLR$VrsMl3f>@7yqnRia5*VVbxt*u)K%}!D=kn&b* zeM5Q}D0#8hm>3B8mEBAXbo^slKP2|8l~+^ru-4IevmOR2{?NM&--I2Acwep^Xn5^U zn06rH*}i3Bpx|dnPW0vg0=_2|!%j=g0eVqS=VM|Z-xDe_F;MRgHJBKPcj>xJ47B@N zGbRQ-rY~q!lX$LMVBVnI8wWG&2n>XK)@UXMx;=0%6N4U7um3SH&EXr?>J4ark$4US z2BMvN9n%gp`{FAC{(ZK2m==;S)L zqSqaoG;h_lNoP@Ql2jBFyA7R%=%S$6CBz}qpfufES8J(@f@=R=l#POJAE9Sl$5tz^ zwovX@%Cl?<6tsIr88!;)ox-;T{jO7uZ3_yXgBC)%vjGkNsUzDKRD5rL7OExY5IX+M zBsL04K5-Ts1ugG5pN)c=m!VrTI&&z-l^NMVn-X0V6#eK1mWM513!45Ko$2V<5-6zp zW3-CUMM2l6QZGms1!W)fGizMU;UToWA@xFaYzY+9J#R*~E$I8N8Q3T&{JZH{sAljG z8o!d~p+G_9r%;DB_G}Ce>HL!+oxe%w{I~(Whj|p85533m-^C^s2BB{|f)9hzmz=U{L!<9^+es-1j%I2EDIK&zZK95d3749c>sC z|19mQv|*6^ee@=P7OW*^5}Ln^`kdM@i2kHscmWXBp!%II^I?$vvVZeo(EU_&msfji z5dN=n@~pMM*r5EIXa%BWO~4@iYnZ7lV9@?KRryXr{D*Yr!J5Mw)W1=GJ`D1oj~cLA zQzEQE|1XT>TSEajcn1&G4AxKqo@Y8KU?>6i(g9GMv4y_?@OY>OR1-DePCDn#?tiH% z;ypGM0QGNo*dLV!0J>j_P9HP?Q2c++a4bOUKclBOfrVCHHKFnwe$%xe0MPfDzj6Rj z_5myl(DZtA7e!+XQ1e{0-O>O+$Is>Ei~$P1znm_hC1wfQy}S_z0M(w{jRSyQcj?Um zK&i|3=K!G5scCVOU|PkXq0V1V*0sjMo7P~o9( z=vrtFm!Q9m-{t_Iy!q*%BY{i80yOvHM;r@K+xI@y1vG;N=_C7jy2^>TJ z(E2)}?>(WI?@~@*Th;f(XWD`6=I+nLKzV-|$HYK<574$JaURIN_ll<&b_50zeD^C%3{<%5 zCME_#TwyB{11(NNTL`^5fE-^Q&9nnW{)Utwu^rj>-h7H-M_{1K^UgCdkmljEvrp`s z?0Yw*?x-FH0$s2o!#80E8hxn(69bw4pgaR3``)WLzRAA#1Jx7SQ9IaJ3s0QtV{%`o z*Z;LcR1_4uKBpuTA5DK1W7u!Q<_<357=-PsczwNN*eGcFxAgq$#7p+Q-)hOSB~Z}y`Lr(5wFPA#F@|MJ_Pv`uz(zsc z3(*!r$3tNY`hJ;OLAod?{GnMa4`ttb4bMZ__nx{y! 
z^qtNb;Gp$o|1iKo?NiYfU*AvYeMG}h4=2C*uqg-APl3aFeNI*b98`aZi9P6kBf8(A zKRzgbK2j2W9JK#JEranv{SS((P2sa`J$v$-56|~Cz@Y-%JI(-y8gL7>F!cRI6=>GO zz#i&AWxD>UZx59q{Y(RUs0GKLWa7lPBG7o*LTed4KLrl;V8QDK_D~V-_=JfQ-)=xR zra7%L^!yBe^C5R(>T2raP#1oueT+U1mEkZQ(&*vjHy#T1g5B1@_4NRP@4=+&nRnJd>Lw&gKsDVAy zhY_6(aHtPGjvL@mAF5DqMSpy#4;f$GpVs))BT(!g z?xy-q2LTbU52yeXhOXOuLmB{7gbMpP0F;6>pK<`G0pEP53y^1~n~!q}k_Yyu1N z%(T^Ox`P$~$ZxTo8~}9p$_@?yg8T7yU4T3@U8n7mJTrZWmMVd>6#jL$d7=9#RmFXj zh%RO}zc)kg390`NY3)uAT;l(SvQ{eQ4>|GwLs-*QVf;UI_1gxT|KG~1Dz2iuRgdxi zP}TW2G5#N-I)cLwO>NeM!4F9-M9+Tw`4r=YqF$z&oB01BsE5Wf{J|ZRx2RVWpT8yi zft;?Hp~)})A8I;vDdYbkroHIWWnzDzrPbFk_#vg4HZuMnN_vu>;Q0QuzHI(bFcAMyX_VazBR*Z)4D-o^ZoepVOyc`sGU9KL==zPFuFpp&eYI+CH2K2P~Q!E=$)WWM+ z2x#i>n^_2`>bI0tI&%eGJ^YTgjh2{QDC^o|ECjT5+C>%u>e~AX3juwtagBw5!e+_H zLO^3rktQUX-4>WDsO(Og;ECkfHB%MGc8i%j} zy}e5HT?YZh{dA1BSIuApn!8@#D}jLOK5|zA8+}8%yC9^yHH7Zox7$Y~`?{;VN25X@ zygd%`AW+^aM|coOZw5LX&~gmgdxH9C2_db#8bW-xQfpre0`-03to~Sp70B;MrWNRK zi_1JK5a6OoJV>&aHyz!*&>B7@_|IgX6=?8Bbbgu8N=wWbMEE6I{%b*?!qYqIJ0`3^ zhWpULhn5xSaIM>TRv^S#X=$Tn1xkE+yuOtd7z?C$7cEs1S`iRv@l%iUtU!$Kd{rOP z99E#l?RN1XkmFJx@F39Rf8OUoAjqG+rw?g{u|SbGFdP#QNb*eDh{TAC`jbo00;D<&;&uLuh4=+69ldPcp7I+@*7g?7IQ(+>xb5H zK~U_zuW>=p?AmW}K~U{%d$=Iz_L&bkAo)8@yH9dK(C($@xFD$age)A8{DxGA!dws( zymWak2pWEk-XYOQSm7R2{EM<2OY$318(HqjZ%ECmn%I(hw*p!|b*c}_t7y6UK8=cl zocE&J+4?vrdi5s^>_OBs(XoKOJ?Q$$4NQCTPR90E3~*5QCnN4QY0amw2Z0}BVh}_d)LmhaglK~E;U?MFJ^u{OeWORIhX;0qS zD-&@?N$a^4xHNcjA2YCrvT$@36DRLvyvFrY-pP1uftI~HLj9MB-)Wk5+Q;P&oR|OJ z-%*jM5WPv6I7rlp8h;ttM3so>DROM0PMo6iPF}JcbeAsH$tLzJTk!uiR#gR ziV+g^qb}`-bmvx#6BQ(xl#7Ez4LM6^FS>NK}$ZpRwmw z{Mr&aL!Icpq^`~I?=@^_kE=X)!Er)f5=vf|IsxKLe* z_?@QdU43s0Ds|=h_k~m}DoY>gaq_UJEw!lG$-|<$WToeuhedrkP0y)5R{k!=E?U#@ zu&6Q5Q4aI4s4`F8YY-#hGwRHpraq(6w4=KtJfBf(O3^xphefsd=Xt~NqTYP=h5=Uo zPSb|>46&#=Gru*&qUsDV$&K*uV&tj!v!Pv7o}9lMVo`g}r7^&Y-_$_$*-v*+^m9XC zQGb>fH?)fiG`WcZR{kzVmoA1_G`Y%CS5|+#!Y=Ai>b{0{QHj3pV}O;vi}8ku&+>OM z<_^-fJ2tu=Mf`$f&|F2{lUK2*M-kr!=V4KgYSZe3hebWg_Cwt9#`YQY=nSnhcy@6p 
z{SNIu^s(}Hns(E!kcUM*is&ixu&77lP3@u{wg1Clp2c`kk4mRD#G)QuqrDn$yr@TC zls2#{>(RyrhFH|2S)C2Bs7C{DHN;{c)F8fI1wRX7yr@UHhZ@>NJ^E#$0an(d1Jpy; zkFTspE8?$ng2yYcs7H6dX5h1|M_uXIN#AFIMLnu;*w8NOQJO=BSk$9$J}|(_dbHWZ zXIYQteX4C&{mCVq%gm_ZW7V^!h<7Jc5DHNWJ-C`6RG>O7xt36Ra?s5bO-rab=g)F2 zq1gO%I|n4s*!TA5f>2(bAqmkKldy!kGLdZw1*Ibij;1A4k}~7DW6FUWWDjvcs2xY2 z0kCk_Ko@xfl})L!QsMmQWGS)2d9vl00L7hZ-rG zAnZw2rsB9KEJ6IIl;D8m8GHBYToB~GQX?)1I-l-lE(ikuZ9@)7p0U5ha!;PI&u@{~ zQc7rV5%ImsM;7{^oJy~Y-oH{Ykn;Y^m>4K|-8D=MggnnNCMKVkFL^T)0~tTNmw^%A zxJ2)2-)<%bBED<~69WyO#I*wn??m_Z6US7H2MS*H2-7zRcoM0Jo*n4-m$Y9=jFI1~ z+?0jko4`Q5=TJ{X&kn?U2wjKK!$7+mRc87I>CQ(dM|yUk+!tsilo%twS9!2E!#DZ8 z%I60%G3mYhIbUU9Y1(I-wo) z%ad4p&!qE^+=^aDzVD@?px6Vh`j&K2(Cmh^ztBZNwd<#2+k$TA%FRYWxzEvpLB~V+ zy~_Q?ShnQ%Dwh{#qoCiD`L>|oUCOX+LBq?_zD##EpyH{Uvu#1gzwW_8iLYGZ)r2=j zvQg0Txl`FFsQJ)G*eK|E<2h^;6g~f9HVT@4aTN{ z<6DE?-)&$Gg5Q-cyl6WK#jilRqYZ=Pr&-G%8#Mo$tvs0c!6k_P<`4NWsQ$c@d>CYZ z7&Y>=rbIXi-ETrSRJ36b{sMpVorLmV%E*Js-)Z`QmLOVA%HL^PU7BwV;-80_dRo@x z?=;!Vw{oTfp448xB@bBgw8lKA^774H>VPN7%lFteBcAZ5<09g|gVr=b@tQb#$e59X z1`mpkyUK&OzfmBrKYBcf`?^dHco6sJs8?b>F2sHDjSl=l+!wuP$s_Uyai7!Jh^K4l zd4E3iyf+um`|huOo|69e1k^i+(J@y!5c4nFVZf>j016gA+zOJ z5aXq(EjU_x%|(14FT&=yKRUL1Dfgr2BbJV<9Ej}}x;Jjdf!J=SA9E~vejv81`dM-y zw#%t!Xf_ta_Gvok<#Wi`&L7JjORwlUy^oF;%P4!b_|LA&gZOS=<@;jJgBWk6{=Yd7 z;`}N-!{$7Q^=i^Ja~{O|nNKX|N9y&0UyOLvbrtk_f7-Y_v7RTD2XWsW4WHSW5+ z2iE1|I}DgGaMk&2PZA?NR6?Z3)x0hVks?=UmFd7kq{%P$xbzUIax#xgLZr)4I+C^=vMe@- zNi>ZJpNdYKG}QVDB1|X*Dk0M5ulwW9A(IekbAg(^4m?EKoImB#L!`|QB%}^JMB04$ zn=21x+I$#44-*DuX((+rVvW+?>nheLRj2qKR^#l5MhzbwKC@66(|P%C+~t%pt(U*R zdS{I7FzPg3@)6n;*^XK;ruGVpt2px416?qt^73b-qT+%v$;+RP(%X!2Omy~C$sBjo zG50Zk_Na`=o>KH?YLJgyMr2R^M$UYc+4BjV&e-}Wv#05Nr#>Qka+Gn(i0sKgJ3_Nj zhrjmrN=SP;3hk**HwP=}?vMP;QQ43=WqSJ~wq--s6l(2^Esd8y?-x$k+IbzJHOZfq zbjFs-E8JqKGd5&NrW-BT7KHRAB13MbyB*Onjalma3{lyTAvG??^o`4g3@JzFQ#QVd zm?A@p(0b684H@zgbslWlkRh#-oXndTDKaE8ZR7a9jgGEBjj1(Pjk85A+BeJK&=D737^%2=qk(A5UN0~jvNxfV!B754B=D1))_GBa7F=M29(g8|_ 
z^~;?t{m4{Gym#t0aKOmHbd;pBA#;Wfb;8yjz9DP+)N;m#jOjwxg>6PEd`s=+Z`sG0 zZ&>NppXH1VSyJb53$|hY`bB2Q*kf_oqINS;8KSZwLk80=_Mj-}vdIkT{ectT1RFA> zb1G+S$dKljoUtK8n$eSQJ8#I4dL=A;3%`;3BKANXytYC^#?#KAT>L#yRCcIL$c|xG ze7{VY+QTPkN8bZZn2;5{Mmu3bMs&UEgbCTuitdou%o8%9A?=8HOtV5d5s?Mc%EcUA zRDF-i0+k6_Fo|w_M*F18B(q=yosiftAq!IIu*-%CS@@d;Vb#&n*B zL|2OkU-w5>De+z=Dic&TWWpGlD_b^b!doj?iSqDE?&y#x_ZG9E3xEFYrI4bu6N>UC*yPD(d959apCl?3l4RYt zZm5tTD@M7YLUKHl)eRLAW6=ONR7i?hbjikQ+K~_uzpTfknjKP&h-BDDFDOJ)1s8Zf z8B{7H!!y8eI zGOvXV-QrMr5x3Dgg>R$%MSZZKJ0_GUq4)CwUeF=Nj$lNt{Ibt?&;=vX<$LO|xnM-b ze7@DGk4Tz`yXr1{MBePW=+sA~&i3<`jNyyU5!v%6Jt{$?raefk>>);(J?DDH^^wbn z>^ZUADI>DyvyM&~kv#|F=OcI2$ewrI`iSh=NpBXI#d=^!d&FI%PT|wszjyib7=14# z@ra47BiNujzuxAI4Ow$?r872U%<%@!*pMwpX+dc_Z^)Dn=oz+ULze7$)MBJ_qcWtx z4M{QC5?3Z-lNs`FUuSH{kc)eru^~fF*Ko#$3^}sP85=U>t5=+{Awxc_X2BNzbxNv0 z1v1U^*@EtvB-Rm?5jyfmS7(f(rXf?Vops8HY`H`VU^{BzBQoa9Zl^vXYrd)Plo6Tp z(N0UoC!>j5+B6}{==Fo+Y$ey1nI`t9R zb8NL!Mr6+?m}8gF5$J!R9ejjU7L#bGU}49N{0lRRMhv-nhwcx$6$AKNSpz+-O!13A=WlM%etdO z!nBJxXw}&8#ten<4$0Dn?n@hlf>y0EQSK*A45EwPqeOY9(jieM(3y-i9TG)6YyYht z0_Kk89TFwwBR6z1QF>4_+iI|a4vBJ0KX=|CQJPJ(q3av!`iU!5T|9BWuD~=pS&hDs zlOPN#6_VsGT6I}bAwfpc?HMa7B*(Dl{rR$@f`Z&mCES>*Go~F$(d#XHuLKnmqQWPp zR5PQKVS1;SR0&FpN`+*&JF^=qB*SPrgovJYmRCrIvB_?{LNfGgV^1ZM;kMjvsE`cp zmzYwinh#}|x6G%Ce+w(T>q%5X?jz5v>5vo;7j)+x65}3H32X0=9D_=_@lKYI!Ry`8 zAxSzGv!PR0kw}#J5j`l4rE^rGsB}<{8B^nWm*A0C>5wSn%e$jPqTCy??6Vy#5@p;{ zH{Qua8Jg1_9TKI_78|}5lo~4F?Js|MSP(Z~^y4AMi+p)>r4?WJ z8wBe@`qEzL%kD8gUxfsRA+iYhxuS9*S2ok>rX3gZOQr*3f+gbFMC4d!Z-K z(Y*pYF673OmmJK!@C$h{o0{WXzrtT6h{%aOhhqAbsNSet$ceY;b<}9TcwESdS0*^{ zOUxH?;`y|WxR4XeN;u*|PAuu>hzmI}=Wk=K@V6Qx>dN7HK3A21HC!ScQ~8h=+p{|4 zLvHN*z#$*Dc zgC3{%3a0+vpT7$JKBsl}l$21XB!$M?e)!N~6O`ZJRJo8Ve_nOOg*-Vo+5uPBaLxIS zG{Ag}f(te0(+3^-h1@vkV!ni5$cqEi@8yp1zUZ7tH6-pBspedt6Dk*S;*SR$aUmzp zUU0xAbK>|*c;D?0{ z=1bOqFE%*hLOy&=eN*ljH%I40?)T#Q6))daF66`wiyUzwCoa?F*q|}$`-PmiR?C53 zGAAz435U5~GAGU)aO4+q;+uz!x!wxp#1X6uy5qOrur6rzx6f5EVC@q$_kJT&B|}m) 
zIO2*dtyi#GJy&FDyn>}^xnViX)Ly{?Q(SqL$}5;{sw=W2uV7~C8nTBOUiwF*Moaog zTo75%ve8csl?s%&>fhSVtak}EQ#M!tVtks&qmPIE}u%4+N;{IpPskQO)3c0q)cs7~i?79JrTDz|ds5mKSdw=Rf~28C+4 zAVLb{sAER7F*^M1Xb~4Z*>MYoN`&w?P8HK51`)zvbC?SvgumP^E{G8R5?5RhA^e43 zbU}ph7p2xCD{88iEr*G;NVbTkx&lOM^sW$hm~kI!l?*R=x;%%%=^nYcTRwk^7*r1*lN@*pdw zjrXU^oCgUpk#_0kJjjI+V=evZ9_m0A?047!4>F(|bu!GxCHz6~JJWlT9G>tJFCy-b zQ70;hhgSY-+*KaL{eget&W|1s;y$6S10KYE7~Oa>8<&iGUwY%yoCk5gjZOm1c@X!m zMU3Vr{OfY>g`W53;(4EYm(No&<{m+fxypfw_?@*uvCe&v7% zF<#Ko0T1Fl<4p%Vi1n26mOMiD@hp#7oI*W*FlXI6|mKYBcf`@OV*FdLVQ`?x}u{>b&&;OC8aUiF(fJ-kpa z?_+vXqHNrCdDGDCA%9`65+U#hXrX68gvkGo)YO6qq2F-SpCSt)#QynfwnVpJvhmO_ zp*PbEJrYEC>KEN<<`H#ZW%xfD8kdNc)luPBi4gv`zjQ%_@NfCr1rfr(exM5?gn#9u zE{G8R=W^N-$?z|xtJ6Vw6fowX-Vr-Gnw}uUuh9)@Lr)Omm$${7SWQn5;x$ys4Lp${ewvz`+9csy zdEz`yiug&o1y?p;?G{rGR3e1=@NfRSSP&u9U#xUNgmAx4AKb9;2qE81H)$-05cW4Z z*bYsK_=`);h}K6h;eMctwLwFRnb9f{!vAHSxE^T|A^iJj(QV-o!heXm6&6GY{~o%{ zU_pfNZ{BS?M>72DvYHXS918y-JpH$Ntx&@Y6`-9`>9{NSwk^8R&$6pD>AXT&AMyP$ zrb+7+O4Z4p27&+m89SODm>Y!sC(A(z8U+2=NqcWl&%fGdLK9Z$h=Av#rFKxHNbZ$@ zt279B7FvxO(;(m;HF}I`5b(=orMk_!g3Ovz&(wdk}(Zg#d zp5~t|=0Z>)u*VEh_1T_a`sSyQ&nwIweMidC@l}$0{ zLTexh>TeI4Q^=s6vzQAR)Du^^UW|{f#T98fOUGS_D}hyM5ZJs9Xb{|D)JZe;CXH7p zS1W&BjA>GPg)+3Xqmj~lHDZBbGR0Dwf4$wrn^&R(UMZp$2Y3_X0Z?fW@ZxXB^+t;Z z0nbF55)>&d8U#G|R(o#{@QhpRXk@_u*<()w$-eNZ2~Bv9Bo<#i!>jF)5p$DZwT*i$ zV@FOL53@-s4FW#kB6(v9Ix=b9sB#JS;Dg}Z)aE0%Q5d{L>KE;j# zL2gC8Gb1k$;KuYs8&M#*H4EDf1c5EvluJQNWf{~lM`BXMP&btVK^>ggjsijLKo^Ok zr&G%d1holmw~Z(e)cR}e27;j0qB9aBFA&sn>$zUgGFeLVxO;qxpyf`Cr&XmvV26KV zPlMnNdCi^%0q!);o(4hg9MQ50SeR^d_{Xw+2`r1{QsacN>aLn;jd zKI}$&8U(!GhxRlGc;6ECGzfTCdc`j2VbGcz1iWQQdv6f%x>rnS=r&QW(5>-8>*+ogY0O#Qj) zIaH@VK%MUGwG?&wJl(CmF`y|*^W6CsYJ#gQi2BL3{-{h?dchaO{3vOlDGMTg=wEAJ z5bu4YgQmV_WD;62)FB)01AkrT^ zX3c^~?^$Nef=KVCw~2!CCDH7Pc|oM#JZ|92h|n|rZs?hADxT>J*?gAbF>jefMpcl{j3-e-Op&}$Q_070g?UiB`Y5g)wj1qsPyJIp0 ztFy|0XrA6-%pjxr^*dG!h~~$+tQZi@_fA_eAeskgKN+1-iDpvF1ERT&x<+~oB zB|1d8F>W_E^7&L*5Ye9>@aMvm1<&Y@baTRlMQ9cxdu*hYFMR;^rz*l=0*5| 
zNFS_k&4Nhpq@BC|i0BmJw$M$I!c}SA8gzKB#>j7Nt2F7n!Z-ft56PG&tyj3f4|X)Y zF{m_N;p`E|2MIR@0fQ1WslCE!Npp<`C1?=tzxvn@3i1A)I|>B4&=YnPc&hWIHK*u}xxf>h<0Cr?1UKCg zI|>B$>J40q@T-s!L9OwAOo|witx_PUzK~QrxH1|RVHA@CN z3IsJnV>=22^`A*x3c3O!rMcF4pCah+JjT+wmX(MJdC@Ia?XeY#=QCCgry~4Fm#hsfN&yY%kfLDy~jTQ|8 zUgChgHwbv~{B|@l;CXa!k|+VExsd_SL}wL2kxDo>;eZ$R9yb}(W1(8S4YjzxR|_wx z6@Rk4Pm@bo?u;KeVC3M5WAWD{6;#kGK7#J3X@F8-seo6!5%pa)LHWJnIk$7%6PEIM z#m+S2g7SLBwo!9R)4e=ivH7*RmU4T=2EV}p^~0EQdBy78#0BN_ie=rw1toh$zi-P0 zN8k={=nb%IFoT^biNs2V=_M74fL|qA?Kygi7)WRas3C>c(p7Qfq=xCGNqH zQTgPf17>l@RxbgXG`- zpaBl5e-rH|_52j}ApDs~B=vF7{^J=9{Dk~(ns0zZ1?ZF20EZ$_g-$f}#@7e)f;y0j z*3tSnl!8x5YxQxc25b8>$0z(mL6|}(+4}ZS6FSloP#=e~aBFb`KhcKmPVFOodngX| zX&)R9wf2kc~|7>WLcBw6hTsRU*$( zBP8m?*%TurD#gw(jF6}m3+aZH?%awQLbVu19Y9@VKOj*r8g^rQE0CxdIleSPqGp^L zVuVE1*h=cp87Jz-Jkn!bWM7OEm19s&wzmR_+7Yn;qncgPm zc#BhkcF!2uL`~^Q2Syy5s45NV3^+dWsZd?n8>%Y}L|s`*?_L&E>Pq<6w*TuAV@OFV z7L{cTX|+B!#cLpHOEWr~@`*w>C{6d))mrkfs4qWXGQ^_7>>g!^MU7cRtzw?f zs4^o+xAn38FwdwnH`B2n4~t5ZbBw`wgi~AG?j-%-TA(Xq0gv1Z@y`WMeRwUjxf(>RG;d#4D9y7Jfr@k zZDfc=1^VJvLo900x*3L8RH6Gf8e&n0CJ!~lq7rra&Jc@Q)SLP)`tcPpLN&ViLj$}0 zfki!fWQRC8hIUbpewky4MLpV^&Jc@w zw0No^7WL?kYX(?Zj~Y)i#G)SM&R~c|J^GQFvik8AKBFG(pdJAai+Z$x)^z$~sdl!=fIYe8H$@UW=* z!e{J7GXBi91l9kzAQuF=|N06S1f731I~N3jpI(g%g0lDM!v!UICF&GS44M_{b^R~Y z>#8aC7RyOkaw@&9&Yf;;-9l`5l2itUeW{Xbxi2f{sg z3Io#z^M+nh9gc4TgC5egG)z0`yds%s&761+gdK?XF*-NX!$7k)KF#zElKs#&CI*Uq z>uDwif?bz-oUz6;C#2WgLwa3X==F1SGL~D>>kdttx9ZxYb8EPfq@tkM6Ic7*>7Y`) z+CsD2l8)%2pxQ;=VB3Oj|5l%kf^r|ᮒpQ+47LA}Sm!b0`K974ZaP%m0%4h0Gd zo_{mj7Bu{qrfd{ce9vJv3Oc^HKMU0tDMnTPUU0|c2=RZzjqoC;9 z|7D|~=?iAFQBd`vY1t^~dWAn&s6L1nx-oTm;}R%ndluSd=;XXWLEVqj?Jr#v^!@c4 z+2ex3&uGa;LF0RkW22z*^=oRN=7n^AM@Z*y5<0(fgOAFi=zQoshW{=$sW1rrT{U>H z6z?XX^zF9rVUYS_Bsf}53Tx2%OVkh5hC%G#ZNhgFYQOA%d>G{Z&U!ppKa34}-;&l2 zT4NJ12!26oFKfe~_~%KxwPBF_eYCyQhC%a}oa0SNUyKc+Kk8mS465JcA3hASpN9^u zw8kc!gzleB&9?^O-$_?9wXH$<7g7&U8wTm`d5P~Nw13zF9;^?h1ma(xIW_@<`e&WX 
zw+8t?K?`-QDG}D7|C{J6NgIX&@W>~8Cs6@^c(mzpBpV^aZ8|H*rF0V!TJq5B=gb|HLgD1ik5fZ`Ybhy#Gu|9&e60F^)ZAqN0` zU*47jfU=K&R~OI^gNCNJrk-C&=+HX zYLCgJYe4{@*Ue}(t6>33oi7W=0yO$((lHGSQ0Lur0HOhaE^mC0;}R5k(OM1wT0Ek- zE}##l6)N0Ndkg{q{moAADkm7U06=-ar}n1?0Ghj*7GfFzsO_V-a|R8a?K6i1fWqF~ zIX*z`VMAM2kyPgNwN-tuOvON6@1b2oVvOv2cYNP>Ob-K*EqMzQ1D*ZjD<%d~`+j#O z25P(F5EBEzo!Ehak$vwrw6aY+Zvq3^Ez*N&2g-Z#J0=F=dteL`1O0sl5Lsyxk4N^s z$IfG7pu#Qaj7rZAggF0vrX6VUFL{_4$nn>-eAV*}ioEAprX7g#V(MG!*?}(Kv5H|Q z-1jbWGsic9fjZ}Sj%f!1eR4e$1C4(3Z6*dXJuBkOB%vmX@j$8jS@QBdtt-?BUuwxHWr2D4F6?hlT!QPA#{ zJ=rLz_oUBRDB1UJ*NcsUf)_o`MnS_bjbx*s;_uKE0G%j{aY4tI&0^V-eeXMIzprCU zprGX~m$Gd^%?lP}qcVF%Gw|k6cnC#5zk+QGn!c|r8wFKgvYCy7u8*q1Ldm{&W1fey z@13&(+ZNRQR2?=7`o5(T8wG`*J(i7v#-~tgMkjm4Y(V9k(4ICnO8s^Nbbjqo-^0AV z&R_rDm5PJX-`AIkliz&k^1T5LYG0b37d=0PJ?Q;k7Y*z|@eki&fP>~g|APSzsy}%U z6DPm<(4N*1dh;T``A~e6fjwydWm0i{dr<#_GYsrO|Ci@w;^a3U#;-QOp$4=rV}L^y zs6qD;^yWqQi8@e-8XEdIRDuhY4E#hbc$;nu=-WdzSV{*b`Z&~s(c74Qif=HW4bz0{ zr@&#opNsBQ>G>(Y`EZ)n@A^2@g{@y3j1QGz4xP5>+e2*_K$TJ-hw9Lj8kl;x{+LhH zhnGgg9AD%+3D?z!`+s5LWPRv1!2pN)P$u?#2Ky=N!`0~q_D~-_ylQ|$eONWg0EhZ; z_hlwd)`yOF8Q`!pDDkfW4)x&=TCYdvS#TVLpQsP-$JY7SIMjy~vGp?;C+ow+o0#Jh zIMj!fAqF_qhc>ZwDYl=e4@K%4*h77|xWfR4`f#A70S@)yY3ecP#a+xN>cd#-RqEkn zeQ3t@Q`Uz(ZJ6Ve_2EbAQRw@L`minbIx9Ht!XE0wJUZ3Yw}<+0`xb-op*}RfR|_{i z`pk6Q-#(7cCL-QTQUTaFOnpEXAkR#1Ny`DC6qKW>OW=~QfIY&$xi}Ue{vXfb0HFG7 z(sKZi`zbSY0rJeWa|R9o0$+-n-3ew%Sb(x$p+1KO0FwRzNwWq3TE3FBP6L25(^<6H zN&t{&rjy>$oh1Q)Y_}u1(69i#E_#q-0YZI=8Ved0pwRDp&anV#UN)EmfF_TlZf63Q zS5}4 z#viZnA9}i()+~wtA9DIU?bYM|e_!xpEj+oMrvKvqA*Su$XZ%03v^cE|6Z-=xy-e4D z68}Gx^k8p>KM>Mq&oKTUIy!!U=KuG>{GjL2Qr92x|LAq(rygwl@rwUP=lkao41b`Z zJEt-J9}>D?CFB30po2eP{67S=)#Gvh|KX5+ZVBmUb)lc@ulfI%pyy`+jdy=N;AfTcZ=~k5vBA-`0DWCQ@gjZ0~Pw9q%jtyw(OPN?Upr-ee`X;i` z4_-k}yJgn4ArMg1GEcA&(A29`#&x`cs-B?EpAG`L`r*qgub`}}8fqi@V&u@)yGbh( zc_k1~*N&u?Itb`%i9;-}ps;_?*Ftn`Kx5zgie&>TyJ9#C0iB&dcZw2u)dzD0rER6} zl|Vpi3(%Ta#|G5)+*$2$2piDb-L!twu>r+hSdwJ}nme>33jx(_L#w`EuV#dFcWX#@ 
zYY5$amEIdo_H|czk4A++cpuK-yOj`<;?)q!+l#jOS`bKY<-9yA(B4$bco2y1=XrP# zsP8(eY+7T1{7%iLZ>1k*4Eo!hszO350s;XpN2Ny#lI#`BQitalB>0~fco1mt$JKZc zi16w?`jEaDK2-SstzCJbP1PGc&pecQOk{eVH<=>RBy;9jlFak$y=TrWnM!G%h?Iy# zWU44CA&D}Sk~H~gQc|RTYu~#&-#NE)@7>?t`~SPv{`UUXS>K+{J(Jq=Ah6-KGzpD6 zVxkrJaAk5;B?yf8kE6W6;KciT@gT6`XMf~D;KgGHszdHX_hH7jF#;11xbf9}d9A>X zf1;}xaR*tn0zZCh4zCp$vgdxfQY&!eLG)^j5(Jjq;*ofP{pUZV*5`8}#e82n_PpQX z2f>x^A&XT2$^Ve*PP?g!AUJb%vROqCyg3I|KSdDS`RG%efe?LyKd&K6S8NFmJ^f)$ zOY%RYx=@KyXi5HuR6UxIDuUqCDYW~b2!d1plfvy2y!y~mIrw!V@e-pdjbdJ@9>od z4&I;SxvQx(E}}iy|0Qxg_5Ps(d^lPoP85OXX#Z2aJ=B2-NwPd-H&PX4`t!8EgCpfhM}i5a3~I~Ur@p=4%G+GcbYbmzmyKt=fCgo z_>rg(b6<2@R7J|~G~HEQ3yCUGYqJ&-bt3o8T1ZriPiaQV=_hK%I@)*OAW<#uU#=A` z>cy;eY^3~7Q@2`LNYspU(zT$&m21O~Rv@vm zsE~u*Pl3ep;XUto$xU&$l$Os4lhXjsmY;)R#O5HQPmnInhEBiyE`7vL+T) z=9OKVSk#$&8fjp~_bG5ta0i{Ds*iK{J5A-Qx6*7E)h7EckxWE=IYEk7~AyDs=uiO)Tord)qXzs6rrpIrxo-Z1-C1|(^QA{9C%pNqkMxkV@5stW|bxu_2~0cnpo7M z4R>l{QIF=(Ee-XtlJ)3Lx*NyCV(nAov_{O~?=+RqNym)5c2SRxUDj+D^=K^})v32D z>(R`z8tuw@)Rj(=cd>>loYLhGaLp?gnt#J#62w7=^=bo0CqWAF1T7lfry-N{@KYEBW_ z?@|n}q*LLFY}}SmWlm1wf>2&wrPpE=!$Vz}OWINZ$ussIG~rhSp_0_Po6{%J5|%W% z^KwC`9iKkJ1)*rHqXRRAKFKroS(`X52@sYo-DuN6u_e@pblPuF1feiwyNMefD#9nU zdR1%*rC>GLn<5A`VCp_jc=C+B11mgv#-8{dwfo3WC z1ZN*pkJ}O~z2l|$pt&J0_k5>mTb2td>f7t0_pkgISo!^bGBEQ0D*H`kVqoMqt!852 z4+s z(e_LXjQgc8m>Br>^xGL2`G1w2II+q9t4!*|YzL-&u{RR~&)z?tiGgJ=eVmDbV`uDQ zVqn;vX`eFMGZ&sld%n}O{cpFO;yyp{zVGEn!Lc78M^HuK&4a=8tg4{IKTpE72ap4+ zqTt*0A7J%Mv<2rbSdxu`cYn2rje>h`&c{Zfuh00Bg_8eQ*_jui{J+YoC)jPl-7lSCqu}oc=mcFwQ6fS({4$=i2^2iu^MB*2 zZNcTc7E;xV6RpAQ3oYlj2DktEYCa5hzlAm$l!Jud-&dU1n*2^vA391?YE8i4_;tyDMcRyRHj#-HnRJ`B$P z=>R?q*8lQvd>Fic7Hz#MB|v_ssVg%``JJYe!Mq>^4EBF{EFT8{|8OB6h61phuHGvT z2`a$YWBk@o0&Z7w<+Y(2Z~*)4y`AQGVX(saRJXMx-?#e0_mKWQljzyAVT=BmJ$sHC zmX!fOf}basQ{fEV|Ip{j=~Vc_l&2T^mBXoU_<6&A37+gug*}u+y?zOvY)*xp?^uW@ z!KtvLlon5BWZr*$EhbOahyf#q_3Pg+GVgvKj7R4Ez4{h- zkoR|D=RN*7i+B>8a=EDF1nDpu&#+M5_lBPL#^QPZo!q68`+oxdJBN`u_j4fUXN$UH 
zqsM`Ke`(5rXZ*9-266Oq8jI)q6WSx<_eF3Z$9wNIj3dD*d+cX6$BM|@{?Q~VM>HSz zb0D`r&>ni^c&Kq8x2H@v#F!$tM|K$WMdtP()op%XWN!D-#8Qs~p8a+vyDu4$e*Gt{ zT$6ljwb;+Dp9lFpM|;WoJjn4+mEAt+^B~WsO?jkWAM?x`w8ls9;935F=COMHl74;d z2U_E^CNl4rdc@?3_DuSDkoTWy@fXxDH6G;s+t~V1<3ZlPHjPK-{kW$t2IM_{JTmVG zP8yDLIPVp9m)7Fh8Or-n)agD>d(`Qqe=4~=HDb=oyI@|P(YkkL-$8>%45SIVpD>qG zG5trIggKpx6=~^aA;KI^#ggT1Mc5l-nBA$Ef2mEvY)(aowqOmrEFwf@xSY={!a0$$ zxlF6Tpa^N27)3UILdYgw8SfNEYH6)wEA-8D_x2fu!cE>O+>Z%}$Wc*~1O!b#-kwGq%=~|UxWRS`=y=+AW zcPT}u9R`v0c3KOGTuH~yhGf_Z%0_D|HrbX)X=LjgbNTcXU8{L%BC}52Y(#TF$#2&No9}UtRrD~3=-EP(hn0v*o4Sqt% z<2sUu1wu%q*0(kzgiMlp+9ZTjuJ#lgvoVy#rjVx`niE1YIa``_**_CSvB|kCCSlyi z%})r~)TT9CkZhQQkWC6Xiv>c+rs~%=yA0WsU1Ez+%BILQHVGk{TnEhv-wqX`W0<4# z#k4!OQ}IbU@vq5Q5e*+aB7A1yXM}EM9&tmo!3g1u-D#B(${A9_8e<=fBc#*gW~+?Z zoN{xUTV=#<_iVamte2U2k?MJ(W6T(%)+$#$enzNgMr*5#P><(3T0y<$#t8KcA|o?p zlr;YHQr8k4^S2=z*C}+|RYiv-> z_*vH2pqLDLBgM3*@S35&T8XWX(?%$zC*8v`iA}IUCG+ze#I`L`AxkPmWs5rha24Wb zgFII-l{`k3BP{_NZt;Gg~Z0~A~4GMWB zyEQf_puw!#Eu zyuZc@6I8ML4=YSi#G8w)FhLEk-eQFbN_dgphT!FHL!=5$(aa{2DPj&1p#nb>RB&Wk z)ZTHJpn`qmgeEaT1v{^>5))Lg^*Jj{P{GE{R+ylIRrI>|$(dKNh228C??pS9SakTtY+;>Q+V zMSKDe>F>0~e4xWc8CTD26ZWNuMgHFC(XcFDbP_gj{r2_qFc2IP|lVVpWemV%H1YKw`rh`!O z{S~vbtmxn-|9oPP4np~DtUWpi<=n4!=%i4-r-MU{R7?s9<)e_Jv=oj~dA}Q7iqZKuUelr> zntJ?o8p`Pn#x79|BP*2)L!x$%r z)FbxR282#?D-EJmSmZ62_<8oTK{-9vx$&8@K{Xv}S!07@T6<3ZqORzoJtO{}qChpJ zGai$k_7B}gt8|0stj-`d!3LGo@a+4s*)~TiWXQ;{U>R$`M1skzO7&0jCDS2B#=tBZYiEZWoMXM{@n#lE!? 
z-rK^Zd?)hDIua~Xayb=C7Ti=THMoaln~Lq zh3!&8M3ZORrG$tk(vGX~SV|F%u4Wo#_^%4YzVN{C8T6V(E@d_4S+^@7KOF?Lj5Z34 z=^&m*UbGh-g!8~jdvp-Z{O~Wx;WK(LpHpbh1YWp-gXMhfWG5>w*bg`2TV|Wog|$ zm#$XKlcuP18$Tt)vhq#4ln~5{v34mTn#aGkO9|oJ_o!V;h-YSgdz7*)jicjl&5ZkJ zvzQ>S?-(;mg=gAN2@yT-smB4jWK%*!PkZXI4N8dUNzXjd1|>wasGQxggotK6Xpd5g zXaX(Uf<&Z{p75@nC|QHTbDN*ax={vQu8E|KxTNc+gMd!crF~;Mi03FRGK}dUoc$~9 zbrzy|tCu}G2LfLW3UUU%3oBiz3K`0yPLYzit$AyFq1(8vz5qJkJcJYpv*h~fPScBmkR-S^v}f*7_=)TN@04_QKf zrP*cBU7a}Y;HQHi&X#v0Gp2(mzN4L7V>$@q#3S~igE&6A*B%`N@@@lrbP&n*R1>diw;6L>Dhx)Sbjvzg#6J#C|`KyX{K}#%3;q8+>{PN zd5=o4@z_Bqudg!6Z1{DaBbaLpcG_XCQMnfRTy=#rBAE(z$IZ`|%c-11mtxKNaypf- zri}!1z8p^FLO;60XU>=1shpGcPR#kBmWy^Lo`QsNOzVLq#918hME8#D- zTey6waZY|By~WQ3tt2N};(|`fzGI0C8YxXDg=T$(K8ny;lo=Pak-LMXK0+6NrE7DA z|B=9>i8^mb<%%+saQ(y21x=(pV~GozxaJm1T+l?Zik7&bi7U5S;({jf?6t%NOGYy+uCgqx?CGe9%a7+A}gAKjx2d(_m z&8WZrc_#EykB$?A;)~}aem?j}`bRP2-6`v(1KXhoFPtV9+E{nzNzjyr(>s{JwxZiJt@Df1`iyR>H7w!p9@+U z(#!3u85eZYv%DoPXrz;;<{0%6`e=T}Qe4o+O|)5PHeS$0{q@>$h2K-~Xd>ggm|RqI z{;LT;7c?=5?obEyQJo8#=<%GTxS)v+o*EsX3H7+3iDq;HVAe-yq6yVMGcIW2#=+X- zl@;;{@txiw_%0pl%D6HvS5j!bLLL!oto%IC!?2PTc%X$|v^%aJ4|H&6bqn!81D%gq z;6eUdJZpgmxxeKpEuNPn^FE$#C`OKRtlHq`LEeWih#HSN5AxoJj(zmwLEgKzu@DdP z-j3#5`tcy|t!V#4p9gurmDb!Go=K7Of{C6rpS$i*d*=OI$o&}FD@rxuLjFh6MHMqH zXrS*UR|95T(829=0%gVpEwtWkDK6-t;SOys|NIU$V3KFO9gxRZ&#|8ini#byW?brA z(8NH`9*j|3&_sV)ZwKX3JuYbC4!Q$u#sy8ZZD47%iVV7#|C%i_2xA)EJTvSj zlyUcH{m8->u{_e)L{qe&$bzPgt~7{DN@Mk&n2|BaAdRKdZIMA557RXU!)`(v^RBlQ z8KiOVHd|zn#?%yjvazAr#fKr+xJ|gmRyr-Ks<4`HWkEzko!f+Cyo{inemOcdui4aq z2$ER7*#;5hu!Ig~4I+XR9(%<`M3BM4Q8tJmff>E^h}K1>f7`66L@^5n5y|wgqiP*F z*bE}1e+6wz8W17AcOT)Y>`0<&wp-<403qVQ%v>8Omq{HSoDak$RLZ^bM(o=%L0!y z-XPNpl19vjn#kmwVZ-v6kuk_1ja5Z#kwF?OyW1jzG@hpS+YHAH(s;0_t;isaIXU#n z!nbtA4&?Cg4&=oeE?H8{l}3vu9eVb3rwo1`7{*Us4S9wk9th%eV+%ac!ZA9*((9Lq z2Qv6zkcD`lfVb)9n|?e<{tmr(!c#m?-v6LohoE?y}j$uw?@tl8s-jD;i{<4=L2cGXwD;shk#~)H|^u`1E-Q9p4$I8guUTzze zgQ~JWw|)-f_8iUqBl{AcL+18d`Y(1p4&?T;2Mzin;y`Xcp1qC=-VLE{tcne_7@@8=(i8IKwd@_x3aA&<=a8QLt= 
z>zB;?7wasG2%^YVtF+e7ZeoHlcYhxYOk zYy4@0D5sN{_&*y&Ih@2YWE}=w%kCr=qE{#lh_X3}IUCvNT7r|9jo!{@MHIGaPx|ZA zaw>?3rqvPYCnA~t>N8_@O_2!cubgFr2?`cuMU_>a>1QDX7r;lIo{GQnMb_r;te< zLdS)XV-YtBL4l<9q@|7)1(Mp$FbX8K)4OJ4fu!C>w?q{?k`a1RJ=Q!UzZ*rBm@{!- zV*NBo?8KhtG`=sAlV!5b0g8BK%!U8HKV{&z2c%i#Yl_=61-%$83mI2Q1ppe+~E{aAhC1*;zmJJ zWtr5S(K`~w-TV|t>ZVIkqfnwiQdj@3Pa(zvNqx4K83mI1MD*c|+Yu23lKMz(vnY_% z`GdI>G)n?)l$6-i!p z%$x>^e$+F;&>3P`ix(EsiN-%`f;I2@A|_1~_4;X$@U6woX^`-BbjT5uaHVLF@Rwtc zOh?8@la18n>HrJ5?Oo?FqWo>Wv0_+wSEQ zKMyi~eug_ZdOV{L4^n=H7N7b&$oUsfS%?Qu`C*!9=*NS{e9x;E;z8QqS*XQBw+F*{ zPyGAqm^@xf_U4^$I0gS>x7TSGzPtQHUQ{yDv&q|bxAA1Y;`U&#AGs#*H+ zAn)(dP7{Y`L};-kZjFz|O zcAg;z^896MLk{HllUocqkl(%i*&M4PbDNzG7$e6tnveTAklR0=jT%RM4w>5@$fNji z2oB`-)HcI7klW8D8*(7GN7fi}Ah-Jtu{pwLiQ@LKtkXH^?S`ORw$YwlKM(SId6XNA zJ`eo*VoO6FQ9qF9pXyrRL9S2J^P|@<5fAcxj5c`nd64rD=<)%Fhi-ezyytp6=J-T= zCjC6f`=uR*JaT?^?oJCl$otPPS>Qq5zospzpmB~rJ~Hpe_gRPsc|UMYi)T&9%lG4X zALBF?b^2C1$W08`6s3N(>r9*A#4;KzxtuE39dx77Wy$GONj+lBlEbM|p`0;GcBe`S zI%e1HQ8uSae%cz*Wl3MTP;&-9+qGu==;(~WL&dsHFnEfa5+ ziD9(UP(07|rWi3G!|Ah(7?9nn^cspzFOb=adyQg1R!fp2>BNAHURi<}!}F2ZY`iim zL$Es&gUsgj-y1O?o7H|YVn8-4)8Sj>NUHP#*({gSC#Y>kKAG0|l~b`w7{WL9U0Far>lAgJk!i z1*$d;vfY*Pq)mf#x1#NFZ5rhJwxK#>Lsvj#!l%=ljX}})P25emp9TpZOY>JP8kz8+ zFU0Ir+-L+165fm6tI>`I3GYlB{@OH1c&E|kokGG}UZq1r*FZ9GN{%Od7WqWS1WF$PiK$sX~(83mF&q`nyi65NlffL2G4+-`ZyqCjHX&>BZE3c3a& zlX?$LC?hGNShk-6Nu6{<)Q-fZkVzd?*Ng&5?JsU7hW~t%-4PK5lG^h|GYTa24l+fp zD3H{)bR|=fg6?ieYo0?hMnn!K*)&7(oG+qc)D32!}mCSP+sv!;MfMR^SUG)VZA&M~7=qCvtl|2CtM z3D1ZoI&fk5K`D&wR)oyG-S<)n0XQ?=JbZg^KZRco*1g066?<~hp+<##IH$I6rF zQ$8o@yJeh~G6Bl#B)v{WOJT?bD36o$z(d@Yayv-_igH1@oTU2Xjf&yrbdn0Nw3mTC zVV3vR2u^qcgbMH)y>g=vo&dq`7u?Qm3GO~`UwqI5AuoS3}SSzlxR^YW*9F{=wRRILd5K;Nb2% zU(vw9;2(ZO0|$>EOto0Of3W$R$lcU&aQeb@E~Sox*?&Es*}p7|C;Wa3ZLO%aCvdR* zh1~oJ99+K-t*zDDgYjQiU!#BU{(R##aIpVH-293Dp#q$##%wPWI23_5p4Y&k4lFLO zfkP=6xI_boYH%aX57hD}`iFv0h^x;G;7}7zUu4E9a3~8~hHKzZ88)@kz@a!SJgtO# zI8+~YhU!CoQ6KhE9!dw+hkhf+iL#hL$Z<}6Q6ZMnoe2&SH6r7Z8!HEiD$(o>EhOqh 
z8S-D%Hpil$s1!d9X16Jjs1ap7tNK}sd`S}z`)Q-N7YPE^#QFpHv67}PXyjn<9kO%qs6#YaE z`QMxDHZy@l71=>st*ZGHNYs&sXkNuZqLK`*p%pD^$xSb5AyG|=@O7JkexjayLv={C zp8|=B^5rphv;v8mvX!>XIBlY;+)q2sv5}95>dM=px^knaD<9BXk7ayyCH%GRe|L$| zr3Au`b8ZxszgoHD=cl z&2~{`9^0;oMV%Q&ccyqTqtY~3s@X1TO|j=Rv8XoRk;kaVoP}{lz1hBBqg{bT#aX12 zUx7u<>7SyZnVn4tC9@HSX1j5RCw0vC5n4@D+kH#fyVo{G;oYlml9+jh`prEk|j#<{Db2QK8 zVNs9X5vN1plX4yw_2>yYA>?6Ek4D6Pj-vM$^{Cd~;fG_j~h4UTJKQIG0f(8Qu171wzdWIg(Ri$=Qwi+c1%?DpwPX-RRa%ZCbTOkN$hc{?UzYM-@S+Kr24yf>3%U%;SPk zb6PjxfWl|&DHVW`1w)}v0)#5_+xuJ)%FBB+rB!SRb>*qa+?G&KM)l)@P)Qnh;erY~ z$)!4QK=O?J?Co3-7BQZW94d^SJY#=|POlU}C=-L|C_)j0`f%f6ZlCfyRSRFi1)(DR zJevcOXY9K!b3v#9kB{SmVE)5@=Yrt+{pN5%u=|E%xFGm^iBlYqJY)Y~doBph{wB?U z6=Wn@f~7x1*G;2>=7!c5+d_L?wXxTg>d+2kQQuxyyS81MH))3069_ZTsV(-n4!!4g zNeu%he_;p{10$bQf{B5Tx1n`({Mg2#KP8=%iVGO+2n<~OcXB+nc3|S~r!d=rhd)D? zq1D=fg^&K3*$y1M329rc9T<3Ns?PB-Sr|9$Mg4G^5u3okzIXn{#9;5~5xP5~78}fa zNG)bN@a~&?F)^_2B9AdKaPD)38JJ9r8}^jmnajk$x1Y?;#K5)}(P>Tmu@x5q? z(0ZY`&#znG+PGPVTiZ1UWdfnbIoAot{+No8Dhi&x@>e%LRTNx%GM&n+qTt)@X#Ye7 zH5UDXb5}0Ticp~7-4|$ms@fLZd+!EzTk!8C53o^i@Qev;6g<4yP&NuKUS=Q*m4z{c zkN-q>L{!F5py1?h-_33dUj8WU`KY!9Hy^r<-4^`(mhagpIC?R$5)8YHYJ~9gU!P#1 zGBJj5_4kUfQSkL`G{;eiP_zYSe~OQSw~wZJpxPGPy$Q|ZRZ;Nw(zI`*g33U@;P5}v z4x}my9{<)c*2s&t;PQ`BWr*I^qL9zO5%T%E!soM#sdrdWC44>~dXM4Xi%kR@=hPKO z{|Q-}G7L_?YC69)SpAfAJ`7&pp57%@3UVy^2D4A3Eh}Xh-2P8;DrFe#em`x>E5qRT z&tAhD645spepYTi436LYPd*HmfAx7DEDJ%x^M9thZ%RWVU@-mNWCzMHxc=jb{2*cb z!*Aom;QP1IeuZ+7F#h5%^IL=SpRdY?!TP@?>sJag6A6I#?^()gO~7FO&s@ca!TrZP z&WFMNo6%HGxo`0QGPI+m1j|6*Pyl|SC8aV96<`<5m6Ty90gur_IR5cS9=bdaM7vGQ!Ax?_;zdqK1LPu6dZ*4or4BO+D4x zfzNiL5sQzJ``%SwW5g!+z5k+>f?7K;+z&f5+kxjkM`b{*9oX*pGR$`1ysfTeVqm`I z^D;1U-}|?MObjgeJz7h|9|O@2T==Q$neD)cNA+N0;Khxpo~XqJJI>vg*-k#EYN@xG z7?|=Obk!h!Y+^qezP#^MMmqunYksyG69ae7TEoP^pj(imtMvySU5;kq@iB7W`&Z9d zQ(R9J7&!I2lNkLG7?|~w=i*`(M(%s(rgNj>|B3D2?GQrAeeX}{281dKp8aw;H$GJq zTzkg*Y!rOE%QO~B?t7;+V52Zo{yQHV1@}Hkix-vA5D~(^S8Qju1qYw-92*4>Z~X`x z1sAXIAPXh;z0cFxoyus)eeXRh*=@ngpMHytf}4;2hK+)sUt58Vf}=Mr#zw)@uWH3c 
z!PWoV%|eMCZ20;IH?dK0_GL6PQpvJt3*J8NI(A!d_m=C~DENE%iY%1e_x{au8l~W( z0tJuXlY`wBT>fdQ4AIAs{<~p3-aW>=Jnv|?t&(np|9$U@aPmJNPSBi89S5&pw_2k; zxc#iI8aVj44IEtm&`=E=eE)^pG;nbKNi{Wa@cy=y zG;nbLN;J1t%Y*#ShYN|!_T+y)?5(4LLk(EcRs)ABaM$-5IMjg*+OJa|7gT~~&uX-X zT2O{Ifz;bWHMrD)*`D|>1=NEBG)q%!PvB4yo@=bp9%{mPTED2bhpNz;wyxB1s0$Tn zuTl*s|MTH@n)9mTP#fN-ilUA~byz}c4Kce_k zORD2gA7+225hv}{0S5azYD8?gx4_D1WdyVn=VX8+9 zGx*`DtAAqr9+o=w2;=u~)Q)=?zlWhF(T;5V{^AbGsk;^VvoM~p(@T39zlWP1_>}Q` znCbHKjNijc$CqaO9#-0l>PPJH5&eaemY>SthmkHqZIkY@3H5x@(knm zaM8O5Gky;fZBMQndwfJZ@X#uq75Ovpdt5uYxQp?7IOu`8jNijRm#>Za``ID?+!FH7 zTEagI(ww%i>!05Db_g-fsU;lrtD0^LDhPP!rb8?QTy!3(PNfa_=v{S`+Zc;}z)5S; zlNPTHfq<9he3R7%-1OLD76N{{W;6={N1fh{g@C7aZplKxRSOJfAqqLk$>cNfMmGzi z3unFDTDc8@fVUp(!9u`YSBzsJ;I9*&WFg?Nt*K(E^c)^rF@e;~$O_kda2zc&?v=UWm1FpM* zZcqjz=7fBAYsh!66~0@T_O^?-zUzCBh9KjdYlZQirfobW2%L8_{WFCU1lGHNHq(?K z@ZO$V-4Tup8H>Kae6O9zgTQ@r(`koNE3n^BOYvHP|E~R=2Y~_4_?!oU1J@_rEA<5y z+=VvRlpyfnlzr-fWg#yx;lDrPLEyq4o#H`Y!_U)3ecZl?z~IA^%JW)*5w~r_gTRR^ z-NS>xil1jZm6_-Zy!Z%Jp}5@_5Sa1mU(^E=5V-L)nprEg0z2+Rn;J?G_;K=`>Vah- zFc|V>T0+HbML^)lAKlDr1(y8$#(0qVAy207G^G|<;X;b}KFsBLzlA{ZJ5Arv0$ULT zU*7Tsw zdcaU_OYrGg{kb4G_3d4`Ab53JM-E8*9aKRl*`cYw!a&IXkUC5&Vnq-fd*w`Sc<}7W zFK|I{?RFn?LGbOwE4d&z_e$22E5Flp^lwf}0tEM7GoA~Ae^0-_1;N3)(DtB0az%LX z@RYAOEy@3o`iCyND6}NM({zY-=M+J3@|ASmAu5ROR>(UU#b`!e(&y#=_h|?x@9cg1 zo;w!mI5_$?&zZ1tdocC;OK7wQU++V6JGD6Fos8=iFxv~?*-OdSMFR(e|C~1T)#HT6 zKR7|7J=pwu&mA76Jc$0m>1Y3}(H_jc8_j;z`v<>Ir$vJrE_`P%IpH)jPJzQL{o~&> za4`N=1vGH*{wbcj6-s%KcQQKk(r6DApbFiAS07Ilffu>YyS$Ted@eIic_-uL>>4;! 
zgPAila3}~}sa~m#i-;37A#DV+z3`okB3f=VageAH+v!xAgG7~BL^CN45_O_q4|YGp-)TyzS5^y&l|cS) zwUDS5Z#^21w zi;8s*X(3TdW*5~$qMCHOUkizPlKu}H8U9XFQZ}kYs`-@PY5L?#tu|3rR@1(D>^A9J zOXKk$=GfOS+1YKgjIXZz_kAJ4%I{*F`9Twl+VTcvhSx5t%Y!#)wu}1GpPn}LcEjJr zNV)zMjdtaCn$B#{#1?l_uAHQaMVaZN0$O^@7~Sk#-EG+$NE zZ}__y$vN|Cv@5@hajdi^7FB1>wVGJeo$0iB$LlXDPiLx=JS=KY^6Q#0qx!t0;~Vlj zO()iBv@5WvKtxSyhO(`@vu*X);aXuz|>Md#*Eo}PoA){9-XDru%LFMV^NQG#MVZ1Eb7tX!N`y=>d~fmG_j~h^J4o(aDGL9 zQIGCgs@X2;QLU#mu(BTITBeD`8tC|XO)TosTH32s*OiDF^=Jm|OY*R&M_tQl#*BKD zO8dRMc2SSk>iCANM_)A4h*{R74Ya4D9o_2J#-1nI>0_W^_>4U% zcO6c6@{Ik{m0S=kecd(jLG<5~^8YHYrg>{o-(DBJe?=Jif0aMIrh3Hj|qRLDLL55bQ zcEbNxnUsgtAZi%Q#ZS~`#3ui*@|D$bG4#)qUWAhWSNRJS zAypJS`$;kNVv&#>pi^I z1Ppdxyd58g`TF;?C8QiA4FC0P{MO+3i_Y+2u>5|X@nP`%dNik2>Rb3bP03f#HCbgC zT>s>cydXtuu>FmB_%QhX+%!H6#^2)(J`B!Z^Fclg*8eK=F_qtG`kLleN(qqPY1(p` z7o>n;W`7^8RFqqT{rCBU-x~bC?jRm4{GFzx{4_~b3R1u@%RfnVLm7q=@G31RV!+mh zYQO=!Up&ESjxt!GI#VzpU zbW(5FWPvA#lUki>wqC!+V|=nZsg-E1rq7ejNx3k_uwNpc1SjP;n%#2Z$%xE*hea{t z$r>?WTw|7H_^0Cj{~``M+U3M zf#y`RD781FQOW2Yvzc5|c0f#>?q@@$S3k-6eewG&p4;1LT9)KntHpkH{XEETd(wzL z4|3d!_VV?4kmm-p3#HG4T-ScW9b-Kn;j?&_tKMaS2RZ+%rWVhd$T~fc7B)fsil#w7 z5AxpKQ>O#;qsD{0H>T%OKOW@0?WmaJqZSYH-l(}DkIZ{r+6&R^SGZ26rqWprhi7Lf z??+LmCpzs#oqpz4w_i14&da-CUY^mqcV^!~gGUUc3A&#U0(g)n`W6VGgxNF`wLl0- zOsHb3%M&n$(8jRGZ4$y9deKI^VV6aO2~KJc+85U%4FC6&M>bDwjoD?I{zj3FpAfQH zL@Vu}2$_VC&CK_0MhMxAqZOKAgi{NMkbH6)=np(#ZP8Mr2YN zBahi4gEacml9}C2e{FzkEU)iIRwH0e$*c{K$CLCzmIXpc_h5ke|c zuCW!NEH=Qo%{1Bvvp@)^$)rO{EyCR)*&M?hWs=iD%u&9lm1j-PifH)Y z5#cinKO=N=VxpTB8;lUnhx4p5LOHuCSYw=safEbU>uQw|lb6+W$Ha6TMT`*73v>#k z$2c!iJ>SvIk08dVwaQhGpAqW$^yir4$Yq3j4uH_EmvEKO|U^FFVRK>Kela=3i;uNsIf(zF1QNu zvq2#zcUWVCLJoasjSUKUcc3*kC}hWHR@kIMwmxl*4GLLH+XaydiJ#MZBP-C4bUU(U zz@##Mz3?+aCEtAI_SOa?lyb`BoEBn)T0W+|FB>sJF?%Xn>$Rw9@RjYfUt$`goWHEg zWfs@cDFiBL#@m0q}3Et%ka!HJ^tFMW8dDx`qNR8pDmY@R@UQ-hHN>Vw5rvt z#5M^%&EcdK_`w=mb|)>nxa?*=ZX!1L%HB#s-BX{9=tQo0H~@w8jR7obP48Ha_Gb2SW;JAw1+- zs^rPB*FX{K@H0Uj*W`BNGGT%;lFwLSf^Q_!8x*=RO>$abe_|hwOK@AmaS&I#dNH1=M 
zP1GKELs7alXEJW#`9xZtJFLY9bzJh)Y<3p5Mk=JCE9Mhfn_aeQ%Bu)>%gj#) ziS(Iij}96cmd_p?MAGpidvx%T1~=QGn}$?CCN1c!&L|av4mxR0JA@i^Gb4qPStn*^ zgO?_*Q2cZdN^h{qHC<42EIJ5f$Q*lg5K0HCg~riAC^ysQgfSh2a#KA!os~klo{rEo zqMH;F%10qbX(=3KT$USMiqZKr-L8{owG*=Q4Abqj}F2Z zb-^AT#BuwZ_UIsxCaHGlWC>|o*n}>;we1ng9fUG}wQ-ytI?YYHkJh}VF$zY^N*0)o<2{k; zSxW^ZC`MX?L{pES5$f5hdebXDqg2mhv`b<=`&@6 zVjiJaJWSc3mgi|nV9Exi+(Ywk6SnCXH+amV;s#mR9I22Ep8ffVm2mvZ6Bs_5zg}RkWFw7R&^?&= zKH_JDO4bZ>Ghu@fUb3^OHAY#1o}n{r(->t1T2;krj8M(o2G$s51zOzFkkLO^g?cv8 z8#h7Y82?%EGeSLY(Yx9<7@?lcM`HGx6QfkmQd(=8#wgXZnr^n(V1#<+y<@G{QaukH zG-P}$RGp4v{xZwyDE4X34|f@B1%4ZLPWc1+uaH!M6|1$-Sj|22N#)AhX1NSZ1>FypFv-!?ow8Zm?tY_ zo9JeO4lb1KS9^32&q>d*tZ8%*&ZjhuH|{J%^FLZQ8q+~Adp!HXrk#aY4%36D5ncF? z3p_&kV{O#TDwHdtlS0Wc#vUDn^5t@SbP&o9bnp^6cI?hVDBna+1)B~+d7pNXjH824 zJ|UyhpbP&ef~PE9q`MEbVxBZb?!&od$xn%?@wvO)>>5*wN|)0~&qWWL6-tQaD-w+r zN(kpC+7q!t3GwXfYj-Rmprb`hDQPw$|cMOf_#6&KKlU|*+VT|drJLy+=>a)TMHR{-j=(0KKrEam;*#sxOcy1Fq znzc%y)bkwh2Xr=aA{^0KKOKb9cxlY&Sac9d1=@ZH%B(V-6iWIFcA^t>5K2DJjNfGJ zq)_r^+KUcCDY3(ZF8tn(xL-FLx3h9P={L~DlE@oJanj(Yf=HSjbt5yPf3W6y8hi+7_gf{U!Vz_B!RH`_o#ZLt>+(xw^ zvV$xth@r}K^QgqILkzXL+M$9NQh&EY1u+z)rJnX!i4H;xWk%^n<@bCz!!6}qs@V7I z!mFNmC8TZxdvp**%LIFL5JuHncIadYsh!6j9RyOCULY|_g@_I!DOLL~M+c#lTWUfVzBzdW zbB%kPc4DqEwZBVOJz!!LB^B-_ik}ZknQxO1QW@0DVtmj_=1rFPW@G$Na@yZukq>I= zkz|n%a_L5EXr26q|5MAOm%C5Lj4#Uk)YXfh4|HfSC--OWg^-#!PS_yxdPIWCc&T)gBt+-n8b3rQ$ZE-;-L$bU5 zG>Z!w>EGHC7yKmaPYYbLomOH&(}C8J`h65!(8XP}W60%tHBu8Zo`~AVc=g851x?&% ziwl|WKImh34U6$XBc6|Xn2n#PQ}B*1bS7@j2d(reWYk~(JQI4E zeIO=Zyl2eM2faLClMi~ix2nbXpqCM}#tF&?ufNdC#1|~ZC-u^m-uyF*Pw+u6eIL+| F@BfS=7y1AI diff --git a/dps/analysis/HLT_scripts_for_Sergeys_thesis/__init__.py b/dps/legacy/HLT_scripts_for_Sergeys_thesis/__init__.py similarity index 100% rename from dps/analysis/HLT_scripts_for_Sergeys_thesis/__init__.py rename to dps/legacy/HLT_scripts_for_Sergeys_thesis/__init__.py diff --git a/dps/analysis/HLT_scripts_for_Sergeys_thesis/make_HLT_plots_Sergey.py 
b/dps/legacy/HLT_scripts_for_Sergeys_thesis/make_HLT_plots_Sergey.py similarity index 100% rename from dps/analysis/HLT_scripts_for_Sergeys_thesis/make_HLT_plots_Sergey.py rename to dps/legacy/HLT_scripts_for_Sergeys_thesis/make_HLT_plots_Sergey.py diff --git a/dps/analysis/HLT_scripts_for_Sergeys_thesis/make_jet_response_plot_pt_bins.py b/dps/legacy/HLT_scripts_for_Sergeys_thesis/make_jet_response_plot_pt_bins.py similarity index 100% rename from dps/analysis/HLT_scripts_for_Sergeys_thesis/make_jet_response_plot_pt_bins.py rename to dps/legacy/HLT_scripts_for_Sergeys_thesis/make_jet_response_plot_pt_bins.py diff --git a/dps/analysis/HLT_scripts_for_Sergeys_thesis/make_jet_response_plots_correction_levels.py b/dps/legacy/HLT_scripts_for_Sergeys_thesis/make_jet_response_plots_correction_levels.py similarity index 100% rename from dps/analysis/HLT_scripts_for_Sergeys_thesis/make_jet_response_plots_correction_levels.py rename to dps/legacy/HLT_scripts_for_Sergeys_thesis/make_jet_response_plots_correction_levels.py diff --git a/dps/analysis/check_CRAB_jobs.py b/dps/legacy/check_CRAB_jobs.py similarity index 100% rename from dps/analysis/check_CRAB_jobs.py rename to dps/legacy/check_CRAB_jobs.py diff --git a/dps/analysis/estimate_QCD_rate.py b/dps/legacy/estimate_QCD_rate.py similarity index 100% rename from dps/analysis/estimate_QCD_rate.py rename to dps/legacy/estimate_QCD_rate.py diff --git a/dps/analysis/generate_QCD_template_from_data.py b/dps/legacy/generate_QCD_template_from_data.py similarity index 100% rename from dps/analysis/generate_QCD_template_from_data.py rename to dps/legacy/generate_QCD_template_from_data.py diff --git a/dps/analysis/grid.py b/dps/legacy/grid.py similarity index 100% rename from dps/analysis/grid.py rename to dps/legacy/grid.py diff --git a/dps/analysis/make_CRAB_configuration.py b/dps/legacy/make_CRAB_configuration.py similarity index 100% rename from dps/analysis/make_CRAB_configuration.py rename to 
dps/legacy/make_CRAB_configuration.py diff --git a/dps/analysis/make_HLT_plots.py b/dps/legacy/make_HLT_plots.py similarity index 100% rename from dps/analysis/make_HLT_plots.py rename to dps/legacy/make_HLT_plots.py diff --git a/dps/analysis/make_ntuples_CRAB_configurations.sh b/dps/legacy/make_ntuples_CRAB_configurations.sh similarity index 100% rename from dps/analysis/make_ntuples_CRAB_configurations.sh rename to dps/legacy/make_ntuples_CRAB_configurations.sh diff --git a/dps/analysis/make_unfolding_CRAB_configurations.sh b/dps/legacy/make_unfolding_CRAB_configurations.sh similarity index 100% rename from dps/analysis/make_unfolding_CRAB_configurations.sh rename to dps/legacy/make_unfolding_CRAB_configurations.sh diff --git a/dps/analysis/read_processed_events.py b/dps/legacy/read_processed_events.py similarity index 100% rename from dps/analysis/read_processed_events.py rename to dps/legacy/read_processed_events.py diff --git a/dps/analysis/read_skim_information.py b/dps/legacy/read_skim_information.py similarity index 100% rename from dps/analysis/read_skim_information.py rename to dps/legacy/read_skim_information.py diff --git a/dps/analysis/search/__init__.py b/dps/legacy/search/__init__.py similarity index 100% rename from dps/analysis/search/__init__.py rename to dps/legacy/search/__init__.py diff --git a/dps/analysis/search/translate_results_to_theta.py b/dps/legacy/search/translate_results_to_theta.py similarity index 100% rename from dps/analysis/search/translate_results_to_theta.py rename to dps/legacy/search/translate_results_to_theta.py diff --git a/dps/analysis/search/validate_systematic_method.py b/dps/legacy/search/validate_systematic_method.py similarity index 100% rename from dps/analysis/search/validate_systematic_method.py rename to dps/legacy/search/validate_systematic_method.py diff --git a/dps/analysis/trash/README b/dps/legacy/trash/README similarity index 100% rename from dps/analysis/trash/README rename to dps/legacy/trash/README diff 
--git a/dps/analysis/xsection/06_compare_energies.py b/dps/legacy/xsection/06_compare_energies.py similarity index 100% rename from dps/analysis/xsection/06_compare_energies.py rename to dps/legacy/xsection/06_compare_energies.py diff --git a/dps/analysis/xsection/98_fit_cross_checks.py b/dps/legacy/xsection/98_fit_cross_checks.py similarity index 100% rename from dps/analysis/xsection/98_fit_cross_checks.py rename to dps/legacy/xsection/98_fit_cross_checks.py diff --git a/dps/analysis/xsection/98b_fit_cross_checks.py b/dps/legacy/xsection/98b_fit_cross_checks.py similarity index 100% rename from dps/analysis/xsection/98b_fit_cross_checks.py rename to dps/legacy/xsection/98b_fit_cross_checks.py diff --git a/dps/analysis/xsection/98c_fit_cross_checks.py b/dps/legacy/xsection/98c_fit_cross_checks.py similarity index 100% rename from dps/analysis/xsection/98c_fit_cross_checks.py rename to dps/legacy/xsection/98c_fit_cross_checks.py diff --git a/dps/analysis/xsection/make_control_plots.py b/dps/legacy/xsection/make_control_plots.py similarity index 100% rename from dps/analysis/xsection/make_control_plots.py rename to dps/legacy/xsection/make_control_plots.py diff --git a/dps/analysis/xsection/make_cutflow_8TeV.py b/dps/legacy/xsection/make_cutflow_8TeV.py similarity index 100% rename from dps/analysis/xsection/make_cutflow_8TeV.py rename to dps/legacy/xsection/make_cutflow_8TeV.py diff --git a/dps/analysis/xsection/make_fit_variable_plots.py b/dps/legacy/xsection/make_fit_variable_plots.py similarity index 100% rename from dps/analysis/xsection/make_fit_variable_plots.py rename to dps/legacy/xsection/make_fit_variable_plots.py diff --git a/dps/analysis/xsection/make_new_physics_plots_8TeV.py b/dps/legacy/xsection/make_new_physics_plots_8TeV.py similarity index 100% rename from dps/analysis/xsection/make_new_physics_plots_8TeV.py rename to dps/legacy/xsection/make_new_physics_plots_8TeV.py diff --git a/dps/analysis/zprime_analysis/__init__.py 
b/dps/legacy/zprime_analysis/__init__.py similarity index 100% rename from dps/analysis/zprime_analysis/__init__.py rename to dps/legacy/zprime_analysis/__init__.py diff --git a/dps/analysis/zprime_analysis/estimate_QCD_rate.py b/dps/legacy/zprime_analysis/estimate_QCD_rate.py similarity index 100% rename from dps/analysis/zprime_analysis/estimate_QCD_rate.py rename to dps/legacy/zprime_analysis/estimate_QCD_rate.py diff --git a/dps/analysis/zprime_analysis/make_control_plots.py b/dps/legacy/zprime_analysis/make_control_plots.py similarity index 100% rename from dps/analysis/zprime_analysis/make_control_plots.py rename to dps/legacy/zprime_analysis/make_control_plots.py diff --git a/dps/analysis/zprime_analysis/make_control_region_plots.py b/dps/legacy/zprime_analysis/make_control_region_plots.py similarity index 100% rename from dps/analysis/zprime_analysis/make_control_region_plots.py rename to dps/legacy/zprime_analysis/make_control_region_plots.py diff --git a/dps/analysis/zprime_analysis/make_control_region_plots_2.py b/dps/legacy/zprime_analysis/make_control_region_plots_2.py similarity index 100% rename from dps/analysis/zprime_analysis/make_control_region_plots_2.py rename to dps/legacy/zprime_analysis/make_control_region_plots_2.py From dc5450533224cfdc40f8ca40a810efeb4a6a4a72 Mon Sep 17 00:00:00 2001 From: Douglas Burns Date: Fri, 9 Dec 2016 12:18:58 +0000 Subject: [PATCH 48/90] 00 OptionParser->ArgumentParser --- dps/analysis/xsection/00_pick_bins.py | 40 ++++++++++++++++----------- 1 file changed, 24 insertions(+), 16 deletions(-) diff --git a/dps/analysis/xsection/00_pick_bins.py b/dps/analysis/xsection/00_pick_bins.py index 955ed732..9481da61 100644 --- a/dps/analysis/xsection/00_pick_bins.py +++ b/dps/analysis/xsection/00_pick_bins.py @@ -45,7 +45,7 @@ from dps.utils.Calculation import calculate_purities, calculate_stabilities from dps.utils.hist_utilities import rebin_2d from dps.config.xsection import XSectionConfig -from optparse import 
OptionParser +from argparse import ArgumentParser from dps.config.variable_binning import bin_edges_full, minimum_bin_width from dps.utils.file_utilities import write_data_to_JSON from ROOT import TH1, TCanvas, TLine, gDirectory, TObjArray, TColor, TLegend @@ -59,15 +59,23 @@ def main(): Step 3: Check if it is true for all other histograms. If not back to step 2 Step 4: Repeat step 2 & 3 until no mo bins can be created ''' - - parser = OptionParser() - parser.add_option( '-v', dest = "visiblePhaseSpace", action = "store_true", - help = "Consider visible phase space or not" ) - parser.add_option( '-c', dest = "combined", action = "store_true", - help = "Combine channels" ) - parser.add_option( '-r', dest = "redo_resolution", action = "store_true", - help = "Recalculate the resolution plots" ) - ( options, _ ) = parser.parse_args() + parser = ArgumentParser() + parser.add_argument( '-v', + dest = "visiblePhaseSpace", + action = "store_true", + help = "Consider visible phase space or not" + ) + parser.add_argument( '-c', + dest = "combined", + action = "store_true", + help = "Combine channels" + ) + parser.add_argument( '-r', + dest = "redo_resolution", + action = "store_true", + help = "Recalculate the resolution plots" + ) + args = parser.parse_args() measurement_config = XSectionConfig(13) @@ -89,9 +97,9 @@ def main(): variableToUse = variable if 'Rap' in variable: variableToUse = 'abs_%s' % variable - histogram_information = get_histograms( variableToUse, options ) + histogram_information = get_histograms( variableToUse, args ) - if options.redo_resolution: + if args.redo_resolution: rs.generate_resolution_plots(histogram_information, variable) if variable == 'HT': @@ -145,7 +153,7 @@ def main(): outputInfo['N'] = info['N'] outputInfo['res'] = info['res'] outputJsonFile = 'unfolding/13TeV/binningInfo_%s_%s_FullPS.txt' % ( variable, info['channel'] ) - if options.visiblePhaseSpace: + if args.visiblePhaseSpace: outputJsonFile = 
'unfolding/13TeV/binningInfo_%s_%s_VisiblePS.txt' % ( variable, info['channel'] ) write_data_to_JSON( outputInfo, outputJsonFile ) for key in outputInfo: @@ -157,14 +165,14 @@ def main(): for variable in bin_choices: print('\''+variable+'\' : '+str(bin_choices[variable])+',') -def get_histograms( variable, options ): +def get_histograms( variable, args ): config = XSectionConfig( 13 ) path_electron = '' path_muon = '' path_combined = '' histogram_name = '' - if options.visiblePhaseSpace: + if args.visiblePhaseSpace: histogram_name = 'responseVis_without_fakes' else : histogram_name = 'response_without_fakes' @@ -184,7 +192,7 @@ def get_histograms( variable, options ): 'channel':'muon'}, ] - if options.combined: + if args.combined: histogram_information = [ {'file': config.unfolding_central_raw, 'CoM': 13, From 0b405444d383b4fed2ff9d7ffa7432b1e248393f Mon Sep 17 00:00:00 2001 From: Douglas Burns Date: Fri, 9 Dec 2016 15:19:39 +0000 Subject: [PATCH 49/90] 00 Add comments --- dps/analysis/xsection/00_pick_bins.py | 191 +++++++++++++++----------- 1 file changed, 108 insertions(+), 83 deletions(-) diff --git a/dps/analysis/xsection/00_pick_bins.py b/dps/analysis/xsection/00_pick_bins.py index 9481da61..5d36f236 100644 --- a/dps/analysis/xsection/00_pick_bins.py +++ b/dps/analysis/xsection/00_pick_bins.py @@ -79,16 +79,19 @@ def main(): measurement_config = XSectionConfig(13) - p_min = 0.6 # 0.5 for MET + # Initialise binning parameters + bin_choices = {} + + # Min Purity and Stability + p_min = 0.6 s_min = 0.6 - # we also want the statistical error to be larger than 5% - # this translates (error -= 1/sqrt(N)) to (1/0.05)^2 = 400 + # 0.5 for MET + + # Min events in bin for appropriate stat unc + # error = 1/sqrt(N) [ unc=5% : (1/0.05)^2 = 400] n_min = 500 n_min_lepton = 500 -# n_min = 200 # N = 200 -> 7.1 % stat error - bin_choices = {} - # variables = bin_edges_full.keys() variables = measurement_config.variables for variable in variables: global var @@ -97,11 +100,13 
@@ def main(): variableToUse = variable if 'Rap' in variable: variableToUse = 'abs_%s' % variable - histogram_information = get_histograms( variableToUse, args ) + histogram_information = get_histograms( measurement_config, variableToUse, args ) + # Remake the resolution plots from the fine binned unfolding matrix if args.redo_resolution: rs.generate_resolution_plots(histogram_information, variable) + # Claculate the best binning if variable == 'HT': best_binning, histogram_information = get_best_binning( histogram_information , p_min, s_min, n_min, minimum_bin_width[variable], x_min=100. ) elif variable == 'ST': @@ -119,6 +124,7 @@ def main(): else: best_binning, histogram_information = get_best_binning( histogram_information , p_min, s_min, n_min, minimum_bin_width[variable] ) + # Symmetric binning for lepton_eta if 'Rap' in variable: for b in list(best_binning): if b != 0.0: @@ -134,6 +140,7 @@ def main(): newLastBinWidth = penultimateBinWidth * 5 best_binning[-1] = best_binning[-2] + newLastBinWidth + # Smooth bin edges if variable == 'abs_lepton_eta': best_binning = [ round(i,2) for i in best_binning ] elif variable != 'NJets' : @@ -141,76 +148,84 @@ def main(): bin_choices[variable] = best_binning + # Print the best binning to screen and JSON print('The best binning for', variable, 'is:') print('bin edges =', best_binning) print('N_bins =', len( best_binning ) - 1) print('The corresponding purities and stabilities are:') for info in histogram_information: - # print_latex_table(info, variable, best_binning) outputInfo = {} outputInfo['p_i'] = info['p_i'] outputInfo['s_i'] = info['s_i'] - outputInfo['N'] = info['N'] + outputInfo['N'] = info['N'] outputInfo['res'] = info['res'] outputJsonFile = 'unfolding/13TeV/binningInfo_%s_%s_FullPS.txt' % ( variable, info['channel'] ) if args.visiblePhaseSpace: outputJsonFile = 'unfolding/13TeV/binningInfo_%s_%s_VisiblePS.txt' % ( variable, info['channel'] ) write_data_to_JSON( outputInfo, outputJsonFile ) + 
print_latex_table(info, variable, best_binning) for key in outputInfo: print (key,outputInfo[key]) print('-' * 120) + + # Final print of all binnings to screen print('=' * 120) print('For config/variable_binning.py') print('=' * 120) for variable in bin_choices: print('\''+variable+'\' : '+str(bin_choices[variable])+',') -def get_histograms( variable, args ): - config = XSectionConfig( 13 ) - - path_electron = '' - path_muon = '' - path_combined = '' - histogram_name = '' +def get_histograms( config, variable, args ): + ''' + Return a dictionary of the unfolding histogram informations (inc. hist) + ''' + path_electron = '' + path_muon = '' + path_combined = '' + histogram_name = 'response_without_fakes' if args.visiblePhaseSpace: histogram_name = 'responseVis_without_fakes' - else : - histogram_name = 'response_without_fakes' path_electron = '%s_electron/%s' % ( variable, histogram_name ) - path_muon = '%s_muon/%s' % ( variable, histogram_name ) + path_muon = '%s_muon/%s' % ( variable, histogram_name ) path_combined = '%s_combined/%s' % ( variable, histogram_name ) histogram_information = [ - # {'file': config.unfolding_central_raw, - # 'CoM': 13, - # 'path':path_electron, - # 'channel':'electron'}, - {'file':config.unfolding_central_raw, - 'CoM': 13, - 'path':path_muon, - 'channel':'muon'}, - ] + { + 'file' : config.unfolding_central_raw, + 'CoM' : 13, + 'path' : path_electron, + 'channel' :'electron' + }, + { + 'file' : config.unfolding_central_raw, + 'CoM' : 13, + 'path' : path_muon, + 'channel' :'muon' + }, + ] if args.combined: histogram_information = [ - {'file': config.unfolding_central_raw, - 'CoM': 13, - 'path': path_combined, - 'channel':'combined'}, - ] + { + 'file' : config.unfolding_central_raw, + 'CoM' : 13, + 'path' : path_combined, + 'channel' : 'combined' + }, + ] for histogram in histogram_information: - f = File( histogram['file'] ) - # scale to lumi - # nEvents = f.EventFilter.EventCounter.GetBinContent( 1 ) # number of processed events - # 
config = XSectionConfig( histogram['CoM'] ) - # lumiweight = config.ttbar_xsection * config.new_luminosity / nEvents - lumiweight = 1 - + f = File( histogram['file'] ) histogram['hist'] = f.Get( histogram['path'] ).Clone() + + # scale to current lumi + lumiweight = config.luminosity_scale + if round(lumiweight, 1) != 1.0: + print( "Scaling to {}".format(lumiweight) ) histogram['hist'].Scale( lumiweight ) + # change scope from file to memory histogram['hist'].SetDirectory( 0 ) f.close() @@ -218,76 +233,90 @@ def get_histograms( variable, args ): return histogram_information - def get_best_binning( histogram_information, p_min, s_min, n_min, min_width, x_min = None, is_NJet=False ): ''' Step 1: Change the size of the first bin until it fulfils the minimal criteria - Step 3: Check if it is true for all other histograms. If not back to step 2 + Step 3: Check if it is true for other channel histograms. If not back to step 2 Step 4: Repeat step 2 & 3 until no more bins can be created ''' - histograms = [info['hist'] for info in histogram_information] - bin_edges = [] + histograms = [info['hist'] for info in histogram_information] + bin_edges = [] resolutions = [] - purities = {} + purities = {} stabilities = {} current_bin_start = 0 current_bin_end = 0 first_hist = histograms[0] - n_bins = first_hist.GetNbinsX() + n_bins = first_hist.GetNbinsX() + + # Start at minimum x instead of 0 if x_min: current_bin_start = first_hist.ProjectionX().FindBin(x_min) - 1 current_bin_end = current_bin_start + # Calculate the bin edges until no more bins can be iterated over while current_bin_end < n_bins: - # bin_End, p, s, N_reco + # Return the next bin end + (p, s, N_reco, res) current_bin_end, _, _, _, r = get_next_end( histograms, current_bin_start, current_bin_end, p_min, s_min, n_min, min_width, is_NJet=is_NJet ) resolutions.append(r) + + # Attach first bin low edge if not bin_edges: - # if empty bin_edges.append( first_hist.GetXaxis().GetBinLowEdge( current_bin_start + 1 ) ) + # 
Attachs the current bin end edge bin_edges.append( first_hist.GetXaxis().GetBinLowEdge( current_bin_end ) + first_hist.GetXaxis().GetBinWidth( current_bin_end ) ) current_bin_start = current_bin_end + # add the purity and stability values for the final binning - for info in histogram_information: - new_hist = rebin_2d( info['hist'], bin_edges, bin_edges ).Clone( info['channel'] + '_' + str( info['CoM'] ) ) - get_bin_content = new_hist.ProjectionX().GetBinContent - purities = calculate_purities( new_hist.Clone() ) - stabilities = calculate_stabilities( new_hist.Clone() ) - n_events = [int( get_bin_content( i ) ) for i in range( 1, len( bin_edges ) )] + for hist_info in histogram_information: + new_hist = rebin_2d( hist_info['hist'], bin_edges, bin_edges ).Clone( hist_info['channel'] + '_' + str( hist_info['CoM'] ) ) + get_bin_content = new_hist.ProjectionX().GetBinContent + purities = calculate_purities( new_hist.Clone() ) + stabilities = calculate_stabilities( new_hist.Clone() ) + n_events = [int( get_bin_content( i ) ) for i in range( 1, len( bin_edges ) )] + # Now check if the last bin also fulfils the requirements if ( purities[-1] < p_min or stabilities[-1] < s_min or n_events[-1] < n_min ) and len(purities) > 3: - # if not, merge last two bins - bin_edges[-2] = bin_edges[-1] - bin_edges = bin_edges[:-1] - new_hist = rebin_2d( info['hist'], bin_edges, bin_edges ).Clone() - get_bin_content = new_hist.ProjectionX().GetBinContent - purities = calculate_purities( new_hist.Clone() ) - stabilities = calculate_stabilities( new_hist.Clone() ) - n_events = [int( get_bin_content( i ) ) for i in range( 1, len( bin_edges ) )] - - info['p_i'] = purities - info['s_i'] = stabilities - info['N'] = n_events - info['res'] = resolutions + # Merge last two bins + bin_edges[-2] = bin_edges[-1] + bin_edges = bin_edges[:-1] + # Merge the resolutions in the last bins + resolutions[-2] = (resolutions[-2]+resolutions[-1]) / 2 + resolutions = resolutions[:-1] + # Recalculate purities and 
stabilites + new_hist = rebin_2d( hist_info['hist'], bin_edges, bin_edges ).Clone() + purities = calculate_purities( new_hist.Clone() ) + stabilities = calculate_stabilities( new_hist.Clone() ) + n_events = [int( get_bin_content( i ) ) for i in range( 1, len( bin_edges ) )] + + # Add purites, stabilities, n_events and resolutions to the hstogram information + hist_info['p_i'] = purities + hist_info['s_i'] = stabilities + hist_info['N'] = n_events + hist_info['res'] = resolutions return bin_edges, histogram_information def get_next_end( histograms, bin_start, bin_end, p_min, s_min, n_min, min_width, is_NJet=False ): + ''' + Getting the next bin end + ''' current_bin_start = bin_start current_bin_end = bin_end + for gen_vs_reco_histogram in histograms: reco = asrootpy( gen_vs_reco_histogram.ProjectionX() ) - gen = asrootpy( gen_vs_reco_histogram.ProjectionY( 'py', 1 ) ) + gen = asrootpy( gen_vs_reco_histogram.ProjectionY( 'py', 1 ) ) reco_i = list( reco.y() ) - gen_i = list( gen.y() ) + gen_i = list( gen.y() ) # keep the start bin the same but roll the end bin for bin_i in range ( current_bin_end, len( reco_i ) + 1 ): x_high = reco.GetXaxis().GetBinLowEdge(bin_i) - x_mid = reco.GetXaxis().GetBinCenter(int( (current_bin_start+current_bin_end)/2 ) ) - x_low = reco.GetXaxis().GetBinUpEdge(current_bin_start) + x_mid = reco.GetXaxis().GetBinCenter(int( (current_bin_start+current_bin_end)/2 ) ) + x_low = reco.GetXaxis().GetBinUpEdge(current_bin_start) binWidth = x_high - x_low if binWidth < min_width: @@ -295,7 +324,7 @@ def get_next_end( histograms, bin_start, bin_end, p_min, s_min, n_min, min_width continue n_reco = sum( reco_i[current_bin_start:bin_i] ) - n_gen = sum( gen_i[current_bin_start:bin_i] ) + n_gen = sum( gen_i[current_bin_start:bin_i] ) n_gen_and_reco = 0 if bin_i < current_bin_start + 1: @@ -312,16 +341,9 @@ def get_next_end( histograms, bin_start, bin_end, p_min, s_min, n_min, min_width if n_gen > 0: s = round( n_gen_and_reco / n_gen, 3 ) - # find the bin 
range that matches - # print('New bin : ',current_bin_start,current_bin_end,p,s - if p >= p_min and s >= s_min and n_reco >= n_min: # Now that purity and stability are statisfied... What about the resolution? - # Find slices of X and Y between bin edges and fit them with a Gaussian. - # The StdDev of Gaussian = Resolution. - # If Resolution < Bin width then we are all good - # Initiate res # Dont use resolution information on NJets if is_NJet: @@ -337,6 +359,7 @@ def get_next_end( histograms, bin_start, bin_end, p_min, s_min, n_min, min_width # if it gets to the end, this is the best we can do current_bin_end = bin_i + # And now for the next channel starting with current_bin_end. return current_bin_end, p, s, n_reco, res def print_console(info, old_purities, old_stabilities, print_old = False): @@ -352,8 +375,8 @@ def print_console(info, old_purities, old_stabilities, print_old = False): def print_latex_table( info, variable, best_binning ): print('CoM =', info['CoM'], 'channel =', info['channel']) - header = """\%s bin (\GeV) & purity & stability & number of events\\\\ - \hline""" % variable.lower() + header = """\{var} bin (\GeV) & purity & stability & resolution & number of events\\\\ + \hline""".format(var=variable) print(header) firstBin = 0 lastBin = len( best_binning ) - 1 @@ -365,8 +388,10 @@ def print_latex_table( info, variable, best_binning ): if i == len( best_binning ) - 2: bin_range = '$\geq %d$' % best_binning[i] else: - bin_range = '%d - %d' % ( best_binning[i], best_binning[i + 1] ) - print('%s & %.3f & %.3f & %d\\\\' % (bin_range, info['p_i'][i], info['s_i'][i], info['N'][i])) + bin_range = '{start} - {end}'.format(start=best_binning[i],end=best_binning[i + 1] ) + if 'abs_lepton_eta' in variable: + bin_range = '{start} - {end}'.format(start=best_binning[i],end=best_binning[i + 1] ) + print('%s & %.3f & %.3f & %.3f & %d\\\\' % (bin_range, info['p_i'][i], info['s_i'][i], info['res'][i], info['N'][i])) print('\hline') if __name__ == '__main__': From 
96d9ed8f062d3b330d1ba2aa1d4f44bee7ecba06 Mon Sep 17 00:00:00 2001 From: Douglas Burns Date: Mon, 12 Dec 2016 13:16:48 +0000 Subject: [PATCH 50/90] Add symmetric arg for tuple to graph (v,e)->graph as well as (v,e,e)->graph --- dps/utils/hist_utilities.py | 22 ++++++++++++++++------ 1 file changed, 16 insertions(+), 6 deletions(-) diff --git a/dps/utils/hist_utilities.py b/dps/utils/hist_utilities.py index 03db4003..cf01b729 100644 --- a/dps/utils/hist_utilities.py +++ b/dps/utils/hist_utilities.py @@ -33,17 +33,27 @@ def values_and_errors_to_hist( values, errors, bins ): value_error_tuplelist = zip( values, errors ) return value_error_tuplelist_to_hist( value_error_tuplelist, bins ) -def value_errors_tuplelist_to_graph( value_errors_tuplelist, bin_edges ): - value_error_tuplelist = [( value, 0 ) for value, lower_error, upper_error in value_errors_tuplelist] +def value_errors_tuplelist_to_graph( value_errors_tuplelist, bin_edges, is_symmetric_errors=False ): + value_error_tuplelist = [] + if is_symmetric_errors: + value_error_tuplelist = [( value, 0 ) for value, error in value_errors_tuplelist] + else: + value_error_tuplelist = [( value, 0 ) for value, lower_error, upper_error in value_errors_tuplelist] + hist = value_error_tuplelist_to_hist( value_error_tuplelist, bin_edges ) rootpy_graph = asrootpy( TGraphAsymmErrors( hist ) ) -# rootpy_graph = Graph(hist = hist) + set_lower_error = rootpy_graph.SetPointEYlow set_upper_error = rootpy_graph.SetPointEYhigh - for point_i, ( value, lower_error, upper_error ) in enumerate( value_errors_tuplelist ): - set_lower_error( point_i, lower_error ) - set_upper_error( point_i, upper_error ) + if is_symmetric_errors: + for point_i, ( value, error ) in enumerate( value_errors_tuplelist ): + set_lower_error( point_i, error ) + set_upper_error( point_i, error ) + else: + for point_i, ( value, lower_error, upper_error ) in enumerate( value_errors_tuplelist ): + set_lower_error( point_i, lower_error ) + set_upper_error( point_i, 
upper_error ) return rootpy_graph From 1ac44b58439a8c4ba3e59990561339f74f7ab4f7 Mon Sep 17 00:00:00 2001 From: Douglas Burns Date: Mon, 12 Dec 2016 13:17:47 +0000 Subject: [PATCH 51/90] 04 to work of pandas DF. --- .../xsection/04_make_plots_matplotlib.py | 137 +++++++++--------- 1 file changed, 67 insertions(+), 70 deletions(-) diff --git a/dps/analysis/xsection/04_make_plots_matplotlib.py b/dps/analysis/xsection/04_make_plots_matplotlib.py index c0d379a2..898de786 100644 --- a/dps/analysis/xsection/04_make_plots_matplotlib.py +++ b/dps/analysis/xsection/04_make_plots_matplotlib.py @@ -4,11 +4,11 @@ import os, gc from copy import deepcopy -from dps.config.latex_labels import variables_latex, measurements_latex, fit_variables_latex -from dps.config.variable_binning import bin_edges_full, variable_bins_ROOT, variable_bins_visiblePS_ROOT, fit_variable_bin_edges,\ - bin_edges_vis +from dps.config.latex_labels import variables_latex, measurements_latex +from dps.config.variable_binning import bin_edges_full, bin_edges_vis from dps.config.xsection import XSectionConfig -from dps.utils.file_utilities import read_data_from_JSON, make_folder_if_not_exists +from dps.utils.file_utilities import make_folder_if_not_exists, read_data_from_JSON +from dps.utils.pandas_utilities import read_tuple_from_file, file_to_df, tupleise_cols from dps.utils.hist_utilities import value_error_tuplelist_to_hist, \ value_tuplelist_to_hist, value_errors_tuplelist_to_graph, graph_to_value_errors_tuplelist from math import sqrt @@ -18,7 +18,6 @@ import matplotlib as mpl from matplotlib import rc -from dps.utils.plotting import get_best_max_y mpl.use( 'agg' ) import rootpy.plotting.root2matplotlib as rplt import matplotlib.pyplot as plt @@ -39,7 +38,7 @@ def read_xsection_measurement_results( category, channel ): global path_to_JSON, variable, met_type, phase_space, method - file_template = '{path}/{category}/{name}_{channel}_{method}{suffix}.txt' + file_template = 
'TESTING/{path}/{category}/{name}_{channel}_{method}{suffix}.txt' filename = file_template.format( path = path_to_JSON, category = category, @@ -50,7 +49,7 @@ def read_xsection_measurement_results( category, channel ): ) xsec_04_log.debug('Reading file {0}'.format(filename)) - normalised_xsection_unfolded = read_data_from_JSON( filename ) + normalised_xsection_unfolded = read_tuple_from_file( filename ) edges = bin_edges_full[variable] if phase_space == 'VisiblePS': edges = bin_edges_vis[variable] @@ -66,42 +65,55 @@ def read_xsection_measurement_results( category, channel ): if category == 'central': # true distributions - h_normalised_xsection_powhegPythia8 = value_error_tuplelist_to_hist( normalised_xsection_unfolded['powhegPythia8'], edges ) - # h_normalised_xsection_amcatnlo = value_error_tuplelist_to_hist( normalised_xsection_unfolded['amcatnlo'], edges ) - # h_normalised_xsection_madgraphMLM = value_error_tuplelist_to_hist( normalised_xsection_unfolded['madgraphMLM'], edges ) - h_normalised_xsection_powhegHerwigpp = value_error_tuplelist_to_hist( normalised_xsection_unfolded['powhegHerwig'], edges ) + h_normalised_xsection_powhegPythia8 = value_error_tuplelist_to_hist( normalised_xsection_unfolded['powhegPythia8'], edges ) + h_normalised_xsection_amcatnlo = value_error_tuplelist_to_hist( normalised_xsection_unfolded['amcatnlo'], edges ) + h_normalised_xsection_madgraphMLM = value_error_tuplelist_to_hist( normalised_xsection_unfolded['madgraphMLM'], edges ) + h_normalised_xsection_powhegHerwigpp = value_error_tuplelist_to_hist( normalised_xsection_unfolded['powhegHerwig'], edges ) # h_normalised_xsection_amcatnloHerwigpp = value_error_tuplelist_to_hist( normalised_xsection_unfolded['amcatnloHerwig'], edges ) - # h_normalised_xsection_scaleup = value_error_tuplelist_to_hist( normalised_xsection_unfolded['scaleup'], edges ) - # h_normalised_xsection_scaledown = value_error_tuplelist_to_hist( normalised_xsection_unfolded['scaledown'], edges ) - 
h_normalised_xsection_massup = value_error_tuplelist_to_hist( normalised_xsection_unfolded['massup'], edges ) - h_normalised_xsection_massdown = value_error_tuplelist_to_hist( normalised_xsection_unfolded['massdown'], edges ) - - histograms_normalised_xsection_different_generators.update( { - 'powhegPythia8':h_normalised_xsection_powhegPythia8, - # 'amcatnloPythia8':h_normalised_xsection_amcatnlo, - # 'madgraphMLM':h_normalised_xsection_madgraphMLM, - 'powhegHerwig':h_normalised_xsection_powhegHerwigpp, - # 'amcatnloHerwig':h_normalised_xsection_amcatnloHerwigpp, - }) - - histograms_normalised_xsection_systematics_shifts.update( {'powhegPythia8':h_normalised_xsection_powhegPythia8, - # 'scaledown': h_normalised_xsection_scaledown, - # 'scaleup': h_normalised_xsection_scaleup, - 'massdown': h_normalised_xsection_massdown, - 'massup': h_normalised_xsection_massup - }) + # h_normalised_xsection_scaleup = value_error_tuplelist_to_hist( normalised_xsection_unfolded['scaleup'], edges ) + # h_normalised_xsection_scaledown = value_error_tuplelist_to_hist( normalised_xsection_unfolded['scaledown'], edges ) + h_normalised_xsection_massup = value_error_tuplelist_to_hist( normalised_xsection_unfolded['massup'], edges ) + h_normalised_xsection_massdown = value_error_tuplelist_to_hist( normalised_xsection_unfolded['massdown'], edges ) + + histograms_normalised_xsection_different_generators.update( + { + 'powhegPythia8':h_normalised_xsection_powhegPythia8, + 'amcatnloPythia8':h_normalised_xsection_amcatnlo, + 'madgraphMLM':h_normalised_xsection_madgraphMLM, + 'powhegHerwig':h_normalised_xsection_powhegHerwigpp, + # 'amcatnloHerwig':h_normalised_xsection_amcatnloHerwigpp, + } + ) + + histograms_normalised_xsection_systematics_shifts.update( + { + 'powhegPythia8':h_normalised_xsection_powhegPythia8, + # 'scaledown': h_normalised_xsection_scaledown, + # 'scaleup': h_normalised_xsection_scaleup, + 'massdown': h_normalised_xsection_massdown, + 'massup': h_normalised_xsection_massup + 
} + ) filename = file_template.format( - path = path_to_JSON, - category = category, - name = 'xsection_normalised', - channel = channel, - method = method, - suffix = '_with_errors', - ) + path = path_to_JSON, + category = category, + name = 'xsection_normalised', + channel = channel, + method = method, + suffix = '_summary_absolute', + ) + + normalised_xsection_unfolded_with_errors = file_to_df( filename ) + normalised_xsection_unfolded_with_errors['TTJet_unfolded'] = tupleise_cols( + normalised_xsection_unfolded_with_errors['central'], + normalised_xsection_unfolded_with_errors['systematic'], + ) + print(normalised_xsection_unfolded_with_errors['TTJet_unfolded']) + + - normalised_xsection_unfolded_with_errors = read_data_from_JSON( filename ) xsec_04_log.debug('Reading file {0}'.format(filename)) # filename = file_template.format( # path = path_to_JSON, @@ -111,7 +123,7 @@ def read_xsection_measurement_results( category, channel ): # method = method, # suffix = '_with_systematics_but_without_generator_errors', # ) - ### normalised_xsection_unfolded_with_errors_with_systematics_but_without_ttbar_theory = read_data_from_JSON( file_template + '_with_systematics_but_without_ttbar_theory_errors.txt' ) + ### normalised_xsection_unfolded_with_errors_with_systematics_but_without_ttbar_theory = read_tuple_from_file( file_template + '_with_systematics_but_without_ttbar_theory_errors.txt' ) # normalised_xsection_unfolded_with_errors_with_systematics_but_without_generator = normalised_xsection_unfolded_with_errors # a rootpy.Graph with asymmetric errors! 
@@ -122,20 +134,22 @@ def read_xsection_measurement_results( category, channel ): ### normalised_xsection_unfolded_with_errors_with_systematics_but_without_ttbar_theory['TTJet_unfolded'], ### edges ) - h_normalised_xsection_unfolded_with_errors = value_errors_tuplelist_to_graph( - normalised_xsection_unfolded_with_errors['TTJet_measured'], - edges ) + # h_normalised_xsection_unfolded_with_errors = value_errors_tuplelist_to_graph( + # normalised_xsection_unfolded_with_errors['TTJet_measured'], + # edges ) h_normalised_xsection_unfolded_with_errors_unfolded = value_errors_tuplelist_to_graph( - normalised_xsection_unfolded_with_errors['TTJet_unfolded'], - edges ) + normalised_xsection_unfolded_with_errors['TTJet_unfolded'], + edges, + is_symmetric_errors=True + ) # histograms_normalised_xsection_different_generators['measured_with_systematics'] = h_normalised_xsection_with_systematics_but_without_ttbar_theory # histograms_normalised_xsection_different_generators['unfolded_with_systematics'] = h_normalised_xsection_with_systematics_but_without_ttbar_theory_unfolded - histograms_normalised_xsection_different_generators['measured_with_systematics'] = h_normalised_xsection_unfolded_with_errors + # histograms_normalised_xsection_different_generators['measured_with_systematics'] = h_normalised_xsection_unfolded_with_errors histograms_normalised_xsection_different_generators['unfolded_with_systematics'] = h_normalised_xsection_unfolded_with_errors_unfolded - histograms_normalised_xsection_systematics_shifts['measured_with_systematics'] = h_normalised_xsection_unfolded_with_errors + # histograms_normalised_xsection_systematics_shifts['measured_with_systematics'] = h_normalised_xsection_unfolded_with_errors histograms_normalised_xsection_systematics_shifts['unfolded_with_systematics'] = h_normalised_xsection_unfolded_with_errors_unfolded return histograms_normalised_xsection_different_generators, histograms_normalised_xsection_systematics_shifts @@ -654,12 +668,8 @@ def 
get_unit_string(fit_variable): measurement_config = XSectionConfig( options.CoM ) # caching of variables for shorter access - translate_options = measurement_config.translate_options - ttbar_theory_systematic_prefix = measurement_config.ttbar_theory_systematic_prefix - vjets_theory_systematic_prefix = measurement_config.vjets_theory_systematic_prefix - met_systematics = measurement_config.met_systematics method = options.unfolding_method - + translate_options = measurement_config.translate_options variable = options.variable show_generator_ratio = options.show_generator_ratio visiblePS = options.visiblePS @@ -679,25 +689,12 @@ def get_unit_string(fit_variable): ) # path_to_JSON = options.path + '/' + str( measurement_config.centre_of_mass_energy ) + 'TeV/' + variable + '/' - categories = deepcopy( measurement_config.categories_and_prefixes.keys() ) - ttbar_generator_systematics = [ttbar_theory_systematic_prefix + systematic for systematic in measurement_config.generator_systematics] - vjets_generator_systematics = [vjets_theory_systematic_prefix + systematic for systematic in measurement_config.generator_systematics] - categories.extend( ttbar_generator_systematics ) - # categories.extend( vjets_generator_systematics ) - - pdf_uncertainties = ['PDFWeights_%d' % index for index in range( measurement_config.pdfWeightMin, measurement_config.pdfWeightMax )] - # # all MET uncertainties except JES as this is already included - # met_uncertainties = [met_type + suffix for suffix in met_systematics if not 'JetEn' in suffix and not 'JetRes' in suffix] - # new_uncertainties = ['QCD_shape'] - rate_changing_systematics = [systematic for systematic in measurement_config.rate_changing_systematics.keys()] - - all_measurements = deepcopy( categories ) - # all_measurements.extend( pdf_uncertainties ) - # all_measurements.extend( met_uncertainties ) - # all_measurements.extend( new_uncertainties ) - all_measurements.extend( rate_changing_systematics ) + all_measurements = deepcopy( 
measurement_config.measurements ) + pdf_uncertainties = ['PDFWeights_%d' % index for index in range( 1, 45 )] + all_measurements.extend( pdf_uncertainties ) + # for channel in ['electron', 'muon', 'combined', 'combinedBeforeUnfolding']: - for channel in ['combinedBeforeUnfolding', 'combined']: + for channel in ['muon']: for category in all_measurements: if not category == 'central' and not options.additional_plots: continue @@ -737,7 +734,7 @@ def get_unit_string(fit_variable): del histograms_normalised_xsection_different_generators, histograms_normalised_xsection_systematics_shifts if options.additional_plots: - plot_central_and_systematics( channel, categories, exclude = ttbar_generator_systematics ) + plot_central_and_systematics( channel, measurements, exclude = ttbar_generator_systematics ) plot_central_and_systematics( channel, ttbar_generator_systematics, suffix = 'ttbar_generator_only' ) From 484d5ef8cebfb470c460d49a2864e27c082ce809 Mon Sep 17 00:00:00 2001 From: Douglas Burns Date: Mon, 12 Dec 2016 13:18:47 +0000 Subject: [PATCH 52/90] path_to_JSON->path_to_DF --- .../xsection/04_make_plots_matplotlib.py | 16 ++++++++-------- 1 file changed, 8 insertions(+), 8 deletions(-) diff --git a/dps/analysis/xsection/04_make_plots_matplotlib.py b/dps/analysis/xsection/04_make_plots_matplotlib.py index 898de786..2b386687 100644 --- a/dps/analysis/xsection/04_make_plots_matplotlib.py +++ b/dps/analysis/xsection/04_make_plots_matplotlib.py @@ -7,7 +7,7 @@ from dps.config.latex_labels import variables_latex, measurements_latex from dps.config.variable_binning import bin_edges_full, bin_edges_vis from dps.config.xsection import XSectionConfig -from dps.utils.file_utilities import make_folder_if_not_exists, read_data_from_JSON +from dps.utils.file_utilities import make_folder_if_not_exists from dps.utils.pandas_utilities import read_tuple_from_file, file_to_df, tupleise_cols from dps.utils.hist_utilities import value_error_tuplelist_to_hist, \ value_tuplelist_to_hist, 
value_errors_tuplelist_to_graph, graph_to_value_errors_tuplelist @@ -36,11 +36,11 @@ @xsec_04_log.trace() def read_xsection_measurement_results( category, channel ): - global path_to_JSON, variable, met_type, phase_space, method + global path_to_DF, variable, met_type, phase_space, method file_template = 'TESTING/{path}/{category}/{name}_{channel}_{method}{suffix}.txt' filename = file_template.format( - path = path_to_JSON, + path = path_to_DF, category = category, name = 'xsection_normalised', channel = channel, @@ -97,7 +97,7 @@ def read_xsection_measurement_results( category, channel ): ) filename = file_template.format( - path = path_to_JSON, + path = path_to_DF, category = category, name = 'xsection_normalised', channel = channel, @@ -116,7 +116,7 @@ def read_xsection_measurement_results( category, channel ): xsec_04_log.debug('Reading file {0}'.format(filename)) # filename = file_template.format( -# path = path_to_JSON, +# path = path_to_DF, # category = category, # name = 'normalised_xsection', # channel = channel, @@ -682,12 +682,12 @@ def get_unit_string(fit_variable): output_folder += '/' met_type = translate_options[options.metType] b_tag_bin = translate_options[options.bjetbin] - path_to_JSON = '{path}/{com}TeV/{variable}/{phase_space}/' - path_to_JSON = path_to_JSON.format(path = options.path, com = options.CoM, + path_to_DF = '{path}/{com}TeV/{variable}/{phase_space}/' + path_to_DF = path_to_DF.format(path = options.path, com = options.CoM, variable = variable, phase_space = phase_space, ) -# path_to_JSON = options.path + '/' + str( measurement_config.centre_of_mass_energy ) + 'TeV/' + variable + '/' +# path_to_DF = options.path + '/' + str( measurement_config.centre_of_mass_energy ) + 'TeV/' + variable + '/' all_measurements = deepcopy( measurement_config.measurements ) pdf_uncertainties = ['PDFWeights_%d' % index for index in range( 1, 45 )] From 06d610102bca53897d45e0a9919f1bf36dc4fc85 Mon Sep 17 00:00:00 2001 From: Douglas Burns Date: Mon, 12 Dec 
2016 13:28:11 +0000 Subject: [PATCH 53/90] mvoe argparser to function and update --- .../xsection/04_make_plots_matplotlib.py | 181 +++++++++++------- 1 file changed, 111 insertions(+), 70 deletions(-) diff --git a/dps/analysis/xsection/04_make_plots_matplotlib.py b/dps/analysis/xsection/04_make_plots_matplotlib.py index 2b386687..6bf912d8 100644 --- a/dps/analysis/xsection/04_make_plots_matplotlib.py +++ b/dps/analysis/xsection/04_make_plots_matplotlib.py @@ -1,6 +1,6 @@ # the result of the division will be always a float from __future__ import division, print_function -from optparse import OptionParser +from argparse import ArgumentParser import os, gc from copy import deepcopy @@ -632,62 +632,99 @@ def get_unit_string(fit_variable): return unit_string + +def parse_arguments(): + parser = ArgumentParser() + parser.add_argument( "-p", "--path", + dest = "path", + default = 'data/normalisation/background_subtraction/', + help = "set path to files containing dataframes" + ) + parser.add_argument( "-o", "--output_folder", + dest = "output_folder", + default = 'plots/', + help = "set path to save plots" + ) + parser.add_argument( "-v", "--variable", + dest = "variable", + default = 'MET', + help = "set variable to plot (MET, HT, ST, WPT, NJets, lepton_pt, abs_lepton_eta )" + ) + parser.add_argument( "-m", "--metType", + dest = "metType", + default = 'type1', + help = "set MET type used in the analysis of MET, ST or MT" + ) + parser.add_argument( "-b", "--bjetbin", + dest = "bjetbin", + default = '2m', + help = "set b-jet multiplicity for analysis. Options: exclusive: 0-3, inclusive (N or more): 0m, 1m, 2m, 3m, 4m" + ) + parser.add_argument( "-c", "--centre-of-mass-energy", + dest = "CoM", + default = 13, + type = int, + help = "set the centre of mass energy for analysis. 
Default = 13 [TeV]" + ) + parser.add_argument( "-a", "--additional-plots", + action = "store_true", + dest = "additional_plots", + help = "Draws additional plots like the comparison of different systematics to the central result." + ) + parser.add_argument( "-g", "--show-generator-ratio", + action = "store_true", + dest = "show_generator_ratio", + help = "Show the ratio of generators to central" + ) + parser.add_argument( "-d", "--debug", + action = "store_true", + dest = "debug", + help = "Enables debugging output" + ) + parser.add_argument( '--visiblePS', + dest = "visiblePS", + action = "store_true", + help = "Unfold to visible phase space" + ) + parser.add_argument( "-u", "--unfolding_method", + dest = "unfolding_method", + default = 'TUnfold', + help = "Unfolding method: TUnfold (default)" + ) + + args = parser.parse_args() + return args + if __name__ == '__main__': set_root_defaults() - parser = OptionParser() - parser.add_option( "-p", "--path", dest = "path", default = 'data/M3_angle_bl/', - help = "set path to JSON files" ) - parser.add_option( "-o", "--output_folder", dest = "output_folder", default = 'plots/', - help = "set path to save plots" ) - parser.add_option( "-v", "--variable", dest = "variable", default = 'MET', - help = "set variable to plot (MET, HT, ST, MT)" ) - parser.add_option( "-m", "--metType", dest = "metType", default = 'type1', - help = "set MET type used in the analysis of MET, ST or MT" ) - parser.add_option( "-b", "--bjetbin", dest = "bjetbin", default = '2m', - help = "set b-jet multiplicity for analysis. Options: exclusive: 0-3, inclusive (N or more): 0m, 1m, 2m, 3m, 4m" ) - parser.add_option( "-c", "--centre-of-mass-energy", dest = "CoM", default = 13, type = int, - help = "set the centre of mass energy for analysis. 
Default = 13 [TeV]" ) - parser.add_option( "-a", "--additional-plots", action = "store_true", dest = "additional_plots", - help = """Draws additional plots like the comparison of different - systematics to the central result.""" ) - parser.add_option( "-g", "--show-generator-ratio", action = "store_true", dest = "show_generator_ratio", - help = "Show the ratio of generators to central" ) - parser.add_option( "-d", "--debug", action = "store_true", dest = "debug", - help = """Enables debugging output""" ) - parser.add_option("--draw-systematics", action = "store_true", dest = "draw_systematics", - help = "creates a set of plots for each systematic (in addition to central result)." ) - parser.add_option( '--visiblePS', dest = "visiblePS", action = "store_true", - help = "Unfold to visible phase space" ) - parser.add_option( "-u", "--unfolding_method", dest = "unfolding_method", default = 'TUnfold', - help = "Unfolding method: TUnfold (default), RooUnfoldSvd, TSVDUnfold, RooUnfoldTUnfold, RooUnfoldInvert, RooUnfoldBinByBin, RooUnfoldBayes" ) + args = parse_arguments() - output_formats = ['pdf'] - ( options, args ) = parser.parse_args() - if options.debug: + if args.debug: log.setLevel(log.DEBUG) - measurement_config = XSectionConfig( options.CoM ) + output_formats = ['pdf'] + measurement_config = XSectionConfig( args.CoM ) # caching of variables for shorter access - method = options.unfolding_method + method = args.unfolding_method translate_options = measurement_config.translate_options - variable = options.variable - show_generator_ratio = options.show_generator_ratio - visiblePS = options.visiblePS + variable = args.variable + show_generator_ratio = args.show_generator_ratio + visiblePS = args.visiblePS phase_space = 'FullPS' if visiblePS: phase_space = 'VisiblePS' - output_folder = options.output_folder + output_folder = args.output_folder if not output_folder.endswith( '/' ): output_folder += '/' - met_type = translate_options[options.metType] - b_tag_bin = 
translate_options[options.bjetbin] + met_type = translate_options[args.metType] + b_tag_bin = translate_options[args.bjetbin] path_to_DF = '{path}/{com}TeV/{variable}/{phase_space}/' - path_to_DF = path_to_DF.format(path = options.path, com = options.CoM, + path_to_DF = path_to_DF.format(path = args.path, com = args.CoM, variable = variable, phase_space = phase_space, ) -# path_to_DF = options.path + '/' + str( measurement_config.centre_of_mass_energy ) + 'TeV/' + variable + '/' +# path_to_DF = args.path + '/' + str( measurement_config.centre_of_mass_energy ) + 'TeV/' + variable + '/' all_measurements = deepcopy( measurement_config.measurements ) pdf_uncertainties = ['PDFWeights_%d' % index for index in range( 1, 45 )] @@ -697,12 +734,12 @@ def get_unit_string(fit_variable): for channel in ['muon']: for category in all_measurements: - if not category == 'central' and not options.additional_plots: continue + if not category == 'central' and not args.additional_plots: continue # if variable == 'HT' and category in met_uncertainties: # continue # setting up systematic MET for JES up/down samples for reading fit templates - met_type = translate_options[options.metType] + met_type = translate_options[args.metType] if category == 'JES_up': met_type += 'JetEnUp' elif category == 'JES_down': @@ -718,38 +755,42 @@ def get_unit_string(fit_variable): # plot_fit_results( fit_results, category, channel ) # change back to original MET type - met_type = translate_options[options.metType] + met_type = translate_options[args.metType] if met_type == 'PFMET': met_type = 'patMETsPFlow' histograms_normalised_xsection_different_generators, histograms_normalised_xsection_systematics_shifts = read_xsection_measurement_results( category, channel ) - histname = '{variable}_normalised_xsection_{channel}_{phase_space}' - histname = histname.format(variable = variable, channel = channel, - phase_space = phase_space) - if method != 'RooUnfoldSvd': - histname += '_' + method - make_plots( 
histograms_normalised_xsection_different_generators, category, output_folder, histname + '_different_generators', show_generator_ratio = show_generator_ratio ) - make_plots( histograms_normalised_xsection_systematics_shifts, category, output_folder, histname + '_systematics_shifts' ) + histname = '{variable}_normalised_xsection_{channel}_{phase_space}_{method}' + histname = histname.format( + variable = variable, + channel = channel, + phase_space = phase_space, + method = method + ) + + make_plots( + histograms_normalised_xsection_different_generators, + category, + output_folder, + histname + '_different_generators', + show_generator_ratio = show_generator_ratio + ) + make_plots( + histograms_normalised_xsection_systematics_shifts, + category, + output_folder, + histname + '_systematics_shifts' + ) del histograms_normalised_xsection_different_generators, histograms_normalised_xsection_systematics_shifts - if options.additional_plots: - plot_central_and_systematics( channel, measurements, exclude = ttbar_generator_systematics ) - - plot_central_and_systematics( channel, ttbar_generator_systematics, suffix = 'ttbar_generator_only' ) - -# exclude = set( pdf_uncertainties ).difference( set( pdf_uncertainties_1_to_11 ) ) -# plot_central_and_systematics( channel, pdf_uncertainties_1_to_11, exclude = exclude, suffix = 'PDF_1_to_11' ) -# -# exclude = set( pdf_uncertainties ).difference( set( pdf_uncertainties_12_to_22 ) ) -# plot_central_and_systematics( channel, pdf_uncertainties_12_to_22, exclude = exclude, suffix = 'PDF_12_to_22' ) -# -# exclude = set( pdf_uncertainties ).difference( set( pdf_uncertainties_23_to_33 ) ) -# plot_central_and_systematics( channel, pdf_uncertainties_23_to_33, exclude = exclude, suffix = 'PDF_23_to_33' ) -# -# exclude = set( pdf_uncertainties ).difference( set( pdf_uncertainties_34_to_45 ) ) -# plot_central_and_systematics( channel, pdf_uncertainties_34_to_45, exclude = exclude, suffix = 'PDF_34_to_45' ) -# -# plot_central_and_systematics( 
channel, met_uncertainties, suffix = 'MET_only' ) -# plot_central_and_systematics( channel, new_uncertainties, suffix = 'new_only' ) - plot_central_and_systematics( channel, rate_changing_systematics, suffix = 'rate_changing_only' ) + # if args.additional_plots: + # TODO + # Generator Only + # PDF Only + # MET Only + # Rate Changing Only + # etc... + # plot_central_and_systematics( channel, measurements, exclude = ttbar_generator_systematics ) + # plot_central_and_systematics( channel, ttbar_generator_systematics, suffix = 'ttbar_generator_only' ) + # plot_central_and_systematics( channel, rate_changing_systematics, suffix = 'rate_changing_only' ) From fd3d8e5edb8f8b4f1db74549892eba2ad98aff41 Mon Sep 17 00:00:00 2001 From: Douglas Burns Date: Mon, 12 Dec 2016 13:36:26 +0000 Subject: [PATCH 54/90] remove btagbin for now --- dps/analysis/xsection/04_make_plots_matplotlib.py | 10 +--------- 1 file changed, 1 insertion(+), 9 deletions(-) diff --git a/dps/analysis/xsection/04_make_plots_matplotlib.py b/dps/analysis/xsection/04_make_plots_matplotlib.py index 6bf912d8..87609ec3 100644 --- a/dps/analysis/xsection/04_make_plots_matplotlib.py +++ b/dps/analysis/xsection/04_make_plots_matplotlib.py @@ -156,7 +156,6 @@ def read_xsection_measurement_results( category, channel ): @xsec_04_log.trace() def get_cms_labels( channel ): - global b_tag_bin lepton = 'e' if channel == 'electron': lepton = 'e + jets' @@ -164,7 +163,6 @@ def get_cms_labels( channel ): lepton = '$\mu$ + jets' else: lepton = 'e, $\mu$ + jets combined' -# channel_label = '%s, $\geq$ 4 jets, %s' % ( lepton, b_tag_bins_latex[b_tag_bin] ) channel_label = lepton template = '%.1f fb$^{-1}$ (%d TeV)' label = template % ( measurement_config.new_luminosity/1000, measurement_config.centre_of_mass_energy) @@ -556,7 +554,7 @@ def make_plots( histograms, category, output_folder, histname, show_ratio = True @xsec_04_log.trace() def plot_central_and_systematics( channel, systematics, exclude = [], suffix = 'altogether' ): 
- global variable, b_tag_bin, met_type + global variable, met_type plt.figure( figsize = ( 16, 16 ), dpi = 200, facecolor = 'white' ) axes = plt.axes() @@ -655,11 +653,6 @@ def parse_arguments(): default = 'type1', help = "set MET type used in the analysis of MET, ST or MT" ) - parser.add_argument( "-b", "--bjetbin", - dest = "bjetbin", - default = '2m', - help = "set b-jet multiplicity for analysis. Options: exclusive: 0-3, inclusive (N or more): 0m, 1m, 2m, 3m, 4m" - ) parser.add_argument( "-c", "--centre-of-mass-energy", dest = "CoM", default = 13, @@ -718,7 +711,6 @@ def parse_arguments(): if not output_folder.endswith( '/' ): output_folder += '/' met_type = translate_options[args.metType] - b_tag_bin = translate_options[args.bjetbin] path_to_DF = '{path}/{com}TeV/{variable}/{phase_space}/' path_to_DF = path_to_DF.format(path = args.path, com = args.CoM, variable = variable, From 5fcf34c649d8271c2d3ba7dc8198c480852e9077 Mon Sep 17 00:00:00 2001 From: Douglas Burns Date: Mon, 12 Dec 2016 15:12:09 +0000 Subject: [PATCH 55/90] Add comments --- .../xsection/04_make_plots_matplotlib.py | 607 +++++++++--------- 1 file changed, 314 insertions(+), 293 deletions(-) diff --git a/dps/analysis/xsection/04_make_plots_matplotlib.py b/dps/analysis/xsection/04_make_plots_matplotlib.py index 87609ec3..c49c20ef 100644 --- a/dps/analysis/xsection/04_make_plots_matplotlib.py +++ b/dps/analysis/xsection/04_make_plots_matplotlib.py @@ -2,6 +2,7 @@ from __future__ import division, print_function from argparse import ArgumentParser import os, gc +import sys from copy import deepcopy from dps.config.latex_labels import variables_latex, measurements_latex @@ -36,63 +37,63 @@ @xsec_04_log.trace() def read_xsection_measurement_results( category, channel ): - global path_to_DF, variable, met_type, phase_space, method + ''' + Reading the unfolded xsection results from DFs into graphs + ''' + global path_to_DF, variable, phase_space, method file_template = 
'TESTING/{path}/{category}/{name}_{channel}_{method}{suffix}.txt' filename = file_template.format( - path = path_to_DF, - category = category, - name = 'xsection_normalised', - channel = channel, - method = method, - suffix = '', - ) + path = path_to_DF, + category = category, + name = 'xsection_normalised', + channel = channel, + method = method, + suffix = '', + ) xsec_04_log.debug('Reading file {0}'.format(filename)) - normalised_xsection_unfolded = read_tuple_from_file( filename ) + edges = bin_edges_full[variable] if phase_space == 'VisiblePS': edges = bin_edges_vis[variable] - h_normalised_xsection = value_error_tuplelist_to_hist( normalised_xsection_unfolded['TTJet_measured'], edges ) - h_normalised_xsection_unfolded = value_error_tuplelist_to_hist( normalised_xsection_unfolded['TTJet_unfolded'], edges ) - - histograms_normalised_xsection_different_generators = {'measured':h_normalised_xsection, - 'unfolded':h_normalised_xsection_unfolded} + # Collect the cross section measured/unfolded results from dataframes + normalised_xsection_unfolded = read_tuple_from_file( filename ) + h_normalised_xsection = value_error_tuplelist_to_hist( normalised_xsection_unfolded['TTJet_measured'], edges ) + h_normalised_xsection_unfolded = value_error_tuplelist_to_hist( normalised_xsection_unfolded['TTJet_unfolded'], edges ) - histograms_normalised_xsection_systematics_shifts = {'measured':h_normalised_xsection, - 'unfolded':h_normalised_xsection_unfolded} + histograms_normalised_xsection_different_generators = { + 'measured':h_normalised_xsection, + 'unfolded':h_normalised_xsection_unfolded, + } + histograms_normalised_xsection_systematics_shifts = deepcopy( histograms_normalised_xsection_different_generators ) if category == 'central': - # true distributions + + # Add in distributions for the different MC to be shown h_normalised_xsection_powhegPythia8 = value_error_tuplelist_to_hist( normalised_xsection_unfolded['powhegPythia8'], edges ) h_normalised_xsection_amcatnlo = 
value_error_tuplelist_to_hist( normalised_xsection_unfolded['amcatnlo'], edges ) h_normalised_xsection_madgraphMLM = value_error_tuplelist_to_hist( normalised_xsection_unfolded['madgraphMLM'], edges ) h_normalised_xsection_powhegHerwigpp = value_error_tuplelist_to_hist( normalised_xsection_unfolded['powhegHerwig'], edges ) - # h_normalised_xsection_amcatnloHerwigpp = value_error_tuplelist_to_hist( normalised_xsection_unfolded['amcatnloHerwig'], edges ) - # h_normalised_xsection_scaleup = value_error_tuplelist_to_hist( normalised_xsection_unfolded['scaleup'], edges ) - # h_normalised_xsection_scaledown = value_error_tuplelist_to_hist( normalised_xsection_unfolded['scaledown'], edges ) h_normalised_xsection_massup = value_error_tuplelist_to_hist( normalised_xsection_unfolded['massup'], edges ) h_normalised_xsection_massdown = value_error_tuplelist_to_hist( normalised_xsection_unfolded['massdown'], edges ) + # And update histograms_normalised_xsection_different_generators.update( { - 'powhegPythia8':h_normalised_xsection_powhegPythia8, - 'amcatnloPythia8':h_normalised_xsection_amcatnlo, - 'madgraphMLM':h_normalised_xsection_madgraphMLM, - 'powhegHerwig':h_normalised_xsection_powhegHerwigpp, - # 'amcatnloHerwig':h_normalised_xsection_amcatnloHerwigpp, + 'powhegPythia8' : h_normalised_xsection_powhegPythia8, + 'amcatnloPythia8' : h_normalised_xsection_amcatnlo, + 'madgraphMLM' : h_normalised_xsection_madgraphMLM, + 'powhegHerwig' : h_normalised_xsection_powhegHerwigpp, } ) - histograms_normalised_xsection_systematics_shifts.update( { - 'powhegPythia8':h_normalised_xsection_powhegPythia8, - # 'scaledown': h_normalised_xsection_scaledown, - # 'scaleup': h_normalised_xsection_scaleup, - 'massdown': h_normalised_xsection_massdown, - 'massup': h_normalised_xsection_massup + 'powhegPythia8' : h_normalised_xsection_powhegPythia8, + 'massdown' : h_normalised_xsection_massdown, + 'massup' : h_normalised_xsection_massup } ) @@ -105,51 +106,25 @@ def 
read_xsection_measurement_results( category, channel ): suffix = '_summary_absolute', ) + # Now for the systematic uncertainties normalised_xsection_unfolded_with_errors = file_to_df( filename ) normalised_xsection_unfolded_with_errors['TTJet_unfolded'] = tupleise_cols( normalised_xsection_unfolded_with_errors['central'], normalised_xsection_unfolded_with_errors['systematic'], ) - print(normalised_xsection_unfolded_with_errors['TTJet_unfolded']) - xsec_04_log.debug('Reading file {0}'.format(filename)) -# filename = file_template.format( -# path = path_to_DF, -# category = category, -# name = 'normalised_xsection', -# channel = channel, -# method = method, -# suffix = '_with_systematics_but_without_generator_errors', -# ) - ### normalised_xsection_unfolded_with_errors_with_systematics_but_without_ttbar_theory = read_tuple_from_file( file_template + '_with_systematics_but_without_ttbar_theory_errors.txt' ) -# normalised_xsection_unfolded_with_errors_with_systematics_but_without_generator = normalised_xsection_unfolded_with_errors - - # a rootpy.Graph with asymmetric errors! 
- ### h_normalised_xsection_with_systematics_but_without_ttbar_theory = value_errors_tuplelist_to_graph( - ### normalised_xsection_unfolded_with_errors_with_systematics_but_without_ttbar_theory['TTJet_measured'], - ### edges ) - ### h_normalised_xsection_with_systematics_but_without_ttbar_theory_unfolded = value_errors_tuplelist_to_graph( - ### normalised_xsection_unfolded_with_errors_with_systematics_but_without_ttbar_theory['TTJet_unfolded'], - ### edges ) - - # h_normalised_xsection_unfolded_with_errors = value_errors_tuplelist_to_graph( - # normalised_xsection_unfolded_with_errors['TTJet_measured'], - # edges ) + + # Transform unfolded data into graph form h_normalised_xsection_unfolded_with_errors_unfolded = value_errors_tuplelist_to_graph( normalised_xsection_unfolded_with_errors['TTJet_unfolded'], edges, is_symmetric_errors=True ) - - # histograms_normalised_xsection_different_generators['measured_with_systematics'] = h_normalised_xsection_with_systematics_but_without_ttbar_theory - # histograms_normalised_xsection_different_generators['unfolded_with_systematics'] = h_normalised_xsection_with_systematics_but_without_ttbar_theory_unfolded - # histograms_normalised_xsection_different_generators['measured_with_systematics'] = h_normalised_xsection_unfolded_with_errors + # Add to list of histograms histograms_normalised_xsection_different_generators['unfolded_with_systematics'] = h_normalised_xsection_unfolded_with_errors_unfolded - - # histograms_normalised_xsection_systematics_shifts['measured_with_systematics'] = h_normalised_xsection_unfolded_with_errors histograms_normalised_xsection_systematics_shifts['unfolded_with_systematics'] = h_normalised_xsection_unfolded_with_errors_unfolded return histograms_normalised_xsection_different_generators, histograms_normalised_xsection_systematics_shifts @@ -169,10 +144,14 @@ def get_cms_labels( channel ): return label, channel_label @xsec_04_log.trace() -def make_plots( histograms, category, output_folder, histname, 
show_ratio = True, show_generator_ratio = False, show_before_unfolding = False ): +def make_plots( histograms, category, output_folder, histname, show_ratio = False, show_generator_ratio = False, show_before_unfolding = False ): global variable, phase_space - channel = 'electron' + if show_generator_ratio and not show_ratio: + print("Cannot be done, Use both show_ratio and show_generator_ratio") + sys.exit() + + channel = '' if 'electron' in histname: channel = 'electron' elif 'muon' in histname: @@ -180,106 +159,126 @@ def make_plots( histograms, category, output_folder, histname, show_ratio = True else: channel = 'combined' - # plot with matplotlib + # Initailise data histograms hist_data = histograms['unfolded'] - if category == 'central': - hist_data_with_systematics = histograms['unfolded_with_systematics'] - hist_measured = histograms['measured'] - hist_data.markersize = 2 hist_data.marker = 'o' - if category == 'central': + hist_data_with_systematics = histograms['unfolded_with_systematics'] hist_data_with_systematics.markersize = 2 hist_data_with_systematics.marker = 'o' - hist_measured.markersize = 2 - hist_measured.marker = 'o' - hist_measured.color = 'red' - + # Create base figure to be plotted plt.figure( figsize = CMS.figsize, dpi = CMS.dpi, facecolor = CMS.facecolor ) + # Split into 3 for MC/Data ratio and generator ratio and plot if show_ratio and show_generator_ratio: gs = gridspec.GridSpec( 3, 1, height_ratios = [5, 1, 1] ) axes = plt.subplot( gs[0] ) + # Split into 2 for MC/Data ratio or generator Ratio and plot elif show_ratio or show_generator_ratio: gs = gridspec.GridSpec( 2, 1, height_ratios = [5, 1] ) axes = plt.subplot( gs[0] ) + # Just 1 for plot and setup x axis labels else: axes = plt.axes() - if variable in ['NJets', 'abs_lepton_eta', 'lepton_eta']: - plt.xlabel( '$%s$' % variables_latex[variable], CMS.x_axis_title ) - else: - plt.xlabel( '$%s$ [GeV]' % variables_latex[variable], CMS.x_axis_title ) - - if not variable in ['NJets']: - 
axes.minorticks_on() - if variable in ['NJets', 'abs_lepton_eta', 'lepton_eta']: - plt.ylabel( r'$\frac{1}{\sigma} \frac{d\sigma}{d' + variables_latex[variable] + '}$', CMS.y_axis_title ) - else: - plt.ylabel( r'$\frac{1}{\sigma} \frac{d\sigma}{d' + variables_latex[variable] + '} \left[\mathrm{GeV}^{-1}\\right]$', CMS.y_axis_title ) + x_label = '${}$'.format(variables_latex[variable]) + if variable in ['HT', 'ST', 'MET', 'WPT']: + x_label += ' [GeV]' + plt.xlabel( x_label, CMS.x_axis_title ) + + # set y axis x-section labels + y_label = r'$\frac{1}{\sigma} \frac{d\sigma}{d' + variables_latex[variable] + '}$' + if variable in ['HT', 'ST', 'MET', 'WPT']: + y_label.replace('}$', ' \left[\mathrm{GeV}^{-1}\\right]$') + plt.ylabel( y_label, CMS.y_axis_title ) + + # Set up ticks on axis. Minor ticks on axis for non NJet variables plt.tick_params( **CMS.axis_label_major ) if not variable in ['NJets']: + axes.minorticks_on() plt.tick_params( **CMS.axis_label_minor ) + # Set raw unfolded data with stat+unfolding uncertianty to be visible hist_data.visible = True + # Set raw unfolded data with systematic uncertianty to be visible + # label = 'do_not_show' = do not show in legend if category == 'central': hist_data_with_systematics.visible = True - rplt.errorbar( hist_data_with_systematics, axes = axes, label = 'do_not_show', xerr = None, capsize = 0, elinewidth = 2, zorder = len( histograms ) + 1 ) - rplt.errorbar( hist_data, axes = axes, label = 'do_not_show', xerr = None, capsize = 15, capthick = 3, elinewidth = 2, zorder = len( histograms ) + 2 ) - rplt.errorbar( hist_data, axes = axes, label = 'data', xerr = None, yerr = False, zorder = len( histograms ) + 3 ) # this makes a nicer legend entry - - if show_before_unfolding: - rplt.errorbar( hist_measured, axes = axes, label = 'data (before unfolding)', xerr = None, zorder = len( histograms ) ) + rplt.errorbar( + hist_data_with_systematics, + axes = axes, + label = 'do_not_show', + xerr = None, + capsize = 0, + elinewidth = 
2, + zorder = len( histograms ) + 1 + ) + + # Show stat+unf uncertainty on plot + rplt.errorbar( hist_data, + axes = axes, + label = 'do_not_show', + xerr = None, + capsize = 15, + capthick = 3, + elinewidth = 2, + zorder = len( histograms ) + 2 + ) + # And one for a nice legend entry + rplt.errorbar( hist_data, + axes = axes, + label = 'data', + xerr = None, + yerr = False, + zorder = len( histograms ) + 3 + ) dashes = {} for key, hist in sorted( histograms.items() ): zorder = sorted( histograms, reverse = False ).index( key ) - print (key) + + # Ordering such that systematic uncertainties are plotted first then central powhegPythia then data if key == 'powhegPythia8' and zorder != len(histograms) - 3: zorder = len(histograms) - 3 elif key != 'powhegPythia8' and not 'unfolded' in key: while zorder >= len(histograms) - 3: zorder = zorder - 1 + # Colour and style of MC hists if not 'unfolded' in key and not 'measured' in key: hist.linewidth = 4 - # setting colours linestyle = None - if 'powhegHerwig' in key or 'massdown' in key: - hist.SetLineColor( kBlue ) - dashes[key] = [25,5,5,5,5,5,5,5] - elif 'madgraphMLM' in key or 'scaledown' in key: - hist.SetLineColor( 417 ) - dashes[key] = [5,5] - elif 'MADGRAPH_ptreweight' in key: - hist.SetLineColor( kBlack ) - elif 'powhegPythia8' in key: + + if 'powhegPythia8' in key: linestyle = 'solid' dashes[key] = None hist.SetLineColor( 633 ) + elif 'powhegHerwig' in key or 'massdown' in key: + hist.SetLineColor( kBlue ) + dashes[key] = [25,5,5,5,5,5,5,5] elif 'amcatnloPythia8' in key or 'massup' in key: hist.SetLineColor( 807 ) dashes[key] = [20,5] - # elif 'amcatnloHerwig' in key: - # hist.SetLineColor( 734 ) - # dashes[key] = [15,5] - elif 'MCATNLO' in key or 'scaleup' in key: - hist.SetLineColor( 619 ) - dashes[key] = [5,5,10,5] + elif 'madgraphMLM' in key: + hist.SetLineColor( 417 ) + dashes[key] = [5,5] if linestyle != None: hist.linestyle = linestyle + # Add hist to plot line, h = rplt.hist( hist, axes = axes, label = 
measurements_latex[key], zorder = zorder ) + # Set the dashes and lines if dashes[key] != None: line.set_dashes(dashes[key]) h.set_dashes(dashes[key]) handles, labels = axes.get_legend_handles_labels() - # making data first in the list + + # Making data first in the legend data_label_index = labels.index( 'data' ) data_handle = handles[data_label_index] labels.remove( 'data' ) @@ -287,13 +286,13 @@ def make_plots( histograms, category, output_folder, histname, show_ratio = True labels.insert( 0, 'data' ) handles.insert( 0, data_handle ) + # Order the rest of the labels in the legend new_handles, new_labels = [], [] zipped = dict( zip( labels, handles ) ) labelOrder = ['data', measurements_latex['powhegPythia8'], measurements_latex['amcatnloPythia8'], measurements_latex['powhegHerwig'], - measurements_latex['amcatnloHerwig'], measurements_latex['madgraphMLM'], measurements_latex['scaleup'], measurements_latex['scaledown'], @@ -305,6 +304,7 @@ def make_plots( histograms, category, output_folder, histname, show_ratio = True new_handles.append(zipped[label]) new_labels.append(label) + # Location of the legend legend_location = (0.97, 0.82) if variable == 'MT': legend_location = (0.05, 0.82) @@ -314,13 +314,21 @@ def make_plots( histograms, category, output_folder, histname, show_ratio = True legend_location = (1.0, 0.84) elif variable == 'abs_lepton_eta': legend_location = (1.0, 0.94) - plt.legend( new_handles, new_labels, numpoints = 1, prop = CMS.legend_properties, frameon = False, bbox_to_anchor=legend_location, - bbox_transform=plt.gcf().transFigure ) - label, channel_label = get_cms_labels( channel ) - # title - plt.title( label,loc='right', **CMS.title ) - # CMS text + + # Add legend to plot + plt.legend( new_handles, new_labels, + numpoints = 1, + prop = CMS.legend_properties, + frameon = False, + bbox_to_anchor=legend_location, + bbox_transform=plt.gcf().transFigure + ) + + # Title and CMS labels # note: fontweight/weight does not change anything as we use 
Latex text!!! + plt.title( label,loc='right', **CMS.title ) + label, channel_label = get_cms_labels( channel ) + # Locations of labels logo_location = (0.05, 0.98) prelim_location = (0.05, 0.92) channel_location = ( 0.05, 0.86) @@ -332,71 +340,91 @@ def make_plots( histograms, category, output_folder, histname, show_ratio = True logo_location = (0.03, 0.98) prelim_location = (0.03, 0.92) channel_location = (0.03, 0.86) - plt.text(logo_location[0], logo_location[1], r"\textbf{CMS}", transform=axes.transAxes, fontsize=42, - verticalalignment='top',horizontalalignment='left') + + # Add labels to plot + plt.text(logo_location[0], logo_location[1], + r"\textbf{CMS}", + transform=axes.transAxes, + fontsize=42, + verticalalignment='top', + horizontalalignment='left' + ) # preliminary - plt.text(prelim_location[0], prelim_location[1], r"\emph{Preliminary}", - transform=axes.transAxes, fontsize=42, - verticalalignment='top',horizontalalignment='left') + plt.text(prelim_location[0], prelim_location[1], + r"\emph{Preliminary}", + transform=axes.transAxes, + fontsize=42, + verticalalignment='top', + horizontalalignment='left' + ) # channel text - plt.text(channel_location[0], channel_location[1], r"\emph{%s}" %channel_label, transform=axes.transAxes, fontsize=40, - verticalalignment='top',horizontalalignment='left') + plt.text(channel_location[0], channel_location[1], + r"\emph{{cl}}".format(cl=channel_label), + transform=axes.transAxes, + fontsize=40, + verticalalignment='top', + horizontalalignment='left' + ) + + # Set y limits on plot ylim = axes.get_ylim() if ylim[0] < 0: axes.set_ylim( ymin = 0.) 
- if variable == 'WPT': - axes.set_ylim(ymax = ylim[1]*1.3) - elif variable == 'abs_lepton_eta': - axes.set_ylim(ymax = ylim[1]*1.3) - else : - axes.set_ylim(ymax = ylim[1]*1.2) - + axes.set_ylim(ymax = ylim[1]*1.3) - if show_ratio or show_generator_ratio: + # Now to show either of the ratio plots + if show_ratio: + # Set previous x axis ticks and labels to invisible plt.setp( axes.get_xticklabels(), visible = False ) + # Go to ratio subplot ax1 = plt.subplot( gs[1] ) - if not variable in ['NJets']: - ax1.minorticks_on() - #ax1.grid( True, 'major', linewidth = 1 ) + # setting the x_limits identical to the main plot x_limits = axes.get_xlim() ax1.set_xlim(x_limits) + + # Setting tick marks ax1.yaxis.set_major_locator( MultipleLocator( 0.5 ) ) + plt.tick_params( **CMS.axis_label_major ) if not variable in ['NJets']: + ax1.minorticks_on() ax1.yaxis.set_minor_locator( MultipleLocator( 0.1 ) ) + plt.tick_params( **CMS.axis_label_minor ) - if not show_ratio or not show_generator_ratio: - if variable in ['NJets', 'abs_lepton_eta', 'lepton_eta']: - plt.xlabel('$%s$' % variables_latex[variable], CMS.x_axis_title ) - else: - plt.xlabel( '$%s$ [GeV]' % variables_latex[variable], CMS.x_axis_title ) + # x axis labels as before + x_label = '${}$'.format(variables_latex[variable]) + if variable in ['HT', 'ST', 'MET', 'WPT']: + x_label += ' [GeV]' + plt.xlabel( x_label, CMS.x_axis_title ) - plt.tick_params( **CMS.axis_label_major ) - if not variable in ['NJets']: - plt.tick_params( **CMS.axis_label_minor ) - plt.ylabel( '$\\frac{\\textrm{pred.}}{\\textrm{data}}$', CMS.y_axis_title ) + y_label = '$\\frac{\\textrm{pred.}}{\\textrm{data}}$' + plt.ylabel( y_label, CMS.y_axis_title ) ax1.yaxis.set_label_coords(-0.115, 0.8) - #draw a horizontal line at y=1 for data + + # Draw a horizontal line at y=1 for data plt.axhline(y = 1, color = 'black', linewidth = 2) + # Create ratios and plot to subplot for key, hist in sorted( histograms.iteritems() ): if not 'unfolded' in key and not 
'measured' in key: ratio = hist.Clone() - ratio.Divide( hist_data ) #divide by data + ratio.Divide( hist_data ) line, h = rplt.hist( ratio, axes = ax1, label = 'do_not_show' ) if dashes[key] != None: line.set_dashes(dashes[key]) h.set_dashes(dashes[key]) + # Now for the error bands stat_lower = hist_data.Clone() stat_upper = hist_data.Clone() syst_lower = hist_data.Clone() syst_upper = hist_data.Clone() - # plot error bands on data in the ratio plot + # Plot relative error bands on data in the ratio plot stat_errors = graph_to_value_errors_tuplelist(hist_data) if category == 'central': syst_errors = graph_to_value_errors_tuplelist(hist_data_with_systematics) + for bin_i in range( 1, hist_data.GetNbinsX() + 1 ): stat_value, stat_error, _ = stat_errors[bin_i-1] stat_rel_error = stat_error/stat_value @@ -408,30 +436,41 @@ def make_plots( histograms, category, output_folder, histname, show_ratio = True syst_rel_error_up = syst_error_up/syst_value syst_lower.SetBinContent( bin_i, 1 - syst_rel_error_down ) syst_upper.SetBinContent( bin_i, 1 + syst_rel_error_up ) + + # Colour if category == 'central': - rplt.fill_between( syst_lower, syst_upper, ax1, - color = 'yellow' ) - - rplt.fill_between( stat_upper, stat_lower, ax1, color = '0.75', - ) + rplt.fill_between( + syst_lower, + syst_upper, + ax1, + color = 'yellow' + ) + rplt.fill_between( + stat_upper, + stat_lower, + ax1, + color = '0.75', + ) + # Add legend loc = 'upper left' - # if variable in ['ST']: - # loc = 'upper right' # legend for ratio plot p_stat = mpatches.Patch(facecolor='0.75', label='Stat.', edgecolor='black' ) p_stat_and_syst = mpatches.Patch(facecolor='yellow', label=r'Stat. 
$\oplus$ Syst.', edgecolor='black' ) - l1 = ax1.legend(handles = [p_stat, p_stat_and_syst], loc = loc, - frameon = False, prop = {'size':26}, ncol = 2) - - # ax1.legend(handles = [p_stat_and_syst], loc = 'lower left', - # frameon = False, prop = {'size':30}) + l1 = ax1.legend( + handles = [p_stat, p_stat_and_syst], + loc = loc, + frameon = False, + prop = {'size':26}, + ncol = 2 + ) ax1.add_artist(l1) + # Setting y limits and tick parameters if variable == 'MET': ax1.set_ylim( ymin = 0.8, ymax = 1.2 ) ax1.yaxis.set_major_locator( MultipleLocator( 0.5 ) ) -# ax1.yaxis.set_minor_locator( MultipleLocator( 0.1 ) ) + ax1.yaxis.set_minor_locator( MultipleLocator( 0.1 ) ) if variable == 'MT': ax1.set_ylim( ymin = 0.8, ymax = 1.2 ) ax1.yaxis.set_major_locator( MultipleLocator( 0.2 ) ) @@ -460,41 +499,44 @@ def make_plots( histograms, category, output_folder, histname, show_ratio = True ax1.yaxis.set_minor_locator( MultipleLocator( 0.1 ) ) - if show_ratio and show_generator_ratio: + if show_generator_ratio: - plt.setp( axes.get_xticklabels(), visible = False ) #Remove DataMC Comparision Axis - plt.setp( ax1.get_xticklabels(), visible = False ) # Remove Ratio Axis + # Remove Data/MC Ratio Axis + plt.setp( ax1.get_xticklabels(), visible = False ) ax2 = plt.subplot( gs[2] ) - if not variable in ['NJets']: - ax2.minorticks_on() - #ax2.grid( True, 'major', linewidth = 1 ) + # setting the x_limits identical to the main plot x_limits = axes.get_xlim() ax2.set_xlim(x_limits) + # Setting ticks ax2.yaxis.set_major_locator( MultipleLocator( 0.5 ) ) + plt.tick_params( **CMS.axis_label_major ) if not variable in ['NJets']: + ax2.minorticks_on() ax2.yaxis.set_minor_locator( MultipleLocator( 0.1 ) ) + plt.tick_params( **CMS.axis_label_minor ) - if variable in ['NJets', 'abs_lepton_eta', 'lepton_eta']: - plt.xlabel('$%s$' % variables_latex[variable], CMS.x_axis_title ) - else: - plt.xlabel( '$%s$ [GeV]' % variables_latex[variable], CMS.x_axis_title ) + # x axis labels as before + x_label = 
'${}$'.format(variables_latex[variable]) + if variable in ['HT', 'ST', 'MET', 'WPT']: + x_label += ' [GeV]' + plt.xlabel( x_label, CMS.x_axis_title ) + + y_label = '$\\frac{\\textrm{generator}}{\\textrm{central}}$' + plt.ylabel( y_label, CMS.y_axis_title ) - plt.tick_params( **CMS.axis_label_major ) - if not variable in ['NJets']: - plt.tick_params( **CMS.axis_label_minor ) - plt.ylabel( '$\\frac{\\textrm{generator}}{\\textrm{central}}$', CMS.y_axis_title ) ax2.yaxis.set_label_coords(-0.115, 0.8) - #draw a horizontal line at y=1 for data + + #draw a horizontal line at y=1 for central MC plt.axhline(y = 1, color = 'black', linewidth = 2) central_mc = histograms['powhegPythia8'] for key, hist in sorted( histograms.iteritems() ): if not 'unfolded' in key and not 'measured' in key: ratio = hist.Clone() - ratio.Divide( central_mc ) #divide by data + ratio.Divide( central_mc ) #divide by central mc sample line, h = rplt.hist( ratio, axes = ax2, label = 'do_not_show' ) if dashes[key] != None: line.set_dashes(dashes[key]) @@ -503,7 +545,7 @@ def make_plots( histograms, category, output_folder, histname, show_ratio = True if variable == 'MET': ax2.set_ylim( ymin = 0.8, ymax = 1.2 ) ax2.yaxis.set_major_locator( MultipleLocator( 0.5 ) ) -# ax2.yaxis.set_minor_locator( MultipleLocator( 0.1 ) ) + ax2.yaxis.set_minor_locator( MultipleLocator( 0.1 ) ) if variable == 'MT': ax2.set_ylim( ymin = 0.8, ymax = 1.2 ) ax2.yaxis.set_major_locator( MultipleLocator( 0.2 ) ) @@ -532,91 +574,90 @@ def make_plots( histograms, category, output_folder, histname, show_ratio = True ax2.yaxis.set_minor_locator( MultipleLocator( 0.1 ) ) - if CMS.tight_layout: plt.tight_layout() + # Save the plots path = '{output_folder}/{centre_of_mass_energy}TeV/{phaseSpace}/{variable}/' path = path.format( - output_folder = output_folder, - centre_of_mass_energy = measurement_config.centre_of_mass_energy, - phaseSpace = phase_space, - variable = variable - ) + output_folder = output_folder, + centre_of_mass_energy 
= measurement_config.centre_of_mass_energy, + phaseSpace = phase_space, + variable = variable + ) make_folder_if_not_exists( path ) for output_format in output_formats: filename = path + '/' + histname + '.' + output_format plt.savefig( filename ) - del hist_data, hist_measured - plt.close() - gc.collect() - -@xsec_04_log.trace() -def plot_central_and_systematics( channel, systematics, exclude = [], suffix = 'altogether' ): - global variable, met_type - - plt.figure( figsize = ( 16, 16 ), dpi = 200, facecolor = 'white' ) - axes = plt.axes() - if not variable in ['NJets']: - axes.minorticks_on() - - hist_data_central = read_xsection_measurement_results( 'central', channel )[0]['unfolded_with_systematics'] - hist_data_central.markersize = 2 # points. Imagine, tangible units! - hist_data_central.marker = 'o' - - if variable in ['NJets', 'abs_lepton_eta', 'lepton_eta']: - plt.xlabel( '$%s$' % variables_latex[variable], CMS.x_axis_title ) - plt.ylabel( r'$\frac{1}{\sigma} \frac{d\sigma}{d' + variables_latex[variable] + '}$', CMS.y_axis_title ) - else: - plt.xlabel( '$%s$ [GeV]' % variables_latex[variable], CMS.x_axis_title ) - plt.ylabel( r'$\frac{1}{\sigma} \frac{d\sigma}{d' + variables_latex[variable] + '} \left[\mathrm{GeV}^{-1}\\right]$', CMS.y_axis_title ) - plt.tick_params( **CMS.axis_label_major ) - if not variable in ['NJets']: - plt.tick_params( **CMS.axis_label_minor ) - - rplt.errorbar( hist_data_central, axes = axes, label = 'data', xerr = True ) - - for systematic in sorted( systematics ): - if systematic in exclude or systematic == 'central': - continue - - hist_data_systematic = read_xsection_measurement_results( systematic, channel )[0]['unfolded'] - hist_data_systematic.markersize = 2 - hist_data_systematic.marker = 'o' - colour_number = systematics.index( systematic ) + 2 - if colour_number == 10: - colour_number = 42 - hist_data_systematic.SetMarkerColor( colour_number ) - if 'PDF' in systematic: - rplt.errorbar( hist_data_systematic, axes = axes, 
label = systematic.replace( 'Weights_', ' ' ), xerr = None ) - elif met_type in systematic: - rplt.errorbar( hist_data_systematic, axes = axes, label = measurements_latex[systematic.replace( met_type, '' )], xerr = None ) - else: - rplt.errorbar( hist_data_systematic, axes = axes, label = measurements_latex[systematic], xerr = None ) - - plt.legend( numpoints = 1, loc = 'center right', prop = {'size':25}, ncol = 2 ) - label, channel_label = get_cms_labels( channel ) - plt.title( label, CMS.title ) - # CMS text - # note: fontweight/weight does not change anything as we use Latex text!!! - plt.text(0.95, 0.95, r"\textbf{CMS}", transform=axes.transAxes, fontsize=42, - verticalalignment='top',horizontalalignment='right') - # channel text - axes.text(0.95, 0.90, r"\emph{%s}" %channel_label, transform=axes.transAxes, fontsize=40, - verticalalignment='top',horizontalalignment='right') - plt.tight_layout() - - - path = output_folder + str( measurement_config.centre_of_mass_energy ) + 'TeV/' + variable - make_folder_if_not_exists( path ) - for output_format in output_formats: - filename = path + '/normalised_xsection_' + channel + '_' + suffix + '.' + output_format - - plt.savefig( filename ) - + del hist_data + if 'central' in category: del hist_data_with_systematics plt.close() gc.collect() + return + +# @xsec_04_log.trace() +# def plot_central_and_systematics( channel, systematics, exclude = [], suffix = 'altogether' ): +# global variable + +# plt.figure( figsize = ( 16, 16 ), dpi = 200, facecolor = 'white' ) +# axes = plt.axes() +# if not variable in ['NJets']: +# axes.minorticks_on() + +# hist_data_central = read_xsection_measurement_results( 'central', channel )[0]['unfolded_with_systematics'] +# hist_data_central.markersize = 2 # points. Imagine, tangible units! 
+# hist_data_central.marker = 'o' + +# if variable in ['NJets', 'abs_lepton_eta', 'lepton_eta']: +# plt.xlabel( '$%s$' % variables_latex[variable], CMS.x_axis_title ) +# plt.ylabel( r'$\frac{1}{\sigma} \frac{d\sigma}{d' + variables_latex[variable] + '}$', CMS.y_axis_title ) +# else: +# plt.xlabel( '$%s$ [GeV]' % variables_latex[variable], CMS.x_axis_title ) +# plt.ylabel( r'$\frac{1}{\sigma} \frac{d\sigma}{d' + variables_latex[variable] + '} \left[\mathrm{GeV}^{-1}\\right]$', CMS.y_axis_title ) +# plt.tick_params( **CMS.axis_label_major ) +# if not variable in ['NJets']: +# plt.tick_params( **CMS.axis_label_minor ) + +# rplt.errorbar( hist_data_central, axes = axes, label = 'data', xerr = True ) + +# for systematic in sorted( systematics ): +# if systematic in exclude or systematic == 'central': +# continue + +# hist_data_systematic = read_xsection_measurement_results( systematic, channel )[0]['unfolded'] +# hist_data_systematic.markersize = 2 +# hist_data_systematic.marker = 'o' +# colour_number = systematics.index( systematic ) + 2 +# if colour_number == 10: +# colour_number = 42 +# hist_data_systematic.SetMarkerColor( colour_number ) +# if 'PDF' in systematic: +# rplt.errorbar( hist_data_systematic, axes = axes, label = systematic.replace( 'Weights_', ' ' ), xerr = None )tranelse: +# rplt.errorbar( hist_data_systematic, axes = axes, label = measurements_latex[systematic], xerr = None ) + +# plt.legend( numpoints = 1, loc = 'center right', prop = {'size':25}, ncol = 2 ) +# label, channel_label = get_cms_labels( channel ) +# plt.title( label, CMS.title ) +# # CMS text +# # note: fontweight/weight does not change anything as we use Latex text!!! 
+# plt.text(0.95, 0.95, r"\textbf{CMS}", transform=axes.transAxes, fontsize=42, +# verticalalignment='top',horizontalalignment='right') +# # channel text +# axes.text(0.95, 0.90, r"\emph{%s}" %channel_label, transform=axes.transAxes, fontsize=40, +# verticalalignment='top',horizontalalignment='right') +# plt.tight_layout() + + +# path = output_folder + str( measurement_config.centre_of_mass_energy ) + 'TeV/' + variable +# make_folder_if_not_exists( path ) +# for output_format in output_formats: +# filename = path + '/normalised_xsection_' + channel + '_' + suffix + '.' + output_format + +# plt.savefig( filename ) + +# plt.close() +# gc.collect() @xsec_04_log.trace() def get_unit_string(fit_variable): @@ -648,11 +689,6 @@ def parse_arguments(): default = 'MET', help = "set variable to plot (MET, HT, ST, WPT, NJets, lepton_pt, abs_lepton_eta )" ) - parser.add_argument( "-m", "--metType", - dest = "metType", - default = 'type1', - help = "set MET type used in the analysis of MET, ST or MT" - ) parser.add_argument( "-c", "--centre-of-mass-energy", dest = "CoM", default = 13, @@ -695,28 +731,30 @@ def parse_arguments(): if args.debug: log.setLevel(log.DEBUG) - output_formats = ['pdf'] - measurement_config = XSectionConfig( args.CoM ) + output_formats = ['pdf'] + measurement_config = XSectionConfig( args.CoM ) + # caching of variables for shorter access - method = args.unfolding_method - translate_options = measurement_config.translate_options - variable = args.variable - show_generator_ratio = args.show_generator_ratio - visiblePS = args.visiblePS + method = args.unfolding_method + variable = args.variable + show_generator_ratio = args.show_generator_ratio + visiblePS = args.visiblePS + output_folder = args.output_folder + + if not output_folder.endswith( '/' ): + output_folder += '/' + phase_space = 'FullPS' if visiblePS: phase_space = 'VisiblePS' - output_folder = args.output_folder - if not output_folder.endswith( '/' ): - output_folder += '/' - met_type = 
translate_options[args.metType] path_to_DF = '{path}/{com}TeV/{variable}/{phase_space}/' - path_to_DF = path_to_DF.format(path = args.path, com = args.CoM, - variable = variable, - phase_space = phase_space, - ) -# path_to_DF = args.path + '/' + str( measurement_config.centre_of_mass_energy ) + 'TeV/' + variable + '/' + path_to_DF = path_to_DF.format( + path = args.path, + com = args.CoM, + variable = variable, + phase_space = phase_space, + ) all_measurements = deepcopy( measurement_config.measurements ) pdf_uncertainties = ['PDFWeights_%d' % index for index in range( 1, 45 )] @@ -726,32 +764,13 @@ def parse_arguments(): for channel in ['muon']: for category in all_measurements: + # Show central only. TODO Add in additional systematic comparison plots if not category == 'central' and not args.additional_plots: continue + if variable in measurement_config.variables_no_met and category in measurement_config.met_specific_systematics: continue - # if variable == 'HT' and category in met_uncertainties: - # continue - # setting up systematic MET for JES up/down samples for reading fit templates - met_type = translate_options[args.metType] - if category == 'JES_up': - met_type += 'JetEnUp' - elif category == 'JES_down': - met_type += 'JetEnDown' - - # if not channel == 'combined': - # #Don't make additional plots for e.g. generator systematics, mass systematics, k value systematics and pdf systematics because they are now done \ - # #in the unfolding process with BLT unfolding files. 
- # if category in ttbar_generator_systematics or category in ttbar_mass_systematics or category in kValue_systematics or category in pdf_uncertainties: - # continue - # fit_templates, fit_results = read_fit_templates_and_results_as_histograms( category, channel ) - # make_template_plots( fit_templates, category, channel ) - # plot_fit_results( fit_results, category, channel ) - - # change back to original MET type - met_type = translate_options[args.metType] - if met_type == 'PFMET': - met_type = 'patMETsPFlow' - + # Read the xsection results from dataframe histograms_normalised_xsection_different_generators, histograms_normalised_xsection_systematics_shifts = read_xsection_measurement_results( category, channel ) + histname = '{variable}_normalised_xsection_{channel}_{phase_space}_{method}' histname = histname.format( variable = variable, @@ -765,13 +784,15 @@ def parse_arguments(): category, output_folder, histname + '_different_generators', + show_ratio = True, show_generator_ratio = show_generator_ratio ) make_plots( histograms_normalised_xsection_systematics_shifts, category, output_folder, - histname + '_systematics_shifts' + histname + '_systematics_shifts', + show_ratio = True, ) del histograms_normalised_xsection_different_generators, histograms_normalised_xsection_systematics_shifts From f92a92173948f37024fd744aa379ab7dcb1c8d4e Mon Sep 17 00:00:00 2001 From: Douglas Burns Date: Mon, 12 Dec 2016 15:15:14 +0000 Subject: [PATCH 56/90] Remove some imports --- dps/analysis/xsection/04_make_plots_matplotlib.py | 7 +++---- 1 file changed, 3 insertions(+), 4 deletions(-) diff --git a/dps/analysis/xsection/04_make_plots_matplotlib.py b/dps/analysis/xsection/04_make_plots_matplotlib.py index c49c20ef..1243a8a9 100644 --- a/dps/analysis/xsection/04_make_plots_matplotlib.py +++ b/dps/analysis/xsection/04_make_plots_matplotlib.py @@ -11,10 +11,10 @@ from dps.utils.file_utilities import make_folder_if_not_exists from dps.utils.pandas_utilities import 
read_tuple_from_file, file_to_df, tupleise_cols from dps.utils.hist_utilities import value_error_tuplelist_to_hist, \ -value_tuplelist_to_hist, value_errors_tuplelist_to_graph, graph_to_value_errors_tuplelist -from math import sqrt +value_errors_tuplelist_to_graph, graph_to_value_errors_tuplelist + # rootpy & matplotlib -from ROOT import kRed, kGreen, kMagenta, kBlue, kBlack +from ROOT import kBlue from dps.utils.ROOT_utils import set_root_defaults import matplotlib as mpl from matplotlib import rc @@ -30,7 +30,6 @@ setup_matplotlib() import matplotlib.patches as mpatches -import latexcodec from dps.utils.logger import log xsec_04_log = log["src/cross_section_measurement/04_make_plots_matplotlib"] From cd1b0b585104fbba96a09c86cafcf926cbb920b2 Mon Sep 17 00:00:00 2001 From: Douglas Burns Date: Thu, 15 Dec 2016 10:00:05 +0000 Subject: [PATCH 57/90] Add argument parser function --- .../BLTUnfold/produceUnfoldingHistograms.py | 258 +++++++++++------- 1 file changed, 156 insertions(+), 102 deletions(-) diff --git a/dps/analysis/BLTUnfold/produceUnfoldingHistograms.py b/dps/analysis/BLTUnfold/produceUnfoldingHistograms.py index f08c40e6..82e02ff4 100644 --- a/dps/analysis/BLTUnfold/produceUnfoldingHistograms.py +++ b/dps/analysis/BLTUnfold/produceUnfoldingHistograms.py @@ -1,7 +1,7 @@ from rootpy.plotting import Hist, Hist2D from rootpy.io import root_open #from rootpy.interactive import wait -from optparse import OptionParser +from argparse import ArgumentParser from dps.config.xsection import XSectionConfig from dps.config.variable_binning import bin_edges_vis, reco_bin_edges_vis from dps.config.variableBranchNames import branchNames, genBranchNames_particle, genBranchNames_parton @@ -48,41 +48,40 @@ def getFileName( com, sample, measurementConfig ) : fileNames = { '13TeV' : { - 'central' : measurementConfig.ttbar_category_templates_trees['central'], - - 'amcatnlo' : measurementConfig.ttbar_amc_category_templates_trees, - 'madgraph' : 
measurementConfig.ttbar_madgraph_category_templates_trees, - 'powhegherwigpp' : measurementConfig.ttbar_powhegherwigpp_category_templates_trees, - # 'amcatnloherwigpp' : measurementConfig.ttbar_amcatnloherwigpp_category_templates_trees, - - 'scaleup' : measurementConfig.ttbar_scaleup_category_templates_trees, - 'scaledown' : measurementConfig.ttbar_scaledown_category_templates_trees, - 'massdown' : measurementConfig.ttbar_mtop1695_category_templates_trees, - 'massup' : measurementConfig.ttbar_mtop1755_category_templates_trees, - - 'jesdown' : measurementConfig.ttbar_jesdown_category_templates_trees, - 'jesup' : measurementConfig.ttbar_jesup_category_templates_trees, - 'jerdown' : measurementConfig.ttbar_jerdown_category_templates_trees, - 'jerup' : measurementConfig.ttbar_jerup_category_templates_trees, - - 'bjetdown' : measurementConfig.ttbar_category_templates_trees['central'], - 'bjetup' : measurementConfig.ttbar_category_templates_trees['central'], - 'lightjetdown' : measurementConfig.ttbar_category_templates_trees['central'], - 'lightjetup' : measurementConfig.ttbar_category_templates_trees['central'], - - 'leptondown' : measurementConfig.ttbar_category_templates_trees['central'], - 'leptonup' : measurementConfig.ttbar_category_templates_trees['central'], - 'pileupUp' : measurementConfig.ttbar_category_templates_trees['central'], - 'pileupDown' : measurementConfig.ttbar_category_templates_trees['central'], - - 'ElectronEnUp' : measurementConfig.ttbar_category_templates_trees['central'], - 'ElectronEnDown' : measurementConfig.ttbar_category_templates_trees['central'], - 'MuonEnUp' : measurementConfig.ttbar_category_templates_trees['central'], - 'MuonEnDown' : measurementConfig.ttbar_category_templates_trees['central'], - 'TauEnUp' : measurementConfig.ttbar_category_templates_trees['central'], - 'TauEnDown' : measurementConfig.ttbar_category_templates_trees['central'], - 'UnclusteredEnUp' : measurementConfig.ttbar_category_templates_trees['central'], - 
'UnclusteredEnDown' : measurementConfig.ttbar_category_templates_trees['central'], + 'central' : measurementConfig.ttbar_trees['central'], + + 'amcatnlo' : measurementConfig.ttbar_amc_trees, + 'madgraph' : measurementConfig.ttbar_madgraph_trees, + 'powhegherwigpp' : measurementConfig.ttbar_powhegherwigpp_trees, + + 'scaleup' : measurementConfig.ttbar_scaleup_trees, + 'scaledown' : measurementConfig.ttbar_scaledown_trees, + 'massdown' : measurementConfig.ttbar_mtop1695_trees, + 'massup' : measurementConfig.ttbar_mtop1755_trees, + + 'jesdown' : measurementConfig.ttbar_jesdown_trees, + 'jesup' : measurementConfig.ttbar_jesup_trees, + 'jerdown' : measurementConfig.ttbar_jerdown_trees, + 'jerup' : measurementConfig.ttbar_jerup_trees, + + 'bjetdown' : measurementConfig.ttbar_trees['central'], + 'bjetup' : measurementConfig.ttbar_trees['central'], + 'lightjetdown' : measurementConfig.ttbar_trees['central'], + 'lightjetup' : measurementConfig.ttbar_trees['central'], + + 'leptondown' : measurementConfig.ttbar_trees['central'], + 'leptonup' : measurementConfig.ttbar_trees['central'], + 'pileupUp' : measurementConfig.ttbar_trees['central'], + 'pileupDown' : measurementConfig.ttbar_trees['central'], + + 'ElectronEnUp' : measurementConfig.ttbar_trees['central'], + 'ElectronEnDown' : measurementConfig.ttbar_trees['central'], + 'MuonEnUp' : measurementConfig.ttbar_trees['central'], + 'MuonEnDown' : measurementConfig.ttbar_trees['central'], + 'TauEnUp' : measurementConfig.ttbar_trees['central'], + 'TauEnDown' : measurementConfig.ttbar_trees['central'], + 'UnclusteredEnUp' : measurementConfig.ttbar_trees['central'], + 'UnclusteredEnDown' : measurementConfig.ttbar_trees['central'], }, } @@ -93,53 +92,108 @@ def getFileName( com, sample, measurementConfig ) : channel( 'muPlusJets', 'rootTupleTreeMuPlusJets', 'muon') ] + + +def parse_arguments(): + parser = ArgumentParser(__doc__) + parser.add_argument('--topPtReweighting', + dest='applyTopPtReweighting', + type='int', + default=0 + ) 
+ parser.add_argument('--topEtaReweighting', + dest='applyTopEtaReweighting', + type='int', + default=0 + ) + parser.add_argument('-c', '--centreOfMassEnergy', + dest='centreOfMassEnergy', + type='int', + default=13 + ) + parser.add_argument('--pdfWeight', + type='int', + dest='pdfWeight', + default=-1 + ) + parser.add_argument('--muFmuRWeight', + type='int', + dest='muFmuRWeight', + default=-1 + ) + parser.add_argument('--alphaSWeight', + type='int', + dest='alphaSWeight', + default=-1 + ) + parser.add_argument('--nGeneratorWeights', + type='int', + dest='nGeneratorWeights', + default=1 + ) + parser.add_argument('-s', '--sample', + dest='sample', + default='central' + ) + parser.add_argument('-d', '--debug', + action='store_true', + dest='debug', + default=False + ) + parser.add_argument('-n', + action='store_true', + dest='donothing', + default=False + ) + parser.add_argument('-e', + action='store_true', + dest='extraHists', + default=False + ) + parser.add_argument('-f', + action='store_true', + dest='fineBinned', + default=False + ) + args = parser.parse_args() + return args + + + def main(): - - parser = OptionParser() - parser.add_option('--topPtReweighting', dest='applyTopPtReweighting', type='int', default=0 ) - parser.add_option('--topEtaReweighting', dest='applyTopEtaReweighting', type='int', default=0 ) - parser.add_option('-c', '--centreOfMassEnergy', dest='centreOfMassEnergy', type='int', default=13 ) - parser.add_option('--pdfWeight', type='int', dest='pdfWeight', default=-1 ) - parser.add_option('--muFmuRWeight', type='int', dest='muFmuRWeight', default=-1 ) - parser.add_option('--nGeneratorWeights', type='int', dest='nGeneratorWeights', default=1 ) - parser.add_option('-s', '--sample', dest='sample', default='central') - parser.add_option('-d', '--debug', action='store_true', dest='debug', default=False) - parser.add_option('-n', action='store_true', dest='donothing', default=False) - parser.add_option('-e', action='store_true', dest='extraHists', 
default=False) - parser.add_option('-f',action='store_true', dest='fineBinned', default=False) - - (options, _) = parser.parse_args() - - measurement_config = XSectionConfig( options.centreOfMassEnergy ) + args = parse_arguments() + + measurement_config = XSectionConfig( args.centreOfMassEnergy ) # Input file name file_name = 'crap.root' - if int(options.centreOfMassEnergy) == 13: - file_name = getFileName('13TeV', options.sample, measurement_config) - # if options.generatorWeight >= 0: + if int(args.centreOfMassEnergy) == 13: + file_name = getFileName('13TeV', args.sample, measurement_config) + # if args.generatorWeight >= 0: # file_name = 'localInputFile.root' else: print "Error: Unrecognised centre of mass energy." - pdfWeight = options.pdfWeight - muFmuRWeight = options.muFmuRWeight + pdfWeight = args.pdfWeight + muFmuRWeight = args.muFmuRWeight + alphaSWeight = args.alphaSWeight # Output file name outputFileName = 'crap.root' - outputFileDir = 'unfolding/%sTeV/' % options.centreOfMassEnergy + outputFileDir = 'TESTING/unfolding/%sTeV/' % args.centreOfMassEnergy make_folder_if_not_exists(outputFileDir) - energySuffix = '%sTeV' % ( options.centreOfMassEnergy ) + energySuffix = '%sTeV' % ( args.centreOfMassEnergy ) - if options.applyTopEtaReweighting != 0: - if options.applyTopEtaReweighting == 1: + if args.applyTopEtaReweighting != 0: + if args.applyTopEtaReweighting == 1: outputFileName = outputFileDir+'/unfolding_TTJets_%s_asymmetric_withTopEtaReweighting_up.root' % energySuffix - elif options.applyTopEtaReweighting == -1: + elif args.applyTopEtaReweighting == -1: outputFileName = outputFileDir+'/unfolding_TTJets_%s_asymmetric_withTopEtaReweighting_down.root' % energySuffix - elif options.applyTopPtReweighting != 0: - if options.applyTopPtReweighting == 1: + elif args.applyTopPtReweighting != 0: + if args.applyTopPtReweighting == 1: outputFileName = outputFileDir+'/unfolding_TTJets_%s_asymmetric_withTopPtReweighting_up.root' % energySuffix - elif 
options.applyTopPtReweighting == -1: + elif args.applyTopPtReweighting == -1: outputFileName = outputFileDir+'/unfolding_TTJets_%s_asymmetric_withTopPtReweighting_down.root' % energySuffix elif muFmuRWeight == 1: outputFileName = outputFileDir+'/unfolding_TTJets_%s_asymmetric_1muR2muF.root' % ( energySuffix ) @@ -155,9 +209,9 @@ def main(): outputFileName = outputFileDir+'/unfolding_TTJets_%s_asymmetric_05muR05muF.root' % ( energySuffix ) elif pdfWeight >= 0 and pdfWeight <= 99: outputFileName = outputFileDir+'/unfolding_TTJets_%s_asymmetric_pdfWeight_%i.root' % ( energySuffix, pdfWeight ) - elif options.sample != 'central': - outputFileName = outputFileDir+'/unfolding_TTJets_%s_%s_asymmetric.root' % ( energySuffix, options.sample ) - elif options.fineBinned : + elif args.sample != 'central': + outputFileName = outputFileDir+'/unfolding_TTJets_%s_%s_asymmetric.root' % ( energySuffix, args.sample ) + elif args.fineBinned : outputFileName = outputFileDir+'/unfolding_TTJets_%s.root' % ( energySuffix ) else: outputFileName = outputFileDir+'/unfolding_TTJets_%s_asymmetric.root' % energySuffix @@ -166,13 +220,13 @@ def main(): # Get the tree treeName = "TTbar_plus_X_analysis/Unfolding/Unfolding" - if options.sample == "jesup": + if args.sample == "jesup": treeName += "_JESUp" - elif options.sample == "jesdown": + elif args.sample == "jesdown": treeName += "_JESDown" - elif options.sample == "jerup": + elif args.sample == "jerup": treeName += "_JERUp" - elif options.sample == "jerdown": + elif args.sample == "jerdown": treeName += "_JERDown" tree = f.Get(treeName) @@ -198,8 +252,8 @@ def main(): outputDirs = {} for variable in allVariablesBins: - if options.debug and variable != 'HT' : continue - if options.sample in measurement_config.met_systematics and variable not in ['MET', 'ST', 'WPT']: + if args.debug and variable != 'HT' : continue + if args.sample in measurement_config.met_systematics and variable not in ['MET', 'ST', 'WPT']: continue outputDirs[variable] = {} @@ 
-211,21 +265,21 @@ def main(): recoVariableName = branchNames[variable] sysIndex = None if variable in ['MET', 'ST', 'WPT']: - if options.sample == "jesup": + if args.sample == "jesup": recoVariableName += '_METUncertainties' sysIndex = 2 - elif options.sample == "jesdown": + elif args.sample == "jesdown": recoVariableName += '_METUncertainties' sysIndex = 3 - elif options.sample == "jerup": + elif args.sample == "jerup": recoVariableName += '_METUncertainties' sysIndex = 0 - elif options.sample == "jerdown": + elif args.sample == "jerdown": recoVariableName+= '_METUncertainties' sysIndex = 1 - elif options.sample in measurement_config.met_systematics: + elif args.sample in measurement_config.met_systematics: recoVariableName += '_METUncertainties' - sysIndex = measurement_config.met_systematics[options.sample] + sysIndex = measurement_config.met_systematics[args.sample] genVariable_particle_name = None genVariable_parton_name = None @@ -266,7 +320,7 @@ def main(): h['response_parton'] = Hist2D( reco_bin_edges_vis[variable], allVariablesBins[variable], name='response_parton') h['response_without_fakes_parton'] = Hist2D( reco_bin_edges_vis[variable], allVariablesBins[variable], name='response_without_fakes_parton') - if options.fineBinned: + if args.fineBinned: minVar = trunc( allVariablesBins[variable][0] ) maxVar = trunc( max( tree.GetMaximum(genVariable_particle_names[variable]), tree.GetMaximum( recoVariableNames[variable] ) ) * 1.2 ) nBins = int(maxVar - minVar) @@ -327,7 +381,7 @@ def main(): branch = event.__getattr__ n+=1 if not n%100000: print 'Processing event %.0f Progress : %.2g %%' % ( n, float(n)/nEntries*100 ) - # if n == 100000: break + if n == 10000: break # # # # # # Weights and selection # # # @@ -336,9 +390,9 @@ def main(): # Don't apply if calculating systematic pileupWeight = event.PUWeight # print event.PUWeight,event.PUWeight_up,event.PUWeight_down - if options.sample == "pileupUp": + if args.sample == "pileupUp": pileupWeight = 
event.PUWeight_up - elif options.sample == "pileupDown": + elif args.sample == "pileupDown": pileupWeight = event.PUWeight_down # Generator level weight @@ -350,20 +404,20 @@ def main(): # Lepton weight leptonWeight = event.LeptonEfficiencyCorrection - if options.sample == 'leptonup': + if args.sample == 'leptonup': leptonWeight = event.LeptonEfficiencyCorrectionUp - elif options.sample == 'leptondown': + elif args.sample == 'leptondown': leptonWeight == event.LeptonEfficiencyCorrectionDown # B Jet Weight bjetWeight = event.BJetWeight - if options.sample == "bjetup": + if args.sample == "bjetup": bjetWeight = event.BJetUpWeight - elif options.sample == "bjetdown": + elif args.sample == "bjetdown": bjetWeight = event.BJetDownWeight - elif options.sample == "lightjetup": + elif args.sample == "lightjetup": bjetWeight = event.LightJetUpWeight - elif options.sample == "lightjetdown": + elif args.sample == "lightjetdown": bjetWeight = event.LightJetDownWeight # Top pt systematic weight @@ -395,13 +449,13 @@ def main(): offlineWeight *= ptWeight genWeight *= ptWeight - if options.applyTopPtReweighting != 0: - ptWeight = calculateTopPtWeight( branch('lepTopPt_parton'), branch('hadTopPt_parton'), options.applyTopPtReweighting) + if args.applyTopPtReweighting != 0: + ptWeight = calculateTopPtWeight( branch('lepTopPt_parton'), branch('hadTopPt_parton'), args.applyTopPtReweighting) offlineWeight *= ptWeight genWeight *= ptWeight - if options.applyTopEtaReweighting != 0: - etaWeight = calculateTopEtaWeight( branch('lepTopRap_parton'), branch('hadTopRap_parton'), options.applyTopEtaReweighting) + if args.applyTopEtaReweighting != 0: + etaWeight = calculateTopEtaWeight( branch('lepTopRap_parton'), branch('hadTopRap_parton'), args.applyTopEtaReweighting) offlineWeight *= etaWeight genWeight *= etaWeight @@ -443,8 +497,8 @@ def main(): nOfflineNotVis[channel.channelName] += offlineWeight for variable in allVariablesBins: - if options.debug and variable != 'HT' : continue - if 
options.sample in measurement_config.met_systematics and variable not in ['MET', 'ST', 'WPT']: + if args.debug and variable != 'HT' : continue + if args.sample in measurement_config.met_systematics and variable not in ['MET', 'ST', 'WPT']: continue # # # @@ -462,7 +516,7 @@ def main(): # if recoVariable > allVariablesBins[variable][-1]: # print 'Big reco variable : ',recoVariable # print 'Setting to :',min( recoVariable, allVariablesBins[variable][-1] - 0.000001 ) - if not options.fineBinned: + if not args.fineBinned: recoVariable = min( recoVariable, allVariablesBins[variable][-1] - 0.000001 ) genVariable_particle = branch(genVariable_particle_names[variable]) if 'abs' in variable: @@ -471,7 +525,7 @@ def main(): # # Fill histograms # # histogramsToFill = histograms[variable][channel.channelName] - if not options.donothing: + if not args.donothing: if genSelection: histogramsToFill['truth'].Fill( genVariable_particle, genWeight) @@ -500,7 +554,7 @@ def main(): if fakeSelectionVis: histogramsToFill['fakeVis'].Fill( recoVariable, offlineWeight) - if options.extraHists: + if args.extraHists: if genSelection: histogramsToFill['eventWeightHist'].Fill(event.EventWeight) histogramsToFill['genWeightHist'].Fill(genWeight) @@ -510,8 +564,8 @@ def main(): # Output histgorams to file # for variable in allVariablesBins: - if options.debug and variable != 'HT' : continue - if options.sample in measurement_config.met_systematics and variable not in ['MET', 'ST', 'WPT']: + if args.debug and variable != 'HT' : continue + if args.sample in measurement_config.met_systematics and variable not in ['MET', 'ST', 'WPT']: continue for channel in channels: From 2e2b150a1a5c2e8a24fdee28b3b820bc3d0e1920 Mon Sep 17 00:00:00 2001 From: Douglas Burns Date: Thu, 15 Dec 2016 10:39:15 +0000 Subject: [PATCH 58/90] Remove offline weight bug where pu weight was counted twice --- .../BLTUnfold/produceUnfoldingHistograms.py | 118 ++++++++---------- 1 file changed, 52 insertions(+), 66 deletions(-) diff 
--git a/dps/analysis/BLTUnfold/produceUnfoldingHistograms.py b/dps/analysis/BLTUnfold/produceUnfoldingHistograms.py index 82e02ff4..861a930a 100644 --- a/dps/analysis/BLTUnfold/produceUnfoldingHistograms.py +++ b/dps/analysis/BLTUnfold/produceUnfoldingHistograms.py @@ -47,50 +47,50 @@ def ptWeight( pt ): def getFileName( com, sample, measurementConfig ) : fileNames = { - '13TeV' : { - 'central' : measurementConfig.ttbar_trees['central'], - - 'amcatnlo' : measurementConfig.ttbar_amc_trees, - 'madgraph' : measurementConfig.ttbar_madgraph_trees, - 'powhegherwigpp' : measurementConfig.ttbar_powhegherwigpp_trees, - - 'scaleup' : measurementConfig.ttbar_scaleup_trees, - 'scaledown' : measurementConfig.ttbar_scaledown_trees, - 'massdown' : measurementConfig.ttbar_mtop1695_trees, - 'massup' : measurementConfig.ttbar_mtop1755_trees, - - 'jesdown' : measurementConfig.ttbar_jesdown_trees, - 'jesup' : measurementConfig.ttbar_jesup_trees, - 'jerdown' : measurementConfig.ttbar_jerdown_trees, - 'jerup' : measurementConfig.ttbar_jerup_trees, - - 'bjetdown' : measurementConfig.ttbar_trees['central'], - 'bjetup' : measurementConfig.ttbar_trees['central'], - 'lightjetdown' : measurementConfig.ttbar_trees['central'], - 'lightjetup' : measurementConfig.ttbar_trees['central'], - - 'leptondown' : measurementConfig.ttbar_trees['central'], - 'leptonup' : measurementConfig.ttbar_trees['central'], - 'pileupUp' : measurementConfig.ttbar_trees['central'], - 'pileupDown' : measurementConfig.ttbar_trees['central'], - - 'ElectronEnUp' : measurementConfig.ttbar_trees['central'], - 'ElectronEnDown' : measurementConfig.ttbar_trees['central'], - 'MuonEnUp' : measurementConfig.ttbar_trees['central'], - 'MuonEnDown' : measurementConfig.ttbar_trees['central'], - 'TauEnUp' : measurementConfig.ttbar_trees['central'], - 'TauEnDown' : measurementConfig.ttbar_trees['central'], - 'UnclusteredEnUp' : measurementConfig.ttbar_trees['central'], - 'UnclusteredEnDown' : measurementConfig.ttbar_trees['central'], - 
}, - } + '13TeV' : { + 'central' : measurementConfig.ttbar_trees['central'], + + 'amcatnlo' : measurementConfig.ttbar_amc_trees, + 'madgraph' : measurementConfig.ttbar_madgraph_trees, + 'powhegherwigpp' : measurementConfig.ttbar_powhegherwigpp_trees, + + 'scaleup' : measurementConfig.ttbar_scaleup_trees, + 'scaledown' : measurementConfig.ttbar_scaledown_trees, + 'massdown' : measurementConfig.ttbar_mtop1695_trees, + 'massup' : measurementConfig.ttbar_mtop1755_trees, + + 'jesdown' : measurementConfig.ttbar_jesdown_trees, + 'jesup' : measurementConfig.ttbar_jesup_trees, + 'jerdown' : measurementConfig.ttbar_jerdown_trees, + 'jerup' : measurementConfig.ttbar_jerup_trees, + + 'bjetdown' : measurementConfig.ttbar_trees['central'], + 'bjetup' : measurementConfig.ttbar_trees['central'], + 'lightjetdown' : measurementConfig.ttbar_trees['central'], + 'lightjetup' : measurementConfig.ttbar_trees['central'], + + 'leptondown' : measurementConfig.ttbar_trees['central'], + 'leptonup' : measurementConfig.ttbar_trees['central'], + 'pileupUp' : measurementConfig.ttbar_trees['central'], + 'pileupDown' : measurementConfig.ttbar_trees['central'], + + 'ElectronEnUp' : measurementConfig.ttbar_trees['central'], + 'ElectronEnDown' : measurementConfig.ttbar_trees['central'], + 'MuonEnUp' : measurementConfig.ttbar_trees['central'], + 'MuonEnDown' : measurementConfig.ttbar_trees['central'], + 'TauEnUp' : measurementConfig.ttbar_trees['central'], + 'TauEnDown' : measurementConfig.ttbar_trees['central'], + 'UnclusteredEnUp' : measurementConfig.ttbar_trees['central'], + 'UnclusteredEnDown' : measurementConfig.ttbar_trees['central'], + }, + } return fileNames[com][sample] channels = [ - channel( 'ePlusJets', 'rootTupleTreeEPlusJets', 'electron'), - channel( 'muPlusJets', 'rootTupleTreeMuPlusJets', 'muon') - ] + channel( 'ePlusJets', 'rootTupleTreeEPlusJets', 'electron'), + channel( 'muPlusJets', 'rootTupleTreeMuPlusJets', 'muon'), +] @@ -169,12 +169,10 @@ def main(): file_name = 'crap.root' if 
int(args.centreOfMassEnergy) == 13: file_name = getFileName('13TeV', args.sample, measurement_config) - # if args.generatorWeight >= 0: - # file_name = 'localInputFile.root' else: print "Error: Unrecognised centre of mass energy." - pdfWeight = args.pdfWeight + pdfWeight = args.pdfWeight muFmuRWeight = args.muFmuRWeight alphaSWeight = args.alphaSWeight @@ -231,20 +229,13 @@ def main(): tree = f.Get(treeName) nEntries = tree.GetEntries() - # weightTree = f.Get('TTbar_plus_X_analysis/Unfolding/GeneratorSystematicWeights') - # if meWeight >= 0 : - # tree.AddFriend('TTbar_plus_X_analysis/Unfolding/GeneratorSystematicWeights') - # tree.SetBranchStatus('genWeight_*',1) - # tree.SetBranchStatus('genWeight_%i' % meWeight, 1) # For variables where you want bins to be symmetric about 0, use abs(variable) (but also make plots for signed variable) allVariablesBins = bin_edges_vis.copy() for variable in bin_edges_vis: - if 'Rap' in variable: allVariablesBins['abs_%s' % variable] = [0,bin_edges_vis[variable][-1]] - recoVariableNames = {} genVariable_particle_names = {} genVariable_parton_names = {} @@ -253,7 +244,8 @@ def main(): for variable in allVariablesBins: if args.debug and variable != 'HT' : continue - if args.sample in measurement_config.met_systematics and variable not in ['MET', 'ST', 'WPT']: + if args.sample in measurement_config.met_specific_systematics + and variable in measurement_config.variables_no_met: continue outputDirs[variable] = {} @@ -265,18 +257,19 @@ def main(): recoVariableName = branchNames[variable] sysIndex = None if variable in ['MET', 'ST', 'WPT']: - if args.sample == "jesup": - recoVariableName += '_METUncertainties' - sysIndex = 2 - elif args.sample == "jesdown": - recoVariableName += '_METUncertainties' - sysIndex = 3 - elif args.sample == "jerup": + if args.sample == "jerup": recoVariableName += '_METUncertainties' sysIndex = 0 elif args.sample == "jerdown": recoVariableName+= '_METUncertainties' sysIndex = 1 + elif args.sample == "jesup": + 
recoVariableName += '_METUncertainties' + sysIndex = 2 + elif args.sample == "jesdown": + recoVariableName += '_METUncertainties' + sysIndex = 3 + # Dont need this? elif args.sample in measurement_config.met_systematics: recoVariableName += '_METUncertainties' sysIndex = measurement_config.met_systematics[args.sample] @@ -398,9 +391,6 @@ def main(): # Generator level weight genWeight = event.EventWeight * measurement_config.luminosity_scale - # Offline level weights - offlineWeight = pileupWeight - # Lepton weight leptonWeight = event.LeptonEfficiencyCorrection @@ -420,11 +410,7 @@ def main(): elif args.sample == "lightjetdown": bjetWeight = event.LightJetDownWeight - # Top pt systematic weight - topPtSystematicWeight = 1 - if options.sample == 'topPtSystematic': - topPtSystematicWeight = calculateTopPtSystematicWeight( branch('lepTopPt_parton'), branch('hadTopPt_parton')) - + # Offline level weights offlineWeight = event.EventWeight * measurement_config.luminosity_scale offlineWeight *= pileupWeight offlineWeight *= bjetWeight From b1de099b6a1a792c1e77fdf981a77477a6efe8b4 Mon Sep 17 00:00:00 2001 From: Douglas Burns Date: Thu, 15 Dec 2016 14:57:32 +0000 Subject: [PATCH 59/90] Update ptreweighting --- .../BLTUnfold/produceUnfoldingHistograms.py | 104 +++++++++++------- 1 file changed, 62 insertions(+), 42 deletions(-) diff --git a/dps/analysis/BLTUnfold/produceUnfoldingHistograms.py b/dps/analysis/BLTUnfold/produceUnfoldingHistograms.py index 861a930a..70fd9091 100644 --- a/dps/analysis/BLTUnfold/produceUnfoldingHistograms.py +++ b/dps/analysis/BLTUnfold/produceUnfoldingHistograms.py @@ -28,13 +28,23 @@ def calculateTopEtaWeight( lepTopRap, hadTopRap, whichWayToWeight = 1): else : return 1 -def calculateTopPtWeight( lepTopPt, hadTopPt, whichWayToWeight = 1 ): - if whichWayToWeight == -1 : - return max ( (-0.001 * lepTopPt + 1.1 ) * (-0.001 * hadTopPt + 1.1), 0.1 ) - elif whichWayToWeight == 1 : - return max ( (0.001 * lepTopPt + 0.9 ) * (0.001 * hadTopPt + 0.9), 
0.1 ) - else : - return 1 +def calculateTopPtWeight( lepTopPt, hadTopPt ): + ''' + Calculating the top pt weight + ______________ A + B.Pt + W = / SF(t)SF(tbar) , SF(t) = e + + A = 0.0615 + B = -0.0005 + ''' + ptWeight = 1 + A = 0.0615 + B = -0.0005 + sf_lept = exp(A+(B*lepTopPt)) + sf_hadt = exp(A+(B*hadTopPt)) + ptWeight = sqrt(sf_hadt*sf_lept) + return ptWeight + def calculateTopPtSystematicWeight( lepTopPt, hadTopPt ): lepTopWeight = ptWeight( lepTopPt ) @@ -97,37 +107,37 @@ def getFileName( com, sample, measurementConfig ) : def parse_arguments(): parser = ArgumentParser(__doc__) parser.add_argument('--topPtReweighting', + action='store_true', dest='applyTopPtReweighting', - type='int', - default=0 + default=False ) parser.add_argument('--topEtaReweighting', dest='applyTopEtaReweighting', - type='int', + type=int, default=0 ) parser.add_argument('-c', '--centreOfMassEnergy', dest='centreOfMassEnergy', - type='int', + type=int, default=13 ) parser.add_argument('--pdfWeight', - type='int', + type=int, dest='pdfWeight', default=-1 ) parser.add_argument('--muFmuRWeight', - type='int', + type=int, dest='muFmuRWeight', default=-1 ) parser.add_argument('--alphaSWeight', - type='int', + type=int, dest='alphaSWeight', default=-1 ) parser.add_argument('--nGeneratorWeights', - type='int', + type=int, dest='nGeneratorWeights', default=1 ) @@ -158,8 +168,6 @@ def parse_arguments(): args = parser.parse_args() return args - - def main(): args = parse_arguments() @@ -188,11 +196,12 @@ def main(): outputFileName = outputFileDir+'/unfolding_TTJets_%s_asymmetric_withTopEtaReweighting_up.root' % energySuffix elif args.applyTopEtaReweighting == -1: outputFileName = outputFileDir+'/unfolding_TTJets_%s_asymmetric_withTopEtaReweighting_down.root' % energySuffix - elif args.applyTopPtReweighting != 0: - if args.applyTopPtReweighting == 1: - outputFileName = outputFileDir+'/unfolding_TTJets_%s_asymmetric_withTopPtReweighting_up.root' % energySuffix - elif args.applyTopPtReweighting == 
-1: - outputFileName = outputFileDir+'/unfolding_TTJets_%s_asymmetric_withTopPtReweighting_down.root' % energySuffix + elif args.applyTopPtReweighting: + outputFileName = outputFileDir+'/unfolding_TTJets_%s_asymmetric_withTopPtReweighting.root' % energySuffix + elif alphaSWeight == 0: + outputFileName = outputFileDir+'/unfolding_TTJets_%s_asymmetric_alphaSDown.root' % ( energySuffix ) + elif alphaSWeight == 1: + outputFileName = outputFileDir+'/unfolding_TTJets_%s_asymmetric_alphaSUp.root' % ( energySuffix ) elif muFmuRWeight == 1: outputFileName = outputFileDir+'/unfolding_TTJets_%s_asymmetric_1muR2muF.root' % ( energySuffix ) elif muFmuRWeight == 2: @@ -244,8 +253,8 @@ def main(): for variable in allVariablesBins: if args.debug and variable != 'HT' : continue - if args.sample in measurement_config.met_specific_systematics - and variable in measurement_config.variables_no_met: + if args.sample in measurement_config.met_specific_systematics \ + and variable in measurement_config.variables_no_met: continue outputDirs[variable] = {} @@ -360,12 +369,12 @@ def main(): # Counters for studying phase space - nVis = {c.channelName : 0 for c in channels} - nVisNotOffline = {c.channelName : 0 for c in channels} - nOffline = {c.channelName : 0 for c in channels} - nOfflineNotVis = {c.channelName : 0 for c in channels} - nFull = {c.channelName : 0 for c in channels} - nOfflineSL = {c.channelName : 0 for c in channels} + nVis = {c.channelName : 0 for c in channels} + nVisNotOffline = {c.channelName : 0 for c in channels} + nOffline = {c.channelName : 0 for c in channels} + nOfflineNotVis = {c.channelName : 0 for c in channels} + nFull = {c.channelName : 0 for c in channels} + nOfflineSL = {c.channelName : 0 for c in channels} n=0 # Event Loop @@ -374,7 +383,7 @@ def main(): branch = event.__getattr__ n+=1 if not n%100000: print 'Processing event %.0f Progress : %.2g %%' % ( n, float(n)/nEntries*100 ) - if n == 10000: break + if n == 1000: break # # # # # # Weights and selection 
# # # @@ -435,7 +444,7 @@ def main(): offlineWeight *= ptWeight genWeight *= ptWeight - if args.applyTopPtReweighting != 0: + if args.applyTopPtReweighting: ptWeight = calculateTopPtWeight( branch('lepTopPt_parton'), branch('hadTopPt_parton'), args.applyTopPtReweighting) offlineWeight *= ptWeight genWeight *= ptWeight @@ -484,7 +493,8 @@ def main(): for variable in allVariablesBins: if args.debug and variable != 'HT' : continue - if args.sample in measurement_config.met_systematics and variable not in ['MET', 'ST', 'WPT']: + if args.sample in measurement_config.met_specific_systematics and \ + variable in measurement_config.variables_no_met: continue # # # @@ -555,15 +565,25 @@ def main(): continue for channel in channels: - # Fill phase space info - h = histograms[variable][channel.channelName]['phaseSpaceInfoHist'] - h.SetBinContent(1, nVisNotOffline[channel.channelName] / nVis[channel.channelName]) - h.SetBinContent(2, nOfflineNotVis[channel.channelName] / nOffline[channel.channelName]) - h.SetBinContent(3, nVis[channel.channelName] / nFull[channel.channelName]) - # Selection efficiency for SL ttbar - h.SetBinContent(4, nOfflineSL[channel.channelName] / nFull[channel.channelName]) - # Fraction of offline that are SL - h.SetBinContent(5, nOfflineSL[channel.channelName] / nOffline[channel.channelName]) + if nOffline[channel.channelName] != 0 : + # Fill phase space info + h = histograms[variable][channel.channelName]['phaseSpaceInfoHist'] + h.SetBinContent(1, nVisNotOffline[channel.channelName] / nVis[channel.channelName]) + # h.GetXaxis().SetBinLabel(1, "nVisNotOffline/nVis") + + h.SetBinContent(2, nOfflineNotVis[channel.channelName] / nOffline[channel.channelName]) + # h.GetXaxis().SetBinLabel(2, "nOfflineNotVis/nOffline") + + h.SetBinContent(3, nVis[channel.channelName] / nFull[channel.channelName]) + # h.GetXaxis().SetBinLabel(3, "nVis/nFull") + + # Selection efficiency for SL ttbar + h.SetBinContent(4, nOfflineSL[channel.channelName] / 
nFull[channel.channelName]) + # h.GetXaxis().SetBinLabel(4, "nOfflineSL/nFull") + + # Fraction of offline that are SL + h.SetBinContent(5, nOfflineSL[channel.channelName] / nOffline[channel.channelName]) + # h.GetXaxis().SetBinLabel(5, "nOfflineSL/nOffline") outputDirs[variable][channel.channelName].cd() for h in histograms[variable][channel.channelName]: From 0d1be61e0beb29f27ecc1802d346da77f88b2ea4 Mon Sep 17 00:00:00 2001 From: Douglas Burns Date: Thu, 15 Dec 2016 15:28:03 +0000 Subject: [PATCH 60/90] Remove comments and TESTING --- dps/analysis/BLTUnfold/produceUnfoldingHistograms.py | 4 ++-- 1 file changed, 2 insertions(+), 2 deletions(-) diff --git a/dps/analysis/BLTUnfold/produceUnfoldingHistograms.py b/dps/analysis/BLTUnfold/produceUnfoldingHistograms.py index 70fd9091..c24bff66 100644 --- a/dps/analysis/BLTUnfold/produceUnfoldingHistograms.py +++ b/dps/analysis/BLTUnfold/produceUnfoldingHistograms.py @@ -186,7 +186,7 @@ def main(): # Output file name outputFileName = 'crap.root' - outputFileDir = 'TESTING/unfolding/%sTeV/' % args.centreOfMassEnergy + outputFileDir = 'unfolding/%sTeV/' % args.centreOfMassEnergy make_folder_if_not_exists(outputFileDir) energySuffix = '%sTeV' % ( args.centreOfMassEnergy ) @@ -445,7 +445,7 @@ def main(): genWeight *= ptWeight if args.applyTopPtReweighting: - ptWeight = calculateTopPtWeight( branch('lepTopPt_parton'), branch('hadTopPt_parton'), args.applyTopPtReweighting) + ptWeight = calculateTopPtWeight( branch('lepTopPt_parton'), branch('hadTopPt_parton')) offlineWeight *= ptWeight genWeight *= ptWeight From e32cce570bdbe968ac91b7e3075f7677b1224194 Mon Sep 17 00:00:00 2001 From: Douglas Burns Date: Fri, 16 Dec 2016 11:14:53 +0000 Subject: [PATCH 61/90] Remove dependence on singly used list --- .../xsection/make_control_plots_fromTrees.py | 44 +++++++++---------- 1 file changed, 22 insertions(+), 22 deletions(-) diff --git a/dps/analysis/xsection/make_control_plots_fromTrees.py 
b/dps/analysis/xsection/make_control_plots_fromTrees.py index 34a2b642..70078129 100644 --- a/dps/analysis/xsection/make_control_plots_fromTrees.py +++ b/dps/analysis/xsection/make_control_plots_fromTrees.py @@ -39,8 +39,8 @@ def getHistograms( histogram_files, # Channel specific files and weights if 'electron' in channel: - histogram_files['data'] = measurement_config.data_file_electron_trees - histogram_files['QCD'] = measurement_config.electron_QCD_MC_category_templates_trees[category] + histogram_files['data'] = measurement_config.data_file_electron + histogram_files['QCD'] = measurement_config.electron_QCD_MC_trees[category] if normalise_to_fit: normalisation = normalisations_electron[norm_variable] if use_qcd_data_region: @@ -48,8 +48,8 @@ def getHistograms( histogram_files, # if not 'QCD' in channel and not 'NPU' in branchName: # weightBranchSignalRegion += ' * ElectronEfficiencyCorrection' if 'muon' in channel: - histogram_files['data'] = measurement_config.data_file_muon_trees - histogram_files['QCD'] = measurement_config.muon_QCD_MC_category_templates_trees[category] + histogram_files['data'] = measurement_config.data_file_muon + histogram_files['QCD'] = measurement_config.muon_QCD_MC_trees[category] if normalise_to_fit: normalisation = normalisations_muon[norm_variable] if use_qcd_data_region: @@ -69,12 +69,12 @@ def getHistograms( histogram_files, # Get histograms for combined channel if channel == 'combined': histogram_files_electron = dict(histogram_files) - histogram_files_electron['data'] = measurement_config.data_file_electron_trees - histogram_files_electron['QCD'] = measurement_config.electron_QCD_MC_category_templates_trees[category] + histogram_files_electron['data'] = measurement_config.data_file_electron + histogram_files_electron['QCD'] = measurement_config.electron_QCD_MC_trees[category] histogram_files_muon = dict(histogram_files) - histogram_files_muon['data'] = measurement_config.data_file_muon_trees - histogram_files_muon['QCD'] = 
measurement_config.muon_QCD_MC_category_templates_trees[category] + histogram_files_muon['data'] = measurement_config.data_file_muon + histogram_files_muon['QCD'] = measurement_config.muon_QCD_MC_trees[category] # histograms_electron = get_histograms_from_trees( trees = [signal_region_tree.replace('COMBINED','EPlusJets')], branch = branchName, weightBranch = weightBranchSignalRegion + ' * ElectronEfficiencyCorrection', files = histogram_files_electron, nBins = nBins, xMin = x_limits[0], xMax = x_limits[-1], selection = selection ) # histograms_muon = get_histograms_from_trees( trees = [signal_region_tree.replace('COMBINED','MuPlusJets')], branch = branchName, weightBranch = weightBranchSignalRegion + ' * MuonEfficiencyCorrection', files = histogram_files_muon, nBins = nBins, xMin = x_limits[0], xMax = x_limits[-1], selection = selection ) @@ -352,14 +352,14 @@ def make_plot( channel, x_axis_title, y_axis_title, make_additional_QCD_plots = options.additional_QCD_plots histogram_files = { - 'TTJet': measurement_config.ttbar_category_templates_trees[category], - 'V+Jets': measurement_config.VJets_category_templates_trees[category], - 'QCD': measurement_config.electron_QCD_MC_category_templates_trees[category], - 'SingleTop': measurement_config.SingleTop_category_templates_trees[category], + 'TTJet': measurement_config.ttbar_trees[category], + 'V+Jets': measurement_config.VJets_trees[category], + 'QCD': measurement_config.electron_QCD_MC_trees[category], + 'SingleTop': measurement_config.SingleTop_trees[category], } - if (generator != 'PowhegPythia8'): - histogram_files['TTJet'] = measurement_config.ttbar_generator_category_templates_trees[generator] + if 'PowhegPythia8' not in generator: + histogram_files['TTJet'] = measurement_config.ttbar_trees[category].replace('PowhegPythia8', generator) # Leftover from run1, when fit method was used # Leave implementation for now @@ -371,7 +371,6 @@ def make_plot( channel, x_axis_title, y_axis_title, preliminary = True 
useQCDControl = True # showErrorBandOnRatio = True - b_tag_bin = '2orMoreBtags' norm_variable = 'MET' # comment out plots you don't want include_plots = [ @@ -387,13 +386,13 @@ def make_plot( channel, x_axis_title, y_axis_title, 'AbsLeptonEta', 'NJets', 'NBJets', - 'NBJetsNoWeight', - 'NBJetsUp', - 'NBJetsDown', - 'NBJets_LightUp', - 'NBJets_LightDown', - 'JetPt', - 'RelIso', + # 'NBJetsNoWeight', + # 'NBJetsUp', + # 'NBJetsDown', + # 'NBJets_LightUp', + # 'NBJets_LightDown', + # 'JetPt', + # 'RelIso', # 'sigmaietaieta' ] @@ -423,6 +422,7 @@ def make_plot( channel, x_axis_title, y_axis_title, b_tag_bin = '2orMoreBtags' # Set folder for this batch of plots + b_tag_bin = '2orMoreBtags' output_folder = output_folder_base + "/Variables/" + selection + "/" make_folder_if_not_exists(output_folder) print '--->', channel From fb373309a091b100596885a76016d8fec97286d4 Mon Sep 17 00:00:00 2001 From: Douglas Burns Date: Fri, 16 Dec 2016 11:17:47 +0000 Subject: [PATCH 62/90] opts->args --- .../xsection/make_control_plots_fromTrees.py | 95 +++++++++++++------ 1 file changed, 64 insertions(+), 31 deletions(-) diff --git a/dps/analysis/xsection/make_control_plots_fromTrees.py b/dps/analysis/xsection/make_control_plots_fromTrees.py index 70078129..3cdb5fc9 100644 --- a/dps/analysis/xsection/make_control_plots_fromTrees.py +++ b/dps/analysis/xsection/make_control_plots_fromTrees.py @@ -310,46 +310,79 @@ def make_plot( channel, x_axis_title, y_axis_title, # make_plot_tmp( qcd_from_data, histogram_properties, save_folder = output_folder_to_use+'test' ) +def parse_arguments(): + parser = ArgumentParser(__doc__) + parser.add_argument( "-p", "--path", + dest = "path", + default = 'data/M3_angle_bl/', + help = "set path to JSON files" + ) + parser.add_argument( "-o", "--output_folder", + dest = "output_folder", + default = 'plots/control_plots/', + help = "set path to save plots" + ) + parser.add_argument( "-m", "--metType", + dest = "metType", + default = 'type1', + help = "set MET type 
used in the analysis of MET-dependent variables" + ) + parser.add_argument( "-c", "--centre-of-mass-energy", + dest = "CoM", + default = 13, type = int, + help = "set the centre of mass energy for analysis. Default = 13 [TeV]" + ) + parser.add_argument( "--category", + dest = "category", + default = 'central', + help = "set the category to take the fit results from (default: central)" + ) + parser.add_argument( "--generator", + dest = "generator", + default = 'PowhegPythia8', + help = "set the generator (PowhegPythia8, powhegHerwigpp, amc, amcatnloHerwigpp, madgraph)" + ) + parser.add_argument( "-n", "--normalise_to_fit", + dest = "normalise_to_fit", + action = "store_true", + help = "normalise the MC to fit results" + ) + parser.add_argument( "-d", "--normalise_to_data", + dest = "normalise_to_data", + action = "store_true", + help = "normalise the MC to data" + ) + parser.add_argument( "-a", "--additional-plots", + action = "store_true", + dest = "additional_QCD_plots", + help = "creates a set of QCD plots for exclusive bins for all variables" + ) + args = parser.parse_args() + return args + + if __name__ == '__main__': set_root_defaults() - parser = OptionParser() - parser.add_option( "-p", "--path", dest = "path", default = 'data/M3_angle_bl/', - help = "set path to JSON files" ) - parser.add_option( "-o", "--output_folder", dest = "output_folder", default = 'plots/control_plots/', - help = "set path to save plots" ) - parser.add_option( "-m", "--metType", dest = "metType", default = 'type1', - help = "set MET type used in the analysis of MET-dependent variables" ) - parser.add_option( "-c", "--centre-of-mass-energy", dest = "CoM", default = 13, type = int, - help = "set the centre of mass energy for analysis. 
Default = 13 [TeV]" ) - parser.add_option( "--category", dest = "category", default = 'central', - help = "set the category to take the fit results from (default: central)" ) - parser.add_option( "--generator", dest = "generator", default = 'PowhegPythia8', - help = "set the generator (PowhegPythia8, powhegHerwigpp, amc, amcatnloHerwigpp, madgraph)" ) - parser.add_option( "-n", "--normalise_to_fit", dest = "normalise_to_fit", action = "store_true", - help = "normalise the MC to fit results" ) - parser.add_option( "-d", "--normalise_to_data", dest = "normalise_to_data", action = "store_true", - help = "normalise the MC to data" ) - parser.add_option( "-a", "--additional-plots", action = "store_true", dest = "additional_QCD_plots", - help = "creates a set of QCD plots for exclusive bins for all variables" ) - - ( options, args ) = parser.parse_args() - measurement_config = XSectionConfig( options.CoM ) + + args = parse_arguments() + + measurement_config = XSectionConfig( args.CoM ) # caching of variables for shorter access translate_options = measurement_config.translate_options - path_to_JSON = '%s/%dTeV/' % ( options.path, measurement_config.centre_of_mass_energy ) - normalise_to_fit = options.normalise_to_fit - normalise_to_data = options.normalise_to_data + path_to_JSON = '%s/%dTeV/' % ( args.path, measurement_config.centre_of_mass_energy ) + normalise_to_fit = args.normalise_to_fit + normalise_to_data = args.normalise_to_data if normalise_to_fit: - output_folder = '%s/after_fit/%dTeV/' % ( options.output_folder, measurement_config.centre_of_mass_energy ) + output_folder = '%s/after_fit/%dTeV/' % ( args.output_folder, measurement_config.centre_of_mass_energy ) else: - output_folder = '%s' % ( options.output_folder ) + output_folder = '%s/before_fit/%dTeV/' % ( args.output_folder, measurement_config.centre_of_mass_energy ) make_folder_if_not_exists( output_folder ) output_folder_base = output_folder - category = options.category - generator = options.generator - 
met_type = translate_options[options.metType] - make_additional_QCD_plots = options.additional_QCD_plots + category = args.category + generator = args.generator + met_type = translate_options[args.metType] + make_additional_QCD_plots = args.additional_QCD_plots histogram_files = { 'TTJet': measurement_config.ttbar_trees[category], From 35ddac4488de63fea0bb6d52711db956578e4497 Mon Sep 17 00:00:00 2001 From: Douglas Burns Date: Fri, 16 Dec 2016 11:19:22 +0000 Subject: [PATCH 63/90] remove normalise_to_fit --- .../xsection/make_control_plots_fromTrees.py | 30 ++++--------------- 1 file changed, 5 insertions(+), 25 deletions(-) diff --git a/dps/analysis/xsection/make_control_plots_fromTrees.py b/dps/analysis/xsection/make_control_plots_fromTrees.py index 3cdb5fc9..c5f41980 100644 --- a/dps/analysis/xsection/make_control_plots_fromTrees.py +++ b/dps/analysis/xsection/make_control_plots_fromTrees.py @@ -41,8 +41,6 @@ def getHistograms( histogram_files, if 'electron' in channel: histogram_files['data'] = measurement_config.data_file_electron histogram_files['QCD'] = measurement_config.electron_QCD_MC_trees[category] - if normalise_to_fit: - normalisation = normalisations_electron[norm_variable] if use_qcd_data_region: qcd_data_region = qcd_data_region_electron # if not 'QCD' in channel and not 'NPU' in branchName: @@ -50,8 +48,6 @@ def getHistograms( histogram_files, if 'muon' in channel: histogram_files['data'] = measurement_config.data_file_muon histogram_files['QCD'] = measurement_config.muon_QCD_MC_trees[category] - if normalise_to_fit: - normalisation = normalisations_muon[norm_variable] if use_qcd_data_region: qcd_data_region = qcd_data_region_muon # if not 'QCD' in channel: @@ -116,12 +112,7 @@ def getHistograms( histogram_files, control_region_hists[sample] = histograms_QCDControlRegion[sample][qcd_control_region] # Prepare histograms - if normalise_to_fit: - # only scale signal region to fit (results are invalid for control region) - prepare_histograms( 
signal_region_hists, rebin = rebin, - scale_factor = measurement_config.luminosity_scale, - normalisation = normalisation ) - elif normalise_to_data: + if normalise_to_data: totalMC = 0 for sample in signal_region_hists: if sample is 'data' : continue @@ -179,7 +170,7 @@ def make_plot( channel, x_axis_title, y_axis_title, ratio_y_limits = [0.5, 1.5], normalise = False, ): - global output_folder, measurement_config, category, normalise_to_fit, showErrorBandOnRatio + global output_folder, measurement_config, category, showErrorBandOnRatio global preliminary, norm_variable, sum_bins, b_tag_bin, histogram_files # Lumi title of plots @@ -277,10 +268,6 @@ def make_plot( channel, x_axis_title, y_axis_title, if branchName in ['NJets', 'NBJets', 'NBJetsNoWeight']: histogram_properties.integerXVariable = True - # if normalise_to_fit: - # histogram_properties.mc_error = get_normalisation_error( normalisation ) - # histogram_properties.mc_errors_label = 'fit uncertainty' - if normalise_to_data: histogram_properties.name += '_normToData' output_folder_to_use = output_folder @@ -342,11 +329,6 @@ def parse_arguments(): default = 'PowhegPythia8', help = "set the generator (PowhegPythia8, powhegHerwigpp, amc, amcatnloHerwigpp, madgraph)" ) - parser.add_argument( "-n", "--normalise_to_fit", - dest = "normalise_to_fit", - action = "store_true", - help = "normalise the MC to fit results" - ) parser.add_argument( "-d", "--normalise_to_data", dest = "normalise_to_data", action = "store_true", @@ -371,12 +353,10 @@ def parse_arguments(): translate_options = measurement_config.translate_options path_to_JSON = '%s/%dTeV/' % ( args.path, measurement_config.centre_of_mass_energy ) - normalise_to_fit = args.normalise_to_fit normalise_to_data = args.normalise_to_data - if normalise_to_fit: - output_folder = '%s/after_fit/%dTeV/' % ( args.output_folder, measurement_config.centre_of_mass_energy ) - else: - output_folder = '%s/before_fit/%dTeV/' % ( args.output_folder, 
measurement_config.centre_of_mass_energy ) + + output_folder = '%s/before_fit/%dTeV/' % ( args.output_folder, measurement_config.centre_of_mass_energy ) + make_folder_if_not_exists( output_folder ) output_folder_base = output_folder category = args.category From 27e3c096e7172c9d4708b1639f788b89b6199f0a Mon Sep 17 00:00:00 2001 From: Douglas Burns Date: Fri, 16 Dec 2016 11:21:17 +0000 Subject: [PATCH 64/90] remove met_type --- dps/analysis/xsection/make_control_plots_fromTrees.py | 9 +-------- 1 file changed, 1 insertion(+), 8 deletions(-) diff --git a/dps/analysis/xsection/make_control_plots_fromTrees.py b/dps/analysis/xsection/make_control_plots_fromTrees.py index c5f41980..a939098a 100644 --- a/dps/analysis/xsection/make_control_plots_fromTrees.py +++ b/dps/analysis/xsection/make_control_plots_fromTrees.py @@ -301,7 +301,7 @@ def parse_arguments(): parser = ArgumentParser(__doc__) parser.add_argument( "-p", "--path", dest = "path", - default = 'data/M3_angle_bl/', + default = 'data/normalisation/background_subtraction', help = "set path to JSON files" ) parser.add_argument( "-o", "--output_folder", @@ -309,11 +309,6 @@ def parse_arguments(): default = 'plots/control_plots/', help = "set path to save plots" ) - parser.add_argument( "-m", "--metType", - dest = "metType", - default = 'type1', - help = "set MET type used in the analysis of MET-dependent variables" - ) parser.add_argument( "-c", "--centre-of-mass-energy", dest = "CoM", default = 13, type = int, @@ -350,7 +345,6 @@ def parse_arguments(): measurement_config = XSectionConfig( args.CoM ) # caching of variables for shorter access - translate_options = measurement_config.translate_options path_to_JSON = '%s/%dTeV/' % ( args.path, measurement_config.centre_of_mass_energy ) normalise_to_data = args.normalise_to_data @@ -361,7 +355,6 @@ def parse_arguments(): output_folder_base = output_folder category = args.category generator = args.generator - met_type = translate_options[args.metType] 
make_additional_QCD_plots = args.additional_QCD_plots histogram_files = { From 2d36804bf564115efb1d41a92d5c05d222d121d0 Mon Sep 17 00:00:00 2001 From: Douglas Burns Date: Fri, 16 Dec 2016 11:22:13 +0000 Subject: [PATCH 65/90] remove path_to_JSON --- dps/analysis/xsection/make_control_plots_fromTrees.py | 7 ------- 1 file changed, 7 deletions(-) diff --git a/dps/analysis/xsection/make_control_plots_fromTrees.py b/dps/analysis/xsection/make_control_plots_fromTrees.py index a939098a..93a83c48 100644 --- a/dps/analysis/xsection/make_control_plots_fromTrees.py +++ b/dps/analysis/xsection/make_control_plots_fromTrees.py @@ -299,11 +299,6 @@ def make_plot( channel, x_axis_title, y_axis_title, def parse_arguments(): parser = ArgumentParser(__doc__) - parser.add_argument( "-p", "--path", - dest = "path", - default = 'data/normalisation/background_subtraction', - help = "set path to JSON files" - ) parser.add_argument( "-o", "--output_folder", dest = "output_folder", default = 'plots/control_plots/', @@ -340,13 +335,11 @@ def parse_arguments(): if __name__ == '__main__': set_root_defaults() - args = parse_arguments() measurement_config = XSectionConfig( args.CoM ) # caching of variables for shorter access - path_to_JSON = '%s/%dTeV/' % ( args.path, measurement_config.centre_of_mass_energy ) normalise_to_data = args.normalise_to_data output_folder = '%s/before_fit/%dTeV/' % ( args.output_folder, measurement_config.centre_of_mass_energy ) From b27a3e341a7c5cfd04e5bfcba5acc07b5a26cd1a Mon Sep 17 00:00:00 2001 From: Douglas Burns Date: Fri, 16 Dec 2016 14:50:35 +0000 Subject: [PATCH 66/90] Fix some bugs and rearrange a little --- .../xsection/make_control_plots_fromTrees.py | 446 ++++++++++-------- 1 file changed, 262 insertions(+), 184 deletions(-) diff --git a/dps/analysis/xsection/make_control_plots_fromTrees.py b/dps/analysis/xsection/make_control_plots_fromTrees.py index 93a83c48..6e67bf5b 100644 --- a/dps/analysis/xsection/make_control_plots_fromTrees.py +++ 
b/dps/analysis/xsection/make_control_plots_fromTrees.py @@ -1,4 +1,4 @@ -from optparse import OptionParser +from argparse import ArgumentParser from dps.config.latex_labels import b_tag_bins_latex, samples_latex, channel_latex, \ variables_latex, fit_variables_latex, control_plots_latex from dps.config.variable_binning import fit_variable_bin_edges, control_plots_bins @@ -31,27 +31,33 @@ def getHistograms( histogram_files, x_limits ): global measurement_config - b_Selection = signal_region_tree.split('/')[-2] + # 2b or No_b selection + b_Selection = signal_region_tree.split('/')[-2] + # Names of QCD regions to use - qcd_data_region = '' - qcd_data_region_electron = 'QCDConversions' - qcd_data_region_muon = 'QCD non iso mu+jets 1p5to3' + qcd_data_region = '' + qcd_data_region_electron = 'QCDConversions' + qcd_data_region_muon = 'QCD non iso mu+jets 1p5to3' # Channel specific files and weights if 'electron' in channel: histogram_files['data'] = measurement_config.data_file_electron - histogram_files['QCD'] = measurement_config.electron_QCD_MC_trees[category] + histogram_files['QCD'] = measurement_config.electron_QCD_MC_trees[category] if use_qcd_data_region: - qcd_data_region = qcd_data_region_electron - # if not 'QCD' in channel and not 'NPU' in branchName: - # weightBranchSignalRegion += ' * ElectronEfficiencyCorrection' + qcd_data_region = qcd_data_region_electron + # No Lepton Eff in QCD CR and PU distributions + if not 'QCD' in channel and not 'NPU' in branchName: + weightBranchSignalRegion += ' * ElectronEfficiencyCorrection' + if 'muon' in channel: histogram_files['data'] = measurement_config.data_file_muon - histogram_files['QCD'] = measurement_config.muon_QCD_MC_trees[category] + histogram_files['QCD'] = measurement_config.muon_QCD_MC_trees[category] if use_qcd_data_region: - qcd_data_region = qcd_data_region_muon - # if not 'QCD' in channel: - # weightBranchSignalRegion += ' * MuonEfficiencyCorrection' + qcd_data_region = qcd_data_region_muon + if not 'QCD' 
in channel: + weightBranchSignalRegion += ' * MuonEfficiencyCorrection' + + # Print all the weights applied to this plot print weightBranchSignalRegion # Apply selection to avoid non-physical values @@ -62,47 +68,103 @@ def getHistograms( histogram_files, histograms = {} histograms_QCDControlRegion = {} - # Get histograms for combined channel - if channel == 'combined': - histogram_files_electron = dict(histogram_files) - histogram_files_electron['data'] = measurement_config.data_file_electron - histogram_files_electron['QCD'] = measurement_config.electron_QCD_MC_trees[category] - histogram_files_muon = dict(histogram_files) - histogram_files_muon['data'] = measurement_config.data_file_muon - histogram_files_muon['QCD'] = measurement_config.muon_QCD_MC_trees[category] - - # histograms_electron = get_histograms_from_trees( trees = [signal_region_tree.replace('COMBINED','EPlusJets')], branch = branchName, weightBranch = weightBranchSignalRegion + ' * ElectronEfficiencyCorrection', files = histogram_files_electron, nBins = nBins, xMin = x_limits[0], xMax = x_limits[-1], selection = selection ) - # histograms_muon = get_histograms_from_trees( trees = [signal_region_tree.replace('COMBINED','MuPlusJets')], branch = branchName, weightBranch = weightBranchSignalRegion + ' * MuonEfficiencyCorrection', files = histogram_files_muon, nBins = nBins, xMin = x_limits[0], xMax = x_limits[-1], selection = selection ) - histograms_muon = get_histograms_from_trees( trees = [signal_region_tree.replace('COMBINED','MuPlusJets')], branch = branchName, weightBranch = weightBranchSignalRegion, files = histogram_files_muon, nBins = nBins, xMin = x_limits[0], xMax = x_limits[-1], selection = selection ) - histograms_electron = get_histograms_from_trees( trees = [signal_region_tree.replace('COMBINED','EPlusJets')], branch = branchName, weightBranch = weightBranchSignalRegion, files = histogram_files_electron, nBins = nBins, xMin = x_limits[0], xMax = x_limits[-1], selection = selection ) + # 
Retreive histograms for the combined channel + if channel == 'combined': + histogram_files_electron = dict(histogram_files) + histogram_files_electron['data'] = measurement_config.data_file_electron + histogram_files_electron['QCD'] = measurement_config.electron_QCD_MC_trees[category] + + histogram_files_muon = dict(histogram_files) + histogram_files_muon['data'] = measurement_config.data_file_muon + histogram_files_muon['QCD'] = measurement_config.muon_QCD_MC_trees[category] + + histograms_electron = get_histograms_from_trees( + trees = [signal_region_tree.replace('COMBINED','EPlusJets')], + branch = branchName, + weightBranch = weightBranchSignalRegion + ' * ElectronEfficiencyCorrection', + files = histogram_files_electron, + nBins = nBins, + xMin = x_limits[0], + xMax = x_limits[-1], + selection = selection + ) + histograms_muon = get_histograms_from_trees( + trees = [signal_region_tree.replace('COMBINED','MuPlusJets')], + branch = branchName, + weightBranch = weightBranchSignalRegion + ' * MuonEfficiencyCorrection', + files = histogram_files_muon, + nBins = nBins, + xMin = x_limits[0], + xMax = x_limits[-1], + selection = selection + ) if use_qcd_data_region: - qcd_control_region = signal_region_tree.replace(b_Selection ,'QCD_Control') + qcd_control_region = signal_region_tree.replace( b_Selection ,'QCD_Control') qcd_control_region_electron = signal_region_tree.replace( b_Selection , qcd_data_region_electron ).replace('COMBINED','EPlusJets') - histograms_electron_QCDControlRegion = get_histograms_from_trees( trees = [qcd_control_region_electron], branch = branchName, weightBranch = 'EventWeight', files = histogram_files_electron, nBins = nBins, xMin = x_limits[0], xMax = x_limits[-1], selection = selection ) - qcd_control_region_muon = signal_region_tree.replace( b_Selection , qcd_data_region_muon ).replace('COMBINED','MuPlusJets') - histograms_muon_QCDControlRegion = get_histograms_from_trees( trees = [qcd_control_region_muon], branch = branchName, 
weightBranch = 'EventWeight', files = histogram_files_muon, nBins = nBins, xMin = x_limits[0], xMax = x_limits[-1], selection = selection ) - + qcd_control_region_muon = signal_region_tree.replace( b_Selection , qcd_data_region_muon ).replace('COMBINED','MuPlusJets') + histograms_electron_QCDControlRegion = get_histograms_from_trees( + trees = [qcd_control_region_electron], + branch = branchName, + weightBranch = 'EventWeight', + files = histogram_files_electron, + nBins = nBins, + xMin = x_limits[0], + xMax = x_limits[-1], + selection = selection + ) + histograms_muon_QCDControlRegion = get_histograms_from_trees( + trees = [qcd_control_region_muon], + branch = branchName, + weightBranch = 'EventWeight', + files = histogram_files_muon, + nBins = nBins, + xMin = x_limits[0], + xMax = x_limits[-1], + selection = selection + ) + + # Combine the electron and muon histograms for sample in histograms_electron: h_electron = histograms_electron[sample][signal_region_tree.replace('COMBINED','EPlusJets')] - h_muon = histograms_muon[sample][signal_region_tree.replace('COMBINED','MuPlusJets')] + h_muon = histograms_muon[sample][signal_region_tree.replace('COMBINED','MuPlusJets')] h_combined = h_electron + h_muon histograms[sample] = { signal_region_tree : h_combined} if use_qcd_data_region: h_qcd_electron = histograms_electron_QCDControlRegion[sample][qcd_control_region_electron] - h_qcd_muon = histograms_muon_QCDControlRegion[sample][qcd_control_region_muon] + h_qcd_muon = histograms_muon_QCDControlRegion[sample][qcd_control_region_muon] h_qcd_combined = h_qcd_electron + h_qcd_muon histograms_QCDControlRegion[sample] = { qcd_control_region : h_qcd_combined } - # Get hsitgorams for specific channel + + # Now for histograms for an single channel else : - histograms = get_histograms_from_trees( trees = [signal_region_tree], branch = branchName, weightBranch = weightBranchSignalRegion, files = histogram_files, nBins = nBins, xMin = x_limits[0], xMax = x_limits[-1], selection = 
selection ) + histograms = get_histograms_from_trees( + trees = [signal_region_tree], + branch = branchName, + weightBranch = weightBranchSignalRegion, + files = histogram_files, + nBins = nBins, + xMin = x_limits[0], + xMax = x_limits[-1], + selection = selection + ) if use_qcd_data_region: qcd_control_region = signal_region_tree.replace( b_Selection , qcd_data_region ) - histograms_QCDControlRegion = get_histograms_from_trees( trees = [qcd_control_region], branch = branchName, weightBranch = 'EventWeight', files = histogram_files, nBins = nBins, xMin = x_limits[0], xMax = x_limits[-1], selection = selection ) + histograms_QCDControlRegion = get_histograms_from_trees( + trees = [qcd_control_region], + branch = branchName, + weightBranch = 'EventWeight', + files = histogram_files, + nBins = nBins, + xMin = x_limits[0], + xMax = x_limits[-1], + selection = selection + ) # Technical step, don't need key for tree + # Book a dictionary full of the histograms to be used signal_region_hists = {} control_region_hists = {} for sample in histograms.keys(): @@ -111,29 +173,37 @@ def getHistograms( histogram_files, if use_qcd_data_region: control_region_hists[sample] = histograms_QCDControlRegion[sample][qcd_control_region] - # Prepare histograms + # Prepare histograms - Scale to data or luminosity if normalise_to_data: totalMC = 0 for sample in signal_region_hists: if sample is 'data' : continue totalMC += signal_region_hists[sample].Integral() newScale = signal_region_hists['data'].Integral() / totalMC - - prepare_histograms( signal_region_hists, rebin = rebin, - scale_factor = newScale, - ) + prepare_histograms( + signal_region_hists, + rebin = rebin, + scale_factor = newScale, + ) else: - prepare_histograms( signal_region_hists, rebin = rebin, - scale_factor = measurement_config.luminosity_scale ) - prepare_histograms( control_region_hists, rebin = rebin, - scale_factor = measurement_config.luminosity_scale ) - - # Use qcd from data control region or not + 
prepare_histograms( + signal_region_hists, + rebin = rebin, + scale_factor = measurement_config.luminosity_scale + ) + prepare_histograms( + control_region_hists, + rebin = rebin, + scale_factor = measurement_config.luminosity_scale + ) + + # Using QCD from data? qcd_from_data = None if use_qcd_data_region: - qcd_from_data = clean_control_region( control_region_hists, - - subtract = ['TTJet', 'V+Jets', 'SingleTop'] ) + qcd_from_data = clean_control_region( + control_region_hists, + subtract = ['TTJet', 'V+Jets', 'SingleTop'] + ) # Normalise control region correctly nBins = signal_region_hists['QCD'].GetNbinsX() n, error = signal_region_hists['QCD'].integral(0,nBins+1,error=True) @@ -148,8 +218,6 @@ def getHistograms( histogram_files, if not n_qcd_control_region == 0: dataDrivenQCDScale = n_qcd_predicted_mc_signal / n_qcd_predicted_mc_control qcd_from_data.Scale( dataDrivenQCDScale.nominal_value ) - # signalToControlScale = n_qcd_predicted_mc_signal / n_qcd_control_region - # dataToMCscale = n_qcd_control_region / n_qcd_predicted_mc_control else: qcd_from_data = signal_region_hists['QCD'] @@ -171,7 +239,7 @@ def make_plot( channel, x_axis_title, y_axis_title, normalise = False, ): global output_folder, measurement_config, category, showErrorBandOnRatio - global preliminary, norm_variable, sum_bins, b_tag_bin, histogram_files + global preliminary, norm_variable, b_tag_bin, histogram_files # Lumi title of plots title = title_template % ( measurement_config.new_luminosity/1000, measurement_config.centre_of_mass_energy ) @@ -179,123 +247,140 @@ def make_plot( channel, x_axis_title, y_axis_title, # Define weights weightBranchSignalRegion = 'EventWeight' + # Apply PU Weights if not "_NPUNoWeight" in name_prefix: if '_NPUUp' in name_prefix: weightBranchSignalRegion += ' * PUWeight_up' elif '_NPUDown' in name_prefix: weightBranchSignalRegion += ' * PUWeight_down' else: weightBranchSignalRegion += ' * PUWeight' + # Apply B Jet Weights if not "_NBJetsNoWeight" in name_prefix: 
if '_NBJetsUp' in name_prefix: weightBranchSignalRegion += ' * BJetUpWeight' elif '_NBJetsDown' in name_prefix: weightBranchSignalRegion += ' * BJetDownWeight' elif '_NBJets_LightUp' in name_prefix: weightBranchSignalRegion += ' * LightJetUpWeight' elif '_NBJets_LightDown' in name_prefix: weightBranchSignalRegion += ' * LightJetDownWeight' else: weightBranchSignalRegion += ' * BJetWeight' + # Get all histograms - signal_region_hists, control_region_hists, qcd_from_data = getHistograms( histogram_files, signal_region_tree, use_qcd_data_region, channel, branchName, weightBranchSignalRegion, nBins, rebin, x_limits ) - + signal_region_hists, control_region_hists, qcd_from_data = getHistograms( + histogram_files, + signal_region_tree, + use_qcd_data_region, + channel, + branchName, + weightBranchSignalRegion, + nBins, + rebin, + x_limits + ) # Which histograms to draw, and properties histograms_to_draw = [] - histogram_lables = [] - histogram_colors = [] - - histograms_to_draw = [signal_region_hists['data'], - qcd_from_data, - signal_region_hists['V+Jets'], - signal_region_hists['SingleTop'], - signal_region_hists['TTJet']] - histogram_lables = ['data', - 'QCD', - 'V+Jets', - 'Single-Top', - samples_latex['TTJet']] - histogram_colors = [colours['data'], - colours['QCD'], - colours['V+Jets'], - colours['Single-Top'], - colours['TTJet'] ] - - - # Printout on normalisation of different samples - print 'Normalisation after selection' - print 'Data :',signal_region_hists['data'].integral(overflow=True) - print 'TTJet :',signal_region_hists['TTJet'].integral(overflow=True) - print 'Single Top :',signal_region_hists['SingleTop'].integral(overflow=True) - print 'V+Jets :',signal_region_hists['V+Jets'].integral(overflow=True) - print 'QCD :',qcd_from_data.integral(overflow=True) - - mcSum = signal_region_hists['TTJet'].integral(overflow=True) + signal_region_hists['SingleTop'].integral(overflow=True) + signal_region_hists['V+Jets'].integral(overflow=True) + 
qcd_from_data.integral(overflow=True) - print 'Total MC :',mcSum - + histogram_lables = [] + histogram_colors = [] + + histograms_to_draw = [ + signal_region_hists['data'], + qcd_from_data, + signal_region_hists['V+Jets'], + signal_region_hists['SingleTop'], + signal_region_hists['TTJet'], + ] + histogram_lables = [ + 'data', + 'QCD', + 'V+Jets', + 'Single-Top', + samples_latex['TTJet'], + ] + histogram_colors = [ + colours['data'], + colours['QCD'], + colours['V+Jets'], + colours['Single-Top'], + colours['TTJet'], + ] + + # Print sample event yields + print_output(signal_region_hists, qcd_from_data) + + # Find maximum y of samples maxData = max( list(signal_region_hists['data'].y()) ) y_limits = [0, maxData * 1.4] if log_y: - y_limits = [0.1, maxData * 100 ] + y_limits = [0.1, maxData * 10 ] - # for i in range(0,signal_region_hists['data'].GetNbinsX()): - # print signal_region_hists['data'].GetBinContent() - # print i - # for h in signal_region_hists: - # print signal_region_hists[h].GetBinContent(i) - - # More histogram settings + # More histogram settings to look semi decent histogram_properties = Histogram_properties() - histogram_properties.name = name_prefix + b_tag_bin + histogram_properties.name = name_prefix + b_tag_bin if category != 'central': - histogram_properties.name += '_' + category - histogram_properties.title = title - histogram_properties.x_axis_title = x_axis_title - histogram_properties.y_axis_title = y_axis_title - histogram_properties.x_limits = x_limits - histogram_properties.y_limits = y_limits - histogram_properties.y_max_scale = y_max_scale - histogram_properties.xerr = None + histogram_properties.name += '_' + category + if normalise_to_data: + histogram_properties.name += '_normToData' + histogram_properties.title = title + histogram_properties.x_axis_title = x_axis_title + histogram_properties.y_axis_title = y_axis_title + histogram_properties.x_limits = x_limits + histogram_properties.y_limits = y_limits + 
histogram_properties.y_max_scale = y_max_scale + histogram_properties.xerr = None # workaround for rootpy issue #638 - histogram_properties.emptybins = True + histogram_properties.emptybins = True + histogram_properties.additional_text = channel_latex[channel] if b_tag_bin: - histogram_properties.additional_text = channel_latex[channel] + ', ' + b_tag_bins_latex[b_tag_bin] - else: - histogram_properties.additional_text = channel_latex[channel] - histogram_properties.legend_location = legend_location - histogram_properties.cms_logo_location = cms_logo_location - histogram_properties.preliminary = preliminary - histogram_properties.set_log_y = log_y - histogram_properties.legend_color = legend_color + histogram_properties.additional_text += b_tag_bins_latex[b_tag_bin] + histogram_properties.legend_location = legend_location + histogram_properties.cms_logo_location = cms_logo_location + histogram_properties.preliminary = preliminary + histogram_properties.set_log_y = log_y + histogram_properties.legend_color = legend_color if ratio_y_limits: - histogram_properties.ratio_y_limits = ratio_y_limits - + histogram_properties.ratio_y_limits = ratio_y_limits if branchName in ['NJets', 'NBJets', 'NBJetsNoWeight']: histogram_properties.integerXVariable = True - if normalise_to_data: - histogram_properties.name += '_normToData' output_folder_to_use = output_folder if use_qcd_data_region: output_folder_to_use += 'WithQCDFromControl/' make_folder_if_not_exists(output_folder_to_use) - if branchName == 'NPU': - getPUWeights(histograms_to_draw, histogram_lables) + # Prints the ratio of PU in Data/MC + # if branchName == 'NPU': + # getPUWeights(histograms_to_draw, histogram_lables) - # Actually draw histograms - # make_data_mc_comparison_plot( histograms_to_draw, histogram_lables, histogram_colors, - # histogram_properties, save_folder = output_folder_to_use, - # show_ratio = False, normalise = normalise, - # ) - # Draw same histogram, but with ratio plot + # Draw histogram with 
ratio plot histogram_properties.name += '_with_ratio' loc = histogram_properties.legend_location # adjust legend location as it is relative to canvas! histogram_properties.legend_location = ( loc[0], loc[1] + 0.05 ) - make_data_mc_comparison_plot( histograms_to_draw, histogram_lables, histogram_colors, - histogram_properties, save_folder = output_folder_to_use, - show_ratio = True, normalise = normalise - ) + make_data_mc_comparison_plot( + histograms_to_draw, + histogram_lables, + histogram_colors, + histogram_properties, + save_folder = output_folder_to_use, + show_ratio = True, + normalise = normalise + ) print ("Plot written to : ", output_folder_to_use) - # make_plot_tmp( qcd_from_data, histogram_properties, save_folder = output_folder_to_use+'test' ) + return +def print_output(signal_region_hists, qcd_from_data): + '''Printout on normalisation of different samples''' + print 'Normalisation after selection' + print 'Data :', signal_region_hists['data'].integral(overflow=True) + print 'TTJet :', signal_region_hists['TTJet'].integral(overflow=True) + print 'Single Top :', signal_region_hists['SingleTop'].integral(overflow=True) + print 'V+Jets :', signal_region_hists['V+Jets'].integral(overflow=True) + print 'QCD :', qcd_from_data.integral(overflow=True) + print '-'*60 + mcSum = signal_region_hists['TTJet'].integral(overflow=True) + signal_region_hists['SingleTop'].integral(overflow=True) + signal_region_hists['V+Jets'].integral(overflow=True) + qcd_from_data.integral(overflow=True) + print 'Total DATA :', signal_region_hists['data'].integral(overflow=True) + print 'Total MC :', mcSum + print '='*60 + return def parse_arguments(): parser = ArgumentParser(__doc__) @@ -306,7 +391,8 @@ def parse_arguments(): ) parser.add_argument( "-c", "--centre-of-mass-energy", dest = "CoM", - default = 13, type = int, + default = 13, + type = int, help = "set the centre of mass energy for analysis. 
Default = 13 [TeV]" ) parser.add_argument( "--category", @@ -338,87 +424,79 @@ def parse_arguments(): args = parse_arguments() measurement_config = XSectionConfig( args.CoM ) - # caching of variables for shorter access normalise_to_data = args.normalise_to_data - output_folder = '%s/before_fit/%dTeV/' % ( args.output_folder, measurement_config.centre_of_mass_energy ) - - make_folder_if_not_exists( output_folder ) + output_folder = '%s/%dTeV/' % ( args.output_folder, measurement_config.centre_of_mass_energy ) output_folder_base = output_folder + make_folder_if_not_exists( output_folder_base ) + category = args.category generator = args.generator make_additional_QCD_plots = args.additional_QCD_plots + # Retreive the appropriate sample histograms histogram_files = { - 'TTJet': measurement_config.ttbar_trees[category], - 'V+Jets': measurement_config.VJets_trees[category], - 'QCD': measurement_config.electron_QCD_MC_trees[category], - 'SingleTop': measurement_config.SingleTop_trees[category], + 'TTJet' : measurement_config.ttbar_trees[category], + 'V+Jets' : measurement_config.VJets_trees[category], + 'QCD' : measurement_config.electron_QCD_MC_trees[category], + 'SingleTop' : measurement_config.SingleTop_trees[category], } - if 'PowhegPythia8' not in generator: histogram_files['TTJet'] = measurement_config.ttbar_trees[category].replace('PowhegPythia8', generator) - # Leftover from run1, when fit method was used - # Leave implementation for now - normalisations_electron = { - } - normalisations_muon = { - } - preliminary = True useQCDControl = True # showErrorBandOnRatio = True norm_variable = 'MET' + # comment out plots you don't want include_plots = [ - 'HT', - 'MET', - 'ST', - 'WPT', - 'NVertex', - 'NVertexNoWeight', - 'NVertexUp', - 'NVertexDown', - 'LeptonPt', - 'AbsLeptonEta', - 'NJets', - 'NBJets', - # 'NBJetsNoWeight', - # 'NBJetsUp', - # 'NBJetsDown', - # 'NBJets_LightUp', - # 'NBJets_LightDown', - # 'JetPt', - # 'RelIso', - # 'sigmaietaieta' - ] + 'HT', + 
'MET', + 'ST', + 'WPT', + 'NVertex', + 'NVertexNoWeight', + 'NVertexUp', + 'NVertexDown', + 'LeptonPt', + 'AbsLeptonEta', + 'NJets', + 'NBJets', + # 'NBJetsNoWeight', + # 'NBJetsUp', + # 'NBJetsDown', + # 'NBJets_LightUp', + # 'NBJets_LightDown', + # 'JetPt', + # 'RelIso', + # 'sigmaietaieta' + ] additional_qcd_plots = [ - 'QCDHT', - 'QCDMET', - 'QCDST', - 'QCDWPT', - 'QCDAbsLeptonEta', - 'QCDLeptonPt', - 'QCDNJets', - - # 'QCDsigmaietaieta', - 'QCDRelIso', - # 'QCDHT_dataControl_mcSignal', - ] + 'QCDHT', + 'QCDMET', + 'QCDST', + 'QCDWPT', + 'QCDAbsLeptonEta', + 'QCDLeptonPt', + 'QCDNJets', + + # 'QCDsigmaietaieta', + 'QCDRelIso', + # 'QCDHT_dataControl_mcSignal', + ] if make_additional_QCD_plots: include_plots.extend( additional_qcd_plots ) selection = 'Ref selection' # also 'Ref selection NoBSelection' for channel, label in { - 'electron' : 'EPlusJets', - 'muon' : 'MuPlusJets', - 'combined' : 'COMBINED' - }.iteritems() : - b_tag_bin = '2orMoreBtags' + 'electron' : 'EPlusJets', + 'muon' : 'MuPlusJets', + 'combined' : 'COMBINED' + }.iteritems() : # Set folder for this batch of plots b_tag_bin = '2orMoreBtags' From 8108aa916a3e507293015fe5bed520b2a2cf3ff9 Mon Sep 17 00:00:00 2001 From: Douglas Burns Date: Fri, 6 Jan 2017 15:19:05 +0000 Subject: [PATCH 67/90] rename scripts in unfolding_tests --- .../{makeConfig.py => 00_makeConfig.py} | 0 .../{getBestTau.py => 01_getBestTau.py} | 0 ... 
=> 01_get_best_regularisation_TUnfold.py} | 0 ...weighting.py => 02_compare_reweighting.py} | 61 +++++++------------ .../{closure_test.py => 03_closure_test.py} | 0 .../{create_toy_mc.py => 04_create_toy_mc.py} | 0 ...ta.py => 05_create_unfolding_pull_data.py} | 0 ...ots.py => 06_make_unfolding_pull_plots.py} | 0 dps/analysis/unfolding_tests/README.md | 20 +++--- 9 files changed, 31 insertions(+), 50 deletions(-) rename dps/analysis/unfolding_tests/{makeConfig.py => 00_makeConfig.py} (100%) rename dps/analysis/unfolding_tests/{getBestTau.py => 01_getBestTau.py} (100%) rename dps/analysis/unfolding_tests/{get_best_regularisation_TUnfold.py => 01_get_best_regularisation_TUnfold.py} (100%) rename dps/analysis/unfolding_tests/{compare_reweighting.py => 02_compare_reweighting.py} (63%) rename dps/analysis/unfolding_tests/{closure_test.py => 03_closure_test.py} (100%) rename dps/analysis/unfolding_tests/{create_toy_mc.py => 04_create_toy_mc.py} (100%) rename dps/analysis/unfolding_tests/{create_unfolding_pull_data.py => 05_create_unfolding_pull_data.py} (100%) rename dps/analysis/unfolding_tests/{make_unfolding_pull_plots.py => 06_make_unfolding_pull_plots.py} (100%) diff --git a/dps/analysis/unfolding_tests/makeConfig.py b/dps/analysis/unfolding_tests/00_makeConfig.py similarity index 100% rename from dps/analysis/unfolding_tests/makeConfig.py rename to dps/analysis/unfolding_tests/00_makeConfig.py diff --git a/dps/analysis/unfolding_tests/getBestTau.py b/dps/analysis/unfolding_tests/01_getBestTau.py similarity index 100% rename from dps/analysis/unfolding_tests/getBestTau.py rename to dps/analysis/unfolding_tests/01_getBestTau.py diff --git a/dps/analysis/unfolding_tests/get_best_regularisation_TUnfold.py b/dps/analysis/unfolding_tests/01_get_best_regularisation_TUnfold.py similarity index 100% rename from dps/analysis/unfolding_tests/get_best_regularisation_TUnfold.py rename to dps/analysis/unfolding_tests/01_get_best_regularisation_TUnfold.py diff --git 
a/dps/analysis/unfolding_tests/compare_reweighting.py b/dps/analysis/unfolding_tests/02_compare_reweighting.py similarity index 63% rename from dps/analysis/unfolding_tests/compare_reweighting.py rename to dps/analysis/unfolding_tests/02_compare_reweighting.py index 94b10812..1eb0fdcc 100644 --- a/dps/analysis/unfolding_tests/compare_reweighting.py +++ b/dps/analysis/unfolding_tests/02_compare_reweighting.py @@ -9,20 +9,15 @@ from dps.utils.plotting import compare_measurements, Histogram_properties from dps.config import latex_labels - - def main(): config = XSectionConfig(13) - file_for_powhegPythia = File(config.unfolding_central, 'read') - file_for_ptReweight_up = File(config.unfolding_ptreweight_up, 'read') - file_for_ptReweight_down = File(config.unfolding_ptreweight_down, 'read') - file_for_etaReweight_up = File(config.unfolding_etareweight_up, 'read') - file_for_etaReweight_down = File(config.unfolding_etareweight_down, 'read') - file_for_data_template = 'data/normalisation/background_subtraction/13TeV/{variable}/VisiblePS/central/normalisation_combined_patType1CorrectedPFMet.txt' - - + file_for_powhegPythia = File(config.unfolding_central, 'read') + file_for_ptReweight = File(config.unfolding_ptreweight, 'read') + file_for_etaReweight_up = File(config.unfolding_etareweight_up, 'read') + file_for_etaReweight_down = File(config.unfolding_etareweight_down, 'read') + file_for_data_template = 'data/normalisation/background_subtraction/13TeV/{variable}/VisiblePS/central/normalisation_combined.txt' for channel in ['combined']: for variable in config.variables: @@ -43,8 +38,8 @@ def main(): # Get the reweighted powheg pythia distributions - _, _, response_pt_reweighted_up, _ = get_unfold_histogram_tuple( - inputfile=file_for_ptReweight_up, + _, _, response_pt_reweighted, _ = get_unfold_histogram_tuple( + inputfile=file_for_ptReweight, variable=variable, channel=channel, centre_of_mass=13, @@ -52,21 +47,9 @@ def main(): visiblePS=True ) - measured_pt_reweighted_up = 
asrootpy(response_pt_reweighted_up.ProjectionX('px',1)) - truth_pt_reweighted_up = asrootpy(response_pt_reweighted_up.ProjectionY()) + measured_pt_reweighted = asrootpy(response_pt_reweighted_up.ProjectionX('px',1)) + truth_pt_reweighted = asrootpy(response_pt_reweighted_up.ProjectionY()) - _, _, response_pt_reweighted_down, _ = get_unfold_histogram_tuple( - inputfile=file_for_ptReweight_down, - variable=variable, - channel=channel, - centre_of_mass=13, - load_fakes=False, - visiblePS=True - ) - - measured_pt_reweighted_down = asrootpy(response_pt_reweighted_down.ProjectionX('px',1)) - truth_pt_reweighted_down = asrootpy(response_pt_reweighted_down.ProjectionY()) - _, _, response_eta_reweighted_up, _ = get_unfold_histogram_tuple( inputfile=file_for_etaReweight_up, variable=variable, @@ -101,9 +84,9 @@ def main(): hp = Histogram_properties() hp.name = 'Reweighting_check_{channel}_{variable}_at_{com}TeV'.format( - channel=channel, - variable=variable, - com='13', + channel=channel, + variable=variable, + com='13', ) v_latex = latex_labels.variables_latex[variable] @@ -115,28 +98,26 @@ def main(): hp.title = 'Reweighting check for {variable}'.format(variable=v_latex) measured_central.Rebin(2) - measured_pt_reweighted_up.Rebin(2) - measured_pt_reweighted_down.Rebin(2) + measured_pt_reweighted.Rebin(2) measured_eta_reweighted_up.Rebin(2) measured_eta_reweighted_down.Rebin(2) data.Rebin(2) measured_central.Scale( 1 / measured_central.Integral() ) - measured_pt_reweighted_up.Scale( 1 / measured_pt_reweighted_up.Integral() ) - measured_pt_reweighted_down.Scale( 1 / measured_pt_reweighted_down.Integral() ) + measured_pt_reweighted.Scale( 1 / measured_pt_reweighted.Integral() ) measured_eta_reweighted_up.Scale( 1 / measured_eta_reweighted_up.Integral() ) measured_eta_reweighted_down.Scale( 1/ measured_eta_reweighted_down.Integral() ) data.Scale( 1 / data.Integral() ) compare_measurements( - models = {'Central' : measured_central, 'PtReweighted Up' : 
measured_pt_reweighted_up, 'PtReweighted Down' : measured_pt_reweighted_down, 'EtaReweighted Up' : measured_eta_reweighted_up, 'EtaReweighted Down' : measured_eta_reweighted_down}, - measurements = {'Data' : data}, - show_measurement_errors=True, - histogram_properties=hp, - save_folder='plots/unfolding/reweighting_check', - save_as=['pdf'] - ) + models = {'Central' : measured_central, 'PtReweighted' : measured_pt_reweighted, 'EtaReweighted Up' : measured_eta_reweighted_up, 'EtaReweighted Down' : measured_eta_reweighted_down}, + measurements = {'Data' : data}, + show_measurement_errors=True, + histogram_properties=hp, + save_folder='plots/unfolding/reweighting_check', + save_as=['pdf'] + ) if __name__ == '__main__': diff --git a/dps/analysis/unfolding_tests/closure_test.py b/dps/analysis/unfolding_tests/03_closure_test.py similarity index 100% rename from dps/analysis/unfolding_tests/closure_test.py rename to dps/analysis/unfolding_tests/03_closure_test.py diff --git a/dps/analysis/unfolding_tests/create_toy_mc.py b/dps/analysis/unfolding_tests/04_create_toy_mc.py similarity index 100% rename from dps/analysis/unfolding_tests/create_toy_mc.py rename to dps/analysis/unfolding_tests/04_create_toy_mc.py diff --git a/dps/analysis/unfolding_tests/create_unfolding_pull_data.py b/dps/analysis/unfolding_tests/05_create_unfolding_pull_data.py similarity index 100% rename from dps/analysis/unfolding_tests/create_unfolding_pull_data.py rename to dps/analysis/unfolding_tests/05_create_unfolding_pull_data.py diff --git a/dps/analysis/unfolding_tests/make_unfolding_pull_plots.py b/dps/analysis/unfolding_tests/06_make_unfolding_pull_plots.py similarity index 100% rename from dps/analysis/unfolding_tests/make_unfolding_pull_plots.py rename to dps/analysis/unfolding_tests/06_make_unfolding_pull_plots.py diff --git a/dps/analysis/unfolding_tests/README.md b/dps/analysis/unfolding_tests/README.md index ea65c67e..beb29753 100644 --- a/dps/analysis/unfolding_tests/README.md +++ 
b/dps/analysis/unfolding_tests/README.md @@ -14,15 +14,15 @@ Summary of what to run: Make the configs. These store where the input unfolding files and input data (ttbar normalisation) are. Check that you pick up the correct files - typically they are in your local dps directory, or on hdfs. ```shell -python src/unfolding_tests/makeConfig.py +python src/unfolding_tests/00_makeConfig.py ``` You can get the best regularisation for one variable/phase space/channel (i.e. one config file), example: ```shell -python src/unfolding_tests/get_best_regularisation_TUnfold.py config/unfolding/VisiblePS/abs_lepton_eta_13TeV_combined_channel.json +python src/unfolding_tests/01_get_best_regularisation_TUnfold.py config/unfolding/VisiblePS/abs_lepton_eta_13TeV_combined_channel.json ``` or run on several using wildcards. To run on all 13TeV variables, combined channel, in the visible phase: ```shell -python src/unfolding_tests/get_best_regularisation_TUnfold.py config/unfolding/VisiblePS/*_13TeV_combined_channel.json +python src/unfolding_tests/01_get_best_regularisation_TUnfold.py config/unfolding/VisiblePS/*_13TeV_combined_channel.json ``` ## Reweighting check @@ -35,7 +35,7 @@ and the underlying true distribution in data, should then be smaller than (or si the bias seen in the unfolded distributions for the reweighted samples. ```shell -python src/unfolding_tests/compare_reweighting.py +python src/unfolding_tests/02_compare_reweighting.py ``` @@ -49,21 +49,21 @@ For the bias plots, the central case is plotted as points, and all other MC samp Currently, the central sample (Powheg Pythia) is plotted, along with two reweighted MC samples. The reweighting is performed on the top pt. ```shell -python src/unfolding_tests/closure_test.py +python src/unfolding_tests/03_closure_test.py ``` ## Creating toy MC First we need to create a set of toy MC. 
Run ```shell -python src/unfolding_tests/create_toy_mc.py -s powhegPythia +python src/unfolding_tests/04_create_toy_mc.py -s powhegPythia ``` This will create 300 toy mc (300 is the default amount, probably need more for a full study) based on the powheg pythia sample. Other possible options for -s are currently "madgraph" and "amcatnlo" For more information about available parameters, do ```shell -python src/unfolding_tests/create_toy_mc.py -h +python src/unfolding_tests/04_create_toy_mc.py -h ``` This will create a root file in data/toy_mc named toy_mc_powhegPythia_N_300_13TeV.root (generally toy_mc__N__TeV.root). @@ -71,7 +71,7 @@ This file can be used in the next step. ## Creating pull distributions ```shell -python src/unfolding_tests/create_unfolding_pull_data.py -f data/toy_mc/toy_mc_powhegPythia_N_300_13TeV.root -c combined -n 10 -v HT -s powhegPythia --tau 0.001 +python src/unfolding_tests/05_create_unfolding_pull_data.py -f data/toy_mc/toy_mc_powhegPythia_N_300_13TeV.root -c combined -n 10 -v HT -s powhegPythia --tau 0.001 ``` This will consider the toy mc file, for HT in the combined channel. It will take the first 10 toy mc in that file, and unfold with a tau value of 0.001. 
Output will be placed in: @@ -95,9 +95,9 @@ Passing --scan_tau will tell the script to submit jobs for a range of tau values ## Analysing pull data Making the plots (just pass a file created by the previous step): ```shell -python src/unfolding_tests/make_unfolding_pull_plots.py data/pull_data/13TeV/HT/powhegPythia/Pull_data_TUnfold_combined_0.001905.txt +python src/unfolding_tests/06_make_unfolding_pull_plots.py data/pull_data/13TeV/HT/powhegPythia/Pull_data_TUnfold_combined_0.001905.txt ``` for more information on which plots are going to be produce please consult ```shell -python src/unfolding_tests/make_unfolding_pull_plots.py -h +python src/unfolding_tests/06_make_unfolding_pull_plots.py -h ``` From 6e533a333f54dd4ff98f03b0e40b5977e65c18a5 Mon Sep 17 00:00:00 2001 From: Douglas Burns Date: Fri, 6 Jan 2017 15:41:25 +0000 Subject: [PATCH 68/90] hopefully make tarring of dps work --- dps/condor/prepare_dps.sh | 5 +++-- 1 file changed, 3 insertions(+), 2 deletions(-) diff --git a/dps/condor/prepare_dps.sh b/dps/condor/prepare_dps.sh index acb1bf99..5414b097 100755 --- a/dps/condor/prepare_dps.sh +++ b/dps/condor/prepare_dps.sh @@ -8,9 +8,10 @@ if [ -f "dps.tar" ]; then fi echo "... creating tar file (dps.tar)" mkdir -p jobs -tar -zcf dps.tar bin dps config jobs src tools experimental \ +tar -zcf dps.tar dps bin config jobs \ --exclude="*.pyc" --exclude="jobs/*/logs" \ ---exclude "*.tar" --exclude="config/unfolding" --exclude="experimental/topReco" +--exclude="*.tar" --exclude="config/unfolding" \ +--exclude="dps/legacy/*" # hadoop fs -mkdir -p $1 # hadoop fs -copyFromLocal dps.tar $1 From 80ffd2c461f08cb60a03b82f7dff7434f187f7e5 Mon Sep 17 00:00:00 2001 From: Douglas Burns Date: Fri, 6 Jan 2017 15:44:24 +0000 Subject: [PATCH 69/90] rename new config creater. 
mv old to legacy --- dps/analysis/xsection/create_measurement.py | 705 +++++++----------- .../xsection/create_measurement2p0.py | 327 -------- dps/legacy/xsection/create_measurement.py | 486 ++++++++++++ 3 files changed, 759 insertions(+), 759 deletions(-) delete mode 100644 dps/analysis/xsection/create_measurement2p0.py create mode 100644 dps/legacy/xsection/create_measurement.py diff --git a/dps/analysis/xsection/create_measurement.py b/dps/analysis/xsection/create_measurement.py index 659e91ea..241cecde 100644 --- a/dps/analysis/xsection/create_measurement.py +++ b/dps/analysis/xsection/create_measurement.py @@ -9,478 +9,319 @@ Example: python src/cross_section_measurement/create_measurement.py -c ''' -from __future__ import print_function -from optparse import OptionParser +from argparse import ArgumentParser from dps.config.xsection import XSectionConfig from dps.config import variable_binning -from dps.utils.input import Input from dps.utils.logger import log -from copy import deepcopy -from dps.utils.measurement import Measurement, Systematic +from dps.utils.file_utilities import write_data_to_JSON # define logger for this module create_measurement_log = log["01b_get_ttjet_normalisation"] cml = create_measurement_log # alias - @cml.trace() def main(): - parser = OptionParser(__doc__) - parser.add_option("-c", "--centre-of-mass-energy", dest="CoM", default=13, type=int, - help="set the centre of mass energy for analysis. 
Default = 13 [TeV]") - parser.add_option('-d', '--debug', dest="debug", action="store_true", - help="Print the debug information") - (options, _) = parser.parse_args() - centre_of_mass_energy = options.CoM - # set global variables - debug = options.debug - if debug: - log.setLevel(log.DEBUG) - - measurement_config = XSectionConfig(centre_of_mass_energy) - categories = ['QCD_shape'] - categories.extend(measurement_config.categories_and_prefixes.keys()) - categories.extend(measurement_config.rate_changing_systematics_names) - categories.extend([measurement_config.vjets_theory_systematic_prefix + scale for scale in ['scaleup', 'scaledown']]) - - for variable in measurement_config.variables: - for category in categories: - for channel in ['electron', 'muon']: - if channel == 'electron' and (category == 'Muon_down' or category == 'Muon_up'): - continue - elif channel == 'muon' and (category == 'Electron_down' or category == 'Electron_up'): - continue - # create_measurement( - # centre_of_mass_energy, category, variable, channel, - # phase_space='FullPS', norm_method='background_subtraction') - # and the visible phase space - create_measurement( - centre_of_mass_energy, category, variable, channel, - phase_space='VisiblePS', norm_method='background_subtraction') - - -@cml.trace() -def create_measurement(com, category, variable, channel, phase_space, norm_method): - if com == 13: - # exclude non existing systematics - if 'VJets' in category and 'scale' in category: - print('Excluding {0} for now'.format(category)) - return - config = XSectionConfig(com) - met_type = get_met_type(category, config) - should_not_run_systematic = category in config.met_systematics_suffixes and variable in config.variables_no_met and not 'JES' in category and not 'JER' in category - if should_not_run_systematic: - # no MET uncertainty on HT (but JES and JER of course) - return - - m = None - if category == 'central': - m = Measurement(category) - else: - vjet_systematics = 
[config.vjets_theory_systematic_prefix + - systematic for systematic in config.generator_systematics] - if category in config.categories_and_prefixes.keys() or \ - category in config.met_systematics_suffixes or \ - category in vjet_systematics: - m = Systematic(category, - stype=Systematic.SHAPE, - affected_samples=config.samples) - elif category in config.rate_changing_systematics_names: - m = config.rate_changing_systematics_values[category] - - elif category == 'QCD_shape': - m = Systematic(category, - stype=Systematic.SHAPE, - affected_samples=['QCD'], - ) - - m.setVariable(variable) - m.setCentreOfMassEnergy(com) - m.setChannel(channel) - m.setMETType(met_type) - - inputs = { - 'channel': config.analysis_types[channel], - 'met_type': met_type, - 'selection': 'Ref selection', - 'btag': config.translate_options['2m'], # 2 or more - 'energy': com, - 'variable': variable, - 'category': category, - 'phase_space': phase_space, - 'norm_method': norm_method, - 'lepton': channel.title(), - } - variable_template = config.variable_path_templates[ - variable].format(**inputs) - - template_category = category - if category == 'QCD_shape' or category in config.rate_changing_systematics_names: - template_category = 'central' - if category in [config.vjets_theory_systematic_prefix + systematic for systematic in config.generator_systematics]: - template_category = 'central' - - m.addSample( - 'TTJet', - False, - input=create_input( - config, 'TTJet', variable, template_category, channel, - variable_template, phase_space=phase_space, measurement=m, - ), - ) - m.addSample( - 'V+Jets', - False, - input=create_input( - config, 'V+Jets', variable, template_category, channel, - variable_template, phase_space=phase_space, measurement=m, - ), - ) - m.addSample( - 'SingleTop', - False, - input=create_input( - config, 'SingleTop', variable, template_category, channel, - variable_template, phase_space=phase_space, measurement=m, - ), + parser = ArgumentParser(__doc__) + 
parser.add_argument( + "-c", + "--centre-of-mass-energy", + dest="CoM", + default=13, + type=int, + help="set the centre of mass energy for analysis. Default = 13 [TeV]" ) - m.addSample( - 'QCD', - False, - input=create_input( - config, 'QCD', variable, template_category, channel, - variable_template, phase_space=phase_space, measurement=m, - ), + parser.add_argument( + '-d', + '--debug', + dest="debug", + action="store_true", + help="Print the debug information" ) - variable_template_data = variable_template.replace( - met_type, config.translate_options['type1']) - - m.addSample( - 'data', - False, - input=create_input( - config, 'data', variable, template_category, channel, - variable_template_data, phase_space=phase_space, measurement=m, - ), + parser.add_argument( + '-q', + '--qcd_from_data', + dest="data_driven_qcd", + default=True, + help="Print the debug information" ) - - m_qcd = Measurement(category) - m_qcd.setVariable(variable) - m_qcd.setCentreOfMassEnergy(com) - - qcd_template = get_qcd_template(config, variable, category, channel) - - # we want "measurement = m" here since all rate systematics should apply - # to the control regions as well - m_qcd.addSample( - 'TTJet', - False, - input=create_input( - config, 'TTJet', variable, template_category, channel, - qcd_template, phase_space=phase_space, measurement=m, - ), - ) - m_qcd.addSample( - 'V+Jets', - False, - input=create_input( - config, 'V+Jets', variable, template_category, channel, - qcd_template, phase_space=phase_space, measurement=m, - ), - ) - m_qcd.addSample( - 'SingleTop', - False, - input=create_input( - config, 'SingleTop', variable, template_category, channel, - qcd_template, phase_space=phase_space, measurement=m, - ), - ) - m_qcd.addSample( - 'QCD', - False, - input=create_input( - config, 'QCD', variable, template_category, channel, - qcd_template, phase_space=phase_space, measurement=m, - ), - ) - m_qcd.addSample( - 'data', - False, - input=create_input( - config, 'data', variable, 
template_category, channel, - qcd_template, phase_space=phase_space, measurement=m, - ), - ) - - m.addShapeForSample('QCD', m_qcd, False) - norm_qcd = deepcopy(m_qcd) - # we want QCD shape and normalisation to be separate - if category == 'QCD_shape': - for sample in norm_qcd.samples.keys(): - tree = norm_qcd.samples[sample]['input'].tree_name - if channel == 'electron': - tree = tree.replace(config.electron_control_region_systematic, - config.electron_control_region) - else: - tree = tree.replace(config.muon_control_region_systematic, - config.muon_control_region) - norm_qcd.samples[sample]['input'].tree_name = tree - if 'QCD_cross_section' in category: - for sample in norm_qcd.samples.keys(): - tree = norm_qcd.samples[sample]['input'].tree_name - if channel == 'electron': - tree = tree.replace(config.electron_control_region, - config.electron_control_region_systematic) - else: - tree = tree.replace(config.muon_control_region, - config.muon_control_region_systematic) - norm_qcd.samples[sample]['input'].tree_name = tree - - m.addNormForSample('QCD', norm_qcd, False) - - if category in [config.vjets_theory_systematic_prefix + systematic for systematic in config.generator_systematics]: - v_template_category = category.replace( - config.vjets_theory_systematic_prefix, '') - m_vjets = Measurement(category) - m_vjets.setVariable(variable) - m_vjets.setCentreOfMassEnergy(com) - m_vjets.addSample( - 'V+Jets', - False, - input=create_input( - config, 'V+Jets', variable, v_template_category, - channel, - variable_template, - config.generator_systematic_vjets_templates[ - v_template_category]), - phase_space=phase_space, measurement=m, - ) - m.addShapeForSample('V+Jets', m_vjets, False) - - inputs['channel'] = channel - base_path = 'config/measurements/{norm_method}/{energy}TeV/' - base_path += '{channel}/{variable}/{phase_space}/' - if category == 'central': - path = base_path + '{category}.json' - m.toJSON(path.format(**inputs)) - else: - if m.type == Systematic.SHAPE: - 
inputs['type'] = 'shape_systematic' - else: - inputs['type'] = 'rate_systematic' - if category in config.met_systematics_suffixes and category not in ['JES_up', 'JES_down', 'JER_up', 'JER_down']: - inputs['category'] = met_type - path = base_path + '{category}_{type}.json' - m.toJSON(path.format(**inputs)) + args = parser.parse_args() + + options = {} + options['com'] = args.CoM + options['data_driven_qcd'] = args.data_driven_qcd + if args.debug: log.setLevel(log.DEBUG) + + + xsec_config = XSectionConfig(options['com']) + categories = xsec_config.normalisation_systematics + print categories + + # Create specific configs required + for ps in ['VisiblePS', 'FullPS']: + options['ps']=ps + for channel in ['electron', 'muon']: + options['channel']=channel + for variable in xsec_config.variables: + options['variable']=variable + for category in categories: + if channel == 'electron' and (category == 'Muon_down' or category == 'Muon_up'): + continue + elif channel == 'muon' and (category == 'Electron_down' or category == 'Electron_up'): + continue + elif variable in xsec_config.variables_no_met and category in xsec_config.met_specific_systematics: + continue + options['category']=category + + m = create_measurement( + options, + norm_method='background_subtraction', + ) + + write_measurement( + options, + m, + norm_method='background_subtraction', + ) @cml.trace() -def get_met_type(category, config): - met_type = config.translate_options['type1'] - if category == 'JES_up': - met_type += 'JetEnUp' - elif category == 'JES_down': - met_type += 'JetEnDown' - elif category == 'JER_up': - met_type += 'JetResUp' - elif category == 'JER_down': - met_type += 'JetResDown' - - isJetSystematic = 'JetEn' in category or 'JetRes' in category - isJetSystematic = isJetSystematic or 'JES' in category - isJetSystematic = isJetSystematic or 'JER' in category - - if category in config.met_systematics_suffixes: - # already done them - if not isJetSystematic: - met_type = met_type + category - 
- return met_type +def create_measurement(options, norm_method): + ''' + Create the config file + ''' + # Create dictionary to write to config file + measurement = {} + xsec_config = XSectionConfig(options['com']) + + # Generate basic normalisation config info + measurement["com"] = options['com'] + measurement["channel"] = options['channel'] + measurement["variable"] = options['variable'] + measurement["name"] = options['category'] + measurement["data_driven_qcd"] = options['data_driven_qcd'] + + # Add specific samples to config + measurement["samples"] = get_samples(options, xsec_config) + return measurement @cml.trace() -def get_file(config, sample, category, channel): - use_trees = True if config.centre_of_mass_energy == 13 else False - if channel == 'electron': - qcd_template = config.electron_QCD_MC_category_templates[category] - data_template = config.data_file_electron - qcd_template_tree = config.electron_QCD_MC_category_templates_trees[ - category] - data_template_tree = config.data_file_electron_trees - else: - qcd_template = config.muon_QCD_MC_category_templates[category] - data_template = config.data_file_muon - qcd_template_tree = config.muon_QCD_MC_category_templates_trees[ - category] - data_template_tree = config.data_file_muon_trees - - tree_files = { - 'TTJet': config.ttbar_category_templates_trees[category], - 'V+Jets': config.VJets_category_templates_trees[category], - 'SingleTop': config.SingleTop_category_templates_trees[category], - 'QCD': qcd_template_tree, - 'data': data_template_tree - } - files = { - 'TTJet': config.ttbar_category_templates[category], - 'V+Jets': config.VJets_category_templates[category], - 'SingleTop': config.SingleTop_category_templates[category], - 'QCD': qcd_template, - 'data': data_template, - } +def get_samples(options, xsec_config): + ''' + Return the dictionary of all sample information + ''' + # create samples dictionary + samples = {} + for s in xsec_config.samples: + samples[s] = get_sample_info(options, 
xsec_config, s) - if use_trees: - return tree_files[sample] - else: - return files[sample] + return samples @cml.trace() -def get_qcd_template(config, variable, category, channel): - qcd_inputs = { - 'channel': config.analysis_types[channel], - 'met_type': config.translate_options['type1'], # always central MET - 'selection': 'Ref selection', - 'btag': config.translate_options['2m'], # 2 or more - 'energy': config.centre_of_mass_energy, - 'variable': variable, - 'category': 'central', # always central - 'lepton': channel.title(), - } - - qcd_template = config.variable_path_templates[ - variable].format(**qcd_inputs) - if channel == 'electron': - qcd_template = qcd_template.replace( - 'Ref selection', config.electron_control_region) - if category == 'QCD_shape': - qcd_template = qcd_template.replace( - config.electron_control_region, - config.electron_control_region_systematic) +def get_sample_info(options, xsec_config, sample): + ''' + Generate each measurements information + ''' + # create sample info + sample_info = {} + + # Branch (variable) + sample_info["branch"] = options['variable'] + if 'abs_lepton_eta' in options['variable']: + sample_info["branch"] = 'abs(lepton_eta)' + + # Selections + sample_info["selection"] = get_selection(options['variable']) + + # MET Systematics + # Only Met Variables + if options['variable'] not in xsec_config.variables_no_met: + # Only MET Syst measurement + if options['category'] in xsec_config.met_specific_systematics: + sample_info["branch"] += '_METUncertainties[{index}]'.format(index = str(xsec_config.met_systematics[options['category']])) + + # Bin Edges + if options['ps'] == 'VisiblePS': + sample_info["bin_edges"] = variable_binning.reco_bin_edges_vis[options['variable']] + elif options['ps'] == 'FullPS': + sample_info["bin_edges"] = variable_binning.reco_bin_edges_full[options['variable']] else: - qcd_template = qcd_template.replace( - 'Ref selection', config.muon_control_region) - if category == 'QCD_shape': - 
qcd_template = qcd_template.replace( - config.muon_control_region, - config.muon_control_region_systematic) - - return qcd_template - - -@cml.trace() -def create_input(config, sample, variable, category, channel, template, - input_file=None, phase_space=None, **kwargs): - tree, branch, hist = None, None, None - selection = '1' - if not input_file: - input_file = get_file(config, sample, category, channel) - - if config.centre_of_mass_energy == 13: - branch = template.split('/')[-1] - tree = template.replace('/' + branch, '') - - if 'absolute_eta' in branch: - branch = 'abs(lepton_eta)' - - if sample != 'data': - if category in config.met_systematics_suffixes and not variable in config.variables_no_met: - branch = template.split('/')[-1] - branch += '_METUncertainties[%s]' % config.met_systematics[ - category] - - if 'JES_down' in category or 'JES_up' in category or 'JER_down' in category or 'JER_up' in category: - tree += config.categories_and_prefixes[category] - - if not sample == 'data': - if 'JES_down' in category: - input_file = input_file.replace('tree', 'minusJES_tree') - elif 'JES_up' in category: - input_file = input_file.replace('tree', 'plusJES_tree') - elif 'JER_up' in category: - input_file = input_file.replace('tree', 'plusJER_tree') - elif 'JER_down' in category: - input_file = input_file.replace('tree', 'minusJER_tree') - - selection = '{0} >= 0'.format(branch) - if variable == 'abs_lepton_eta': - selection += ' && {0} <= 3'.format(branch) - else: - hist = template - - lumi_scale = config.luminosity_scale - scale = 1. - - m = kwargs['measurement'] - if m.type == Systematic.RATE: - if 'luminosity' in m.name: - lumi_scale = lumi_scale * m.scale - else: - if sample in m.affected_samples: - scale = m.scale - if sample == 'data': # data is not scaled in any way - lumi_scale = 1. - scale = 1. 
- - edges = variable_binning.reco_bin_edges_full[variable] - if phase_space == 'VisiblePS': - edges = variable_binning.reco_bin_edges_vis[variable] - + sample_info["bin_edges"] = None + + # Lumi Scale (Rate) + # Normal lumi scale + ls = 1.0 + # If want to rescale MC to new lumi + if 'data' not in sample: + ls = xsec_config.luminosity_scale + sample_info["lumi_scale"]=ls + lumi_scale = xsec_config.rate_changing_systematics['luminosity'] + if options['category'] == 'luminosity+': + sample_info["lumi_scale"]= ls*(1+lumi_scale) + elif options['category'] == 'luminosity-': + sample_info["lumi_scale"]= ls*(1-lumi_scale) + + # Generator Scale (Rate) + sample_info["scale"]=1.0 + generator_scale = xsec_config.rate_changing_systematics['V+Jets_cross_section'] + if options['category'] == 'V+Jets_cross_section+': + sample_info["scale"] = 1.0 + 1.0*generator_scale + elif options['category'] == 'V+Jets_cross_section-': + sample_info["scale"] = 1.0 - 1.0*generator_scale + generator_scale = xsec_config.rate_changing_systematics['SingleTop_cross_section'] + if options['category'] == 'SingleTop_cross_section+': + sample_info["scale"] = 1.0 + 1.0*generator_scale + elif options['category'] == 'SingleTop_cross_section-': + sample_info["scale"] = 1.0 - 1.0*generator_scale + generator_scale = xsec_config.rate_changing_systematics['QCD_cross_section'] + if options['category'] == 'QCD_cross_section+': + sample_info["scale"] = 1.0 + 1.0*generator_scale + elif options['category'] == 'QCD_cross_section-': + sample_info["scale"] = 1.0 - 1.0*generator_scale + # scaling will always have some non zero value + if sample_info["scale"] <= 0.0001: sample_info["scale"] = 0.0001 + + # Weight branches (Shape) weight_branches = [] if sample == 'data': weight_branches.append('1') else: weight_branches.append('EventWeight') - if 'PileUp' not in category: - weight_branches.append('PUWeight') - elif category == 'PileUp_up': + # PU Weights + if options['category'] == 'PileUp_up': 
weight_branches.append('PUWeight_up') - elif category == 'PileUp_down': + elif options['category'] == 'PileUp_down': weight_branches.append('PUWeight_down') else: - weight_branches.append('1') + weight_branches.append('PUWeight') - if category == 'BJet_down': - weight_branches.append('BJetDownWeight') - elif category == 'BJet_up': + # BJet Weights + if options['category'] == 'BJet_up': weight_branches.append('BJetUpWeight') - elif category == 'LightJet_down': - weight_branches.append('LightJetDownWeight') - elif category == 'LightJet_up': + elif options['category'] == 'BJet_down': + weight_branches.append('BJetDownWeight') + elif options['category'] == 'LightJet_up': weight_branches.append('LightJetUpWeight') + elif options['category'] == 'LightJet_down': + weight_branches.append('LightJetDownWeight') else: weight_branches.append('BJetWeight') - # if not 'QCD' in tree: - # if channel == 'muon': - # if category == 'Muon_down': - # weight_branches.append('MuonDown') - # elif category == 'Muon_up': - # weight_branches.append('MuonUp') - # else: - # weight_branches.append('MuonEfficiencyCorrection') - # elif channel == 'electron': - # if category == 'Electron_down': - # weight_branches.append('ElectronDown') - # elif category == 'Electron_up': - # weight_branches.append('ElectronUp') - # else: - # weight_branches.append('ElectronEfficiencyCorrection') - - i = Input( - input_file=input_file, - hist=hist, - tree=tree, - branch=branch, - selection=selection, - bin_edges=edges, - lumi_scale=lumi_scale, - scale=scale, - weight_branches=weight_branches, + # Lepton Weights + # Lepton weights for nonisolated leptons are removed in measurement.py + # The lepton sf are not derived for non isolated leptons + # if options['channel'] == 'muon': + # if options['category'] == 'Muon_down': + # weight_branches.append('MuonDown') + # elif options['category'] == 'Muon_up': + # weight_branches.append('MuonUp') + # else: + # weight_branches.append('MuonEfficiencyCorrection') + # elif 
options['channel'] == 'electron': + # if options['category'] == 'Electron_down': + # weight_branches.append('ElectronDown') + # elif options['category'] == 'Electron_up': + # weight_branches.append('ElectronUp') + # else: + # weight_branches.append('ElectronEfficiencyCorrection') + sample_info["weight_branches"] = weight_branches + + # Input File and Tree + # QCD Contorol Regions (Shape) JES and JER + sample_info["input_file"] = get_file(xsec_config, sample, options) + sample_info["tree"], sample_info["qcd_control_region"] = get_tree(xsec_config, options) + if sample != 'data': + if options['category'] == 'JES_up': + sample_info["input_file"] = sample_info["input_file"].replace('tree', 'plusJES_tree') + sample_info["tree"] = sample_info["tree"].replace('FitVariables', 'FitVariables_JESUp') + sample_info["qcd_control_region"] = sample_info["qcd_control_region"].replace('FitVariables', 'FitVariables_JESUp') + elif options['category'] == 'JES_down': + sample_info["input_file"] = sample_info["input_file"].replace('tree', 'minusJES_tree') + sample_info["tree"] = sample_info["tree"].replace('FitVariables', 'FitVariables_JESDown') + sample_info["qcd_control_region"] = sample_info["qcd_control_region"].replace('FitVariables', 'FitVariables_JESDown') + elif options['category'] == 'JER_up': + sample_info["input_file"] = sample_info["input_file"].replace('tree', 'plusJER_tree') + sample_info["tree"] = sample_info["tree"].replace('FitVariables', 'FitVariables_JERUp') + sample_info["qcd_control_region"] = sample_info["qcd_control_region"].replace('FitVariables', 'FitVariables_JERUp') + elif options['category'] == 'JER_down': + sample_info["input_file"] = sample_info["input_file"].replace('tree', 'minusJER_tree') + sample_info["tree"] = sample_info["tree"].replace('FitVariables', 'FitVariables_JERDown') + sample_info["qcd_control_region"] = sample_info["qcd_control_region"].replace('FitVariables', 'FitVariables_JERDown') + + return sample_info + +@cml.trace() +def 
get_selection(var): + ''' + Return a selection for the branch used by ROOT.Tree.Draw() + ''' + sel = str(var)+" >= 0" + if 'abs_lepton_eta' in var: + sel = "abs(lepton_eta) >= 0 && abs(lepton_eta) <= 3" + return sel + + +@cml.trace() +def get_file(config, sample, options): + ''' + Return a specific sample file + ''' + if options['channel'] == 'electron': + qcd = config.electron_QCD_MC_trees[options['category']] + data = config.data_file_electron + else: + qcd = config.muon_QCD_MC_trees[options['category']] + data = config.data_file_muon + + files = { + 'TTBar': config.ttbar_trees[options['category']], + 'V+Jets': config.VJets_trees[options['category']], + 'SingleTop': config.SingleTop_trees[options['category']], + 'QCD': qcd, + 'data': data + } + return files[sample] + + +@cml.trace() +def get_tree(config, options): + ''' + Return a specific sample tree + ''' + tree = config.tree_path[options['channel']] + if options["data_driven_qcd"]: + # QCD control region + qcd_tree = tree.replace( + "Ref selection", config.qcd_control_region[options['channel']]) + # QCD shape systematic + if "QCD_shape" in options['category']: + qcd_tree = tree.replace( + "Ref selection", config.qcd_shape_syst_region[options['channel']]) + else: + qcd_tree = None + return tree, qcd_tree + + +@cml.trace() +def write_measurement(options, measurement, norm_method): + ''' + Write the config + ''' + base_path = 'config/measurements/{norm_method}/{energy}TeV/{channel}/{variable}/{phase_space}/' + path = base_path + '{category}.json' + + path = path.format( + norm_method = norm_method, + energy = options['com'], + channel = options['channel'], + variable = options['variable'], + phase_space = options['ps'], + category = options['category'], ) - return i + write_data_to_JSON(measurement, path, indent = True) + return if __name__ == '__main__': main() diff --git a/dps/analysis/xsection/create_measurement2p0.py b/dps/analysis/xsection/create_measurement2p0.py deleted file mode 100644 index 
dc5e0eb6..00000000 --- a/dps/analysis/xsection/create_measurement2p0.py +++ /dev/null @@ -1,327 +0,0 @@ -''' - Translates the current config (for a given centre-of-mass energy) - into JSON configs. The configs will be written to - config/measurements/background_subtraction/TeV/ - - Usage: - python src/cross_section_measurement/create_measurement.py -c - - Example: - python src/cross_section_measurement/create_measurement.py -c -''' -from argparse import ArgumentParser -from dps.config.xsection import XSectionConfig -from dps.config import variable_binning -from dps.utils.logger import log -from dps.utils.file_utilities import write_data_to_JSON - -# define logger for this module -create_measurement_log = log["01b_get_ttjet_normalisation"] -cml = create_measurement_log # alias - -@cml.trace() -def main(): - parser = ArgumentParser(__doc__) - parser.add_argument( - "-c", - "--centre-of-mass-energy", - dest="CoM", - default=13, - type=int, - help="set the centre of mass energy for analysis. 
Default = 13 [TeV]" - ) - parser.add_argument( - '-d', - '--debug', - dest="debug", - action="store_true", - help="Print the debug information" - ) - parser.add_argument( - '-q', - '--qcd_from_data', - dest="data_driven_qcd", - default=True, - help="Print the debug information" - ) - args = parser.parse_args() - - options = {} - options['com'] = args.CoM - options['data_driven_qcd'] = args.data_driven_qcd - if args.debug: log.setLevel(log.DEBUG) - - - xsec_config = XSectionConfig(options['com']) - categories = xsec_config.normalisation_systematics - print categories - - # Create specific configs required - for ps in ['VisiblePS', 'FullPS']: - options['ps']=ps - for channel in ['electron', 'muon']: - options['channel']=channel - for variable in xsec_config.variables: - options['variable']=variable - for category in categories: - if channel == 'electron' and (category == 'Muon_down' or category == 'Muon_up'): - continue - elif channel == 'muon' and (category == 'Electron_down' or category == 'Electron_up'): - continue - elif variable in xsec_config.variables_no_met and category in xsec_config.met_specific_systematics: - continue - options['category']=category - - m = create_measurement( - options, - norm_method='background_subtraction', - ) - - write_measurement( - options, - m, - norm_method='background_subtraction', - ) - - -@cml.trace() -def create_measurement(options, norm_method): - ''' - Create the config file - ''' - # Create dictionary to write to config file - measurement = {} - xsec_config = XSectionConfig(options['com']) - - # Generate basic normalisation config info - measurement["com"] = options['com'] - measurement["channel"] = options['channel'] - measurement["variable"] = options['variable'] - measurement["name"] = options['category'] - measurement["data_driven_qcd"] = options['data_driven_qcd'] - - # Add specific samples to config - measurement["samples"] = get_samples(options, xsec_config) - return measurement - - -@cml.trace() -def 
get_samples(options, xsec_config): - ''' - Return the dictionary of all sample information - ''' - # create samples dictionary - samples = {} - for s in xsec_config.samples: - samples[s] = get_sample_info(options, xsec_config, s) - - return samples - - -@cml.trace() -def get_sample_info(options, xsec_config, sample): - ''' - Generate each measurements information - ''' - # create sample info - sample_info = {} - - # Branch (variable) - sample_info["branch"] = options['variable'] - if 'abs_lepton_eta' in options['variable']: - sample_info["branch"] = 'abs(lepton_eta)' - - # Selections - sample_info["selection"] = get_selection(options['variable']) - - # MET Systematics - # Only Met Variables - if options['variable'] not in xsec_config.variables_no_met: - # Only MET Syst measurement - if options['category'] in xsec_config.met_specific_systematics: - sample_info["branch"] += '_METUncertainties[{index}]'.format(index = str(xsec_config.met_systematics[options['category']])) - - # Bin Edges - if options['ps'] == 'VisiblePS': - sample_info["bin_edges"] = variable_binning.reco_bin_edges_vis[options['variable']] - elif options['ps'] == 'FullPS': - sample_info["bin_edges"] = variable_binning.reco_bin_edges_full[options['variable']] - else: - sample_info["bin_edges"] = None - - # Lumi Scale (Rate) - # Normal lumi scale - ls = 1.0 - # If want to rescale MC to new lumi - if 'data' not in sample: - ls = xsec_config.luminosity_scale - sample_info["lumi_scale"]=ls - lumi_scale = xsec_config.rate_changing_systematics['luminosity'] - if options['category'] == 'luminosity+': - sample_info["lumi_scale"]= ls*(1+lumi_scale) - elif options['category'] == 'luminosity-': - sample_info["lumi_scale"]= ls*(1-lumi_scale) - - # Generator Scale (Rate) - sample_info["scale"]=1.0 - generator_scale = xsec_config.rate_changing_systematics['V+Jets_cross_section'] - if options['category'] == 'V+Jets_cross_section+': - sample_info["scale"] = 1.0 + 1.0*generator_scale - elif options['category'] == 
'V+Jets_cross_section-': - sample_info["scale"] = 1.0 - 1.0*generator_scale - generator_scale = xsec_config.rate_changing_systematics['SingleTop_cross_section'] - if options['category'] == 'SingleTop_cross_section+': - sample_info["scale"] = 1.0 + 1.0*generator_scale - elif options['category'] == 'SingleTop_cross_section-': - sample_info["scale"] = 1.0 - 1.0*generator_scale - generator_scale = xsec_config.rate_changing_systematics['QCD_cross_section'] - if options['category'] == 'QCD_cross_section+': - sample_info["scale"] = 1.0 + 1.0*generator_scale - elif options['category'] == 'QCD_cross_section-': - sample_info["scale"] = 1.0 - 1.0*generator_scale - # scaling will always have some non zero value - if sample_info["scale"] <= 0.0001: sample_info["scale"] = 0.0001 - - # Weight branches (Shape) - weight_branches = [] - if sample == 'data': - weight_branches.append('1') - else: - weight_branches.append('EventWeight') - - # PU Weights - if options['category'] == 'PileUp_up': - weight_branches.append('PUWeight_up') - elif options['category'] == 'PileUp_down': - weight_branches.append('PUWeight_down') - else: - weight_branches.append('PUWeight') - - # BJet Weights - if options['category'] == 'BJet_up': - weight_branches.append('BJetUpWeight') - elif options['category'] == 'BJet_down': - weight_branches.append('BJetDownWeight') - elif options['category'] == 'LightJet_up': - weight_branches.append('LightJetUpWeight') - elif options['category'] == 'LightJet_down': - weight_branches.append('LightJetDownWeight') - else: - weight_branches.append('BJetWeight') - - # Lepton Weights - # Lepton weights for nonisolated leptons are removed in measurement.py - # The lepton sf are not derived for non isolated leptons - if options['channel'] == 'muon': - if options['category'] == 'Muon_down': - weight_branches.append('MuonDown') - elif options['category'] == 'Muon_up': - weight_branches.append('MuonUp') - else: - weight_branches.append('MuonEfficiencyCorrection') - elif 
options['channel'] == 'electron': - if options['category'] == 'Electron_down': - weight_branches.append('ElectronDown') - elif options['category'] == 'Electron_up': - weight_branches.append('ElectronUp') - else: - weight_branches.append('ElectronEfficiencyCorrection') - sample_info["weight_branches"] = weight_branches - - # Input File and Tree - # QCD Contorol Regions (Shape) JES and JER - sample_info["input_file"] = get_file(xsec_config, sample, options) - sample_info["tree"], sample_info["qcd_control_region"] = get_tree(xsec_config, options) - if sample != 'data': - if options['category'] == 'JES_up': - sample_info["input_file"] = sample_info["input_file"].replace('tree', 'plusJES_tree') - sample_info["tree"] = sample_info["tree"].replace('FitVariables', 'FitVariables_JESUp') - sample_info["qcd_control_region"] = sample_info["qcd_control_region"].replace('FitVariables', 'FitVariables_JESUp') - elif options['category'] == 'JES_down': - sample_info["input_file"] = sample_info["input_file"].replace('tree', 'minusJES_tree') - sample_info["tree"] = sample_info["tree"].replace('FitVariables', 'FitVariables_JESDown') - sample_info["qcd_control_region"] = sample_info["qcd_control_region"].replace('FitVariables', 'FitVariables_JESDown') - elif options['category'] == 'JER_up': - sample_info["input_file"] = sample_info["input_file"].replace('tree', 'plusJER_tree') - sample_info["tree"] = sample_info["tree"].replace('FitVariables', 'FitVariables_JERUp') - sample_info["qcd_control_region"] = sample_info["qcd_control_region"].replace('FitVariables', 'FitVariables_JERUp') - elif options['category'] == 'JER_down': - sample_info["input_file"] = sample_info["input_file"].replace('tree', 'minusJER_tree') - sample_info["tree"] = sample_info["tree"].replace('FitVariables', 'FitVariables_JERDown') - sample_info["qcd_control_region"] = sample_info["qcd_control_region"].replace('FitVariables', 'FitVariables_JERDown') - - return sample_info - -@cml.trace() -def get_selection(var): - ''' 
- Return a selection for the branch used by ROOT.Tree.Draw() - ''' - sel = str(var)+" >= 0" - if 'abs_lepton_eta' in var: - sel = "abs(lepton_eta) >= 0 && abs(lepton_eta) <= 3" - return sel - - -@cml.trace() -def get_file(config, sample, options): - ''' - Return a specific sample file - ''' - if options['channel'] == 'electron': - qcd = config.electron_QCD_MC_trees[options['category']] - data = config.data_file_electron - else: - qcd = config.muon_QCD_MC_trees[options['category']] - data = config.data_file_muon - - files = { - 'TTBar': config.ttbar_trees[options['category']], - 'V+Jets': config.VJets_trees[options['category']], - 'SingleTop': config.SingleTop_trees[options['category']], - 'QCD': qcd, - 'data': data - } - return files[sample] - - -@cml.trace() -def get_tree(config, options): - ''' - Return a specific sample tree - ''' - tree = config.tree_path[options['channel']] - if options["data_driven_qcd"]: - # QCD control region - qcd_tree = tree.replace( - "Ref selection", config.qcd_control_region[options['channel']]) - # QCD shape systematic - if "QCD_shape" in options['category']: - qcd_tree = tree.replace( - "Ref selection", config.qcd_shape_syst_region[options['channel']]) - else: - qcd_tree = None - return tree, qcd_tree - - -@cml.trace() -def write_measurement(options, measurement, norm_method): - ''' - Write the config - ''' - base_path = 'TESTING/config/measurements/{norm_method}/{energy}TeV/{channel}/{variable}/{phase_space}/' - path = base_path + '{category}.json' - - path = path.format( - norm_method = norm_method, - energy = options['com'], - channel = options['channel'], - variable = options['variable'], - phase_space = options['ps'], - category = options['category'], - ) - write_data_to_JSON(measurement, path, indent = True) - return - -if __name__ == '__main__': - main() diff --git a/dps/legacy/xsection/create_measurement.py b/dps/legacy/xsection/create_measurement.py new file mode 100644 index 00000000..dfcdf93e --- /dev/null +++ 
b/dps/legacy/xsection/create_measurement.py @@ -0,0 +1,486 @@ +''' + Translates the current config (for a given centre-of-mass energy) + into JSON configs. The configs will be written to + config/measurements/background_subtraction/TeV/ + + Usage: + python src/cross_section_measurement/create_measurement.py -c + + Example: + python src/cross_section_measurement/create_measurement.py -c +''' +from __future__ import print_function +from optparse import OptionParser +from dps.config.xsection import XSectionConfig +from dps.config import variable_binning +from dps.utils.input import Input +from dps.utils.logger import log +from copy import deepcopy +from dps.utils.measurement import Measurement, Systematic + +# define logger for this module +create_measurement_log = log["01b_get_ttjet_normalisation"] +cml = create_measurement_log # alias + + +@cml.trace() +def main(): + parser = OptionParser(__doc__) + parser.add_option("-c", "--centre-of-mass-energy", dest="CoM", default=13, type=int, + help="set the centre of mass energy for analysis. 
Default = 13 [TeV]") + parser.add_option('-d', '--debug', dest="debug", action="store_true", + help="Print the debug information") + (options, _) = parser.parse_args() + centre_of_mass_energy = options.CoM + # set global variables + debug = options.debug + if debug: + log.setLevel(log.DEBUG) + + measurement_config = XSectionConfig(centre_of_mass_energy) + categories = ['QCD_shape'] + categories.extend(measurement_config.categories_and_prefixes.keys()) + categories.extend(measurement_config.rate_changing_systematics_names) + categories.extend([measurement_config.vjets_theory_systematic_prefix + scale for scale in ['scaleup', 'scaledown']]) + + for variable in measurement_config.variables: + for category in categories: + for channel in ['electron', 'muon']: + if channel == 'electron' and (category == 'Muon_down' or category == 'Muon_up'): + continue + elif channel == 'muon' and (category == 'Electron_down' or category == 'Electron_up'): + continue + # create_measurement( + # centre_of_mass_energy, category, variable, channel, + # phase_space='FullPS', norm_method='background_subtraction') + # and the visible phase space + create_measurement( + centre_of_mass_energy, category, variable, channel, + phase_space='VisiblePS', norm_method='background_subtraction') + + +@cml.trace() +def create_measurement(com, category, variable, channel, phase_space, norm_method): + if com == 13: + # exclude non existing systematics + if 'VJets' in category and 'scale' in category: + print('Excluding {0} for now'.format(category)) + return + config = XSectionConfig(com) + met_type = get_met_type(category, config) + should_not_run_systematic = category in config.met_systematics_suffixes and variable in config.variables_no_met and not 'JES' in category and not 'JER' in category + if should_not_run_systematic: + # no MET uncertainty on HT (but JES and JER of course) + return + + m = None + if category == 'central': + m = Measurement(category) + else: + vjet_systematics = 
[config.vjets_theory_systematic_prefix + + systematic for systematic in config.generator_systematics] + if category in config.categories_and_prefixes.keys() or \ + category in config.met_systematics_suffixes or \ + category in vjet_systematics: + m = Systematic(category, + stype=Systematic.SHAPE, + affected_samples=config.samples) + elif category in config.rate_changing_systematics_names: + m = config.rate_changing_systematics_values[category] + + elif category == 'QCD_shape': + m = Systematic(category, + stype=Systematic.SHAPE, + affected_samples=['QCD'], + ) + + m.setVariable(variable) + m.setCentreOfMassEnergy(com) + m.setChannel(channel) + m.setMETType(met_type) + + inputs = { + 'channel': config.analysis_types[channel], + 'met_type': met_type, + 'selection': 'Ref selection', + 'btag': config.translate_options['2m'], # 2 or more + 'energy': com, + 'variable': variable, + 'category': category, + 'phase_space': phase_space, + 'norm_method': norm_method, + 'lepton': channel.title(), + } + variable_template = config.variable_path_templates[ + variable].format(**inputs) + + template_category = category + if category == 'QCD_shape' or category in config.rate_changing_systematics_names: + template_category = 'central' + if category in [config.vjets_theory_systematic_prefix + systematic for systematic in config.generator_systematics]: + template_category = 'central' + + m.addSample( + 'TTJet', + False, + input=create_input( + config, 'TTJet', variable, template_category, channel, + variable_template, phase_space=phase_space, measurement=m, + ), + ) + m.addSample( + 'V+Jets', + False, + input=create_input( + config, 'V+Jets', variable, template_category, channel, + variable_template, phase_space=phase_space, measurement=m, + ), + ) + m.addSample( + 'SingleTop', + False, + input=create_input( + config, 'SingleTop', variable, template_category, channel, + variable_template, phase_space=phase_space, measurement=m, + ), + ) + m.addSample( + 'QCD', + False, + 
input=create_input( + config, 'QCD', variable, template_category, channel, + variable_template, phase_space=phase_space, measurement=m, + ), + ) + variable_template_data = variable_template.replace( + met_type, config.translate_options['type1']) + + m.addSample( + 'data', + False, + input=create_input( + config, 'data', variable, template_category, channel, + variable_template_data, phase_space=phase_space, measurement=m, + ), + ) + + m_qcd = Measurement(category) + m_qcd.setVariable(variable) + m_qcd.setCentreOfMassEnergy(com) + + qcd_template = get_qcd_template(config, variable, category, channel) + + # we want "measurement = m" here since all rate systematics should apply + # to the control regions as well + m_qcd.addSample( + 'TTJet', + False, + input=create_input( + config, 'TTJet', variable, template_category, channel, + qcd_template, phase_space=phase_space, measurement=m, + ), + ) + m_qcd.addSample( + 'V+Jets', + False, + input=create_input( + config, 'V+Jets', variable, template_category, channel, + qcd_template, phase_space=phase_space, measurement=m, + ), + ) + m_qcd.addSample( + 'SingleTop', + False, + input=create_input( + config, 'SingleTop', variable, template_category, channel, + qcd_template, phase_space=phase_space, measurement=m, + ), + ) + m_qcd.addSample( + 'QCD', + False, + input=create_input( + config, 'QCD', variable, template_category, channel, + qcd_template, phase_space=phase_space, measurement=m, + ), + ) + m_qcd.addSample( + 'data', + False, + input=create_input( + config, 'data', variable, template_category, channel, + qcd_template, phase_space=phase_space, measurement=m, + ), + ) + + m.addShapeForSample('QCD', m_qcd, False) + norm_qcd = deepcopy(m_qcd) + # we want QCD shape and normalisation to be separate + if category == 'QCD_shape': + for sample in norm_qcd.samples.keys(): + tree = norm_qcd.samples[sample]['input'].tree_name + if channel == 'electron': + tree = tree.replace(config.electron_control_region_systematic, + 
config.electron_control_region) + else: + tree = tree.replace(config.muon_control_region_systematic, + config.muon_control_region) + norm_qcd.samples[sample]['input'].tree_name = tree + if 'QCD_cross_section' in category: + for sample in norm_qcd.samples.keys(): + tree = norm_qcd.samples[sample]['input'].tree_name + if channel == 'electron': + tree = tree.replace(config.electron_control_region, + config.electron_control_region_systematic) + else: + tree = tree.replace(config.muon_control_region, + config.muon_control_region_systematic) + norm_qcd.samples[sample]['input'].tree_name = tree + + m.addNormForSample('QCD', norm_qcd, False) + + if category in [config.vjets_theory_systematic_prefix + systematic for systematic in config.generator_systematics]: + v_template_category = category.replace( + config.vjets_theory_systematic_prefix, '') + m_vjets = Measurement(category) + m_vjets.setVariable(variable) + m_vjets.setCentreOfMassEnergy(com) + m_vjets.addSample( + 'V+Jets', + False, + input=create_input( + config, 'V+Jets', variable, v_template_category, + channel, + variable_template, + config.generator_systematic_vjets_templates[ + v_template_category]), + phase_space=phase_space, measurement=m, + ) + m.addShapeForSample('V+Jets', m_vjets, False) + + inputs['channel'] = channel + base_path = 'config/measurements/{norm_method}/{energy}TeV/' + base_path += '{channel}/{variable}/{phase_space}/' + if category == 'central': + path = base_path + '{category}.json' + m.toJSON(path.format(**inputs)) + else: + if m.type == Systematic.SHAPE: + inputs['type'] = 'shape_systematic' + else: + inputs['type'] = 'rate_systematic' + if category in config.met_systematics_suffixes and category not in ['JES_up', 'JES_down', 'JER_up', 'JER_down']: + inputs['category'] = met_type + path = base_path + '{category}_{type}.json' + m.toJSON(path.format(**inputs)) + + +@cml.trace() +def get_met_type(category, config): + met_type = config.translate_options['type1'] + if category == 'JES_up': + 
met_type += 'JetEnUp' + elif category == 'JES_down': + met_type += 'JetEnDown' + elif category == 'JER_up': + met_type += 'JetResUp' + elif category == 'JER_down': + met_type += 'JetResDown' + + isJetSystematic = 'JetEn' in category or 'JetRes' in category + isJetSystematic = isJetSystematic or 'JES' in category + isJetSystematic = isJetSystematic or 'JER' in category + + if category in config.met_systematics_suffixes: + # already done them + if not isJetSystematic: + met_type = met_type + category + + return met_type + + +@cml.trace() +def get_file(config, sample, category, channel): + use_trees = True if config.centre_of_mass_energy == 13 else False + if channel == 'electron': + qcd_template = config.electron_QCD_MC_category_templates[category] + data_template = config.data_file_electron + qcd_template_tree = config.electron_QCD_MC_category_templates_trees[ + category] + data_template_tree = config.data_file_electron_trees + else: + qcd_template = config.muon_QCD_MC_category_templates[category] + data_template = config.data_file_muon + qcd_template_tree = config.muon_QCD_MC_category_templates_trees[ + category] + data_template_tree = config.data_file_muon_trees + + tree_files = { + 'TTJet': config.ttbar_category_templates_trees[category], + 'V+Jets': config.VJets_category_templates_trees[category], + 'SingleTop': config.SingleTop_category_templates_trees[category], + 'QCD': qcd_template_tree, + 'data': data_template_tree + } + files = { + 'TTJet': config.ttbar_category_templates[category], + 'V+Jets': config.VJets_category_templates[category], + 'SingleTop': config.SingleTop_category_templates[category], + 'QCD': qcd_template, + 'data': data_template, + } + + if use_trees: + return tree_files[sample] + else: + return files[sample] + + +@cml.trace() +def get_qcd_template(config, variable, category, channel): + qcd_inputs = { + 'channel': config.analysis_types[channel], + 'met_type': config.translate_options['type1'], # always central MET + 'selection': 'Ref 
selection', + 'btag': config.translate_options['2m'], # 2 or more + 'energy': config.centre_of_mass_energy, + 'variable': variable, + 'category': 'central', # always central + 'lepton': channel.title(), + } + + qcd_template = config.variable_path_templates[ + variable].format(**qcd_inputs) + if channel == 'electron': + qcd_template = qcd_template.replace( + 'Ref selection', config.electron_control_region) + if category == 'QCD_shape': + qcd_template = qcd_template.replace( + config.electron_control_region, + config.electron_control_region_systematic) + else: + qcd_template = qcd_template.replace( + 'Ref selection', config.muon_control_region) + if category == 'QCD_shape': + qcd_template = qcd_template.replace( + config.muon_control_region, + config.muon_control_region_systematic) + + return qcd_template + + +@cml.trace() +def create_input(config, sample, variable, category, channel, template, + input_file=None, phase_space=None, **kwargs): + tree, branch, hist = None, None, None + selection = '1' + if not input_file: + input_file = get_file(config, sample, category, channel) + + if config.centre_of_mass_energy == 13: + branch = template.split('/')[-1] + tree = template.replace('/' + branch, '') + + if 'absolute_eta' in branch: + branch = 'abs(lepton_eta)' + + if sample != 'data': + if category in config.met_systematics_suffixes and not variable in config.variables_no_met: + branch = template.split('/')[-1] + branch += '_METUncertainties[%s]' % config.met_systematics[ + category] + + if 'JES_down' in category or 'JES_up' in category or 'JER_down' in category or 'JER_up' in category: + tree += config.categories_and_prefixes[category] + + if not sample == 'data': + if 'JES_down' in category: + input_file = input_file.replace('tree', 'minusJES_tree') + elif 'JES_up' in category: + input_file = input_file.replace('tree', 'plusJES_tree') + elif 'JER_up' in category: + input_file = input_file.replace('tree', 'plusJER_tree') + elif 'JER_down' in category: + input_file = 
input_file.replace('tree', 'minusJER_tree') + + selection = '{0} >= 0'.format(branch) + if variable == 'abs_lepton_eta': + selection += ' && {0} <= 3'.format(branch) + else: + hist = template + + lumi_scale = config.luminosity_scale + scale = 1. + + m = kwargs['measurement'] + if m.type == Systematic.RATE: + if 'luminosity' in m.name: + lumi_scale = lumi_scale * m.scale + else: + if sample in m.affected_samples: + scale = m.scale + if sample == 'data': # data is not scaled in any way + lumi_scale = 1. + scale = 1. + + edges = variable_binning.reco_bin_edges_full[variable] + if phase_space == 'VisiblePS': + edges = variable_binning.reco_bin_edges_vis[variable] + + weight_branches = [] + if sample == 'data': + weight_branches.append('1') + else: + weight_branches.append('EventWeight') + + if 'PileUp' not in category: + weight_branches.append('PUWeight') + elif category == 'PileUp_up': + weight_branches.append('PUWeight_up') + elif category == 'PileUp_down': + weight_branches.append('PUWeight_down') + else: + weight_branches.append('1') + + if category == 'BJet_down': + weight_branches.append('BJetDownWeight') + elif category == 'BJet_up': + weight_branches.append('BJetUpWeight') + elif category == 'LightJet_down': + weight_branches.append('LightJetDownWeight') + elif category == 'LightJet_up': + weight_branches.append('LightJetUpWeight') + else: + weight_branches.append('BJetWeight') + + if not 'QCD' in tree: + if channel == 'muon': + if category == 'Muon_down': + weight_branches.append('MuonDown') + elif category == 'Muon_up': + weight_branches.append('MuonUp') + else: + weight_branches.append('MuonEfficiencyCorrection') + elif channel == 'electron': + if category == 'Electron_down': + weight_branches.append('ElectronDown') + elif category == 'Electron_up': + weight_branches.append('ElectronUp') + else: + weight_branches.append('ElectronEfficiencyCorrection') + + i = Input( + input_file=input_file, + hist=hist, + tree=tree, + branch=branch, + selection=selection, + 
bin_edges=edges, + lumi_scale=lumi_scale, + scale=scale, + weight_branches=weight_branches, + ) + return i + +if __name__ == '__main__': + main() From e22eb04c375fb2813dc4e158292d812109ea74db Mon Sep 17 00:00:00 2001 From: Douglas Burns Date: Fri, 6 Jan 2017 15:47:56 +0000 Subject: [PATCH 70/90] Remove TESTING/ directory --- dps/analysis/xsection/01_get_ttjet_normalisation2.py | 2 +- dps/utils/measurement2.py | 2 +- 2 files changed, 2 insertions(+), 2 deletions(-) diff --git a/dps/analysis/xsection/01_get_ttjet_normalisation2.py b/dps/analysis/xsection/01_get_ttjet_normalisation2.py index c98970bf..6dc3de3d 100644 --- a/dps/analysis/xsection/01_get_ttjet_normalisation2.py +++ b/dps/analysis/xsection/01_get_ttjet_normalisation2.py @@ -18,7 +18,7 @@ def main(): results = {} # config file template - input_template = 'TESTING/config/measurements/background_subtraction/{com}TeV/{ch}/{var}/{ps}/' + input_template = 'config/measurements/background_subtraction/{com}TeV/{ch}/{var}/{ps}/' if args.visiblePS: ps = 'VisiblePS' diff --git a/dps/utils/measurement2.py b/dps/utils/measurement2.py index 7b7a396d..f1fcc7d1 100644 --- a/dps/utils/measurement2.py +++ b/dps/utils/measurement2.py @@ -188,7 +188,7 @@ def save(self, phase_space): # If normalisation hasnt been calculated - then go calculate it! if not self.is_normalised: self.calculate_normalisation() - output_folder = 'TESTING/data/normalisation/background_subtraction/{com}TeV/{var}/{ps}/{cat}/' + output_folder = 'data/normalisation/background_subtraction/{com}TeV/{var}/{ps}/{cat}/' output_folder = output_folder.format( com = self.com, var = self.variable, From 98de780e2f315037dd3d1e9fbaa2a28e616553e6 Mon Sep 17 00:00:00 2001 From: Douglas Burns Date: Fri, 6 Jan 2017 15:51:59 +0000 Subject: [PATCH 71/90] Default the new norm script. 
move previous to legacy --- .../xsection/01_get_ttjet_normalisation.py | 295 ++++-------------- .../xsection/01_get_ttjet_normalisation2.py | 88 ------ .../xsection/01_get_ttjet_normalisation.py | 240 +++++--------- 3 files changed, 145 insertions(+), 478 deletions(-) delete mode 100644 dps/analysis/xsection/01_get_ttjet_normalisation2.py diff --git a/dps/analysis/xsection/01_get_ttjet_normalisation.py b/dps/analysis/xsection/01_get_ttjet_normalisation.py index c0b6e954..6dc3de3d 100644 --- a/dps/analysis/xsection/01_get_ttjet_normalisation.py +++ b/dps/analysis/xsection/01_get_ttjet_normalisation.py @@ -1,261 +1,88 @@ -''' - Takes AnalysisSoftware (https://github.com/BristolTopGroup/AnalysisSoftware) - output files and extracts the TTJet normalisation for each measured variable - by subtracting backgrounds from data. - - Usage: - python dps/analysis/xsection/01_get_ttjet_normalisation.py \ - -c -v -i \ - -p - - Example: - python dps/analysis/xsection/01_get_ttjet_normalisation.py \ - -c 13 -v MET -i config/measurements/background_subtraction/ - - TODO: In the end this and 01_get_fit_results.py should be merged. 
- All should come down to the function to extract the # events from TTJet -''' from __future__ import division from argparse import ArgumentParser from dps.utils.logger import log from dps.config.xsection import XSectionConfig -from dps.analysis.xsection.lib import closure_tests -from dps.utils.file_utilities import write_data_to_JSON, get_files_in_path -from dps.utils.hist_utilities import clean_control_region, \ - hist_to_value_error_tuplelist, fix_overflow - -import os -from copy import deepcopy -from dps.utils.Calculation import combine_complex_results -from dps.utils.measurement import Measurement +from dps.utils.file_utilities import get_files_in_path, read_data_from_JSON +from dps.utils.measurement2 import Measurement from dps.utils.ROOT_utils import set_root_defaults # define logger for this module mylog = log["01b_get_ttjet_normalisation"] - -class TTJetNormalisation(object): +def main(): ''' - Determines the normalisation for top quark pair production. - Unless stated otherwise all templates and (initial) normalisations - are taken from simulation, except for QCD where the template is - extracted from data. 
- - Subtracts the known backgrounds from data to obtain TTJet template - and normalisation + 1 - Read Config file for normalisation measurement + 2 - Run measurement + 3 - Combine measurement before unfolding ''' + results = {} - @mylog.trace() - def __init__(self, - config, - measurement, - phase_space='FullPS'): - self.config = config - self.variable = measurement.variable - self.category = measurement.name - self.channel = measurement.channel - self.phase_space = phase_space - self.measurement = measurement - self.measurement.read() - - self.normalisation = {} - self.initial_normalisation = {} - # self.unity_normalisation = {} - self.auxiliary_info = {} - - self.have_normalisation = False - - # for sample, hist in self.measurement.histograms.items(): - # h = deepcopy(hist) - # h_norm = h.integral() - # if h_norm > 0: - # h.Scale(1 / h.integral()) - # self.unity_normalisation[sample] = hist_to_value_error_tuplelist(h) - - self.auxiliary_info['norms'] = measurement.aux_info_norms - - @mylog.trace() - def calculate_normalisation(self): - ''' - 1. get file names - 2. get histograms from files - 3. ??? - 4. 
calculate normalisation - ''' - if self.have_normalisation: - return - histograms = self.measurement.histograms - - for sample, hist in histograms.items(): - # TODO: this should be a list of bin-contents - hist = fix_overflow(hist) - histograms[sample] = hist - self.initial_normalisation[sample] = hist_to_value_error_tuplelist(hist) - self.normalisation[sample] = self.initial_normalisation[sample] - - self.background_subtraction(histograms) - - # next, let's round all numbers (they are event numbers after all - for sample, values in self.normalisation.items(): - new_values = [(round(v, 1), round(e, 1)) for v, e in values] - self.normalisation[sample] = new_values - - self.have_normalisation = True - - @mylog.trace() - def background_subtraction(self, histograms): - ttjet_hist = clean_control_region( - histograms, - subtract=['QCD', 'V+Jets', 'SingleTop'] - ) - self.normalisation['TTJet'] = hist_to_value_error_tuplelist(ttjet_hist) - - @mylog.trace() - def save(self, output_path): - if not self.have_normalisation: - self.calculate_normalisation() - - file_template = '{type}_{channel}.txt' - folder_template = '{path}/normalisation/{method}/{CoM}TeV/{variable}/{phase_space}/{category}/' - output_folder = folder_template.format( - path = output_path, - CoM = self.config.centre_of_mass_energy, - variable = self.variable, - category = self.category, - method = 'background_subtraction', - phase_space = self.phase_space, - ) - - write_data_to_JSON( - self.normalisation, - output_folder + file_template.format(type='normalisation', channel=self.channel) - ) - write_data_to_JSON( - self.initial_normalisation, - output_folder + file_template.format(type='initial_normalisation', channel=self.channel) - ) - # write_data_to_JSON( - # self.unity_normalisation, - # output_folder + file_template.format(type='unity_normalisation', channel=self.channel) - # ) - write_data_to_JSON( - self.auxiliary_info, - output_folder + file_template.format(type='auxiliary_info', 
channel=self.channel) - ) - return output_folder - - @mylog.trace() - def combine(self, other): - if not self.have_normalisation or not other.have_normalisation: - mylog.warn( - 'One of the TTJetNormalisations does not have a normalisation, aborting.') - return - - self.normalisation = combine_complex_results( - self.normalisation, other.normalisation) - self.initial_normalisation = combine_complex_results( - self.initial_normalisation, other.initial_normalisation) - # self.unity_normalisation = combine_complex_results( - # self.unity_normalisation, other.unity_normalisation) - self.channel = 'combined' + # config file template + input_template = 'config/measurements/background_subtraction/{com}TeV/{ch}/{var}/{ps}/' + if args.visiblePS: + ps = 'VisiblePS' + else: + ps = 'FullPS' + + for ch in ['electron', 'muon']: + for var in measurement_config.variables: + if args.variable not in var: continue + + # Create measurement_filepath + measurement_filepath = input_template.format( + com = args.CoM, + ch = ch, + var = var, + ps = ps, + ) + + # Get all config files in measurement_filepath + measurement_files = get_files_in_path(measurement_filepath, file_ending='.json') + + for f in sorted(measurement_files): + if args.test: + if 'central' not in f: continue + print('Processing file ' + f) + # Read in Measurement JSON + config = read_data_from_JSON(f) + + if 'electron' in ch: + # Create Measurement Class using JSON + electron_measurement = Measurement(config) + electron_measurement.calculate_normalisation() + electron_measurement.save(ps) + elif 'muon' in ch: + # Create Measurement Class using JSON + muon_measurement = Measurement(config) + muon_measurement.calculate_normalisation() + muon_measurement.save(ps) + # break + + # Combining the channels before unfolding + combined_measurement = electron_measurement + combined_measurement.combine(muon_measurement) + combined_measurement.save(ps) + return def parse_arguments(): parser = ArgumentParser(__doc__) - 
parser.add_argument("-p", "--path", dest="path", default='data', - help="set output path for JSON files. Default is 'data'.") - parser.add_argument("-i", "--input", dest="input", - default='config/measurements/background_subtraction/', - help="set output path for JSON files") - parser.add_argument("-v", "--variable", dest="variable", default='MET', - help="set the variable to analyse (MET, HT, ST, MT, WPT). Default is MET.") + parser.add_argument("-v", "--variable", dest="variable", default='HT', + help="set the variable to analyse (MET, HT, ST, MT, WPT). Default is MET.") parser.add_argument("-c", "--centre-of-mass-energy", dest="CoM", default=13, type=int, - help="set the centre of mass energy for analysis. Default = 13 [TeV]") - parser.add_argument('-d', '--debug', dest="debug", action="store_true", - help="Print the debug information") - parser.add_argument('--closure_test', dest="closure_test", action="store_true", - help="Perform fit on data == sum(MC) * scale factor (MC process)") - parser.add_argument('--closure_test_type', dest="closure_test_type", default='simple', - help="Type of closure test (relative normalisation):" + '|'.join(closure_tests.keys())) - parser.add_argument('--test', dest="test", action="store_true", - help="Just run the central measurement") + help="set the centre of mass energy for analysis. 
Default = 13 [TeV]") parser.add_argument('--visiblePS', dest="visiblePS", action="store_true", - help="Unfold to visible phase space") - + help="Unfold to visible phase space") + parser.add_argument('--test', dest="test", action="store_true", + help="Unfold to visible phase space") args = parser.parse_args() - # fix some of the inputs - if not args.path.endswith('/'): - args.path = args.path + '/' - if not args.input.endswith('/'): - args.input = args.input + '/' - return args -@mylog.trace() -def main(): - # construct categories from files: - input_template = args.input + '{energy}TeV/{channel}/{variable}/{phase_space}/' - - phase_space = 'FullPS' - if args.visiblePS: - phase_space = 'VisiblePS' - results = {} - - for channel in ['electron', 'muon']: - measurement_filepath = input_template.format( - energy = args.CoM, - channel = channel, - variable = variable, - phase_space = phase_space, - ) - measurement_files = get_files_in_path(measurement_filepath, file_ending='.json') - - for f in sorted(measurement_files): - if args.test and 'central' not in f: continue - - print('Processing file ' + f) - measurement = Measurement.fromJSON(f) - # for each measurement - norm = TTJetNormalisation( - config=measurement_config, - measurement=measurement, - phase_space=phase_space, - ) - norm.calculate_normalisation() - mylog.info('Saving results to {0}'.format(output_path)) - norm.save(output_path) - # store results for later combination - r_name = f.replace(channel, '') - if not results.has_key(r_name): - results[r_name] = [norm] - else: - results[r_name].append(norm) - - for f, r_list in results.items(): - if not len(r_list) == 2: - msg = 'Only found results ({0}) for one channel, not combining.' 
- mylog.warn(msg.format(f)) - continue - n1, n2 = r_list - n1.combine(n2) - n1.save(output_path) - if __name__ == '__main__': set_root_defaults() - args = parse_arguments() + measurement_config = XSectionConfig(args.CoM) + main() - # set global variables - debug = args.debug - if debug: - log.setLevel(log.DEBUG) - measurement_config = XSectionConfig(args.CoM) - # caching of variables for shorter access - variable = args.variable - output_path = args.path - if args.closure_test: - output_path += '/closure_test/' - output_path += args.closure_test_type + '/' - main() diff --git a/dps/analysis/xsection/01_get_ttjet_normalisation2.py b/dps/analysis/xsection/01_get_ttjet_normalisation2.py deleted file mode 100644 index 6dc3de3d..00000000 --- a/dps/analysis/xsection/01_get_ttjet_normalisation2.py +++ /dev/null @@ -1,88 +0,0 @@ -from __future__ import division -from argparse import ArgumentParser -from dps.utils.logger import log -from dps.config.xsection import XSectionConfig -from dps.utils.file_utilities import get_files_in_path, read_data_from_JSON -from dps.utils.measurement2 import Measurement -from dps.utils.ROOT_utils import set_root_defaults - -# define logger for this module -mylog = log["01b_get_ttjet_normalisation"] - -def main(): - ''' - 1 - Read Config file for normalisation measurement - 2 - Run measurement - 3 - Combine measurement before unfolding - ''' - results = {} - - # config file template - input_template = 'config/measurements/background_subtraction/{com}TeV/{ch}/{var}/{ps}/' - - if args.visiblePS: - ps = 'VisiblePS' - else: - ps = 'FullPS' - - for ch in ['electron', 'muon']: - for var in measurement_config.variables: - if args.variable not in var: continue - - # Create measurement_filepath - measurement_filepath = input_template.format( - com = args.CoM, - ch = ch, - var = var, - ps = ps, - ) - - # Get all config files in measurement_filepath - measurement_files = get_files_in_path(measurement_filepath, file_ending='.json') - - for f in 
sorted(measurement_files): - if args.test: - if 'central' not in f: continue - print('Processing file ' + f) - # Read in Measurement JSON - config = read_data_from_JSON(f) - - if 'electron' in ch: - # Create Measurement Class using JSON - electron_measurement = Measurement(config) - electron_measurement.calculate_normalisation() - electron_measurement.save(ps) - elif 'muon' in ch: - # Create Measurement Class using JSON - muon_measurement = Measurement(config) - muon_measurement.calculate_normalisation() - muon_measurement.save(ps) - # break - - # Combining the channels before unfolding - combined_measurement = electron_measurement - combined_measurement.combine(muon_measurement) - combined_measurement.save(ps) - return - -def parse_arguments(): - parser = ArgumentParser(__doc__) - parser.add_argument("-v", "--variable", dest="variable", default='HT', - help="set the variable to analyse (MET, HT, ST, MT, WPT). Default is MET.") - parser.add_argument("-c", "--centre-of-mass-energy", dest="CoM", default=13, type=int, - help="set the centre of mass energy for analysis. 
Default = 13 [TeV]") - parser.add_argument('--visiblePS', dest="visiblePS", action="store_true", - help="Unfold to visible phase space") - parser.add_argument('--test', dest="test", action="store_true", - help="Unfold to visible phase space") - args = parser.parse_args() - return args - -if __name__ == '__main__': - set_root_defaults() - args = parse_arguments() - measurement_config = XSectionConfig(args.CoM) - main() - - - diff --git a/dps/legacy/xsection/01_get_ttjet_normalisation.py b/dps/legacy/xsection/01_get_ttjet_normalisation.py index 02bcc9db..c0b6e954 100644 --- a/dps/legacy/xsection/01_get_ttjet_normalisation.py +++ b/dps/legacy/xsection/01_get_ttjet_normalisation.py @@ -16,7 +16,7 @@ All should come down to the function to extract the # events from TTJet ''' from __future__ import division -from optparse import OptionParser +from argparse import ArgumentParser from dps.utils.logger import log from dps.config.xsection import XSectionConfig from dps.analysis.xsection.lib import closure_tests @@ -35,59 +35,43 @@ class TTJetNormalisation(object): - - ''' - Determines the normalisation for top quark pair production based on - different methods. Unless stated otherwise all templates and - (initial) normalisations are taken from simulation, except for QCD - where the template is extracted from data. - - Supported methods: - BACKGROUND_SUBTRACTION: - Subtracts the known backgrounds from data to obtain TTJet template - and normalisation - SIMULTANEOUS_FIT: - Uses Minuit and several fit variables (quotation needed) to perform - a simultaneous fit (does not use statistical errors of templates). - FRACTION_FITTER: - Uses the TFractionFitter class to fit the TTJet normalisation ''' + Determines the normalisation for top quark pair production. + Unless stated otherwise all templates and (initial) normalisations + are taken from simulation, except for QCD where the template is + extracted from data. 
- BACKGROUND_SUBTRACTION = 10 - SIMULTANEOUS_FIT = 20 - FRACTION_FITTER = 30 + Subtracts the known backgrounds from data to obtain TTJet template + and normalisation + ''' @mylog.trace() def __init__(self, config, measurement, - method=BACKGROUND_SUBTRACTION, phase_space='FullPS'): self.config = config self.variable = measurement.variable self.category = measurement.name self.channel = measurement.channel - self.method = method self.phase_space = phase_space self.measurement = measurement self.measurement.read() - self.met_type = measurement.met_type - self.fit_variables = ['M3'] - self.normalisation = {} self.initial_normalisation = {} - self.templates = {} + # self.unity_normalisation = {} + self.auxiliary_info = {} self.have_normalisation = False - for sample, hist in self.measurement.histograms.items(): - h = deepcopy(hist) - h_norm = h.integral() - if h_norm > 0: - h.Scale(1 / h.integral()) - self.templates[sample] = hist_to_value_error_tuplelist(h) - self.auxiliary_info = {} + # for sample, hist in self.measurement.histograms.items(): + # h = deepcopy(hist) + # h_norm = h.integral() + # if h_norm > 0: + # h.Scale(1 / h.integral()) + # self.unity_normalisation[sample] = hist_to_value_error_tuplelist(h) + self.auxiliary_info['norms'] = measurement.aux_info_norms @mylog.trace() @@ -96,7 +80,7 @@ def calculate_normalisation(self): 1. get file names 2. get histograms from files 3. ??? - 4. calculate normalisation based on self.method + 4. 
calculate normalisation ''' if self.have_normalisation: return @@ -106,15 +90,10 @@ def calculate_normalisation(self): # TODO: this should be a list of bin-contents hist = fix_overflow(hist) histograms[sample] = hist - self.initial_normalisation[ - sample] = hist_to_value_error_tuplelist(hist) - if self.method == self.BACKGROUND_SUBTRACTION and sample != 'TTJet': - self.normalisation[sample] = self.initial_normalisation[sample] + self.initial_normalisation[sample] = hist_to_value_error_tuplelist(hist) + self.normalisation[sample] = self.initial_normalisation[sample] - if self.method == self.BACKGROUND_SUBTRACTION: - self.background_subtraction(histograms) - if self.method == self.SIMULTANEOUS_FIT: - self.simultaneous_fit(histograms) + self.background_subtraction(histograms) # next, let's round all numbers (they are event numbers after all for sample, values in self.normalisation.items(): @@ -123,83 +102,48 @@ def calculate_normalisation(self): self.have_normalisation = True - def background_subtraction(self, histograms): - ttjet_hist = clean_control_region(histograms, - subtract=['QCD', 'V+Jets', 'SingleTop']) - self.normalisation[ - 'TTJet'] = hist_to_value_error_tuplelist(ttjet_hist) - @mylog.trace() - def simultaneous_fit(self, histograms): - from dps.utils.Fitting import FitData, FitDataCollection, Minuit - print('not in production yet') - fitter = None - fit_data_collection = FitDataCollection() - for fit_variable in self.fit_variables: - mc_histograms = { - 'TTJet': histograms['TTJet'], - 'SingleTop': histograms['SingleTop'], - 'V+Jets': histograms['V+Jets'], - 'QCD': histograms['QCD'], - } - h_data = histograms['data'] - fit_data = FitData(h_data, mc_histograms, - fit_boundaries=self.config.fit_boundaries[fit_variable]) - fit_data_collection.add(fit_data, name=fit_variable) - fitter = Minuit(fit_data_collection) - fitter.fit() - fit_results = fitter.readResults() - - normalisation = fit_data_collection.mc_normalisation( - self.fit_variables[0]) - 
normalisation_errors = fit_data_collection.mc_normalisation_errors( - self.fit_variables[0]) - print normalisation, normalisation_errors + def background_subtraction(self, histograms): + ttjet_hist = clean_control_region( + histograms, + subtract=['QCD', 'V+Jets', 'SingleTop'] + ) + self.normalisation['TTJet'] = hist_to_value_error_tuplelist(ttjet_hist) @mylog.trace() def save(self, output_path): if not self.have_normalisation: self.calculate_normalisation() - folder_template = '{path}/normalisation/{method}/{CoM}TeV/{variable}/' - folder_template += '{phase_space}/{category}/' - inputs = { - 'path': output_path, - 'CoM': self.config.centre_of_mass_energy, - 'variable': self.variable, - 'category': self.category, - 'method': self.method_string(), - 'phase_space': self.phase_space, - } - output_folder = folder_template.format(**inputs) - - file_template = '{type}_{channel}_{met_type}.txt' - inputs = { - 'channel': self.channel, - 'met_type': self.met_type, - } - write_data_to_JSON(self.normalisation, - output_folder + file_template.format(type='normalisation', **inputs)) - write_data_to_JSON(self.initial_normalisation, - output_folder + file_template.format(type='initial_normalisation', **inputs)) - write_data_to_JSON(self.templates, - output_folder + file_template.format(type='templates', **inputs)) - write_data_to_JSON(self.auxiliary_info, - output_folder + file_template.format(type='auxiliary_info', **inputs)) + file_template = '{type}_{channel}.txt' + folder_template = '{path}/normalisation/{method}/{CoM}TeV/{variable}/{phase_space}/{category}/' + output_folder = folder_template.format( + path = output_path, + CoM = self.config.centre_of_mass_energy, + variable = self.variable, + category = self.category, + method = 'background_subtraction', + phase_space = self.phase_space, + ) + write_data_to_JSON( + self.normalisation, + output_folder + file_template.format(type='normalisation', channel=self.channel) + ) + write_data_to_JSON( + self.initial_normalisation, + 
output_folder + file_template.format(type='initial_normalisation', channel=self.channel) + ) + # write_data_to_JSON( + # self.unity_normalisation, + # output_folder + file_template.format(type='unity_normalisation', channel=self.channel) + # ) + write_data_to_JSON( + self.auxiliary_info, + output_folder + file_template.format(type='auxiliary_info', channel=self.channel) + ) return output_folder - @mylog.trace() - def method_string(self): - if self.method == self.BACKGROUND_SUBTRACTION: - return 'background_subtraction' - if self.method == self.SIMULTANEOUS_FIT: - return 'simultaneous_fit_' + '_'.join(self.fit_variables) - if self.method == self.FRACTION_FITTER: - return 'fraction_fitter' - - return 'unknown_method' - @mylog.trace() def combine(self, other): if not self.have_normalisation or not other.have_normalisation: @@ -211,59 +155,55 @@ def combine(self, other): self.normalisation, other.normalisation) self.initial_normalisation = combine_complex_results( self.initial_normalisation, other.initial_normalisation) - self.templates = combine_complex_results( - self.templates, other.templates) + # self.unity_normalisation = combine_complex_results( + # self.unity_normalisation, other.unity_normalisation) self.channel = 'combined' -def parse_options(): - parser = OptionParser(__doc__) - parser.add_option("-p", "--path", dest="path", default='data', +def parse_arguments(): + parser = ArgumentParser(__doc__) + parser.add_argument("-p", "--path", dest="path", default='data', help="set output path for JSON files. Default is 'data'.") - parser.add_option("-i", "--input", dest="input", + parser.add_argument("-i", "--input", dest="input", default='config/measurements/background_subtraction/', help="set output path for JSON files") - parser.add_option("-v", "--variable", dest="variable", default='MET', + parser.add_argument("-v", "--variable", dest="variable", default='MET', help="set the variable to analyse (MET, HT, ST, MT, WPT). 
Default is MET.") - parser.add_option("-c", "--centre-of-mass-energy", dest="CoM", default=13, type=int, + parser.add_argument("-c", "--centre-of-mass-energy", dest="CoM", default=13, type=int, help="set the centre of mass energy for analysis. Default = 13 [TeV]") - parser.add_option('-d', '--debug', dest="debug", action="store_true", + parser.add_argument('-d', '--debug', dest="debug", action="store_true", help="Print the debug information") - parser.add_option('--closure_test', dest="closure_test", action="store_true", + parser.add_argument('--closure_test', dest="closure_test", action="store_true", help="Perform fit on data == sum(MC) * scale factor (MC process)") - parser.add_option('--closure_test_type', dest="closure_test_type", default='simple', + parser.add_argument('--closure_test_type', dest="closure_test_type", default='simple', help="Type of closure test (relative normalisation):" + '|'.join(closure_tests.keys())) - parser.add_option('--test', dest="test", action="store_true", + parser.add_argument('--test', dest="test", action="store_true", help="Just run the central measurement") - parser.add_option('--visiblePS', dest="visiblePS", action="store_true", + parser.add_argument('--visiblePS', dest="visiblePS", action="store_true", help="Unfold to visible phase space") - (options, args) = parser.parse_args() + args = parser.parse_args() # fix some of the inputs - if not options.path.endswith('/'): - options.path = options.path + '/' - if not options.input.endswith('/'): - options.input = options.input + '/' - - return options, args + if not args.path.endswith('/'): + args.path = args.path + '/' + if not args.input.endswith('/'): + args.input = args.input + '/' + return args @mylog.trace() def main(): # construct categories from files: - input_template = options.input + '{energy}TeV/{channel}/{variable}/{phase_space}/' - # categories = [ category for category - # in measurement_config.measurements_and_prefixes.keys() - # if not 
measurement_config.ttbar_theory_systematic_prefix in category] + input_template = args.input + '{energy}TeV/{channel}/{variable}/{phase_space}/' phase_space = 'FullPS' - if options.visiblePS: + if args.visiblePS: phase_space = 'VisiblePS' results = {} for channel in ['electron', 'muon']: measurement_filepath = input_template.format( - energy = options.CoM, + energy = args.CoM, channel = channel, variable = variable, phase_space = phase_space, @@ -271,13 +211,14 @@ def main(): measurement_files = get_files_in_path(measurement_filepath, file_ending='.json') for f in sorted(measurement_files): + if args.test and 'central' not in f: continue + print('Processing file ' + f) measurement = Measurement.fromJSON(f) # for each measurement norm = TTJetNormalisation( config=measurement_config, measurement=measurement, - method=TTJetNormalisation.BACKGROUND_SUBTRACTION, phase_space=phase_space, ) norm.calculate_normalisation() @@ -299,35 +240,22 @@ def main(): n1.combine(n2) n1.save(output_path) - -def get_category_from_file(json_file): - filename = json_file.split('/')[-1] - # remove type string - category = filename.replace('_shape_systematic', '') - category = category.replace('_rate_systematic', '') - # remove file ending - category = category.replace('.json', '') - - return category - if __name__ == '__main__': set_root_defaults() - options, args = parse_options() + args = parse_arguments() # set global variables - debug = options.debug + debug = args.debug if debug: log.setLevel(log.DEBUG) - measurement_config = XSectionConfig(options.CoM) + measurement_config = XSectionConfig(args.CoM) # caching of variables for shorter access - translate_options = measurement_config.translate_options - variable = options.variable - - output_path = options.path - if options.closure_test: + variable = args.variable + output_path = args.path + if args.closure_test: output_path += '/closure_test/' - output_path += options.closure_test_type + '/' + output_path += args.closure_test_type + '/' 
main() From a6ffc4ca9c76c49826059517ebc84aee008431a4 Mon Sep 17 00:00:00 2001 From: Douglas Burns Date: Fri, 6 Jan 2017 16:01:46 +0000 Subject: [PATCH 72/90] measurement2.py -> measurent.py --- .../xsection/01_get_ttjet_normalisation.py | 2 +- dps/legacy/tools/measurement.py | 316 ++++++++++++ dps/utils/measurement.py | 477 +++++++----------- dps/utils/measurement2.py | 225 --------- 4 files changed, 510 insertions(+), 510 deletions(-) create mode 100644 dps/legacy/tools/measurement.py delete mode 100644 dps/utils/measurement2.py diff --git a/dps/analysis/xsection/01_get_ttjet_normalisation.py b/dps/analysis/xsection/01_get_ttjet_normalisation.py index 6dc3de3d..79d1b8bf 100644 --- a/dps/analysis/xsection/01_get_ttjet_normalisation.py +++ b/dps/analysis/xsection/01_get_ttjet_normalisation.py @@ -3,7 +3,7 @@ from dps.utils.logger import log from dps.config.xsection import XSectionConfig from dps.utils.file_utilities import get_files_in_path, read_data_from_JSON -from dps.utils.measurement2 import Measurement +from dps.utils.measurement import Measurement from dps.utils.ROOT_utils import set_root_defaults # define logger for this module diff --git a/dps/legacy/tools/measurement.py b/dps/legacy/tools/measurement.py new file mode 100644 index 00000000..df463b3d --- /dev/null +++ b/dps/legacy/tools/measurement.py @@ -0,0 +1,316 @@ +''' + Provides the classes Measurement and Systematic +''' +from __future__ import division +from . import log +import copy +from rootpy.io.file import Directory +from dps.utils.ROOT_utils import get_histogram_from_file +from dps.utils.file_utilities import make_folder_if_not_exists,\ + write_data_to_JSON, read_data_from_JSON +from dps.utils.input import Input +from dps.utils.hist_utilities import clean_control_region +# define logger for this module +meas_log = log["dps.utils.measurement"] + + +class Measurement(): + + ''' + The Measurement class combines files and histogram paths into + one container. 
It also allows to provide separate shapes for the + histograms while using the normalisation from the initial set. + ''' + + @meas_log.trace() + def __init__(self, name): + self.name = name + self.variable = '' + self.centre_of_mass_energy = 0 + self.channel = '' + self.samples = {} + self.shapes = {} + self.norms = {} + self.histograms = {} + self.fit_variables = {} + + self.have_read_samples = False + self.have_read_shapes = False + self.have_read_norms = False + + self.met_type = '' + + self.type = 0 + + self.aux_info_norms = {} + + @meas_log.trace() + def addSample(self, sample, read=True, **kwargs): + self.samples[sample] = kwargs + # TODO: add tree & branch, selection etc + # whatever get_histograms_from_trees needs + if read: + self.read_sample(sample) + + @meas_log.trace() + def addShapeForSample(self, sample, measurement, read=True): + self.shapes[sample] = measurement + if read: + self.read_shape(sample) + + @meas_log.trace() + def addNormForSample(self, sample, measurement, read=True): + self.norms[sample] = measurement + if read: + self.read_norm(sample) + + @meas_log.trace() + def addFitVariable(self, variable, measurement): + self.fit_variables[variable] = measurement + + @meas_log.trace() + def toJSON(self, JSON_file): + output = self.toDict() + filename = JSON_file.split('/')[-1] + directory = JSON_file.replace(filename, '') + make_folder_if_not_exists(directory) + write_data_to_JSON(output, JSON_file) + + @meas_log.trace() + def toDict(self): + output = {} + output['class'] = str(self.__class__) + output['name'] = self.name + output['variable'] = self.variable + output['centre_of_mass_energy'] = self.centre_of_mass_energy + output['samples'] = self.samples + output['shapes'] = {shape: meas.toDict() + for shape, meas in self.shapes.items()} + output['norms'] = {norm: meas.toDict() + for norm, meas in self.norms.items()} + output['channel'] = self.channel + output['met_type'] = self.met_type + for sample in output['samples'].keys(): + if 
output['samples'][sample].has_key('input'): + output['samples'][sample]['input'] = output[ + 'samples'][sample]['input'].toDict() + + return output + + @staticmethod + def fromJSON(JSON_file): + src = read_data_from_JSON(JSON_file) + m = Measurement.fromDict(src) + + return m + + @staticmethod + def fromDict(d): + m = None + if d['class'] == 'dps.utils.measurement.Measurement': + m = Measurement(d['name']) + if d['class'] == 'dps.utils.measurement.Systematic': + m = Systematic(d['name'], d['type'], + affected_samples=d['affected_samples'], scale=d['scale']) + m.setVariable(d['variable']) + m.setCentreOfMassEnergy(int(d['centre_of_mass_energy'])) + m.setChannel(d['channel']) + m.setMETType(d['met_type']) + for sample, i in d['samples'].items(): + if i.has_key('input'): + inp = Input(**i['input']) + m.addSample(sample, read=True, input=inp) + else: + m.addSample(sample, i['file'], i['hist'], read=True) + for shape, obj in d['shapes'].items(): + m.addShapeForSample(shape, Measurement.fromDict(obj), read=True) + for norm, obj in d['norms'].items(): + m.addNormForSample( + norm, Measurement.fromDict(obj), read=True) + return m + + @meas_log.trace() + def toROOT(self): + ''' + Converts measurement into something that can be stored in a ROOT + file + ''' + return + d = Directory(name=self.name) + # create shape and norm folders if there is anything to be saved + # what needs to be saved + # MET type + return d + + @meas_log.trace() + def setVariable(self, variable): + self.variable = variable + + @meas_log.trace() + def setCentreOfMassEnergy(self, com): + self.centre_of_mass_energy = com + + @meas_log.trace() + def setChannel(self, channel): + self.channel = channel + + @meas_log.trace() + def setMETType(self, met_type): + self.met_type = met_type + + @meas_log.trace() + def getCleanedShape(self, sample): + subtract = copy.copy(self.histograms.keys()) + subtract.remove(sample) + subtract.remove('data') + hist = clean_control_region(self.histograms, + data_label='data', + 
subtract=subtract, + fix_to_zero=True) + return hist + + @meas_log.trace() + def read(self): + self.read_samples() + self.read_shapes() + self.read_norms() + + @meas_log.trace() + def read_samples(self): + if self.have_read_samples: + return + for sample in self.samples.keys(): + self.read_sample(sample) + self.have_read_samples = True + + @meas_log.trace() + def read_sample(self, sample): + if self.samples[sample].has_key('input'): + i = self.samples[sample]['input'] + if isinstance(i, dict): + i = Input(**self.samples[sample]['input']) + self.histograms[sample] = i.read() + return + input_file = self.samples[sample]['input_file'] + if self.samples[sample].has_key('hist'): + hist = self.samples[sample]['hist'] + self.histograms[sample] = get_histogram_from_file( + hist, input_file) + + @meas_log.trace() + def read_shapes(self): + if self.have_read_shapes: + return + if not self.have_read_samples: + self.read_samples() + for sample in self.shapes.keys(): + self.read_shape(sample) + self.have_read_shapes = True + + @meas_log.trace() + def read_norms(self): + if self.have_read_norms: + return + if not self.have_read_samples: + self.read_samples() + for sample in self.norms.keys(): + self.read_norm(sample) + self.have_read_norms = True + + @meas_log.trace() + def read_shape(self, sample): + ''' + Shape from a Control Region (CR) is currently treated as: + - define process A for which you which to get the shape + - define CR + - subtract other processes from data in the CR + - normalise the result to process A in signal region + - replace process A in signal region with the new histogram + ''' + measurement = self.shapes[sample] + shape = measurement.getCleanedShape(sample) + if sample in self.histograms.keys(): + n_shape = shape.Integral() + mc = self.histograms[sample] + n_mc = mc.Integral() + scale = 1 + if not n_shape == 0: + if not n_mc == 0: + scale = 1 / n_shape * n_mc + else: + scale = 1 / n_shape + shape.Scale(scale) + self.histograms[sample] = shape + else: + 
meas_log.warning( + 'No MC entry found for sample "{0}", using shape normalisation'.format(sample)) + self.histograms[sample] = shape + + @meas_log.trace() + def read_norm(self, sample): + ''' + Normalisation from a Control Region (CR) is currently treated as: + - define normalisation for process A + - define CR + - subtract other processes from data in the CR + - calculate the ratio between process A and data (both in CR) + - apply ratio to process A in signal region + ''' + measurement = self.norms[sample] + self.aux_info_norms[sample] = {} + # get ratio from control region + norm = measurement.getCleanedShape(sample) + mc_in_control = measurement.histograms[sample] + # scale sample to this ratio + if sample in self.histograms.keys(): + n_data_control = norm.Integral() + n_mc_control = mc_in_control.Integral() + ratio = n_data_control / n_mc_control + meas_log.debug('Ratio from control region {0}'.format(ratio)) + n_mc_signal_region = self.histograms[sample].integral() + self.histograms[sample].Scale(ratio) + self.aux_info_norms[sample]['norm_factor'] = round(ratio, 2) + self.aux_info_norms[sample]['n_mc_control'] = n_mc_control + self.aux_info_norms[sample][ + 'n_mc_signal_region'] = n_mc_signal_region + self.aux_info_norms[sample]['n_data_control'] = n_data_control + else: + meas_log.warning( + 'No MC entry found for sample "{0}", using control region normalisation'.format(sample)) + self.histograms[sample] = norm + + +class Systematic(Measurement): + + ''' + The Systematic class is an extension of the Measurement class. + It allows to implement systematic specific functionality + (e.g. rate systematics). 
+ ''' + + SHAPE = 10 + RATE = 20 + + @meas_log.trace() + def __init__(self, name, + stype=SHAPE, + affected_samples=[], + scale=1.): + ''' + Constructor + ''' + Measurement.__init__(self, name) + self.type = stype + + self.affected_samples = affected_samples + + self.scale = scale + + @meas_log.trace() + def toDict(self): + output = Measurement.toDict(self) + output['type'] = self.type + output['affected_samples'] = self.affected_samples + output['scale'] = self.scale + + return output diff --git a/dps/utils/measurement.py b/dps/utils/measurement.py index df463b3d..f1fcc7d1 100644 --- a/dps/utils/measurement.py +++ b/dps/utils/measurement.py @@ -3,314 +3,223 @@ ''' from __future__ import division from . import log -import copy -from rootpy.io.file import Directory -from dps.utils.ROOT_utils import get_histogram_from_file -from dps.utils.file_utilities import make_folder_if_not_exists,\ - write_data_to_JSON, read_data_from_JSON -from dps.utils.input import Input -from dps.utils.hist_utilities import clean_control_region +from dps.utils.hist_utilities import hist_to_value_error_tuplelist, clean_control_region + # define logger for this module meas_log = log["dps.utils.measurement"] - class Measurement(): - ''' The Measurement class combines files and histogram paths into one container. It also allows to provide separate shapes for the histograms while using the normalisation from the initial set. 
''' - - @meas_log.trace() - def __init__(self, name): - self.name = name - self.variable = '' - self.centre_of_mass_energy = 0 - self.channel = '' - self.samples = {} - self.shapes = {} - self.norms = {} - self.histograms = {} - self.fit_variables = {} - - self.have_read_samples = False - self.have_read_shapes = False - self.have_read_norms = False - - self.met_type = '' - - self.type = 0 - - self.aux_info_norms = {} - - @meas_log.trace() - def addSample(self, sample, read=True, **kwargs): - self.samples[sample] = kwargs - # TODO: add tree & branch, selection etc - # whatever get_histograms_from_trees needs - if read: - self.read_sample(sample) - - @meas_log.trace() - def addShapeForSample(self, sample, measurement, read=True): - self.shapes[sample] = measurement - if read: - self.read_shape(sample) - - @meas_log.trace() - def addNormForSample(self, sample, measurement, read=True): - self.norms[sample] = measurement - if read: - self.read_norm(sample) - @meas_log.trace() - def addFitVariable(self, variable, measurement): - self.fit_variables[variable] = measurement + def __init__(self, measurement): + self.measurement = measurement + self.histograms = {} + self.cr_histograms = {} + self.normalisation = {} + self.variable = None + self.com = None + self.channel = None + self.name = None + self.is_normalised = False + self.central = False + self.samples = {} + self.__setFromConfig() + + def __setFromConfig(self): + self.variable = self.measurement["variable"] + self.com = self.measurement["com"] + self.channel = self.measurement["channel"] + self.samples = self.measurement["samples"] + self.name = self.measurement["name"] + data_driven_qcd = self.measurement["data_driven_qcd"] + + # Is this central or a systematic? 
+ if "central" in self.name: + self.central = True + + # Retrieve histograms from files for SR and CR + for sample, histogram_info in self.samples.iteritems(): + self.histograms[sample] = self.__return_histogram(histogram_info) + if data_driven_qcd: + self.cr_histograms[sample] = self.__return_histogram(histogram_info, useQCDControl=True) + + # print(hist_to_value_error_tuplelist(self.histograms[sample])) + # print(hist_to_value_error_tuplelist(self.cr_histograms[sample])) + + # Replace QCD MC with data-driven MC + if data_driven_qcd: + self.__qcd_from_data() + return - @meas_log.trace() - def toJSON(self, JSON_file): - output = self.toDict() - filename = JSON_file.split('/')[-1] - directory = JSON_file.replace(filename, '') - make_folder_if_not_exists(directory) - write_data_to_JSON(output, JSON_file) + def __qcd_from_data(self): + ''' + Replace Signal region mc qcd with data driven qcd - @meas_log.trace() - def toDict(self): - output = {} - output['class'] = str(self.__class__) - output['name'] = self.name - output['variable'] = self.variable - output['centre_of_mass_energy'] = self.centre_of_mass_energy - output['samples'] = self.samples - output['shapes'] = {shape: meas.toDict() - for shape, meas in self.shapes.items()} - output['norms'] = {norm: meas.toDict() - for norm, meas in self.norms.items()} - output['channel'] = self.channel - output['met_type'] = self.met_type - for sample in output['samples'].keys(): - if output['samples'][sample].has_key('input'): - output['samples'][sample]['input'] = output[ - 'samples'][sample]['input'].toDict() - - return output - - @staticmethod - def fromJSON(JSON_file): - src = read_data_from_JSON(JSON_file) - m = Measurement.fromDict(src) - - return m - - @staticmethod - def fromDict(d): - m = None - if d['class'] == 'dps.utils.measurement.Measurement': - m = Measurement(d['name']) - if d['class'] == 'dps.utils.measurement.Systematic': - m = Systematic(d['name'], d['type'], - affected_samples=d['affected_samples'], 
scale=d['scale']) - m.setVariable(d['variable']) - m.setCentreOfMassEnergy(int(d['centre_of_mass_energy'])) - m.setChannel(d['channel']) - m.setMETType(d['met_type']) - for sample, i in d['samples'].items(): - if i.has_key('input'): - inp = Input(**i['input']) - m.addSample(sample, read=True, input=inp) - else: - m.addSample(sample, i['file'], i['hist'], read=True) - for shape, obj in d['shapes'].items(): - m.addShapeForSample(shape, Measurement.fromDict(obj), read=True) - for norm, obj in d['norms'].items(): - m.addNormForSample( - norm, Measurement.fromDict(obj), read=True) - return m + N MC QCD in SR N DD QCD in CR + QCD_SHAPE * -------------- * -------------- + N DD QCD in CR N MC QCD in CR - @meas_log.trace() - def toROOT(self): - ''' - Converts measurement into something that can be stored in a ROOT - file + Shape normalise to scale from + SR mc qcd mc qcd to dd qcd ''' + # Get the shape of the data driven qcd in the control region + qcd_shape = clean_control_region( + self.cr_histograms, + subtract=['TTBar', 'V+Jets', 'SingleTop'] + ) + # print(hist_to_value_error_tuplelist(qcd_shape)) + + # Now to normalise the qcd shape to the MC in the Signal Region + # n_dd_cr= Number of datadriven qcd from Control Region + n_mc_sr = self.histograms['QCD'].Integral() + n_dd_cr = qcd_shape.Integral() + qcd_shape.Scale( n_mc_sr/n_dd_cr ) + # print "scaling to normalisation in SR MC : ", n_mc_sr/n_dd_cr + + # Now to scale from mc qcd to datadriven qcd + n_mc_cr = self.cr_histograms['QCD'].Integral() + qcd_shape.Scale( n_dd_cr/n_mc_cr ) + # print "scaling from MC to datadriven : ", n_dd_cr/n_mc_cr + # print "Total scaling : ", n_mc_sr/n_mc_cr + + # Replace QCD histogram with datadriven one + self.histograms['QCD'] = qcd_shape return - d = Directory(name=self.name) - # create shape and norm folders if there is anything to be saved - # what needs to be saved - # MET type - return d - - @meas_log.trace() - def setVariable(self, variable): - self.variable = variable - - 
@meas_log.trace() - def setCentreOfMassEnergy(self, com): - self.centre_of_mass_energy = com - - @meas_log.trace() - def setChannel(self, channel): - self.channel = channel - - @meas_log.trace() - def setMETType(self, met_type): - self.met_type = met_type - - @meas_log.trace() - def getCleanedShape(self, sample): - subtract = copy.copy(self.histograms.keys()) - subtract.remove(sample) - subtract.remove('data') - hist = clean_control_region(self.histograms, - data_label='data', - subtract=subtract, - fix_to_zero=True) - return hist - @meas_log.trace() - def read(self): - self.read_samples() - self.read_shapes() - self.read_norms() - - @meas_log.trace() - def read_samples(self): - if self.have_read_samples: - return - for sample in self.samples.keys(): - self.read_sample(sample) - self.have_read_samples = True - - @meas_log.trace() - def read_sample(self, sample): - if self.samples[sample].has_key('input'): - i = self.samples[sample]['input'] - if isinstance(i, dict): - i = Input(**self.samples[sample]['input']) - self.histograms[sample] = i.read() - return - input_file = self.samples[sample]['input_file'] - if self.samples[sample].has_key('hist'): - hist = self.samples[sample]['hist'] - self.histograms[sample] = get_histogram_from_file( - hist, input_file) - - @meas_log.trace() - def read_shapes(self): - if self.have_read_shapes: - return - if not self.have_read_samples: - self.read_samples() - for sample in self.shapes.keys(): - self.read_shape(sample) - self.have_read_shapes = True - - @meas_log.trace() - def read_norms(self): - if self.have_read_norms: - return - if not self.have_read_samples: - self.read_samples() - for sample in self.norms.keys(): - self.read_norm(sample) - self.have_read_norms = True - - @meas_log.trace() - def read_shape(self, sample): + def __return_histogram(self, d_hist_info, ignoreUnderflow=True, useQCDControl=False): ''' - Shape from a Control Region (CR) is currently treated as: - - define process A for which you which to get the shape 
- - define CR - - subtract other processes from data in the CR - - normalise the result to process A in signal region - - replace process A in signal region with the new histogram + Takes basic histogram info and returns histo. + Maybe this can move to ROOT_utilities? ''' - measurement = self.shapes[sample] - shape = measurement.getCleanedShape(sample) - if sample in self.histograms.keys(): - n_shape = shape.Integral() - mc = self.histograms[sample] - n_mc = mc.Integral() - scale = 1 - if not n_shape == 0: - if not n_mc == 0: - scale = 1 / n_shape * n_mc - else: - scale = 1 / n_shape - shape.Scale(scale) - self.histograms[sample] = shape - else: - meas_log.warning( - 'No MC entry found for sample "{0}", using shape normalisation'.format(sample)) - self.histograms[sample] = shape - - @meas_log.trace() - def read_norm(self, sample): + from rootpy.io.file import File + from rootpy.plotting import Hist + from dps.utils.hist_utilities import fix_overflow + + f = d_hist_info['input_file'] + tree = d_hist_info['tree'] + qcd_tree = d_hist_info["qcd_control_region"] + var = d_hist_info['branch'] + bins = d_hist_info['bin_edges'] + lumi_scale = d_hist_info['lumi_scale'] + scale = d_hist_info['scale'] + weights = d_hist_info['weight_branches'] + selection = d_hist_info['selection'] + + if useQCDControl: + # replace SR tree with CR tree + tree = qcd_tree + # Remove the Lepton reweighting for the datadriven qcd (SF not derived for unisolated leptons) + for weight in weights: + if 'Electron' in weight: weights.remove(weight) + elif 'Muon' in weight: weights.remove(weight) + + weights = "*".join(weights) + # Selection will return a weight 0 or 1 depending on whether event passes selection + weights_and_selection = '( {0} ) * ( {1} )'.format(weights, selection) + + scale *= lumi_scale + + root_file = File( f ) + root_tree = root_file.Get( tree ) + + root_histogram = Hist( bins ) + # Draw histogram of var for selection into root_histogram + root_tree.Draw(var, selection = 
weights_and_selection, hist = root_histogram) + root_histogram.Scale(scale) + + # When a tree is filled with a dummy variable, it will end up in the underflow, so ignore it + if ignoreUnderflow: + root_histogram.SetBinContent(0, 0) + root_histogram.SetBinError(0,0) + + # Fix overflow (Moves entries from overflow bin into last bin i.e. last bin not |..| but |--> ) + root_histogram = fix_overflow(root_histogram) + + root_file.Close() + return root_histogram + + + def __background_subtraction(self, histograms): ''' - Normalisation from a Control Region (CR) is currently treated as: - - define normalisation for process A - - define CR - - subtract other processes from data in the CR - - calculate the ratio between process A and data (both in CR) - - apply ratio to process A in signal region + Subtracts the backgrounds from data to give amount of ttbar in data. + Also adds all backgrounds to normalisation output ''' - measurement = self.norms[sample] - self.aux_info_norms[sample] = {} - # get ratio from control region - norm = measurement.getCleanedShape(sample) - mc_in_control = measurement.histograms[sample] - # scale sample to this ratio - if sample in self.histograms.keys(): - n_data_control = norm.Integral() - n_mc_control = mc_in_control.Integral() - ratio = n_data_control / n_mc_control - meas_log.debug('Ratio from control region {0}'.format(ratio)) - n_mc_signal_region = self.histograms[sample].integral() - self.histograms[sample].Scale(ratio) - self.aux_info_norms[sample]['norm_factor'] = round(ratio, 2) - self.aux_info_norms[sample]['n_mc_control'] = n_mc_control - self.aux_info_norms[sample][ - 'n_mc_signal_region'] = n_mc_signal_region - self.aux_info_norms[sample]['n_data_control'] = n_data_control - else: - meas_log.warning( - 'No MC entry found for sample "{0}", using control region normalisation'.format(sample)) - self.histograms[sample] = norm - - -class Systematic(Measurement): - - ''' - The Systematic class is an extension of the Measurement class. 
- It allows to implement systematic specific functionality - (e.g. rate systematics). - ''' - - SHAPE = 10 - RATE = 20 + ttjet_hist = clean_control_region( + histograms, + subtract=['QCD', 'V+Jets', 'SingleTop'] + ) + self.normalisation['TTJet'] = hist_to_value_error_tuplelist(ttjet_hist) + self.normalisation['data'] = hist_to_value_error_tuplelist(histograms['data']) + # self.normalisation['TTBar'] = hist_to_value_error_tuplelist(histograms['TTBar']) + self.normalisation['SingleTop'] = hist_to_value_error_tuplelist(histograms['SingleTop']) + self.normalisation['V+Jets'] = hist_to_value_error_tuplelist(histograms['V+Jets']) + self.normalisation['QCD'] = hist_to_value_error_tuplelist(histograms['QCD']) + return - @meas_log.trace() - def __init__(self, name, - stype=SHAPE, - affected_samples=[], - scale=1.): + def calculate_normalisation(self): ''' - Constructor + Calls the normalisation of the ttbar samples ''' - Measurement.__init__(self, name) - self.type = stype + # normalisation already calculated + if self.is_normalised: return - self.affected_samples = affected_samples + histograms = self.histograms + self.__background_subtraction(histograms) - self.scale = scale + # next, let's round all numbers (they are event numbers after all) + for sample, values in self.normalisation.items(): + new_values = [(round(v, 1), round(e, 1)) for v, e in values] + self.normalisation[sample] = new_values + self.is_normalised = True + return - @meas_log.trace() - def toDict(self): - output = Measurement.toDict(self) - output['type'] = self.type - output['affected_samples'] = self.affected_samples - output['scale'] = self.scale + def save(self, phase_space): + ''' + Saves the normalisation output into a JSON. 
+ I would like to change this to a pandas Dataframe at somepoint after + a few issues have been worked out + ''' + from dps.utils.pandas_utilities import write_tuple_to_df + from dps.utils.file_utilities import make_folder_if_not_exists + # If normalisation hasnt been calculated - then go calculate it! + if not self.is_normalised: self.calculate_normalisation() + + output_folder = 'data/normalisation/background_subtraction/{com}TeV/{var}/{ps}/{cat}/' + output_folder = output_folder.format( + com = self.com, + var = self.variable, + ps = phase_space, + cat = self.name, + ) + make_folder_if_not_exists(output_folder) + + file_template = '{type}_{channel}.txt' + f = file_template.format( + type='normalisation', + channel=self.channel + ) + + write_tuple_to_df( + self.normalisation, + output_folder + f + ) + return + + def combine(self, other): + ''' + Combines the electron and muon measurements + ''' + from dps.utils.Calculation import combine_complex_results + if not self.is_normalised or not other.is_normalised: + mylog.warn( + 'One of the TTJetNormalisations does not have a normalisation, aborting.') + return - return output + self.normalisation = combine_complex_results( + self.normalisation, other.normalisation) + self.channel = 'combined' + return diff --git a/dps/utils/measurement2.py b/dps/utils/measurement2.py deleted file mode 100644 index f1fcc7d1..00000000 --- a/dps/utils/measurement2.py +++ /dev/null @@ -1,225 +0,0 @@ -''' - Provides the classes Measurement and Systematic -''' -from __future__ import division -from . import log -from dps.utils.hist_utilities import hist_to_value_error_tuplelist, clean_control_region - -# define logger for this module -meas_log = log["dps.utils.measurement"] - -class Measurement(): - ''' - The Measurement class combines files and histogram paths into - one container. It also allows to provide separate shapes for the - histograms while using the normalisation from the initial set. 
- ''' - @meas_log.trace() - def __init__(self, measurement): - self.measurement = measurement - self.histograms = {} - self.cr_histograms = {} - self.normalisation = {} - self.variable = None - self.com = None - self.channel = None - self.name = None - self.is_normalised = False - self.central = False - self.samples = {} - self.__setFromConfig() - - def __setFromConfig(self): - self.variable = self.measurement["variable"] - self.com = self.measurement["com"] - self.channel = self.measurement["channel"] - self.samples = self.measurement["samples"] - self.name = self.measurement["name"] - data_driven_qcd = self.measurement["data_driven_qcd"] - - # Is this central or a systematic? - if "central" in self.name: - self.central = True - - # Retrieve histograms from files for SR and CR - for sample, histogram_info in self.samples.iteritems(): - self.histograms[sample] = self.__return_histogram(histogram_info) - if data_driven_qcd: - self.cr_histograms[sample] = self.__return_histogram(histogram_info, useQCDControl=True) - - # print(hist_to_value_error_tuplelist(self.histograms[sample])) - # print(hist_to_value_error_tuplelist(self.cr_histograms[sample])) - - # Replace QCD MC with data-driven MC - if data_driven_qcd: - self.__qcd_from_data() - return - - def __qcd_from_data(self): - ''' - Replace Signal region mc qcd with data driven qcd - - N MC QCD in SR N DD QCD in CR - QCD_SHAPE * -------------- * -------------- - N DD QCD in CR N MC QCD in CR - - Shape normalise to scale from - SR mc qcd mc qcd to dd qcd - ''' - # Get the shape of the data driven qcd in the control region - qcd_shape = clean_control_region( - self.cr_histograms, - subtract=['TTBar', 'V+Jets', 'SingleTop'] - ) - # print(hist_to_value_error_tuplelist(qcd_shape)) - - # Now to normalise the qcd shape to the MC in the Signal Region - # n_dd_cr= Number of datadriven qcd from Control Region - n_mc_sr = self.histograms['QCD'].Integral() - n_dd_cr = qcd_shape.Integral() - qcd_shape.Scale( n_mc_sr/n_dd_cr ) - # 
print "scaling to normalisation in SR MC : ", n_mc_sr/n_dd_cr - - # Now to scale from mc qcd to datadriven qcd - n_mc_cr = self.cr_histograms['QCD'].Integral() - qcd_shape.Scale( n_dd_cr/n_mc_cr ) - # print "scaling from MC to datadriven : ", n_dd_cr/n_mc_cr - # print "Total scaling : ", n_mc_sr/n_mc_cr - - # Replace QCD histogram with datadriven one - self.histograms['QCD'] = qcd_shape - return - - def __return_histogram(self, d_hist_info, ignoreUnderflow=True, useQCDControl=False): - ''' - Takes basic histogram info and returns histo. - Maybe this can move to ROOT_utilities? - ''' - from rootpy.io.file import File - from rootpy.plotting import Hist - from dps.utils.hist_utilities import fix_overflow - - f = d_hist_info['input_file'] - tree = d_hist_info['tree'] - qcd_tree = d_hist_info["qcd_control_region"] - var = d_hist_info['branch'] - bins = d_hist_info['bin_edges'] - lumi_scale = d_hist_info['lumi_scale'] - scale = d_hist_info['scale'] - weights = d_hist_info['weight_branches'] - selection = d_hist_info['selection'] - - if useQCDControl: - # replace SR tree with CR tree - tree = qcd_tree - # Remove the Lepton reweighting for the datadriven qcd (SF not derived for unisolated leptons) - for weight in weights: - if 'Electron' in weight: weights.remove(weight) - elif 'Muon' in weight: weights.remove(weight) - - weights = "*".join(weights) - # Selection will return a weight 0 or 1 depending on whether event passes selection - weights_and_selection = '( {0} ) * ( {1} )'.format(weights, selection) - - scale *= lumi_scale - - root_file = File( f ) - root_tree = root_file.Get( tree ) - - root_histogram = Hist( bins ) - # Draw histogram of var for selection into root_histogram - root_tree.Draw(var, selection = weights_and_selection, hist = root_histogram) - root_histogram.Scale(scale) - - # When a tree is filled with a dummy variable, it will end up in the underflow, so ignore it - if ignoreUnderflow: - root_histogram.SetBinContent(0, 0) - 
root_histogram.SetBinError(0,0) - - # Fix overflow (Moves entries from overflow bin into last bin i.e. last bin not |..| but |--> ) - root_histogram = fix_overflow(root_histogram) - - root_file.Close() - return root_histogram - - - def __background_subtraction(self, histograms): - ''' - Subtracts the backgrounds from data to give amount of ttbar in data. - Also adds all backgrounds to normalisation output - ''' - ttjet_hist = clean_control_region( - histograms, - subtract=['QCD', 'V+Jets', 'SingleTop'] - ) - self.normalisation['TTJet'] = hist_to_value_error_tuplelist(ttjet_hist) - self.normalisation['data'] = hist_to_value_error_tuplelist(histograms['data']) - # self.normalisation['TTBar'] = hist_to_value_error_tuplelist(histograms['TTBar']) - self.normalisation['SingleTop'] = hist_to_value_error_tuplelist(histograms['SingleTop']) - self.normalisation['V+Jets'] = hist_to_value_error_tuplelist(histograms['V+Jets']) - self.normalisation['QCD'] = hist_to_value_error_tuplelist(histograms['QCD']) - return - - def calculate_normalisation(self): - ''' - Calls the normalisation of the ttbar samples - ''' - # normalisation already calculated - if self.is_normalised: return - - histograms = self.histograms - self.__background_subtraction(histograms) - - # next, let's round all numbers (they are event numbers after all) - for sample, values in self.normalisation.items(): - new_values = [(round(v, 1), round(e, 1)) for v, e in values] - self.normalisation[sample] = new_values - self.is_normalised = True - return - - def save(self, phase_space): - ''' - Saves the normalisation output into a JSON. - I would like to change this to a pandas Dataframe at somepoint after - a few issues have been worked out - ''' - from dps.utils.pandas_utilities import write_tuple_to_df - from dps.utils.file_utilities import make_folder_if_not_exists - # If normalisation hasnt been calculated - then go calculate it! 
- if not self.is_normalised: self.calculate_normalisation() - - output_folder = 'data/normalisation/background_subtraction/{com}TeV/{var}/{ps}/{cat}/' - output_folder = output_folder.format( - com = self.com, - var = self.variable, - ps = phase_space, - cat = self.name, - ) - make_folder_if_not_exists(output_folder) - - file_template = '{type}_{channel}.txt' - f = file_template.format( - type='normalisation', - channel=self.channel - ) - - write_tuple_to_df( - self.normalisation, - output_folder + f - ) - return - - def combine(self, other): - ''' - Combines the electron and muon measurements - ''' - from dps.utils.Calculation import combine_complex_results - if not self.is_normalised or not other.is_normalised: - mylog.warn( - 'One of the TTJetNormalisations does not have a normalisation, aborting.') - return - - self.normalisation = combine_complex_results( - self.normalisation, other.normalisation) - self.channel = 'combined' - return From 98bc8b18c7d31d2f48258053fb12909fe8559e5d Mon Sep 17 00:00:00 2001 From: Douglas Burns Date: Tue, 10 Jan 2017 08:49:17 +0000 Subject: [PATCH 73/90] Emyrs DPS additions incorporated --- .../BLTUnfold/produceUnfoldingHistograms.py | 63 ++-- dps/analysis/BLTUnfold/runJobsCrab.py | 24 +- .../BLTUnfold/submitBLTUnfold.description | 2 +- .../unfolding_tests/unfolding_sandbox.py | 13 +- .../xsection/02_unfold_and_measure.py | 285 +++++++++++++----- .../xsection/04_make_plots_matplotlib.py | 29 +- .../xsection/make_control_plots_fromTrees.py | 21 +- dps/config/xsection.py | 140 ++++----- dps/experimental/howMuchLumiInNtuples.py | 5 +- dps/utils/systematic.py | 2 +- 10 files changed, 374 insertions(+), 210 deletions(-) diff --git a/dps/analysis/BLTUnfold/produceUnfoldingHistograms.py b/dps/analysis/BLTUnfold/produceUnfoldingHistograms.py index c24bff66..d6a83e4b 100644 --- a/dps/analysis/BLTUnfold/produceUnfoldingHistograms.py +++ b/dps/analysis/BLTUnfold/produceUnfoldingHistograms.py @@ -28,7 +28,15 @@ def calculateTopEtaWeight( 
lepTopRap, hadTopRap, whichWayToWeight = 1): else : return 1 -def calculateTopPtWeight( lepTopPt, hadTopPt ): + def calculateTopPtWeight( lepTopPt, hadTopPt, whichWayToWeight = 1 ): + if whichWayToWeight == -1 : + return max ( (-0.001 * lepTopPt + 1.1 ) * (-0.001 * hadTopPt + 1.1), 0.1 ) + elif whichWayToWeight == 1 : + return max ( (0.001 * lepTopPt + 0.9 ) * (0.001 * hadTopPt + 0.9), 0.1 ) + else : + return 1 + +def calculateTopPtSystematicWeight( lepTopPt, hadTopPt ): ''' Calculating the top pt weight ______________ A + B.Pt @@ -36,15 +44,14 @@ def calculateTopPtWeight( lepTopPt, hadTopPt ): A = 0.0615 B = -0.0005 - ''' - ptWeight = 1 - A = 0.0615 - B = -0.0005 - sf_lept = exp(A+(B*lepTopPt)) - sf_hadt = exp(A+(B*hadTopPt)) - ptWeight = sqrt(sf_hadt*sf_lept) - return ptWeight - + ''' + lepTopWeight = ptWeight( lepTopPt ) + hadTopWeight = ptWeight( hadTopPt ) + return sqrt( lepTopWeight * hadTopWeight ) + +def ptWeight( pt ): + return exp( 0.0615 - 0.0005 * pt ) + def calculateTopPtSystematicWeight( lepTopPt, hadTopPt ): lepTopWeight = ptWeight( lepTopPt ) @@ -64,8 +71,14 @@ def getFileName( com, sample, measurementConfig ) : 'madgraph' : measurementConfig.ttbar_madgraph_trees, 'powhegherwigpp' : measurementConfig.ttbar_powhegherwigpp_trees, - 'scaleup' : measurementConfig.ttbar_scaleup_trees, - 'scaledown' : measurementConfig.ttbar_scaledown_trees, + + 'ueup' : measurementConfig.ttbar_ueup_trees, + 'uedown' : measurementConfig.ttbar_uedown_trees, + 'isrup' : measurementConfig.ttbar_isrup_trees, + 'isrdown' : measurementConfig.ttbar_isrdown_trees, + 'fsrup' : measurementConfig.ttbar_fsrup_trees, + 'fsrdown' : measurementConfig.ttbar_fsrdown_trees, + 'massdown' : measurementConfig.ttbar_mtop1695_trees, 'massup' : measurementConfig.ttbar_mtop1755_trees, @@ -92,6 +105,9 @@ def getFileName( com, sample, measurementConfig ) : 'TauEnDown' : measurementConfig.ttbar_trees['central'], 'UnclusteredEnUp' : measurementConfig.ttbar_trees['central'], 'UnclusteredEnDown' : 
measurementConfig.ttbar_trees['central'], + + 'topPtSystematic' : measurementConfig.ttbar_trees['central'], + }, } @@ -182,7 +198,6 @@ def main(): pdfWeight = args.pdfWeight muFmuRWeight = args.muFmuRWeight - alphaSWeight = args.alphaSWeight # Output file name outputFileName = 'crap.root' @@ -198,10 +213,6 @@ def main(): outputFileName = outputFileDir+'/unfolding_TTJets_%s_asymmetric_withTopEtaReweighting_down.root' % energySuffix elif args.applyTopPtReweighting: outputFileName = outputFileDir+'/unfolding_TTJets_%s_asymmetric_withTopPtReweighting.root' % energySuffix - elif alphaSWeight == 0: - outputFileName = outputFileDir+'/unfolding_TTJets_%s_asymmetric_alphaSDown.root' % ( energySuffix ) - elif alphaSWeight == 1: - outputFileName = outputFileDir+'/unfolding_TTJets_%s_asymmetric_alphaSUp.root' % ( energySuffix ) elif muFmuRWeight == 1: outputFileName = outputFileDir+'/unfolding_TTJets_%s_asymmetric_1muR2muF.root' % ( energySuffix ) elif muFmuRWeight == 2: @@ -383,7 +394,7 @@ def main(): branch = event.__getattr__ n+=1 if not n%100000: print 'Processing event %.0f Progress : %.2g %%' % ( n, float(n)/nEntries*100 ) - if n == 1000: break + # if n == 100000: break # # # # # # Weights and selection # # # @@ -406,7 +417,7 @@ def main(): if args.sample == 'leptonup': leptonWeight = event.LeptonEfficiencyCorrectionUp elif args.sample == 'leptondown': - leptonWeight == event.LeptonEfficiencyCorrectionDown + leptonWeight = event.LeptonEfficiencyCorrectionDown # B Jet Weight bjetWeight = event.BJetWeight @@ -419,6 +430,11 @@ def main(): elif args.sample == "lightjetdown": bjetWeight = event.LightJetDownWeight + # Top pt systematic weight + topPtSystematicWeight = 1 + if args.sample == 'topPtSystematic': + topPtSystematicWeight = calculateTopPtSystematicWeight( branch('lepTopPt_parton'), branch('hadTopPt_parton')) + # Offline level weights offlineWeight = event.EventWeight * measurement_config.luminosity_scale offlineWeight *= pileupWeight @@ -439,13 +455,8 @@ def main(): 
offlineWeight *= branch('muFmuRWeight_%i' % muFmuRWeight) pass - if options.applyTopPtReweighting != 0: - ptWeight = calculateTopPtWeight( branch('lepTopPt_parton'), branch('hadTopPt_parton'), options.applyTopPtReweighting) - offlineWeight *= ptWeight - genWeight *= ptWeight - - if args.applyTopPtReweighting: - ptWeight = calculateTopPtWeight( branch('lepTopPt_parton'), branch('hadTopPt_parton')) + if args.applyTopPtReweighting != 0: + ptWeight = calculateTopPtWeight( branch('lepTopPt_parton'), branch('hadTopPt_parton'), args.applyTopPtReweighting) offlineWeight *= ptWeight genWeight *= ptWeight diff --git a/dps/analysis/BLTUnfold/runJobsCrab.py b/dps/analysis/BLTUnfold/runJobsCrab.py index 2d7d7d00..28b89215 100755 --- a/dps/analysis/BLTUnfold/runJobsCrab.py +++ b/dps/analysis/BLTUnfold/runJobsCrab.py @@ -7,20 +7,28 @@ '--centreOfMassEnergy 13 -f', '--centreOfMassEnergy 13 -s central', - - '--centreOfMassEnergy 13 -s central --topPtReweighting 1', - '--centreOfMassEnergy 13 -s central --topPtReweighting -1', - '--centreOfMassEnergy 13 -s central --topEtaReweighting 1', - '--centreOfMassEnergy 13 -s central --topEtaReweighting -1', + # '--centreOfMassEnergy 13 -s central --topPtReweighting 1', + # '--centreOfMassEnergy 13 -s central --topPtReweighting -1', + # '--centreOfMassEnergy 13 -s central --topEtaReweighting 1', + # '--centreOfMassEnergy 13 -s central --topEtaReweighting -1', # '--centreOfMassEnergy 13 -s amcatnlo', # '--centreOfMassEnergy 13 -s madgraph', '--centreOfMassEnergy 13 -s powhegherwigpp', # # '--centreOfMassEnergy 13 -s amcatnloherwigpp', - # # PS scale samples - '--centreOfMassEnergy 13 -s scaleup', - '--centreOfMassEnergy 13 -s scaledown', + # Top pt + '--centreOfMassEnergy 13 -s topPtSystematic', + + # Underlying event samples + '--centreOfMassEnergy 13 -s ueup', + '--centreOfMassEnergy 13 -s uedown', + + # isr/fsr variations + '--centreOfMassEnergy 13 -s isrup', + '--centreOfMassEnergy 13 -s isrdown', + '--centreOfMassEnergy 13 -s fsrup', + 
'--centreOfMassEnergy 13 -s fsrdown', # ME scale weights '--centreOfMassEnergy 13 --muFmuRWeight 1', diff --git a/dps/analysis/BLTUnfold/submitBLTUnfold.description b/dps/analysis/BLTUnfold/submitBLTUnfold.description index feee567b..ecde298e 100644 --- a/dps/analysis/BLTUnfold/submitBLTUnfold.description +++ b/dps/analysis/BLTUnfold/submitBLTUnfold.description @@ -15,4 +15,4 @@ request_memory=500 # use the ENV that is provided getenv = true -queue 141 +queue 138 diff --git a/dps/analysis/unfolding_tests/unfolding_sandbox.py b/dps/analysis/unfolding_tests/unfolding_sandbox.py index c695043a..2006dabc 100644 --- a/dps/analysis/unfolding_tests/unfolding_sandbox.py +++ b/dps/analysis/unfolding_tests/unfolding_sandbox.py @@ -20,9 +20,10 @@ def main(): for channel in ['combined', 'muon', 'electron']: + # for variable in config.variables: for variable in config.variables: # for variable in ['MET']: - + print variable # tau_value = get_tau_value(config, channel, variable) @@ -33,7 +34,6 @@ def main(): inputfile=file_for_unfolding, variable=variable, channel=channel, - met_type=config.met_type, centre_of_mass=config.centre_of_mass_energy, ttbar_xsection=config.ttbar_xsection, luminosity=config.luminosity, @@ -63,12 +63,12 @@ def main(): measured.SetBinContent(0,0) truth = asrootpy(h_response_ph.ProjectionY()) # print 'Truth from response :',list(truth.y()) - # print 'Truth underflow : ',truth.GetBinContent(0),truth.GetBinContent(truth.GetNbinsX()+1) + # print 'Truth underflow :',truth.GetBinContent(0),truth.GetBinContent(truth.GetNbinsX()+1) # Unfold unfolding = Unfolding( measured, truth, measured, h_response, None, - method=method, tau=tau_value) + method=method, k_value=-1, tau=tau_value) # unfolded_data = unfolding.closureTest() @@ -80,8 +80,10 @@ def main(): # print binx, biny,h_response.GetBinContent(binx,biny) # print bin,h_truth.GetBinContent(bin) # print 'Tau :',tau_value - unfolded_results = unfolding.unfold() + # print 'Unfolded :',list( unfolded_results.y() ) + 
# print unfolding.unfoldObject.GetTau() + # print 'Unfolded :',list( unfolded_results.y() ) refolded_results = unfolding.refold() refolded_results.rebin(2) @@ -96,7 +98,6 @@ def main(): print pValue,1-pValue # print unfolding.unfoldObject.GetTau() - def get_tau_value(config, channel, variable): if channel == 'electron': return config.tau_values_electron[variable] diff --git a/dps/analysis/xsection/02_unfold_and_measure.py b/dps/analysis/xsection/02_unfold_and_measure.py index a7eab7f7..c044f18b 100644 --- a/dps/analysis/xsection/02_unfold_and_measure.py +++ b/dps/analysis/xsection/02_unfold_and_measure.py @@ -29,17 +29,23 @@ def get_unfolding_files(measurement_config): 'PDFWeights_%d' % (index) : File ( measurement_config.unfolding_pdfweights[index] ) for index in range( 0, 100 ) } - unfolding_files['file_for_scaledown'] = File( measurement_config.unfolding_scale_down, 'read' ) - unfolding_files['file_for_scaleup'] = File( measurement_config.unfolding_scale_up, 'read' ) - unfolding_files['file_for_renormalisationdown'] = File( measurement_config.unfolding_renormalisation_down, 'read' ) unfolding_files['file_for_renormalisationup'] = File( measurement_config.unfolding_renormalisation_up, 'read' ) unfolding_files['file_for_factorisationdown'] = File( measurement_config.unfolding_factorisation_down, 'read' ) unfolding_files['file_for_factorisationup'] = File( measurement_config.unfolding_factorisation_up, 'read' ) unfolding_files['file_for_combineddown'] = File( measurement_config.unfolding_combined_down, 'read' ) unfolding_files['file_for_combinedup'] = File( measurement_config.unfolding_combined_up, 'read' ) - unfolding_files['file_for_alphaSdown'] = File( measurement_config.unfolding_alphaS_down, 'read' ) - unfolding_files['file_for_alphaSup'] = File( measurement_config.unfolding_alphaS_up, 'read' ) + # unfolding_files['file_for_alphaSdown'] = File( measurement_config.unfolding_alphaS_down, 'read' ) + # unfolding_files['file_for_alphaSup'] = File( 
measurement_config.unfolding_alphaS_up, 'read' ) + + + unfolding_files['file_for_isrdown'] = File( measurement_config.unfolding_isr_down, 'read' ) + unfolding_files['file_for_isrup'] = File( measurement_config.unfolding_isr_up, 'read' ) + unfolding_files['file_for_fsrdown'] = File( measurement_config.unfolding_fsr_down, 'read' ) + unfolding_files['file_for_fsrup'] = File( measurement_config.unfolding_fsr_up, 'read' ) + unfolding_files['file_for_uedown'] = File( measurement_config.unfolding_ue_down, 'read' ) + unfolding_files['file_for_ueup'] = File( measurement_config.unfolding_ue_up, 'read' ) + unfolding_files['file_for_massdown'] = File( measurement_config.unfolding_mass_down, 'read' ) unfolding_files['file_for_massup'] = File( measurement_config.unfolding_mass_up, 'read' ) @@ -69,10 +75,12 @@ def get_unfolding_files(measurement_config): unfolding_files['file_for_PUUp'] = File( measurement_config.unfolding_PUSystematic_up, 'read') unfolding_files['file_for_PUDown'] = File( measurement_config.unfolding_PUSystematic_down, 'read') + unfolding_files['file_for_ptreweight'] = File( measurement_config.unfolding_ptreweight, 'read' ) + unfolding_files['file_for_powhegPythia8'] = File( measurement_config.unfolding_powheg_pythia8, 'read') - unfolding_files['file_for_amcatnlo'] = File( measurement_config.unfolding_amcatnlo, 'read') - unfolding_files['file_for_amcatnlo_herwig'] = File( measurement_config.unfolding_amcatnlo_herwig, 'read') - unfolding_files['file_for_madgraphMLM'] = File( measurement_config.unfolding_madgraphMLM, 'read') + # unfolding_files['file_for_amcatnlo'] = File( measurement_config.unfolding_amcatnlo, 'read') + # unfolding_files['file_for_amcatnlo_herwig'] = File( measurement_config.unfolding_amcatnlo_herwig, 'read') + # unfolding_files['file_for_madgraphMLM'] = File( measurement_config.unfolding_madgraphMLM, 'read') unfolding_files['file_for_powheg_herwig'] = File( measurement_config.unfolding_powheg_herwig, 'read' ) return unfolding_files @@ -117,13 
+125,10 @@ def unfold_results( results, category, channel, tau_value, h_truth, h_measured, def get_unfolded_normalisation( TTJet_normalisation_results, category, channel, tau_value, visiblePS ): global com, luminosity, ttbar_xsection, method, variable, path_to_DF - global unfolding_files, file_for_ptreweight + global unfolding_files - # Add in this option? - global use_ptreweight files_for_systematics = { - 'TTJets_scaledown' : unfolding_files['file_for_scaledown'], - 'TTJets_scaleup' : unfolding_files['file_for_scaleup'], + 'TTJets_massdown' : unfolding_files['file_for_massdown'], 'TTJets_massup' : unfolding_files['file_for_massup'], @@ -133,8 +138,15 @@ def get_unfolded_normalisation( TTJet_normalisation_results, category, channel, 'TTJets_renormalisationup' : unfolding_files['file_for_renormalisationup'], 'TTJets_combineddown' : unfolding_files['file_for_combineddown'], 'TTJets_combinedup' : unfolding_files['file_for_combinedup'], - 'TTJets_alphaSdown' : unfolding_files['file_for_alphaSdown'], - 'TTJets_alphaSup' : unfolding_files['file_for_alphaSup'], + # 'TTJets_alphaSdown' : unfolding_files['file_for_alphaSdown'], + # 'TTJets_alphaSup' : unfolding_files['file_for_alphaSup'], + + 'TTJets_isrdown' : unfolding_files['file_for_isrdown'], + 'TTJets_isrup' : unfolding_files['file_for_isrup'], + 'TTJets_fsrdown' : unfolding_files['file_for_fsrdown'], + 'TTJets_fsrup' : unfolding_files['file_for_fsrup'], + 'TTJets_uedown' : unfolding_files['file_for_uedown'], + 'TTJets_ueup' : unfolding_files['file_for_ueup'], 'JES_down' : unfolding_files['file_for_jesdown'], 'JES_up' : unfolding_files['file_for_jesup'], @@ -149,7 +161,7 @@ def get_unfolded_normalisation( TTJet_normalisation_results, category, channel, 'LightJet_down' : unfolding_files['file_for_lightjetdown'], 'TTJets_hadronisation' : unfolding_files['file_for_powheg_herwig'], - 'TTJets_NLOgenerator' : unfolding_files['file_for_amcatnlo'], + # 'TTJets_NLOgenerator' : unfolding_files['file_for_amcatnlo'], 
'ElectronEnUp' : unfolding_files['file_for_ElectronEnUp'], 'ElectronEnDown' : unfolding_files['file_for_ElectronEnDown'], @@ -167,6 +179,9 @@ def get_unfolded_normalisation( TTJet_normalisation_results, category, channel, 'PileUp_up' : unfolding_files['file_for_PUUp'], 'PileUp_down' : unfolding_files['file_for_PUDown'], + + 'Top_Pt_reweight' : unfolding_files['file_for_ptreweight'], + } h_truth, h_measured, h_response, h_fakes = None, None, None, None @@ -253,8 +268,38 @@ def get_unfolded_normalisation( TTJet_normalisation_results, category, channel, load_fakes = True, visiblePS = visiblePS, ) - h_truth_powhegPythia8, _, _, _ = get_unfold_histogram_tuple( - inputfile = file_for_powhegPythia8, + h_truth_fsrdown, _, _, _ = get_unfold_histogram_tuple( + inputfile = file_for_fsrdown, + variable = variable, + channel = channel, + com = com, + ttbar_xsection = ttbar_xsection, + luminosity = luminosity, + load_fakes = True, + visiblePS = visiblePS, + ) + h_truth_fsrup, _, _, _ = get_unfold_histogram_tuple( + inputfile = file_for_fsrup, + variable = variable, + channel = channel, + com = com, + ttbar_xsection = ttbar_xsection, + luminosity = luminosity, + load_fakes = True, + visiblePS = visiblePS, + ) + h_truth_isrdown, _, _, _ = get_unfold_histogram_tuple( + inputfile = file_for_isrdown, + variable = variable, + channel = channel, + com = com, + ttbar_xsection = ttbar_xsection, + luminosity = luminosity, + load_fakes = True, + visiblePS = visiblePS, + ) + h_truth_isrup, _, _, _ = get_unfold_histogram_tuple( + inputfile = file_for_isrup, variable = variable, channel = channel, com = com, @@ -263,8 +308,8 @@ def get_unfolded_normalisation( TTJet_normalisation_results, category, channel, load_fakes = True, visiblePS = visiblePS, ) - h_truth_amcatnlo, _, _, _ = get_unfold_histogram_tuple( - inputfile = file_for_amcatnlo, + h_truth_uedown, _, _, _ = get_unfold_histogram_tuple( + inputfile = file_for_uedown, variable = variable, channel = channel, com = com, @@ -273,8 +318,8 
@@ def get_unfolded_normalisation( TTJet_normalisation_results, category, channel, load_fakes = True, visiblePS = visiblePS, ) - h_truth_madgraphMLM, _, _, _ = get_unfold_histogram_tuple( - inputfile = file_for_madgraphMLM, + h_truth_ueup, _, _, _ = get_unfold_histogram_tuple( + inputfile = file_for_ueup, variable = variable, channel = channel, com = com, @@ -283,6 +328,37 @@ def get_unfolded_normalisation( TTJet_normalisation_results, category, channel, load_fakes = True, visiblePS = visiblePS, ) + + h_truth_powhegPythia8, _, _, _ = get_unfold_histogram_tuple( + inputfile = file_for_powhegPythia8, + variable = variable, + channel = channel, + com = com, + ttbar_xsection = ttbar_xsection, + luminosity = luminosity, + load_fakes = True, + visiblePS = visiblePS, + ) + # h_truth_amcatnlo, _, _, _ = get_unfold_histogram_tuple( + # inputfile = file_for_amcatnlo, + # variable = variable, + # channel = channel, + # com = com, + # ttbar_xsection = ttbar_xsection, + # luminosity = luminosity, + # load_fakes = True, + # visiblePS = visiblePS, + # ) + # h_truth_madgraphMLM, _, _, _ = get_unfold_histogram_tuple( + # inputfile = file_for_madgraphMLM, + # variable = variable, + # channel = channel, + # com = com, + # ttbar_xsection = ttbar_xsection, + # luminosity = luminosity, + # load_fakes = True, + # visiblePS = visiblePS, + # ) h_truth_powheg_herwig, _, _, _ = get_unfold_histogram_tuple( inputfile = file_for_powheg_herwig, variable = variable, @@ -293,14 +369,21 @@ def get_unfolded_normalisation( TTJet_normalisation_results, category, channel, load_fakes = True, visiblePS = visiblePS, ) + normalisation_unfolded['powhegPythia8'] = hist_to_value_error_tuplelist( h_truth_powhegPythia8 ) - normalisation_unfolded['amcatnlo'] = hist_to_value_error_tuplelist( h_truth_madgraphMLM ) - normalisation_unfolded['madgraphMLM'] = hist_to_value_error_tuplelist( h_truth_amcatnlo ) + # normalisation_unfolded['amcatnlo'] = hist_to_value_error_tuplelist( h_truth_madgraphMLM ) + # 
normalisation_unfolded['madgraphMLM'] = hist_to_value_error_tuplelist( h_truth_amcatnlo ) normalisation_unfolded['powhegHerwig'] = hist_to_value_error_tuplelist( h_truth_powheg_herwig ) normalisation_unfolded['massdown'] = hist_to_value_error_tuplelist( h_truth_massdown ) normalisation_unfolded['massup'] = hist_to_value_error_tuplelist( h_truth_massup ) + normalisation_unfolded['isrdown'] = hist_to_value_error_tuplelist( h_truth_isrdown ) + normalisation_unfolded['isrup'] = hist_to_value_error_tuplelist( h_truth_isrup ) + normalisation_unfolded['fsrdown'] = hist_to_value_error_tuplelist( h_truth_fsrdown ) + normalisation_unfolded['fsrup'] = hist_to_value_error_tuplelist( h_truth_fsrup ) + normalisation_unfolded['uedown'] = hist_to_value_error_tuplelist( h_truth_uedown ) + normalisation_unfolded['ueup'] = hist_to_value_error_tuplelist( h_truth_ueup ) # Write all normalisations in unfolded binning scheme to dataframes file_template = '{path_to_DF}/{category}/unfolded_normalisation_{channel}_{method}.txt' @@ -341,21 +424,21 @@ def calculate_xsections( normalisation, category, channel ): luminosity, branching_ratio ) - xsection_unfolded['amcatnlo'] = calculate_xsection( - normalisation['amcatnlo'], - luminosity, - branching_ratio - ) - xsection_unfolded['madgraphMLM'] = calculate_xsection( - normalisation['powhegHerwig'], - luminosity, - branching_ratio - ) + # xsection_unfolded['amcatnlo'] = calculate_xsection( + # normalisation['amcatnlo'], + # luminosity, + # branching_ratio + # ) xsection_unfolded['powhegHerwig'] = calculate_xsection( - normalisation['madgraphMLM'], + normalisation['powhegHerwig'], luminosity, branching_ratio ) + # xsection_unfolded['madgraphMLM'] = calculate_xsection( + # normalisation['madgraphMLM'], + # luminosity, + # branching_ratio + # ) xsection_unfolded['massdown'] = calculate_xsection( normalisation['massdown'], @@ -367,6 +450,37 @@ def calculate_xsections( normalisation, category, channel ): luminosity, branching_ratio ) + 
xsection_unfolded['isrdown'] = calculate_xsection( + normalisation['isrdown'], + luminosity, + branching_ratio + ) + xsection_unfolded['isrup'] = calculate_xsection( + normalisation['isrup'], + luminosity, + branching_ratio + ) + xsection_unfolded['fsrdown'] = calculate_xsection( + normalisation['fsrdown'], + luminosity, + branching_ratio + ) + xsection_unfolded['fsrup'] = calculate_xsection( + normalisation['fsrup'], + luminosity, + branching_ratio + ) + xsection_unfolded['uedown'] = calculate_xsection( + normalisation['uedown'], + luminosity, + branching_ratio + ) + xsection_unfolded['ueup'] = calculate_xsection( + normalisation['ueup'], + luminosity, + branching_ratio + ) + file_template = '{path_to_DF}/{category}/xsection_{channel}_{method}.txt' write_02(xsection_unfolded, file_template, path_to_DF, category, channel, method) @@ -402,34 +516,64 @@ def calculate_normalised_xsections( normalisation, category, channel, normalise_ ) if category == 'central': - normalised_xsection['massdown'] = calculate_normalised_xsection( + normalised_xsection['powhegPythia8'] = calculate_normalised_xsection( normalisation['powhegPythia8'], binWidths[variable], normalise_to_one, ) - normalised_xsection['massdown'] = calculate_normalised_xsection( - normalisation['amcatnlo'], + # normalised_xsection['amcatnlo'] = calculate_normalised_xsection( + # normalisation['amcatnlo'], + # binWidths[variable], + # normalise_to_one, + # ) + normalised_xsection['powhegHerwig'] = calculate_normalised_xsection( + normalisation['powhegHerwig'], binWidths[variable], normalise_to_one, ) + # normalised_xsection['madgraphMLM'] = calculate_normalised_xsection( + # normalisation['madgraphMLM'], + # binWidths[variable], + # normalise_to_one, + # ) + normalised_xsection['massdown'] = calculate_normalised_xsection( - normalisation['powhegHerwig'], + normalisation['massdown'], binWidths[variable], normalise_to_one, ) - normalised_xsection['massdown'] = calculate_normalised_xsection( - 
normalisation['madgraphMLM'], + normalised_xsection['massup'] = calculate_normalised_xsection( + normalisation['massup'], binWidths[variable], normalise_to_one, ) - - normalised_xsection['massdown'] = calculate_normalised_xsection( - normalisation['massdown'], + normalised_xsection['isrup'] = calculate_normalised_xsection( + normalisation['isrup'], binWidths[variable], normalise_to_one, ) - normalised_xsection['massdown'] = calculate_normalised_xsection( - normalisation['massup'], + normalised_xsection['isrup'] = calculate_normalised_xsection( + normalisation['isrup'], + binWidths[variable], + normalise_to_one, + ) + normalised_xsection['fsrup'] = calculate_normalised_xsection( + normalisation['fsrup'], + binWidths[variable], + normalise_to_one, + ) + normalised_xsection['fsrup'] = calculate_normalised_xsection( + normalisation['fsrup'], + binWidths[variable], + normalise_to_one, + ) + normalised_xsection['ueup'] = calculate_normalised_xsection( + normalisation['ueup'], + binWidths[variable], + normalise_to_one, + ) + normalised_xsection['ueup'] = calculate_normalised_xsection( + normalisation['ueup'], binWidths[variable], normalise_to_one, ) @@ -473,7 +617,6 @@ def parse_arguments(): args = parser.parse_args() return args - if __name__ == '__main__': set_root_defaults( msg_ignore_level = 3001 ) # setup @@ -502,7 +645,7 @@ def parse_arguments(): phase_space = "VisiblePS" unfolding_files = get_unfolding_files(measurement_config) - path_to_DF = 'TESTING/{path}/{com}TeV/{variable}/{phase_space}/'.format( + path_to_DF = '{path}/{com}TeV/{variable}/{phase_space}/'.format( path = args.path, com = measurement_config.com_energy, variable = variable, @@ -512,7 +655,7 @@ def parse_arguments(): # Core Systematics all_measurements = deepcopy( measurement_config.measurements ) # Adding PDF Systematics - pdf_uncertainties = ['PDFWeights_%d' % index for index in range( 0, 100 )] + pdf_uncertainties = ['PDFWeights_%d' % index for index in range(measurement_config.pdfWeightMin, 
measurement_config.pdfWeightMax )] all_measurements.extend( pdf_uncertainties ) # # TTBar Reweighting Systematics # ttbar_theory_systematics = [ 'TTJets_ptreweight', 'TTJets_etareweight' ] @@ -561,9 +704,9 @@ def parse_arguments(): # Combine the normalisations (beforeUnfolding) # normalisation_results_combined = combine_complex_results(normalisation_results_electron, normalisation_results_muon) - # TTJet_normalisation_results_electron = normalisation_results_electron['TTJet'] + TTJet_normalisation_results_electron = normalisation_results_electron['TTJet'] TTJet_normalisation_results_muon = normalisation_results_muon['TTJet'] - # TTJet_normalisation_results_combined = normalisation_results_combined['TTJet'] + TTJet_normalisation_results_combined = normalisation_results_combined['TTJet'] # # get unfolded normalisations and xsections unfolded_normalisation_electron = {} @@ -572,19 +715,19 @@ def parse_arguments(): unfolded_normalisation_combinedBeforeUnfolding = {} - # # Electron channel - # channel = 'electron' - # unfolded_normalisation_electron = get_unfolded_normalisation( - # TTJet_normalisation_results_electron, - # category, - # channel, - # tau_value_electron, - # visiblePS = visiblePS - # ) - # # measure xsection - # calculate_xsections( unfolded_normalisation_electron, category, channel ) - # calculate_normalised_xsections( unfolded_normalisation_electron, category, channel ) - # calculate_normalised_xsections( unfolded_normalisation_electron, category, channel , True ) + # Electron channel + channel = 'electron' + unfolded_normalisation_electron = get_unfolded_normalisation( + TTJet_normalisation_results_electron, + category, + channel, + tau_value_electron, + visiblePS = visiblePS + ) + # measure xsection + calculate_xsections( unfolded_normalisation_electron, category, channel ) + calculate_normalised_xsections( unfolded_normalisation_electron, category, channel ) + calculate_normalised_xsections( unfolded_normalisation_electron, category, channel , True 
) # Muon channel channel = 'muon' @@ -614,13 +757,13 @@ def parse_arguments(): # calculate_normalised_xsections( unfolded_normalisation_combinedBeforeUnfolding, category, channel ) # calculate_normalised_xsections( unfolded_normalisation_combinedBeforeUnfolding, category, channel , True ) - # # Results where the channels are combined after unfolding - # channel = 'combined' - # unfolded_normalisation_combined = combine_complex_results( unfolded_normalisation_electron, unfolded_normalisation_muon ) - # # measure xsection - # calculate_xsections( unfolded_normalisation_combined, category, channel ) - # calculate_normalised_xsections( unfolded_normalisation_combined, category, channel ) - # calculate_normalised_xsections( unfolded_normalisation_combined, category, channel , True ) + # Results where the channels are combined after unfolding + channel = 'combined' + unfolded_normalisation_combined = combine_complex_results( unfolded_normalisation_electron, unfolded_normalisation_muon ) + # measure xsection + calculate_xsections( unfolded_normalisation_combined, category, channel ) + calculate_normalised_xsections( unfolded_normalisation_combined, category, channel ) + calculate_normalised_xsections( unfolded_normalisation_combined, category, channel , True ) diff --git a/dps/analysis/xsection/04_make_plots_matplotlib.py b/dps/analysis/xsection/04_make_plots_matplotlib.py index 1243a8a9..d975f2b7 100644 --- a/dps/analysis/xsection/04_make_plots_matplotlib.py +++ b/dps/analysis/xsection/04_make_plots_matplotlib.py @@ -72,8 +72,8 @@ def read_xsection_measurement_results( category, channel ): # Add in distributions for the different MC to be shown h_normalised_xsection_powhegPythia8 = value_error_tuplelist_to_hist( normalised_xsection_unfolded['powhegPythia8'], edges ) - h_normalised_xsection_amcatnlo = value_error_tuplelist_to_hist( normalised_xsection_unfolded['amcatnlo'], edges ) - h_normalised_xsection_madgraphMLM = value_error_tuplelist_to_hist( 
normalised_xsection_unfolded['madgraphMLM'], edges ) + # h_normalised_xsection_amcatnlo = value_error_tuplelist_to_hist( normalised_xsection_unfolded['amcatnlo'], edges ) + # h_normalised_xsection_madgraphMLM = value_error_tuplelist_to_hist( normalised_xsection_unfolded['madgraphMLM'], edges ) h_normalised_xsection_powhegHerwigpp = value_error_tuplelist_to_hist( normalised_xsection_unfolded['powhegHerwig'], edges ) h_normalised_xsection_massup = value_error_tuplelist_to_hist( normalised_xsection_unfolded['massup'], edges ) @@ -83,8 +83,8 @@ def read_xsection_measurement_results( category, channel ): histograms_normalised_xsection_different_generators.update( { 'powhegPythia8' : h_normalised_xsection_powhegPythia8, - 'amcatnloPythia8' : h_normalised_xsection_amcatnlo, - 'madgraphMLM' : h_normalised_xsection_madgraphMLM, + # 'amcatnloPythia8' : h_normalised_xsection_amcatnlo, + # 'madgraphMLM' : h_normalised_xsection_madgraphMLM, 'powhegHerwig' : h_normalised_xsection_powhegHerwigpp, } ) @@ -756,11 +756,16 @@ def parse_arguments(): ) all_measurements = deepcopy( measurement_config.measurements ) - pdf_uncertainties = ['PDFWeights_%d' % index for index in range( 1, 45 )] + pdf_uncertainties = ['PDFWeights_%d' % index for index in range( measurement_config.pdfWeightMin, measurement_config.pdfWeightMax )] all_measurements.extend( pdf_uncertainties ) - # for channel in ['electron', 'muon', 'combined', 'combinedBeforeUnfolding']: - for channel in ['muon']: + channel = [ + # 'electron', + # 'muon', + 'combined', + # 'combinedBeforeUnfolding', + ] + for ch in channel: for category in all_measurements: # Show central only. 
TODO Add in additional systematic comparison plots @@ -770,10 +775,10 @@ def parse_arguments(): # Read the xsection results from dataframe histograms_normalised_xsection_different_generators, histograms_normalised_xsection_systematics_shifts = read_xsection_measurement_results( category, channel ) - histname = '{variable}_normalised_xsection_{channel}_{phase_space}_{method}' + histname = '{variable}_normalised_xsection_{ch}_{phase_space}_{method}' histname = histname.format( variable = variable, - channel = channel, + ch = ch, phase_space = phase_space, method = method ) @@ -803,6 +808,6 @@ def parse_arguments(): # MET Only # Rate Changing Only # etc... - # plot_central_and_systematics( channel, measurements, exclude = ttbar_generator_systematics ) - # plot_central_and_systematics( channel, ttbar_generator_systematics, suffix = 'ttbar_generator_only' ) - # plot_central_and_systematics( channel, rate_changing_systematics, suffix = 'rate_changing_only' ) + # plot_central_and_systematics( ch, measurements, exclude = ttbar_generator_systematics ) + # plot_central_and_systematics( ch, ttbar_generator_systematics, suffix = 'ttbar_generator_only' ) + # plot_central_and_systematics( ch, rate_changing_systematics, suffix = 'rate_changing_only' ) diff --git a/dps/analysis/xsection/make_control_plots_fromTrees.py b/dps/analysis/xsection/make_control_plots_fromTrees.py index 6e67bf5b..fc5e8df5 100644 --- a/dps/analysis/xsection/make_control_plots_fromTrees.py +++ b/dps/analysis/xsection/make_control_plots_fromTrees.py @@ -46,16 +46,16 @@ def getHistograms( histogram_files, if use_qcd_data_region: qcd_data_region = qcd_data_region_electron # No Lepton Eff in QCD CR and PU distributions - if not 'QCD' in channel and not 'NPU' in branchName: - weightBranchSignalRegion += ' * ElectronEfficiencyCorrection' + # if not 'QCD' in channel and not 'NPU' in branchName: + # weightBranchSignalRegion += ' * ElectronEfficiencyCorrection' if 'muon' in channel: histogram_files['data'] = 
measurement_config.data_file_muon histogram_files['QCD'] = measurement_config.muon_QCD_MC_trees[category] if use_qcd_data_region: qcd_data_region = qcd_data_region_muon - if not 'QCD' in channel: - weightBranchSignalRegion += ' * MuonEfficiencyCorrection' + # if not 'QCD' in channel: + # weightBranchSignalRegion += ' * MuonEfficiencyCorrection' # Print all the weights applied to this plot print weightBranchSignalRegion @@ -82,7 +82,8 @@ def getHistograms( histogram_files, histograms_electron = get_histograms_from_trees( trees = [signal_region_tree.replace('COMBINED','EPlusJets')], branch = branchName, - weightBranch = weightBranchSignalRegion + ' * ElectronEfficiencyCorrection', + # weightBranch = weightBranchSignalRegion + ' * ElectronEfficiencyCorrection', + weightBranch = weightBranchSignalRegion, files = histogram_files_electron, nBins = nBins, xMin = x_limits[0], @@ -92,7 +93,8 @@ def getHistograms( histogram_files, histograms_muon = get_histograms_from_trees( trees = [signal_region_tree.replace('COMBINED','MuPlusJets')], branch = branchName, - weightBranch = weightBranchSignalRegion + ' * MuonEfficiencyCorrection', + # weightBranch = weightBranchSignalRegion + ' * MuonEfficiencyCorrection', + weightBranch = weightBranchSignalRegion, files = histogram_files_muon, nBins = nBins, xMin = x_limits[0], @@ -308,7 +310,7 @@ def make_plot( channel, x_axis_title, y_axis_title, maxData = max( list(signal_region_hists['data'].y()) ) y_limits = [0, maxData * 1.4] if log_y: - y_limits = [0.1, maxData * 10 ] + y_limits = [0.1, maxData * 100 ] # More histogram settings to look semi decent histogram_properties = Histogram_properties() @@ -427,7 +429,7 @@ def parse_arguments(): normalise_to_data = args.normalise_to_data - output_folder = '%s/%dTeV/' % ( args.output_folder, measurement_config.centre_of_mass_energy ) + output_folder = '{o}/'.format( o = args.output_folder ) output_folder_base = output_folder make_folder_if_not_exists( output_folder_base ) @@ -464,6 +466,7 @@ def 
parse_arguments(): 'AbsLeptonEta', 'NJets', 'NBJets', + # 'NBJetsNoWeight', # 'NBJetsUp', # 'NBJetsDown', @@ -687,7 +690,7 @@ def parse_arguments(): x_limits = control_plots_bins['NJets'], nBins = len(control_plots_bins['NJets'])-1, rebin = 1, - legend_location = ( 1, 0.78 ), + legend_location = ( 1.0, 0.78 ), cms_logo_location = 'left', use_qcd_data_region = useQCDControl, log_y = True, diff --git a/dps/config/xsection.py b/dps/config/xsection.py index 50a11829..459cea89 100644 --- a/dps/config/xsection.py +++ b/dps/config/xsection.py @@ -2,7 +2,7 @@ import dps.utils.measurement class XSectionConfig(): - current_analysis_path = '/hdfs/TopQuarkGroup/ec6821/0.0.10/atOutput/combined/' + current_analysis_path = '/hdfs/TopQuarkGroup/ec6821/1.0.0/atOutput/combined/' known_centre_of_mass_energies = [13] # has to be separate as many variables depend on it luminosities = {13:36459} @@ -21,7 +21,6 @@ class XSectionConfig(): 'general_category_templates_trees', 'generator_systematic_vjets_templates', 'generator_systematics', - 'generator_mcsamples', 'higgs_category_templates', 'higgs_file', 'include_higgs', 'tau_values_electron', 'tau_values_muon', @@ -93,8 +92,8 @@ def __fill_defaults__( self ): self.path_to_files = self.current_analysis_path path_to_files = self.path_to_files - # self.path_to_unfolding_histograms = '/hdfs/TopQuarkGroup/run2/unfolding/13TeV/2016/' - self.path_to_unfolding_histograms = 'unfolding/13TeV/' + self.path_to_unfolding_histograms = '/hdfs/TopQuarkGroup/run2/unfolding/13TeV/Moriond2017/' + # self.path_to_unfolding_histograms = 'unfolding/13TeV/' path_to_unfolding_histograms = self.path_to_unfolding_histograms self.luminosity = self.luminosities[self.centre_of_mass_energy] @@ -132,8 +131,8 @@ def __fill_defaults__( self ): self.analysis_types = { - 'electron' :'EPlusJets', - 'muon' :'MuPlusJets', + 'electron' : 'EPlusJets', + 'muon' : 'MuPlusJets', 'combined' : 'combined', } @@ -164,8 +163,8 @@ def __fill_defaults__( self ): # self.data_file_muon = 
path_to_files + 'data_muon_tree.root' # self.data_file_electron = path_to_files + 'data_electron_tree.root' - self.data_file_muon = '/hdfs/TopQuarkGroup/db0268/0.1.2/atOutput/combined/data_muon_tree.root' - self.data_file_electron = '/hdfs/TopQuarkGroup/db0268/0.1.2/atOutput/combined/data_electron_tree.root' + self.data_file_muon = '/hdfs/TopQuarkGroup/ec6821/1.0.0/atOutput/combined/data_muon_tree.root' + self.data_file_electron = '/hdfs/TopQuarkGroup/ec6821/1.0.0/atOutput/combined/data_electron_tree.root' self.higgs_file = path_to_files + 'central/TTH_Inclusive_M-125' + middle + '.root' @@ -208,7 +207,7 @@ def __fill_defaults__( self ): 'JES_up', 'JES_down', 'JER_up', - 'JER_down', + 'JER_down', 'BJet_up', 'BJet_down', @@ -247,22 +246,23 @@ def __fill_defaults__( self ): # Rename to generator_measurements? self.generator_systematics = [ - 'TTJets_scaleup', - 'TTJets_scaledown', 'TTJets_massup', 'TTJets_massdown', 'TTJets_hadronisation', - 'TTJets_NLOgenerator', 'TTJets_factorisationup', 'TTJets_factorisationdown', 'TTJets_renormalisationup', 'TTJets_renormalisationdown', 'TTJets_combinedup', 'TTJets_combineddown', - 'TTJets_alphaSup', - 'TTJets_alphaSdown', + 'TTJets_fsrup', + 'TTJets_fsrdown', + 'TTJets_isrup', + 'TTJets_isrdown', + 'TTJets_ueup', + 'TTJets_uedown' ] - + self.measurements = self.normalisation_systematics + self.generator_systematics self.list_of_systematics = { @@ -276,13 +276,13 @@ def __fill_defaults__( self ): # Generator Uncertainties 'TTJets_mass' : ['TTJets_massup', 'TTJets_massdown'], 'TTJets_hadronisation' : ['TTJets_hadronisation', 'TTJets_hadronisation'], - 'TTJets_ue' : ['TTJets_ueup', 'TTJets_uedown'], + 'TTJets_ue' : ['TTJets_ueup', 'TTJets_uedown'], + 'TTJets_envelope' : ['TTJets_factorisationup', 'TTJets_factorisationdown', 'TTJets_renormalisationup', 'TTJets_renormalisationdown', 'TTJets_combinedup', 'TTJets_combineddown', 'TTJets_fsrup', 'TTJets_fsrdown', - 'TTJets_isrup', 'TTJets_isrdown', - ], + 'TTJets_isrup', 
'TTJets_isrdown'], # Event Reweighting 'PileUp' : ['PileUp_up', 'PileUp_down'], @@ -301,55 +301,46 @@ def __fill_defaults__( self ): 'TauEn' : ['TauEnUp', 'TauEnDown'], 'UnclusteredEn' : ['UnclusteredEnUp', 'UnclusteredEnDown'], # Top Reweighting Uncertainties - # 'Top_pt_reweight' : ['Top_pt_reweight_up', 'Top_pt_reweight_down'], + 'Top_pt_reweight' : ['Top_pt_reweight', 'Top_pt_reweight'], # 'Top_eta_reweight' : ['Top_eta_reweight_up', 'Top_eta_reweight_down'], } # now fill in the centre of mass dependent values self.__fill_defaults_13TeV__() - - self.generator_mcsamples = [ - 'PowhegPythia8', - 'powhegHerwigpp', - 'amc', - 'amcatnloHerwigpp', - 'madgraph' - ] - - self.rate_changing_systematics_values = {} - for systematic in self.rate_changing_systematics.keys(): - affected_samples = XSectionConfig.samples # all samples - if 'SingleTop' in systematic: - affected_samples = ['SingleTop'] - if 'TTJet' in systematic: - affected_samples = ['TTJet'] - if 'VJets' in systematic: - affected_samples = ['V+Jets'] - if 'QCD' in systematic: - affected_samples = ['QCD'] - - sp = dps.utils.measurement.Systematic( - systematic + '+', - # systematic + '_up', - stype = dps.utils.measurement.Systematic.RATE, - affected_samples = affected_samples, - scale = 1 + self.rate_changing_systematics[systematic], - ) - scale = 1 - self.rate_changing_systematics[systematic] - if scale <= 0: scale = 10e-5 - - sm = dps.utils.measurement.Systematic( - systematic + '-', - # systematic + '_down', - stype = dps.utils.measurement.Systematic.RATE, - affected_samples = affected_samples, - scale = scale, - ) - self.rate_changing_systematics_values[sp.name] = sp - self.rate_changing_systematics_values[sm.name] = sm - - self.rate_changing_systematics_names = self.rate_changing_systematics_values.keys() + # self.rate_changing_systematics_values = {} + # for systematic in self.rate_changing_systematics.keys(): + # affected_samples = XSectionConfig.samples # all samples + # if 'SingleTop' in systematic: + 
# affected_samples = ['SingleTop'] + # if 'TTJet' in systematic: + # affected_samples = ['TTJet'] + # if 'VJets' in systematic: + # affected_samples = ['V+Jets'] + # if 'QCD' in systematic: + # affected_samples = ['QCD'] + + # sp = dps.utils.measurement.Systematic( + # systematic + '+', + # # systematic + '_up', + # stype = dps.utils.measurement.Systematic.RATE, + # affected_samples = affected_samples, + # scale = 1 + self.rate_changing_systematics[systematic], + # ) + # scale = 1 - self.rate_changing_systematics[systematic] + # if scale <= 0: scale = 10e-5 + + # sm = dps.utils.measurement.Systematic( + # systematic + '-', + # # systematic + '_down', + # stype = dps.utils.measurement.Systematic.RATE, + # affected_samples = affected_samples, + # scale = scale, + # ) + # self.rate_changing_systematics_values[sp.name] = sp + # self.rate_changing_systematics_values[sm.name] = sm + + # self.rate_changing_systematics_names = self.rate_changing_systematics_values.keys() self.topMass_systematics = [ 'TTJets_massup', 'TTJets_massdown'] @@ -371,7 +362,6 @@ def __fill_defaults__( self ): self.generator_systematic_vjets_templates[systematic] = tmp # categories_and_prefixes = self.categories_and_prefixes - generator_mcsamples = self.generator_mcsamples # Used in 01 # self.general_trees = { @@ -386,17 +376,13 @@ def __fill_defaults__( self ): category: path_to_files + 'QCD_Electron_tree.root' for category in self.normalisation_systematics} self.muon_QCD_MC_trees = { category: path_to_files + 'QCD_Muon_tree.root' for category in self.normalisation_systematics} - self.ttbar_generator_trees = { - category: path_to_files + 'TTJets_' + category + '_tree.root' for category in generator_mcsamples} - # Need with generator_mcsamples???? 
self.ttbar_amc_trees = path_to_files + '/TTJets_amc_tree.root' self.ttbar_madgraph_trees = path_to_files + '/TTJets_madgraph_tree.root' - self.ttbar_powhegpythia8_trees = path_to_files + '/TTJets_powhegPythia8_tree.root' + self.ttbar_powhegpythia8_trees = path_to_files + '/TTJets_PowhegPythia8_tree.root' self.ttbar_powhegherwigpp_trees = path_to_files + '/TTJets_powhegHerwigpp_tree.root' self.ttbar_amcatnloherwigpp_trees = path_to_files + '/TTJets_amcatnloHerwigpp_tree.root' - self.ttbar_scaleup_trees = path_to_files + '/TTJets_PowhegPythia8_scaleup_tree.root' - self.ttbar_scaledown_trees = path_to_files + '/TTJets_PowhegPythia8_scaledown_tree.root' + self.ttbar_mtop1695_trees = path_to_files + '/TTJets_PowhegPythia8_mtop1695_tree.root' self.ttbar_mtop1755_trees = path_to_files + '/TTJets_PowhegPythia8_mtop1755_tree.root' self.ttbar_jesup_trees = path_to_files + '/TTJets_PowhegPythia8_plusJES_tree.root' @@ -404,6 +390,15 @@ def __fill_defaults__( self ): self.ttbar_jerup_trees = path_to_files + '/TTJets_PowhegPythia8_plusJER_tree.root' self.ttbar_jerdown_trees = path_to_files + '/TTJets_PowhegPythia8_minusJER_tree.root' + # Underlying Event trees + self.ttbar_ueup_trees = path_to_files + '/TTJets_powhegPythia8_up_tree.root' + self.ttbar_uedown_trees = path_to_files + '/TTJets_powhegPythia8_down_tree.root' + # Initial(Final) State Radiation event Trees + self.ttbar_isrup_trees = path_to_files + '/TTJets_powhegPythia8_isrup_tree.root' + self.ttbar_isrdown_trees = path_to_files + '/TTJets_powhegPythia8_isrdown_tree.root' + self.ttbar_fsrup_trees = path_to_files + '/TTJets_powhegPythia8_fsrup_tree.root' + self.ttbar_fsrdown_trees = path_to_files + '/TTJets_powhegPythia8_fsrdown_tree.root' + # Needed? 
self.data_muon_category_templates = { @@ -440,8 +435,7 @@ def __fill_defaults__( self ): self.unfolding_central = self.unfolding_powheg_pythia8 - self.unfolding_ptreweight_up = path_to_unfolding_histograms + 'unfolding_TTJets_%dTeV_asymmetric_withTopPtReweighting_up.root' % self.centre_of_mass_energy - self.unfolding_ptreweight_down = path_to_unfolding_histograms + 'unfolding_TTJets_%dTeV_asymmetric_withTopPtReweighting_down.root' % self.centre_of_mass_energy + self.unfolding_ptreweight = path_to_unfolding_histograms + 'unfolding_TTJets_%dTeV_asymmetric_withTopPtReweighting.root' % self.centre_of_mass_energy self.unfolding_renormalisation_down = path_to_unfolding_histograms + 'unfolding_TTJets_%dTeV_asymmetric_05muR1muF.root' % self.centre_of_mass_energy self.unfolding_renormalisation_up = path_to_unfolding_histograms + 'unfolding_TTJets_%dTeV_asymmetric_2muR1muF.root' % self.centre_of_mass_energy @@ -449,8 +443,7 @@ def __fill_defaults__( self ): self.unfolding_factorisation_up = path_to_unfolding_histograms + 'unfolding_TTJets_%dTeV_asymmetric_1muR2muF.root' % self.centre_of_mass_energy self.unfolding_combined_down = path_to_unfolding_histograms + 'unfolding_TTJets_%dTeV_asymmetric_05muR05muF.root' % self.centre_of_mass_energy self.unfolding_combined_up = path_to_unfolding_histograms + 'unfolding_TTJets_%dTeV_asymmetric_2muR2muF.root' % self.centre_of_mass_energy - - self.unfolding_fsr_down = path_to_unfolding_histograms + 'unfolding_TTJets_%dTeV_fsrup_asymmetric.root' % self.centre_of_mass_energy + self.unfolding_fsr_down = path_to_unfolding_histograms + 'unfolding_TTJets_%dTeV_fsrdown_asymmetric.root' % self.centre_of_mass_energy self.unfolding_fsr_up = path_to_unfolding_histograms + 'unfolding_TTJets_%dTeV_fsrup_asymmetric.root' % self.centre_of_mass_energy self.unfolding_isr_down = path_to_unfolding_histograms + 'unfolding_TTJets_%dTeV_isrdown_asymmetric.root' % self.centre_of_mass_energy self.unfolding_isr_up = path_to_unfolding_histograms + 
'unfolding_TTJets_%dTeV_isrup_asymmetric.root' % self.centre_of_mass_energy @@ -482,7 +475,6 @@ def __fill_defaults__( self ): self.unfolding_PUSystematic_up = path_to_unfolding_histograms + 'unfolding_TTJets_%dTeV_pileupUp_asymmetric.root' % self.centre_of_mass_energy self.unfolding_PUSystematic_down = path_to_unfolding_histograms + 'unfolding_TTJets_%dTeV_pileupDown_asymmetric.root' % self.centre_of_mass_energy - self.pdfWeightMin = 0 self.pdfWeightMax = 100 self.unfolding_pdfweights = {index : path_to_unfolding_histograms + 'unfolding_TTJets_%dTeV_asymmetric_pdfWeight_%d.root' % (self.centre_of_mass_energy, index) for index in range( self.pdfWeightMin, self.pdfWeightMax )} @@ -589,7 +581,7 @@ def __fill_defaults_13TeV__( self ): middle = self.middle path_to_files = self.path_to_files - self.new_luminosity = 36260 + self.new_luminosity = 36459 self.ttbar_xsection = 831.76 # pb self.rate_changing_systematics = {#TODO check where this is used diff --git a/dps/experimental/howMuchLumiInNtuples.py b/dps/experimental/howMuchLumiInNtuples.py index d7905f7d..b8d89106 100644 --- a/dps/experimental/howMuchLumiInNtuples.py +++ b/dps/experimental/howMuchLumiInNtuples.py @@ -34,5 +34,6 @@ # print run,lumi # print outputJson with open('json.txt', 'w') as outfile: - print json.dumps(outputJson) - json.dump(outputJson, outfile) \ No newline at end of file + print json.dumps(outputJson) + json.dump(outputJson, outfile) + diff --git a/dps/utils/systematic.py b/dps/utils/systematic.py index d1cb0c20..89aaebe1 100644 --- a/dps/utils/systematic.py +++ b/dps/utils/systematic.py @@ -530,7 +530,7 @@ def make_covariance_plot( options, systematic, matrix, label='Covariance' ): ) make_folder_if_not_exists(covariance_matrix_output_path) - x_binning = array ( 'f' , bin_edges_vis[variable] ) + x_binning = array ( 'f', bin_edges_vis[variable] ) y_binning = array ( 'f', bin_edges_vis[variable] ) n_xbins = len( x_binning ) - 1 n_ybins = len( y_binning ) - 1 From 
fa78be767bf2f9bf3ae24a18d59151d08c8eedde Mon Sep 17 00:00:00 2001 From: Douglas Burns Date: Tue, 10 Jan 2017 08:49:48 +0000 Subject: [PATCH 74/90] Fix argument bug in 01 submission --- dps/experimental/condor/01b/run01_forAllOptions.py | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/dps/experimental/condor/01b/run01_forAllOptions.py b/dps/experimental/condor/01b/run01_forAllOptions.py index d34e29a9..b37c9f7c 100644 --- a/dps/experimental/condor/01b/run01_forAllOptions.py +++ b/dps/experimental/condor/01b/run01_forAllOptions.py @@ -27,7 +27,7 @@ # 'ttbarRap', ] -jobOptions = ['-v %s -i config/measurements/background_subtraction' % ( var ) for var in vars ] +jobOptions = ['-v %s' % ( var ) for var in vars ] parser = OptionParser("Merge histogram files on DICE") parser.add_option("-n", dest="jobNumber", default=-1, type='int', From cbb68f80e342b02c53435cfde105d59839b9bc84 Mon Sep 17 00:00:00 2001 From: Douglas Burns Date: Tue, 10 Jan 2017 14:30:50 +0000 Subject: [PATCH 75/90] Rate changing systematic now only apply to associate sample - not all of them --- dps/analysis/xsection/create_measurement.py | 38 +++++++++++++-------- 1 file changed, 23 insertions(+), 15 deletions(-) diff --git a/dps/analysis/xsection/create_measurement.py b/dps/analysis/xsection/create_measurement.py index 241cecde..35063fa9 100644 --- a/dps/analysis/xsection/create_measurement.py +++ b/dps/analysis/xsection/create_measurement.py @@ -164,24 +164,32 @@ def get_sample_info(options, xsec_config, sample): # Generator Scale (Rate) sample_info["scale"]=1.0 - generator_scale = xsec_config.rate_changing_systematics['V+Jets_cross_section'] - if options['category'] == 'V+Jets_cross_section+': - sample_info["scale"] = 1.0 + 1.0*generator_scale - elif options['category'] == 'V+Jets_cross_section-': - sample_info["scale"] = 1.0 - 1.0*generator_scale - generator_scale = xsec_config.rate_changing_systematics['SingleTop_cross_section'] - if options['category'] == 
'SingleTop_cross_section+': - sample_info["scale"] = 1.0 + 1.0*generator_scale - elif options['category'] == 'SingleTop_cross_section-': - sample_info["scale"] = 1.0 - 1.0*generator_scale - generator_scale = xsec_config.rate_changing_systematics['QCD_cross_section'] - if options['category'] == 'QCD_cross_section+': - sample_info["scale"] = 1.0 + 1.0*generator_scale - elif options['category'] == 'QCD_cross_section-': - sample_info["scale"] = 1.0 - 1.0*generator_scale + + if sample == 'V+Jets': + generator_scale = xsec_config.rate_changing_systematics['V+Jets_cross_section'] + if options['category'] == 'V+Jets_cross_section+': + sample_info["scale"] = 1.0 + 1.0*generator_scale + elif options['category'] == 'V+Jets_cross_section-': + sample_info["scale"] = 1.0 - 1.0*generator_scale + + if sample == 'SingleTop': + generator_scale = xsec_config.rate_changing_systematics['SingleTop_cross_section'] + if options['category'] == 'SingleTop_cross_section+': + sample_info["scale"] = 1.0 + 1.0*generator_scale + elif options['category'] == 'SingleTop_cross_section-': + sample_info["scale"] = 1.0 - 1.0*generator_scale + + if sample == 'QCD': + generator_scale = xsec_config.rate_changing_systematics['QCD_cross_section'] + if options['category'] == 'QCD_cross_section+': + sample_info["scale"] = 1.0 + 1.0*generator_scale + elif options['category'] == 'QCD_cross_section-': + sample_info["scale"] = 1.0 - 1.0*generator_scale + # scaling will always have some non zero value if sample_info["scale"] <= 0.0001: sample_info["scale"] = 0.0001 + # Weight branches (Shape) weight_branches = [] if sample == 'data': From 9759c7e7bc81082cd64b17c0021dc317ce5809e2 Mon Sep 17 00:00:00 2001 From: Douglas Burns Date: Tue, 10 Jan 2017 14:33:40 +0000 Subject: [PATCH 76/90] Fix some xsection calc functions, Add actual data before fake subtraction --- .../xsection/02_unfold_and_measure.py | 153 +++++++++--------- 1 file changed, 73 insertions(+), 80 deletions(-) diff --git 
a/dps/analysis/xsection/02_unfold_and_measure.py b/dps/analysis/xsection/02_unfold_and_measure.py index c044f18b..cf1a1650 100644 --- a/dps/analysis/xsection/02_unfold_and_measure.py +++ b/dps/analysis/xsection/02_unfold_and_measure.py @@ -38,7 +38,6 @@ def get_unfolding_files(measurement_config): # unfolding_files['file_for_alphaSdown'] = File( measurement_config.unfolding_alphaS_down, 'read' ) # unfolding_files['file_for_alphaSup'] = File( measurement_config.unfolding_alphaS_up, 'read' ) - unfolding_files['file_for_isrdown'] = File( measurement_config.unfolding_isr_down, 'read' ) unfolding_files['file_for_isrup'] = File( measurement_config.unfolding_isr_up, 'read' ) unfolding_files['file_for_fsrdown'] = File( measurement_config.unfolding_fsr_down, 'read' ) @@ -46,7 +45,6 @@ def get_unfolding_files(measurement_config): unfolding_files['file_for_uedown'] = File( measurement_config.unfolding_ue_down, 'read' ) unfolding_files['file_for_ueup'] = File( measurement_config.unfolding_ue_up, 'read' ) - unfolding_files['file_for_massdown'] = File( measurement_config.unfolding_mass_down, 'read' ) unfolding_files['file_for_massup'] = File( measurement_config.unfolding_mass_up, 'read' ) @@ -87,11 +85,16 @@ def get_unfolding_files(measurement_config): def unfold_results( results, category, channel, tau_value, h_truth, h_measured, h_response, h_fakes, method, visiblePS ): global variable, path_to_DF, args + edges = reco_bin_edges_full[variable] if visiblePS: edges = reco_bin_edges_vis[variable] + h_data = value_error_tuplelist_to_hist( results, edges ) + # Rebin original TTJet_Measured in terms of final binning (h_data is later replaced with h_data_no_fakes) + h_data_rebinned = h_data.rebinned(2) + # Remove fakes before unfolding h_data_no_fakes = removeFakes( h_measured, h_fakes, h_data ) @@ -109,19 +112,10 @@ def unfold_results( results, category, channel, tau_value, h_truth, h_measured, # print "h_response bin edges : ", h_response # print "h_unfolded_data bin edges : ", 
h_unfolded_data h_data_no_fakes = h_data_no_fakes.rebinned(2) - h_data = h_data.rebinned(2) del unfolding - return hist_to_value_error_tuplelist( h_data ), hist_to_value_error_tuplelist( h_unfolded_data ), hist_to_value_error_tuplelist( h_data_no_fakes ) + return hist_to_value_error_tuplelist( h_data_rebinned ), hist_to_value_error_tuplelist( h_unfolded_data ), hist_to_value_error_tuplelist( h_data_no_fakes ) -# def data_covariance_matrix( data ): -# values = list( data ) -# get_bin_error = data.GetBinError -# cov_matrix = Hist2D( len( values ), -10, 10, len( values ), -10, 10, type = 'D' ) -# for bin_i in range( len( values ) ): -# error = get_bin_error( bin_i + 1 ) -# cov_matrix.SetBinContent( bin_i + 1, bin_i + 1, error * error ) -# return cov_matrix def get_unfolded_normalisation( TTJet_normalisation_results, category, channel, tau_value, visiblePS ): global com, luminosity, ttbar_xsection, method, variable, path_to_DF @@ -143,7 +137,7 @@ def get_unfolded_normalisation( TTJet_normalisation_results, category, channel, 'TTJets_isrdown' : unfolding_files['file_for_isrdown'], 'TTJets_isrup' : unfolding_files['file_for_isrup'], - 'TTJets_fsrdown' : unfolding_files['file_for_fsrdown'], + # 'TTJets_fsrdown' : unfolding_files['file_for_fsrdown'], 'TTJets_fsrup' : unfolding_files['file_for_fsrup'], 'TTJets_uedown' : unfolding_files['file_for_uedown'], 'TTJets_ueup' : unfolding_files['file_for_ueup'], @@ -161,7 +155,6 @@ def get_unfolded_normalisation( TTJet_normalisation_results, category, channel, 'LightJet_down' : unfolding_files['file_for_lightjetdown'], 'TTJets_hadronisation' : unfolding_files['file_for_powheg_herwig'], - # 'TTJets_NLOgenerator' : unfolding_files['file_for_amcatnlo'], 'ElectronEnUp' : unfolding_files['file_for_ElectronEnUp'], 'ElectronEnDown' : unfolding_files['file_for_ElectronEnDown'], @@ -193,7 +186,7 @@ def get_unfolded_normalisation( TTJet_normalisation_results, category, channel, inputfile = files_for_systematics[category], variable = 
variable, channel = channel, - com = com, + centre_of_mass = com, ttbar_xsection = ttbar_xsection, luminosity = luminosity, load_fakes = True, @@ -206,7 +199,7 @@ def get_unfolded_normalisation( TTJet_normalisation_results, category, channel, inputfile = unfolding_files['files_for_pdfs'][category], variable = variable, channel = channel, - com = com, + centre_of_mass = com, ttbar_xsection = ttbar_xsection, luminosity = luminosity, load_fakes = True, @@ -218,7 +211,7 @@ def get_unfolded_normalisation( TTJet_normalisation_results, category, channel, inputfile = unfolding_files['file_for_unfolding'], variable = variable, channel = channel, - com = com, + centre_of_mass = com, ttbar_xsection = ttbar_xsection, luminosity = luminosity, load_fakes = True, @@ -249,80 +242,80 @@ def get_unfolded_normalisation( TTJet_normalisation_results, category, channel, # Return truth of different generators for comparison to data in 04 if category == 'central': h_truth_massdown, _, _, _ = get_unfold_histogram_tuple( - inputfile = file_for_massdown, + inputfile = unfolding_files['file_for_massdown'], variable = variable, channel = channel, - com = com, + centre_of_mass = com, ttbar_xsection = ttbar_xsection, luminosity = luminosity, load_fakes = True, visiblePS = visiblePS, ) h_truth_massup, _, _, _ = get_unfold_histogram_tuple( - inputfile = file_for_massup, - variable = variable, - channel = channel, - com = com, - ttbar_xsection = ttbar_xsection, - luminosity = luminosity, - load_fakes = True, - visiblePS = visiblePS, - ) - h_truth_fsrdown, _, _, _ = get_unfold_histogram_tuple( - inputfile = file_for_fsrdown, + inputfile = unfolding_files['file_for_massup'], variable = variable, channel = channel, - com = com, + centre_of_mass = com, ttbar_xsection = ttbar_xsection, luminosity = luminosity, load_fakes = True, visiblePS = visiblePS, ) + # h_truth_fsrdown, _, _, _ = get_unfold_histogram_tuple( + # inputfile = unfolding_files['file_for_fsrdown'], + # variable = variable, + # channel = 
channel, + # centre_of_mass = com, + # ttbar_xsection = ttbar_xsection, + # luminosity = luminosity, + # load_fakes = True, + # visiblePS = visiblePS, + # ) h_truth_fsrup, _, _, _ = get_unfold_histogram_tuple( - inputfile = file_for_fsrup, + inputfile = unfolding_files['file_for_fsrup'], variable = variable, channel = channel, - com = com, + centre_of_mass = com, ttbar_xsection = ttbar_xsection, luminosity = luminosity, load_fakes = True, visiblePS = visiblePS, ) h_truth_isrdown, _, _, _ = get_unfold_histogram_tuple( - inputfile = file_for_isrdown, + inputfile = unfolding_files['file_for_isrdown'], variable = variable, channel = channel, - com = com, + centre_of_mass = com, ttbar_xsection = ttbar_xsection, luminosity = luminosity, load_fakes = True, visiblePS = visiblePS, ) h_truth_isrup, _, _, _ = get_unfold_histogram_tuple( - inputfile = file_for_isrup, + inputfile = unfolding_files['file_for_isrup'], variable = variable, channel = channel, - com = com, + centre_of_mass = com, ttbar_xsection = ttbar_xsection, luminosity = luminosity, load_fakes = True, visiblePS = visiblePS, ) h_truth_uedown, _, _, _ = get_unfold_histogram_tuple( - inputfile = file_for_uedown, + inputfile = unfolding_files['file_for_uedown'], variable = variable, channel = channel, - com = com, + centre_of_mass = com, ttbar_xsection = ttbar_xsection, luminosity = luminosity, load_fakes = True, visiblePS = visiblePS, ) h_truth_ueup, _, _, _ = get_unfold_histogram_tuple( - inputfile = file_for_ueup, + inputfile = unfolding_files['file_for_ueup'], variable = variable, channel = channel, - com = com, + centre_of_mass = com, ttbar_xsection = ttbar_xsection, luminosity = luminosity, load_fakes = True, @@ -330,40 +323,40 @@ def get_unfolded_normalisation( TTJet_normalisation_results, category, channel, ) h_truth_powhegPythia8, _, _, _ = get_unfold_histogram_tuple( - inputfile = file_for_powhegPythia8, + inputfile = unfolding_files['file_for_powhegPythia8'], variable = variable, channel = channel, - com 
= com, + centre_of_mass = com, ttbar_xsection = ttbar_xsection, luminosity = luminosity, load_fakes = True, visiblePS = visiblePS, ) # h_truth_amcatnlo, _, _, _ = get_unfold_histogram_tuple( - # inputfile = file_for_amcatnlo, + # inputfile = unfolding_files['file_for_amcatnlo'], # variable = variable, # channel = channel, - # com = com, + # centre_of_mass = com, # ttbar_xsection = ttbar_xsection, # luminosity = luminosity, # load_fakes = True, # visiblePS = visiblePS, # ) # h_truth_madgraphMLM, _, _, _ = get_unfold_histogram_tuple( - # inputfile = file_for_madgraphMLM, + # inputfile = unfolding_files['file_for_madgraphMLM'], # variable = variable, # channel = channel, - # com = com, + # centre_of_mass = com, # ttbar_xsection = ttbar_xsection, # luminosity = luminosity, # load_fakes = True, # visiblePS = visiblePS, # ) h_truth_powheg_herwig, _, _, _ = get_unfold_histogram_tuple( - inputfile = file_for_powheg_herwig, + inputfile = unfolding_files['file_for_powheg_herwig'], variable = variable, channel = channel, - com = com, + centre_of_mass = com, ttbar_xsection = ttbar_xsection, luminosity = luminosity, load_fakes = True, @@ -380,7 +373,7 @@ def get_unfolded_normalisation( TTJet_normalisation_results, category, channel, normalisation_unfolded['massup'] = hist_to_value_error_tuplelist( h_truth_massup ) normalisation_unfolded['isrdown'] = hist_to_value_error_tuplelist( h_truth_isrdown ) normalisation_unfolded['isrup'] = hist_to_value_error_tuplelist( h_truth_isrup ) - normalisation_unfolded['fsrdown'] = hist_to_value_error_tuplelist( h_truth_fsrdown ) + # normalisation_unfolded['fsrdown'] = hist_to_value_error_tuplelist( h_truth_fsrdown ) normalisation_unfolded['fsrup'] = hist_to_value_error_tuplelist( h_truth_fsrup ) normalisation_unfolded['uedown'] = hist_to_value_error_tuplelist( h_truth_uedown ) normalisation_unfolded['ueup'] = hist_to_value_error_tuplelist( h_truth_ueup ) @@ -460,11 +453,11 @@ def calculate_xsections( normalisation, category, channel ): 
luminosity, branching_ratio ) - xsection_unfolded['fsrdown'] = calculate_xsection( - normalisation['fsrdown'], - luminosity, - branching_ratio - ) + # xsection_unfolded['fsrdown'] = calculate_xsection( + # normalisation['fsrdown'], + # luminosity, + # branching_ratio + # ) xsection_unfolded['fsrup'] = calculate_xsection( normalisation['fsrup'], luminosity, @@ -547,8 +540,8 @@ def calculate_normalised_xsections( normalisation, category, channel, normalise_ binWidths[variable], normalise_to_one, ) - normalised_xsection['isrup'] = calculate_normalised_xsection( - normalisation['isrup'], + normalised_xsection['isrdown'] = calculate_normalised_xsection( + normalisation['isrdown'], binWidths[variable], normalise_to_one, ) @@ -557,18 +550,18 @@ def calculate_normalised_xsections( normalisation, category, channel, normalise_ binWidths[variable], normalise_to_one, ) + # normalised_xsection['fsrdown'] = calculate_normalised_xsection( + # normalisation['fsrdown'], + # binWidths[variable], + # normalise_to_one, + # ) normalised_xsection['fsrup'] = calculate_normalised_xsection( normalisation['fsrup'], binWidths[variable], normalise_to_one, ) - normalised_xsection['fsrup'] = calculate_normalised_xsection( - normalisation['fsrup'], - binWidths[variable], - normalise_to_one, - ) - normalised_xsection['ueup'] = calculate_normalised_xsection( - normalisation['ueup'], + normalised_xsection['uedown'] = calculate_normalised_xsection( + normalisation['uedown'], binWidths[variable], normalise_to_one, ) @@ -647,7 +640,7 @@ def parse_arguments(): unfolding_files = get_unfolding_files(measurement_config) path_to_DF = '{path}/{com}TeV/{variable}/{phase_space}/'.format( path = args.path, - com = measurement_config.com_energy, + com = com, variable = variable, phase_space = phase_space, ) @@ -667,7 +660,7 @@ def parse_arguments(): continue if ( variable in measurement_config.variables_no_met ) and (category in measurement_config.met_specific_systematics): continue - print 'Unfolding category 
"%s"' % category + print 'Unfolding category {}'.format(category) # read normalisation results from JSON electron_file = path_to_DF + '/' + category + '/normalisation_electron.txt' @@ -706,7 +699,7 @@ def parse_arguments(): # normalisation_results_combined = combine_complex_results(normalisation_results_electron, normalisation_results_muon) TTJet_normalisation_results_electron = normalisation_results_electron['TTJet'] TTJet_normalisation_results_muon = normalisation_results_muon['TTJet'] - TTJet_normalisation_results_combined = normalisation_results_combined['TTJet'] + # TTJet_normalisation_results_combined = normalisation_results_combined['TTJet'] # # get unfolded normalisations and xsections unfolded_normalisation_electron = {} @@ -743,19 +736,19 @@ def parse_arguments(): calculate_normalised_xsections( unfolded_normalisation_muon, category, channel ) calculate_normalised_xsections( unfolded_normalisation_muon, category, channel , True ) - # # Results where the channels are combined before unfolding (the 'combined in the response matrix') - # channel = 'combinedBeforeUnfolding' - # unfolded_normalisation_combinedBeforeUnfolding = get_unfolded_normalisation( - # TTJet_normalisation_results_combined, - # category, - # 'combined', - # tau_value=tau_value_combined, - # visiblePS=visiblePS, - # ) - # # measure xsection - # calculate_xsections( unfolded_normalisation_combinedBeforeUnfolding, category, channel ) - # calculate_normalised_xsections( unfolded_normalisation_combinedBeforeUnfolding, category, channel ) - # calculate_normalised_xsections( unfolded_normalisation_combinedBeforeUnfolding, category, channel , True ) + # # # Results where the channels are combined before unfolding (the 'combined in the response matrix') + # # channel = 'combinedBeforeUnfolding' + # # unfolded_normalisation_combinedBeforeUnfolding = get_unfolded_normalisation( + # # TTJet_normalisation_results_combined, + # # category, + # # 'combined', + # # tau_value=tau_value_combined, + # # 
visiblePS=visiblePS, + # # ) + # # # measure xsection + # # calculate_xsections( unfolded_normalisation_combinedBeforeUnfolding, category, channel ) + # # calculate_normalised_xsections( unfolded_normalisation_combinedBeforeUnfolding, category, channel ) + # # calculate_normalised_xsections( unfolded_normalisation_combinedBeforeUnfolding, category, channel , True ) # Results where the channels are combined after unfolding channel = 'combined' From e630d35e187a167b4841ca985f37e65f2d2b7557 Mon Sep 17 00:00:00 2001 From: Douglas Burns Date: Tue, 10 Jan 2017 14:35:05 +0000 Subject: [PATCH 77/90] Remove TESTNING path --- .../xsection/01_get_ttjet_normalisation.py | 3 +-- .../xsection/03_calculate_systematics.py | 17 ++++++++++++----- .../xsection/04_make_plots_matplotlib.py | 4 ++-- 3 files changed, 15 insertions(+), 9 deletions(-) diff --git a/dps/analysis/xsection/01_get_ttjet_normalisation.py b/dps/analysis/xsection/01_get_ttjet_normalisation.py index 79d1b8bf..b58acf92 100644 --- a/dps/analysis/xsection/01_get_ttjet_normalisation.py +++ b/dps/analysis/xsection/01_get_ttjet_normalisation.py @@ -20,10 +20,9 @@ def main(): # config file template input_template = 'config/measurements/background_subtraction/{com}TeV/{ch}/{var}/{ps}/' + ps = 'FullPS' if args.visiblePS: ps = 'VisiblePS' - else: - ps = 'FullPS' for ch in ['electron', 'muon']: for var in measurement_config.variables: diff --git a/dps/analysis/xsection/03_calculate_systematics.py b/dps/analysis/xsection/03_calculate_systematics.py index a4972253..3d9b5eae 100644 --- a/dps/analysis/xsection/03_calculate_systematics.py +++ b/dps/analysis/xsection/03_calculate_systematics.py @@ -76,7 +76,7 @@ def parse_arguments(): if not visiblePS: phase_space = 'FullPS' - path_to_DF = 'TESTING/{path}/{com}TeV/{variable}/{phase_space}' + path_to_DF = '{path}/{com}TeV/{variable}/{phase_space}' path_to_DF = path_to_DF.format( path = args.path, com = args.CoM, @@ -108,12 +108,19 @@ def parse_arguments(): list_of_systematics = 
all_systematics # If you want different lists of systematics can just do some manipulation here - # for channel in ['electron', 'muon', 'combined', 'combinedBeforeUnfolding']: - for channel in ['muon']: - print("Calculating {0} channel systematic uncertainties : ".format(channel)) + print(list_of_systematics) + + channel = [ + 'electron', + 'muon', + 'combined', + # 'combinedBeforeUnfolding', + ] + for ch in channel: + print("Calculating {0} channel systematic uncertainties : ".format(ch)) # Add channel specific args to list of args - args['channel'] = channel + args['channel'] = ch # Retreive the normalised cross sections, for all groups in list_of_systematics. systematic_normalised_uncertainty, unfolded_systematic_normalised_uncertainty = get_normalised_cross_sections( diff --git a/dps/analysis/xsection/04_make_plots_matplotlib.py b/dps/analysis/xsection/04_make_plots_matplotlib.py index d975f2b7..682fed66 100644 --- a/dps/analysis/xsection/04_make_plots_matplotlib.py +++ b/dps/analysis/xsection/04_make_plots_matplotlib.py @@ -41,7 +41,7 @@ def read_xsection_measurement_results( category, channel ): ''' global path_to_DF, variable, phase_space, method - file_template = 'TESTING/{path}/{category}/{name}_{channel}_{method}{suffix}.txt' + file_template = '{path}/{category}/{name}_{channel}_{method}{suffix}.txt' filename = file_template.format( path = path_to_DF, category = category, @@ -773,7 +773,7 @@ def parse_arguments(): if variable in measurement_config.variables_no_met and category in measurement_config.met_specific_systematics: continue # Read the xsection results from dataframe - histograms_normalised_xsection_different_generators, histograms_normalised_xsection_systematics_shifts = read_xsection_measurement_results( category, channel ) + histograms_normalised_xsection_different_generators, histograms_normalised_xsection_systematics_shifts = read_xsection_measurement_results( category, ch ) histname = 
'{variable}_normalised_xsection_{ch}_{phase_space}_{method}' histname = histname.format( From cbe64d4a7f645a0be9faf472c6c54fddf08ce924 Mon Sep 17 00:00:00 2001 From: Douglas Burns Date: Fri, 13 Jan 2017 15:34:54 +0000 Subject: [PATCH 78/90] update x_0nb_ files --- bin/x_04b_all_vars | 3 ++- bin/x_05b_all_vars | 2 +- 2 files changed, 3 insertions(+), 2 deletions(-) diff --git a/bin/x_04b_all_vars b/bin/x_04b_all_vars index aa8a8380..bdf2158c 100755 --- a/bin/x_04b_all_vars +++ b/bin/x_04b_all_vars @@ -15,7 +15,8 @@ i=0 echo "Visible phase space" for var in MET HT ST WPT lepton_pt abs_lepton_eta NJets; do echo "Plotting diff. x-section for distribution: $var" - nohup time python dps/analysis/xsection/04_make_plots_matplotlib.py --visiblePS --show-generator-ratio -v $var -c 13 -p data/normalisation/background_subtraction/ -o plots/background_subtraction &> logs/04_${var}_plot_Vis_13TeV.log & # -a <--add this option for additional plots + # nohup time python dps/analysis/xsection/04_make_plots_matplotlib.py --visiblePS --show-generator-ratio -v $var -c 13 -p data/normalisation/background_subtraction/ -o plots/background_subtraction &> logs/04_${var}_plot_Vis_13TeV.log & # -a <--add this option for additional plots + nohup time python dps/analysis/xsection/04_make_plots_matplotlib.py --visiblePS -v $var -c 13 -p data/normalisation/background_subtraction/ -o plots/background_subtraction &> logs/04_${var}_plot_Vis_13TeV.log & # -a <--add this option for additional plots let i+=1 shallIwait $i $N_JOBS done diff --git a/bin/x_05b_all_vars b/bin/x_05b_all_vars index 1e5b5875..4e9f9ae5 100755 --- a/bin/x_05b_all_vars +++ b/bin/x_05b_all_vars @@ -20,7 +20,7 @@ echo "Now visible phase space" for var in MET HT ST WPT lepton_pt abs_lepton_eta NJets; do echo "Tabulating diff. 
x-section for distribution: $var" - nohup time python dps/analysis/xsection/05_make_tables.py -v $var -c 13 -p data/normalisation/background_subtraction/ -a --visiblePS -o tables/background_subtraction &> logs/05_${var}_table_13TeV.log & + nohup time python dps/analysis/xsection/05_make_systematic_plots.py -v $var --visiblePS &> logs/05_${var}_table_13TeV.log & let i+=1 if (( $i % N_JOBS == 0 )) then From 9d89dd74795676a934cf88bfc4166a3f51742ce8 Mon Sep 17 00:00:00 2001 From: Douglas Burns Date: Fri, 13 Jan 2017 15:37:35 +0000 Subject: [PATCH 79/90] Add alphaS and matching uncertainties --- .../BLTUnfold/produceUnfoldingHistograms.py | 29 ++++++++++ dps/analysis/BLTUnfold/runJobsCrab.py | 9 +++- .../BLTUnfold/submitBLTUnfold.description | 2 +- .../xsection/02_unfold_and_measure.py | 54 +++++++++++-------- .../xsection/03_calculate_systematics.py | 2 - dps/config/xsection.py | 53 ++++++------------ dps/utils/systematic.py | 10 ++-- 7 files changed, 90 insertions(+), 69 deletions(-) diff --git a/dps/analysis/BLTUnfold/produceUnfoldingHistograms.py b/dps/analysis/BLTUnfold/produceUnfoldingHistograms.py index d6a83e4b..3bfd4b87 100644 --- a/dps/analysis/BLTUnfold/produceUnfoldingHistograms.py +++ b/dps/analysis/BLTUnfold/produceUnfoldingHistograms.py @@ -152,6 +152,11 @@ def parse_arguments(): dest='alphaSWeight', default=-1 ) + parser.add_argument('--matchingWeight', + type=int, + dest='matchingWeight', + default=-1 + ) parser.add_argument('--nGeneratorWeights', type=int, dest='nGeneratorWeights', @@ -198,6 +203,8 @@ def main(): pdfWeight = args.pdfWeight muFmuRWeight = args.muFmuRWeight + alphaSWeight = args.alphaSWeight + matchingWeight = options.matchingWeight # Output file name outputFileName = 'crap.root' @@ -225,6 +232,18 @@ def main(): outputFileName = outputFileDir+'/unfolding_TTJets_%s_asymmetric_05muR1muF.root' % ( energySuffix ) elif muFmuRWeight == 8: outputFileName = outputFileDir+'/unfolding_TTJets_%s_asymmetric_05muR05muF.root' % ( energySuffix ) + + 
elif matchingWeight == 9: + outputFileName = outputFileDir+'/unfolding_TTJets_%s_asymmetric_matching_down.root' % ( energySuffix ) + elif matchingWeight == 18: + outputFileName = outputFileDir+'/unfolding_TTJets_%s_asymmetric_matching_up.root' % ( energySuffix ) + elif matchingWeight >= 0: + outputFileName = outputFileDir+'/unfolding_TTJets_%s_asymmetric_matchingWeight_%i.root' % ( energySuffix, matchingWeight ) + + elif alphaSWeight == 0: + outputFileName = outputFileDir+'/unfolding_TTJets_%s_asymmetric_alphaS_down.root' % ( energySuffix ) + elif alphaSWeight == 1: + outputFileName = outputFileDir+'/unfolding_TTJets_%s_asymmetric_alphaS_up.root' % ( energySuffix ) elif pdfWeight >= 0 and pdfWeight <= 99: outputFileName = outputFileDir+'/unfolding_TTJets_%s_asymmetric_pdfWeight_%i.root' % ( energySuffix, pdfWeight ) elif args.sample != 'central': @@ -455,6 +474,16 @@ def main(): offlineWeight *= branch('muFmuRWeight_%i' % muFmuRWeight) pass + if alphaSWeight == 0 or alphaSWeight == 1: + genWeight *= branch('alphaSWeight_%i' % alphaSWeight) + offlineWeight *= branch('alphaSWeight_%i' % alphaSWeight) + pass + + if matchingWeight >= 0: + genWeight *= branch('matchingWeight_%i' % matchingWeight) + offlineWeight *= branch('matchingWeight_%i' % matchingWeight) + pass + if args.applyTopPtReweighting != 0: ptWeight = calculateTopPtWeight( branch('lepTopPt_parton'), branch('hadTopPt_parton'), args.applyTopPtReweighting) offlineWeight *= ptWeight diff --git a/dps/analysis/BLTUnfold/runJobsCrab.py b/dps/analysis/BLTUnfold/runJobsCrab.py index 28b89215..08e87821 100755 --- a/dps/analysis/BLTUnfold/runJobsCrab.py +++ b/dps/analysis/BLTUnfold/runJobsCrab.py @@ -38,7 +38,14 @@ '--centreOfMassEnergy 13 --muFmuRWeight 6', '--centreOfMassEnergy 13 --muFmuRWeight 8', - # # # Top mass + '--centreOfMassEnergy 13 --alphaSWeight 0', + '--centreOfMassEnergy 13 --alphaSWeight 1', + + # ME-PS matching weight + '--centreOfMassEnergy 13 --matchingWeight 9', + '--centreOfMassEnergy 13 
--matchingWeight 18', + + # # Top mass '--centreOfMassEnergy 13 -s massup', '--centreOfMassEnergy 13 -s massdown', diff --git a/dps/analysis/BLTUnfold/submitBLTUnfold.description b/dps/analysis/BLTUnfold/submitBLTUnfold.description index ecde298e..1ed76567 100644 --- a/dps/analysis/BLTUnfold/submitBLTUnfold.description +++ b/dps/analysis/BLTUnfold/submitBLTUnfold.description @@ -15,4 +15,4 @@ request_memory=500 # use the ENV that is provided getenv = true -queue 138 +queue 142 diff --git a/dps/analysis/xsection/02_unfold_and_measure.py b/dps/analysis/xsection/02_unfold_and_measure.py index cf1a1650..7cc5df44 100644 --- a/dps/analysis/xsection/02_unfold_and_measure.py +++ b/dps/analysis/xsection/02_unfold_and_measure.py @@ -14,7 +14,8 @@ value_error_tuplelist_to_hist from dps.utils.Unfolding import Unfolding, get_unfold_histogram_tuple, removeFakes from dps.utils.ROOT_utils import set_root_defaults -from dps.utils.pandas_utilities import read_tuple_from_file, write_tuple_to_df +from dps.utils.pandas_utilities import read_tuple_from_file, write_tuple_to_df, combine_complex_df + from copy import deepcopy def get_unfolding_files(measurement_config): @@ -35,8 +36,11 @@ def get_unfolding_files(measurement_config): unfolding_files['file_for_factorisationup'] = File( measurement_config.unfolding_factorisation_up, 'read' ) unfolding_files['file_for_combineddown'] = File( measurement_config.unfolding_combined_down, 'read' ) unfolding_files['file_for_combinedup'] = File( measurement_config.unfolding_combined_up, 'read' ) - # unfolding_files['file_for_alphaSdown'] = File( measurement_config.unfolding_alphaS_down, 'read' ) - # unfolding_files['file_for_alphaSup'] = File( measurement_config.unfolding_alphaS_up, 'read' ) + unfolding_files['file_for_alphaSdown'] = File( measurement_config.unfolding_alphaS_down, 'read' ) + unfolding_files['file_for_alphaSup'] = File( measurement_config.unfolding_alphaS_up, 'read' ) + + unfolding_files['file_for_matchingdown'] = File( 
measurement_config.unfolding_matching_down, 'read' ) + unfolding_files['file_for_matchingup'] = File( measurement_config.unfolding_matching_up, 'read' ) unfolding_files['file_for_isrdown'] = File( measurement_config.unfolding_isr_down, 'read' ) unfolding_files['file_for_isrup'] = File( measurement_config.unfolding_isr_up, 'read' ) @@ -45,6 +49,8 @@ def get_unfolding_files(measurement_config): unfolding_files['file_for_uedown'] = File( measurement_config.unfolding_ue_down, 'read' ) unfolding_files['file_for_ueup'] = File( measurement_config.unfolding_ue_up, 'read' ) + unfolding_files['file_for_topPtSystematic'] = File( measurement_config.unfolding_topPtSystematic, 'read' ) + unfolding_files['file_for_massdown'] = File( measurement_config.unfolding_mass_down, 'read' ) unfolding_files['file_for_massup'] = File( measurement_config.unfolding_mass_up, 'read' ) @@ -132,8 +138,11 @@ def get_unfolded_normalisation( TTJet_normalisation_results, category, channel, 'TTJets_renormalisationup' : unfolding_files['file_for_renormalisationup'], 'TTJets_combineddown' : unfolding_files['file_for_combineddown'], 'TTJets_combinedup' : unfolding_files['file_for_combinedup'], - # 'TTJets_alphaSdown' : unfolding_files['file_for_alphaSdown'], - # 'TTJets_alphaSup' : unfolding_files['file_for_alphaSup'], + 'TTJets_alphaSdown' : unfolding_files['file_for_alphaSdown'], + 'TTJets_alphaSup' : unfolding_files['file_for_alphaSup'], + + 'TTJets_matchingdown' : unfolding_files['file_for_matchingdown'], + 'TTJets_matchingup' : unfolding_files['file_for_matchingup'], 'TTJets_isrdown' : unfolding_files['file_for_isrdown'], 'TTJets_isrup' : unfolding_files['file_for_isrup'], @@ -142,6 +151,8 @@ def get_unfolded_normalisation( TTJet_normalisation_results, category, channel, 'TTJets_uedown' : unfolding_files['file_for_uedown'], 'TTJets_ueup' : unfolding_files['file_for_ueup'], + 'TTJets_topPt' : unfolding_files['file_for_topPtSystematic'], + 'JES_down' : unfolding_files['file_for_jesdown'], 'JES_up' : 
unfolding_files['file_for_jesup'], @@ -685,21 +696,18 @@ def parse_arguments(): if category == 'Muon_up' or category == 'Muon_down': normalisation_results_electron = read_tuple_from_file( path_to_DF + '/central/normalisation_electron.txt' ) normalisation_results_muon = read_tuple_from_file( muon_file ) - # normalisation_results_combined = read_tuple_from_file( combined_file ) elif category == 'Electron_up' or category == 'Electron_down': normalisation_results_electron = read_tuple_from_file( electron_file ) normalisation_results_muon = read_tuple_from_file( path_to_DF + '/central/normalisation_muon.txt' ) - # normalisation_results_combined = read_tuple_from_file( combined_file ) else: normalisation_results_electron = read_tuple_from_file( electron_file ) normalisation_results_muon = read_tuple_from_file( muon_file ) - # normalisation_results_combined = read_tuple_from_file( combined_file ) # Combine the normalisations (beforeUnfolding) - # normalisation_results_combined = combine_complex_results(normalisation_results_electron, normalisation_results_muon) + normalisation_results_combined = combine_complex_df(normalisation_results_electron, normalisation_results_muon) TTJet_normalisation_results_electron = normalisation_results_electron['TTJet'] TTJet_normalisation_results_muon = normalisation_results_muon['TTJet'] - # TTJet_normalisation_results_combined = normalisation_results_combined['TTJet'] + TTJet_normalisation_results_combined = normalisation_results_combined['TTJet'] # # get unfolded normalisations and xsections unfolded_normalisation_electron = {} @@ -736,19 +744,19 @@ def parse_arguments(): calculate_normalised_xsections( unfolded_normalisation_muon, category, channel ) calculate_normalised_xsections( unfolded_normalisation_muon, category, channel , True ) - # # # Results where the channels are combined before unfolding (the 'combined in the response matrix') - # # channel = 'combinedBeforeUnfolding' - # # unfolded_normalisation_combinedBeforeUnfolding = 
get_unfolded_normalisation( - # # TTJet_normalisation_results_combined, - # # category, - # # 'combined', - # # tau_value=tau_value_combined, - # # visiblePS=visiblePS, - # # ) - # # # measure xsection - # # calculate_xsections( unfolded_normalisation_combinedBeforeUnfolding, category, channel ) - # # calculate_normalised_xsections( unfolded_normalisation_combinedBeforeUnfolding, category, channel ) - # # calculate_normalised_xsections( unfolded_normalisation_combinedBeforeUnfolding, category, channel , True ) + # Results where the channels are combined before unfolding (the 'combined in the response matrix') + channel = 'combinedBeforeUnfolding' + unfolded_normalisation_combinedBeforeUnfolding = get_unfolded_normalisation( + TTJet_normalisation_results_combined, + category, + 'combined', + tau_value=tau_value_combined, + visiblePS=visiblePS, + ) + # measure xsection + calculate_xsections( unfolded_normalisation_combinedBeforeUnfolding, category, channel ) + calculate_normalised_xsections( unfolded_normalisation_combinedBeforeUnfolding, category, channel ) + calculate_normalised_xsections( unfolded_normalisation_combinedBeforeUnfolding, category, channel , True ) # Results where the channels are combined after unfolding channel = 'combined' diff --git a/dps/analysis/xsection/03_calculate_systematics.py b/dps/analysis/xsection/03_calculate_systematics.py index 3d9b5eae..98dbd30b 100644 --- a/dps/analysis/xsection/03_calculate_systematics.py +++ b/dps/analysis/xsection/03_calculate_systematics.py @@ -108,8 +108,6 @@ def parse_arguments(): list_of_systematics = all_systematics # If you want different lists of systematics can just do some manipulation here - print(list_of_systematics) - channel = [ 'electron', 'muon', diff --git a/dps/config/xsection.py b/dps/config/xsection.py index 459cea89..161acbe1 100644 --- a/dps/config/xsection.py +++ b/dps/config/xsection.py @@ -248,13 +248,18 @@ def __fill_defaults__( self ): self.generator_systematics = [ 'TTJets_massup', 
'TTJets_massdown', + 'TTJets_alphaSup', + 'TTJets_alphaSdown', 'TTJets_hadronisation', + 'TTJets_topPt', 'TTJets_factorisationup', 'TTJets_factorisationdown', 'TTJets_renormalisationup', 'TTJets_renormalisationdown', 'TTJets_combinedup', 'TTJets_combineddown', + 'TTJets_matchingup', + 'TTJets_matchingdown', 'TTJets_fsrup', 'TTJets_fsrdown', 'TTJets_isrup', @@ -277,13 +282,16 @@ def __fill_defaults__( self ): 'TTJets_mass' : ['TTJets_massup', 'TTJets_massdown'], 'TTJets_hadronisation' : ['TTJets_hadronisation', 'TTJets_hadronisation'], 'TTJets_ue' : ['TTJets_ueup', 'TTJets_uedown'], - + 'TTJets_topPt' : ['TTJets_topPt', 'TTJets_topPt'], 'TTJets_envelope' : ['TTJets_factorisationup', 'TTJets_factorisationdown', 'TTJets_renormalisationup', 'TTJets_renormalisationdown', 'TTJets_combinedup', 'TTJets_combineddown', 'TTJets_fsrup', 'TTJets_fsrdown', 'TTJets_isrup', 'TTJets_isrdown'], + 'TTJets_alphaS' : ['TTJets_alphaSup', 'TTJets_alphaSdown'], + 'TTJets_matching' : ['TTJets_matchingup', 'TTJets_matchingdown'], + # Event Reweighting 'PileUp' : ['PileUp_up', 'PileUp_down'], 'JES' : ['JES_up', 'JES_down'], @@ -301,47 +309,13 @@ def __fill_defaults__( self ): 'TauEn' : ['TauEnUp', 'TauEnDown'], 'UnclusteredEn' : ['UnclusteredEnUp', 'UnclusteredEnDown'], # Top Reweighting Uncertainties - 'Top_pt_reweight' : ['Top_pt_reweight', 'Top_pt_reweight'], + # 'Top_pt_reweight' : ['Top_pt_reweight', 'Top_pt_reweight'], # 'Top_eta_reweight' : ['Top_eta_reweight_up', 'Top_eta_reweight_down'], } # now fill in the centre of mass dependent values self.__fill_defaults_13TeV__() - # self.rate_changing_systematics_values = {} - # for systematic in self.rate_changing_systematics.keys(): - # affected_samples = XSectionConfig.samples # all samples - # if 'SingleTop' in systematic: - # affected_samples = ['SingleTop'] - # if 'TTJet' in systematic: - # affected_samples = ['TTJet'] - # if 'VJets' in systematic: - # affected_samples = ['V+Jets'] - # if 'QCD' in systematic: - # affected_samples = 
['QCD'] - - # sp = dps.utils.measurement.Systematic( - # systematic + '+', - # # systematic + '_up', - # stype = dps.utils.measurement.Systematic.RATE, - # affected_samples = affected_samples, - # scale = 1 + self.rate_changing_systematics[systematic], - # ) - # scale = 1 - self.rate_changing_systematics[systematic] - # if scale <= 0: scale = 10e-5 - - # sm = dps.utils.measurement.Systematic( - # systematic + '-', - # # systematic + '_down', - # stype = dps.utils.measurement.Systematic.RATE, - # affected_samples = affected_samples, - # scale = scale, - # ) - # self.rate_changing_systematics_values[sp.name] = sp - # self.rate_changing_systematics_values[sm.name] = sm - - # self.rate_changing_systematics_names = self.rate_changing_systematics_values.keys() - self.topMass_systematics = [ 'TTJets_massup', 'TTJets_massdown'] self.topMasses = [ @@ -449,7 +423,12 @@ def __fill_defaults__( self ): self.unfolding_isr_up = path_to_unfolding_histograms + 'unfolding_TTJets_%dTeV_isrup_asymmetric.root' % self.centre_of_mass_energy self.unfolding_ue_down = path_to_unfolding_histograms + 'unfolding_TTJets_%dTeV_uedown_asymmetric.root' % self.centre_of_mass_energy self.unfolding_ue_up = path_to_unfolding_histograms + 'unfolding_TTJets_%dTeV_ueup_asymmetric.root' % self.centre_of_mass_energy - + self.unfolding_topPtSystematic = path_to_unfolding_histograms + 'unfolding_TTJets_%dTeV_topPtSystematic_asymmetric.root' % self.centre_of_mass_energy + self.unfolding_alphaS_down = path_to_unfolding_histograms + 'unfolding_TTJets_%dTeV_asymmetric_alphaS_down.root' % self.centre_of_mass_energy + self.unfolding_alphaS_up = path_to_unfolding_histograms + 'unfolding_TTJets_%dTeV_asymmetric_alphaS_up.root' % self.centre_of_mass_energy + self.unfolding_matching_down = path_to_unfolding_histograms + 'unfolding_TTJets_%dTeV_asymmetric_matching_down.root' % self.centre_of_mass_energy + self.unfolding_matching_up = path_to_unfolding_histograms + 'unfolding_TTJets_%dTeV_asymmetric_matching_up.root' % 
self.centre_of_mass_energy + self.unfolding_mass_down = path_to_unfolding_histograms + 'unfolding_TTJets_%dTeV_massdown_asymmetric.root' % self.centre_of_mass_energy self.unfolding_mass_up = path_to_unfolding_histograms + 'unfolding_TTJets_%dTeV_massup_asymmetric.root' % self.centre_of_mass_energy self.unfolding_Lepton_down = path_to_unfolding_histograms + 'unfolding_TTJets_%dTeV_leptondown_asymmetric.root' % self.centre_of_mass_energy diff --git a/dps/utils/systematic.py b/dps/utils/systematic.py index 89aaebe1..89c1c9b7 100644 --- a/dps/utils/systematic.py +++ b/dps/utils/systematic.py @@ -346,15 +346,15 @@ def get_symmetrised_systematic_uncertainty(options, norm_syst_unc_x_secs ): normalised_x_sections_with_symmetrised_systematics['BJet'][0] = bJet_tot # Combine PDF with alphaS variations - # alphaS = normalised_x_sections_with_symmetrised_systematics['TTJets_alphaS'][0] - # pdf = normalised_x_sections_with_symmetrised_systematics['PDF'][0] - # pdf_tot = [combine_errors_in_quadrature([e1, e2]) for e1, e2 in zip(alphaS, pdf)] - # normalised_x_sections_with_symmetrised_systematics['PDF'][0] = pdf_tot + alphaS = normalised_x_sections_with_symmetrised_systematics['TTJets_alphaS'][0] + pdf = normalised_x_sections_with_symmetrised_systematics['PDF'][0] + pdf_tot = [combine_errors_in_quadrature([e1, e2]) for e1, e2 in zip(alphaS, pdf)] + normalised_x_sections_with_symmetrised_systematics['PDF'][0] = pdf_tot # TODO combine the signs.... # Now alphaS is combined with pdfs dont need it in dictionary anymore. nor LightJet del normalised_x_sections_with_symmetrised_systematics['LightJet'] - # del normalised_x_sections_with_symmetrised_systematics['TTJets_alphaS'] + del normalised_x_sections_with_symmetrised_systematics['TTJets_alphaS'] return normalised_x_sections_with_symmetrised_systematics From 1615b8d6e2d3985f7bfadf6eebf46ee9b6a3789a Mon Sep 17 00:00:00 2001 From: Douglas Burns Date: Fri, 13 Jan 2017 15:38:43 +0000 Subject: [PATCH 80/90] Fix rate uncertainties. 
Change QCD rate now only applies to QCD sample etc.. --- dps/analysis/unfolding_tests/00_makeConfig.py | 4 +--- 1 file changed, 1 insertion(+), 3 deletions(-) diff --git a/dps/analysis/unfolding_tests/00_makeConfig.py b/dps/analysis/unfolding_tests/00_makeConfig.py index e1e10459..852cd3e7 100644 --- a/dps/analysis/unfolding_tests/00_makeConfig.py +++ b/dps/analysis/unfolding_tests/00_makeConfig.py @@ -3,11 +3,9 @@ from dps.utils.file_utilities import make_folder_if_not_exists com = 13 -fitVars = "M3_angle_bl" - config = XSectionConfig( com ) -make_folder_if_not_exists('config/unfolding/FullPS/') +# make_folder_if_not_exists('config/unfolding/FullPS/') make_folder_if_not_exists('config/unfolding/VisiblePS/') for channel in config.analysis_types.keys(): From 1c54bf32e73efc41bfb7e3422e79611d9fb20696 Mon Sep 17 00:00:00 2001 From: Douglas Burns Date: Fri, 13 Jan 2017 15:39:13 +0000 Subject: [PATCH 81/90] Fix x axis labels on multiple subplots --- dps/analysis/xsection/04_make_plots_matplotlib.py | 8 +++++--- 1 file changed, 5 insertions(+), 3 deletions(-) diff --git a/dps/analysis/xsection/04_make_plots_matplotlib.py b/dps/analysis/xsection/04_make_plots_matplotlib.py index 682fed66..30efc8d4 100644 --- a/dps/analysis/xsection/04_make_plots_matplotlib.py +++ b/dps/analysis/xsection/04_make_plots_matplotlib.py @@ -325,8 +325,8 @@ def make_plots( histograms, category, output_folder, histname, show_ratio = Fals # Title and CMS labels # note: fontweight/weight does not change anything as we use Latex text!!! 
- plt.title( label,loc='right', **CMS.title ) label, channel_label = get_cms_labels( channel ) + plt.title( label,loc='right', **CMS.title ) # Locations of labels logo_location = (0.05, 0.98) prelim_location = (0.05, 0.92) @@ -358,7 +358,7 @@ def make_plots( histograms, category, output_folder, histname, show_ratio = Fals ) # channel text plt.text(channel_location[0], channel_location[1], - r"\emph{{cl}}".format(cl=channel_label), + r"\emph{%s}"%channel_label, transform=axes.transAxes, fontsize=40, verticalalignment='top', @@ -394,7 +394,9 @@ def make_plots( histograms, category, output_folder, histname, show_ratio = Fals x_label = '${}$'.format(variables_latex[variable]) if variable in ['HT', 'ST', 'MET', 'WPT']: x_label += ' [GeV]' - plt.xlabel( x_label, CMS.x_axis_title ) + + if not show_generator_ratio: + plt.xlabel( x_label, CMS.x_axis_title ) y_label = '$\\frac{\\textrm{pred.}}{\\textrm{data}}$' plt.ylabel( y_label, CMS.y_axis_title ) From 6852439aab5fe5c651c93d898abf1c7794161d3b Mon Sep 17 00:00:00 2001 From: Douglas Burns Date: Fri, 13 Jan 2017 15:39:43 +0000 Subject: [PATCH 82/90] Temporarily increase storage precision while i figure out what going wrong --- dps/utils/pandas_utilities.py | 39 +++++++++++++++++++++++++++++++++-- 1 file changed, 37 insertions(+), 2 deletions(-) diff --git a/dps/utils/pandas_utilities.py b/dps/utils/pandas_utilities.py index dfa70f85..416d8b65 100644 --- a/dps/utils/pandas_utilities.py +++ b/dps/utils/pandas_utilities.py @@ -5,6 +5,7 @@ pd.set_option('display.max_colwidth', 4096) pd.set_option('display.max_rows', 50) pd.set_option('display.width', 1000) +pd.set_option('precision',12) def dict_to_df(d): ''' @@ -73,7 +74,7 @@ def divide_by_series(s1, s2): def tupleise_cols(vals, errs): ''' - tupleising two cols in pandas + tupleising two cols ''' vals_errs = [ (v, e) for v,e in zip(vals, errs)] return vals_errs @@ -133,4 +134,38 @@ def read_tuple_from_file( filename ): errs = df[sample+'_Unc'] df[sample] = tupleise_cols(vals, 
errs) del df[sample+'_Unc'] - return df \ No newline at end of file + return df + +def combine_complex_df( df1, df2 ): + ''' + Takes a 2 pandii dataframes of the form: + A | B A | B + (v,e) | (v,e) (v,e) | (v,e) + + Returns 1 pandas dataframe of the form + A | B + (v,e) | (v,e) + ''' + from uncertainties import ufloat + l1=df1.columns.tolist() + l2=df2.columns.tolist() + if l1 != l2: + print "Trying to combine two non compatible dataframes" + print l1 + print l2 + return + + combined_result = {} + for sample in l1: + results = [] + for entry1, entry2 in zip(df1[sample], df2[sample]): + v1 = ufloat(entry1[0], entry1[1]) + v2 = ufloat(entry2[0], entry2[1]) + s = v1 + v2 + results.append( ( s.nominal_value, s.std_dev ) ) + combined_result[sample] = results + df = dict_to_df(combined_result) + return df + + + From a9455608f52a1458dcea83cba7f42d3bcb65fb8a Mon Sep 17 00:00:00 2001 From: Douglas Burns Date: Fri, 13 Jan 2017 15:40:14 +0000 Subject: [PATCH 83/90] More Colours, Plot Cosmetics --- .../xsection/05_make_systematic_plots.py | 65 ++++++++++--------- 1 file changed, 35 insertions(+), 30 deletions(-) diff --git a/dps/analysis/xsection/05_make_systematic_plots.py b/dps/analysis/xsection/05_make_systematic_plots.py index 8dee2248..665ddb07 100644 --- a/dps/analysis/xsection/05_make_systematic_plots.py +++ b/dps/analysis/xsection/05_make_systematic_plots.py @@ -16,8 +16,8 @@ from matplotlib import rc from operator import itemgetter -rc( 'font', **CMS.font ) -rc( 'text', usetex = False ) +# rc( 'font', **CMS.font ) +# rc( 'text', usetex = False ) def plot_systematic_uncertainties(systematic_uncertainties, bin_edges, variable, output_folder, subcategories = [], subname = '', plot_largest = False): ''' @@ -27,7 +27,9 @@ def plot_systematic_uncertainties(systematic_uncertainties, bin_edges, variable, if not subcategories: subcategories = systematic_uncertainties.keys() x_limits = [bin_edges[0], bin_edges[-1]] - y_limits = [-0.6,0.6] + # y_limits = [-0.6,0.6] + y_limits = 
[0,0.4] + fig_syst = plt.figure( figsize = ( 20, 16 ), dpi = 400, facecolor = 'white' ) ax_syst = fig_syst.add_subplot(1, 1, 1) ax_syst.minorticks_on() @@ -74,8 +76,10 @@ def plot_systematic_uncertainties(systematic_uncertainties, bin_edges, variable, plt.tick_params( **CMS.axis_label_major ) plt.tick_params( **CMS.axis_label_minor ) - colours = ['red', 'blue', 'green', 'chartreuse', 'indigo', 'magenta', 'darkmagenta', 'hotpink', 'cyan', 'darkred', 'darkgoldenrod', 'mediumvioletred', 'mediumspringgreen', 'gold', 'darkgoldenrod', 'slategray', 'dodgerblue', 'cadetblue', 'darkblue', 'seagreen', 'deeppink' ] - + colours = ['red', 'blue', 'green', 'chartreuse', 'indigo', 'magenta', 'darkmagenta', 'hotpink', 'cyan', 'darkred', 'darkgoldenrod', 'mediumvioletred', 'mediumspringgreen', 'gold', 'darkgoldenrod', 'slategray', 'dodgerblue', 'cadetblue', 'darkblue', 'seagreen', 'deeppink', 'deepskyblue' ] + # if len(colours) < len(error_hists.keys()): + # print '---> Need to add more colours!!!' + for error_hists in [error_hists_up, error_hists_down]: for i, source, in enumerate(error_hists.keys()): hist = error_hists[source] @@ -92,7 +96,8 @@ def plot_systematic_uncertainties(systematic_uncertainties, bin_edges, variable, else: rplt.hist( hist, stacked=False, label = '' ) - leg = plt.legend(loc='lower right',prop={'size':20},ncol=4) + leg = plt.legend(loc='upper right',prop={'size':25},ncol=3) + # leg = plt.legend(loc='upper right',prop={'size':20},ncol=4) leg.draw_frame(False) x_title = variables_NonLatex[variable] @@ -111,29 +116,29 @@ def plot_systematic_uncertainties(systematic_uncertainties, bin_edges, variable, logo_location = (0.05, 0.98) prelim_location = (0.05, 0.92) channel_location = ( 0.05, 0.86) - plt.text(logo_location[0], logo_location[1], - r"\textbf{CMS}", - transform=ax_syst.transAxes, - fontsize=42, - verticalalignment='top', - horizontalalignment='left' - ) - # preliminary - plt.text(prelim_location[0], prelim_location[1], - r"\emph{Preliminary}", - 
transform=ax_syst.transAxes, - fontsize=42, - verticalalignment='top', - horizontalalignment='left' - ) - # channel text - plt.text(channel_location[0], channel_location[1], - r"\emph{%s}" %channel, - transform=ax_syst.transAxes, - fontsize=40, - verticalalignment='top', - horizontalalignment='left' - ) + # plt.text(logo_location[0], logo_location[1], + # "CMS", + # transform=ax_syst.transAxes, + # fontsize=42, + # verticalalignment='top', + # horizontalalignment='left' + # ) + # # preliminary + # plt.text(prelim_location[0], prelim_location[1], + # r"\emph{Preliminary}", + # transform=ax_syst.transAxes, + # fontsize=42, + # verticalalignment='top', + # horizontalalignment='left' + # ) + # # channel text + # plt.text(channel_location[0], channel_location[1], + # r"\emph{%s}" % channel, + # transform=ax_syst.transAxes, + # fontsize=40, + # verticalalignment='top', + # horizontalalignment='left' + # ) plt.tight_layout() @@ -217,7 +222,7 @@ def plot_systematic_uncertainties(systematic_uncertainties, bin_edges, variable, for channel in ['electron', 'muon', 'combined', 'combinedBeforeUnfolding']: - # if channel != 'combined':continue + if channel != 'combined':continue input_file = '{basepath}/{com}TeV/{var}/{ps}/central/xsection_normalised_{channel}_{method}_summary_relative.txt'.format( basepath = path, com = com, From 718c4ab963ae0a68c6d312d6139f05009bde22a8 Mon Sep 17 00:00:00 2001 From: Douglas Burns Date: Fri, 13 Jan 2017 15:40:47 +0000 Subject: [PATCH 84/90] Remove TTbar new tune generator for spring16 MC --- dps/experimental/DougsBTagEff/makeBTagEfficiencies.py | 4 ---- 1 file changed, 4 deletions(-) diff --git a/dps/experimental/DougsBTagEff/makeBTagEfficiencies.py b/dps/experimental/DougsBTagEff/makeBTagEfficiencies.py index e2cad743..9b8c674d 100644 --- a/dps/experimental/DougsBTagEff/makeBTagEfficiencies.py +++ b/dps/experimental/DougsBTagEff/makeBTagEfficiencies.py @@ -29,7 +29,6 @@ 1 : ["TTJets_powhegHerwigpp_tree.root" , "PowhegHerwigpp"], 2 : 
["TTJets_amc_tree.root" , "aMCatNLOPythia8"], 3 : ["TTJets_madgraph_tree.root" , "Madgraph"], - 4 : ["TTJets_PowhegPythia8_Moriond17_tree.root" , "PowhegPythia8_Moriond17"] # 4 : ["TTJets_amcatnloHerwigpp_tree.root" , "aMCatNLOHerwigpp"], } @@ -62,9 +61,6 @@ in_file = input_files[key][0] sample = input_files[key][1] input_file = basepath+in_file - if key == 4: - input_file = "/hdfs/TopQuarkGroup/run2/atOutput/13TeV/2016/TTJets_PowhegPythia8_Moriond17_tree.root" - print "Generator : ", sample directory = out_file.mkdir( sample ) From a79d3b258012bedff976bedc3a90b9e38d985b34 Mon Sep 17 00:00:00 2001 From: Douglas Burns Date: Fri, 13 Jan 2017 15:41:25 +0000 Subject: [PATCH 85/90] pdate tau finderto read from dataframes --- dps/analysis/unfolding_tests/01_getBestTau.py | 6 ++++-- .../unfolding_tests/01_get_best_regularisation_TUnfold.py | 6 +++--- 2 files changed, 7 insertions(+), 5 deletions(-) diff --git a/dps/analysis/unfolding_tests/01_getBestTau.py b/dps/analysis/unfolding_tests/01_getBestTau.py index 62fd4324..216b4b87 100644 --- a/dps/analysis/unfolding_tests/01_getBestTau.py +++ b/dps/analysis/unfolding_tests/01_getBestTau.py @@ -17,7 +17,7 @@ usage: python getBestTau.py config.json # for 13 TeV in the visible phase space : - python dps/analysis/unfolding_tests/getBestTau.py config/unfolding/VisiblePS/*.json -n 100 -t 0.005 --refold_plots --test + python dps/analysis/unfolding_tests/01_getBestTau.py config/unfolding/VisiblePS/*.json -n 100 -t 0.005 --refold_plots --test -n = number of tau points -t = specific tau value --refold_plots = output some comparison plots for every tau (suggest few tau) @@ -43,6 +43,7 @@ # , gen_bin_edges_vis from dps.utils.Unfolding import Unfolding, get_unfold_histogram_tuple, removeFakes from dps.utils.file_utilities import read_data_from_JSON, make_folder_if_not_exists +from dps.utils.pandas_utilities import read_tuple_from_file from dps.utils.hist_utilities import hist_to_value_error_tuplelist, value_error_tuplelist_to_hist import 
pandas as pd @@ -123,7 +124,7 @@ def __set_unfolding_histograms__( self ): edges = [] edges = reco_bin_edges_vis[self.variable] - json_input = read_data_from_JSON(data_file) + json_input = read_tuple_from_file(data_file) if data_key == "": # JSON file == histogram self.h_data = value_error_tuplelist_to_hist(json_input, edges) @@ -143,6 +144,7 @@ def main(): clear_old_df('tables/taufinding/') for input_values, json_file in zip( input_values_sets, json_input_files ): + if 'combined' in json_file: continue # Initialise the TauFinding class regularisation_settings = TauFinding( input_values ) diff --git a/dps/analysis/unfolding_tests/01_get_best_regularisation_TUnfold.py b/dps/analysis/unfolding_tests/01_get_best_regularisation_TUnfold.py index 0e92dc30..9502c74a 100644 --- a/dps/analysis/unfolding_tests/01_get_best_regularisation_TUnfold.py +++ b/dps/analysis/unfolding_tests/01_get_best_regularisation_TUnfold.py @@ -36,6 +36,7 @@ from dps.config import CMS from ROOT import TGraph, TSpline3, Double, TUnfoldDensity, TUnfold, TDecompSVD, TMatrixD, TCanvas, gROOT from rootpy import asrootpy +from dps.utils.pandas_utilities import read_tuple_from_file rc('font',**CMS.font) rc( 'text', usetex = True ) @@ -107,7 +108,7 @@ def __set_unfolding_histograms__( self ): edges = [] edges = reco_bin_edges_vis[self.variable] - json_input = read_data_from_JSON(data_file) + json_input = read_tuple_from_file(data_file) if data_key == "": # JSON file == histogram self.h_data = value_error_tuplelist_to_hist(json_input, edges) @@ -124,6 +125,7 @@ def main(): results = {} for input_values, json_file in zip( input_values_sets, json_input_files ): print 'Processing', json_file + if 'combined' in json_file: continue regularisation_settings = RegularisationSettings( input_values ) variable = regularisation_settings.variable channel = regularisation_settings.channel @@ -142,7 +144,6 @@ def main(): h_response, fakes = None, method = 'TUnfold', - k_value = -1, tau = 0. 
) @@ -266,7 +267,6 @@ def get_best_tau( regularisation_settings ): h_response, fakes = None, method = 'TUnfold', - k_value = -1, tau = -1 ) From 8778e342890424f35977537e7c203e9c4691bc1d Mon Sep 17 00:00:00 2001 From: Douglas Burns Date: Fri, 13 Jan 2017 15:42:21 +0000 Subject: [PATCH 86/90] Adding logZ to colorbar on binning plots --- dps/analysis/xsection/make_binning_plots.py | 10 +++++++++- 1 file changed, 9 insertions(+), 1 deletion(-) diff --git a/dps/analysis/xsection/make_binning_plots.py b/dps/analysis/xsection/make_binning_plots.py index bb310041..7b78f11b 100644 --- a/dps/analysis/xsection/make_binning_plots.py +++ b/dps/analysis/xsection/make_binning_plots.py @@ -32,6 +32,12 @@ def make_scatter_plot( input_file, histogram, bin_edges, channel, variable, title ): global output_folder, output_formats, options scatter_plot = get_histogram_from_file( histogram, input_file ) + + # Finding max value in scatterplot for colourmap normalisation + max_bin = scatter_plot.GetMaximumBin() + max_bin_content = scatter_plot.GetBinContent(max_bin) + norm = mpl.colors.LogNorm(vmin = 1, vmax = int(max_bin_content+1)) + # scatter_plot.Rebin2D( 5, 5 ) x_limits = [bin_edges[variable][0], bin_edges[variable][-1]] @@ -51,7 +57,9 @@ def make_scatter_plot( input_file, histogram, bin_edges, channel, variable, titl plt.tick_params( **CMS.axis_label_minor ) ax0.xaxis.labelpad = 12 ax0.yaxis.labelpad = 12 - im = rplt.imshow( scatter_plot, axes = ax0, cmap = my_cmap, vmin = 0.001 ) + + im = rplt.imshow( scatter_plot, axes = ax0, cmap = my_cmap, norm=norm ) + colorbar = plt.colorbar( im ) colorbar.ax.tick_params( **CMS.axis_label_major ) From 1af5820f194ccdd4c301ef66db42547c3f623a3e Mon Sep 17 00:00:00 2001 From: Emyr Clement Date: Tue, 17 Jan 2017 17:04:09 +0000 Subject: [PATCH 87/90] Fix QCD uncertainty treamtent --- dps/analysis/xsection/create_measurement.py | 25 +++++---- dps/config/xsection.py | 11 ++-- dps/utils/measurement.py | 61 +++++++++++++-------- 3 files changed, 57 
insertions(+), 40 deletions(-) diff --git a/dps/analysis/xsection/create_measurement.py b/dps/analysis/xsection/create_measurement.py index 35063fa9..b9f61236 100644 --- a/dps/analysis/xsection/create_measurement.py +++ b/dps/analysis/xsection/create_measurement.py @@ -179,13 +179,6 @@ def get_sample_info(options, xsec_config, sample): elif options['category'] == 'SingleTop_cross_section-': sample_info["scale"] = 1.0 - 1.0*generator_scale - if sample == 'QCD': - generator_scale = xsec_config.rate_changing_systematics['QCD_cross_section'] - if options['category'] == 'QCD_cross_section+': - sample_info["scale"] = 1.0 + 1.0*generator_scale - elif options['category'] == 'QCD_cross_section-': - sample_info["scale"] = 1.0 - 1.0*generator_scale - # scaling will always have some non zero value if sample_info["scale"] <= 0.0001: sample_info["scale"] = 0.0001 @@ -239,7 +232,7 @@ def get_sample_info(options, xsec_config, sample): # Input File and Tree # QCD Contorol Regions (Shape) JES and JER sample_info["input_file"] = get_file(xsec_config, sample, options) - sample_info["tree"], sample_info["qcd_control_region"] = get_tree(xsec_config, options) + sample_info["tree"], sample_info["qcd_control_region"], sample_info["qcd_normalisation_region"] = get_tree(xsec_config, options) if sample != 'data': if options['category'] == 'JES_up': sample_info["input_file"] = sample_info["input_file"].replace('tree', 'plusJES_tree') @@ -258,6 +251,8 @@ def get_sample_info(options, xsec_config, sample): sample_info["tree"] = sample_info["tree"].replace('FitVariables', 'FitVariables_JERDown') sample_info["qcd_control_region"] = sample_info["qcd_control_region"].replace('FitVariables', 'FitVariables_JERDown') + if sample_info["qcd_normalisation_region"] is None: + sample_info["qcd_normalisation_region"] = sample_info["qcd_control_region"] return sample_info @cml.trace() @@ -299,6 +294,8 @@ def get_tree(config, options): Return a specific sample tree ''' tree = 
config.tree_path[options['channel']] + qcd_tree = None + qcd_normalisation_tree = None if options["data_driven_qcd"]: # QCD control region qcd_tree = tree.replace( @@ -307,9 +304,15 @@ def get_tree(config, options): if "QCD_shape" in options['category']: qcd_tree = tree.replace( "Ref selection", config.qcd_shape_syst_region[options['channel']]) - else: - qcd_tree = None - return tree, qcd_tree + qcd_normalisation_tree = tree.replace( + "Ref selection", config.qcd_control_region[options['channel']]) + elif "QCD_cross_section" in options['category']: + qcd_tree = tree.replace( + "Ref selection", config.qcd_control_region[options['channel']]) + qcd_normalisation_tree = tree.replace( + "Ref selection", config.qcd_shape_syst_region[options['channel']]) + + return tree, qcd_tree, qcd_normalisation_tree @cml.trace() diff --git a/dps/config/xsection.py b/dps/config/xsection.py index 161acbe1..39d69afd 100644 --- a/dps/config/xsection.py +++ b/dps/config/xsection.py @@ -236,11 +236,10 @@ def __fill_defaults__( self ): 'V+Jets_cross_section-', 'V+Jets_cross_section+', - 'QCD_cross_section+', - 'QCD_cross_section-', 'SingleTop_cross_section+', 'SingleTop_cross_section-', + 'QCD_cross_section', 'QCD_shape', ] @@ -273,7 +272,7 @@ def __fill_defaults__( self ): self.list_of_systematics = { # Theoretical Uncertainties (Rate Changing) 'V+Jets_cross_section' : ['V+Jets_cross_section+', 'V+Jets_cross_section-'], - 'QCD_cross_section' : ['QCD_cross_section+', 'QCD_cross_section-'], + 'QCD_cross_section' : ['QCD_cross_section', 'QCD_cross_section'], 'SingleTop_cross_section' : ['SingleTop_cross_section+', 'SingleTop_cross_section-'], 'luminosity' : ['luminosity+', 'luminosity-'], # QCD Shape @@ -525,7 +524,7 @@ def __fill_defaults__( self ): ('luminosity-', 'luminosity+'), ], 'QCD Normalisation': [ - ('QCD_cross_section-', 'QCD_cross_section+'), + ('QCD_cross_section', 'QCD_cross_section'), ], 'QCD shape': [ ('QCD_shape', 'QCD_shape') @@ -564,11 +563,9 @@ def __fill_defaults_13TeV__( 
self ): self.ttbar_xsection = 831.76 # pb self.rate_changing_systematics = {#TODO check where this is used - 'luminosity': 0.027, # Best guess for 13 TeV 4.8->2.7 + 'luminosity': 0.062, 'SingleTop_cross_section': 0.05, # Currently same as 8 TeV - # 'TTJet_cross_section': 0.043, # Currently same as 8 TeV 'V+Jets_cross_section': 0.5, - 'QCD_cross_section' : 1., } self.tau_values_electron = { diff --git a/dps/utils/measurement.py b/dps/utils/measurement.py index f1fcc7d1..a604b885 100644 --- a/dps/utils/measurement.py +++ b/dps/utils/measurement.py @@ -19,6 +19,7 @@ def __init__(self, measurement): self.measurement = measurement self.histograms = {} self.cr_histograms = {} + self.cr_histograms_for_normalisation = {} self.normalisation = {} self.variable = None self.com = None @@ -47,6 +48,9 @@ def __setFromConfig(self): if data_driven_qcd: self.cr_histograms[sample] = self.__return_histogram(histogram_info, useQCDControl=True) + if histogram_info["qcd_normalisation_region"] != histogram_info["qcd_control_region"]: + self.cr_histograms_for_normalisation[sample] = self.__return_histogram(histogram_info, useQCDControl=True, useQCDSystematicControl=True) + # print(hist_to_value_error_tuplelist(self.histograms[sample])) # print(hist_to_value_error_tuplelist(self.cr_histograms[sample])) @@ -59,38 +63,47 @@ def __qcd_from_data(self): ''' Replace Signal region mc qcd with data driven qcd - N MC QCD in SR N DD QCD in CR - QCD_SHAPE * -------------- * -------------- - N DD QCD in CR N MC QCD in CR + N MC QCD in SR + Data in CR * -------------- + N MC QCD in CR - Shape normalise to scale from - SR mc qcd mc qcd to dd qcd + Shape transfer factor + from control to + signal region ''' # Get the shape of the data driven qcd in the control region - qcd_shape = clean_control_region( + data_driven_qcd = clean_control_region( self.cr_histograms, subtract=['TTBar', 'V+Jets', 'SingleTop'] ) - # print(hist_to_value_error_tuplelist(qcd_shape)) - - # Now to normalise the qcd shape to the MC 
in the Signal Region - # n_dd_cr= Number of datadriven qcd from Control Region + # print(hist_to_value_error_tuplelist(data_driven_qcd)) + # Calculate transfer factor from signal to control region n_mc_sr = self.histograms['QCD'].Integral() - n_dd_cr = qcd_shape.Integral() - qcd_shape.Scale( n_mc_sr/n_dd_cr ) - # print "scaling to normalisation in SR MC : ", n_mc_sr/n_dd_cr - - # Now to scale from mc qcd to datadriven qcd - n_mc_cr = self.cr_histograms['QCD'].Integral() - qcd_shape.Scale( n_dd_cr/n_mc_cr ) - # print "scaling from MC to datadriven : ", n_dd_cr/n_mc_cr - # print "Total scaling : ", n_mc_sr/n_mc_cr + n_mc_cr = 1 + transfer_factor = 1 + if self.cr_histograms_for_normalisation == {}: + n_mc_cr = self.cr_histograms['QCD'].Integral() + transfer_factor = n_mc_sr/n_mc_cr + else : + # Treatment for QCD systematic uncertainties + # Use shape from the control region + # and the normalisation derived from a different control region + n_mc_cr = self.cr_histograms['QCD'].Integral() + n_mc_cr_norm = self.cr_histograms_for_normalisation['QCD'].Integral() + data_driven_qcd_normalisation = clean_control_region( + self.cr_histograms_for_normalisation, + subtract=['TTBar', 'V+Jets', 'SingleTop'] + ) + n_data_cr_norm = data_driven_qcd_normalisation.Integral() + transfer_factor = n_mc_sr/ n_mc_cr_norm * n_data_cr_norm / data_driven_qcd.Integral() + + data_driven_qcd.Scale( transfer_factor ) # Replace QCD histogram with datadriven one - self.histograms['QCD'] = qcd_shape + self.histograms['QCD'] = data_driven_qcd return - def __return_histogram(self, d_hist_info, ignoreUnderflow=True, useQCDControl=False): + def __return_histogram(self, d_hist_info, ignoreUnderflow=True, useQCDControl=False, useQCDSystematicControl=False): ''' Takes basic histogram info and returns histo. Maybe this can move to ROOT_utilities? 
@@ -102,6 +115,7 @@ def __return_histogram(self, d_hist_info, ignoreUnderflow=True, useQCDControl=Fa f = d_hist_info['input_file'] tree = d_hist_info['tree'] qcd_tree = d_hist_info["qcd_control_region"] + qcd_tree_for_normalisation = d_hist_info["qcd_normalisation_region"] var = d_hist_info['branch'] bins = d_hist_info['bin_edges'] lumi_scale = d_hist_info['lumi_scale'] @@ -111,7 +125,10 @@ def __return_histogram(self, d_hist_info, ignoreUnderflow=True, useQCDControl=Fa if useQCDControl: # replace SR tree with CR tree - tree = qcd_tree + if useQCDSystematicControl: + tree = qcd_tree_for_normalisation + else: + tree = qcd_tree # Remove the Lepton reweighting for the datadriven qcd (SF not derived for unisolated leptons) for weight in weights: if 'Electron' in weight: weights.remove(weight) From 35a944fe9208d2589aa4d3df22273b31ce8ddcbb Mon Sep 17 00:00:00 2001 From: Emyr Clement Date: Tue, 17 Jan 2017 17:07:24 +0000 Subject: [PATCH 88/90] Add uncertainties in quadrature in fix_overflow --- dps/utils/hist_utilities.py | 7 ++++--- 1 file changed, 4 insertions(+), 3 deletions(-) diff --git a/dps/utils/hist_utilities.py b/dps/utils/hist_utilities.py index cf01b729..d81e398d 100644 --- a/dps/utils/hist_utilities.py +++ b/dps/utils/hist_utilities.py @@ -12,6 +12,7 @@ from rootpy.plotting.hist import Hist2D import random import string +from math import sqrt from copy import deepcopy from .file_utilities import read_data_from_JSON from .logger import log @@ -205,7 +206,7 @@ def fix_overflow( hist ): overflow_error= hist.GetBinError( overflow_bin ) new_last_bin_content = hist.GetBinContent( last_bin ) + overflow - new_last_bin_error = hist.GetBinError( last_bin ) + overflow_error + new_last_bin_error = sqrt(hist.GetBinError( last_bin ) ** 2 + overflow_error ** 2) hist.SetBinContent( last_bin, new_last_bin_content ) hist.SetBinError( last_bin, new_last_bin_error ) @@ -225,7 +226,7 @@ def fix_overflow( hist ): hist.SetBinContent( x, overflow_bin_y, 0. 
) hist.SetBinContent( x, last_bin_y, overflow_y + last_bin_content_y ) - hist.SetBinError( x, last_bin_y, overflow_error_y + last_bin_error_y ) + hist.SetBinError( x, last_bin_y, sqrt( overflow_error_y ** 2 + last_bin_error_y ** 2 ) ) # now all x-overflow for y in range( 1, overflow_bin_y +1): overflow_x = hist.GetBinContent( overflow_bin_x, y ) @@ -236,7 +237,7 @@ def fix_overflow( hist ): hist.SetBinContent( overflow_bin_x, y, 0. ) hist.SetBinContent( last_bin_x, y, overflow_x + last_bin_content_x ) - hist.SetBinError( last_bin_x, y, overflow_error_x + last_bin_error_x ) + hist.SetBinError( last_bin_x, y, sqrt( overflow_error_x ** 2 + last_bin_error_x ** 2 ) ) # and now the final bin (both x and y overflow) overflow_x_y = hist.GetBinContent( overflow_bin_x, overflow_bin_y ) last_bin_content_x_y = hist.GetBinContent( last_bin_x, last_bin_y ) From 1031109a3bf6b13a9297d16bcf0ba72a00251841 Mon Sep 17 00:00:00 2001 From: Douglas Burns Date: Wed, 18 Jan 2017 13:06:12 +0000 Subject: [PATCH 89/90] double length of control plots --- dps/config/variable_binning.py | 14 +++++++------- 1 file changed, 7 insertions(+), 7 deletions(-) diff --git a/dps/config/variable_binning.py b/dps/config/variable_binning.py index 731e4d98..b33da4b3 100644 --- a/dps/config/variable_binning.py +++ b/dps/config/variable_binning.py @@ -64,19 +64,19 @@ def produce_reco_bin_edges( gen_bin_edges ): control_plots_bins = { 'NJets' : [i + 0.5 for i in range ( 3, 16 + 1 )], 'JetPt' : [i * 5 for i in range ( 5, 40 )], - 'MuonPt' : [i * 24 for i in range ( 1, 10 )], - 'ElectronPt' : [i * 10 for i in range ( 1, 20 )], + 'MuonPt' : [i * 10 for i in range ( 1, 40 )], + 'ElectronPt' : [i * 10 for i in range ( 1, 40 )], 'LeptonEta' : [i*0.5 for i in range( -25, 25 )], 'AbsLeptonEta' : [i*0.1 for i in range( 0, 25 )], 'NBJets' : [i - 0.5 for i in range ( 0, 6 + 1 )], 'NVertex' : [i for i in range ( 0,40 + 1 )], 'relIso' : [i*0.01 for i in range(0,20)], - 'relIsoQCD' : [i*0.05 for i in range(0,20)], + 
'relIsoQCD' : [i*0.025 for i in range(0,40)], 'AbsLeptonEtaQCD' : [i*0.2 for i in range( 0, 24 )], - 'MET' : [i * 15 for i in range ( 0, 21 )], - 'WPT' : [i * 25 for i in range ( 0, 17 )], - 'HT' : [i * 50 for i in range ( 0, 21 )], - 'ST' : [i * 50 for i in range ( 2, 25 )], + 'MET' : [i * 15 for i in range ( 0, 40 )], + 'WPT' : [i * 25 for i in range ( 0, 35 )], + 'HT' : [i * 50 for i in range ( 0, 40 )], + 'ST' : [i * 50 for i in range ( 2, 40 )], 'sigmaietaieta' : [i * 0.002 for i in range ( 0, 20 )], } From 02b2fab67229ec870a98aa972c4b0ff97e77dff7 Mon Sep 17 00:00:00 2001 From: Douglas Burns Date: Wed, 18 Jan 2017 13:08:22 +0000 Subject: [PATCH 90/90] do not calculate all qcd control plots multiple times --- .../xsection/make_control_plots_fromTrees.py | 460 +++++++++--------- 1 file changed, 230 insertions(+), 230 deletions(-) diff --git a/dps/analysis/xsection/make_control_plots_fromTrees.py b/dps/analysis/xsection/make_control_plots_fromTrees.py index fc5e8df5..828126b9 100644 --- a/dps/analysis/xsection/make_control_plots_fromTrees.py +++ b/dps/analysis/xsection/make_control_plots_fromTrees.py @@ -459,9 +459,9 @@ def parse_arguments(): 'ST', 'WPT', 'NVertex', - 'NVertexNoWeight', - 'NVertexUp', - 'NVertexDown', + # 'NVertexNoWeight', + # 'NVertexUp', + # 'NVertexDown', 'LeptonPt', 'AbsLeptonEta', 'NJets', @@ -1026,232 +1026,232 @@ def parse_arguments(): use_qcd_data_region = useQCDControl, ) + ################################################### + # QCD Control Region + ################################################### + for channel, label in { + 'electronQCDNonIso' : 'EPlusJets/QCD non iso e+jets', + 'electronQCDConversions' : 'EPlusJets/QCDConversions', + 'muonQCDNonIso' : 'MuPlusJets/QCD non iso mu+jets 3toInf', + 'muonQCDNonIso2' : 'MuPlusJets/QCD non iso mu+jets 1p5to3', + }.iteritems() : + b_tag_bin = '0btag' + + # Set folder for this batch of plots + output_folder = output_folder_base + "QCDControl/Variables/%s/" % channel + # output_folder = 
output_folder_base + "QCDControl/Variables/%s/TightElectron/" % channel + make_folder_if_not_exists(output_folder) + + print 'Control region :',label + + treeName = 'EPlusJets/QCD non iso e+jets' + signalTreeName = 'EPlusJets/Ref selection' + if channel == 'electronQCDConversions': + treeName = 'EPlusJets/QCDConversions' + elif channel == 'muonQCDNonIso': + treeName = 'MuPlusJets/QCD non iso mu+jets 3toInf' + signalTreeName = 'MuPlusJets/Ref selection' + elif channel == 'muonQCDNonIso2': + treeName = 'MuPlusJets/QCD non iso mu+jets 1p5to3' + signalTreeName = 'MuPlusJets/Ref selection' + + ################################################### + # HT + ################################################### + norm_variable = 'HT' + if 'QCDHT' in include_plots: + print '---> QCD HT' + make_plot( channel, + x_axis_title = '$%s$ [GeV]' % variables_latex['HT'], + y_axis_title = 'Events/(%i GeV)' % binWidth(control_plots_bins['HT']), + signal_region_tree = 'TTbar_plus_X_analysis/%s/FitVariables' % treeName, + control_region_tree = 'TTbar_plus_X_analysis/%s/FitVariables' % treeName, + branchName = 'HT', + name_prefix = '%s_HT_' % channel, + x_limits = control_plots_bins['HT'], + nBins = 20, + rebin = 1, + legend_location = ( 0.95, 0.78 ), + cms_logo_location = 'right', + ) + + if 'QCDHT_dataControl_mcSignal' in include_plots: + print '---> QCD HT data to signal QCD' + make_plot( channel, + x_axis_title = '$%s$ [GeV]' % variables_latex['HT'], + y_axis_title = 'Events/(%i GeV)' % binWidth(control_plots_bins['HT']), + signal_region_tree = 'TTbar_plus_X_analysis/%s/FitVariables' % signalTreeName, + control_region_tree = 'TTbar_plus_X_analysis/%s/FitVariables' % treeName, + branchName = 'HT', + name_prefix = '%s_HT_' % channel, + x_limits = control_plots_bins['HT'], + nBins = 20, + rebin = 1, + legend_location = ( 0.95, 0.78 ), + cms_logo_location = 'right', + ) + + ################################################### + # MET + ################################################### + 
norm_variable = 'MET' + if 'QCDMET' in include_plots: + print '---> QCD MET' + make_plot( channel, + x_axis_title = '$%s$ [GeV]' % variables_latex['MET'], + y_axis_title = 'Events/(%i GeV)' % binWidth(control_plots_bins['MET']), + signal_region_tree = 'TTbar_plus_X_analysis/%s/FitVariables' % treeName, + control_region_tree = 'TTbar_plus_X_analysis/%s/FitVariables' % treeName, + branchName = 'MET', + name_prefix = '%s_MET_' % channel, + x_limits = control_plots_bins['MET'], + nBins = 20, + rebin = 1, + legend_location = ( 0.95, 0.78 ), + cms_logo_location = 'right', + ) + ################################################### - # QCD Control Region + # ST + ################################################### + norm_variable = 'ST' + if 'QCDST' in include_plots: + print '---> QCD ST' + make_plot( channel, + x_axis_title = '$%s$ [GeV]' % variables_latex['ST'], + y_axis_title = 'Events/(%i GeV)' % binWidth(control_plots_bins['ST']), + signal_region_tree = 'TTbar_plus_X_analysis/%s/FitVariables' % treeName, + control_region_tree = 'TTbar_plus_X_analysis/%s/FitVariables' % treeName, + branchName = 'ST', + name_prefix = '%s_ST_' % channel, + x_limits = control_plots_bins['ST'], + nBins = 20, + rebin = 1, + legend_location = ( 0.95, 0.78 ), + cms_logo_location = 'right', + ) + + ################################################### + # WPT ################################################### - for channel, label in { - 'electronQCDNonIso' : 'EPlusJets/QCD non iso e+jets', - 'electronQCDConversions' : 'EPlusJets/QCDConversions', - # 'muonQCDNonIso' : 'MuPlusJets/QCD non iso mu+jets 3toInf', - # 'muonQCDNonIso2' : 'MuPlusJets/QCD non iso mu+jets 1p5to3', - }.iteritems() : - b_tag_bin = '0btag' - - # Set folder for this batch of plots - output_folder = output_folder_base + "QCDControl/Variables/%s/" % channel - # output_folder = output_folder_base + "QCDControl/Variables/%s/TightElectron/" % channel - make_folder_if_not_exists(output_folder) - - print 'Control region :',label - - 
treeName = 'EPlusJets/QCD non iso e+jets' - signalTreeName = 'EPlusJets/Ref selection' - if channel == 'electronQCDConversions': - treeName = 'EPlusJets/QCDConversions' - elif channel == 'muonQCDNonIso': - treeName = 'MuPlusJets/QCD non iso mu+jets 3toInf' - signalTreeName = 'MuPlusJets/Ref selection' - elif channel == 'muonQCDNonIso2': - treeName = 'MuPlusJets/QCD non iso mu+jets 1p5to3' - signalTreeName = 'MuPlusJets/Ref selection' - - ################################################### - # HT - ################################################### - norm_variable = 'HT' - if 'QCDHT' in include_plots: - print '---> QCD HT' - make_plot( channel, - x_axis_title = '$%s$ [GeV]' % variables_latex['HT'], - y_axis_title = 'Events/(%i GeV)' % binWidth(control_plots_bins['HT']), - signal_region_tree = 'TTbar_plus_X_analysis/%s/FitVariables' % treeName, - control_region_tree = 'TTbar_plus_X_analysis/%s/FitVariables' % treeName, - branchName = 'HT', - name_prefix = '%s_HT_' % channel, - x_limits = control_plots_bins['HT'], - nBins = 20, - rebin = 1, - legend_location = ( 0.95, 0.78 ), - cms_logo_location = 'right', - ) - - if 'QCDHT_dataControl_mcSignal' in include_plots: - print '---> QCD HT data to signal QCD' - make_plot( channel, - x_axis_title = '$%s$ [GeV]' % variables_latex['HT'], - y_axis_title = 'Events/(%i GeV)' % binWidth(control_plots_bins['HT']), - signal_region_tree = 'TTbar_plus_X_analysis/%s/FitVariables' % signalTreeName, - control_region_tree = 'TTbar_plus_X_analysis/%s/FitVariables' % treeName, - branchName = 'HT', - name_prefix = '%s_HT_' % channel, - x_limits = control_plots_bins['HT'], - nBins = 20, - rebin = 1, - legend_location = ( 0.95, 0.78 ), - cms_logo_location = 'right', - ) - - ################################################### - # MET - ################################################### - norm_variable = 'MET' - if 'QCDMET' in include_plots: - print '---> QCD MET' - make_plot( channel, - x_axis_title = '$%s$ [GeV]' % variables_latex['MET'], - 
y_axis_title = 'Events/(%i GeV)' % binWidth(control_plots_bins['MET']), - signal_region_tree = 'TTbar_plus_X_analysis/%s/FitVariables' % treeName, - control_region_tree = 'TTbar_plus_X_analysis/%s/FitVariables' % treeName, - branchName = 'MET', - name_prefix = '%s_MET_' % channel, - x_limits = control_plots_bins['MET'], - nBins = 20, - rebin = 1, - legend_location = ( 0.95, 0.78 ), - cms_logo_location = 'right', - ) - - ################################################### - # ST - ################################################### - norm_variable = 'ST' - if 'QCDST' in include_plots: - print '---> QCD ST' - make_plot( channel, - x_axis_title = '$%s$ [GeV]' % variables_latex['ST'], - y_axis_title = 'Events/(%i GeV)' % binWidth(control_plots_bins['ST']), - signal_region_tree = 'TTbar_plus_X_analysis/%s/FitVariables' % treeName, - control_region_tree = 'TTbar_plus_X_analysis/%s/FitVariables' % treeName, - branchName = 'ST', - name_prefix = '%s_ST_' % channel, - x_limits = control_plots_bins['ST'], - nBins = 20, - rebin = 1, - legend_location = ( 0.95, 0.78 ), - cms_logo_location = 'right', - ) - - ################################################### - # WPT - ################################################### - norm_variable = 'WPT' - if 'QCDWPT' in include_plots: - print '---> QCD WPT' - make_plot( channel, - x_axis_title = '$%s$ [GeV]' % variables_latex['WPT'], - y_axis_title = 'Events/(%i GeV)' % binWidth(control_plots_bins['WPT']), - signal_region_tree = 'TTbar_plus_X_analysis/%s/FitVariables' % treeName, - control_region_tree = 'TTbar_plus_X_analysis/%s/FitVariables' % treeName, - branchName = 'WPT', - name_prefix = '%s_WPT_' % channel, - x_limits = control_plots_bins['WPT'], - nBins = 20, - rebin = 1, - legend_location = ( 0.95, 0.78 ), - cms_logo_location = 'right', - ) - - ################################################### - # Abs Lepton Eta - ################################################### - if 'QCDAbsLeptonEta' in include_plots: - print '---> QCD Abs 
Lepton Eta' - make_plot( channel, - x_axis_title = '$%s$' % control_plots_latex['eta'], - y_axis_title = 'Events/(%.1f)' % binWidth(control_plots_bins['AbsLeptonEtaQCD']), - signal_region_tree = 'TTbar_plus_X_analysis/%s/FitVariables' % ( treeName ), - control_region_tree = 'TTbar_plus_X_analysis/%s/FitVariables' % ( treeName ), - branchName = 'abs(lepton_eta)', - name_prefix = '%s_AbsLeptonEta_' % channel, - x_limits = control_plots_bins['AbsLeptonEtaQCD'], - nBins = len(control_plots_bins['AbsLeptonEtaQCD'])-1, - rebin = 1, - legend_location = ( 0.95, 0.78 ), - cms_logo_location = 'right', - ) - - ################################################### - # Lepton Pt - ################################################### - if 'QCDLeptonPt' in include_plots: - print '---> QCD Lepton Pt' - binsLabel = 'ElectronPt' - if channel == 'muon': - binsLabel = 'MuonPt' - - make_plot( channel, - x_axis_title = '$%s$' % control_plots_latex['pt'], - y_axis_title = 'Events/(%i GeV)' % binWidth(control_plots_bins[binsLabel]), - signal_region_tree = 'TTbar_plus_X_analysis/%s/FitVariables' % ( treeName ), - control_region_tree = 'TTbar_plus_X_analysis/%s/FitVariables' % ( treeName ), - branchName = 'lepton_pt', - name_prefix = '%s_LeptonPt_' % channel, - x_limits = control_plots_bins[binsLabel], - nBins = len(control_plots_bins[binsLabel])-1, - rebin = 1, - legend_location = ( 0.95, 0.78 ), - cms_logo_location = 'right', - ) - - ################################################### - # NJets - ################################################### - if 'QCDNJets' in include_plots: - print '---> QCD NJets' - make_plot( channel, - x_axis_title = '$%s$' % control_plots_latex['NJets'], - y_axis_title = 'Events', - signal_region_tree = 'TTbar_plus_X_analysis/%s/FitVariables' % ( treeName ), - control_region_tree = 'TTbar_plus_X_analysis/%s/FitVariables' % ( treeName ), - branchName = 'NJets', - name_prefix = '%s_NJets_' % channel, - x_limits = control_plots_bins['NJets'], - nBins = 
len(control_plots_bins['NJets'])-1, - rebin = 1, - legend_location = ( 0.95, 0.78 ), - cms_logo_location = 'right', - ) - - # # Set folder for this batch of plots - # output_folder = output_folder_base + "QCDControl/Control/%s/" % channel - # # output_folder = output_folder_base + "QCDControl/Control/%s/TightElectron/" % channel - # make_folder_if_not_exists(output_folder) - # ################################################### - # # Rel iso - # ################################################### - if 'QCDRelIso' in include_plots: - print '---> QCD Rel iso' - make_plot( channel, - x_axis_title = '$%s$' % control_plots_latex['relIso'], - y_axis_title = 'Events', - signal_region_tree = 'TTbar_plus_X_analysis/%s/FitVariables' % label, - control_region_tree = 'TTbar_plus_X_analysis/%s/FitVariables' % label, - branchName = '%s' % 'lepton_isolation', - name_prefix = '%s_relIso_' % channel, - x_limits = control_plots_bins['relIsoQCD'], - nBins = len(control_plots_bins['relIsoQCD'])-1, - rebin = 1, - legend_location = ( 0.95, 0.78 ), - cms_logo_location = 'right', - ) - # ################################################### - # # Sigma ieta ieta - # ################################################### - - # norm_variable = 'sigmaietaieta' - # if 'QCDsigmaietaieta' in include_plots and not 'MuPlusJets' in treeName: - # print '---> sigmaietaieta' - # make_plot( channel, - # x_axis_title = '$%s$' % variables_latex['sigmaietaieta'], - # y_axis_title = 'Events/(%i GeV)' % binWidth(control_plots_bins['sigmaietaieta']), - # signal_region_tree = 'TTbar_plus_X_analysis/%s/FitVariables' % ( treeName ), - # control_region_tree = 'TTbar_plus_X_analysis/%s/FitVariables' % ( treeName ), - # branchName = 'sigmaIetaIeta', - # name_prefix = '%s_sigmaIetaIeta_' % channel, - # x_limits = control_plots_bins['sigmaietaieta'], - # y_max_scale = 1.5, - # nBins = len(control_plots_bins['sigmaietaieta'])-1, - # rebin = 1, - # legend_location = ( 0.95, 0.85 ), - # cms_logo_location = 'left', - # ) + 
norm_variable = 'WPT' + if 'QCDWPT' in include_plots: + print '---> QCD WPT' + make_plot( channel, + x_axis_title = '$%s$ [GeV]' % variables_latex['WPT'], + y_axis_title = 'Events/(%i GeV)' % binWidth(control_plots_bins['WPT']), + signal_region_tree = 'TTbar_plus_X_analysis/%s/FitVariables' % treeName, + control_region_tree = 'TTbar_plus_X_analysis/%s/FitVariables' % treeName, + branchName = 'WPT', + name_prefix = '%s_WPT_' % channel, + x_limits = control_plots_bins['WPT'], + nBins = 20, + rebin = 1, + legend_location = ( 0.95, 0.78 ), + cms_logo_location = 'right', + ) + + ################################################### + # Abs Lepton Eta + ################################################### + if 'QCDAbsLeptonEta' in include_plots: + print '---> QCD Abs Lepton Eta' + make_plot( channel, + x_axis_title = '$%s$' % control_plots_latex['eta'], + y_axis_title = 'Events/(%.1f)' % binWidth(control_plots_bins['AbsLeptonEtaQCD']), + signal_region_tree = 'TTbar_plus_X_analysis/%s/FitVariables' % ( treeName ), + control_region_tree = 'TTbar_plus_X_analysis/%s/FitVariables' % ( treeName ), + branchName = 'abs(lepton_eta)', + name_prefix = '%s_AbsLeptonEta_' % channel, + x_limits = control_plots_bins['AbsLeptonEtaQCD'], + nBins = len(control_plots_bins['AbsLeptonEtaQCD'])-1, + rebin = 1, + legend_location = ( 0.95, 0.78 ), + cms_logo_location = 'right', + ) + + ################################################### + # Lepton Pt + ################################################### + if 'QCDLeptonPt' in include_plots: + print '---> QCD Lepton Pt' + binsLabel = 'ElectronPt' + if channel == 'muon': + binsLabel = 'MuonPt' + + make_plot( channel, + x_axis_title = '$%s$' % control_plots_latex['pt'], + y_axis_title = 'Events/(%i GeV)' % binWidth(control_plots_bins[binsLabel]), + signal_region_tree = 'TTbar_plus_X_analysis/%s/FitVariables' % ( treeName ), + control_region_tree = 'TTbar_plus_X_analysis/%s/FitVariables' % ( treeName ), + branchName = 'lepton_pt', + name_prefix = 
'%s_LeptonPt_' % channel, + x_limits = control_plots_bins[binsLabel], + nBins = len(control_plots_bins[binsLabel])-1, + rebin = 1, + legend_location = ( 0.95, 0.78 ), + cms_logo_location = 'right', + ) + + ################################################### + # NJets + ################################################### + if 'QCDNJets' in include_plots: + print '---> QCD NJets' + make_plot( channel, + x_axis_title = '$%s$' % control_plots_latex['NJets'], + y_axis_title = 'Events', + signal_region_tree = 'TTbar_plus_X_analysis/%s/FitVariables' % ( treeName ), + control_region_tree = 'TTbar_plus_X_analysis/%s/FitVariables' % ( treeName ), + branchName = 'NJets', + name_prefix = '%s_NJets_' % channel, + x_limits = control_plots_bins['NJets'], + nBins = len(control_plots_bins['NJets'])-1, + rebin = 1, + legend_location = ( 0.95, 0.78 ), + cms_logo_location = 'right', + ) + + # # Set folder for this batch of plots + # output_folder = output_folder_base + "QCDControl/Control/%s/" % channel + # # output_folder = output_folder_base + "QCDControl/Control/%s/TightElectron/" % channel + # make_folder_if_not_exists(output_folder) + # ################################################### + # # Rel iso + # ################################################### + if 'QCDRelIso' in include_plots: + print '---> QCD Rel iso' + make_plot( channel, + x_axis_title = '$%s$' % control_plots_latex['relIso'], + y_axis_title = 'Events', + signal_region_tree = 'TTbar_plus_X_analysis/%s/FitVariables' % label, + control_region_tree = 'TTbar_plus_X_analysis/%s/FitVariables' % label, + branchName = '%s' % 'lepton_isolation', + name_prefix = '%s_relIso_' % channel, + x_limits = control_plots_bins['relIsoQCD'], + nBins = len(control_plots_bins['relIsoQCD'])-1, + rebin = 1, + legend_location = ( 0.95, 0.78 ), + cms_logo_location = 'right', + ) + # ################################################### + # # Sigma ieta ieta + # ################################################### + + # norm_variable = 
'sigmaietaieta' + # if 'QCDsigmaietaieta' in include_plots and not 'MuPlusJets' in treeName: + # print '---> sigmaietaieta' + # make_plot( channel, + # x_axis_title = '$%s$' % variables_latex['sigmaietaieta'], + # y_axis_title = 'Events/(%i GeV)' % binWidth(control_plots_bins['sigmaietaieta']), + # signal_region_tree = 'TTbar_plus_X_analysis/%s/FitVariables' % ( treeName ), + # control_region_tree = 'TTbar_plus_X_analysis/%s/FitVariables' % ( treeName ), + # branchName = 'sigmaIetaIeta', + # name_prefix = '%s_sigmaIetaIeta_' % channel, + # x_limits = control_plots_bins['sigmaietaieta'], + # y_max_scale = 1.5, + # nBins = len(control_plots_bins['sigmaietaieta'])-1, + # rebin = 1, + # legend_location = ( 0.95, 0.85 ), + # cms_logo_location = 'left', + # )