import glob
from math import sqrt

import numpy as np

# Angular parameters measured in each folding, in the order in which they
# appear in the corresponding input file.
pars = {
    0: ['S1s', 'S3', 'S6s', 'S9'],
    1: ['S1s', 'S3', 'S4'],
    2: ['S1s', 'S3', 'S5'],
    3: ['S1s', 'S3', 'S7'],
    4: ['S1s', 'S3', 'S8'],
}

# Input file for each folding (if more than one matches, take the first).
files = {}
for fold in pars:
    files[fold] = glob.glob('./SensitivityFromToys/*folding{}.txt'.format(fold))[0]

# Collect all uncertainties from the toys, keyed by (folding, angular
# parameter, q2 bin).  Each parameter block in the file is assumed to be one
# header row followed by 8 rows of "<q2 bin index> <uncertainty>".
u = {}
for fold in pars:
    d = np.genfromtxt(files[fold], comments='#')
    i = 0
    for angpar in pars[fold]:
        i += 1  # skip the header row of this parameter block
        for q2bin in range(8):
            assert d[i][0] == q2bin
            u[(fold, angpar, q2bin)] = d[i][1]
            i += 1

# Simple propagation of the uncertainties from S1s to FL and from S6s to AFB.
def FLerr(S1serr):
    return S1serr * 4 / 3

def AFBerr(S6serr):
    return S6serr * 3 / 4

def get_ordvals(year, scale):
    """Collect the uncertainties from the different foldings, in the order
    expected by the skeleton, scaled by `scale`."""
    ordvals = []
    # q2 bin 5 is excluded because it is too close to the ccbar resonances.
    for q2 in [0, 1, 2, 3, 4, 6, 7]:
        ordvals += [
            year,
            FLerr(u[(1, 'S1s', q2)]) * scale,
            u[(3, 'S3', q2)] * scale,
            u[(1, 'S4', q2)] * scale,
            u[(2, 'S5', q2)] * scale,
            AFBerr(u[(0, 'S6s', q2)]) * scale,
            u[(3, 'S7', q2)] * scale,
            u[(4, 'S8', q2)] * scale,
            u[(0, 'S9', q2)] * scale,
        ]
    return ordvals

with open('./measurements/Bp2Kstmumu_skeleton.yml', 'r') as myfile:
    skeleton = myfile.read()

# 2016: use the toy uncertainties as they are (no scaling).
ordvals = get_ordvals(2016, 1)
with open('measurements/Bp2Kstmumu_2016.yml', 'w') as text_file:
    print(skeleton.format(*ordvals), file=text_file)

# 2018 + K+pi0: scale the uncertainties down by the square root of the
# expected gain in statistics (luminosity ratio times an assumed factor 2
# from adding the K+pi0 channel).
lumi2016 = 5.2
lumi2018 = 9.0
addKppi0 = 2  # assume a factor 2 larger statistics from the K+pi0 channel
statfactor = lumi2018 / lumi2016 * addKppi0
ordvals = get_ordvals(2018, 1 / sqrt(statfactor))
with open('measurements/Bp2Kstmumu_2018andpi0.yml', 'w') as text_file:
    print(skeleton.format(*ordvals), file=text_file)
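
# Optional sanity check, not part of the original workflow: a minimal sketch
# assuming the skeleton uses anonymous positional '{}' placeholders filled in
# the same order as get_ordvals() produces its values (7 q2 bins x 9 values
# = 63 entries).  `check_skeleton` is a hypothetical helper, shown only as an
# illustration; call it as e.g. check_skeleton(skeleton, ordvals) before
# formatting.
def check_skeleton(skeleton_text, values):
    n_placeholders = skeleton_text.count('{}')
    if n_placeholders != len(values):
        raise ValueError('skeleton has {} placeholders but {} values were given'
                         .format(n_placeholders, len(values)))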