/**
 * @file toystudy.cc
 * @brief Toy-study driver: generates pseudo-experiments, fits them, and summarises the pulls.
 * @author Renata Kopecna (because I find 500 lines of functions defined in a header disgusting)
 * @date 2021-02-25
 *
 */

#include <toystudy.hh>

#include <event.hh>
#include <parameters.hh>
#include <funcs.hh>
#include <pdf.hh>
#include <options.hh>
#include <fitter.hh>
#include <generator.hh>
#include <multifit.hh>

//standard-library and ROOT headers used directly below
//(they may also arrive transitively via the project headers above)
#include <cmath>
#include <fstream>
#include <iomanip>
#include <iostream>
#include <sstream>

#include <TMatrixD.h>
#include <TRandom3.h>
#include <TTree.h>

//Convenience overload: wraps a single sample into vectors and forwards to the
//multi-sample toy study below.
void fcnc::toystudy::toy(unsigned int nevents, unsigned int nruns, pdf* prob, parameters* params, generator* gen){
    std::vector<unsigned int> the_nevents;
    the_nevents.push_back(nevents);

    std::vector<pdf*> probs;
    probs.push_back(prob);

    std::vector<parameters*> the_params;
    the_params.push_back(params);

    std::vector<generator*> gens;
    gens.push_back(gen);

    toy(the_nevents, nruns, probs, the_params, gens);
}

void fcnc::toystudy::toy(std::vector<unsigned int> nevents, unsigned int nruns, std::vector<pdf*> pdfs, std::vector<parameters*> params, std::vector<generator*> gens){

    //Open the LaTeX note file for the summary table
    std::ofstream myFile;
    open_Latex_noteFile(latex_toyFile(), myFile);

    spdlog::info("Starting toy study on toy data");
    //unsigned int run_no = 0;

    spdlog::info("Will run over {0:d} runs with {1:d}", nruns, nevents.at(0));
    if (spdlog_info()){
        //additional sample sizes are appended with std::cout, so this message is split across log lines
        for (unsigned int i=1; i<nevents.size(); i++) std::cout << "/" << nevents.at(i);
    }
    spdlog::info(" events each.");

    //need this for jitter on constraints
    TRandom3* rnd = new TRandom3();
    rnd->SetSeed(0);//non-static seed

    //initialisation for histos and arrays
    unsigned int nparams = 0;
    for (unsigned int i=0; i<params.size(); i++){
        nparams += params.at(i)->nparameters();
    }

    std::vector<double> pull_width(nparams, 0.0);
    std::vector<double> pull_mean(nparams, 0.0);
    std::vector<double> pull_width_sigma(nparams, 0.0);
    std::vector<double> pull_mean_sigma(nparams, 0.0);
    std::vector<double> pull_chisquared(nparams, 0.0);
    std::vector<double> value_width(nparams, 0.0);
    std::vector<double> value_mean(nparams, 0.0);
    std::vector<double> value_width_sigma(nparams, 0.0);
    std::vector<double> value_mean_sigma(nparams, 0.0);
    std::vector<double> value_chisquared(nparams, 0.0);
    std::vector<double> error_width(nparams, 0.0);
    std::vector<double> error_mean(nparams, 0.0);
    std::vector<double> error_width_sigma(nparams, 0.0);
    std::vector<double> error_mean_sigma(nparams, 0.0);
    std::vector<double> error_chisquared(nparams, 0.0);
    std::vector<std::vector<double> > errors(nparams, std::vector<double>());
    std::vector<std::vector<double> > errors_up(nparams, std::vector<double>());
    std::vector<std::vector<double> > errors_down(nparams, std::vector<double>());
    std::vector<std::vector<double> > values(nparams, std::vector<double>());
    std::vector<std::vector<double> > nominal_errors(nparams, std::vector<double>());
    std::vector<std::vector<double> > nominal_values(nparams, std::vector<double>());
    std::vector<int> return_values(nruns);
    std::vector<std::vector<std::vector<double> > > corrs(nparams, std::vector<std::vector<double> >());
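
    //The vectors above collect, for each parameter, the fitted value, the fit errors and the
    //parameter correlations of every toy run; the pull/value/error summaries are filled from
    //them after the last run. The pull is conventionally (fitted - generated value)/(fitted error):
    //for an unbiased fit with correct error estimates its distribution should have mean 0 and width 1.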

    fcnc::fitter fit(opts);
    for (unsigned int i=0; i<pdfs.size(); i++){
        pdfs.at(i)->init(params.at(i));
    }
    fit.set_common_parameters(common_params);

    for (unsigned int i=0; i < nruns; i++) {
        spdlog::info("Starting run no. {0:d}", i + 1);
        //run_no = i;

        //generate a toy data set for every pdf/sample
        std::vector< std::vector<event> > temp_events;
        for (unsigned int j=0; j<gens.size(); j++){
            std::vector<event> ev = gens.at(j)->generate(nevents.at(j), params.at(j), pdfs.at(j));
            temp_events.push_back(ev);
        }
        std::vector< std::vector<event>* > events;
        for (unsigned int j=0; j<gens.size(); j++){
            events.push_back(&temp_events.at(j));
        }

        //run number as a string, passed to the fitter
        std::string num;
        std::stringstream out;
        out << (i+1);
        num = out.str();
        int result = 0;

//in the case of constraints, jitter the mean value
|
|
for (unsigned int j = 0; j < params.size(); j++){
|
|
for (unsigned int k = 0; k < params.at(j)->nparameters(); k++){
|
|
parameter* param = params.at(j)->get_parameter(k);
|
|
double shift = rnd->Rndm() > 0.5 ? -fabs(rnd->Gaus(0.0, param->get_previous_error_low())) : fabs(rnd->Gaus(0.0, param->get_previous_error_high()));//TODO, actually should probably use PDG method
|
|
if (param->get_gaussian_constraint()){
|
|
param->set_previous_measurement(param->get_start_value() + shift);//rnd->Gaus(0.0, sigma));
|
|
}
|
|
}
|
|
}
|
|
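
        //The loop above smears the constraint means per toy, so each pseudo-experiment sees its
        //own "measured" central value drawn around the generation value with the quoted
        //asymmetric errors; without this, constrained parameters would show artificially narrow
        //pull distributions. The block below does the same, including correlations, for the BSZ
        //form-factor coefficients.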

        if (opts->eos_bsz_ff){
            TMatrixD mU = fcnc::get_bsz_mu_invcov();

            //draw 21 independent standard-normal numbers, one per form-factor coefficient
            std::vector<double> rndvec(21, 0.0);
            for (unsigned int i=0; i<21; i++){
                rndvec.at(i) = rnd->Gaus(0.0, 1.0);
            }
            //turn them into correlated shifts using the matrix returned by get_bsz_mu_invcov()
            std::vector<double> shiftvec(21, 0.0);
            for (unsigned int i=0; i<21; i++){
                for (unsigned int j=0; j<21; j++){
                    shiftvec.at(i) += mU(j,i)*rndvec.at(j);//new order, corrected and tested
                }
            }
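
            //This is the standard recipe for correlated Gaussian variations: shift = U^T z,
            //with z a vector of independent standard normals, has covariance U^T U. So mU is
            //presumably a matrix square root (e.g. Cholesky factor) of the form-factor
            //covariance, despite the "_invcov" suffix in the accessor's name.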
            for (unsigned int j = 0; j < params.size(); j++){
                if (params.at(j)->get_parameter("a0_0") != 0){
                    //TODO: jitter according to covariance matrix given
                    //std::vector<double> coeffvec(21, 0.0);// = nominal;//fcnc::legendre_coefficients_nominal;
                    for (unsigned int i=0; i<21; i++){
                        parameter* param = 0;
                        switch (i) {
                            case 0 : param = params.at(j)->get_parameter("a0_0"); break;
                            case 1 : param = params.at(j)->get_parameter("a0_1"); break;
                            case 2 : param = params.at(j)->get_parameter("a0_2"); break;
                            case 3 : param = params.at(j)->get_parameter("a1_0"); break;
                            case 4 : param = params.at(j)->get_parameter("a1_1"); break;
                            case 5 : param = params.at(j)->get_parameter("a1_2"); break;
                            //case 6 : param = params.at(j)->get_parameter(""); break;
                            case 7 : param = params.at(j)->get_parameter("a12_1"); break;
                            case 8 : param = params.at(j)->get_parameter("a12_2"); break;
                            case 9 : param = params.at(j)->get_parameter("v_0"); break;
                            case 10 : param = params.at(j)->get_parameter("v_1"); break;
                            case 11 : param = params.at(j)->get_parameter("v_2"); break;
                            case 12 : param = params.at(j)->get_parameter("t1_0"); break;
                            case 13 : param = params.at(j)->get_parameter("t1_1"); break;
                            case 14 : param = params.at(j)->get_parameter("t1_2"); break;
                            //case 15 : param = params.at(j)->get_parameter(""); break;
                            case 16 : param = params.at(j)->get_parameter("t2_1"); break;
                            case 17 : param = params.at(j)->get_parameter("t2_2"); break;
                            case 18 : param = params.at(j)->get_parameter("t23_0"); break;
                            case 19 : param = params.at(j)->get_parameter("t23_1"); break;
                            case 20 : param = params.at(j)->get_parameter("t23_2"); break;
                        }
                        if (param != 0){
                            param->set_previous_measurement(param->get_start_value() + shiftvec.at(i));
                        }
                    }
                }
            }
        }

        //perform the actual fit
        result = fit.fit(pdfs, params, events, num);
        return_values.at(i) = result;

        //The return value encodes the covariance-matrix quality in the hundreds digit and the
        //MIGRAD status in the last two digits (decoded into the migrad/status_cov branches below),
        //so 300 presumably means a converged MIGRAD with full covariance accuracy.
        if (result != 300 && opts->repeat_on_fail) {
            spdlog::error("Fit failed, repeating run");
            for (unsigned int j = 0; j < params.size(); j++){
                params.at(j)->reset_parameters();
            }
            i--;
            continue;
        }

        //extract the parameters from the fit
        unsigned int start_param = 0;
        for (unsigned int j = 0; j < params.size(); j++) {
            for (unsigned int k = 0; k < params.at(j)->nparameters(); k++){
                parameter* param = params.at(j)->get_parameter(k);
                unsigned int idx = start_param+k;
                values.at(idx).push_back(param->get_value());
                errors.at(idx).push_back(param->get_error());
                errors_up.at(idx).push_back(param->get_error_up());
                errors_down.at(idx).push_back(param->get_error_down());
                corrs.at(idx).push_back(param->correlations);//for some, this is empty
            }
            start_param += params.at(j)->nparameters();
        }
        //todo: what's this? //I don't know, David. Sincerely, Renata
        //(the nominal values/errors are only zero-filled placeholders)
        for (unsigned int j = 0; j < nparams; j++){
            nominal_values.at(j).push_back(0.0);
            nominal_errors.at(j).push_back(0.0);
        }

        //Update the pull/value/error summaries once all runs are finished.
        //if ((i+1 >= 100) && (((i+1)%100 == 0) || (i+1 == nruns)))
        if (i+1 == nruns) {
            //This should be done a little differently:
            //recreate histos every time, use min and max values as axis.
            start_param = 0;
            for (unsigned int j = 0; j < params.size(); j++){//this plots the pull histos
                for (unsigned int k = 0; k < params.at(j)->nparameters(); k++){
                    parameter* par = params.at(j)->get_parameter(k);
                    if (par->get_step_size() != 0.0) {//only floating parameters
                        unsigned int idx = start_param+k;
                        update_pull(par, values.at(idx),
                                    errors.at(idx),
                                    pull_mean[idx], pull_mean_sigma[idx],
                                    pull_width[idx], pull_width_sigma[idx],
                                    pull_chisquared[idx]);
                        update_value(par, values.at(idx), errors.at(idx),
                                     value_mean[idx], value_mean_sigma[idx],
                                     value_width[idx], value_width_sigma[idx],
                                     value_chisquared[idx]);
                        update_error(par, values.at(idx), errors.at(idx),
                                     error_mean[idx], error_mean_sigma[idx],
                                     error_width[idx], error_width_sigma[idx],
                                     error_chisquared[idx]);
                    }
                }
                start_param += params.at(j)->nparameters();
            }
        }

        for (unsigned int j = 0; j < params.size(); j++){
            params.at(j)->reset_parameters();
        }

        spdlog::info("Run {0:d} finished", i + 1);
    }
    //delete rnd;

    //save result trees to the ROOT file; "output" is presumably a TFile* member of toystudy
    //TFile* output;
    output->cd();
    int param_index = 0;
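
    //One TTree per floating parameter, one entry per toy run: the fitted value and errors,
    //the fit status, the (placeholder) nominal value/error, and this parameter's stored
    //correlations.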
    unsigned int start_param = 0;
    for (unsigned int j = 0; j < params.size(); j++){
        for (unsigned int k = 0; k < params.at(j)->nparameters(); k++){
            parameter* param = params.at(j)->get_parameter(k);
            unsigned int idx = start_param+k;
            //for (unsigned int j = 0; j < nparams; j++)//this gives nice output
            if (param->get_step_size() != 0.0) {//todo common parameters, only save once...
                std::string parname(param->get_name());
                std::string pardesc(param->get_description());
                TTree* t = new TTree(parname.c_str(), pardesc.c_str());
                double value, error, error_up, error_down, start_value, nominal_value, nominal_error;
                int run, migrad, status_cov;
                //variable-length array (a GCC/Clang extension, not standard C++), sized by the first run
                double tmp_corr[corrs.at(idx).at(0).size()];
                t->Branch("run",&run,"run/I");
                t->Branch("value",&value,"value/D");
                t->Branch("error",&error,"error/D");
                t->Branch("error_up",&error_up,"error_up/D");
                t->Branch("error_down",&error_down,"error_down/D");
                t->Branch("start_value",&start_value,"start_value/D");
                t->Branch("migrad",&migrad,"migrad/I");
                t->Branch("status_cov",&status_cov,"status_cov/I");
                t->Branch("nominal_value",&nominal_value,"nominal_value/D");
                t->Branch("nominal_error",&nominal_error,"nominal_error/D");
                t->Branch("index",&param_index, "index/I");
                std::string corr_string = "correlations";
                std::stringstream corr_stream;
                corr_stream << "correlations[" << corrs.at(idx).at(0).size() << "]/D";
                t->Branch("correlations",&tmp_corr,corr_stream.str().c_str());
                for (unsigned int i=0; i<nruns; i++){
                    run = i;
                    if (param->is_blind()){
                        value = values.at(idx).at(i) + param->get_blinding_delta();
                    }
                    else{
                        value = values.at(idx).at(i);
                    }
                    error = errors.at(idx).at(i);
                    if (param->is_blind()){
                        nominal_value = nominal_values.at(idx).at(i) + param->get_blinding_delta();
                    }
                    else{
                        nominal_value = nominal_values.at(idx).at(i);
                    }
                    nominal_error = nominal_errors.at(idx).at(i);
                    error_up = errors_up.at(idx).at(i);
                    error_down = errors_down.at(idx).at(i);
                    if (param->is_blind()){
                        start_value = param->get_start_value() + param->get_blinding_delta();
                    }
                    else{
                        start_value = param->get_start_value();
                    }
                    //decode the fit return value: last two digits = MIGRAD status, rest = covariance status
                    migrad = return_values.at(i) % 100;
                    status_cov = return_values.at(i) / 100;
                    //corr = corrs.at(idx).at(i);
                    for (unsigned int l = 0; l < corrs.at(idx).at(i).size(); l++){
                        tmp_corr[l] = corrs.at(idx).at(i).at(l);
                    }
                    t->Fill();
                }
                t->Write();
                delete t;
                param_index++;
            }
        }
        start_param += params.at(j)->nparameters();
    }

    //latex output: skip the summary table if any parameter set is blind
    bool blind = false;
    for (unsigned int j = 0; j < params.size(); j++){
        if (params.at(j)->is_blind()) blind = true;
    }
spdlog::info("Toy Study results");
|
|
if (!blind){
|
|
myFile <<"\\begin{tabular}{|cccccccc|}\\hline" << std::endl;
|
|
myFile <<"\\# & parameter & Mean Value & Mean Width & Mean Error & Error Width & Pull Mean & Pull Width \\\\ \\hline \\hline" << std::endl;
|
|
start_param = 0;
|
|
for (unsigned int j = 0; j < params.size(); j++)
|
|
{
|
|
for (unsigned int k = 0; k < params.at(j)->nparameters(); k++)
|
|
{
|
|
parameter* param = params.at(j)->get_parameter(k);
|
|
unsigned int idx = start_param+k;
|
|
if (param->get_step_size() != 0.0){
|
|
std::string partex = param->get_description();
|
|
myFile <<std::setw(2) << idx << " & $" << std::setw(22) << partex << "$ & $"
|
|
<< std::fixed << std::setprecision(4)
|
|
<< std::setw(7) << value_mean[idx] << " \\pm " << std::setw(7) << value_mean_sigma[idx] << "$ & $"
|
|
<< std::setw(7) << value_width[idx] << " \\pm " << std::setw(7) << value_width_sigma[idx] << "$ & $"
|
|
<< std::setw(7) << error_mean[idx] << " \\pm " << std::setw(7) << error_mean_sigma[idx] << "$ & $"
|
|
<< std::setw(7) << error_width[idx] << " \\pm " << std::setw(7) << error_width_sigma[idx] << "$ & $"
|
|
<< std::setw(7) << pull_mean[idx] << " \\pm " << std::setw(7) << pull_mean_sigma[idx] << "$ & $"
|
|
<< std::setw(7) << pull_width[idx] << " \\pm " << std::setw(7) << pull_width_sigma[idx] << "$ \\\\"
|
|
<< std::endl;
|
|
}
|
|
}
|
|
start_param += params.at(j)->nparameters();
|
|
}
|
|
myFile <<"\\hline\\end{tabular}" << std::endl;
|
|
}
|
|
|
|
delete rnd;
|
|
|
|
//Close Latex file
|
|
myFile.close();
|
|
|
|
}
|
|
|
|
|