Angular analysis of B+->K*+(K+pi0)mumu
You cannot select more than 25 topics. Topics must start with a letter or number, can include dashes ('-'), and can be up to 35 characters long.

725 lines
30 KiB

  1. /**********************************************************************************
  2. * Project : TMVA - a Root-integrated toolkit for multivariate data analysis *
  3. * Package : TMVA *
  4. * Exectuable: TMVAClassificationApplication *
  5. * *
  6. * This macro provides a simple example on how to use the trained classifiers *
  7. * within an analysis module *
  8. **********************************************************************************/
//Calculating the TMVA response for all events
  10. //David Gerick
  11. //Renata Kopecna
  12. #include <cstdlib>
  13. #include <vector>
  14. #include <iostream>
  15. #include <map>
  16. #include <string>
  17. #include <sstream>
  18. #include "TChain.h"
  19. #include "TFile.h"
  20. #include "TMath.h"
  21. #include "TTree.h"
  22. #include "TString.h"
  23. #include "TSystem.h"
  24. #include "TROOT.h"
  25. #include "TStopwatch.h"
  26. #include "GlobalFunctions.hh"
  27. #include "Paths.hpp"
  28. #include "MVAclass.hpp"
  29. //#include "HeidelbergFitter/LHCbStyle.h"
  30. //#include "./MVA_b2kmm.cc"
  31. #if not defined(__CINT__) || defined(__MAKECINT__)
  32. #include "TMVA/TMVAGui.h"
  33. #include "TMVA/Tools.h"
  34. #include "TMVA/Reader.h"
  35. #include "TMVA/MethodCuts.h"
  36. #endif
  37. using namespace TMVA;
//////////////////////////////////////////////////////
/// TMVAClassificationApplication()
/// For each event the BDTresponse is calculated and saved in a new branch of the tree.
/// Based on the training of the BDT, whose information is stored in the xml file,
/// the BDTresponse is calculated for each event. The splitting of the data set has to be
/// identical to the splitting used in the training. Splitting can be done in years, Kshort
/// decays inside (LL) or outside the Velo (DD).
/// The used variables (ll. 195) must be in the same order as they are introduced to the
/// factory in MVA_b2kmm.cc, otherwise the program will not work.
///
/// TMVAClassApp()
/// Perform the TMVAClassificationApplication function with predefined values and strings
/// for the two subdecays!
///
/// TMVA2MC_AfterPreSelection()
/// Same function as above, but this will apply the BDT to the MC sample.
/// Apply to the tuple after the pre-selection!
/// Optional for the truth-matched or un-matched tree.
///
/// TMVA2MC_AfterReweighting()
/// Same function as above, but this will apply the BDT to the MC sample.
/// Apply to the tuple after the reweighting, i.e. the BDTinput tuple!
/// Optional for the truth-matched or un-matched tree.
///
/// TMVA2AllMC()
/// Assign a BDTresponse to the MC for all years and magnet polarities at all stages and
/// for truth-matched and non-matched trees!
///
  66. string InputToReader(string ReaderName){
  67. if (ReaderName.find(":=", 0) == string::npos){
  68. coutDebug("Reader name: " + ReaderName);
  69. return ReaderName;
  70. }
  71. std::size_t position = ReaderName.find(":=");
  72. //string tmp (ReaderName.begin(),ReaderName.begin()+position);
  73. string tmp (ReaderName.begin()+position+2,ReaderName.end());
  74. coutDebug("Reader name: " + tmp);
  75. return tmp;
  76. }
/// Apply the trained TMVA classifier(s) to one data or MC sample.
///
/// A copy of the input tree is written to the BDT-output file with two new branches:
/// "<method>response" (the classifier response, double) and "RandomSubSet"
/// (+1 or -1, a reproducible random split of the sample — fixed seed 13).
/// For the Kshort channel with SplitDDandLL, two separate readers (DD/LL weight
/// files) are used and the per-event "KshortDecayInVeLo" flag picks which one.
///
/// @param Run               data-taking run number (validated by checkRun)
/// @param year              data-taking year (validated by checkYear)
/// @param KshortDecayInVelo selects the LL/DD variant of the *output* file path
/// @param MC                process MC: reads the "DecayTreeTruthMatched" tree instead of "DecayTree"
/// @param ReferenceChannel  MC sample is the reference channel
/// @param PHSP              MC sample is phase-space
/// @param UseLowQ2Range     when the training is split in q2, pick low-q2 weight files/output
/// @param IncSample         inclusive MC: paths get "KplusPi0Resolved" replaced by "Inclusive"
/// @return 1 on success, 0 on any configuration or booking error
Int_t TMVAClassificationApplication_b2kmm(Int_t Run = 1, Int_t year = 2011, bool KshortDecayInVelo = false,
                                          bool MC = false, bool ReferenceChannel = false, bool PHSP = false, bool UseLowQ2Range = false, bool IncSample = false){
#ifdef __CINT__
  gROOT->ProcessLine( ".O0" ); // turn off optimization in CINT
#endif
  //---------------------------------------------------------------
  //NOT NEEDED AS IT LOOKS LIKE! (only if you have different BDTs for different years)
  bool SplitYears = false;
  //---------------------------------------------------------------
  //Check the sample flags are a consistent combination
  if (!checkMC(MC,ReferenceChannel,PHSP,true)) return 0;
  //Check MVA config
  if(!checkRun(Run)) return 0;
  //Check if correct year was given (if Inc, just load the files, screw it)
  if (!checkYear(year,MC,ReferenceChannel,PHSP)) return 0;
  //---------------------------------------------------------------
  // This loads the TMVA library
  TMVA::Tools::Instance();
  // Default MVA methods; only the entries set to 1 (or requested via TMVAmethod
  // below) are booked from their weight files.
  std::map<string,int> Use;
  // --- Cut optimisation
  Use["Cuts"] = 1;
  Use["CutsD"] = 1;
  Use["CutsPCA"] = 0;
  Use["CutsGA"] = 0;
  Use["CutsSA"] = 0;
  //
  // --- 1-dimensional likelihood ("naive Bayes estimator")
  Use["Likelihood"] = 1;
  Use["LikelihoodD"] = 0; // the "D" extension indicates decorrelated input variables (see option strings)
  Use["LikelihoodPCA"] = 1; // the "PCA" extension indicates PCA-transformed input variables (see option strings)
  Use["LikelihoodKDE"] = 0;
  Use["LikelihoodMIX"] = 0;
  //
  // --- Multidimensional likelihood and Nearest-Neighbour methods
  Use["PDERS"] = 1;
  Use["PDERSD"] = 0;
  Use["PDERSPCA"] = 0;
  Use["PDEFoam"] = 1;
  Use["PDEFoamBoost"] = 0; // uses generalised MVA method boosting
  Use["KNN"] = 1; // k-nearest neighbour method
  //
  // --- Linear Discriminant Analysis
  Use["LD"] = 1; // Linear Discriminant identical to Fisher
  Use["Fisher"] = 0;
  Use["FisherG"] = 0;
  Use["BoostedFisher"] = 0; // uses generalised MVA method boosting
  Use["HMatrix"] = 0;
  //
  // --- Function Discriminant analysis
  Use["FDA_GA"] = 1; // minimisation of user-defined function using Genetics Algorithm
  Use["FDA_SA"] = 0;
  Use["FDA_MC"] = 0;
  Use["FDA_MT"] = 0;
  Use["FDA_GAMT"] = 0;
  Use["FDA_MCMT"] = 0;
  //
  // --- Neural Networks (all are feed-forward Multilayer Perceptrons)
  Use["MLP"] = 0; // Recommended ANN
  Use["MLPBFGS"] = 0; // Recommended ANN with optional training method
  Use["MLPBNN"] = 1; // Recommended ANN with BFGS training method and bayesian regulator
  Use["CFMlpANN"] = 0; // Deprecated ANN from ALEPH
  Use["TMlpANN"] = 0; // ROOT's own ANN
  //
  // --- Support Vector Machine
  Use["SVM"] = 1;
  //
  // --- Boosted Decision Trees
  Use["BDT"] = 1; // uses Adaptive Boost
  Use["BDTG"] = 0; // uses Gradient Boost
  Use["BDTB"] = 0; // uses Bagging
  Use["BDTD"] = 0; // decorrelation + Adaptive Boost
  //
  // --- Friedman's RuleFit method, ie, an optimised series of cuts ("rules")
  Use["RuleFit"] = 1;
  // ---------------------------------------------------------------
  Use["Plugin"] = 0;
  Use["Category"] = 0;
  Use["SVM_Gauss"] = 0;
  Use["SVM_Poly"] = 0;
  Use["SVM_Lin"] = 0;
  coutInfo("==> Start TMVAClassificationApplication");
  // Select methods: when the global TMVAmethod is non-empty, disable everything
  // above and enable only the comma-separated method names it lists.
  if (TMVAmethod != "") {
    for (std::map<string,int>::iterator it = Use.begin(); it != Use.end(); it++) it->second = 0;
    std::vector<TString> mlist = gTools().SplitString( TMVAmethod, ',' );
    for (UInt_t i=0; i<mlist.size(); i++) {
      string regMethod(mlist[i]);
      if (Use.find(regMethod) == Use.end()) {
        coutERROR("Method \"" + regMethod + "\" not known in TMVA under this name. Choose among the following:");
        for (std::map<string,int>::iterator it = Use.begin(); it != Use.end(); it++) coutERROR(it->first);
        return 0;
      }
      Use[regMethod] = 1;
    }
  }
  //Not completely sure what happens when one puts several methods as input to regMethod
  // --------------------------------------------------------------------------------------------------
  // --- Create the Reader objects: one combined reader, plus dedicated DD/LL
  //     readers used only for the Kshort channel when SplitDDandLL is set.
  TMVA::Reader *reader = new TMVA::Reader( "!Color:!Silent" );
  TMVA::Reader *readerDD = new TMVA::Reader( "!Color:!Silent" );
  TMVA::Reader *readerLL = new TMVA::Reader( "!Color:!Silent" );
  // Create a set of variables and declare them to the reader
  // - the variable names MUST correspond in name and type to those given in the weight file(s) used
  MVA_variables * InputVariables = NULL;
  int Nvar = 0;
  int Nbranch = 0;
  MVA_variables * InputVariablesDD = NULL;
  MVA_variables * InputVariablesLL = NULL;
  int NvarDD = 0;
  int NvarLL = 0;
  int NbranchDD = 0;
  int NbranchLL = 0;
  if(Kst2Kspiplus && SplitDDandLL){
    // Separate variable lists for downstream (DD) and long (LL) Kshort tracks
    InputVariablesDD = new MVA_variables("DD");
    InputVariablesDD->print();
    NvarDD = InputVariablesDD->NumberOfVariables();
    NbranchDD = InputVariablesDD->NumberOfBranches();
    InputVariablesLL = new MVA_variables("LL");
    InputVariablesLL->print();
    NvarLL = InputVariablesLL->NumberOfVariables();
    NbranchLL = InputVariablesLL->NumberOfBranches();
  }
  else {
    InputVariables = new MVA_variables("");
    InputVariables->print();
    Nvar = InputVariables->NumberOfVariables();
    Nbranch = InputVariables->NumberOfBranches();
  }
  // NOTE(review): runtime-sized stack arrays are a compiler extension (VLA),
  // not standard C++ — works under ROOT/gcc, but std::vector would be portable.
  Float_t f_varDD[NvarDD];
  Float_t f_varLL[NvarLL];
  Float_t f_var[Nvar];
  //Feeding the reader(s): each reader variable is bound to a float slot that the
  //event loop fills before EvaluateMVA is called.
  if(Kst2Kspiplus && SplitDDandLL){
    //DDtracks
    for (int i = 0; i < NvarDD; i++)
      readerDD->AddVariable( InputToReader(InputVariablesDD->AllVariables[i].ReaderName), &f_varDD[i]) ;
    //LLtracks
    for (int i = 0; i < NvarLL; i++)
      readerLL->AddVariable( InputToReader(InputVariablesLL->AllVariables[i].ReaderName), &f_varLL[i]) ;
  }
  else
    for (int i = 0; i < Nvar; i++){
      reader->AddVariable( InputToReader(InputVariables->AllVariables[i].ReaderName), &f_var[i]);
    }
  // --- Book the MVA methods: weight-file prefixes encode channel, (year|run) and
  //     optionally the q2 region / track type, matching the names used in training.
  TString dir = "weights/";
  TString prefix, prefixDD, prefixLL;
  if(Kst2Kspiplus){
    if(SplitYears){
      prefix = Form("%i_B2Kstmumu_%s", year, TheDecay.c_str());
      prefixLL = Form("%i_B2Kstmumu_%s_LL", year, TheDecay.c_str());
      prefixDD = Form("%i_B2Kstmumu_%s_DD", year, TheDecay.c_str());
    }
    else {
      prefix = Form("B2Kstmumu_%s_Run%i", TheDecay.c_str(), Run);
      prefixLL = Form("B2Kstmumu_%s_LL_Run%i", TheDecay.c_str(), Run);
      prefixDD = Form("B2Kstmumu_%s_DD_Run%i", TheDecay.c_str(), Run);
    }
  }
  else{ //pi0 channel(s)
    if(SplitYears){
      prefix = Form("%i_B2Kstmumu_%s%s", year, TheDecay.c_str(),SplitInQ2? (UseLowQ2Range ? "_lowQ2" : "_highQ2") : "");
    }
    else{
      prefix = Form("B2Kstmumu_%s_Run%i%s", TheDecay.c_str(), Run,SplitInQ2? (UseLowQ2Range ? "_lowQ2" : "_highQ2") : "");
    }
  }
  // Book method(s) and XML config file from BDT training and testing
  for (std::map<string,int>::iterator it = Use.begin(); it != Use.end(); it++) {
    if (it->second) {
      TString methodName = TString(it->first) + TString(" method");
      TString weightfile, weightfileDD, weightfileLL;
      if(Kst2Kspiplus){
        if(SplitDDandLL){
          weightfileDD = dir + prefixDD + TString("_") + TString(it->first) + TString(".weights.xml");
          weightfileLL = dir + prefixLL + TString("_") + TString(it->first) + TString(".weights.xml");
          readerDD->BookMVA( methodName, weightfileDD );
          readerLL->BookMVA( methodName, weightfileLL );
        }
        else { //combined DD and LL tracks for Kshort
          weightfile = dir + prefix + TString("_") + TString(it->first) + TString(".weights.xml");
          reader->BookMVA( methodName, weightfile );
        }
      }
      else{ //pi0 channel(s)
        weightfile = dir + prefix + TString("_") + TString(it->first)+ TString(".weights.xml");
        reader->BookMVA( methodName, weightfile );
      }
    }
  }
  // Book output histograms of the classifier response (one per enabled method).
  // NOTE(review): these histograms are filled but never written or deleted here.
  UInt_t nbin = 100;
  TH1D *histBdt(0), *histBdtG(0), *histBdtD(0), *histMlp(0);
  if (Use["BDT"]) histBdt = new TH1D( "MVA_BDT", "MVA_BDT", nbin, -0.8, 0.8 );
  if (Use["BDTD"]) histBdtD = new TH1D( "MVA_BDTD", "MVA_BDTD", nbin, -0.8, 0.8 );
  if (Use["BDTG"]) histBdtG = new TH1D( "MVA_BDTG", "MVA_BDTG", nbin, -1.0, 1.0 );
  if (Use["MLP"]) histMlp = new TH1D( "MVA_MLP", "MVA_MLP", nbin, -1.0, 1.0 );
  // Prepare the input tree.
  //If years are not split, take one Run at a time and set the 'year-variable' accordingly to the Run
  Int_t RunYear = 0;
  if(!SplitYears) RunYear = year;
  TChain* theTree = new TChain(MC ? "DecayTreeTruthMatched" : "DecayTree");
  string inputPath = GetBDTinputFile(RunYear,MC,ReferenceChannel,PHSP,false);
  if(SplitYears){
    if(SplitDDandLL && Kst2Kspiplus){ //@David please check this, not sure if this is correct!
      theTree->Add(GetBDTinputFile(year,MC,ReferenceChannel,PHSP,true).c_str());
      theTree->Add(GetBDTinputFile(year,MC,ReferenceChannel,PHSP,false).c_str());
    }
    else theTree->Add(GetBDTinputFile(year,MC,ReferenceChannel,PHSP,false).c_str());
    coutDebug("Reading " + GetBDTinputFile(year,MC,ReferenceChannel,PHSP,false));
  }
  else{
    if(SplitDDandLL && Kst2Kspiplus){
      theTree->Add(GetBDTinputFile(RunYear,MC,ReferenceChannel,PHSP,true).c_str());
      theTree->Add(GetBDTinputFile(RunYear,MC,ReferenceChannel,PHSP,false).c_str());
    }
    else{
      if (IncSample) replace(inputPath,"KplusPi0Resolved","Inclusive");
      theTree->Add(inputPath.c_str());
      coutDebug("Reading " + inputPath);
    }
  }
  // NOTE(review): `new` never returns nullptr, so this check cannot fire; a
  // missing file would only show up later when reading the chain.
  if(theTree == nullptr){
    coutERROR("File(s) not found! Exit!");
    return 0;
  }
  coutInfo("--- TMVAClassificationApp : Using input tree: " + string(theTree->GetName()));
  // --- Event loop preparation
  // - the branch names here have to correspond to the input tree
  // - the float slots fed to the readers above are recomputed from these
  //   double-valued branches via the per-variable formulas (see below)
  coutInfo("--- Select signal sample");
  //Print which branches are used from the TTree
  if (Kst2Kspiplus && SplitDDandLL){
    coutDebug("List of used branches: ");
    std::cout << std::endl;
    coutDebug("DD TRACKS: ");
    for (vector<string>::iterator tracksIter1 = InputVariablesDD->AllBranches.begin(); tracksIter1 !=InputVariablesDD->AllBranches.end();++tracksIter1)
      coutDebug("DD " + (*tracksIter1));
    std::cout << std::endl;
    coutDebug(" LL TRACKS: ");
    for (vector<string>::iterator tracksIter1 = InputVariablesLL->AllBranches.begin(); tracksIter1 !=InputVariablesLL->AllBranches.end();++tracksIter1)
      coutDebug("LL " + (*tracksIter1) );
    std::cout << std::endl;
  }
  else{
    coutDebug("List of used branches: ");
    coutDebug("\t\t Note that Branches != reader!");
    coutDebug("\t\t Number of branches: " + to_string(Nbranch));
    for (vector<string>::iterator tracksIter1 = InputVariables->AllBranches.begin(); tracksIter1 !=InputVariables->AllBranches.end();++tracksIter1) coutDebug( (*tracksIter1) );
  }
  Int_t EventIsLL = 0; // per-event flag from branch "KshortDecayInVeLo": 1 = LL, 0 = DD
  Double_t d_var[Nbranch];   // NOTE(review): VLAs again (see above)
  Double_t d_varDD[NbranchDD];
  Double_t d_varLL[NbranchLL];
  //because we have one tree for DD and LL tracks and only one of the two Double_t array items can be addressed to one branch, we copy the doubles afterwards manually.
  //the following pair is created from checking the string names of the variables in both track vectors and saves the link (i.e. item 0 in DD copied to item 3 in LL: b_VarLL[3] = b_VarDD[0] )
  std::vector< std::pair<int, int> > DoubleUsedVariablesInDDandLL;
  DoubleUsedVariablesInDDandLL.clear();
  int counter = 0; //Bruteforce, but oh well
  //link double variables to branches
  if (Kst2Kspiplus && SplitDDandLL){
    //DD tracks: every DD branch gets its own slot
    counter = 0;
    for (vector<string>::iterator tracksIterDD = InputVariablesDD->AllBranches.begin(); tracksIterDD !=InputVariablesDD->AllBranches.end();++tracksIterDD){
      theTree->SetBranchAddress((*tracksIterDD).c_str(), &d_varDD[counter]) ;
      counter ++;
    }
    //LL tracks
    counter = 0;
    for (vector<string>::iterator tracksIterLL = InputVariablesLL->AllBranches.begin(); tracksIterLL !=InputVariablesLL->AllBranches.end();++tracksIterLL){
      //Since some variables are used in DD and LL track BDT training, we have to find out which variableID in DD tracks this is, and later copy the value from b_varDD to b_varLL manually
      bool VariableAlreadyAssignInDDtracks = false;
      int varIDinDDtracks = 0;
      for (vector<string>::iterator tracksIterDD = InputVariablesDD->AllBranches.begin(); tracksIterDD !=InputVariablesDD->AllBranches.end();++tracksIterDD){
        if(strcmp((*tracksIterDD).c_str(), (*tracksIterLL).c_str()) == 0){
          VariableAlreadyAssignInDDtracks = true;
          break;
        }
        varIDinDDtracks++;
      }
      if(VariableAlreadyAssignInDDtracks){ //when a matching variable is found in DD and LL tracks, the link between DD ID and LL ID is stored in the vector
        auto DD2LLlink = std::make_pair(varIDinDDtracks, counter);
        DoubleUsedVariablesInDDandLL.push_back(DD2LLlink);
      }
      else { //if the LL variable is not used for DD BDT training, the double is linked 'normally' to the tree branch
        theTree->SetBranchAddress((*tracksIterLL).c_str(), &d_varLL[counter]) ;
      }
      counter ++;
    }
    theTree->SetBranchAddress("KshortDecayInVeLo", &EventIsLL);
    //finished DD and LL tracks
    coutInfo("Found " + to_string(DoubleUsedVariablesInDDandLL.size()) + " variables that are used by DD and LL track BDT training:");
    for (vector< std::pair<int, int> >::iterator it = DoubleUsedVariablesInDDandLL.begin(); it !=DoubleUsedVariablesInDDandLL.end();++it){
      coutInfo(" LL #" + to_string(it->second) + " <-- DD #" + to_string(it->first) );
    }
  }
  else{
    counter = 0;
    if(Kst2Kspiplus)theTree->SetBranchAddress("KshortDecayInVeLo", &EventIsLL);
    for (vector<string>::iterator tracksIter1 = InputVariables->AllBranches.begin(); tracksIter1 !=InputVariables->AllBranches.end();++tracksIter1){
      theTree->SetBranchAddress((*tracksIter1).c_str(), &d_var[counter]) ;
      counter ++;
    }
  }
  // Output file: full copy of the input tree plus the new response branches.
  string outputPath = GetBDToutputFile(year,Run,MC,ReferenceChannel,PHSP,KshortDecayInVelo,UseLowQ2Range,false);
  if (IncSample) replace(outputPath,"KplusPi0Resolved","Inclusive");
  TFile * output = new TFile (outputPath.c_str(),"RECREATE");
  coutInfo("--- New file created! ");
  theTree->SetBranchStatus("*",1);
  coutInfo("Copy the data Tree... ");
  TTree* newTree = theTree->CopyTree("");
  coutInfo("Finished!");
  coutInfo("Add branch to cloned tree!");
  //Add new TBranch into TTree for BDT response value for each event
  Double_t BDTresponse;
  TBranch* Bra_BDT = newTree->Branch(TMVAmethod+"response", &BDTresponse, TMVAmethod+"response/D");
  if(Bra_BDT == NULL){
    coutERROR("New branch was not created successfully!");
    return 0;
  }
  //Add new TBranch into TTree for random separation of the dataset into two subsets
  //(fixed seed 13 so the split is reproducible between runs):
  Int_t RandomSubSet;
  TBranch* Bra_RandomSubSet = newTree->Branch("RandomSubSet", &RandomSubSet, "RandomSubSet/I");
  TRandom3 * RandomSubSetter = new TRandom3(13);
  //get the formulas for each variable (i.e. log(x) or log(1-x)) according to the settings in the txt file
  std::vector<TFormula>formula;
  std::vector<TFormula>formulaDD;
  std::vector<TFormula>formulaLL;
  if(Kst2Kspiplus && SplitDDandLL){
    for (int i = 0; i < NvarDD; i++){
      TFormula form = TFormula(Form("formDD_%s", InputVariablesDD->AllBranches[i].c_str()),InputVariablesDD->AllVariables[i].Formula.c_str());
      formulaDD.push_back(form);
    }
    for (int i = 0; i < NvarLL; i++){
      TFormula form = TFormula(Form("formLL_%s", InputVariablesLL->AllBranches[i].c_str()),InputVariablesLL->AllVariables[i].Formula.c_str());
      formulaLL.push_back(form);
    }
  }
  else{
    for (int i = 0; i < Nvar; i++){
      TFormula form = TFormula(Form("form_%s", InputVariables->AllBranches[i].c_str()),InputVariables->AllVariables[i].Formula.c_str());
      formula.push_back(form);
    }
  }
  coutInfo("--- Processing: " + to_string(theTree->GetEntries()) + " events");
  assert(theTree->GetEntries() == newTree->GetEntries());
  TStopwatch sw;
  sw.Start();
  // --- Event loop: for each event, evaluate the per-variable formulas on the
  //     branch doubles (index k walks the branches consumed by variable i,
  //     NoBr = number of branches that variable needs), feed the reader floats,
  //     evaluate the booked method and fill the new branches.
  for (Long64_t ievt=0; ievt<theTree->GetEntries();ievt++) {
    if (ievt%1000 == 0) coutInfo("--- ... Processing event: " + to_string(ievt));
    newTree->GetEntry(ievt);
    if (Kst2Kspiplus && SplitDDandLL){
      if(!EventIsLL){ //DD tracks
        for (int i = 0, k = 0; i < NvarDD; i++){
          switch(InputVariablesDD->AllVariables[i].NoBr) {
            case 1 : f_varDD[i] = (Float_t)formulaDD.at(i).Eval(d_varDD[k]);
                     k++;
                     break;
            case 2 : f_varDD[i] = (Float_t)formulaDD.at(i).Eval(d_varDD[k],d_varDD[k+1]);
                     k+=2;
                     break;
            case 3 : f_varDD[i] = (Float_t)formulaDD.at(i).Eval(d_varDD[k],d_varDD[k+1],d_varDD[k+2]);
                     k+=3;
                     break;
          }
          if (TMath::IsNaN(f_varDD[i])){
            coutDebug("Formula " + string(formulaDD.at(i).GetTitle()) + "\tVariable " + to_string(i) + " \tvalue:" + to_string(f_varDD[i]) + "\tBranch value " +to_string(d_varDD[k-1]));
            f_varDD[i] = 0.0; //quick temporary hack for ln(1-DIRA) problem
          }
        }
      }
      else{ //LL tracks
        //copy the variables, which are used by both DD and LL tracks, from the DD tracks array to the LL tracks array!
        for (vector< std::pair<int, int> >::iterator DD2LLlinkIter = DoubleUsedVariablesInDDandLL.begin(); DD2LLlinkIter != DoubleUsedVariablesInDDandLL.end();++DD2LLlinkIter){
          d_varLL[DD2LLlinkIter->second] = d_varDD[DD2LLlinkIter->first];
        }
        for (int i = 0, k = 0; i < NvarLL; i++){
          switch(InputVariablesLL->AllVariables[i].NoBr) {
            case 1 : f_varLL[i] = (Float_t)formulaLL.at(i).Eval(d_varLL[k]);
                     k++;
                     break;
            case 2 : f_varLL[i] = (Float_t)formulaLL.at(i).Eval(d_varLL[k],d_varLL[k+1]);
                     k+=2;
                     break;
            case 3 : f_varLL[i] = (Float_t)formulaLL.at(i).Eval(d_varLL[k],d_varLL[k+1],d_varLL[k+2]);
                     k+=3;
                     break;
          }
          if (TMath::IsNaN(f_varLL[i])){
            cout << "[DEBUG]\t\tFormula "<<formulaLL.at(i).GetTitle() << "\tVariable " << i << " \tvalue:" << f_varLL[i] << "\tBranch value "<<d_varLL[k-1] << endl;
            f_varLL[i] = 0.0; //quick temporary hack for ln(1-DIRA) problem
          }
        }
      }
    }
    else{ // no separation between DD and LL. ALL pi_zero tuples
      for (int i = 0, k = 0; i < Nvar; i++){
        switch(InputVariables->AllVariables[i].NoBr) {
          case 1 : f_var[i] = (Float_t)formula.at(i).Eval(d_var[k]);
                   k++;
                   break;
          case 2 : f_var[i] = (Float_t)formula.at(i).Eval(d_var[k],d_var[k+1]);
                   k+=2;
                   break;
          case 3 : f_var[i] = (Float_t)formula.at(i).Eval(d_var[k],d_var[k+1],d_var[k+2]);
                   k+=3;
                   break;
        }
        if (TMath::IsNaN(f_var[i])){
          cout << "[DEBUG]\t\tFormula "<<formula.at(i).GetTitle() << "\tVariable " << i << " \tvalue:" << f_var[i] << "\tBranch value "<<d_var[k-1] << endl;
          // NOTE(review): the NaN fallback is -24.0 here but 0.0 in the DD/LL
          // branches above — confirm this asymmetry is intended.
          f_var[i] = -24.0; //quick temporary hack for ln(1-DIRA) problem
        }
      }
    }
    //evaluate the BDT response for this event with the according reader:
    if (Use["BDT"]){
      if(Kst2Kspiplus){
        if(SplitDDandLL){
          if(EventIsLL)BDTresponse = (Double_t)readerLL->EvaluateMVA( "BDT method");
          else BDTresponse = (Double_t)readerDD->EvaluateMVA( "BDT method");
        }
        else BDTresponse = (Double_t)reader->EvaluateMVA( "BDT method");
      }
      else BDTresponse = (Double_t)reader->EvaluateMVA( "BDT method");
      histBdt ->Fill(BDTresponse );
    }
    else if (Use["BDTD"]){
      if(Kst2Kspiplus){
        if(SplitDDandLL){
          if(EventIsLL)BDTresponse = (Double_t)readerLL->EvaluateMVA( "BDTD method");
          else BDTresponse = (Double_t)readerDD->EvaluateMVA( "BDTD method");
        }
        else BDTresponse = (Double_t)reader->EvaluateMVA( "BDTD method");
      }
      else BDTresponse = (Double_t)reader->EvaluateMVA( "BDTD method");
      histBdtD ->Fill(BDTresponse);
    }
    else if (Use["BDTG"]) {
      if(Kst2Kspiplus){
        if(SplitDDandLL){
          if(EventIsLL)BDTresponse = (Double_t)readerLL->EvaluateMVA( "BDTG method");
          else BDTresponse = (Double_t)readerDD->EvaluateMVA( "BDTG method");
        }
        else BDTresponse = (Double_t)reader->EvaluateMVA( "BDTG method");
      }
      else BDTresponse = (Double_t)reader->EvaluateMVA( "BDTG method");
      histBdtG ->Fill(BDTresponse);
    }
    else if (Use["MLP"]) {
      if(Kst2Kspiplus){
        if(SplitDDandLL){
          if(EventIsLL)BDTresponse = (Double_t)readerLL->EvaluateMVA( "MLP method");
          else BDTresponse = (Double_t)readerDD->EvaluateMVA( "MLP method");
        }
        else BDTresponse = (Double_t)reader->EvaluateMVA( "MLP method");
      }
      else BDTresponse = (Double_t)reader->EvaluateMVA( "MLP method");
      histMlp ->Fill(BDTresponse);
    }
    //separate into two random subsets:
    if(RandomSubSetter->Rndm() > 0.5)RandomSubSet = 1;
    else RandomSubSet = -1;
    //save value to TBranch:
    Bra_BDT->Fill();
    Bra_RandomSubSet->Fill();
  }
  // Get elapsed time
  sw.Stop();
  coutInfo("--- End of event loop: "); sw.Print();
  output->cd();
  //save updated tree (kWriteDelete replaces any previous cycle of the tree)
  newTree->Write("",TObject::kWriteDelete);
  coutInfo("--- Tree saved! ");
  output->Close();
  // NOTE(review): theTree, the histograms, RandomSubSetter and the
  // MVA_variables objects are intentionally(?) not deleted — typical for a
  // one-shot ROOT macro, but a leak if called repeatedly in one session.
  delete readerDD;
  delete readerLL;
  delete reader;
  coutInfo("==> TMVAClassificationApplication is done!");
  return 1;
}
  564. Int_t TMVAClassAppData(Int_t Run){
  565. std::vector<string> years = yearsData(Run);
  566. bool UseLowQ2Range = false;
  567. for (auto year: years){
  568. if (!TMVAClassificationApplication_b2kmm(Run,stoi(year),false,false, false, false, UseLowQ2Range)) return 0;
  569. }
  570. if (KshortChannel){
  571. for (auto year: years)
  572. if (!TMVAClassificationApplication_b2kmm(Run,stoi(year),true,false, false, false, UseLowQ2Range)) return 0;
  573. }
  574. return 1;
  575. }
  576. Int_t TMVAClassAppMC(Int_t Run){
  577. std::vector<string> years = yearsMC(false, false,Run);
  578. bool UseLowQ2Range = false;
  579. for (auto year: years){
  580. cout << year << endl;
  581. if (!TMVAClassificationApplication_b2kmm(Run,stoi(year),false,true, false, false, UseLowQ2Range)) return 0;
  582. }
  583. if (KshortChannel){
  584. for (auto year: years)
  585. if (!TMVAClassificationApplication_b2kmm(Run,stoi(year),true,true, false, false, UseLowQ2Range)) return 0;
  586. }
  587. return 1;
  588. }
  589. Int_t TMVAClassAppRefMC(Int_t Run){
  590. std::vector<string> years = yearsMC(true, false,Run);
  591. bool UseLowQ2Range = false;
  592. for (auto year: years){
  593. if (!TMVAClassificationApplication_b2kmm(Run,stoi(year),false,true, true, false, UseLowQ2Range)) return 0;
  594. }
  595. if (KshortChannel){
  596. for (auto year: years)
  597. if (!TMVAClassificationApplication_b2kmm(Run,stoi(year),true,true, true, false, UseLowQ2Range)) return 0;
  598. }
  599. return 1;
  600. }
  601. Int_t TMVAClassAppPHSP(Int_t Run){
  602. std::vector<string> years = yearsMC(false,true,Run);
  603. bool UseLowQ2Range = false;
  604. for (auto year: years){
  605. if (!TMVAClassificationApplication_b2kmm(Run,stoi(year),false,true, false, true, UseLowQ2Range)) return 0;
  606. }
  607. if (KshortChannel){
  608. for (auto year: years)
  609. if (!TMVAClassificationApplication_b2kmm(Run,stoi(year),true,true, false, true, UseLowQ2Range)) return 0;
  610. }
  611. return 1;
  612. }
  613. Int_t TMVAClassAppInc(Int_t Run){
  614. std::vector<string> years = {"2011","2012","2016"};
  615. bool UseLowQ2Range = false;
  616. for (auto year: years){
  617. if (!TMVAClassificationApplication_b2kmm(Run,stoi(year),false,true, false, false, UseLowQ2Range,true)) return 0;
  618. }
  619. return 1;
  620. }
  621. Int_t TMVAClassAppAll(Int_t Run = 1){
  622. if (!TMVAClassAppData(Run)) return 0;
  623. if (!TMVAClassAppMC(Run)) return 0;
  624. if (!TMVAClassAppRefMC(Run)) return 0;
  625. if (!TMVAClassAppPHSP(Run)) return 0;
  626. return 1;
  627. }