TMVAClassification.C
// @(#)root/tmva $Id$
/**********************************************************************************
 * Project    : TMVA - a ROOT-integrated toolkit for multivariate data analysis
 * Package    : TMVA
 * Root Macro : TMVAClassification
 *
 * This macro provides examples for the training and testing of the
 * TMVA classifiers.
 *
 * The input is a toy-MC sample consisting of four Gaussian-distributed
 * and linearly correlated input variables.
 *
 * The methods to be used can be switched on and off by means of booleans, or
 * via the prompt command, for example:
 *
 *    root -l ./TMVAClassification.C\(\"Fisher,Likelihood\"\)
 *
 * (note that the backslashes are mandatory)
 * If no method is given, a default set of classifiers is used.
 *
 * The output file "TMVA_vertex.root" can be analysed with the use of dedicated
 * macros (simply say: root -l <macro.C>), which can be conveniently
 * invoked through a GUI that will appear at the end of the run of this macro.
 * Launch the GUI via the command:
 *
 *    root -l ./TMVAGui.C
 *
 **********************************************************************************/

#include <cstdlib>
#include <iostream>
#include <map>
#include <string>

#include "TChain.h"
#include "TFile.h"
#include "TTree.h"
#include "TString.h"
#include "TObjString.h"
#include "TSystem.h"
#include "TROOT.h"

#if not defined(__CINT__) || defined(__MAKECINT__)
// needs to be included when makecint runs (ACLiC)
#include "TMVA/Factory.h"
#include "TMVA/Tools.h"
#endif

void TMVAClassification( TString myMethodList = "" )
{
   // The explicit loading of the shared libTMVA is done in TMVAlogon.C, defined in .rootrc.
   // If you use your private .rootrc, or run from a different directory, please copy the
   // corresponding lines from .rootrc.
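   //
   // If TMVAlogon.C is not picked up (e.g. when running with a private .rootrc),
   // the library can also be loaded explicitly; a minimal sketch:
   //
   //    gSystem->Load("libTMVA");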

   // Methods to be processed can be given as an argument; use the format:
   //
   //    mylinux~> root -l TMVAClassification.C\(\"myMethod1,myMethod2,myMethod3\"\)
   //
   // If you would like to use a method via the plugin mechanism, we recommend using
   //
   //    mylinux~> root -l TMVAClassification.C\(\"P_myMethod\"\)
   //
   // (an example of booking the BDT as a plugin is sketched below; the real
   // application, of course, is a method you have written yourself)
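   //
   // A rough sketch of registering such a plugin through the ROOT plugin manager
   // (the handler arguments, in particular the constructor signature, are assumptions
   // and depend on your TMVA version):
   //
   //    gPluginMgr->AddHandler( "TMVA@@MethodBase", "BDT", "TMVA::MethodBDT",
   //                            "TMVA.1", "MethodBDT(TString,TString,DataSet&,TString)" );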

   //---------------------------------------------------------------
   // This loads the library
   TMVA::Tools::Instance();

   // to get access to the GUI and all tmva macros
   TString tmva_dir(TString(gRootDir) + "/tmva");
   if(gSystem->Getenv("TMVASYS"))
      tmva_dir = TString(gSystem->Getenv("TMVASYS"));
   gROOT->SetMacroPath(tmva_dir + "/test/:" + gROOT->GetMacroPath() );
   gROOT->ProcessLine(".L TMVAGui.C");

   // Default MVA methods to be trained + tested
   std::map<std::string,int> Use;

   // --- Cut optimisation
   Use["Cuts"]            = 1;
   Use["CutsD"]           = 1;
   Use["CutsPCA"]         = 0;
   Use["CutsGA"]          = 0;
   Use["CutsSA"]          = 0;
   //
   // --- 1-dimensional likelihood ("naive Bayes estimator")
   Use["Likelihood"]      = 1;
   Use["LikelihoodD"]     = 0; // the "D" extension indicates decorrelated input variables (see option strings)
   Use["LikelihoodPCA"]   = 1; // the "PCA" extension indicates PCA-transformed input variables (see option strings)
   Use["LikelihoodKDE"]   = 0;
   Use["LikelihoodMIX"]   = 0;
   //
   // --- Multidimensional likelihood and Nearest-Neighbour methods
   Use["PDERS"]           = 1;
   Use["PDERSD"]          = 0;
   Use["PDERSPCA"]        = 0;
   Use["PDEFoam"]         = 1;
   Use["PDEFoamBoost"]    = 0; // uses generalised MVA method boosting
   Use["KNN"]             = 1; // k-nearest neighbour method
   //
   // --- Linear Discriminant Analysis
   Use["LD"]              = 1; // Linear Discriminant identical to Fisher
   Use["Fisher"]          = 0;
   Use["FisherG"]         = 0;
   Use["BoostedFisher"]   = 0; // uses generalised MVA method boosting
   Use["HMatrix"]         = 0;
   //
   // --- Function Discriminant analysis
   Use["FDA_GA"]          = 1; // minimisation of user-defined function using Genetic Algorithm
   Use["FDA_SA"]          = 0;
   Use["FDA_MC"]          = 0;
   Use["FDA_MT"]          = 0;
   Use["FDA_GAMT"]        = 0;
   Use["FDA_MCMT"]        = 0;
   //
   // --- Neural Networks (all are feed-forward Multilayer Perceptrons)
   Use["MLP"]             = 0; // Recommended ANN
   Use["MLPBFGS"]         = 0; // Recommended ANN with optional training method
   Use["MLPBNN"]          = 1; // Recommended ANN with BFGS training method and Bayesian regulator
   Use["CFMlpANN"]        = 0; // Deprecated ANN from ALEPH
   Use["TMlpANN"]         = 0; // ROOT's own ANN
   //
   // --- Support Vector Machine
   Use["SVM"]             = 1;
   //
   // --- Boosted Decision Trees
   Use["BDT"]             = 1; // uses Adaptive Boost
   Use["BDTG"]            = 0; // uses Gradient Boost
   Use["BDTB"]            = 0; // uses Bagging
   Use["BDTD"]            = 0; // decorrelation + Adaptive Boost
   Use["BDTF"]            = 0; // allows usage of the Fisher discriminant for node splitting
   //
   // --- Friedman's RuleFit method, i.e., an optimised series of cuts ("rules")
   Use["RuleFit"]         = 1;
   // ---------------------------------------------------------------

   std::cout << std::endl;
   std::cout << "==> Start TMVAClassification" << std::endl;

   // Select methods (don't look at this code - not of interest)
   if (myMethodList != "") {
      for (std::map<std::string,int>::iterator it = Use.begin(); it != Use.end(); it++) it->second = 0;

      std::vector<TString> mlist = TMVA::gTools().SplitString( myMethodList, ',' );

      for (UInt_t i = 0; i < mlist.size(); i++) {
         std::string regMethod(mlist[i]);

         if (Use.find(regMethod) == Use.end()) {
            std::cout << "Method \"" << regMethod << "\" not known in TMVA under this name. Choose among the following:" << std::endl;

            for (std::map<std::string,int>::iterator it = Use.begin(); it != Use.end(); it++) std::cout << it->first << " ";

            std::cout << std::endl;
            return;
         }

         Use[regMethod] = 1;
      }
   }

   // --------------------------------------------------------------------------------------------------

   // --- Here the preparation phase begins

   // Create a ROOT output file where TMVA will store ntuples, histograms, etc.
   TString outfileName( "TMVA_vertex.root" ); // Amilkar
   TFile *outputFile = TFile::Open( outfileName, "RECREATE" );

   // Create the factory object. Later you can choose the methods
   // whose performance you'd like to investigate. The factory is
   // the only TMVA object you have to interact with.
   //
   // The first argument is the base of the name of all the
   // weight files in the directory weights/
   //
   // The second argument is the output file for the training results.
   // All TMVA output can be suppressed by removing the "!" (not) in
   // front of the "Silent" argument in the option string
   TMVA::Factory *factory = new TMVA::Factory( "TMVAClassification", outputFile,
                                               "!V:!Silent:Color:DrawProgressBar:Transformations=I;D;P;G,D:AnalysisType=Classification" );

   // If you wish to modify default settings
   // (please check "src/Config.h" to see all available global options)
   //    (TMVA::gConfig().GetVariablePlotting()).fTimesRMS = 8.0;
   //    (TMVA::gConfig().GetIONames()).fWeightFileDir = "myWeightDirectory";

   // Define the input variables that shall be used for the MVA training.
   // Note that you may also use variable expressions, such as: "3*var1/var2*abs(var3)"
   // [all types of expressions that can also be parsed by TTree::Draw( "expression" )]
   //factory->AddVariable( "myvar1 := var1+var2", 'F' );
   //factory->AddVariable( "myvar2 := var1-var2", "Expression 2", "", 'F' );
   //factory->AddVariable( "var3", "Variable 3", "units", 'F' );
   //factory->AddVariable( "var4", "Variable 4", "units", 'F' );

   //factory->AddVariable("beam",  'I'); // --> this variable is constant; NEED TO CHECK
   factory->AddVariable("postx",  'I');
   factory->AddVariable("prompt", 'I');
   factory->AddVariable("cross",  'I');
   factory->AddVariable("tof",    'I');
   factory->AddVariable("notof",  'I');
   factory->AddVariable("BEMC",   'I');
   factory->AddVariable("noBEMC", 'I');
   factory->AddVariable("EEMC",   'I');
   factory->AddVariable("noEEMC", 'I');
   //factory->AddVariable("chi2",  'F'); // --> constant; NEED TO CHECK

   // You can add so-called "Spectator variables", which are not used in the MVA training,
   // but will appear in the final "TestTree" produced by TMVA. This TestTree will contain the
   // input variables, the response values of all trained MVAs, and the spectator variables
   //factory->AddSpectator( "spec1 := var1*2", "Spectator 1", "units", 'F' );
   //factory->AddSpectator( "spec2 := var1*3", "Spectator 2", "units", 'F' );
   factory->AddSpectator( "primZ", 'F' );
   factory->AddSpectator( "event", 'I' );

   // Read training and test data
   // (it is also possible to use ASCII format as input -> see TMVA Users Guide)
   TString fname = "./outvertex.root"; // Amilkar

   //if (gSystem->AccessPathName( fname )) // file does not exist in local directory
   //   gSystem->Exec("wget http://root.cern.ch/files/tmva_class_example.root");

   TFile *input = TFile::Open( fname );

   std::cout << "--- TMVAClassification : Using input file: " << input->GetName() << std::endl;

   // --- Register the training and test trees

   TTree *signal     = (TTree*)input->Get("primaryvtx"); // Amilkar
   TTree *background = (TTree*)input->Get("primaryvtx"); // Amilkar
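   // Note: signal and background here point to the same "primaryvtx" tree; the two
   // classes are separated below by the cuts mycuts/mycutb passed to
   // PrepareTrainingAndTestTree()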

   // global event weights per tree (see below for setting event-wise weights)
   Double_t signalWeight     = 1.0;
   Double_t backgroundWeight = 1.0;

   // You can add an arbitrary number of signal or background trees
   factory->AddSignalTree    ( signal,     signalWeight     );
   factory->AddBackgroundTree( background, backgroundWeight );

   // To give different trees for training and testing, do as follows:
   //    factory->AddSignalTree( signalTrainingTree, signalTrainWeight, "Training" );
   //    factory->AddSignalTree( signalTestTree,     signalTestWeight,  "Test" );

   // Use the following code instead of the above two or four lines to add signal and background
   // training and test events "by hand"
   // NOTE that in this case one should not give expressions (such as "var1+var2") in the input
   // variable definition, but simply compute the expression before adding the event
   //
   //    // --- begin ----------------------------------------------------------
   //    std::vector<Double_t> vars( 4 ); // vector has size of number of input variables
   //    Float_t  treevars[4], weight;
   //
   //    // Signal
   //    for (UInt_t ivar=0; ivar<4; ivar++) signal->SetBranchAddress( Form( "var%i", ivar+1 ), &(treevars[ivar]) );
   //    for (UInt_t i=0; i<signal->GetEntries(); i++) {
   //       signal->GetEntry(i);
   //       for (UInt_t ivar=0; ivar<4; ivar++) vars[ivar] = treevars[ivar];
   //       // add training and test events; here: first half is training, second is testing
   //       // note that the weight can also be event-wise
   //       if (i < signal->GetEntries()/2.0) factory->AddSignalTrainingEvent( vars, signalWeight );
   //       else                              factory->AddSignalTestEvent    ( vars, signalWeight );
   //    }
   //
   //    // Background (has event weights)
   //    background->SetBranchAddress( "weight", &weight );
   //    for (UInt_t ivar=0; ivar<4; ivar++) background->SetBranchAddress( Form( "var%i", ivar+1 ), &(treevars[ivar]) );
   //    for (UInt_t i=0; i<background->GetEntries(); i++) {
   //       background->GetEntry(i);
   //       for (UInt_t ivar=0; ivar<4; ivar++) vars[ivar] = treevars[ivar];
   //       // add training and test events; here: first half is training, second is testing
   //       // note that the weight can also be event-wise
   //       if (i < background->GetEntries()/2) factory->AddBackgroundTrainingEvent( vars, backgroundWeight*weight );
   //       else                                factory->AddBackgroundTestEvent    ( vars, backgroundWeight*weight );
   //    }
   //    // --- end ------------------------------------------------------------
   //
   // --- end of tree registration

   // Set individual event weights (the variables must exist in the original TTree)
   //    for signal    : factory->SetSignalWeightExpression    ("weight1*weight2");
   //    for background: factory->SetBackgroundWeightExpression("weight1*weight2");
   //factory->SetBackgroundWeightExpression( "weight" );

   // Apply additional cuts on the signal and background samples (can be different)
   TCut mycuts = "";
   TCut mycutb = "index!=0"; // Amilkar: background is when the maximum-multiplicity vertex is not the primary vertex
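   // A hypothetical tighter signal selection using the flags declared above could
   // look like (sketch only, not used here):
   //    TCut mycuts = "tof==1 && postx==1";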

   // Tell the factory how to use the training and testing events
   //
   // If no numbers of events are given, half of the events in the tree are used
   // for training, and the other half for testing:
   //    factory->PrepareTrainingAndTestTree( mycut, "SplitMode=random:!V" );
   // To also specify the number of testing events, use:
   //    factory->PrepareTrainingAndTestTree( mycut,
   //                                         "NSigTrain=3000:NBkgTrain=3000:NSigTest=3000:NBkgTest=3000:SplitMode=Random:!V" );
   factory->PrepareTrainingAndTestTree( mycuts, mycutb,
                                        "nTrain_Signal=0:nTrain_Background=0:SplitMode=Random:NormMode=NumEvents:!V" );
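   // (nTrain_Signal=0 and nTrain_Background=0 request the default split: half of the
   // selected events are used for training, the rest for testing)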

   // ---- Book MVA methods
   //
   // Please lookup the various method configuration options in the corresponding cxx files, eg:
   // src/MethodCuts.cxx, etc., or here: http://tmva.sourceforge.net/optionRef.html
   // It is possible to preset ranges in the option string in which the cut optimisation should be done:
   // "...:CutRangeMin[2]=-1:CutRangeMax[2]=1:...", where [2] is the third input variable

   // Cut optimisation
   if (Use["Cuts"])
      factory->BookMethod( TMVA::Types::kCuts, "Cuts",
                           "!H:!V:FitMethod=MC:EffSel:SampleSize=200000:VarProp=FSmart" );

   if (Use["CutsD"])
      factory->BookMethod( TMVA::Types::kCuts, "CutsD",
                           "!H:!V:FitMethod=MC:EffSel:SampleSize=200000:VarProp=FSmart:VarTransform=Decorrelate" );

   if (Use["CutsPCA"])
      factory->BookMethod( TMVA::Types::kCuts, "CutsPCA",
                           "!H:!V:FitMethod=MC:EffSel:SampleSize=200000:VarProp=FSmart:VarTransform=PCA" );

   if (Use["CutsGA"])
      factory->BookMethod( TMVA::Types::kCuts, "CutsGA",
                           "H:!V:FitMethod=GA:CutRangeMin[0]=-10:CutRangeMax[0]=10:VarProp[1]=FMax:EffSel:Steps=30:Cycles=3:PopSize=400:SC_steps=10:SC_rate=5:SC_factor=0.95" );

   if (Use["CutsSA"])
      factory->BookMethod( TMVA::Types::kCuts, "CutsSA",
                           "!H:!V:FitMethod=SA:EffSel:MaxCalls=150000:KernelTemp=IncAdaptive:InitialTemp=1e+6:MinTemp=1e-6:Eps=1e-10:UseDefaultScale" );

   // Likelihood ("naive Bayes estimator")
   if (Use["Likelihood"])
      factory->BookMethod( TMVA::Types::kLikelihood, "Likelihood",
                           "H:!V:TransformOutput:PDFInterpol=Spline2:NSmoothSig[0]=20:NSmoothBkg[0]=20:NSmoothBkg[1]=10:NSmooth=1:NAvEvtPerBin=50" );

   // Decorrelated likelihood
   if (Use["LikelihoodD"])
      factory->BookMethod( TMVA::Types::kLikelihood, "LikelihoodD",
                           "!H:!V:TransformOutput:PDFInterpol=Spline2:NSmoothSig[0]=20:NSmoothBkg[0]=20:NSmooth=5:NAvEvtPerBin=50:VarTransform=Decorrelate" );

   // PCA-transformed likelihood
   if (Use["LikelihoodPCA"])
      factory->BookMethod( TMVA::Types::kLikelihood, "LikelihoodPCA",
                           "!H:!V:!TransformOutput:PDFInterpol=Spline2:NSmoothSig[0]=20:NSmoothBkg[0]=20:NSmooth=5:NAvEvtPerBin=50:VarTransform=PCA" );

   // Use a kernel density estimator to approximate the PDFs
   if (Use["LikelihoodKDE"])
      factory->BookMethod( TMVA::Types::kLikelihood, "LikelihoodKDE",
                           "!H:!V:!TransformOutput:PDFInterpol=KDE:KDEtype=Gauss:KDEiter=Adaptive:KDEFineFactor=0.3:KDEborder=None:NAvEvtPerBin=50" );

   // Use a variable-dependent mix of splines and kernel density estimator
   if (Use["LikelihoodMIX"])
      factory->BookMethod( TMVA::Types::kLikelihood, "LikelihoodMIX",
                           "!H:!V:!TransformOutput:PDFInterpolSig[0]=KDE:PDFInterpolBkg[0]=KDE:PDFInterpolSig[1]=KDE:PDFInterpolBkg[1]=KDE:PDFInterpolSig[2]=Spline2:PDFInterpolBkg[2]=Spline2:PDFInterpolSig[3]=Spline2:PDFInterpolBkg[3]=Spline2:KDEtype=Gauss:KDEiter=Nonadaptive:KDEborder=None:NAvEvtPerBin=50" );

   // Test the multi-dimensional probability density estimator
   // here are the option strings for the MinMax and RMS methods, respectively:
   //    "!H:!V:VolumeRangeMode=MinMax:DeltaFrac=0.2:KernelEstimator=Gauss:GaussSigma=0.3"
   //    "!H:!V:VolumeRangeMode=RMS:DeltaFrac=3:KernelEstimator=Gauss:GaussSigma=0.3"
   if (Use["PDERS"])
      factory->BookMethod( TMVA::Types::kPDERS, "PDERS",
                           "!H:!V:NormTree=T:VolumeRangeMode=Adaptive:KernelEstimator=Gauss:GaussSigma=0.3:NEventsMin=400:NEventsMax=600" );

   if (Use["PDERSD"])
      factory->BookMethod( TMVA::Types::kPDERS, "PDERSD",
                           "!H:!V:VolumeRangeMode=Adaptive:KernelEstimator=Gauss:GaussSigma=0.3:NEventsMin=400:NEventsMax=600:VarTransform=Decorrelate" );

   if (Use["PDERSPCA"])
      factory->BookMethod( TMVA::Types::kPDERS, "PDERSPCA",
                           "!H:!V:VolumeRangeMode=Adaptive:KernelEstimator=Gauss:GaussSigma=0.3:NEventsMin=400:NEventsMax=600:VarTransform=PCA" );

   // Multi-dimensional likelihood estimator using self-adapting phase-space binning
   if (Use["PDEFoam"])
      factory->BookMethod( TMVA::Types::kPDEFoam, "PDEFoam",
                           "!H:!V:SigBgSeparate=F:TailCut=0.001:VolFrac=0.0666:nActiveCells=500:nSampl=2000:nBin=5:Nmin=100:Kernel=None:Compress=T" );

   if (Use["PDEFoamBoost"])
      factory->BookMethod( TMVA::Types::kPDEFoam, "PDEFoamBoost",
                           "!H:!V:Boost_Num=30:Boost_Transform=linear:SigBgSeparate=F:MaxDepth=4:UseYesNoCell=T:DTLogic=MisClassificationError:FillFoamWithOrigWeights=F:TailCut=0:nActiveCells=500:nBin=20:Nmin=400:Kernel=None:Compress=T" );

   // K-Nearest Neighbour classifier (KNN)
   if (Use["KNN"])
      factory->BookMethod( TMVA::Types::kKNN, "KNN",
                           "H:nkNN=20:ScaleFrac=0.8:SigmaFact=1.0:Kernel=Gaus:UseKernel=F:UseWeight=T:!Trim" );

   // H-Matrix (chi-squared) method
   if (Use["HMatrix"])
      factory->BookMethod( TMVA::Types::kHMatrix, "HMatrix", "!H:!V:VarTransform=None" );

   // Linear discriminant (same as Fisher discriminant)
   if (Use["LD"])
      factory->BookMethod( TMVA::Types::kLD, "LD", "H:!V:VarTransform=None:CreateMVAPdfs:PDFInterpolMVAPdf=Spline2:NbinsMVAPdf=50:NsmoothMVAPdf=10" );

   // Fisher discriminant (same as LD)
   if (Use["Fisher"])
      factory->BookMethod( TMVA::Types::kFisher, "Fisher", "H:!V:Fisher:VarTransform=None:CreateMVAPdfs:PDFInterpolMVAPdf=Spline2:NbinsMVAPdf=50:NsmoothMVAPdf=10" );

   // Fisher with Gauss-transformed input variables
   if (Use["FisherG"])
      factory->BookMethod( TMVA::Types::kFisher, "FisherG", "H:!V:VarTransform=Gauss" );

   // Composite classifier: ensemble (tree) of boosted Fisher classifiers
   if (Use["BoostedFisher"])
      factory->BookMethod( TMVA::Types::kFisher, "BoostedFisher",
                           "H:!V:Boost_Num=20:Boost_Transform=log:Boost_Type=AdaBoost:Boost_AdaBoostBeta=0.2:!Boost_DetailedMonitoring" );

   // Function discrimination analysis (FDA) -- test of various fitters; the recommended one is Minuit (or GA or SA)
   if (Use["FDA_MC"])
      factory->BookMethod( TMVA::Types::kFDA, "FDA_MC",
                           "H:!V:Formula=(0)+(1)*x0+(2)*x1+(3)*x2+(4)*x3:ParRanges=(-1,1);(-10,10);(-10,10);(-10,10);(-10,10):FitMethod=MC:SampleSize=100000:Sigma=0.1" );

   if (Use["FDA_GA"]) // can also use Simulated Annealing (SA) algorithm (see CutsSA options)
      factory->BookMethod( TMVA::Types::kFDA, "FDA_GA",
                           "H:!V:Formula=(0)+(1)*x0+(2)*x1+(3)*x2+(4)*x3:ParRanges=(-1,1);(-10,10);(-10,10);(-10,10);(-10,10):FitMethod=GA:PopSize=300:Cycles=3:Steps=20:Trim=True:SaveBestGen=1" );

   if (Use["FDA_SA"]) // can also use Simulated Annealing (SA) algorithm (see CutsSA options)
      factory->BookMethod( TMVA::Types::kFDA, "FDA_SA",
                           "H:!V:Formula=(0)+(1)*x0+(2)*x1+(3)*x2+(4)*x3:ParRanges=(-1,1);(-10,10);(-10,10);(-10,10);(-10,10):FitMethod=SA:MaxCalls=15000:KernelTemp=IncAdaptive:InitialTemp=1e+6:MinTemp=1e-6:Eps=1e-10:UseDefaultScale" );

   if (Use["FDA_MT"])
      factory->BookMethod( TMVA::Types::kFDA, "FDA_MT",
                           "H:!V:Formula=(0)+(1)*x0+(2)*x1+(3)*x2+(4)*x3:ParRanges=(-1,1);(-10,10);(-10,10);(-10,10);(-10,10):FitMethod=MINUIT:ErrorLevel=1:PrintLevel=-1:FitStrategy=2:UseImprove:UseMinos:SetBatch" );

   if (Use["FDA_GAMT"])
      factory->BookMethod( TMVA::Types::kFDA, "FDA_GAMT",
                           "H:!V:Formula=(0)+(1)*x0+(2)*x1+(3)*x2+(4)*x3:ParRanges=(-1,1);(-10,10);(-10,10);(-10,10);(-10,10):FitMethod=GA:Converger=MINUIT:ErrorLevel=1:PrintLevel=-1:FitStrategy=0:!UseImprove:!UseMinos:SetBatch:Cycles=1:PopSize=5:Steps=5:Trim" );

   if (Use["FDA_MCMT"])
      factory->BookMethod( TMVA::Types::kFDA, "FDA_MCMT",
                           "H:!V:Formula=(0)+(1)*x0+(2)*x1+(3)*x2+(4)*x3:ParRanges=(-1,1);(-10,10);(-10,10);(-10,10);(-10,10):FitMethod=MC:Converger=MINUIT:ErrorLevel=1:PrintLevel=-1:FitStrategy=0:!UseImprove:!UseMinos:SetBatch:SampleSize=20" );

   // TMVA ANN: MLP (recommended ANN) -- all ANNs in TMVA are Multilayer Perceptrons
   if (Use["MLP"])
      factory->BookMethod( TMVA::Types::kMLP, "MLP", "H:!V:NeuronType=tanh:VarTransform=N:NCycles=600:HiddenLayers=N+5:TestRate=5:!UseRegulator" );

   if (Use["MLPBFGS"])
      factory->BookMethod( TMVA::Types::kMLP, "MLPBFGS", "H:!V:NeuronType=tanh:VarTransform=N:NCycles=600:HiddenLayers=N+5:TestRate=5:TrainingMethod=BFGS:!UseRegulator" );

   if (Use["MLPBNN"])
      factory->BookMethod( TMVA::Types::kMLP, "MLPBNN", "H:!V:NeuronType=tanh:VarTransform=N:NCycles=600:HiddenLayers=N+5:TestRate=5:TrainingMethod=BFGS:UseRegulator" ); // BFGS training with Bayesian regulators

   // CFMlpANN (Clermont-Ferrand ANN)
   if (Use["CFMlpANN"])
      factory->BookMethod( TMVA::Types::kCFMlpANN, "CFMlpANN", "!H:!V:NCycles=2000:HiddenLayers=N+1,N" ); // n_cycles:#nodes:#nodes:...

   // TMlpANN (ROOT's own ANN)
   if (Use["TMlpANN"])
      factory->BookMethod( TMVA::Types::kTMlpANN, "TMlpANN", "!H:!V:NCycles=200:HiddenLayers=N+1,N:LearningMethod=BFGS:ValidationFraction=0.3" ); // n_cycles:#nodes:#nodes:...

   // Support Vector Machine
   if (Use["SVM"])
      factory->BookMethod( TMVA::Types::kSVM, "SVM", "Gamma=0.25:Tol=0.001:VarTransform=Norm" );

   // Boosted Decision Trees
   if (Use["BDTG"]) // Gradient Boost
      factory->BookMethod( TMVA::Types::kBDT, "BDTG",
                           "!H:!V:NTrees=1000:BoostType=Grad:Shrinkage=0.10:UseBaggedGrad:GradBaggingFraction=0.5:nCuts=20:NNodesMax=5" );

   if (Use["BDT"]) // Adaptive Boost
      factory->BookMethod( TMVA::Types::kBDT, "BDT",
                           "!H:!V:NTrees=850:nEventsMin=150:MaxDepth=3:BoostType=AdaBoost:AdaBoostBeta=0.5:SeparationType=GiniIndex:nCuts=20:PruneMethod=NoPruning" );

   if (Use["BDTB"]) // Bagging
      factory->BookMethod( TMVA::Types::kBDT, "BDTB",
                           "!H:!V:NTrees=400:BoostType=Bagging:SeparationType=GiniIndex:nCuts=20:PruneMethod=NoPruning" );

   if (Use["BDTD"]) // Decorrelation + Adaptive Boost
      factory->BookMethod( TMVA::Types::kBDT, "BDTD",
                           "!H:!V:NTrees=400:nEventsMin=400:MaxDepth=3:BoostType=AdaBoost:SeparationType=GiniIndex:nCuts=20:PruneMethod=NoPruning:VarTransform=Decorrelate" );

   if (Use["BDTF"]) // allows the use of the Fisher discriminant for node splitting of (strongly) linearly correlated variables
      factory->BookMethod( TMVA::Types::kBDT, "BDTMitFisher",
                           "!H:!V:NTrees=50:nEventsMin=150:UseFisherCuts:MaxDepth=3:BoostType=AdaBoost:AdaBoostBeta=0.5:SeparationType=GiniIndex:nCuts=20:PruneMethod=NoPruning" );

   // RuleFit -- TMVA implementation of Friedman's method
   if (Use["RuleFit"])
      factory->BookMethod( TMVA::Types::kRuleFit, "RuleFit",
                           "H:!V:RuleFitModule=RFTMVA:Model=ModRuleLinear:MinImp=0.001:RuleMinDist=0.001:NTrees=20:fEventsMin=0.01:fEventsMax=0.5:GDTau=-1.0:GDTauPrec=0.01:GDStep=0.01:GDNSteps=10000:GDErrScale=1.02" );

   // For an example of the category classifier usage, see: TMVAClassificationCategory

   // --------------------------------------------------------------------------------------------------

   // ---- Now you can optimize the setting (configuration) of the MVAs using the set of training events

   //    factory->OptimizeAllMethods("SigEffAt001","Scan");
   //    factory->OptimizeAllMethods("ROCIntegral","GA");

   // --------------------------------------------------------------------------------------------------

   // ---- Now you can tell the factory to train, test, and evaluate the MVAs

   // Train MVAs using the set of training events
   factory->TrainAllMethods();

   // ---- Evaluate all MVAs using the set of test events
   factory->TestAllMethods();

   // ----- Evaluate and compare performance of all configured MVAs
   factory->EvaluateAllMethods();

   // --------------------------------------------------------------

   // Save the output
   outputFile->Close();

   std::cout << "==> Wrote root file: " << outputFile->GetName() << std::endl;
   std::cout << "==> TMVAClassification is done!" << std::endl;
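
   // The classifier response can also be inspected directly from the output file;
   // a minimal sketch (branch names assume the default TMVA TestTree layout):
   //
   //    TFile *f        = TFile::Open( outfileName );
   //    TTree *testTree = (TTree*)f->Get("TestTree");
   //    testTree->Draw("BDT", "classID==0");          // BDT response for signal events
   //    testTree->Draw("BDT", "classID==1", "same");  // ... and for background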

   delete factory;

   // Launch the GUI for the root macros
   if (!gROOT->IsBatch()) TMVAGui( outfileName );
}