// Class: ReadPion_Upstream_TMVA
// Automatically generated by MethodBase::MakeClass
//

/* configuration options =====================================================

#GEN -*-*-*-*-*-*-*-*-*-*-*- general info -*-*-*-*-*-*-*-*-*-*-*-

Method         : MLP::Pion_Upstream_TMVA
TMVA Release   : 4.2.0 [262656]
ROOT Release   : 6.02/01 [393729]
Creator        : jonesc
Date           : Sun Feb 14 17:25:50 2016
Host           : Linux lcgapp-slc6-physical1.cern.ch 2.6.32-431.29.2.el6.x86_64 #1 SMP Wed Sep 10 11:13:12 CEST 2014 x86_64 x86_64 x86_64 GNU/Linux
Dir            : /var/pcfst/r03/lhcb/jonesc/ANNPID/results/MC12/TrainMixture/TrainPhysTks-EvalPhysTks-ReweightRICH2/GhostAccFrac1.0/ProtonAccFrac1.0/KaonAccFrac1.0/TMVA-Run2-NoTkLikCDVelodEdx/MLP/Norm/ScaleF1.2/BP/NCycles750/CE/tanh/CVTest15/CVImp1e-16/NotUseRegulator/Pion/Upstream
Training events: 2700000
Analysis type  : [Classification]

#OPT -*-*-*-*-*-*-*-*-*-*-*-*- options -*-*-*-*-*-*-*-*-*-*-*-*-

# Set by User:
NCycles: "750" [Number of training cycles]
HiddenLayers: "20" [Specification of hidden layer architecture]
NeuronType: "tanh" [Neuron activation function type]
EstimatorType: "CE" [MSE (Mean Square Estimator) for Gaussian Likelihood or CE(Cross-Entropy) for Bernoulli Likelihood]
V: "True" [Verbose output (short form of "VerbosityLevel" below - overrides the latter one)]
VarTransform: "Norm" [List of variable transformations performed before training, e.g., "D_Background,P_Signal,G,N_AllClasses" for: "Decorrelation, PCA-transformation, Gaussianisation, Normalisation, each for the given class of events ('AllClasses' denotes all events of all classes, if no class indication is given, 'All' is assumed)"]
H: "True" [Print method-specific help message]
TrainingMethod: "BP" [Train with Back-Propagation (BP), BFGS Algorithm (BFGS), or Genetic Algorithm (GA - slower and worse)]
EpochMonitoring: "True" [Provide epoch-wise monitoring plots according to TestRate (caution: causes big ROOT output file!)]
ConvergenceImprove: "1.000000e-16" [Minimum improvement which counts as improvement (<0 means automatic convergence check is turned off)]
ConvergenceTests: "15" [Number of steps (without improvement) required for convergence (<0 means automatic convergence check is turned off)]
UseRegulator: "False" [Use regulator to avoid over-training]
# Default:
RandomSeed: "1" [Random seed for initial synapse weights (0 means unique seed for each run; default value '1')]
NeuronInputType: "sum" [Neuron input function type]
VerbosityLevel: "Verbose" [Verbosity level]
CreateMVAPdfs: "False" [Create PDFs for classifier outputs (signal and background)]
IgnoreNegWeightsInTraining: "False" [Events with negative weights are ignored in the training (but are included for testing and performance evaluation)]
LearningRate: "2.000000e-02" [ANN learning rate parameter]
DecayRate: "1.000000e-02" [Decay rate for learning parameter]
TestRate: "10" [Test for overtraining performed at each #th epochs]
Sampling: "1.000000e+00" [Only 'Sampling' (randomly selected) events are trained each epoch]
SamplingEpoch: "1.000000e+00" [Sampling is used for the first 'SamplingEpoch' epochs, afterwards, all events are taken for training]
SamplingImportance: "1.000000e+00" [The sampling weights of events in epochs which successful (worse estimator than before) are multiplied with SamplingImportance, else they are divided.]
SamplingTraining: "True" [The training sample is sampled]
SamplingTesting: "False" [The testing sample is sampled]
ResetStep: "50" [How often BFGS should reset history]
Tau: "3.000000e+00" [LineSearch "size step"]
BPMode: "sequential" [Back-propagation learning mode: sequential or batch]
BatchSize: "-1" [Batch size: number of events/batch, only set if in Batch Mode, -1 for BatchSize=number_of_events]
UpdateLimit: "10000" [Maximum times of regulator update]
CalculateErrors: "False" [Calculates inverse Hessian matrix at the end of the training to be able to calculate the uncertainties of an MVA value]
WeightRange: "1.000000e+00" [Take the events for the estimator calculations from small deviations from the desired value to large deviations only over the weight range]
##

#VAR -*-*-*-*-*-*-*-*-*-*-*-* variables *-*-*-*-*-*-*-*-*-*-*-*-

NVar 17
TrackP                 TrackP                 TrackP                 TrackP                 'F' [147.869995117,4760267.5]
TrackPt                TrackPt                TrackPt                TrackPt                'F' [0.360198408365,978077.5]
TrackChi2PerDof        TrackChi2PerDof        TrackChi2PerDof        TrackChi2PerDof        'F' [0.0029748051893,2.99983906746]
TrackNumDof            TrackNumDof            TrackNumDof            TrackNumDof            'I' [4,28]
TrackGhostProbability  TrackGhostProbability  TrackGhostProbability  TrackGhostProbability  'F' [0.0391823016107,1]
TrackFitVeloChi2       TrackFitVeloChi2       TrackFitVeloChi2       TrackFitVeloChi2       'F' [8.64220680796e-07,53.9077644348]
TrackFitVeloNDoF       TrackFitVeloNDoF       TrackFitVeloNDoF       TrackFitVeloNDoF       'I' [1,24]
RichUsedR1Gas          RichUsedR1Gas          RichUsedR1Gas          RichUsedR1Gas          'I' [0,1]
RichAbovePiThres       RichAbovePiThres       RichAbovePiThres       RichAbovePiThres       'I' [0,1]
RichAboveKaThres       RichAboveKaThres       RichAboveKaThres       RichAboveKaThres       'I' [0,1]
RichDLLe               RichDLLe               RichDLLe               RichDLLe               'F' [-999,118.707099915]
RichDLLmu              RichDLLmu              RichDLLmu              RichDLLmu              'F' [-999,81.0063018799]
RichDLLk               RichDLLk               RichDLLk               RichDLLk               'F' [-999,82.1451034546]
RichDLLp               RichDLLp               RichDLLp               RichDLLp               'F' [-999,92.269203186]
RichDLLbt              RichDLLbt              RichDLLbt              RichDLLbt              'F' [-999,50.5774993896]
InAccBrem              InAccBrem              InAccBrem              InAccBrem              'I' [0,1]
BremPIDe               BremPIDe               BremPIDe               BremPIDe               'F' [-999,4.79148387909]
NSpec 0

============================================================================ */

#include <vector>
#include <cmath>
#include <string>
#include <iostream>

#ifndef IClassifierReader__def
#define IClassifierReader__def

class IClassifierReader {

 public:

   // constructor
   IClassifierReader() : fStatusIsClean( true ) {}
   virtual ~IClassifierReader() {}

   // return classifier response
   virtual double GetMvaValue( const std::vector<double>& inputValues ) const = 0;

   // returns classifier status
   bool IsStatusClean() const { return fStatusIsClean; }

 protected:

   bool fStatusIsClean;
};

#endif

class ReadPion_Upstream_TMVA : public IClassifierReader {

 public:

   // constructor
   ReadPion_Upstream_TMVA( std::vector<std::string>& theInputVars )
      : IClassifierReader(),
        fClassName( "ReadPion_Upstream_TMVA" ),
        fNvars( 17 ),
        fIsNormalised( false )
   {
      // the training input variables
      const char* inputVars[] = { "TrackP", "TrackPt", "TrackChi2PerDof", "TrackNumDof",
                                  "TrackGhostProbability", "TrackFitVeloChi2", "TrackFitVeloNDoF",
                                  "RichUsedR1Gas", "RichAbovePiThres", "RichAboveKaThres",
                                  "RichDLLe", "RichDLLmu", "RichDLLk", "RichDLLp", "RichDLLbt",
                                  "InAccBrem", "BremPIDe" };

      // sanity checks
      if (theInputVars.size() <= 0) {
         std::cout << "Problem in class \"" << fClassName << "\": empty input vector" << std::endl;
         fStatusIsClean = false;
      }

      if (theInputVars.size() != fNvars) {
         std::cout << "Problem in class \"" << fClassName << "\": mismatch in number of input values: "
                   << theInputVars.size() << " != " << fNvars << std::endl;
         fStatusIsClean = false;
      }

      // validate input variables
      for (size_t ivar = 0; ivar < theInputVars.size(); ivar++) {
         if (theInputVars[ivar] != inputVars[ivar]) {
            std::cout << "Problem in class \"" << fClassName << "\": mismatch in input variable names" << std::endl
                      << " for variable [" << ivar << "]: " << theInputVars[ivar].c_str() << " != " << inputVars[ivar] << std::endl;
            fStatusIsClean = false;
         }
      }

      // initialize min and max vectors (for normalisation)
      fVmin[0] = -1; fVmax[0] = 0.99999988079071;
      fVmin[1] = -1; fVmax[1] = 1;
      fVmin[2] = -1; fVmax[2] = 1;
      fVmin[3] = -1; fVmax[3] = 1;
      fVmin[4] = -1; fVmax[4] = 1;
      fVmin[5] = -1; fVmax[5] = 1;
      fVmin[6] = -1; fVmax[6] = 1;
      fVmin[7] = -1; fVmax[7] = 1;
      fVmin[8] = -1; fVmax[8] = 1;
      fVmin[9] = -1; fVmax[9] = 1;
      fVmin[10] = -1; fVmax[10] = 1;
      fVmin[11] = -1; fVmax[11] = 1;
      fVmin[12] = -1; fVmax[12] = 1;
      fVmin[13] = -1; fVmax[13] = 1;
      fVmin[14] = -1; fVmax[14] = 1;
      fVmin[15] = -1; fVmax[15] = 1;
      fVmin[16] = -1; fVmax[16] = 1;

      // initialize input variable types
      fType[0] = 'F'; fType[1] = 'F'; fType[2] = 'F'; fType[3] = 'I'; fType[4] = 'F';
      fType[5] = 'F'; fType[6] = 'I'; fType[7] = 'I'; fType[8] = 'I'; fType[9] = 'I';
      fType[10] = 'F'; fType[11] = 'F'; fType[12] = 'F'; fType[13] = 'F'; fType[14] = 'F';
      fType[15] = 'I'; fType[16] = 'F';

      // initialize constants
      Initialize();

      // initialize transformation
      InitTransform();
   }

   // destructor
   virtual ~ReadPion_Upstream_TMVA() {
      Clear(); // method-specific
   }

   // the classifier response
   // "inputValues" is a vector of input values in the same order as the
   // variables given to the constructor
   double GetMvaValue( const std::vector<double>& inputValues ) const;

 private:

   // method-specific destructor
   void Clear();

   // input variable transformation
   double fMin_1[3][17];
   double fMax_1[3][17];
   void InitTransform_1();
   void Transform_1( std::vector<double> & iv, int sigOrBgd ) const;
   void InitTransform();
   void Transform( std::vector<double> & iv, int sigOrBgd ) const;

   // common member variables
   const char* fClassName;

   const size_t fNvars;
   size_t GetNvar()           const { return fNvars; }
   char   GetType( int ivar ) const { return fType[ivar]; }

   // normalisation of input variables
   const bool fIsNormalised;
   bool IsNormalised() const { return fIsNormalised; }
   double fVmin[17];
   double fVmax[17];
   double NormVariable( double x, double xmin, double xmax ) const {
      // normalise to output range: [-1, 1]
      return 2*(x - xmin)/(xmax - xmin) - 1.0;
   }

   // type of input variable: 'F' or 'I'
   char   fType[17];

   // initialize internal variables
   void Initialize();
   double GetMvaValue__( const std::vector<double>& inputValues ) const;

   // private members (method specific)
   double ActivationFnc(double x) const;
   double OutputActivationFnc(double x) const;

   int fLayers;
   int fLayerSize[3];
   double fWeightMatrix0to1[21][18];   // weight matrix from layer 0 to 1
   double fWeightMatrix1to2[1][21];    // weight matrix from layer 1 to 2

   double * fWeights[3];
};

inline void ReadPion_Upstream_TMVA::Initialize()
{
   // build network structure
   fLayers = 3;
   fLayerSize[0] = 18; fWeights[0] = new double[18];
   fLayerSize[1] = 21; fWeights[1] = new double[21];
   fLayerSize[2] = 1;  fWeights[2] = new double[1];
   // weight matrix from layer 0 to 1
   fWeightMatrix0to1[0][0] = 1.19522321481095;
   fWeightMatrix0to1[1][0] = 33.1291231329986;
   fWeightMatrix0to1[2][0] = 11.9142878844307;
   fWeightMatrix0to1[3][0] = -18.7869699682091;
   fWeightMatrix0to1[4][0] = -14.0692381794851;
   fWeightMatrix0to1[5][0] = -15.4359771976434;
   fWeightMatrix0to1[6][0] = 2.18533255553274;
   fWeightMatrix0to1[7][0] = -30.2892326366578;
   fWeightMatrix0to1[8][0] = -5.37800248547297;
fWeightMatrix0to1[9][0] = 18.6015373608627; fWeightMatrix0to1[10][0] = -2.72858518435716; fWeightMatrix0to1[11][0] = -1.44138213413462; fWeightMatrix0to1[12][0] = -0.830132333356163; fWeightMatrix0to1[13][0] = 0.485666375282076; fWeightMatrix0to1[14][0] = -1.08973316751044; fWeightMatrix0to1[15][0] = 22.4174605429878; fWeightMatrix0to1[16][0] = 21.3925937726726; fWeightMatrix0to1[17][0] = -8.73411159526142; fWeightMatrix0to1[18][0] = 2.42161879360648; fWeightMatrix0to1[19][0] = 0.631842902479481; fWeightMatrix0to1[0][1] = -0.745361660698243; fWeightMatrix0to1[1][1] = 3.15457186672961; fWeightMatrix0to1[2][1] = -0.682958397553365; fWeightMatrix0to1[3][1] = 5.42131940477684; fWeightMatrix0to1[4][1] = 19.1843080013028; fWeightMatrix0to1[5][1] = -5.80388000815377; fWeightMatrix0to1[6][1] = 0.214602235285382; fWeightMatrix0to1[7][1] = -0.559677268782305; fWeightMatrix0to1[8][1] = -4.13372452606158; fWeightMatrix0to1[9][1] = -4.06850196596274; fWeightMatrix0to1[10][1] = 0.0430675564867252; fWeightMatrix0to1[11][1] = -0.105224479673813; fWeightMatrix0to1[12][1] = 2.87200439765819; fWeightMatrix0to1[13][1] = 4.50039943013433; fWeightMatrix0to1[14][1] = -0.25017769126898; fWeightMatrix0to1[15][1] = 2.55793472447466; fWeightMatrix0to1[16][1] = 6.01988718727328; fWeightMatrix0to1[17][1] = 5.14287730159419; fWeightMatrix0to1[18][1] = -1.4600000054078; fWeightMatrix0to1[19][1] = 0.742250596814007; fWeightMatrix0to1[0][2] = 1.20292266268416; fWeightMatrix0to1[1][2] = 0.756467213712138; fWeightMatrix0to1[2][2] = 0.386222563953186; fWeightMatrix0to1[3][2] = 0.576605058308277; fWeightMatrix0to1[4][2] = -0.815339491871464; fWeightMatrix0to1[5][2] = -0.265557998768179; fWeightMatrix0to1[6][2] = -0.374476461468037; fWeightMatrix0to1[7][2] = 0.180283390274642; fWeightMatrix0to1[8][2] = -2.7098444943179; fWeightMatrix0to1[9][2] = 0.0387844134613424; fWeightMatrix0to1[10][2] = -4.61970540020059; fWeightMatrix0to1[11][2] = 2.16364562428373; fWeightMatrix0to1[12][2] = -3.01000735587625; fWeightMatrix0to1[13][2] = 0.17539822088863; fWeightMatrix0to1[14][2] = 0.826566570736088; fWeightMatrix0to1[15][2] = -0.191989544057493; fWeightMatrix0to1[16][2] = -2.42010909448405; fWeightMatrix0to1[17][2] = -1.11354714446669; fWeightMatrix0to1[18][2] = -1.24068612328278; fWeightMatrix0to1[19][2] = 0.6589701329795; fWeightMatrix0to1[0][3] = -1.38131866920807; fWeightMatrix0to1[1][3] = -2.19929844569512; fWeightMatrix0to1[2][3] = -2.16919358285854; fWeightMatrix0to1[3][3] = -10.7485490847247; fWeightMatrix0to1[4][3] = 7.34678798110468; fWeightMatrix0to1[5][3] = 21.4198294331657; fWeightMatrix0to1[6][3] = -13.3664040360987; fWeightMatrix0to1[7][3] = -6.32827286547752; fWeightMatrix0to1[8][3] = 17.9785511172957; fWeightMatrix0to1[9][3] = -14.8779148976063; fWeightMatrix0to1[10][3] = 0.374509527012416; fWeightMatrix0to1[11][3] = -8.3316155343209; fWeightMatrix0to1[12][3] = -25.0412840941044; fWeightMatrix0to1[13][3] = 55.8166131253793; fWeightMatrix0to1[14][3] = 4.28131033974749; fWeightMatrix0to1[15][3] = -3.3539952833802; fWeightMatrix0to1[16][3] = 6.3152942565228; fWeightMatrix0to1[17][3] = -14.8471875767049; fWeightMatrix0to1[18][3] = 0.538798693422777; fWeightMatrix0to1[19][3] = 1.53454224998456; fWeightMatrix0to1[0][4] = 0.0778590752461197; fWeightMatrix0to1[1][4] = 0.555974587372372; fWeightMatrix0to1[2][4] = -0.561082370786488; fWeightMatrix0to1[3][4] = -0.549563298714941; fWeightMatrix0to1[4][4] = 0.641768435298387; fWeightMatrix0to1[5][4] = 0.203331614284152; fWeightMatrix0to1[6][4] = -0.461074668174937; 
fWeightMatrix0to1[7][4] = -0.637469630452333; fWeightMatrix0to1[8][4] = 2.82106445981481; fWeightMatrix0to1[9][4] = -0.585329007912093; fWeightMatrix0to1[10][4] = 4.83019820589945; fWeightMatrix0to1[11][4] = -0.249952870589247; fWeightMatrix0to1[12][4] = 6.04830332965979; fWeightMatrix0to1[13][4] = -1.26314168959913; fWeightMatrix0to1[14][4] = -0.761230039913081; fWeightMatrix0to1[15][4] = 0.399202894222202; fWeightMatrix0to1[16][4] = -1.24503000027599; fWeightMatrix0to1[17][4] = 0.127897856952868; fWeightMatrix0to1[18][4] = 0.40181136972088; fWeightMatrix0to1[19][4] = -0.386712388306123; fWeightMatrix0to1[0][5] = -1.595197359438; fWeightMatrix0to1[1][5] = -1.40833537995421; fWeightMatrix0to1[2][5] = -0.669596946402229; fWeightMatrix0to1[3][5] = -1.47677818916921; fWeightMatrix0to1[4][5] = -1.06229744955009; fWeightMatrix0to1[5][5] = 1.18852770162264; fWeightMatrix0to1[6][5] = -1.52936659510644; fWeightMatrix0to1[7][5] = -9.22853047204137; fWeightMatrix0to1[8][5] = 1.73384830771215; fWeightMatrix0to1[9][5] = 0.479152906524313; fWeightMatrix0to1[10][5] = 4.70706044106084; fWeightMatrix0to1[11][5] = -6.36100282502386; fWeightMatrix0to1[12][5] = -1.88484441216295; fWeightMatrix0to1[13][5] = 0.32889752781009; fWeightMatrix0to1[14][5] = -2.31757236046121; fWeightMatrix0to1[15][5] = 25.0282694216549; fWeightMatrix0to1[16][5] = 3.13499922013842; fWeightMatrix0to1[17][5] = 2.02091155858168; fWeightMatrix0to1[18][5] = 3.02442513948971; fWeightMatrix0to1[19][5] = -0.388558106877577; fWeightMatrix0to1[0][6] = 1.20653022788212; fWeightMatrix0to1[1][6] = 2.58905839425725; fWeightMatrix0to1[2][6] = 1.91302095681877; fWeightMatrix0to1[3][6] = 12.0042863790641; fWeightMatrix0to1[4][6] = -0.669744440206253; fWeightMatrix0to1[5][6] = -21.72878782147; fWeightMatrix0to1[6][6] = 17.6969373863882; fWeightMatrix0to1[7][6] = 7.97178301163163; fWeightMatrix0to1[8][6] = -10.1766013969697; fWeightMatrix0to1[9][6] = 14.1732485741062; fWeightMatrix0to1[10][6] = -0.213319730697232; fWeightMatrix0to1[11][6] = 9.69020762962552; fWeightMatrix0to1[12][6] = 28.02445764642; fWeightMatrix0to1[13][6] = -53.943681291614; fWeightMatrix0to1[14][6] = -2.55762211207137; fWeightMatrix0to1[15][6] = 1.24328117257402; fWeightMatrix0to1[16][6] = -7.50052793076397; fWeightMatrix0to1[17][6] = 12.8507031809937; fWeightMatrix0to1[18][6] = -1.28176963143177; fWeightMatrix0to1[19][6] = 2.99663980088921; fWeightMatrix0to1[0][7] = -0.0716079183291211; fWeightMatrix0to1[1][7] = -6.52190586519782; fWeightMatrix0to1[2][7] = 22.7062749079329; fWeightMatrix0to1[3][7] = 1.19094465282266; fWeightMatrix0to1[4][7] = -0.698059628193825; fWeightMatrix0to1[5][7] = 7.49545292377349; fWeightMatrix0to1[6][7] = 1.52060381815987; fWeightMatrix0to1[7][7] = 5.93035563188097; fWeightMatrix0to1[8][7] = -11.1517853225838; fWeightMatrix0to1[9][7] = -20.2790553459807; fWeightMatrix0to1[10][7] = -1.00103612615017; fWeightMatrix0to1[11][7] = -0.542662398989847; fWeightMatrix0to1[12][7] = 5.3392745427458; fWeightMatrix0to1[13][7] = 4.73535477463991; fWeightMatrix0to1[14][7] = 0.522133938156385; fWeightMatrix0to1[15][7] = -4.89613307968856; fWeightMatrix0to1[16][7] = -5.2826998912794; fWeightMatrix0to1[17][7] = 8.6439590033349; fWeightMatrix0to1[18][7] = 5.99851690655632; fWeightMatrix0to1[19][7] = -1.19108118021745; fWeightMatrix0to1[0][8] = 0.109207028750975; fWeightMatrix0to1[1][8] = -0.771960386648688; fWeightMatrix0to1[2][8] = 24.6410939385207; fWeightMatrix0to1[3][8] = -0.0186891122811878; fWeightMatrix0to1[4][8] = -5.55277252274444; fWeightMatrix0to1[5][8] = 
0.591090707756403; fWeightMatrix0to1[6][8] = -7.36622750364681; fWeightMatrix0to1[7][8] = -0.0175297052941777; fWeightMatrix0to1[8][8] = -11.5356226800624; fWeightMatrix0to1[9][8] = -24.0715247159023; fWeightMatrix0to1[10][8] = 2.99488230715308; fWeightMatrix0to1[11][8] = -0.0697197123583702; fWeightMatrix0to1[12][8] = -0.00133424774637596; fWeightMatrix0to1[13][8] = 0.474863075321265; fWeightMatrix0to1[14][8] = 5.71938878193919; fWeightMatrix0to1[15][8] = -6.64713767601069; fWeightMatrix0to1[16][8] = -5.86294390779486; fWeightMatrix0to1[17][8] = -0.141898602946873; fWeightMatrix0to1[18][8] = 5.52826149607174; fWeightMatrix0to1[19][8] = -1.92695668688798; fWeightMatrix0to1[0][9] = -0.0918841739615373; fWeightMatrix0to1[1][9] = -0.0742896599206494; fWeightMatrix0to1[2][9] = 25.190134470566; fWeightMatrix0to1[3][9] = -0.0330324395669655; fWeightMatrix0to1[4][9] = 0.257776631321141; fWeightMatrix0to1[5][9] = -0.393631615370544; fWeightMatrix0to1[6][9] = -1.55320153170355; fWeightMatrix0to1[7][9] = 25.7763530398092; fWeightMatrix0to1[8][9] = 0.287985584542267; fWeightMatrix0to1[9][9] = 0.0560441538905073; fWeightMatrix0to1[10][9] = 0.148742314236878; fWeightMatrix0to1[11][9] = -0.20836210361327; fWeightMatrix0to1[12][9] = -0.0740189787373472; fWeightMatrix0to1[13][9] = -0.173971675117249; fWeightMatrix0to1[14][9] = 0.536723056908991; fWeightMatrix0to1[15][9] = -5.24815441559987; fWeightMatrix0to1[16][9] = -8.51572145138326; fWeightMatrix0to1[17][9] = 0.266545491219686; fWeightMatrix0to1[18][9] = 0.241771586595355; fWeightMatrix0to1[19][9] = -1.60161578358076; fWeightMatrix0to1[0][10] = -3.00632785402349; fWeightMatrix0to1[1][10] = 60.1462141148625; fWeightMatrix0to1[2][10] = -4.58021176090524; fWeightMatrix0to1[3][10] = -9.3298264736268; fWeightMatrix0to1[4][10] = -39.1202027771056; fWeightMatrix0to1[5][10] = 14.5373797223058; fWeightMatrix0to1[6][10] = -1.04717259549811; fWeightMatrix0to1[7][10] = 4.29149335747607; fWeightMatrix0to1[8][10] = 11.536477141016; fWeightMatrix0to1[9][10] = 16.8039574497688; fWeightMatrix0to1[10][10] = -0.143452076664263; fWeightMatrix0to1[11][10] = 2.71795188749132; fWeightMatrix0to1[12][10] = -12.8071059912595; fWeightMatrix0to1[13][10] = -20.765736833543; fWeightMatrix0to1[14][10] = -3.94852433475339; fWeightMatrix0to1[15][10] = -3.77313317995551; fWeightMatrix0to1[16][10] = -5.46079387240468; fWeightMatrix0to1[17][10] = -33.0606319324423; fWeightMatrix0to1[18][10] = -5.02173614282628; fWeightMatrix0to1[19][10] = -1.46759311990646; fWeightMatrix0to1[0][11] = -2.03337752759284; fWeightMatrix0to1[1][11] = -4.04680921358347; fWeightMatrix0to1[2][11] = -24.1135901098227; fWeightMatrix0to1[3][11] = -0.0956046702044451; fWeightMatrix0to1[4][11] = -0.935150934422625; fWeightMatrix0to1[5][11] = 14.7329144515658; fWeightMatrix0to1[6][11] = 1.1266665031139; fWeightMatrix0to1[7][11] = -0.382876629781865; fWeightMatrix0to1[8][11] = -6.89361033050771; fWeightMatrix0to1[9][11] = -19.9679784460046; fWeightMatrix0to1[10][11] = -0.181117898175316; fWeightMatrix0to1[11][11] = 3.31953939110337; fWeightMatrix0to1[12][11] = 2.26302618147807; fWeightMatrix0to1[13][11] = -3.2672522647376; fWeightMatrix0to1[14][11] = 2.87234654607053; fWeightMatrix0to1[15][11] = -6.92673007850601; fWeightMatrix0to1[16][11] = -6.41728736978837; fWeightMatrix0to1[17][11] = 3.17208687037243; fWeightMatrix0to1[18][11] = -0.0129960791701256; fWeightMatrix0to1[19][11] = -1.04247617675131; fWeightMatrix0to1[0][12] = 0.341982130909138; fWeightMatrix0to1[1][12] = 12.0146116996895; fWeightMatrix0to1[2][12] = 
-19.949653512159; fWeightMatrix0to1[3][12] = 2.49285955417732; fWeightMatrix0to1[4][12] = 10.8565525563465; fWeightMatrix0to1[5][12] = -9.50220915307318; fWeightMatrix0to1[6][12] = -0.726966648591376; fWeightMatrix0to1[7][12] = -5.31969909480679; fWeightMatrix0to1[8][12] = 16.0554241053617; fWeightMatrix0to1[9][12] = 14.5663146134645; fWeightMatrix0to1[10][12] = -0.207192971947514; fWeightMatrix0to1[11][12] = -2.13839769650453; fWeightMatrix0to1[12][12] = 0.181817964798853; fWeightMatrix0to1[13][12] = 3.78469738167117; fWeightMatrix0to1[14][12] = 2.48007970700888; fWeightMatrix0to1[15][12] = -6.0134752938609; fWeightMatrix0to1[16][12] = 21.9902780524819; fWeightMatrix0to1[17][12] = -3.02224127186197; fWeightMatrix0to1[18][12] = -3.65807910683016; fWeightMatrix0to1[19][12] = -1.36867700584012; fWeightMatrix0to1[0][13] = -0.130728625460527; fWeightMatrix0to1[1][13] = -26.1422114272289; fWeightMatrix0to1[2][13] = -14.0765296567639; fWeightMatrix0to1[3][13] = 4.89355874811142; fWeightMatrix0to1[4][13] = 15.6100945485107; fWeightMatrix0to1[5][13] = -7.19148289794022; fWeightMatrix0to1[6][13] = -1.64142266494522; fWeightMatrix0to1[7][13] = -2.41537320296377; fWeightMatrix0to1[8][13] = 1.6165941756099; fWeightMatrix0to1[9][13] = 6.36078481546167; fWeightMatrix0to1[10][13] = -1.02059510143637; fWeightMatrix0to1[11][13] = -2.10109400982091; fWeightMatrix0to1[12][13] = 4.41582648723868; fWeightMatrix0to1[13][13] = 4.27866438640179; fWeightMatrix0to1[14][13] = 0.65226795647031; fWeightMatrix0to1[15][13] = -5.41285604975487; fWeightMatrix0to1[16][13] = -5.068499312188; fWeightMatrix0to1[17][13] = 15.0771507622223; fWeightMatrix0to1[18][13] = 0.627064646375856; fWeightMatrix0to1[19][13] = -0.028091425848016; fWeightMatrix0to1[0][14] = 4.57910268250357; fWeightMatrix0to1[1][14] = 20.1407755150667; fWeightMatrix0to1[2][14] = -16.3870287194615; fWeightMatrix0to1[3][14] = 0.090783040807263; fWeightMatrix0to1[4][14] = 18.0411904369175; fWeightMatrix0to1[5][14] = -19.4846360171183; fWeightMatrix0to1[6][14] = -1.30181453310963; fWeightMatrix0to1[7][14] = -2.53383338590105; fWeightMatrix0to1[8][14] = 17.063780721124; fWeightMatrix0to1[9][14] = 29.7372175216008; fWeightMatrix0to1[10][14] = -0.731339053715266; fWeightMatrix0to1[11][14] = -1.37361672722463; fWeightMatrix0to1[12][14] = -0.551928838224794; fWeightMatrix0to1[13][14] = 10.3673612089826; fWeightMatrix0to1[14][14] = 2.03909611890674; fWeightMatrix0to1[15][14] = -7.00306267390333; fWeightMatrix0to1[16][14] = 45.8621548846068; fWeightMatrix0to1[17][14] = 7.30832661980013; fWeightMatrix0to1[18][14] = -4.08086476400185; fWeightMatrix0to1[19][14] = -1.20143645441829; fWeightMatrix0to1[0][15] = -5.01130373209418; fWeightMatrix0to1[1][15] = -11.7104210223126; fWeightMatrix0to1[2][15] = 12.2082823045145; fWeightMatrix0to1[3][15] = -9.39815636149359; fWeightMatrix0to1[4][15] = 20.7414263547692; fWeightMatrix0to1[5][15] = -6.94695536749658; fWeightMatrix0to1[6][15] = -11.5109954022511; fWeightMatrix0to1[7][15] = -23.9332680449985; fWeightMatrix0to1[8][15] = -10.7314171364162; fWeightMatrix0to1[9][15] = -8.86808004962935; fWeightMatrix0to1[10][15] = -0.712285777581663; fWeightMatrix0to1[11][15] = -18.4078285126948; fWeightMatrix0to1[12][15] = 7.4289485512628; fWeightMatrix0to1[13][15] = 15.0405235395912; fWeightMatrix0to1[14][15] = 3.03590048114197; fWeightMatrix0to1[15][15] = 11.7157077167391; fWeightMatrix0to1[16][15] = -0.326647476914942; fWeightMatrix0to1[17][15] = 12.3334983672731; fWeightMatrix0to1[18][15] = 0.921063731606818; fWeightMatrix0to1[19][15] = 
1.49205379285564;
   fWeightMatrix0to1[0][16] = 4.9241944499403; fWeightMatrix0to1[1][16] = 11.5263800763934; fWeightMatrix0to1[2][16] = -12.1762742943933; fWeightMatrix0to1[3][16] = 9.11841064851058;
   fWeightMatrix0to1[4][16] = -20.9878005435203; fWeightMatrix0to1[5][16] = 7.49366374474913; fWeightMatrix0to1[6][16] = 8.46529309991388; fWeightMatrix0to1[7][16] = 23.9628590162884;
   fWeightMatrix0to1[8][16] = -4.63954360626243; fWeightMatrix0to1[9][16] = 31.0630100650127; fWeightMatrix0to1[10][16] = -0.624974695894088; fWeightMatrix0to1[11][16] = 18.0863314798211;
   fWeightMatrix0to1[12][16] = -7.34098267213639; fWeightMatrix0to1[13][16] = -15.2773245654097; fWeightMatrix0to1[14][16] = -3.27257467694786; fWeightMatrix0to1[15][16] = -11.8637479880412;
   fWeightMatrix0to1[16][16] = 0.0634517394261028; fWeightMatrix0to1[17][16] = -12.5593907576627; fWeightMatrix0to1[18][16] = -0.38616113546943; fWeightMatrix0to1[19][16] = 0.355956758732832;
   fWeightMatrix0to1[0][17] = -0.0705106510153285; fWeightMatrix0to1[1][17] = -6.82380829714885; fWeightMatrix0to1[2][17] = 3.9544534302287; fWeightMatrix0to1[3][17] = -15.0419422938497;
   fWeightMatrix0to1[4][17] = 8.46317206423583; fWeightMatrix0to1[5][17] = -19.8355676698138; fWeightMatrix0to1[6][17] = -2.01530948897503; fWeightMatrix0to1[7][17] = -13.2623655296581;
   fWeightMatrix0to1[8][17] = -1.9268985379624; fWeightMatrix0to1[9][17] = -4.02903459395172; fWeightMatrix0to1[10][17] = 1.00979804930581; fWeightMatrix0to1[11][17] = -4.4722009909993;
   fWeightMatrix0to1[12][17] = 1.69802870721275; fWeightMatrix0to1[13][17] = 3.43046549042955; fWeightMatrix0to1[14][17] = -0.0169945039607921; fWeightMatrix0to1[15][17] = 2.98093875202316;
   fWeightMatrix0to1[16][17] = -1.67270348894481; fWeightMatrix0to1[17][17] = -3.66857904098936; fWeightMatrix0to1[18][17] = 1.3080284098229; fWeightMatrix0to1[19][17] = -0.935755038408761;
   // weight matrix from layer 1 to 2
   fWeightMatrix1to2[0][0] = -1.74645313457612; fWeightMatrix1to2[0][1] = -0.368278758614726; fWeightMatrix1to2[0][2] = 1.54266731181509; fWeightMatrix1to2[0][3] = -2.83592242709938;
   fWeightMatrix1to2[0][4] = 0.945017776052171; fWeightMatrix1to2[0][5] = -0.925850782875636; fWeightMatrix1to2[0][6] = -0.442155875843389; fWeightMatrix1to2[0][7] = -1.33848376171184;
   fWeightMatrix1to2[0][8] = -0.142392979234221; fWeightMatrix1to2[0][9] = -0.76642323882604; fWeightMatrix1to2[0][10] = -0.333752106707388; fWeightMatrix1to2[0][11] = -0.831181959848214;
   fWeightMatrix1to2[0][12] = 0.539408381491453; fWeightMatrix1to2[0][13] = 0.731052633981017; fWeightMatrix1to2[0][14] = 2.24306049848481; fWeightMatrix1to2[0][15] = 1.99443669410846;
   fWeightMatrix1to2[0][16] = -1.11221848845332; fWeightMatrix1to2[0][17] = 1.19819442451786; fWeightMatrix1to2[0][18] = -1.13708160305987; fWeightMatrix1to2[0][19] = -1.63983566541144;
   fWeightMatrix1to2[0][20] = -3.62553218498768;
}

inline double ReadPion_Upstream_TMVA::GetMvaValue__( const std::vector<double>& inputValues ) const
{
   if (inputValues.size() != (unsigned int)fLayerSize[0]-1) {
      std::cout << "Input vector needs to be of size " << fLayerSize[0]-1 << std::endl;
      return 0;
   }

   // reset node buffers; the last node of each non-output layer is the bias node, fixed to 1
   for (int l=0; l<fLayers; l++)
      for (int i=0; i<fLayerSize[l]; i++) fWeights[l][i]=0;

   for (int l=0; l<fLayers-1; l++)
      fWeights[l][fLayerSize[l]-1]=1;

   for (int i=0; i<fLayerSize[0]-1; i++)
      fWeights[0][i]=inputValues[i];

   // layer 0 to 1
   for (int o=0; o<fLayerSize[1]-1; o++) {
      for (int i=0; i<fLayerSize[0]; i++) {
         double inputVal = fWeightMatrix0to1[o][i] * fWeights[0][i];
         fWeights[1][o] += inputVal;
      }
      fWeights[1][o] = ActivationFnc(fWeights[1][o]);
   }
   // layer 1 to 2
   for (int o=0; o<fLayerSize[2]; o++) {
      for (int i=0; i<fLayerSize[1]; i++) {
         double inputVal = fWeightMatrix1to2[o][i] * fWeights[1][i];
         fWeights[2][o] += inputVal;
      }
      fWeights[2][o] = OutputActivationFnc(fWeights[2][o]);
   }

   return fWeights[2][0];
}

inline double ReadPion_Upstream_TMVA::ActivationFnc(double x) const
{
   // hyperbolic tangent (NeuronType: "tanh")
   return std::tanh(x);
}

inline double ReadPion_Upstream_TMVA::OutputActivationFnc(double x) const
{
   // sigmoid output, as used with the cross-entropy (CE) estimator
   return 1.0/(1.0+std::exp(-x));
}

// Clean up
inline void ReadPion_Upstream_TMVA::Clear()
{
   // clear weights
   for (int lIdx = 0; lIdx < 3; lIdx++) delete[] fWeights[lIdx];
}

// return the MVA classifier response
inline double ReadPion_Upstream_TMVA::GetMvaValue( const std::vector<double>& inputValues ) const
{
   // classifier response value
   double retval = 0;

   // classifier response, sanity check first
   if (!IsStatusClean()) {
      std::cout << "Problem in class \"" << fClassName << "\": cannot return classifier response"
                << " because status is dirty" << std::endl;
      retval = 0;
   }
   else {
      if (IsNormalised()) {
         // normalise variables
         std::vector<double> iV;
         iV.reserve(inputValues.size());
         int ivar = 0;
         for (std::vector<double>::const_iterator varIt = inputValues.begin();
              varIt != inputValues.end(); varIt++, ivar++) {
            iV.push_back(NormVariable( *varIt, fVmin[ivar], fVmax[ivar] ));
         }
         Transform( iV, -1 );
         retval = GetMvaValue__( iV );
      }
      else {
         std::vector<double> iV;
         int ivar = 0;
         for (std::vector<double>::const_iterator varIt = inputValues.begin();
              varIt != inputValues.end(); varIt++, ivar++) {
            iV.push_back(*varIt);
         }
         Transform( iV, -1 );
         retval = GetMvaValue__( iV );
      }
   }

   return retval;
}

//_______________________________________________________________________
inline void ReadPion_Upstream_TMVA::InitTransform_1()
{
   // Normalization transformation, initialisation
   fMin_1[0][0] = 147.869995117; fMax_1[0][0] = 4760267.5;
   fMin_1[1][0] = 231.38999939; fMax_1[1][0] = 4747506;
   fMin_1[2][0] = 147.869995117; fMax_1[2][0] = 4760267.5;
   fMin_1[0][1] = 0.360198408365; fMax_1[0][1] = 978077.5;
   fMin_1[1][1] = 4.78933763504; fMax_1[1][1] = 952692.5;
   fMin_1[2][1] = 0.360198408365; fMax_1[2][1] = 978077.5;
   fMin_1[0][2] = 0.0029748051893; fMax_1[0][2] = 2.99983143806;
   fMin_1[1][2] = 0.00438635889441; fMax_1[1][2] = 2.99983906746;
   fMin_1[2][2] = 0.0029748051893; fMax_1[2][2] = 2.99983906746;
   fMin_1[0][3] = 4; fMax_1[0][3] = 28;
   fMin_1[1][3] = 4; fMax_1[1][3] = 27;
   fMin_1[2][3] = 4; fMax_1[2][3] = 28;
   fMin_1[0][4] = 0.0391823016107; fMax_1[0][4] = 1;
   fMin_1[1][4] = 0.0402400195599; fMax_1[1][4] = 1;
   fMin_1[2][4] = 0.0391823016107; fMax_1[2][4] = 1;
   fMin_1[0][5] = 6.29194892099e-06; fMax_1[0][5] = 51.1343193054;
   fMin_1[1][5] = 8.64220680796e-07; fMax_1[1][5] = 53.9077644348;
   fMin_1[2][5] = 8.64220680796e-07; fMax_1[2][5] = 53.9077644348;
   fMin_1[0][6] = 1; fMax_1[0][6] = 24;
   fMin_1[1][6] = 1; fMax_1[1][6] = 23;
   fMin_1[2][6] = 1; fMax_1[2][6] = 24;
   fMin_1[0][7] = 0; fMax_1[0][7] = 1;
   fMin_1[1][7] = 0; fMax_1[1][7] = 1;
   fMin_1[2][7] = 0; fMax_1[2][7] = 1;
   fMin_1[0][8] = 0; fMax_1[0][8] = 1;
   fMin_1[1][8] = 0; fMax_1[1][8] = 1;
   fMin_1[2][8] = 0; fMax_1[2][8] = 1;
   fMin_1[0][9] = 0; fMax_1[0][9] = 1;
   fMin_1[1][9] = 0; fMax_1[1][9] = 1;
   fMin_1[2][9] = 0; fMax_1[2][9] = 1;
   fMin_1[0][10] = -999; fMax_1[0][10] = 118.707099915;
   fMin_1[1][10] = -999; fMax_1[1][10] = 88.657699585;
   fMin_1[2][10] = -999; fMax_1[2][10] = 118.707099915;
   fMin_1[0][11] = -999; fMax_1[0][11] = 76.2277984619;
   fMin_1[1][11] = -999; fMax_1[1][11] = 81.0063018799;
   fMin_1[2][11] = -999; fMax_1[2][11] = 81.0063018799;
   fMin_1[0][12] = -999; fMax_1[0][12] = 82.1451034546;
   fMin_1[1][12] = -999; fMax_1[1][12] = 79.2463989258;
   fMin_1[2][12] = -999; fMax_1[2][12] = 82.1451034546;
   fMin_1[0][13] = -999; fMax_1[0][13] = 72.6592025757;
   fMin_1[1][13] = -999; fMax_1[1][13] = 92.269203186;
   fMin_1[2][13] = -999; fMax_1[2][13] = 92.269203186;
   fMin_1[0][14] = -999; fMax_1[0][14] = 48.4067001343;
   fMin_1[1][14] = -999; fMax_1[1][14] = 50.5774993896;
   fMin_1[2][14] = -999; fMax_1[2][14] = 50.5774993896;
   fMin_1[0][15] = 0; fMax_1[0][15] = 1;
   fMin_1[1][15] = 0; fMax_1[1][15] = 1;
   fMin_1[2][15] = 0; fMax_1[2][15] = 1;
   fMin_1[0][16] = -999; fMax_1[0][16] = 4.79148387909;
   fMin_1[1][16] = -999; fMax_1[1][16] = 4.79148387909;
   fMin_1[2][16] = -999; fMax_1[2][16] = 4.79148387909;
}

//_______________________________________________________________________
inline void ReadPion_Upstream_TMVA::Transform_1( std::vector<double>& iv, int cls) const
{
   // Normalization transformation
   if (cls < 0 || cls > 2) {
      if (2 > 1 ) cls = 2;
      else cls = 2;
   }
   const int nVar = 17;

   // get indices of used variables
   // define the indices of the variables which are transformed by this transformation
   static std::vector<int> indicesGet;
   static std::vector<int> indicesPut;

   if ( indicesGet.empty() ) {
      indicesGet.reserve(fNvars);
      indicesGet.push_back( 0); indicesGet.push_back( 1); indicesGet.push_back( 2); indicesGet.push_back( 3);
      indicesGet.push_back( 4); indicesGet.push_back( 5); indicesGet.push_back( 6); indicesGet.push_back( 7);
      indicesGet.push_back( 8); indicesGet.push_back( 9); indicesGet.push_back( 10); indicesGet.push_back( 11);
      indicesGet.push_back( 12); indicesGet.push_back( 13); indicesGet.push_back( 14); indicesGet.push_back( 15);
      indicesGet.push_back( 16);
   }
   if ( indicesPut.empty() ) {
      indicesPut.reserve(fNvars);
      indicesPut.push_back( 0); indicesPut.push_back( 1); indicesPut.push_back( 2); indicesPut.push_back( 3);
      indicesPut.push_back( 4); indicesPut.push_back( 5); indicesPut.push_back( 6); indicesPut.push_back( 7);
      indicesPut.push_back( 8); indicesPut.push_back( 9); indicesPut.push_back( 10); indicesPut.push_back( 11);
      indicesPut.push_back( 12); indicesPut.push_back( 13); indicesPut.push_back( 14); indicesPut.push_back( 15);
      indicesPut.push_back( 16);
   }

   static std::vector<double> dv;
   dv.resize(nVar);
   for (int ivar=0; ivar<nVar; ivar++) dv[ivar] = iv[indicesGet.at(ivar)];

   // map each variable onto [-1,1]: x' = 2*(x - min)/(max - min) - 1
   for (int ivar=0; ivar<17; ivar++) {
      double offset = fMin_1[cls][ivar];
      double scale  = 1.0/(fMax_1[cls][ivar]-fMin_1[cls][ivar]);
      iv[indicesPut.at(ivar)] = (dv[ivar]-offset)*scale * 2 - 1;
   }
}

//_______________________________________________________________________
inline void ReadPion_Upstream_TMVA::InitTransform()
{
   InitTransform_1();
}

//_______________________________________________________________________
inline void ReadPion_Upstream_TMVA::Transform( std::vector<double>& iv, int sigOrBgd ) const
{
   Transform_1( iv, sigOrBgd );
}
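
/*
  Example usage (an illustrative sketch, not part of the generated TMVA output):
  the reader is constructed with the list of input variable names in the exact
  order given in the #VAR block above, and GetMvaValue() is then called with the
  17 corresponding values for each track candidate. The numeric values below are
  arbitrary placeholders, not real track data, and the guard macro
  READPION_UPSTREAM_TMVA_EXAMPLE is a name chosen here purely for illustration;
  define it at compile time to build this standalone test.
*/
#ifdef READPION_UPSTREAM_TMVA_EXAMPLE
int main()
{
   // input variable names, in the order expected by the classifier
   std::vector<std::string> vars = {
      "TrackP", "TrackPt", "TrackChi2PerDof", "TrackNumDof",
      "TrackGhostProbability", "TrackFitVeloChi2", "TrackFitVeloNDoF",
      "RichUsedR1Gas", "RichAbovePiThres", "RichAboveKaThres",
      "RichDLLe", "RichDLLmu", "RichDLLk", "RichDLLp", "RichDLLbt",
      "InAccBrem", "BremPIDe" };

   ReadPion_Upstream_TMVA reader( vars );

   // one candidate track (placeholder values, same order as 'vars')
   std::vector<double> values = {
      5000., 350., 1.2, 12,          // TrackP, TrackPt, TrackChi2PerDof, TrackNumDof
      0.05, 3.4, 8,                  // TrackGhostProbability, TrackFitVeloChi2, TrackFitVeloNDoF
      1, 1, 0,                       // RichUsedR1Gas, RichAbovePiThres, RichAboveKaThres
      -2.1, -1.0, -5.3, -6.7, -3.2,  // RichDLLe, RichDLLmu, RichDLLk, RichDLLp, RichDLLbt
      0, -999. };                    // InAccBrem, BremPIDe

   if (reader.IsStatusClean()) {
      std::cout << "Pion_Upstream MLP response = " << reader.GetMvaValue( values ) << std::endl;
   }
   return 0;
}
#endif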