// Class: ReadElectron_Upstream_TMVA
// Automatically generated by MethodBase::MakeClass
//

/* configuration options =====================================================

#GEN -*-*-*-*-*-*-*-*-*-*-*- general info -*-*-*-*-*-*-*-*-*-*-*-

Method         : MLP::Electron_Upstream_TMVA
TMVA Release   : 4.2.0 [262656]
ROOT Release   : 6.02/01 [393729]
Creator        : jonesc
Date           : Sun Feb 14 00:26:52 2016
Host           : Linux lcgapp-slc6-physical1.cern.ch 2.6.32-431.29.2.el6.x86_64 #1 SMP Wed Sep 10 11:13:12 CEST 2014 x86_64 x86_64 x86_64 GNU/Linux
Dir            : /var/pcfst/r03/lhcb/jonesc/ANNPID/results/MC12/TrainMixture/TrainPhysTks-EvalPhysTks-ReweightRICH2/GhostAccFrac0.01/ProtonAccFrac1.0/KaonAccFrac1.0/TMVA-Run2-NoTkLikCDVelodEdx/MLP/Norm/ScaleF1.2/BP/NCycles750/CE/tanh/CVTest15/CVImp1e-16/NotUseRegulator/Electron/Upstream
Training events: 1811352
Analysis type  : [Classification]

#OPT -*-*-*-*-*-*-*-*-*-*-*-*- options -*-*-*-*-*-*-*-*-*-*-*-*-

# Set by User:
NCycles: "750" [Number of training cycles]
HiddenLayers: "20" [Specification of hidden layer architecture]
NeuronType: "tanh" [Neuron activation function type]
EstimatorType: "CE" [MSE (Mean Square Estimator) for Gaussian Likelihood or CE (Cross-Entropy) for Bernoulli Likelihood]
V: "True" [Verbose output (short form of "VerbosityLevel" below - overrides the latter one)]
VarTransform: "Norm" [List of variable transformations performed before training, e.g., "D_Background,P_Signal,G,N_AllClasses" for: "Decorrelation, PCA-transformation, Gaussianisation, Normalisation, each for the given class of events ('AllClasses' denotes all events of all classes, if no class indication is given, 'All' is assumed)"]
H: "True" [Print method-specific help message]
TrainingMethod: "BP" [Train with Back-Propagation (BP), BFGS Algorithm (BFGS), or Genetic Algorithm (GA - slower and worse)]
EpochMonitoring: "True" [Provide epoch-wise monitoring plots according to TestRate (caution: causes big ROOT output file!)]
ConvergenceImprove: "1.000000e-16" [Minimum improvement which counts as improvement (<0 means automatic convergence check is turned off)]
ConvergenceTests: "15" [Number of steps (without improvement) required for convergence (<0 means automatic convergence check is turned off)]
UseRegulator: "False" [Use regulator to avoid over-training]
# Default:
RandomSeed: "1" [Random seed for initial synapse weights (0 means unique seed for each run; default value '1')]
NeuronInputType: "sum" [Neuron input function type]
VerbosityLevel: "Verbose" [Verbosity level]
CreateMVAPdfs: "False" [Create PDFs for classifier outputs (signal and background)]
IgnoreNegWeightsInTraining: "False" [Events with negative weights are ignored in the training (but are included for testing and performance evaluation)]
LearningRate: "2.000000e-02" [ANN learning rate parameter]
DecayRate: "1.000000e-02" [Decay rate for learning parameter]
TestRate: "10" [Test for overtraining performed at each #th epochs]
Sampling: "1.000000e+00" [Only 'Sampling' (randomly selected) events are trained each epoch]
SamplingEpoch: "1.000000e+00" [Sampling is used for the first 'SamplingEpoch' epochs, afterwards, all events are taken for training]
SamplingImportance: "1.000000e+00" [The sampling weights of events in epochs which successful (worse estimator than before) are multiplied with SamplingImportance, else they are divided.]
SamplingTraining: "True" [The training sample is sampled]
SamplingTesting: "False" [The testing sample is sampled]
ResetStep: "50" [How often BFGS should reset history]
Tau: "3.000000e+00" [LineSearch "size step"]
BPMode: "sequential" [Back-propagation learning mode: sequential or batch]
BatchSize: "-1" [Batch size: number of events/batch, only set if in Batch Mode, -1 for BatchSize=number_of_events]
UpdateLimit: "10000" [Maximum times of regulator update]
CalculateErrors: "False" [Calculates inverse Hessian matrix at the end of the training to be able to calculate the uncertainties of an MVA value]
WeightRange: "1.000000e+00" [Take the events for the estimator calculations from small deviations from the desired value to large deviations only over the weight range]
##

#VAR -*-*-*-*-*-*-*-*-*-*-*-* variables *-*-*-*-*-*-*-*-*-*-*-*-

NVar 17
TrackP                 TrackP                 TrackP                 TrackP                 'F' [231.839996338,4745141]
TrackPt                TrackPt                TrackPt                TrackPt                'F' [2.86109733582,952692.5]
TrackChi2PerDof        TrackChi2PerDof        TrackChi2PerDof        TrackChi2PerDof        'F' [0.00438635889441,2.99983906746]
TrackNumDof            TrackNumDof            TrackNumDof            TrackNumDof            'I' [4,27]
TrackGhostProbability  TrackGhostProbability  TrackGhostProbability  TrackGhostProbability  'F' [0.0299832448363,1]
TrackFitVeloChi2       TrackFitVeloChi2       TrackFitVeloChi2       TrackFitVeloChi2       'F' [8.64220680796e-07,53.9077644348]
TrackFitVeloNDoF       TrackFitVeloNDoF       TrackFitVeloNDoF       TrackFitVeloNDoF       'I' [1,23]
RichUsedR1Gas          RichUsedR1Gas          RichUsedR1Gas          RichUsedR1Gas          'I' [0,1]
RichAboveMuThres       RichAboveMuThres       RichAboveMuThres       RichAboveMuThres       'I' [0,1]
RichAboveKaThres       RichAboveKaThres       RichAboveKaThres       RichAboveKaThres       'I' [0,1]
RichDLLe               RichDLLe               RichDLLe               RichDLLe               'F' [-999,118.707099915]
RichDLLmu              RichDLLmu              RichDLLmu              RichDLLmu              'F' [-999,79.7293014526]
RichDLLk               RichDLLk               RichDLLk               RichDLLk               'F' [-999,94.0835037231]
RichDLLp               RichDLLp               RichDLLp               RichDLLp               'F' [-999,92.269203186]
RichDLLbt              RichDLLbt              RichDLLbt              RichDLLbt              'F' [-999,50.5774993896]
InAccBrem              InAccBrem              InAccBrem              InAccBrem              'I' [0,1]
BremPIDe               BremPIDe               BremPIDe               BremPIDe               'F' [-999,4.79148387909]
NSpec 0

============================================================================ */

#include <vector>
#include <cmath>
#include <string>
#include <iostream>

#ifndef IClassifierReader__def
#define IClassifierReader__def

class IClassifierReader {

 public:

   // constructor
   IClassifierReader() : fStatusIsClean( true ) {}
   virtual ~IClassifierReader() {}

   // return classifier response
   virtual double GetMvaValue( const std::vector<double>& inputValues ) const = 0;

   // returns classifier status
   bool IsStatusClean() const { return fStatusIsClean; }

 protected:

   bool fStatusIsClean;
};

#endif

class ReadElectron_Upstream_TMVA : public IClassifierReader {

 public:

   // constructor
   ReadElectron_Upstream_TMVA( std::vector<std::string>& theInputVars )
      : IClassifierReader(),
        fClassName( "ReadElectron_Upstream_TMVA" ),
        fNvars( 17 ),
        fIsNormalised( false )
   {
      // the training input variables
      const char* inputVars[] = { "TrackP", "TrackPt", "TrackChi2PerDof", "TrackNumDof",
                                  "TrackGhostProbability", "TrackFitVeloChi2", "TrackFitVeloNDoF",
                                  "RichUsedR1Gas", "RichAboveMuThres", "RichAboveKaThres",
                                  "RichDLLe", "RichDLLmu", "RichDLLk", "RichDLLp", "RichDLLbt",
                                  "InAccBrem", "BremPIDe" };

      // sanity checks
      if (theInputVars.size() <= 0) {
         std::cout << "Problem in class \"" << fClassName << "\": empty input vector" << std::endl;
         fStatusIsClean = false;
      }

      if (theInputVars.size() != fNvars) {
         std::cout << "Problem in class \"" << fClassName << "\": mismatch in number of input values: "
                   << theInputVars.size() << " != " << fNvars << std::endl;
         fStatusIsClean = false;
      }

      // validate input variables
      for (size_t ivar = 0; ivar < theInputVars.size(); ivar++) {
         if (theInputVars[ivar] != inputVars[ivar]) {
            std::cout << "Problem in class \"" << fClassName << "\": mismatch in input variable names" << std::endl
                      << " for variable [" << ivar << "]: " << theInputVars[ivar].c_str() << " != " << inputVars[ivar] << std::endl;
            fStatusIsClean = false;
         }
      }

      // initialize min and max vectors (for normalisation)
      fVmin[0] = -1; fVmax[0] = 0.99999988079071;
      fVmin[1] = -1; fVmax[1] = 1;
      fVmin[2] = -1; fVmax[2] = 1;
      fVmin[3] = -1; fVmax[3] = 1;
      fVmin[4] = -1; fVmax[4] = 1;
      fVmin[5] = -1; fVmax[5] = 1;
      fVmin[6] = -1; fVmax[6] = 1;
      fVmin[7] = -1; fVmax[7] = 1;
      fVmin[8] = -1; fVmax[8] = 1;
      fVmin[9] = -1; fVmax[9] = 1;
      fVmin[10] = -1; fVmax[10] = 1;
      fVmin[11] = -1; fVmax[11] = 1;
      fVmin[12] = -1; fVmax[12] = 1;
      fVmin[13] = -1; fVmax[13] = 1;
      fVmin[14] = -1; fVmax[14] = 1;
      fVmin[15] = -1; fVmax[15] = 1;
      fVmin[16] = -1; fVmax[16] = 1;

      // initialize input variable types
      fType[0] = 'F';
      fType[1] = 'F';
      fType[2] = 'F';
      fType[3] = 'I';
      fType[4] = 'F';
      fType[5] = 'F';
      fType[6] = 'I';
      fType[7] = 'I';
      fType[8] = 'I';
      fType[9] = 'I';
      fType[10] = 'F';
      fType[11] = 'F';
      fType[12] = 'F';
      fType[13] = 'F';
      fType[14] = 'F';
      fType[15] = 'I';
      fType[16] = 'F';

      // initialize constants
      Initialize();

      // initialize transformation
      InitTransform();
   }

   // destructor
   virtual ~ReadElectron_Upstream_TMVA() {
      Clear(); // method-specific
   }

   // the classifier response
   // "inputValues" is a vector of input values in the same order as the
   // variables given to the constructor
   double GetMvaValue( const std::vector<double>& inputValues ) const;

 private:

   // method-specific destructor
   void Clear();

   // input variable transformation
   double fMin_1[3][17];
   double fMax_1[3][17];
   void InitTransform_1();
   void Transform_1( std::vector<double> & iv, int sigOrBgd ) const;
   void InitTransform();
   void Transform( std::vector<double> & iv, int sigOrBgd ) const;

   // common member variables
   const char* fClassName;

   const size_t fNvars;
   size_t GetNvar()           const { return fNvars; }
   char   GetType( int ivar ) const { return fType[ivar]; }

   // normalisation of input variables
   const bool fIsNormalised;
   bool IsNormalised() const { return fIsNormalised; }
   double fVmin[17];
   double fVmax[17];
   double NormVariable( double x, double xmin, double xmax ) const {
      // normalise to output range: [-1, 1]
      return 2*(x - xmin)/(xmax - xmin) - 1.0;
   }

   // type of input variable: 'F' or 'I'
   char   fType[17];

   // initialize internal variables
   void Initialize();
   double GetMvaValue__( const std::vector<double>& inputValues ) const;

   // private members (method specific)
   double ActivationFnc(double x) const;
   double OutputActivationFnc(double x) const;

   int fLayers;
   int fLayerSize[3];
   double fWeightMatrix0to1[21][18];   // weight matrix from layer 0 to 1
   double fWeightMatrix1to2[1][21];    // weight matrix from layer 1 to 2

   double * fWeights[3];
};

inline void ReadElectron_Upstream_TMVA::Initialize()
{
   // build network structure
   fLayers = 3;
   fLayerSize[0] = 18; fWeights[0] = new double[18];
   fLayerSize[1] = 21; fWeights[1] = new double[21];
   fLayerSize[2] = 1;  fWeights[2] = new double[1];
   // weight matrix from layer 0 to 1
   fWeightMatrix0to1[0][0] = 0.288564800758088;
   fWeightMatrix0to1[1][0] = 3.39335639410402;
   fWeightMatrix0to1[2][0] = 0.884429477901062;
   fWeightMatrix0to1[3][0] = 3.17219551180617;
   fWeightMatrix0to1[4][0] = -6.348982839333;
   fWeightMatrix0to1[5][0] = -1.64033116926245;
   fWeightMatrix0to1[6][0] = 0.235145247327429;
   fWeightMatrix0to1[7][0] = 2.52182626851309;
   fWeightMatrix0to1[8][0] =
-1.62301260159783; fWeightMatrix0to1[9][0] = -1.68716936391527; fWeightMatrix0to1[10][0] = -4.12820122764188; fWeightMatrix0to1[11][0] = -0.338609931956714; fWeightMatrix0to1[12][0] = 6.96772439424285; fWeightMatrix0to1[13][0] = -1.45373589089682; fWeightMatrix0to1[14][0] = -0.976118627613545; fWeightMatrix0to1[15][0] = 1.49171990380549; fWeightMatrix0to1[16][0] = -0.361486097064996; fWeightMatrix0to1[17][0] = 0.603862466794217; fWeightMatrix0to1[18][0] = 4.46328860571466; fWeightMatrix0to1[19][0] = 1.38570866844564; fWeightMatrix0to1[0][1] = -0.199324360214842; fWeightMatrix0to1[1][1] = -0.11015942457735; fWeightMatrix0to1[2][1] = 0.256782483395967; fWeightMatrix0to1[3][1] = 0.188778637226419; fWeightMatrix0to1[4][1] = 0.879172029375543; fWeightMatrix0to1[5][1] = -0.611350671046855; fWeightMatrix0to1[6][1] = 1.74709433600714; fWeightMatrix0to1[7][1] = -2.96063752189913; fWeightMatrix0to1[8][1] = -2.9076269791671; fWeightMatrix0to1[9][1] = -1.08944211673709; fWeightMatrix0to1[10][1] = 2.5966323714202; fWeightMatrix0to1[11][1] = 1.4111612867605; fWeightMatrix0to1[12][1] = -2.27706082234621; fWeightMatrix0to1[13][1] = -0.391759318673855; fWeightMatrix0to1[14][1] = -0.117835442653519; fWeightMatrix0to1[15][1] = -0.552990042650652; fWeightMatrix0to1[16][1] = -1.3913515365442; fWeightMatrix0to1[17][1] = 1.9852849280806; fWeightMatrix0to1[18][1] = 10.9102321649746; fWeightMatrix0to1[19][1] = 1.1140239188153; fWeightMatrix0to1[0][2] = 0.435260063636352; fWeightMatrix0to1[1][2] = 1.9230250318597; fWeightMatrix0to1[2][2] = -0.374126004029466; fWeightMatrix0to1[3][2] = -0.986301012190085; fWeightMatrix0to1[4][2] = 0.147145321351557; fWeightMatrix0to1[5][2] = 0.186569342626632; fWeightMatrix0to1[6][2] = 2.65746503300031; fWeightMatrix0to1[7][2] = -0.811077176576081; fWeightMatrix0to1[8][2] = 2.41470941813234; fWeightMatrix0to1[9][2] = 1.49579429629752; fWeightMatrix0to1[10][2] = -0.246594507429038; fWeightMatrix0to1[11][2] = -0.492147846762105; fWeightMatrix0to1[12][2] = 0.0133421226331742; fWeightMatrix0to1[13][2] = -0.00964569416559689; fWeightMatrix0to1[14][2] = -1.64985000578589; fWeightMatrix0to1[15][2] = -1.71852871092944; fWeightMatrix0to1[16][2] = -1.98360826176374; fWeightMatrix0to1[17][2] = -3.14978772956337; fWeightMatrix0to1[18][2] = 0.292577866779279; fWeightMatrix0to1[19][2] = 0.682235296074326; fWeightMatrix0to1[0][3] = 0.955288433908705; fWeightMatrix0to1[1][3] = 2.68658001425392; fWeightMatrix0to1[2][3] = 0.285302059196023; fWeightMatrix0to1[3][3] = 0.931116594219958; fWeightMatrix0to1[4][3] = -3.30466660752426; fWeightMatrix0to1[5][3] = -1.78078067537807; fWeightMatrix0to1[6][3] = -3.30981248426289; fWeightMatrix0to1[7][3] = -0.68753929649584; fWeightMatrix0to1[8][3] = 0.914404289217857; fWeightMatrix0to1[9][3] = -3.23001194321342; fWeightMatrix0to1[10][3] = 0.67981352134652; fWeightMatrix0to1[11][3] = -1.1628017778558; fWeightMatrix0to1[12][3] = -0.507993707278222; fWeightMatrix0to1[13][3] = 0.746824011864292; fWeightMatrix0to1[14][3] = 1.51933137347614; fWeightMatrix0to1[15][3] = -0.978378700543666; fWeightMatrix0to1[16][3] = -2.42948688002381; fWeightMatrix0to1[17][3] = 0.767895008727488; fWeightMatrix0to1[18][3] = -3.32293489984895; fWeightMatrix0to1[19][3] = 0.496453409849777; fWeightMatrix0to1[0][4] = -0.450893393549653; fWeightMatrix0to1[1][4] = -1.27808571743711; fWeightMatrix0to1[2][4] = 1.07020220611277; fWeightMatrix0to1[3][4] = 0.7468206302618; fWeightMatrix0to1[4][4] = 0.156171772574921; fWeightMatrix0to1[5][4] = -0.1379980272298; fWeightMatrix0to1[6][4] = 
1.78513942704442; fWeightMatrix0to1[7][4] = 2.59870188012444; fWeightMatrix0to1[8][4] = 0.579012366452365; fWeightMatrix0to1[9][4] = 0.378733784418906; fWeightMatrix0to1[10][4] = 0.36457696919598; fWeightMatrix0to1[11][4] = 0.435325349671375; fWeightMatrix0to1[12][4] = 0.019284096867925; fWeightMatrix0to1[13][4] = -0.143062403526734; fWeightMatrix0to1[14][4] = -0.484409629463538; fWeightMatrix0to1[15][4] = 2.97314996889015; fWeightMatrix0to1[16][4] = 0.074864904633005; fWeightMatrix0to1[17][4] = -0.0718930791055189; fWeightMatrix0to1[18][4] = 0.164650316526512; fWeightMatrix0to1[19][4] = 0.0991785726448168; fWeightMatrix0to1[0][5] = -0.461902660137266; fWeightMatrix0to1[1][5] = -0.951779296503596; fWeightMatrix0to1[2][5] = 1.8840465836005; fWeightMatrix0to1[3][5] = -0.929583479226963; fWeightMatrix0to1[4][5] = -0.352958334375211; fWeightMatrix0to1[5][5] = 2.67006435508517; fWeightMatrix0to1[6][5] = 5.64003785355971; fWeightMatrix0to1[7][5] = -0.139176831528461; fWeightMatrix0to1[8][5] = -0.296494615545535; fWeightMatrix0to1[9][5] = -2.20909502447428; fWeightMatrix0to1[10][5] = 1.27390283252267; fWeightMatrix0to1[11][5] = -0.993726024612979; fWeightMatrix0to1[12][5] = -0.166394783295459; fWeightMatrix0to1[13][5] = -0.386363291573593; fWeightMatrix0to1[14][5] = -2.18574512531158; fWeightMatrix0to1[15][5] = 6.53726530419987; fWeightMatrix0to1[16][5] = 1.96988999760053; fWeightMatrix0to1[17][5] = 0.126040348676341; fWeightMatrix0to1[18][5] = 0.107695395002568; fWeightMatrix0to1[19][5] = -1.07558602136136; fWeightMatrix0to1[0][6] = -3.76693345602306; fWeightMatrix0to1[1][6] = 2.99460921526424; fWeightMatrix0to1[2][6] = 1.77820189640072; fWeightMatrix0to1[3][6] = 3.6300588475409; fWeightMatrix0to1[4][6] = 3.90227480857851; fWeightMatrix0to1[5][6] = -0.774725519512862; fWeightMatrix0to1[6][6] = -0.452486440061368; fWeightMatrix0to1[7][6] = 2.07485345840463; fWeightMatrix0to1[8][6] = -1.15395902098551; fWeightMatrix0to1[9][6] = -0.755987766490491; fWeightMatrix0to1[10][6] = -1.98713476362104; fWeightMatrix0to1[11][6] = -2.1082490975219; fWeightMatrix0to1[12][6] = 1.07240472951005; fWeightMatrix0to1[13][6] = -0.944608087212268; fWeightMatrix0to1[14][6] = -0.262434853903676; fWeightMatrix0to1[15][6] = 1.39635012833951; fWeightMatrix0to1[16][6] = 1.19951716802228; fWeightMatrix0to1[17][6] = 1.30881549832071; fWeightMatrix0to1[18][6] = 2.31242351184017; fWeightMatrix0to1[19][6] = 1.5328359657201; fWeightMatrix0to1[0][7] = 4.5226252761104; fWeightMatrix0to1[1][7] = 0.84354976982933; fWeightMatrix0to1[2][7] = -0.751688320810066; fWeightMatrix0to1[3][7] = -0.327875087528006; fWeightMatrix0to1[4][7] = -11.6389493354149; fWeightMatrix0to1[5][7] = 2.56038451788047; fWeightMatrix0to1[6][7] = 0.433033988139285; fWeightMatrix0to1[7][7] = 1.937155662177; fWeightMatrix0to1[8][7] = -0.300180731668779; fWeightMatrix0to1[9][7] = -0.0261061995929115; fWeightMatrix0to1[10][7] = -11.2801542507906; fWeightMatrix0to1[11][7] = 0.502336439083325; fWeightMatrix0to1[12][7] = 19.4827275683381; fWeightMatrix0to1[13][7] = -1.22801338855238; fWeightMatrix0to1[14][7] = -0.82237489849468; fWeightMatrix0to1[15][7] = -0.232479774677845; fWeightMatrix0to1[16][7] = 0.95549292328311; fWeightMatrix0to1[17][7] = -0.848789846192216; fWeightMatrix0to1[18][7] = -8.91656207507516; fWeightMatrix0to1[19][7] = -1.49422088575135; fWeightMatrix0to1[0][8] = 0.344711211035987; fWeightMatrix0to1[1][8] = 1.10009795654285; fWeightMatrix0to1[2][8] = 1.19499764591299; fWeightMatrix0to1[3][8] = 0.432192210162525; fWeightMatrix0to1[4][8] = 
-0.353539797258332; fWeightMatrix0to1[5][8] = 0.667020071350248; fWeightMatrix0to1[6][8] = -1.14453534215071; fWeightMatrix0to1[7][8] = 3.73966215698776; fWeightMatrix0to1[8][8] = -0.797672666582695; fWeightMatrix0to1[9][8] = -0.617992411984288; fWeightMatrix0to1[10][8] = 0.620841895069965; fWeightMatrix0to1[11][8] = 2.69027386784282; fWeightMatrix0to1[12][8] = 0.115083866266575; fWeightMatrix0to1[13][8] = 0.611834946739648; fWeightMatrix0to1[14][8] = 1.51469239049792; fWeightMatrix0to1[15][8] = -2.46905694892275; fWeightMatrix0to1[16][8] = 1.50764399513936; fWeightMatrix0to1[17][8] = 0.427314281443682; fWeightMatrix0to1[18][8] = 0.870567641305735; fWeightMatrix0to1[19][8] = -1.70436462883249; fWeightMatrix0to1[0][9] = -2.68743848705206; fWeightMatrix0to1[1][9] = 2.85186284654104; fWeightMatrix0to1[2][9] = 3.31495835404225; fWeightMatrix0to1[3][9] = -1.82348574467537; fWeightMatrix0to1[4][9] = -0.702929048889782; fWeightMatrix0to1[5][9] = -0.0581138218051933; fWeightMatrix0to1[6][9] = -0.301344245948314; fWeightMatrix0to1[7][9] = 0.204498985892549; fWeightMatrix0to1[8][9] = -0.574698076078748; fWeightMatrix0to1[9][9] = -2.99805963242542; fWeightMatrix0to1[10][9] = 0.11242572410515; fWeightMatrix0to1[11][9] = 1.07294848679127; fWeightMatrix0to1[12][9] = 0.471652055915929; fWeightMatrix0to1[13][9] = 2.28428304515166; fWeightMatrix0to1[14][9] = -0.0389735214174239; fWeightMatrix0to1[15][9] = -1.30989199835994; fWeightMatrix0to1[16][9] = -1.2863033682701; fWeightMatrix0to1[17][9] = -0.484636282108893; fWeightMatrix0to1[18][9] = 0.852582719792399; fWeightMatrix0to1[19][9] = -1.38781346697771; fWeightMatrix0to1[0][10] = -2.58468352395803; fWeightMatrix0to1[1][10] = -5.34548904380093; fWeightMatrix0to1[2][10] = -4.26988930745345; fWeightMatrix0to1[3][10] = -2.08030804436962; fWeightMatrix0to1[4][10] = 47.6911170584905; fWeightMatrix0to1[5][10] = -1.04647134943743; fWeightMatrix0to1[6][10] = -1.40244251725152; fWeightMatrix0to1[7][10] = -8.05269529671457; fWeightMatrix0to1[8][10] = 1.49533407806954; fWeightMatrix0to1[9][10] = -0.35621888596754; fWeightMatrix0to1[10][10] = 28.5087851891297; fWeightMatrix0to1[11][10] = -1.18130547571293; fWeightMatrix0to1[12][10] = -52.9627530619043; fWeightMatrix0to1[13][10] = -0.224574577735634; fWeightMatrix0to1[14][10] = 1.52429333833495; fWeightMatrix0to1[15][10] = 0.810449481543168; fWeightMatrix0to1[16][10] = 0.38392477503796; fWeightMatrix0to1[17][10] = 2.55212026843436; fWeightMatrix0to1[18][10] = 37.9152851749281; fWeightMatrix0to1[19][10] = -1.71915419062619; fWeightMatrix0to1[0][11] = 2.99492076073509; fWeightMatrix0to1[1][11] = -0.00551686030308176; fWeightMatrix0to1[2][11] = -1.99175781230754; fWeightMatrix0to1[3][11] = 0.854187840281839; fWeightMatrix0to1[4][11] = -6.15809019646892; fWeightMatrix0to1[5][11] = -0.494226326607724; fWeightMatrix0to1[6][11] = 0.0292633430279895; fWeightMatrix0to1[7][11] = 2.91002620435154; fWeightMatrix0to1[8][11] = -1.6757019943632; fWeightMatrix0to1[9][11] = -1.73117231189954; fWeightMatrix0to1[10][11] = -8.09136708384467; fWeightMatrix0to1[11][11] = -0.0993932911261171; fWeightMatrix0to1[12][11] = 18.372128087108; fWeightMatrix0to1[13][11] = -2.25654472918489; fWeightMatrix0to1[14][11] = 1.47015775479916; fWeightMatrix0to1[15][11] = -2.31921095720155; fWeightMatrix0to1[16][11] = 0.805542060313842; fWeightMatrix0to1[17][11] = -2.06804968930406; fWeightMatrix0to1[18][11] = -16.3195298136159; fWeightMatrix0to1[19][11] = -1.31037085470847; fWeightMatrix0to1[0][12] = -3.22067729326587; fWeightMatrix0to1[1][12] = 
1.01055803393227; fWeightMatrix0to1[2][12] = -0.274927072542879; fWeightMatrix0to1[3][12] = -1.68743885856345; fWeightMatrix0to1[4][12] = -10.9364639915207; fWeightMatrix0to1[5][12] = 0.937896017333754; fWeightMatrix0to1[6][12] = -1.74089790981807; fWeightMatrix0to1[7][12] = -2.75302500909655; fWeightMatrix0to1[8][12] = 1.39379950490846; fWeightMatrix0to1[9][12] = -0.81911361747275; fWeightMatrix0to1[10][12] = -3.60324235532515; fWeightMatrix0to1[11][12] = -1.24834289976086; fWeightMatrix0to1[12][12] = 5.99292542517585; fWeightMatrix0to1[13][12] = 2.29704508192058; fWeightMatrix0to1[14][12] = 1.14173214750374; fWeightMatrix0to1[15][12] = -1.40313862891048; fWeightMatrix0to1[16][12] = -1.37104868594462; fWeightMatrix0to1[17][12] = 0.0982753079810924; fWeightMatrix0to1[18][12] = 3.73322747111198; fWeightMatrix0to1[19][12] = -1.63325914544673; fWeightMatrix0to1[0][13] = -0.941665712763702; fWeightMatrix0to1[1][13] = 1.40298287551755; fWeightMatrix0to1[2][13] = 3.62365238512523; fWeightMatrix0to1[3][13] = -0.44525703646199; fWeightMatrix0to1[4][13] = -6.32171720037199; fWeightMatrix0to1[5][13] = 0.303999190090131; fWeightMatrix0to1[6][13] = -2.65134028727742; fWeightMatrix0to1[7][13] = 0.288770369017429; fWeightMatrix0to1[8][13] = 1.2434195501499; fWeightMatrix0to1[9][13] = -0.574628785348935; fWeightMatrix0to1[10][13] = -5.86703058561815; fWeightMatrix0to1[11][13] = 0.69247966219428; fWeightMatrix0to1[12][13] = 8.0763353536546; fWeightMatrix0to1[13][13] = 0.872085656845216; fWeightMatrix0to1[14][13] = -0.674810521211329; fWeightMatrix0to1[15][13] = -0.808776429004283; fWeightMatrix0to1[16][13] = 1.53388580194761; fWeightMatrix0to1[17][13] = 2.04295724736223; fWeightMatrix0to1[18][13] = 7.77267841743029; fWeightMatrix0to1[19][13] = -0.288234286390518; fWeightMatrix0to1[0][14] = -1.1389260267557; fWeightMatrix0to1[1][14] = 0.731653149573192; fWeightMatrix0to1[2][14] = -0.801339628081168; fWeightMatrix0to1[3][14] = -0.670816608500947; fWeightMatrix0to1[4][14] = -9.13800904102032; fWeightMatrix0to1[5][14] = -0.480285910455961; fWeightMatrix0to1[6][14] = -2.26367604424434; fWeightMatrix0to1[7][14] = -0.174306304568164; fWeightMatrix0to1[8][14] = -1.7961416809566; fWeightMatrix0to1[9][14] = 1.50394749285109; fWeightMatrix0to1[10][14] = -0.608156951351463; fWeightMatrix0to1[11][14] = 0.956890036791934; fWeightMatrix0to1[12][14] = 0.210446995533484; fWeightMatrix0to1[13][14] = 0.900153545285312; fWeightMatrix0to1[14][14] = 0.630468431967871; fWeightMatrix0to1[15][14] = -2.37246063938355; fWeightMatrix0to1[16][14] = 0.322079515707413; fWeightMatrix0to1[17][14] = 2.03863201237912; fWeightMatrix0to1[18][14] = 9.09057588577597; fWeightMatrix0to1[19][14] = -1.48040447288478; fWeightMatrix0to1[0][15] = -2.25624385968256; fWeightMatrix0to1[1][15] = -1.6589998567036; fWeightMatrix0to1[2][15] = 3.1196567074014; fWeightMatrix0to1[3][15] = -0.795717076260626; fWeightMatrix0to1[4][15] = -3.80824374082471; fWeightMatrix0to1[5][15] = 0.198246521070756; fWeightMatrix0to1[6][15] = -0.932459453473823; fWeightMatrix0to1[7][15] = -0.105745720789694; fWeightMatrix0to1[8][15] = -0.302831061871399; fWeightMatrix0to1[9][15] = -0.641691146605043; fWeightMatrix0to1[10][15] = -1.25580585910007; fWeightMatrix0to1[11][15] = 0.0937644305301363; fWeightMatrix0to1[12][15] = 3.782001922068; fWeightMatrix0to1[13][15] = 1.33559352566026; fWeightMatrix0to1[14][15] = 0.318167806517446; fWeightMatrix0to1[15][15] = -0.737710908735198; fWeightMatrix0to1[16][15] = 2.22496889032225; fWeightMatrix0to1[17][15] = -3.93433226883431; 
   fWeightMatrix0to1[18][15] = -2.78978508319233;
   fWeightMatrix0to1[19][15] = 0.964135498492762;
   fWeightMatrix0to1[0][16] = -2.54756757592623;
   fWeightMatrix0to1[1][16] = -1.8808801409372;
   fWeightMatrix0to1[2][16] = -0.108163652979769;
   fWeightMatrix0to1[3][16] = -1.44056039748044;
   fWeightMatrix0to1[4][16] = 4.21825795625469;
   fWeightMatrix0to1[5][16] = 2.64876772392894;
   fWeightMatrix0to1[6][16] = 1.20116157701435;
   fWeightMatrix0to1[7][16] = 1.39681500835743;
   fWeightMatrix0to1[8][16] = -0.90133056042022;
   fWeightMatrix0to1[9][16] = 1.12338425341477;
   fWeightMatrix0to1[10][16] = 0.555706809281387;
   fWeightMatrix0to1[11][16] = 1.73582955114227;
   fWeightMatrix0to1[12][16] = -6.51977034413445;
   fWeightMatrix0to1[13][16] = -1.93283394415298;
   fWeightMatrix0to1[14][16] = -0.000931441537656177;
   fWeightMatrix0to1[15][16] = 0.516115336100662;
   fWeightMatrix0to1[16][16] = 0.475687000905357;
   fWeightMatrix0to1[17][16] = -0.226941937729489;
   fWeightMatrix0to1[18][16] = 3.37362549690354;
   fWeightMatrix0to1[19][16] = -0.178556159804416;
   fWeightMatrix0to1[0][17] = -0.686585902604542;
   fWeightMatrix0to1[1][17] = -0.0752615298116904;
   fWeightMatrix0to1[2][17] = 2.08989035457255;
   fWeightMatrix0to1[3][17] = -1.48272932287127;
   fWeightMatrix0to1[4][17] = -1.65176538305262;
   fWeightMatrix0to1[5][17] = -0.57054674649253;
   fWeightMatrix0to1[6][17] = -0.322056238590214;
   fWeightMatrix0to1[7][17] = 1.53056694370593;
   fWeightMatrix0to1[8][17] = 0.90130575588818;
   fWeightMatrix0to1[9][17] = -0.329672684003192;
   fWeightMatrix0to1[10][17] = 0.871478112427722;
   fWeightMatrix0to1[11][17] = 1.99761533365266;
   fWeightMatrix0to1[12][17] = 2.12242346708182;
   fWeightMatrix0to1[13][17] = -0.128813590228224;
   fWeightMatrix0to1[14][17] = -0.599397990923242;
   fWeightMatrix0to1[15][17] = -0.955589233155283;
   fWeightMatrix0to1[16][17] = -1.61757309932889;
   fWeightMatrix0to1[17][17] = 1.14359409336326;
   fWeightMatrix0to1[18][17] = -10.975278784769;
   fWeightMatrix0to1[19][17] = -1.14953248375325;
   // weight matrix from layer 1 to 2
   fWeightMatrix1to2[0][0] = -1.00204173333709;
   fWeightMatrix1to2[0][1] = -1.04929186178813;
   fWeightMatrix1to2[0][2] = -1.00425793796383;
   fWeightMatrix1to2[0][3] = -0.621989535768579;
   fWeightMatrix1to2[0][4] = 2.0527393183291;
   fWeightMatrix1to2[0][5] = -0.809049320124926;
   fWeightMatrix1to2[0][6] = -0.592738334433834;
   fWeightMatrix1to2[0][7] = -0.817958359292373;
   fWeightMatrix1to2[0][8] = -2.18893921252743;
   fWeightMatrix1to2[0][9] = -0.528280895656269;
   fWeightMatrix1to2[0][10] = 2.08145521563139;
   fWeightMatrix1to2[0][11] = -0.464720588464946;
   fWeightMatrix1to2[0][12] = -2.22190492844479;
   fWeightMatrix1to2[0][13] = 1.08227674849374;
   fWeightMatrix1to2[0][14] = 0.72642930262781;
   fWeightMatrix1to2[0][15] = -0.391122995161214;
   fWeightMatrix1to2[0][16] = -0.400873088449103;
   fWeightMatrix1to2[0][17] = 0.636887363666122;
   fWeightMatrix1to2[0][18] = 1.30922624408683;
   fWeightMatrix1to2[0][19] = 0.159269167621053;
   fWeightMatrix1to2[0][20] = -2.3700767030826;
}

inline double ReadElectron_Upstream_TMVA::GetMvaValue__( const std::vector<double>& inputValues ) const
{
   if (inputValues.size() != (unsigned int)fLayerSize[0]-1) {
      std::cout << "Input vector needs to be of size " << fLayerSize[0]-1 << std::endl;
      return 0;
   }

   // reset the neuron buffers and set the bias nodes of the input and hidden layer to 1
   for (int l=0; l<fLayers; l++)
      for (int i=0; i<fLayerSize[l]; i++) fWeights[l][i]=0;

   for (int l=0; l<fLayers-1; l++)
      fWeights[l][fLayerSize[l]-1]=1;

   // copy the (transformed) input variables into the input layer
   for (int i=0; i<fLayerSize[0]-1; i++)
      fWeights[0][i]=inputValues[i];

   // layer 0 to 1
   for (int o=0; o<fLayerSize[1]-1; o++) {
      for (int i=0; i<fLayerSize[0]; i++) {
         double inputVal = fWeightMatrix0to1[o][i] * fWeights[0][i];
         fWeights[1][o] += inputVal;
      }
      fWeights[1][o] = ActivationFnc(fWeights[1][o]);
   }
   // layer 1 to 2
   for (int o=0; o<fLayerSize[2]; o++) {
      for (int i=0; i<fLayerSize[1]; i++) {
         double inputVal = fWeightMatrix1to2[o][i] * fWeights[1][i];
         fWeights[2][o] += inputVal;
      }
      fWeights[2][o] = OutputActivationFnc(fWeights[2][o]);
   }

   return fWeights[2][0];
}

// Clean up
inline void ReadElectron_Upstream_TMVA::Clear()
{
   // clean up the arrays
   for (int lIdx = 0; lIdx < 3; lIdx++) {
      delete[] fWeights[lIdx];
   }
}

inline double ReadElectron_Upstream_TMVA::ActivationFnc(double x) const
{
   // hyperbolic tan (NeuronType "tanh")
   return tanh(x);
}

inline double ReadElectron_Upstream_TMVA::OutputActivationFnc(double x) const
{
   // sigmoid output (EstimatorType "CE")
   return 1.0/(1.0+exp(-x));
}

inline double ReadElectron_Upstream_TMVA::GetMvaValue( const std::vector<double>& inputValues ) const
{
   // classifier response value
   double retval = 0;

   // classifier response, sanity check first
   if (!IsStatusClean()) {
      std::cout << "Problem in class \"" << fClassName << "\": cannot return classifier response"
                << " because status is dirty" << std::endl;
      retval = 0;
   }
   else {
      if (IsNormalised()) {
         // normalise variables
         std::vector<double> iV;
         iV.reserve(inputValues.size());
         int ivar = 0;
         for (std::vector<double>::const_iterator varIt = inputValues.begin();
              varIt != inputValues.end(); varIt++, ivar++) {
            iV.push_back(NormVariable( *varIt, fVmin[ivar], fVmax[ivar] ));
         }
         Transform( iV, -1 );
         retval = GetMvaValue__( iV );
      }
      else {
         std::vector<double> iV;
         int ivar = 0;
         for (std::vector<double>::const_iterator varIt = inputValues.begin();
              varIt != inputValues.end(); varIt++, ivar++) {
            iV.push_back(*varIt);
         }
         Transform( iV, -1 );
         retval = GetMvaValue__( iV );
      }
   }

   return retval;
}

//_______________________________________________________________________
inline void ReadElectron_Upstream_TMVA::InitTransform_1()
{
   // Normalization transformation, initialisation
   fMin_1[0][0] = 231.839996338; fMax_1[0][0] = 4745141;
   fMin_1[1][0] = 341.959991455; fMax_1[1][0] = 2425905.75;
   fMin_1[2][0] = 231.839996338; fMax_1[2][0] = 4745141;
   fMin_1[0][1] = 2.86109733582; fMax_1[0][1] = 952692.5;
   fMin_1[1][1] = 10.1653404236; fMax_1[1][1] = 685419.75;
   fMin_1[2][1] = 2.86109733582; fMax_1[2][1] = 952692.5;
   fMin_1[0][2] = 0.00438635889441; fMax_1[0][2] = 2.99983906746;
   fMin_1[1][2] = 0.016449213028; fMax_1[1][2] = 2.99244570732;
   fMin_1[2][2] = 0.00438635889441; fMax_1[2][2] = 2.99983906746;
   fMin_1[0][3] = 4; fMax_1[0][3] = 27;
   fMin_1[1][3] = 4; fMax_1[1][3] = 25;
   fMin_1[2][3] = 4; fMax_1[2][3] = 27;
   fMin_1[0][4] = 0.0299832448363; fMax_1[0][4] = 1;
   fMin_1[1][4] = 0.0568974018097; fMax_1[1][4] = 1;
   fMin_1[2][4] = 0.0299832448363; fMax_1[2][4] = 1;
   fMin_1[0][5] = 8.64220680796e-07; fMax_1[0][5] = 53.9077644348;
   fMin_1[1][5] = 0.000649704248644; fMax_1[1][5] = 37.2341995239;
   fMin_1[2][5] = 8.64220680796e-07; fMax_1[2][5] = 53.9077644348;
   fMin_1[0][6] = 1; fMax_1[0][6] = 23;
   fMin_1[1][6] = 1; fMax_1[1][6] = 20;
   fMin_1[2][6] = 1; fMax_1[2][6] = 23;
   fMin_1[0][7] = 0; fMax_1[0][7] = 1;
   fMin_1[1][7] = 0; fMax_1[1][7] = 1;
   fMin_1[2][7] = 0; fMax_1[2][7] = 1;
   fMin_1[0][8] = 0; fMax_1[0][8] = 1;
   fMin_1[1][8] = 0; fMax_1[1][8] = 1;
   fMin_1[2][8] = 0; fMax_1[2][8] = 1;
   fMin_1[0][9] = 0; fMax_1[0][9] = 1;
   fMin_1[1][9] = 0; fMax_1[1][9] = 1;
   fMin_1[2][9] = 0; fMax_1[2][9] = 1;
   fMin_1[0][10] = -999; fMax_1[0][10] = 88.657699585;
   fMin_1[1][10] = -999; fMax_1[1][10] = 118.707099915;
   fMin_1[2][10] = -999; fMax_1[2][10] = 118.707099915;
   fMin_1[0][11] = -999; fMax_1[0][11] = 79.7293014526;
   fMin_1[1][11] = -999; fMax_1[1][11] = 60.4417991638;
   fMin_1[2][11] = -999; fMax_1[2][11] = 79.7293014526;
   fMin_1[0][12] = -999; fMax_1[0][12] = 94.0835037231;
   fMin_1[1][12] = -999; fMax_1[1][12] = 40.0093002319;
   fMin_1[2][12] = -999; fMax_1[2][12] = 94.0835037231;
   fMin_1[0][13] = -999; fMax_1[0][13] = 92.269203186;
   fMin_1[1][13] = -999; fMax_1[1][13] = 29.8206005096;
   fMin_1[2][13] = -999; fMax_1[2][13] = 92.269203186;
   fMin_1[0][14] = -999; fMax_1[0][14] = 50.5774993896;
   fMin_1[1][14] = -999; fMax_1[1][14] = 34.4273986816;
   fMin_1[2][14] = -999; fMax_1[2][14] = 50.5774993896;
   fMin_1[0][15] = 0; fMax_1[0][15] = 1;
   fMin_1[1][15] = 0; fMax_1[1][15] = 1;
   fMin_1[2][15] = 0; fMax_1[2][15] = 1;
   fMin_1[0][16] = -999; fMax_1[0][16] = 4.79148387909;
   fMin_1[1][16] = -999; fMax_1[1][16] = 4.79148387909;
   fMin_1[2][16] = -999; fMax_1[2][16] = 4.79148387909;
}

//_______________________________________________________________________
inline void ReadElectron_Upstream_TMVA::Transform_1( std::vector<double>& iv, int cls) const
{
   // Normalization transformation
   // map any out-of-range class index onto the "all classes" slot
   if (cls < 0 || cls > 2) cls = 2;
   const int nVar = 17;

   // get indices of used variables
   // define the indices of the variables which are transformed by this transformation
   static std::vector<int> indicesGet;
   static std::vector<int> indicesPut;

   if ( indicesGet.empty() ) {
      indicesGet.reserve(fNvars);
      indicesGet.push_back( 0);
      indicesGet.push_back( 1);
      indicesGet.push_back( 2);
      indicesGet.push_back( 3);
      indicesGet.push_back( 4);
      indicesGet.push_back( 5);
      indicesGet.push_back( 6);
      indicesGet.push_back( 7);
      indicesGet.push_back( 8);
      indicesGet.push_back( 9);
      indicesGet.push_back( 10);
      indicesGet.push_back( 11);
      indicesGet.push_back( 12);
      indicesGet.push_back( 13);
      indicesGet.push_back( 14);
      indicesGet.push_back( 15);
      indicesGet.push_back( 16);
   }
   if ( indicesPut.empty() ) {
      indicesPut.reserve(fNvars);
      indicesPut.push_back( 0);
      indicesPut.push_back( 1);
      indicesPut.push_back( 2);
      indicesPut.push_back( 3);
      indicesPut.push_back( 4);
      indicesPut.push_back( 5);
      indicesPut.push_back( 6);
      indicesPut.push_back( 7);
      indicesPut.push_back( 8);
      indicesPut.push_back( 9);
      indicesPut.push_back( 10);
      indicesPut.push_back( 11);
      indicesPut.push_back( 12);
      indicesPut.push_back( 13);
      indicesPut.push_back( 14);
      indicesPut.push_back( 15);
      indicesPut.push_back( 16);
   }

   static std::vector<double> dv;
   dv.resize(nVar);
   for (int ivar=0; ivar<nVar; ivar++) dv[ivar] = iv[indicesGet.at(ivar)];

   // rescale each variable to [-1, 1] using the per-class min/max from InitTransform_1
   for (int ivar=0; ivar<17; ivar++) {
      double offset = fMin_1[cls][ivar];
      double scale  = 1.0/(fMax_1[cls][ivar]-fMin_1[cls][ivar]);
      iv[indicesPut.at(ivar)] = (dv[ivar]-offset)*scale * 2 - 1;
   }
}

//_______________________________________________________________________
inline void ReadElectron_Upstream_TMVA::InitTransform()
{
   InitTransform_1();
}

//_______________________________________________________________________
inline void ReadElectron_Upstream_TMVA::Transform( std::vector<double>& iv, int sigOrBgd ) const
{
   Transform_1( iv, sigOrBgd );
}
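
/* Example usage (illustrative sketch only, not part of the generated class):
   the reader is constructed once with the training variable names in the exact
   order listed in the #VAR block above, then GetMvaValue() is called for each
   candidate with the corresponding values. The numerical values, the macro name
   and the main() wrapper below are hypothetical and only show the calling
   convention of the class defined in this file. */
#ifdef READELECTRON_UPSTREAM_TMVA_EXAMPLE
int main()
{
   // Variable names, in the same order as used for training.
   std::vector<std::string> names;
   const char* vars[] = { "TrackP", "TrackPt", "TrackChi2PerDof", "TrackNumDof",
                          "TrackGhostProbability", "TrackFitVeloChi2", "TrackFitVeloNDoF",
                          "RichUsedR1Gas", "RichAboveMuThres", "RichAboveKaThres",
                          "RichDLLe", "RichDLLmu", "RichDLLk", "RichDLLp", "RichDLLbt",
                          "InAccBrem", "BremPIDe" };
   for (size_t i = 0; i < 17; ++i) names.push_back(vars[i]);

   ReadElectron_Upstream_TMVA reader( names );

   // One candidate track; these values are made up purely for illustration.
   std::vector<double> values;
   const double vals[] = { 5000., 350., 1.2, 10., 0.05, 3.4, 6.,
                           1., 1., 0., 5.2, -1.3, -2.0, -3.1, 0.4, 1., 2.5 };
   for (size_t i = 0; i < 17; ++i) values.push_back(vals[i]);

   // MVA response for this candidate
   const double mva = reader.GetMvaValue( values );
   std::cout << "Electron_Upstream_TMVA response = " << mva << std::endl;
   return 0;
}
#endif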