// Class: ReadElectron_Upstream_TMVA
// Automatically generated by MethodBase::MakeClass
//

/* configuration options =====================================================

#GEN -*-*-*-*-*-*-*-*-*-*-*- general info -*-*-*-*-*-*-*-*-*-*-*-

Method         : MLP::Electron_Upstream_TMVA
TMVA Release   : 4.2.0 [262656]
ROOT Release   : 6.02/01 [393729]
Creator        : jonesc
Date           : Sun Feb 7 03:45:01 2016
Host           : Linux lcgapp-slc6-physical1.cern.ch 2.6.32-431.29.2.el6.x86_64 #1 SMP Wed Sep 10 11:13:12 CEST 2014 x86_64 x86_64 x86_64 GNU/Linux
Dir            : /var/pcfst/r03/lhcb/jonesc/ANNPID/results/MC2015Sim09Dev03/TrainMixture/TrainPhysTks-EvalPhysTks-NoReweight/GhostAccFrac0.01/ProtonAccFrac1.0/KaonAccFrac1.0/TMVA-Run2-NoTkLikCDVelodEdx/MLP/Norm/ScaleF1.2/BP/NCycles750/CE/tanh/CVTest15/CVImp1e-16/NotUseRegulator/Electron/Upstream
Training events: 1805260
Analysis type  : [Classification]

#OPT -*-*-*-*-*-*-*-*-*-*-*-*- options -*-*-*-*-*-*-*-*-*-*-*-*-

# Set by User:
NCycles: "750" [Number of training cycles]
HiddenLayers: "20" [Specification of hidden layer architecture]
NeuronType: "tanh" [Neuron activation function type]
EstimatorType: "CE" [MSE (Mean Square Estimator) for Gaussian Likelihood or CE(Cross-Entropy) for Bernoulli Likelihood]
V: "True" [Verbose output (short form of "VerbosityLevel" below - overrides the latter one)]
VarTransform: "Norm" [List of variable transformations performed before training, e.g., "D_Background,P_Signal,G,N_AllClasses" for: "Decorrelation, PCA-transformation, Gaussianisation, Normalisation, each for the given class of events ('AllClasses' denotes all events of all classes, if no class indication is given, 'All' is assumed)"]
H: "True" [Print method-specific help message]
TrainingMethod: "BP" [Train with Back-Propagation (BP), BFGS Algorithm (BFGS), or Genetic Algorithm (GA - slower and worse)]
EpochMonitoring: "True" [Provide epoch-wise monitoring plots according to TestRate (caution: causes big ROOT output file!)]
ConvergenceImprove: "1.000000e-16" [Minimum improvement which counts as improvement (<0 means automatic convergence check is turned off)]
ConvergenceTests: "15" [Number of steps (without improvement) required for convergence (<0 means automatic convergence check is turned off)]
UseRegulator: "False" [Use regulator to avoid over-training]
# Default:
RandomSeed: "1" [Random seed for initial synapse weights (0 means unique seed for each run; default value '1')]
NeuronInputType: "sum" [Neuron input function type]
VerbosityLevel: "Verbose" [Verbosity level]
CreateMVAPdfs: "False" [Create PDFs for classifier outputs (signal and background)]
IgnoreNegWeightsInTraining: "False" [Events with negative weights are ignored in the training (but are included for testing and performance evaluation)]
LearningRate: "2.000000e-02" [ANN learning rate parameter]
DecayRate: "1.000000e-02" [Decay rate for learning parameter]
TestRate: "10" [Test for overtraining performed at each #th epochs]
Sampling: "1.000000e+00" [Only 'Sampling' (randomly selected) events are trained each epoch]
SamplingEpoch: "1.000000e+00" [Sampling is used for the first 'SamplingEpoch' epochs, afterwards, all events are taken for training]
SamplingImportance: "1.000000e+00" [ The sampling weights of events in epochs which successful (worse estimator than before) are multiplied with SamplingImportance, else they are divided.]
SamplingTraining: "True" [The training sample is sampled]
SamplingTesting: "False" [The testing sample is sampled]
ResetStep: "50" [How often BFGS should reset history]
Tau: "3.000000e+00" [LineSearch "size step"]
BPMode: "sequential" [Back-propagation learning mode: sequential or batch]
BatchSize: "-1" [Batch size: number of events/batch, only set if in Batch Mode, -1 for BatchSize=number_of_events]
UpdateLimit: "10000" [Maximum times of regulator update]
CalculateErrors: "False" [Calculates inverse Hessian matrix at the end of the training to be able to calculate the uncertainties of an MVA value]
WeightRange: "1.000000e+00" [Take the events for the estimator calculations from small deviations from the desired value to large deviations only over the weight range]
##

#VAR -*-*-*-*-*-*-*-*-*-*-*-* variables *-*-*-*-*-*-*-*-*-*-*-*-

NVar 17
TrackP TrackP TrackP TrackP 'F' [100.489997864,4998705.5]
TrackPt TrackPt TrackPt TrackPt 'F' [16.465473175,983434.1875]
TrackChi2PerDof TrackChi2PerDof TrackChi2PerDof TrackChi2PerDof 'F' [0.000975214526989,3.99805927277]
TrackNumDof TrackNumDof TrackNumDof TrackNumDof 'I' [4,27]
TrackGhostProbability TrackGhostProbability TrackGhostProbability TrackGhostProbability 'F' [0.00192418228835,0.399999499321]
TrackFitVeloChi2 TrackFitVeloChi2 TrackFitVeloChi2 TrackFitVeloChi2 'F' [7.34402965463e-06,67.3927001953]
TrackFitVeloNDoF TrackFitVeloNDoF TrackFitVeloNDoF TrackFitVeloNDoF 'I' [1,23]
RichUsedR1Gas RichUsedR1Gas RichUsedR1Gas RichUsedR1Gas 'I' [0,1]
RichAboveMuThres RichAboveMuThres RichAboveMuThres RichAboveMuThres 'I' [0,1]
RichAboveKaThres RichAboveKaThres RichAboveKaThres RichAboveKaThres 'I' [0,1]
RichDLLe RichDLLe RichDLLe RichDLLe 'F' [-999,152.429595947]
RichDLLmu RichDLLmu RichDLLmu RichDLLmu 'F' [-999,115.130996704]
RichDLLk RichDLLk RichDLLk RichDLLk 'F' [-999,135.562103271]
RichDLLp RichDLLp RichDLLp RichDLLp 'F' [-999,123.221702576]
RichDLLbt RichDLLbt RichDLLbt RichDLLbt 'F' [-999,50.9290008545]
InAccBrem InAccBrem InAccBrem InAccBrem 'I' [0,1]
BremPIDe BremPIDe BremPIDe BremPIDe 'F' [-999,4.79148387909]
NSpec 0

============================================================================ */

#include <vector>
#include <cmath>
#include <string>
#include <iostream>

#ifndef IClassifierReader__def
#define IClassifierReader__def

class IClassifierReader {

 public:

   // constructor
   IClassifierReader() : fStatusIsClean( true ) {}
   virtual ~IClassifierReader() {}

   // return classifier response
   virtual double GetMvaValue( const std::vector<double>& inputValues ) const = 0;

   // returns classifier status
   bool IsStatusClean() const { return fStatusIsClean; }

 protected:

   bool fStatusIsClean;
};

#endif

class ReadElectron_Upstream_TMVA : public IClassifierReader {

 public:

   // constructor
   ReadElectron_Upstream_TMVA( std::vector<std::string>& theInputVars )
      : IClassifierReader(),
        fClassName( "ReadElectron_Upstream_TMVA" ),
        fNvars( 17 ),
        fIsNormalised( false )
   {
      // the training input variables
      const char* inputVars[] = { "TrackP", "TrackPt", "TrackChi2PerDof", "TrackNumDof", "TrackGhostProbability", "TrackFitVeloChi2", "TrackFitVeloNDoF", "RichUsedR1Gas", "RichAboveMuThres", "RichAboveKaThres", "RichDLLe", "RichDLLmu", "RichDLLk", "RichDLLp", "RichDLLbt", "InAccBrem", "BremPIDe" };

      // sanity checks
      if (theInputVars.size() <= 0) {
         std::cout << "Problem in class \"" << fClassName << "\": empty input vector" << std::endl;
         fStatusIsClean = false;
      }

      if (theInputVars.size() != fNvars) {
         std::cout << "Problem in class \"" << fClassName << "\": mismatch in number of input values: "
                   << theInputVars.size() << " != " << fNvars << std::endl;
         fStatusIsClean = false;
      }

      // validate input variables
      for (size_t ivar = 0; ivar < theInputVars.size(); ivar++) {
         if (theInputVars[ivar] != inputVars[ivar]) {
            std::cout << "Problem in class \"" << fClassName << "\": mismatch in input variable names" << std::endl
                      << " for variable [" << ivar << "]: " << theInputVars[ivar].c_str() << " != " << inputVars[ivar] << std::endl;
            fStatusIsClean = false;
         }
      }

      // initialize min and max vectors (for normalisation)
      fVmin[0] = -1;   fVmax[0] = 1;
      fVmin[1] = -1;   fVmax[1] = 0.99999988079071;
      fVmin[2] = -1;   fVmax[2] = 0.99999988079071;
      fVmin[3] = -1;   fVmax[3] = 1;
      fVmin[4] = -1;   fVmax[4] = 0.99999988079071;
      fVmin[5] = -1;   fVmax[5] = 1;
      fVmin[6] = -1;   fVmax[6] = 1;
      fVmin[7] = -1;   fVmax[7] = 1;
      fVmin[8] = -1;   fVmax[8] = 1;
      fVmin[9] = -1;   fVmax[9] = 1;
      fVmin[10] = -1;  fVmax[10] = 1;
      fVmin[11] = -1;  fVmax[11] = 1;
      fVmin[12] = -1;  fVmax[12] = 1;
      fVmin[13] = -1;  fVmax[13] = 1;
      fVmin[14] = -1;  fVmax[14] = 1;
      fVmin[15] = -1;  fVmax[15] = 1;
      fVmin[16] = -1;  fVmax[16] = 1;

      // initialize input variable types
      fType[0] = 'F';
      fType[1] = 'F';
      fType[2] = 'F';
      fType[3] = 'I';
      fType[4] = 'F';
      fType[5] = 'F';
      fType[6] = 'I';
      fType[7] = 'I';
      fType[8] = 'I';
      fType[9] = 'I';
      fType[10] = 'F';
      fType[11] = 'F';
      fType[12] = 'F';
      fType[13] = 'F';
      fType[14] = 'F';
      fType[15] = 'I';
      fType[16] = 'F';

      // initialize constants
      Initialize();

      // initialize transformation
      InitTransform();
   }

   // destructor
   virtual ~ReadElectron_Upstream_TMVA() {
      Clear(); // method-specific
   }

   // the classifier response
   // "inputValues" is a vector of input values in the same order as the
   // variables given to the constructor
   double GetMvaValue( const std::vector<double>& inputValues ) const;

 private:

   // method-specific destructor
   void Clear();

   // input variable transformation
   double fMin_1[3][17];
   double fMax_1[3][17];
   void InitTransform_1();
   void Transform_1( std::vector<double>& iv, int sigOrBgd ) const;
   void InitTransform();
   void Transform( std::vector<double>& iv, int sigOrBgd ) const;

   // common member variables
   const char* fClassName;

   const size_t fNvars;
   size_t GetNvar()           const { return fNvars; }
   char   GetType( int ivar ) const { return fType[ivar]; }

   // normalisation of input variables
   const bool fIsNormalised;
   bool IsNormalised() const { return fIsNormalised; }
   double fVmin[17];
   double fVmax[17];
   double NormVariable( double x, double xmin, double xmax ) const {
      // normalise to output range: [-1, 1]
      return 2*(x - xmin)/(xmax - xmin) - 1.0;
   }

   // type of input variable: 'F' or 'I'
   char   fType[17];

   // initialize internal variables
   void Initialize();
   double GetMvaValue__( const std::vector<double>& inputValues ) const;

   // private members (method specific)
   double ActivationFnc(double x) const;
   double OutputActivationFnc(double x) const;

   int fLayers;
   int fLayerSize[3];
   double fWeightMatrix0to1[21][18];   // weight matrix from layer 0 to 1
   double fWeightMatrix1to2[1][21];    // weight matrix from layer 1 to 2

   double * fWeights[3];
};

inline void ReadElectron_Upstream_TMVA::Initialize()
{
   // build network structure
   fLayers = 3;
   fLayerSize[0] = 18;
   fWeights[0] = new double[18];
   fLayerSize[1] = 21;
   fWeights[1] = new double[21];
   fLayerSize[2] = 1;
   fWeights[2] = new double[1];
   // weight matrix from layer 0 to 1
   fWeightMatrix0to1[0][0] = -0.795687278274368;
   fWeightMatrix0to1[1][0] = 2.31651728176056;
   fWeightMatrix0to1[2][0] = 1.20160407511716;
   fWeightMatrix0to1[3][0] = 2.31177479887014;
   fWeightMatrix0to1[4][0] = -3.56114579597795;
   fWeightMatrix0to1[5][0] = -1.20954476720813;
   fWeightMatrix0to1[6][0] = -0.887379478452008;
   fWeightMatrix0to1[7][0] =
2.49354327147255; fWeightMatrix0to1[8][0] = -1.50861396325263; fWeightMatrix0to1[9][0] = -2.10165000048139; fWeightMatrix0to1[10][0] = -1.46919116147763; fWeightMatrix0to1[11][0] = -2.43497973524837; fWeightMatrix0to1[12][0] = -0.0496298675349404; fWeightMatrix0to1[13][0] = -0.689584236647396; fWeightMatrix0to1[14][0] = 0.117249463957533; fWeightMatrix0to1[15][0] = -0.50783905740626; fWeightMatrix0to1[16][0] = -0.104218886266813; fWeightMatrix0to1[17][0] = 3.26956873356222; fWeightMatrix0to1[18][0] = 6.77378262804761; fWeightMatrix0to1[19][0] = 1.38548057020619; fWeightMatrix0to1[0][1] = -1.21210601265339; fWeightMatrix0to1[1][1] = -0.555297909223904; fWeightMatrix0to1[2][1] = 0.913328135851799; fWeightMatrix0to1[3][1] = 0.143642565578205; fWeightMatrix0to1[4][1] = -0.481870690092385; fWeightMatrix0to1[5][1] = -0.455866347202234; fWeightMatrix0to1[6][1] = 1.2904936798007; fWeightMatrix0to1[7][1] = -1.51216435065864; fWeightMatrix0to1[8][1] = -2.18921579231102; fWeightMatrix0to1[9][1] = -1.04106188706337; fWeightMatrix0to1[10][1] = 2.1361937680115; fWeightMatrix0to1[11][1] = -2.32389358359917; fWeightMatrix0to1[12][1] = -2.18404620940701; fWeightMatrix0to1[13][1] = -0.386396057566561; fWeightMatrix0to1[14][1] = 1.03533709729219; fWeightMatrix0to1[15][1] = -0.7636323754706; fWeightMatrix0to1[16][1] = -1.34824987550222; fWeightMatrix0to1[17][1] = 4.5580256070977; fWeightMatrix0to1[18][1] = 6.00920412237946; fWeightMatrix0to1[19][1] = 1.11395614621715; fWeightMatrix0to1[0][2] = 2.28388681956181; fWeightMatrix0to1[1][2] = 1.21816990261485; fWeightMatrix0to1[2][2] = 1.84929567557911; fWeightMatrix0to1[3][2] = 0.422338698196085; fWeightMatrix0to1[4][2] = -0.141087206832651; fWeightMatrix0to1[5][2] = -2.34499912393019; fWeightMatrix0to1[6][2] = 0.218470265461159; fWeightMatrix0to1[7][2] = 0.348393971061731; fWeightMatrix0to1[8][2] = 0.459234348382142; fWeightMatrix0to1[9][2] = 0.289935349815501; fWeightMatrix0to1[10][2] = 0.668256637857325; fWeightMatrix0to1[11][2] = 0.631434794374299; fWeightMatrix0to1[12][2] = -0.953436322646349; fWeightMatrix0to1[13][2] = 0.0850278558177564; fWeightMatrix0to1[14][2] = -3.58767966753107; fWeightMatrix0to1[15][2] = -0.383360568087821; fWeightMatrix0to1[16][2] = -1.76991817770661; fWeightMatrix0to1[17][2] = -1.16117918321155; fWeightMatrix0to1[18][2] = 0.811975900952796; fWeightMatrix0to1[19][2] = 0.681875051396126; fWeightMatrix0to1[0][3] = -0.533848060100463; fWeightMatrix0to1[1][3] = -3.09992507114803; fWeightMatrix0to1[2][3] = -0.171979702623906; fWeightMatrix0to1[3][3] = 0.859539711011502; fWeightMatrix0to1[4][3] = -0.379392825438913; fWeightMatrix0to1[5][3] = 0.261006366378346; fWeightMatrix0to1[6][3] = 0.310520589892557; fWeightMatrix0to1[7][3] = -2.87688801236023; fWeightMatrix0to1[8][3] = 0.419937655936982; fWeightMatrix0to1[9][3] = -1.23397916635825; fWeightMatrix0to1[10][3] = 0.218046804447606; fWeightMatrix0to1[11][3] = 1.16387762061847; fWeightMatrix0to1[12][3] = 1.71827644352758; fWeightMatrix0to1[13][3] = 1.24306896826112; fWeightMatrix0to1[14][3] = -0.406045732194464; fWeightMatrix0to1[15][3] = -2.52262018645605; fWeightMatrix0to1[16][3] = -3.4929418184894; fWeightMatrix0to1[17][3] = -1.93110740897217; fWeightMatrix0to1[18][3] = -0.780181248945809; fWeightMatrix0to1[19][3] = 0.496084893289479; fWeightMatrix0to1[0][4] = 1.80645022961798; fWeightMatrix0to1[1][4] = 0.312648914004901; fWeightMatrix0to1[2][4] = 0.482299168854729; fWeightMatrix0to1[3][4] = -1.72072052929186; fWeightMatrix0to1[4][4] = 0.18399539331623; fWeightMatrix0to1[5][4] = 
0.145862645551073; fWeightMatrix0to1[6][4] = -0.0446605070087712; fWeightMatrix0to1[7][4] = 0.696014305303808; fWeightMatrix0to1[8][4] = -1.4913984582397; fWeightMatrix0to1[9][4] = -1.41381786215522; fWeightMatrix0to1[10][4] = 0.038030546439515; fWeightMatrix0to1[11][4] = -0.178989375273432; fWeightMatrix0to1[12][4] = -0.158676775005076; fWeightMatrix0to1[13][4] = 0.217721030065992; fWeightMatrix0to1[14][4] = 0.0430994602677295; fWeightMatrix0to1[15][4] = 0.460972941718539; fWeightMatrix0to1[16][4] = -0.379727587855347; fWeightMatrix0to1[17][4] = -0.285865487054564; fWeightMatrix0to1[18][4] = 0.264948344852931; fWeightMatrix0to1[19][4] = 0.0986762186412036; fWeightMatrix0to1[0][5] = -1.00983933779252; fWeightMatrix0to1[1][5] = -3.09833933477689; fWeightMatrix0to1[2][5] = 1.23570903005709; fWeightMatrix0to1[3][5] = 2.15258729223923; fWeightMatrix0to1[4][5] = -0.199710750879049; fWeightMatrix0to1[5][5] = -0.454448132639177; fWeightMatrix0to1[6][5] = -0.895484558742042; fWeightMatrix0to1[7][5] = -0.416163236195115; fWeightMatrix0to1[8][5] = -0.103978384939528; fWeightMatrix0to1[9][5] = -0.855434647036896; fWeightMatrix0to1[10][5] = -1.30319613595193; fWeightMatrix0to1[11][5] = -1.34669615082684; fWeightMatrix0to1[12][5] = 1.7521567737955; fWeightMatrix0to1[13][5] = -0.0680200709254446; fWeightMatrix0to1[14][5] = -1.83036305295819; fWeightMatrix0to1[15][5] = 1.80246402344235; fWeightMatrix0to1[16][5] = -0.140364053175955; fWeightMatrix0to1[17][5] = 3.90749840483421; fWeightMatrix0to1[18][5] = -1.13980230464587; fWeightMatrix0to1[19][5] = -1.07575933755902; fWeightMatrix0to1[0][6] = -0.634769095863607; fWeightMatrix0to1[1][6] = 4.87904735977472; fWeightMatrix0to1[2][6] = -0.752536082269542; fWeightMatrix0to1[3][6] = 2.60795762215561; fWeightMatrix0to1[4][6] = 1.01981750761999; fWeightMatrix0to1[5][6] = 1.10136784865006; fWeightMatrix0to1[6][6] = 0.255206777719739; fWeightMatrix0to1[7][6] = 2.78235205685891; fWeightMatrix0to1[8][6] = -2.79737594002448; fWeightMatrix0to1[9][6] = 0.785909632454984; fWeightMatrix0to1[10][6] = -1.55724191465334; fWeightMatrix0to1[11][6] = -0.227630855356872; fWeightMatrix0to1[12][6] = -3.43012405553375; fWeightMatrix0to1[13][6] = -1.04135006858611; fWeightMatrix0to1[14][6] = 2.15234243631986; fWeightMatrix0to1[15][6] = 1.23887083454184; fWeightMatrix0to1[16][6] = -0.549287299427312; fWeightMatrix0to1[17][6] = 0.0328610730373184; fWeightMatrix0to1[18][6] = 0.98437744116716; fWeightMatrix0to1[19][6] = 1.53227556363124; fWeightMatrix0to1[0][7] = 2.72682899369126; fWeightMatrix0to1[1][7] = -0.151616950130543; fWeightMatrix0to1[2][7] = -1.29334709018958; fWeightMatrix0to1[3][7] = -0.128263593561797; fWeightMatrix0to1[4][7] = -4.53541882078126; fWeightMatrix0to1[5][7] = 1.46196915845727; fWeightMatrix0to1[6][7] = 1.97996390320842; fWeightMatrix0to1[7][7] = 1.84063164425374; fWeightMatrix0to1[8][7] = 0.138965534170958; fWeightMatrix0to1[9][7] = 0.725073475557366; fWeightMatrix0to1[10][7] = -3.31860672037927; fWeightMatrix0to1[11][7] = 3.25292277982578; fWeightMatrix0to1[12][7] = 10.9903515858753; fWeightMatrix0to1[13][7] = -0.563520967045294; fWeightMatrix0to1[14][7] = -1.39464419987048; fWeightMatrix0to1[15][7] = -0.221409023057493; fWeightMatrix0to1[16][7] = 1.39624072427344; fWeightMatrix0to1[17][7] = -1.05516806342128; fWeightMatrix0to1[18][7] = -5.41367960677531; fWeightMatrix0to1[19][7] = -1.49525893518684; fWeightMatrix0to1[0][8] = 1.37497232413385; fWeightMatrix0to1[1][8] = 0.193719164475427; fWeightMatrix0to1[2][8] = 0.663442048784769; fWeightMatrix0to1[3][8] = 
0.667342306638493; fWeightMatrix0to1[4][8] = -0.20130811645021; fWeightMatrix0to1[5][8] = -0.0556619410345694; fWeightMatrix0to1[6][8] = 0.0381948743157884; fWeightMatrix0to1[7][8] = 1.21513700134127; fWeightMatrix0to1[8][8] = -0.420550734115632; fWeightMatrix0to1[9][8] = -0.00334541477839752; fWeightMatrix0to1[10][8] = -1.33737053668722; fWeightMatrix0to1[11][8] = -0.435968820426441; fWeightMatrix0to1[12][8] = -0.0142397479543938; fWeightMatrix0to1[13][8] = 0.0256235409426683; fWeightMatrix0to1[14][8] = 4.39754045097973; fWeightMatrix0to1[15][8] = 0.534461014599069; fWeightMatrix0to1[16][8] = 1.04741317082887; fWeightMatrix0to1[17][8] = -0.333967751516615; fWeightMatrix0to1[18][8] = -0.0336344577455606; fWeightMatrix0to1[19][8] = -1.70441536695542; fWeightMatrix0to1[0][9] = -0.73571908318385; fWeightMatrix0to1[1][9] = 4.03723811051057; fWeightMatrix0to1[2][9] = 2.22628566558507; fWeightMatrix0to1[3][9] = -1.76373136925115; fWeightMatrix0to1[4][9] = -0.0121421281015812; fWeightMatrix0to1[5][9] = 0.521619657349787; fWeightMatrix0to1[6][9] = 0.138309285613673; fWeightMatrix0to1[7][9] = -0.338239827028389; fWeightMatrix0to1[8][9] = -0.301231373008419; fWeightMatrix0to1[9][9] = -0.476331221511332; fWeightMatrix0to1[10][9] = 1.16230935165784; fWeightMatrix0to1[11][9] = 0.188731285593036; fWeightMatrix0to1[12][9] = -0.23340683442941; fWeightMatrix0to1[13][9] = 1.24826726690473; fWeightMatrix0to1[14][9] = 1.20112183197252; fWeightMatrix0to1[15][9] = -0.236119303575903; fWeightMatrix0to1[16][9] = -1.5930978942495; fWeightMatrix0to1[17][9] = 1.28237470234191; fWeightMatrix0to1[18][9] = -0.286789405269088; fWeightMatrix0to1[19][9] = -1.38786234905422; fWeightMatrix0to1[0][10] = -4.01699657297527; fWeightMatrix0to1[1][10] = 0.602898158437548; fWeightMatrix0to1[2][10] = -0.0661970780026132; fWeightMatrix0to1[3][10] = -1.89783450933447; fWeightMatrix0to1[4][10] = 27.4873958411787; fWeightMatrix0to1[5][10] = 0.335074333073018; fWeightMatrix0to1[6][10] = -1.45794237158981; fWeightMatrix0to1[7][10] = -12.586730893796; fWeightMatrix0to1[8][10] = 1.9967457435911; fWeightMatrix0to1[9][10] = 1.30623287810762; fWeightMatrix0to1[10][10] = 14.0029209222731; fWeightMatrix0to1[11][10] = -25.1663720195412; fWeightMatrix0to1[12][10] = -34.7794562167397; fWeightMatrix0to1[13][10] = -1.13104190145012; fWeightMatrix0to1[14][10] = 6.84503089944018; fWeightMatrix0to1[15][10] = 7.5144767139296; fWeightMatrix0to1[16][10] = 1.10787483474178; fWeightMatrix0to1[17][10] = 20.4198116731113; fWeightMatrix0to1[18][10] = 40.9264491920901; fWeightMatrix0to1[19][10] = -1.72003196729005; fWeightMatrix0to1[0][11] = 0.19451510722367; fWeightMatrix0to1[1][11] = 2.26357305682466; fWeightMatrix0to1[2][11] = -1.41881675835021; fWeightMatrix0to1[3][11] = 1.04032470044269; fWeightMatrix0to1[4][11] = -1.97468633178348; fWeightMatrix0to1[5][11] = -0.921303940816304; fWeightMatrix0to1[6][11] = 1.57086707384667; fWeightMatrix0to1[7][11] = 5.25165236506112; fWeightMatrix0to1[8][11] = -1.26875231144211; fWeightMatrix0to1[9][11] = -0.739954555839099; fWeightMatrix0to1[10][11] = -3.26076210397817; fWeightMatrix0to1[11][11] = 5.71731669897146; fWeightMatrix0to1[12][11] = 13.6169920969843; fWeightMatrix0to1[13][11] = -1.09858581007871; fWeightMatrix0to1[14][11] = 1.12851439563871; fWeightMatrix0to1[15][11] = -2.87346013143456; fWeightMatrix0to1[16][11] = 1.20166393858972; fWeightMatrix0to1[17][11] = -4.99826827774983; fWeightMatrix0to1[18][11] = -8.65238026881829; fWeightMatrix0to1[19][11] = -1.31130767283766; fWeightMatrix0to1[0][12] = 
-1.17098234832852; fWeightMatrix0to1[1][12] = 0.224766261692398; fWeightMatrix0to1[2][12] = -1.66619649779822; fWeightMatrix0to1[3][12] = -1.50460866160719; fWeightMatrix0to1[4][12] = -9.16233614516845; fWeightMatrix0to1[5][12] = 0.236290955626897; fWeightMatrix0to1[6][12] = -0.380536707122573; fWeightMatrix0to1[7][12] = -0.0673353380202524; fWeightMatrix0to1[8][12] = 1.93130133092944; fWeightMatrix0to1[9][12] = -0.525426548649086; fWeightMatrix0to1[10][12] = -1.96667652442458; fWeightMatrix0to1[11][12] = 0.414139114384958; fWeightMatrix0to1[12][12] = 4.29174871559435; fWeightMatrix0to1[13][12] = 1.77477780412764; fWeightMatrix0to1[14][12] = 0.717368166578845; fWeightMatrix0to1[15][12] = -1.45637573885476; fWeightMatrix0to1[16][12] = -1.14858223901039; fWeightMatrix0to1[17][12] = -2.42784352751073; fWeightMatrix0to1[18][12] = -0.197896278288843; fWeightMatrix0to1[19][12] = -1.63418069344811; fWeightMatrix0to1[0][13] = 0.875237071560141; fWeightMatrix0to1[1][13] = 0.867011678790037; fWeightMatrix0to1[2][13] = 1.95644835001367; fWeightMatrix0to1[3][13] = -0.261415695160836; fWeightMatrix0to1[4][13] = -4.7150435426333; fWeightMatrix0to1[5][13] = -0.391764325211675; fWeightMatrix0to1[6][13] = -1.33339263789473; fWeightMatrix0to1[7][13] = 2.08127787550815; fWeightMatrix0to1[8][13] = 1.69812177011971; fWeightMatrix0to1[9][13] = -0.503839157435298; fWeightMatrix0to1[10][13] = -2.05812579076861; fWeightMatrix0to1[11][13] = 0.698139382558324; fWeightMatrix0to1[12][13] = 3.39745440831237; fWeightMatrix0to1[13][13] = 0.359221093888473; fWeightMatrix0to1[14][13] = -1.11819749304068; fWeightMatrix0to1[15][13] = -0.850384589739229; fWeightMatrix0to1[16][13] = 1.81091745897073; fWeightMatrix0to1[17][13] = 1.35976106761051; fWeightMatrix0to1[18][13] = 3.80691402857758; fWeightMatrix0to1[19][13] = -0.289164990343205; fWeightMatrix0to1[0][14] = 0.826965984168259; fWeightMatrix0to1[1][14] = 0.318202838197023; fWeightMatrix0to1[2][14] = -1.38873363369721; fWeightMatrix0to1[3][14] = -0.479546770453574; fWeightMatrix0to1[4][14] = -7.8426359179459; fWeightMatrix0to1[5][14] = -1.26635407649557; fWeightMatrix0to1[6][14] = -0.861175276733533; fWeightMatrix0to1[7][14] = 2.63233743048542; fWeightMatrix0to1[8][14] = -1.29873956395771; fWeightMatrix0to1[9][14] = 2.13014860563324; fWeightMatrix0to1[10][14] = -2.78435990599812; fWeightMatrix0to1[11][14] = 1.29443999565276; fWeightMatrix0to1[12][14] = 1.30289447086499; fWeightMatrix0to1[13][14] = 0.375281891676381; fWeightMatrix0to1[14][14] = 0.0812241094306737; fWeightMatrix0to1[15][14] = -2.40882725381463; fWeightMatrix0to1[16][14] = 0.61614850100752; fWeightMatrix0to1[17][14] = 0.636721481529658; fWeightMatrix0to1[18][14] = -0.100714158488246; fWeightMatrix0to1[19][14] = -1.48139450686939; fWeightMatrix0to1[0][15] = -0.588219227882829; fWeightMatrix0to1[1][15] = 1.11570120415108; fWeightMatrix0to1[2][15] = 2.17964579339289; fWeightMatrix0to1[3][15] = -0.793794789121476; fWeightMatrix0to1[4][15] = -2.39962401506366; fWeightMatrix0to1[5][15] = 0.536501531721283; fWeightMatrix0to1[6][15] = -1.08417735323335; fWeightMatrix0to1[7][15] = -0.0374696550389051; fWeightMatrix0to1[8][15] = -0.872846775965117; fWeightMatrix0to1[9][15] = -1.07902874718122; fWeightMatrix0to1[10][15] = 0.106650816457364; fWeightMatrix0to1[11][15] = 1.36586186604337; fWeightMatrix0to1[12][15] = 0.564405410948452; fWeightMatrix0to1[13][15] = 1.82027842149649; fWeightMatrix0to1[14][15] = -0.21292919946913; fWeightMatrix0to1[15][15] = -1.06845491883997; fWeightMatrix0to1[16][15] = 1.60443537293138; 
   fWeightMatrix0to1[17][15] = -4.08623317144311;
   fWeightMatrix0to1[18][15] = -3.33990659207956;
   fWeightMatrix0to1[19][15] = 0.963007309013787;
   fWeightMatrix0to1[0][16] = -1.14287131177142;
   fWeightMatrix0to1[1][16] = -0.389949693891141;
   fWeightMatrix0to1[2][16] = -0.0700977431266502;
   fWeightMatrix0to1[3][16] = -1.41889304822153;
   fWeightMatrix0to1[4][16] = 2.5771576841442;
   fWeightMatrix0to1[5][16] = 3.17344098119848;
   fWeightMatrix0to1[6][16] = 1.30443473008883;
   fWeightMatrix0to1[7][16] = -0.174698181405684;
   fWeightMatrix0to1[8][16] = -1.42014948299522;
   fWeightMatrix0to1[9][16] = 1.31497841271003;
   fWeightMatrix0to1[10][16] = 0.413967315068428;
   fWeightMatrix0to1[11][16] = -2.28708832480168;
   fWeightMatrix0to1[12][16] = -7.73159532970894;
   fWeightMatrix0to1[13][16] = -1.78595779476494;
   fWeightMatrix0to1[14][16] = -0.248359704921308;
   fWeightMatrix0to1[15][16] = 1.6588238855445;
   fWeightMatrix0to1[16][16] = -0.0100374459202117;
   fWeightMatrix0to1[17][16] = 3.72343150426458;
   fWeightMatrix0to1[18][16] = 3.33498508444381;
   fWeightMatrix0to1[19][16] = -0.179673957339847;
   fWeightMatrix0to1[0][17] = 0.125262023047622;
   fWeightMatrix0to1[1][17] = -0.209641646306695;
   fWeightMatrix0to1[2][17] = 1.76967469909892;
   fWeightMatrix0to1[3][17] = -1.54300833859859;
   fWeightMatrix0to1[4][17] = 0.918123595084401;
   fWeightMatrix0to1[5][17] = -2.07299113476733;
   fWeightMatrix0to1[6][17] = -0.250226541805743;
   fWeightMatrix0to1[7][17] = -0.0900622911240581;
   fWeightMatrix0to1[8][17] = 0.330126004792484;
   fWeightMatrix0to1[9][17] = 0.489966001370781;
   fWeightMatrix0to1[10][17] = 0.206602843846252;
   fWeightMatrix0to1[11][17] = 4.50369800525252;
   fWeightMatrix0to1[12][17] = 2.26709013555732;
   fWeightMatrix0to1[13][17] = 0.0557933802981336;
   fWeightMatrix0to1[14][17] = -1.72582325381848;
   fWeightMatrix0to1[15][17] = -0.963775084080971;
   fWeightMatrix0to1[16][17] = -1.77051456797843;
   fWeightMatrix0to1[17][17] = -0.87505987858917;
   fWeightMatrix0to1[18][17] = -7.1198261292295;
   fWeightMatrix0to1[19][17] = -1.14948376679722;
   // weight matrix from layer 1 to 2
   fWeightMatrix1to2[0][0] = -0.316192826932665;
   fWeightMatrix1to2[0][1] = -1.43962588890275;
   fWeightMatrix1to2[0][2] = -0.236185140591332;
   fWeightMatrix1to2[0][3] = -0.292240270220395;
   fWeightMatrix1to2[0][4] = 2.89363191783078;
   fWeightMatrix1to2[0][5] = -1.33241586632402;
   fWeightMatrix1to2[0][6] = 0.443026823538712;
   fWeightMatrix1to2[0][7] = -1.00069020896173;
   fWeightMatrix1to2[0][8] = -0.242996459892805;
   fWeightMatrix1to2[0][9] = -2.41083856235728;
   fWeightMatrix1to2[0][10] = 0.544699598929808;
   fWeightMatrix1to2[0][11] = -0.846513848843961;
   fWeightMatrix1to2[0][12] = -1.38704564856283;
   fWeightMatrix1to2[0][13] = 0.260740819571199;
   fWeightMatrix1to2[0][14] = 0.832442273496678;
   fWeightMatrix1to2[0][15] = 0.605930935692143;
   fWeightMatrix1to2[0][16] = -0.228033243810512;
   fWeightMatrix1to2[0][17] = 1.1122263023315;
   fWeightMatrix1to2[0][18] = 2.19804802496079;
   fWeightMatrix1to2[0][19] = 0.98820394541968;
   fWeightMatrix1to2[0][20] = -2.96818079188264;
}

inline double ReadElectron_Upstream_TMVA::GetMvaValue__( const std::vector<double>& inputValues ) const
{
   if (inputValues.size() != (unsigned int)fLayerSize[0]-1) {
      std::cout << "Input vector needs to be of size " << fLayerSize[0]-1 << std::endl;
      return 0;
   }

   // reset all node values and set the bias node of each layer (last entry) to 1
   for (int l=0; l<fLayers; l++)
      for (int i=0; i<fLayerSize[l]; i++) fWeights[l][i] = 0;

   for (int l=0; l<fLayers-1; l++)
      fWeights[l][fLayerSize[l]-1] = 1;

   // copy the (transformed) input variables into layer 0
   for (int i=0; i<fLayerSize[0]-1; i++)
      fWeights[0][i] = inputValues[i];

   // propagate layer 0 -> 1
   for (int o=0; o<fLayerSize[1]-1; o++) {
      double inputVal = 0;
      for (int i=0; i<fLayerSize[0]; i++) {
         inputVal += fWeightMatrix0to1[o][i] * fWeights[0][i];
      }
      fWeights[1][o] = ActivationFnc(inputVal);
   }

   // propagate layer 1 -> 2 (output)
   for (int o=0; o<fLayerSize[2]; o++) {
      double inputVal = 0;
      for (int i=0; i<fLayerSize[1]; i++) {
         inputVal += fWeightMatrix1to2[o][i] * fWeights[1][i];
      }
      fWeights[2][o] = OutputActivationFnc(inputVal);
   }

   return fWeights[2][0];
}

double ReadElectron_Upstream_TMVA::ActivationFnc(double x) const
{
   // hyperbolic tan (NeuronType "tanh")
   return tanh(x);
}

double ReadElectron_Upstream_TMVA::OutputActivationFnc(double x) const
{
   // sigmoid output, as used with the cross-entropy (CE) estimator
   return 1.0/(1.0+exp(-x));
}

// Clean up
inline void ReadElectron_Upstream_TMVA::Clear()
{
   // clean up the layer arrays
   for (int lIdx = 0; lIdx < 3; lIdx++) {
      delete[] fWeights[lIdx];
   }
}

inline double ReadElectron_Upstream_TMVA::GetMvaValue( const std::vector<double>& inputValues ) const
{
   // classifier response value
   double retval = 0;

   // classifier response, sanity check first
   if (!IsStatusClean()) {
      std::cout << "Problem in class \"" << fClassName << "\": cannot return classifier response"
                << " because status is dirty" << std::endl;
      retval = 0;
   }
   else {
      if (IsNormalised()) {
         // normalise variables
         std::vector<double> iV;
         iV.reserve(inputValues.size());
         int ivar = 0;
         for (std::vector<double>::const_iterator varIt = inputValues.begin();
              varIt != inputValues.end(); varIt++, ivar++) {
            iV.push_back(NormVariable( *varIt, fVmin[ivar], fVmax[ivar] ));
         }
         Transform( iV, -1 );
         retval = GetMvaValue__( iV );
      }
      else {
         std::vector<double> iV;
         int ivar = 0;
         for (std::vector<double>::const_iterator varIt = inputValues.begin();
              varIt != inputValues.end(); varIt++, ivar++) {
            iV.push_back(*varIt);
         }
         Transform( iV, -1 );
         retval = GetMvaValue__( iV );
      }
   }

   return retval;
}

//_______________________________________________________________________
inline void ReadElectron_Upstream_TMVA::InitTransform_1()
{
   // Normalization transformation, initialisation
   fMin_1[0][0] = 100.489997864;   fMax_1[0][0] = 4998705.5;
   fMin_1[1][0] = 303.720001221;   fMax_1[1][0] = 2925550.5;
   fMin_1[2][0] = 100.489997864;   fMax_1[2][0] = 4998705.5;
   fMin_1[0][1] = 16.465473175;    fMax_1[0][1] = 983434.1875;
   fMin_1[1][1] = 17.5056381226;   fMax_1[1][1] = 514403.84375;
   fMin_1[2][1] = 16.465473175;    fMax_1[2][1] = 983434.1875;
   fMin_1[0][2] = 0.000975214526989;   fMax_1[0][2] = 3.99805927277;
   fMin_1[1][2] = 0.0198336597532;     fMax_1[1][2] = 3.99586319923;
   fMin_1[2][2] = 0.000975214526989;   fMax_1[2][2] = 3.99805927277;
   fMin_1[0][3] = 4;   fMax_1[0][3] = 27;
   fMin_1[1][3] = 4;   fMax_1[1][3] = 23;
   fMin_1[2][3] = 4;   fMax_1[2][3] = 27;
   fMin_1[0][4] = 0.00192418228835;   fMax_1[0][4] = 0.399999499321;
   fMin_1[1][4] = 0.00204366911203;   fMax_1[1][4] = 0.399787157774;
   fMin_1[2][4] = 0.00192418228835;   fMax_1[2][4] = 0.399999499321;
   fMin_1[0][5] = 7.34402965463e-06;  fMax_1[0][5] = 67.3927001953;
   fMin_1[1][5] = 0.00049247656716;   fMax_1[1][5] = 46.4988098145;
   fMin_1[2][5] = 7.34402965463e-06;  fMax_1[2][5] = 67.3927001953;
   fMin_1[0][6] = 1;   fMax_1[0][6] = 23;
   fMin_1[1][6] = 1;   fMax_1[1][6] = 18;
   fMin_1[2][6] = 1;   fMax_1[2][6] = 23;
   fMin_1[0][7] = 0;   fMax_1[0][7] = 1;
   fMin_1[1][7] = 0;   fMax_1[1][7] = 1;
   fMin_1[2][7] = 0;   fMax_1[2][7] = 1;
   fMin_1[0][8] = 0;   fMax_1[0][8] = 1;
   fMin_1[1][8] = 0;   fMax_1[1][8] = 1;
   fMin_1[2][8] = 0;   fMax_1[2][8] = 1;
   fMin_1[0][9] = 0;   fMax_1[0][9] = 1;
   fMin_1[1][9] = 0;   fMax_1[1][9] = 1;
   fMin_1[2][9] = 0;   fMax_1[2][9] = 1;
   fMin_1[0][10] = -999;   fMax_1[0][10] = 144.654205322;
   fMin_1[1][10] = -999;   fMax_1[1][10] = 152.429595947;
   fMin_1[2][10] = -999;   fMax_1[2][10] = 152.429595947;
   fMin_1[0][11] = -999;   fMax_1[0][11] = 115.130996704;
   fMin_1[1][11] = -999;   fMax_1[1][11] = 78.2866973877;
   fMin_1[2][11] = -999;   fMax_1[2][11] = 115.130996704;
   fMin_1[0][12] = -999;   fMax_1[0][12] = 135.562103271;
   fMin_1[1][12] = -999;   fMax_1[1][12] = 27.7754001617;
   fMin_1[2][12] = -999;   fMax_1[2][12] = 135.562103271;
   fMin_1[0][13] = -999;   fMax_1[0][13] = 123.221702576;
   fMin_1[1][13] = -999;   fMax_1[1][13] = 33.3460006714;
   fMin_1[2][13] = -999;   fMax_1[2][13] = 123.221702576;
   fMin_1[0][14] = -999;   fMax_1[0][14] = 50.9290008545;
   fMin_1[1][14] = -999;   fMax_1[1][14] = 33.3460006714;
   fMin_1[2][14] = -999;   fMax_1[2][14] = 50.9290008545;
   fMin_1[0][15] = 0;   fMax_1[0][15] = 1;
   fMin_1[1][15] = 0;   fMax_1[1][15] = 1;
   fMin_1[2][15] = 0;   fMax_1[2][15] = 1;
   fMin_1[0][16] = -999;   fMax_1[0][16] = 4.79148387909;
   fMin_1[1][16] = -999;   fMax_1[1][16] = 4.79148387909;
   fMin_1[2][16] = -999;   fMax_1[2][16] = 4.79148387909;
}

//_______________________________________________________________________
inline void ReadElectron_Upstream_TMVA::Transform_1( std::vector<double>& iv, int cls) const
{
   // Normalization transformation
   if (cls < 0 || cls > 2) {
      if (2 > 1 ) cls = 2;
      else cls = 2;
   }
   const int nVar = 17;

   // get indices of used variables
   // define the indices of the variables which are transformed by this transformation
   static std::vector<int> indicesGet;
   static std::vector<int> indicesPut;

   if ( indicesGet.empty() ) {
      indicesGet.reserve(fNvars);
      indicesGet.push_back( 0);  indicesGet.push_back( 1);  indicesGet.push_back( 2);
      indicesGet.push_back( 3);  indicesGet.push_back( 4);  indicesGet.push_back( 5);
      indicesGet.push_back( 6);  indicesGet.push_back( 7);  indicesGet.push_back( 8);
      indicesGet.push_back( 9);  indicesGet.push_back( 10); indicesGet.push_back( 11);
      indicesGet.push_back( 12); indicesGet.push_back( 13); indicesGet.push_back( 14);
      indicesGet.push_back( 15); indicesGet.push_back( 16);
   }
   if ( indicesPut.empty() ) {
      indicesPut.reserve(fNvars);
      indicesPut.push_back( 0);  indicesPut.push_back( 1);  indicesPut.push_back( 2);
      indicesPut.push_back( 3);  indicesPut.push_back( 4);  indicesPut.push_back( 5);
      indicesPut.push_back( 6);  indicesPut.push_back( 7);  indicesPut.push_back( 8);
      indicesPut.push_back( 9);  indicesPut.push_back( 10); indicesPut.push_back( 11);
      indicesPut.push_back( 12); indicesPut.push_back( 13); indicesPut.push_back( 14);
      indicesPut.push_back( 15); indicesPut.push_back( 16);
   }

   static std::vector<double> dv;
   dv.resize(nVar);
   for (int ivar=0; ivar<nVar; ivar++) dv[ivar] = iv[indicesGet.at(ivar)];

   // map each variable linearly onto [-1,1] using the class-dependent min/max from InitTransform_1
   for (int ivar=0; ivar<17; ivar++) {
      double offset = fMin_1[cls][ivar];
      double scale  = 1.0/(fMax_1[cls][ivar]-fMin_1[cls][ivar]);
      iv[indicesPut.at(ivar)] = (dv[ivar]-offset)*scale * 2 - 1;
   }
}

//_______________________________________________________________________
inline void ReadElectron_Upstream_TMVA::InitTransform()
{
   InitTransform_1();
}

//_______________________________________________________________________
inline void ReadElectron_Upstream_TMVA::Transform( std::vector<double>& iv, int sigOrBgd ) const
{
   Transform_1( iv, sigOrBgd );
}
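/*
   Usage sketch (not produced by TMVA; added here purely for illustration).
   It shows how a classifier file like this is typically driven: construct the
   reader with the training variable names in the exact order listed in the
   #VAR block above, then call GetMvaValue() with one vector of 17 values per
   track. The local names ("names", "values", "mva") and the placeholder input
   values below are assumptions, not real LHCb track data, and the 0.5 cut is
   only an example working point.

   // #include the generated file above first (its file name depends on how it was saved), then:

   #include <iostream>
   #include <string>
   #include <vector>

   int main()
   {
      // variable names, in the same order as used for training
      std::vector<std::string> names = {
         "TrackP", "TrackPt", "TrackChi2PerDof", "TrackNumDof", "TrackGhostProbability",
         "TrackFitVeloChi2", "TrackFitVeloNDoF", "RichUsedR1Gas", "RichAboveMuThres",
         "RichAboveKaThres", "RichDLLe", "RichDLLmu", "RichDLLk", "RichDLLp",
         "RichDLLbt", "InAccBrem", "BremPIDe" };

      ReadElectron_Upstream_TMVA reader( names );

      // one candidate track: 17 values in the same order as "names" (placeholders here)
      std::vector<double> values( 17, 0.0 );

      const double mva = reader.GetMvaValue( values );  // MLP response, roughly in (0,1) with a sigmoid output
      std::cout << "electron MVA response = " << mva
                << (mva > 0.5 ? "  (passes example cut)" : "  (fails example cut)") << std::endl;
      return 0;
   }
*/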