// Class: ReadGhost_Upstream_TMVA
// Automatically generated by MethodBase::MakeClass
//

/* configuration options =====================================================

#GEN -*-*-*-*-*-*-*-*-*-*-*- general info -*-*-*-*-*-*-*-*-*-*-*-

Method : MLP::Ghost_Upstream_TMVA
TMVA Release : 4.2.0 [262656]
ROOT Release : 6.02/01 [393729]
Creator : jonesc
Date : Sun Feb 7 16:35:07 2016
Host : Linux lcgapp-slc6-physical1.cern.ch 2.6.32-431.29.2.el6.x86_64 #1 SMP Wed Sep 10 11:13:12 CEST 2014 x86_64 x86_64 x86_64 GNU/Linux
Dir : /var/pcfst/r03/lhcb/jonesc/ANNPID/results/MC2015Sim09Dev03/TrainMixture/TrainPhysTks-EvalPhysTks-NoReweight/GhostAccFrac1.0/ProtonAccFrac1.0/KaonAccFrac1.0/TMVA-Run2-NoTkLikCDVelodEdx/MLP/Norm/ScaleF1.3/BP/NCycles750/CE/tanh/CVTest15/CVImp1e-16/NotUseRegulator/Ghost/Upstream
Training events: 2319076
Analysis type : [Classification]

#OPT -*-*-*-*-*-*-*-*-*-*-*-*- options -*-*-*-*-*-*-*-*-*-*-*-*-

# Set by User:
NCycles: "750" [Number of training cycles]
HiddenLayers: "23" [Specification of hidden layer architecture]
NeuronType: "tanh" [Neuron activation function type]
EstimatorType: "CE" [MSE (Mean Square Estimator) for Gaussian Likelihood or CE(Cross-Entropy) for Bernoulli Likelihood]
V: "True" [Verbose output (short form of "VerbosityLevel" below - overrides the latter one)]
VarTransform: "Norm" [List of variable transformations performed before training, e.g., "D_Background,P_Signal,G,N_AllClasses" for: "Decorrelation, PCA-transformation, Gaussianisation, Normalisation, each for the given class of events ('AllClasses' denotes all events of all classes, if no class indication is given, 'All' is assumed)"]
H: "True" [Print method-specific help message]
TrainingMethod: "BP" [Train with Back-Propagation (BP), BFGS Algorithm (BFGS), or Genetic Algorithm (GA - slower and worse)]
EpochMonitoring: "True" [Provide epoch-wise monitoring plots according to TestRate (caution: causes big ROOT output file!)]
ConvergenceImprove: "1.000000e-16" [Minimum improvement which counts as improvement (<0 means automatic convergence check is turned off)]
ConvergenceTests: "15" [Number of steps (without improvement) required for convergence (<0 means automatic convergence check is turned off)]
UseRegulator: "False" [Use regulator to avoid over-training]
# Default:
RandomSeed: "1" [Random seed for initial synapse weights (0 means unique seed for each run; default value '1')]
NeuronInputType: "sum" [Neuron input function type]
VerbosityLevel: "Verbose" [Verbosity level]
CreateMVAPdfs: "False" [Create PDFs for classifier outputs (signal and background)]
IgnoreNegWeightsInTraining: "False" [Events with negative weights are ignored in the training (but are included for testing and performance evaluation)]
LearningRate: "2.000000e-02" [ANN learning rate parameter]
DecayRate: "1.000000e-02" [Decay rate for learning parameter]
TestRate: "10" [Test for overtraining performed at each #th epochs]
Sampling: "1.000000e+00" [Only 'Sampling' (randomly selected) events are trained each epoch]
SamplingEpoch: "1.000000e+00" [Sampling is used for the first 'SamplingEpoch' epochs, afterwards, all events are taken for training]
SamplingImportance: "1.000000e+00" [ The sampling weights of events in epochs which successful (worse estimator than before) are multiplied with SamplingImportance, else they are divided.]
SamplingTraining: "True" [The training sample is sampled]
SamplingTesting: "False" [The testing sample is sampled]
ResetStep: "50" [How often BFGS should reset history]
Tau: "3.000000e+00" [LineSearch "size step"]
BPMode: "sequential" [Back-propagation learning mode: sequential or batch]
BatchSize: "-1" [Batch size: number of events/batch, only set if in Batch Mode, -1 for BatchSize=number_of_events]
UpdateLimit: "10000" [Maximum times of regulator update]
CalculateErrors: "False" [Calculates inverse Hessian matrix at the end of the training to be able to calculate the uncertainties of an MVA value]
WeightRange: "1.000000e+00" [Take the events for the estimator calculations from small deviations from the desired value to large deviations only over the weight range]
##

#VAR -*-*-*-*-*-*-*-*-*-*-*-* variables *-*-*-*-*-*-*-*-*-*-*-*-

NVar 18
TrackP                 TrackP                 TrackP                 TrackP                 'F' [115.480003357,4998705.5]
TrackPt                TrackPt                TrackPt                TrackPt                'F' [12.8830137253,867623.6875]
TrackChi2PerDof        TrackChi2PerDof        TrackChi2PerDof        TrackChi2PerDof        'F' [0.00195923517458,3.99922418594]
TrackNumDof            TrackNumDof            TrackNumDof            TrackNumDof            'I' [4,27]
TrackGhostProbability  TrackGhostProbability  TrackGhostProbability  TrackGhostProbability  'F' [0.00192928232718,0.399999499321]
TrackFitVeloChi2       TrackFitVeloChi2       TrackFitVeloChi2       TrackFitVeloChi2       'F' [7.34402965463e-06,67.3927001953]
TrackFitVeloNDoF       TrackFitVeloNDoF       TrackFitVeloNDoF       TrackFitVeloNDoF       'I' [1,23]
RichUsedR1Gas          RichUsedR1Gas          RichUsedR1Gas          RichUsedR1Gas          'I' [0,1]
RichAbovePiThres       RichAbovePiThres       RichAbovePiThres       RichAbovePiThres       'I' [0,1]
RichAboveKaThres       RichAboveKaThres       RichAboveKaThres       RichAboveKaThres       'I' [0,1]
RichAbovePrThres       RichAbovePrThres       RichAbovePrThres       RichAbovePrThres       'I' [0,1]
RichDLLe               RichDLLe               RichDLLe               RichDLLe               'F' [-999,168.813705444]
RichDLLmu              RichDLLmu              RichDLLmu              RichDLLmu              'F' [-999,114.397399902]
RichDLLk               RichDLLk               RichDLLk               RichDLLk               'F' [-999,135.562103271]
RichDLLp               RichDLLp               RichDLLp               RichDLLp               'F' [-999,123.221702576]
RichDLLbt              RichDLLbt              RichDLLbt              RichDLLbt              'F' [-999,51.957901001]
InAccBrem              InAccBrem              InAccBrem              InAccBrem              'I' [0,1]
BremPIDe               BremPIDe               BremPIDe               BremPIDe               'F' [-999,4.79148387909]
NSpec 0

============================================================================ */

#include <vector>
#include <cmath>
#include <string>
#include <iostream>

#ifndef IClassifierReader__def
#define IClassifierReader__def

class IClassifierReader {

 public:

   // constructor
   IClassifierReader() : fStatusIsClean( true ) {}
   virtual ~IClassifierReader() {}

   // return classifier response
   virtual double GetMvaValue( const std::vector<double>& inputValues ) const = 0;

   // returns classifier status
   bool IsStatusClean() const { return fStatusIsClean; }

 protected:

   bool fStatusIsClean;
};

#endif

class ReadGhost_Upstream_TMVA : public IClassifierReader {

 public:

   // constructor
   ReadGhost_Upstream_TMVA( std::vector<std::string>& theInputVars )
      : IClassifierReader(),
        fClassName( "ReadGhost_Upstream_TMVA" ),
        fNvars( 18 ),
        fIsNormalised( false )
   {
      // the training input variables
      const char* inputVars[] = { "TrackP", "TrackPt", "TrackChi2PerDof", "TrackNumDof", "TrackGhostProbability", "TrackFitVeloChi2", "TrackFitVeloNDoF", "RichUsedR1Gas", "RichAbovePiThres", "RichAboveKaThres", "RichAbovePrThres", "RichDLLe", "RichDLLmu", "RichDLLk", "RichDLLp", "RichDLLbt", "InAccBrem", "BremPIDe" };

      // sanity checks
      if (theInputVars.size() <= 0) {
         std::cout << "Problem in class \"" << fClassName << "\": empty input vector" << std::endl;
         fStatusIsClean = false;
      }

      if (theInputVars.size() != fNvars) {
         std::cout << "Problem in class \"" << fClassName << "\": mismatch in number of input values: "
                   << theInputVars.size() << " != " << fNvars << std::endl;
         fStatusIsClean = false;
      }

      // validate input variables
      for (size_t ivar = 0; ivar < theInputVars.size(); ivar++) {
         if (theInputVars[ivar] != inputVars[ivar]) {
            std::cout << "Problem in class \"" << fClassName << "\": mismatch in input variable names" << std::endl
                      << " for variable [" << ivar << "]: " << theInputVars[ivar].c_str() << " != " << inputVars[ivar] << std::endl;
            fStatusIsClean = false;
         }
      }

      // initialize min and max vectors (for normalisation)
      fVmin[0] = -1; fVmax[0] = 1;
      fVmin[1] = -1; fVmax[1] = 0.99999988079071;
      fVmin[2] = -1; fVmax[2] = 1;
      fVmin[3] = -1; fVmax[3] = 1;
      fVmin[4] = -1; fVmax[4] = 0.99999988079071;
      fVmin[5] = -1; fVmax[5] = 1;
      fVmin[6] = -1; fVmax[6] = 1;
      fVmin[7] = -1; fVmax[7] = 1;
      fVmin[8] = -1; fVmax[8] = 1;
      fVmin[9] = -1; fVmax[9] = 1;
      fVmin[10] = -1; fVmax[10] = 1;
      fVmin[11] = -1; fVmax[11] = 1;
      fVmin[12] = -1; fVmax[12] = 1;
      fVmin[13] = -1; fVmax[13] = 1;
      fVmin[14] = -1; fVmax[14] = 1;
      fVmin[15] = -1; fVmax[15] = 1;
      fVmin[16] = -1; fVmax[16] = 1;
      fVmin[17] = -1; fVmax[17] = 1;

      // initialize input variable types
      fType[0] = 'F'; fType[1] = 'F'; fType[2] = 'F'; fType[3] = 'I'; fType[4] = 'F'; fType[5] = 'F';
      fType[6] = 'I'; fType[7] = 'I'; fType[8] = 'I'; fType[9] = 'I'; fType[10] = 'I'; fType[11] = 'F';
      fType[12] = 'F'; fType[13] = 'F'; fType[14] = 'F'; fType[15] = 'F'; fType[16] = 'I'; fType[17] = 'F';

      // initialize constants
      Initialize();

      // initialize transformation
      InitTransform();
   }

   // destructor
   virtual ~ReadGhost_Upstream_TMVA() {
      Clear(); // method-specific
   }

   // the classifier response
   // "inputValues" is a vector of input values in the same order as the
   // variables given to the constructor
   double GetMvaValue( const std::vector<double>& inputValues ) const;

 private:

   // method-specific destructor
   void Clear();

   // input variable transformation
   double fMin_1[3][18];
   double fMax_1[3][18];
   void InitTransform_1();
   void Transform_1( std::vector<double> & iv, int sigOrBgd ) const;
   void InitTransform();
   void Transform( std::vector<double> & iv, int sigOrBgd ) const;

   // common member variables
   const char* fClassName;

   const size_t fNvars;
   size_t GetNvar()           const { return fNvars; }
   char   GetType( int ivar ) const { return fType[ivar]; }

   // normalisation of input variables
   const bool fIsNormalised;
   bool IsNormalised() const { return fIsNormalised; }
   double fVmin[18];
   double fVmax[18];
   double NormVariable( double x, double xmin, double xmax ) const {
      // normalise to output range: [-1, 1]
      return 2*(x - xmin)/(xmax - xmin) - 1.0;
   }

   // type of input variable: 'F' or 'I'
   char   fType[18];

   // initialize internal variables
   void Initialize();
   double GetMvaValue__( const std::vector<double>& inputValues ) const;

   // private members (method specific)
   double ActivationFnc(double x) const;
   double OutputActivationFnc(double x) const;

   int fLayers;
   int fLayerSize[3];
   double fWeightMatrix0to1[24][19];   // weight matrix from layer 0 to 1
   double fWeightMatrix1to2[1][24];    // weight matrix from layer 1 to 2

   double * fWeights[3];
};

inline void ReadGhost_Upstream_TMVA::Initialize()
{
   // build network structure
   fLayers = 3;
   fLayerSize[0] = 19; fWeights[0] = new double[19];
   fLayerSize[1] = 24; fWeights[1] = new double[24];
   fLayerSize[2] = 1;  fWeights[2] = new double[1];
   // weight matrix from layer 0 to 1
   fWeightMatrix0to1[0][0] = -28.8235276402351;
   fWeightMatrix0to1[1][0] = 18.1179601357501;
   fWeightMatrix0to1[2][0] = -8.52689528193455;
   fWeightMatrix0to1[3][0] = 0.238715718622262;
   fWeightMatrix0to1[4][0] = -3.12686766022984;
fWeightMatrix0to1[5][0] = -0.813444862724644; fWeightMatrix0to1[6][0] = -5.04933747643488; fWeightMatrix0to1[7][0] = 0.796968446946299; fWeightMatrix0to1[8][0] = -5.51034510487075; fWeightMatrix0to1[9][0] = -1.92915465680577; fWeightMatrix0to1[10][0] = -6.61850432176457; fWeightMatrix0to1[11][0] = -0.8539362574005; fWeightMatrix0to1[12][0] = -4.90610011250877; fWeightMatrix0to1[13][0] = -6.77920702538568; fWeightMatrix0to1[14][0] = 4.46463044297702; fWeightMatrix0to1[15][0] = 6.08200736375721; fWeightMatrix0to1[16][0] = -4.80329455998095; fWeightMatrix0to1[17][0] = 9.34707444920875; fWeightMatrix0to1[18][0] = 101.576062903747; fWeightMatrix0to1[19][0] = 9.31863555438497; fWeightMatrix0to1[20][0] = -2.95172538155447; fWeightMatrix0to1[21][0] = -3.78305101101327; fWeightMatrix0to1[22][0] = 3.72496528958454; fWeightMatrix0to1[0][1] = -9.33322490779389; fWeightMatrix0to1[1][1] = -2.11551706812704; fWeightMatrix0to1[2][1] = 9.89698954995123; fWeightMatrix0to1[3][1] = 10.6792520708773; fWeightMatrix0to1[4][1] = -3.71912605530765; fWeightMatrix0to1[5][1] = 4.52560112218402; fWeightMatrix0to1[6][1] = 7.40666380066313; fWeightMatrix0to1[7][1] = -2.43776417781483; fWeightMatrix0to1[8][1] = 5.77549675975998; fWeightMatrix0to1[9][1] = -0.633390758204077; fWeightMatrix0to1[10][1] = -0.543855950502001; fWeightMatrix0to1[11][1] = 1.44416662448065; fWeightMatrix0to1[12][1] = -5.82228851309656; fWeightMatrix0to1[13][1] = -11.2001331799996; fWeightMatrix0to1[14][1] = 4.11186540442602; fWeightMatrix0to1[15][1] = 2.81873843276544; fWeightMatrix0to1[16][1] = 4.50734300653147; fWeightMatrix0to1[17][1] = -4.82462552310321; fWeightMatrix0to1[18][1] = 9.68127417541106; fWeightMatrix0to1[19][1] = -4.96774306711291; fWeightMatrix0to1[20][1] = 5.16047495600305; fWeightMatrix0to1[21][1] = -2.66819617365737; fWeightMatrix0to1[22][1] = 1.90960733543184; fWeightMatrix0to1[0][2] = 1.20841004108954; fWeightMatrix0to1[1][2] = 1.2856139347032; fWeightMatrix0to1[2][2] = -0.218244886232357; fWeightMatrix0to1[3][2] = 0.532657033198121; fWeightMatrix0to1[4][2] = 0.407016111749752; fWeightMatrix0to1[5][2] = 1.67413601571311; fWeightMatrix0to1[6][2] = -0.855892969692506; fWeightMatrix0to1[7][2] = 0.0891684262598578; fWeightMatrix0to1[8][2] = -3.56815561553495; fWeightMatrix0to1[9][2] = -0.548797672492775; fWeightMatrix0to1[10][2] = -1.85681105228998; fWeightMatrix0to1[11][2] = 1.05718151700927; fWeightMatrix0to1[12][2] = 0.297650418208365; fWeightMatrix0to1[13][2] = 2.42897897404609; fWeightMatrix0to1[14][2] = -1.9223252267845; fWeightMatrix0to1[15][2] = -1.12538269574368; fWeightMatrix0to1[16][2] = 0.211785301282892; fWeightMatrix0to1[17][2] = 2.37104496928907; fWeightMatrix0to1[18][2] = 0.234439838498426; fWeightMatrix0to1[19][2] = -0.530028023204687; fWeightMatrix0to1[20][2] = 0.72164198385016; fWeightMatrix0to1[21][2] = 1.17691065839371; fWeightMatrix0to1[22][2] = -0.543502132571592; fWeightMatrix0to1[0][3] = -2.04311026469124; fWeightMatrix0to1[1][3] = -1.20669204090619; fWeightMatrix0to1[2][3] = 2.84599560502273; fWeightMatrix0to1[3][3] = -0.0428768838023468; fWeightMatrix0to1[4][3] = 0.764981446141033; fWeightMatrix0to1[5][3] = 13.254480649856; fWeightMatrix0to1[6][3] = 12.7179297886987; fWeightMatrix0to1[7][3] = -0.738027494997737; fWeightMatrix0to1[8][3] = 9.65195879775912; fWeightMatrix0to1[9][3] = 0.729439816081347; fWeightMatrix0to1[10][3] = 3.54698380408994; fWeightMatrix0to1[11][3] = 0.722389335868264; fWeightMatrix0to1[12][3] = 3.8116227888659; fWeightMatrix0to1[13][3] = 1.29519723982304; fWeightMatrix0to1[14][3] = 
-8.00536286376089; fWeightMatrix0to1[15][3] = 0.382319441007033; fWeightMatrix0to1[16][3] = -0.620489936725906; fWeightMatrix0to1[17][3] = -0.594032430143821; fWeightMatrix0to1[18][3] = -1.24639686530961; fWeightMatrix0to1[19][3] = -8.71099523835395; fWeightMatrix0to1[20][3] = -21.8260598087776; fWeightMatrix0to1[21][3] = -0.905990470026797; fWeightMatrix0to1[22][3] = 2.84720242525331; fWeightMatrix0to1[0][4] = -0.128451061458668; fWeightMatrix0to1[1][4] = -0.738513088569237; fWeightMatrix0to1[2][4] = -0.0825866260035262; fWeightMatrix0to1[3][4] = -0.0712505734471451; fWeightMatrix0to1[4][4] = 0.15701222819364; fWeightMatrix0to1[5][4] = -0.532063492778823; fWeightMatrix0to1[6][4] = -0.0249874117382801; fWeightMatrix0to1[7][4] = 0.297619824736495; fWeightMatrix0to1[8][4] = 0.44218670466297; fWeightMatrix0to1[9][4] = -0.0484410163962655; fWeightMatrix0to1[10][4] = 0.165145172176373; fWeightMatrix0to1[11][4] = 0.124990780150127; fWeightMatrix0to1[12][4] = 0.0251106329175592; fWeightMatrix0to1[13][4] = 23.2911053864737; fWeightMatrix0to1[14][4] = 0.0870123706241847; fWeightMatrix0to1[15][4] = -1.74559222576903; fWeightMatrix0to1[16][4] = -0.519289869475094; fWeightMatrix0to1[17][4] = 0.0399217597959512; fWeightMatrix0to1[18][4] = 0.00051066486788519; fWeightMatrix0to1[19][4] = -0.594375807368349; fWeightMatrix0to1[20][4] = -0.33967527746065; fWeightMatrix0to1[21][4] = 4.97537434246333; fWeightMatrix0to1[22][4] = 0.293044991670871; fWeightMatrix0to1[0][5] = -2.95172967604919; fWeightMatrix0to1[1][5] = -3.5263451558428; fWeightMatrix0to1[2][5] = -0.104988454456669; fWeightMatrix0to1[3][5] = -1.15061253439687; fWeightMatrix0to1[4][5] = -0.0715594411502014; fWeightMatrix0to1[5][5] = -2.86708033419629; fWeightMatrix0to1[6][5] = 1.10447656169782; fWeightMatrix0to1[7][5] = 0.621292714879421; fWeightMatrix0to1[8][5] = 6.41817954524736; fWeightMatrix0to1[9][5] = 0.667761368656389; fWeightMatrix0to1[10][5] = 1.36775387527847; fWeightMatrix0to1[11][5] = 0.112038172340811; fWeightMatrix0to1[12][5] = -0.203563838168373; fWeightMatrix0to1[13][5] = -2.60184011197919; fWeightMatrix0to1[14][5] = 9.2051230344433; fWeightMatrix0to1[15][5] = 0.0524808339472907; fWeightMatrix0to1[16][5] = -2.35027451855934; fWeightMatrix0to1[17][5] = -2.76921662688559; fWeightMatrix0to1[18][5] = -0.314862249743789; fWeightMatrix0to1[19][5] = 0.759622312404639; fWeightMatrix0to1[20][5] = -0.871870784581863; fWeightMatrix0to1[21][5] = -0.695669442739959; fWeightMatrix0to1[22][5] = 0.739377812201173; fWeightMatrix0to1[0][6] = 2.32670542713004; fWeightMatrix0to1[1][6] = 5.10393412568441; fWeightMatrix0to1[2][6] = -4.08088896094672; fWeightMatrix0to1[3][6] = 5.30677559774394; fWeightMatrix0to1[4][6] = 0.609124263112554; fWeightMatrix0to1[5][6] = -10.1662198753156; fWeightMatrix0to1[6][6] = -11.7488485439802; fWeightMatrix0to1[7][6] = 2.3288600406762; fWeightMatrix0to1[8][6] = -10.3405923035609; fWeightMatrix0to1[9][6] = -1.12570702167963; fWeightMatrix0to1[10][6] = -2.86135469420225; fWeightMatrix0to1[11][6] = -1.21981110702533; fWeightMatrix0to1[12][6] = -2.78748116745333; fWeightMatrix0to1[13][6] = -0.942261773333106; fWeightMatrix0to1[14][6] = 9.01263496577765; fWeightMatrix0to1[15][6] = -0.676648040028556; fWeightMatrix0to1[16][6] = 2.37933866028368; fWeightMatrix0to1[17][6] = 3.25946275744495; fWeightMatrix0to1[18][6] = 0.722995029541239; fWeightMatrix0to1[19][6] = 7.22806885534227; fWeightMatrix0to1[20][6] = 19.6726737149188; fWeightMatrix0to1[21][6] = 1.27123516722087; fWeightMatrix0to1[22][6] = -1.76577157566855; 
fWeightMatrix0to1[0][7] = -4.53250763399869; fWeightMatrix0to1[1][7] = 1.07731097933625; fWeightMatrix0to1[2][7] = -1.28312699053419; fWeightMatrix0to1[3][7] = 8.53559666002778; fWeightMatrix0to1[4][7] = 0.768046995834967; fWeightMatrix0to1[5][7] = 2.63776039846738; fWeightMatrix0to1[6][7] = 10.8351630754303; fWeightMatrix0to1[7][7] = -0.040149495759664; fWeightMatrix0to1[8][7] = 1.90953191788529; fWeightMatrix0to1[9][7] = 0.482345306969967; fWeightMatrix0to1[10][7] = 8.47775986139964; fWeightMatrix0to1[11][7] = -0.0715229067551338; fWeightMatrix0to1[12][7] = 5.5365199107072; fWeightMatrix0to1[13][7] = -7.6536861774725; fWeightMatrix0to1[14][7] = -1.53569330206581; fWeightMatrix0to1[15][7] = 2.8228874196151; fWeightMatrix0to1[16][7] = 1.22168370157107; fWeightMatrix0to1[17][7] = 1.27473210206898; fWeightMatrix0to1[18][7] = -12.6049978136943; fWeightMatrix0to1[19][7] = -12.5801619094304; fWeightMatrix0to1[20][7] = 4.59069092307855; fWeightMatrix0to1[21][7] = -5.97889830222176; fWeightMatrix0to1[22][7] = -2.15706226635161; fWeightMatrix0to1[0][8] = 0.244266503521318; fWeightMatrix0to1[1][8] = 0.374002155986517; fWeightMatrix0to1[2][8] = -0.246207984824095; fWeightMatrix0to1[3][8] = -0.00926102605044792; fWeightMatrix0to1[4][8] = -2.3803440928526; fWeightMatrix0to1[5][8] = -0.0464569821242879; fWeightMatrix0to1[6][8] = -0.233543365534403; fWeightMatrix0to1[7][8] = 1.22612963268979; fWeightMatrix0to1[8][8] = 0.374761940191732; fWeightMatrix0to1[9][8] = 0.180197358176372; fWeightMatrix0to1[10][8] = -2.3081891159118; fWeightMatrix0to1[11][8] = 0.0930530281694091; fWeightMatrix0to1[12][8] = 0.135218178596279; fWeightMatrix0to1[13][8] = -0.128773111767146; fWeightMatrix0to1[14][8] = -4.96440261003749; fWeightMatrix0to1[15][8] = 0.211308722001751; fWeightMatrix0to1[16][8] = 0.05350927229231; fWeightMatrix0to1[17][8] = 1.60071738648069; fWeightMatrix0to1[18][8] = -10.7980550376298; fWeightMatrix0to1[19][8] = 0.458147645910678; fWeightMatrix0to1[20][8] = 0.394231385200735; fWeightMatrix0to1[21][8] = -0.226366366428004; fWeightMatrix0to1[22][8] = -1.88224720460216; fWeightMatrix0to1[0][9] = 0.995393267216435; fWeightMatrix0to1[1][9] = 3.3742016012253; fWeightMatrix0to1[2][9] = 5.59735965781804; fWeightMatrix0to1[3][9] = 0.127566854051597; fWeightMatrix0to1[4][9] = 1.85134147789409; fWeightMatrix0to1[5][9] = 0.277730824627698; fWeightMatrix0to1[6][9] = -0.0436891106587404; fWeightMatrix0to1[7][9] = 0.567222106941223; fWeightMatrix0to1[8][9] = -0.155307567494789; fWeightMatrix0to1[9][9] = 0.470899858894981; fWeightMatrix0to1[10][9] = -1.47476157860231; fWeightMatrix0to1[11][9] = -1.31077140513991; fWeightMatrix0to1[12][9] = -5.7905387452349; fWeightMatrix0to1[13][9] = -0.252081869581582; fWeightMatrix0to1[14][9] = -2.03305648842447; fWeightMatrix0to1[15][9] = 0.184675585330675; fWeightMatrix0to1[16][9] = -0.460066516552684; fWeightMatrix0to1[17][9] = 5.55965852806746; fWeightMatrix0to1[18][9] = -12.1406757784777; fWeightMatrix0to1[19][9] = 3.43873867882442; fWeightMatrix0to1[20][9] = -0.0602437808379762; fWeightMatrix0to1[21][9] = -0.258257212550598; fWeightMatrix0to1[22][9] = -0.840476742981897; fWeightMatrix0to1[0][10] = 20.8465413073813; fWeightMatrix0to1[1][10] = -0.423307742370857; fWeightMatrix0to1[2][10] = 0.897403203967452; fWeightMatrix0to1[3][10] = -6.43169903405557; fWeightMatrix0to1[4][10] = 1.59778895080251; fWeightMatrix0to1[5][10] = 0.348133557290235; fWeightMatrix0to1[6][10] = -8.98466659337426; fWeightMatrix0to1[7][10] = 0.24160859211655; fWeightMatrix0to1[8][10] = -0.162524336699689; 
fWeightMatrix0to1[9][10] = -0.0911168403281854; fWeightMatrix0to1[10][10] = -0.51518044061348; fWeightMatrix0to1[11][10] = -0.570505534005657; fWeightMatrix0to1[12][10] = -5.90034894745909; fWeightMatrix0to1[13][10] = -0.145587150974906; fWeightMatrix0to1[14][10] = -4.39640865281968; fWeightMatrix0to1[15][10] = 0.106873452301595; fWeightMatrix0to1[16][10] = 0.0234452211940486; fWeightMatrix0to1[17][10] = 0.0220288877032798; fWeightMatrix0to1[18][10] = -9.46838861226607; fWeightMatrix0to1[19][10] = -3.5302574051721; fWeightMatrix0to1[20][10] = 2.26691019931282; fWeightMatrix0to1[21][10] = -0.307301273883581; fWeightMatrix0to1[22][10] = 0.887911995711123; fWeightMatrix0to1[0][11] = 55.388708164381; fWeightMatrix0to1[1][11] = -1.10652647555923; fWeightMatrix0to1[2][11] = -41.0688736330646; fWeightMatrix0to1[3][11] = -25.140321673858; fWeightMatrix0to1[4][11] = -10.5209093076642; fWeightMatrix0to1[5][11] = 12.0190734096547; fWeightMatrix0to1[6][11] = -41.9998613277334; fWeightMatrix0to1[7][11] = 0.13300080903575; fWeightMatrix0to1[8][11] = 12.6930653386532; fWeightMatrix0to1[9][11] = 2.29109654016353; fWeightMatrix0to1[10][11] = -5.90143140101845; fWeightMatrix0to1[11][11] = -0.260756031337357; fWeightMatrix0to1[12][11] = -19.0582587261531; fWeightMatrix0to1[13][11] = 7.74046634666238; fWeightMatrix0to1[14][11] = 7.13734170118194; fWeightMatrix0to1[15][11] = 4.17519959406967; fWeightMatrix0to1[16][11] = 0.973993047062735; fWeightMatrix0to1[17][11] = -2.52658003905935; fWeightMatrix0to1[18][11] = -9.3291998176057; fWeightMatrix0to1[19][11] = 60.3797311077784; fWeightMatrix0to1[20][11] = -4.73338175168905; fWeightMatrix0to1[21][11] = 4.00190968117137; fWeightMatrix0to1[22][11] = 0.415775350325495; fWeightMatrix0to1[0][12] = 9.17483038175887; fWeightMatrix0to1[1][12] = 1.56929894742262; fWeightMatrix0to1[2][12] = -19.9541465883728; fWeightMatrix0to1[3][12] = -2.48800194856765; fWeightMatrix0to1[4][12] = -0.102119163035276; fWeightMatrix0to1[5][12] = -0.437613798765989; fWeightMatrix0to1[6][12] = 1.68828930400712; fWeightMatrix0to1[7][12] = -0.642375145703339; fWeightMatrix0to1[8][12] = -2.46598250363954; fWeightMatrix0to1[9][12] = -1.43665624783761; fWeightMatrix0to1[10][12] = -0.756884787320195; fWeightMatrix0to1[11][12] = 2.2353435159932; fWeightMatrix0to1[12][12] = -2.51656021630644; fWeightMatrix0to1[13][12] = 2.44165577758771; fWeightMatrix0to1[14][12] = -7.23098721711636; fWeightMatrix0to1[15][12] = 0.16758090575202; fWeightMatrix0to1[16][12] = 0.934502540094296; fWeightMatrix0to1[17][12] = -1.49507491186157; fWeightMatrix0to1[18][12] = -9.54773940799648; fWeightMatrix0to1[19][12] = -1.51884169672856; fWeightMatrix0to1[20][12] = -3.63241702228926; fWeightMatrix0to1[21][12] = 1.80773979078041; fWeightMatrix0to1[22][12] = -0.103674827233444; fWeightMatrix0to1[0][13] = -16.6031832877286; fWeightMatrix0to1[1][13] = -1.35911011438676; fWeightMatrix0to1[2][13] = 17.3622616955956; fWeightMatrix0to1[3][13] = 10.5837493877297; fWeightMatrix0to1[4][13] = -1.17047358351183; fWeightMatrix0to1[5][13] = -4.04917115626239; fWeightMatrix0to1[6][13] = 2.11848847452499; fWeightMatrix0to1[7][13] = -0.876880436595914; fWeightMatrix0to1[8][13] = -3.85975930632706; fWeightMatrix0to1[9][13] = -0.762494784617702; fWeightMatrix0to1[10][13] = -13.3683849525218; fWeightMatrix0to1[11][13] = 1.29245691674924; fWeightMatrix0to1[12][13] = -6.482116906586; fWeightMatrix0to1[13][13] = -2.14864705455009; fWeightMatrix0to1[14][13] = 1.87227828911405; fWeightMatrix0to1[15][13] = -2.28549515160688; fWeightMatrix0to1[16][13] = 
-0.571311582064844; fWeightMatrix0to1[17][13] = 0.486732347602508; fWeightMatrix0to1[18][13] = -12.1176985558209; fWeightMatrix0to1[19][13] = -5.36114439817321; fWeightMatrix0to1[20][13] = 2.16495089562338; fWeightMatrix0to1[21][13] = -0.641843522543631; fWeightMatrix0to1[22][13] = -0.714864844285281; fWeightMatrix0to1[0][14] = -17.8036646778996; fWeightMatrix0to1[1][14] = -2.27103662989255; fWeightMatrix0to1[2][14] = 18.9058585223317; fWeightMatrix0to1[3][14] = 9.41539947086539; fWeightMatrix0to1[4][14] = 0.890237459963133; fWeightMatrix0to1[5][14] = -3.88902253147735; fWeightMatrix0to1[6][14] = -1.73313797691596; fWeightMatrix0to1[7][14] = 1.33202995977689; fWeightMatrix0to1[8][14] = -5.46454032920736; fWeightMatrix0to1[9][14] = 0.757071087547616; fWeightMatrix0to1[10][14] = -12.4422758237133; fWeightMatrix0to1[11][14] = -1.51909807177771; fWeightMatrix0to1[12][14] = -5.18246107795957; fWeightMatrix0to1[13][14] = 0.926879398231622; fWeightMatrix0to1[14][14] = 0.537378342706348; fWeightMatrix0to1[15][14] = -1.91191173965803; fWeightMatrix0to1[16][14] = -0.073864942181643; fWeightMatrix0to1[17][14] = -3.1626913650149; fWeightMatrix0to1[18][14] = -11.0956670471305; fWeightMatrix0to1[19][14] = -7.7346366728782; fWeightMatrix0to1[20][14] = 2.6223372540234; fWeightMatrix0to1[21][14] = 0.0244764258354605; fWeightMatrix0to1[22][14] = 0.305118399670689; fWeightMatrix0to1[0][15] = -21.2261601712676; fWeightMatrix0to1[1][15] = -2.7477238258157; fWeightMatrix0to1[2][15] = 19.9720359007437; fWeightMatrix0to1[3][15] = 8.33197987342972; fWeightMatrix0to1[4][15] = -1.0328093904855; fWeightMatrix0to1[5][15] = -5.96241285024341; fWeightMatrix0to1[6][15] = 2.01607485403676; fWeightMatrix0to1[7][15] = 1.15200480862602; fWeightMatrix0to1[8][15] = -6.24771230179492; fWeightMatrix0to1[9][15] = -0.91002958045562; fWeightMatrix0to1[10][15] = -14.0618956952686; fWeightMatrix0to1[11][15] = -1.39032435815094; fWeightMatrix0to1[12][15] = -4.14700448312969; fWeightMatrix0to1[13][15] = 0.00410586588410581; fWeightMatrix0to1[14][15] = -0.858107884658289; fWeightMatrix0to1[15][15] = -3.19832454410533; fWeightMatrix0to1[16][15] = -2.66985556349478; fWeightMatrix0to1[17][15] = -2.54862581200876; fWeightMatrix0to1[18][15] = -12.2082498864498; fWeightMatrix0to1[19][15] = -6.21722683830921; fWeightMatrix0to1[20][15] = -0.638180688118146; fWeightMatrix0to1[21][15] = 1.77771411072742; fWeightMatrix0to1[22][15] = 0.338536047358787; fWeightMatrix0to1[0][16] = -4.13878513501723; fWeightMatrix0to1[1][16] = 6.62265239789763; fWeightMatrix0to1[2][16] = 1.1436249395859; fWeightMatrix0to1[3][16] = -1.9582912108346; fWeightMatrix0to1[4][16] = 0.245437113667408; fWeightMatrix0to1[5][16] = 2.71121703906476; fWeightMatrix0to1[6][16] = 1.67308334419192; fWeightMatrix0to1[7][16] = -2.30570527172042; fWeightMatrix0to1[8][16] = 2.90906432348328; fWeightMatrix0to1[9][16] = -0.233031450854262; fWeightMatrix0to1[10][16] = 8.83743829186402; fWeightMatrix0to1[11][16] = 0.16699429862634; fWeightMatrix0to1[12][16] = -0.0825629952911697; fWeightMatrix0to1[13][16] = -1.95383590657576; fWeightMatrix0to1[14][16] = 3.84122331280171; fWeightMatrix0to1[15][16] = 1.91232853199067; fWeightMatrix0to1[16][16] = 0.721513028389783; fWeightMatrix0to1[17][16] = -0.4999064546984; fWeightMatrix0to1[18][16] = -2.00730190760852; fWeightMatrix0to1[19][16] = -2.1563946894131; fWeightMatrix0to1[20][16] = 3.97498547512085; fWeightMatrix0to1[21][16] = -2.1895799442439; fWeightMatrix0to1[22][16] = 0.737601966957068; fWeightMatrix0to1[0][17] = 4.08321374012104; 
   fWeightMatrix0to1[1][17] = 2.17412055356222;
   fWeightMatrix0to1[2][17] = -1.47022490461453;
   fWeightMatrix0to1[3][17] = -4.32616092695204;
   fWeightMatrix0to1[4][17] = 3.11257926643948;
   fWeightMatrix0to1[5][17] = -2.29511738351582;
   fWeightMatrix0to1[6][17] = -1.90658953605043;
   fWeightMatrix0to1[7][17] = -0.545660374300149;
   fWeightMatrix0to1[8][17] = 0.0531147735406193;
   fWeightMatrix0to1[9][17] = 0.769409560021437;
   fWeightMatrix0to1[10][17] = 5.92618384339273;
   fWeightMatrix0to1[11][17] = -0.436510676678523;
   fWeightMatrix0to1[12][17] = -0.167538375854154;
   fWeightMatrix0to1[13][17] = 2.10368474738084;
   fWeightMatrix0to1[14][17] = 0.678691660543915;
   fWeightMatrix0to1[15][17] = -2.10200046398851;
   fWeightMatrix0to1[16][17] = -0.4052394641543;
   fWeightMatrix0to1[17][17] = 0.479051760127;
   fWeightMatrix0to1[18][17] = -5.44485772684408;
   fWeightMatrix0to1[19][17] = 1.90371862608527;
   fWeightMatrix0to1[20][17] = -3.96820495687473;
   fWeightMatrix0to1[21][17] = 2.25102721533759;
   fWeightMatrix0to1[22][17] = -1.07836167143533;
   fWeightMatrix0to1[0][18] = -13.842024672139;
   fWeightMatrix0to1[1][18] = 9.56520898681502;
   fWeightMatrix0to1[2][18] = 5.40848129508261;
   fWeightMatrix0to1[3][18] = 2.70668212395763;
   fWeightMatrix0to1[4][18] = 3.02573024759065;
   fWeightMatrix0to1[5][18] = 4.06687496621383;
   fWeightMatrix0to1[6][18] = 9.2797246799676;
   fWeightMatrix0to1[7][18] = 3.11259306353421;
   fWeightMatrix0to1[8][18] = 3.70474201808795;
   fWeightMatrix0to1[9][18] = -3.40080147207341;
   fWeightMatrix0to1[10][18] = 5.98310374117341;
   fWeightMatrix0to1[11][18] = -1.14815366990839;
   fWeightMatrix0to1[12][18] = 2.50997725745526;
   fWeightMatrix0to1[13][18] = 5.36883642591599;
   fWeightMatrix0to1[14][18] = 1.06183753849383;
   fWeightMatrix0to1[15][18] = 8.04971634150929;
   fWeightMatrix0to1[16][18] = -1.01820538638485;
   fWeightMatrix0to1[17][18] = 2.33611568895203;
   fWeightMatrix0to1[18][18] = 4.10990780040237;
   fWeightMatrix0to1[19][18] = -11.265553136156;
   fWeightMatrix0to1[20][18] = 3.12050769839619;
   fWeightMatrix0to1[21][18] = -1.76629432404731;
   fWeightMatrix0to1[22][18] = 4.98819810608674;
   // weight matrix from layer 1 to 2
   fWeightMatrix1to2[0][0] = 0.783074724622431;
   fWeightMatrix1to2[0][1] = -1.81982429447114;
   fWeightMatrix1to2[0][2] = -0.846268884123519;
   fWeightMatrix1to2[0][3] = -0.67243930760346;
   fWeightMatrix1to2[0][4] = 1.74332367161372;
   fWeightMatrix1to2[0][5] = -0.575332404691166;
   fWeightMatrix1to2[0][6] = -0.619763706181825;
   fWeightMatrix1to2[0][7] = 0.936434564384421;
   fWeightMatrix1to2[0][8] = -0.610948875566717;
   fWeightMatrix1to2[0][9] = 1.11793467987501;
   fWeightMatrix1to2[0][10] = -0.975166091709296;
   fWeightMatrix1to2[0][11] = -0.69046059748438;
   fWeightMatrix1to2[0][12] = 1.65859101039029;
   fWeightMatrix1to2[0][13] = 0.841774185101912;
   fWeightMatrix1to2[0][14] = -0.337234722410429;
   fWeightMatrix1to2[0][15] = -0.497599094224417;
   fWeightMatrix1to2[0][16] = -1.07971046839963;
   fWeightMatrix1to2[0][17] = 1.02102413421805;
   fWeightMatrix1to2[0][18] = -4.8195656486813;
   fWeightMatrix1to2[0][19] = 0.651976969292217;
   fWeightMatrix1to2[0][20] = -0.707778816690049;
   fWeightMatrix1to2[0][21] = 0.687719986799629;
   fWeightMatrix1to2[0][22] = 3.31766165628267;
   fWeightMatrix1to2[0][23] = -4.80754190827471;
}

inline double ReadGhost_Upstream_TMVA::GetMvaValue__( const std::vector<double>& inputValues ) const
{
   if (inputValues.size() != (unsigned int)fLayerSize[0]-1) {
      std::cout << "Input vector needs to be of size " << fLayerSize[0]-1 << std::endl;
      return 0;
   }

   // reset all neuron activations
   for (int l=0; l<fLayers; l++)
      for (int i=0; i<fLayerSize[l]; i++) fWeights[l][i]=0;

   // set the bias neurons of the non-output layers to 1
   for (int l=0; l<fLayers-1; l++)
      fWeights[l][fLayerSize[l]-1]=1;

   // feed the (already transformed) input variables into layer 0
   for (int i=0; i<fLayerSize[0]-1; i++)
      fWeights[0][i]=inputValues[i];

   // layer 0 to 1
   for (int o=0; o<fLayerSize[1]-1; o++) {
      for (int i=0; i<fLayerSize[0]; i++) {
         double inputVal = fWeightMatrix0to1[o][i] * fWeights[0][i];
         fWeights[1][o] += inputVal;
      }
      fWeights[1][o] = ActivationFnc(fWeights[1][o]);
   }
   // layer 1 to 2
   for (int o=0; o<fLayerSize[2]; o++) {
      for (int i=0; i<fLayerSize[1]; i++) {
         double inputVal = fWeightMatrix1to2[o][i] * fWeights[1][i];
         fWeights[2][o] += inputVal;
      }
      fWeights[2][o] = OutputActivationFnc(fWeights[2][o]);
   }

   return fWeights[2][0];
}

double ReadGhost_Upstream_TMVA::ActivationFnc(double x) const {
   // hyperbolic tan (NeuronType: "tanh")
   return tanh(x);
}

double ReadGhost_Upstream_TMVA::OutputActivationFnc(double x) const {
   // sigmoid (EstimatorType: "CE", Bernoulli likelihood)
   return 1.0/(1.0+exp(-x));
}

// Clean up
inline void ReadGhost_Upstream_TMVA::Clear()
{
   // clean up the arrays
   for (int lIdx = 0; lIdx < 3; lIdx++) {
      delete[] fWeights[lIdx];
   }
}

inline double ReadGhost_Upstream_TMVA::GetMvaValue( const std::vector<double>& inputValues ) const
{
   // classifier response value
   double retval = 0;
   // classifier response, sanity check first
   if (!IsStatusClean()) {
      std::cout << "Problem in class \"" << fClassName << "\": cannot return classifier response"
                << " because status is dirty" << std::endl;
      retval = 0;
   }
   else {
      if (IsNormalised()) {
         // normalise variables
         std::vector<double> iV;
         iV.reserve(inputValues.size());
         int ivar = 0;
         for (std::vector<double>::const_iterator varIt = inputValues.begin();
              varIt != inputValues.end(); varIt++, ivar++) {
            iV.push_back(NormVariable( *varIt, fVmin[ivar], fVmax[ivar] ));
         }
         Transform( iV, -1 );
         retval = GetMvaValue__( iV );
      }
      else {
         std::vector<double> iV;
         int ivar = 0;
         for (std::vector<double>::const_iterator varIt = inputValues.begin();
              varIt != inputValues.end(); varIt++, ivar++) {
            iV.push_back(*varIt);
         }
         Transform( iV, -1 );
         retval = GetMvaValue__( iV );
      }
   }

   return retval;
}

//_______________________________________________________________________
inline void ReadGhost_Upstream_TMVA::InitTransform_1()
{
   // Normalization transformation, initialisation
   fMin_1[0][0] = 115.480003357; fMax_1[0][0] = 4998705.5;
   fMin_1[1][0] = 125.38999939; fMax_1[1][0] = 4954328;
   fMin_1[2][0] = 115.480003357; fMax_1[2][0] = 4998705.5;
   fMin_1[0][1] = 16.465473175; fMax_1[0][1] = 867623.6875;
   fMin_1[1][1] = 12.8830137253; fMax_1[1][1] = 724337;
   fMin_1[2][1] = 12.8830137253; fMax_1[2][1] = 867623.6875;
   fMin_1[0][2] = 0.00195923517458; fMax_1[0][2] = 3.99805927277;
   fMin_1[1][2] = 0.00263477140106; fMax_1[1][2] = 3.99922418594;
   fMin_1[2][2] = 0.00195923517458; fMax_1[2][2] = 3.99922418594;
   fMin_1[0][3] = 4; fMax_1[0][3] = 27;
   fMin_1[1][3] = 4; fMax_1[1][3] = 27;
   fMin_1[2][3] = 4; fMax_1[2][3] = 27;
   fMin_1[0][4] = 0.00192928232718; fMax_1[0][4] = 0.399999499321;
   fMin_1[1][4] = 0.0022268560715; fMax_1[1][4] = 0.399999022484;
   fMin_1[2][4] = 0.00192928232718; fMax_1[2][4] = 0.399999499321;
   fMin_1[0][5] = 7.34402965463e-06; fMax_1[0][5] = 67.3927001953;
   fMin_1[1][5] = 7.90732883615e-05; fMax_1[1][5] = 63.098651886;
   fMin_1[2][5] = 7.34402965463e-06; fMax_1[2][5] = 67.3927001953;
   fMin_1[0][6] = 1; fMax_1[0][6] = 23;
   fMin_1[1][6] = 1; fMax_1[1][6] = 23;
   fMin_1[2][6] = 1; fMax_1[2][6] = 23;
   fMin_1[0][7] = 0; fMax_1[0][7] = 1;
   fMin_1[1][7] = 0; fMax_1[1][7] = 1;
   fMin_1[2][7] = 0; fMax_1[2][7] = 1;
   fMin_1[0][8] = 0; fMax_1[0][8] = 1;
   fMin_1[1][8] = 0; fMax_1[1][8] = 1;
   fMin_1[2][8] = 0; fMax_1[2][8] = 1;
   fMin_1[0][9] = 0; fMax_1[0][9] = 1;
   fMin_1[1][9] = 0; fMax_1[1][9] = 1;
   fMin_1[2][9] = 0; fMax_1[2][9] = 1;
   fMin_1[0][10] = 0; fMax_1[0][10] = 1;
   fMin_1[1][10] = 0; fMax_1[1][10] = 1;
   fMin_1[2][10] = 0; fMax_1[2][10] = 1;
   fMin_1[0][11] = -999; fMax_1[0][11] = 168.813705444;
   fMin_1[1][11] = -999; fMax_1[1][11] = 137.75289917;
   fMin_1[2][11] = -999; fMax_1[2][11] = 168.813705444;
   fMin_1[0][12] = -999; fMax_1[0][12] = 114.397399902;
   fMin_1[1][12] = -999; fMax_1[1][12] = 90.4578018188;
   fMin_1[2][12] = -999; fMax_1[2][12] = 114.397399902;
   fMin_1[0][13] = -999; fMax_1[0][13] = 135.562103271;
   fMin_1[1][13] = -999; fMax_1[1][13] = 87.2134017944;
   fMin_1[2][13] = -999; fMax_1[2][13] = 135.562103271;
   fMin_1[0][14] = -999; fMax_1[0][14] = 123.221702576;
   fMin_1[1][14] = -999; fMax_1[1][14] = 94.3625030518;
   fMin_1[2][14] = -999; fMax_1[2][14] = 123.221702576;
   fMin_1[0][15] = -999; fMax_1[0][15] = 51.1268997192;
   fMin_1[1][15] = -999; fMax_1[1][15] = 51.957901001;
   fMin_1[2][15] = -999; fMax_1[2][15] = 51.957901001;
   fMin_1[0][16] = 0; fMax_1[0][16] = 1;
   fMin_1[1][16] = 0; fMax_1[1][16] = 1;
   fMin_1[2][16] = 0; fMax_1[2][16] = 1;
   fMin_1[0][17] = -999; fMax_1[0][17] = 4.79148387909;
   fMin_1[1][17] = -999; fMax_1[1][17] = 4.79148387909;
   fMin_1[2][17] = -999; fMax_1[2][17] = 4.79148387909;
}

//_______________________________________________________________________
inline void ReadGhost_Upstream_TMVA::Transform_1( std::vector<double>& iv, int cls) const
{
   // Normalization transformation
   if (cls < 0 || cls > 2) {
      // default: use the statistics of all classes combined
      cls = 2;
   }

   const int nVar = 18;

   // get indices of used variables
   // define the indices of the variables which are transformed by this transformation
   static std::vector<int> indicesGet;
   static std::vector<int> indicesPut;

   if ( indicesGet.empty() ) {
      indicesGet.reserve(fNvars);
      indicesGet.push_back( 0); indicesGet.push_back( 1); indicesGet.push_back( 2); indicesGet.push_back( 3); indicesGet.push_back( 4); indicesGet.push_back( 5);
      indicesGet.push_back( 6); indicesGet.push_back( 7); indicesGet.push_back( 8); indicesGet.push_back( 9); indicesGet.push_back( 10); indicesGet.push_back( 11);
      indicesGet.push_back( 12); indicesGet.push_back( 13); indicesGet.push_back( 14); indicesGet.push_back( 15); indicesGet.push_back( 16); indicesGet.push_back( 17);
   }
   if ( indicesPut.empty() ) {
      indicesPut.reserve(fNvars);
      indicesPut.push_back( 0); indicesPut.push_back( 1); indicesPut.push_back( 2); indicesPut.push_back( 3); indicesPut.push_back( 4); indicesPut.push_back( 5);
      indicesPut.push_back( 6); indicesPut.push_back( 7); indicesPut.push_back( 8); indicesPut.push_back( 9); indicesPut.push_back( 10); indicesPut.push_back( 11);
      indicesPut.push_back( 12); indicesPut.push_back( 13); indicesPut.push_back( 14); indicesPut.push_back( 15); indicesPut.push_back( 16); indicesPut.push_back( 17);
   }

   static std::vector<double> dv;
   dv.resize(nVar);
   for (int ivar=0; ivar<nVar; ivar++) dv[ivar] = iv[indicesGet.at(ivar)];

   // map each variable linearly onto [-1, 1] using the class-dependent min/max
   for (int ivar=0; ivar<nVar; ivar++) {
      double offset = fMin_1[cls][ivar];
      double scale  = 1.0/(fMax_1[cls][ivar]-fMin_1[cls][ivar]);
      iv[indicesPut.at(ivar)] = (dv[ivar]-offset)*scale * 2 - 1;
   }
}

//_______________________________________________________________________
inline void ReadGhost_Upstream_TMVA::InitTransform()
{
   InitTransform_1();
}

//_______________________________________________________________________
inline void ReadGhost_Upstream_TMVA::Transform( std::vector<double>& iv, int sigOrBgd ) const
{
   Transform_1( iv, sigOrBgd );
}
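
// ----------------------------------------------------------------------------
// Minimal usage sketch (not part of the TMVA-generated output). It shows how a
// standalone MakeClass reader like this one is typically driven: construct it
// with the training variable names in their original order, then call
// GetMvaValue() with one vector of input values per track. The variable names
// and their order are taken from the header comment above; the numerical
// values below are arbitrary placeholders inside the quoted training ranges,
// not physics-motivated defaults. The macro name READGHOST_UPSTREAM_EXAMPLE is
// invented here purely to keep this sketch out of normal builds.
// ----------------------------------------------------------------------------
#ifdef READGHOST_UPSTREAM_EXAMPLE

#include <iostream>
#include <string>
#include <vector>

int main()
{
   // Input variable names, in exactly the order used for training.
   std::vector<std::string> names = {
      "TrackP", "TrackPt", "TrackChi2PerDof", "TrackNumDof",
      "TrackGhostProbability", "TrackFitVeloChi2", "TrackFitVeloNDoF",
      "RichUsedR1Gas", "RichAbovePiThres", "RichAboveKaThres",
      "RichAbovePrThres", "RichDLLe", "RichDLLmu", "RichDLLk",
      "RichDLLp", "RichDLLbt", "InAccBrem", "BremPIDe" };

   ReadGhost_Upstream_TMVA reader( names );

   // One candidate track: 18 placeholder values in the same order as 'names'.
   std::vector<double> values = {
      5000., 350., 1.2, 10, 0.05, 3.4, 8,
      1, 1, 0, 0, -2.5, -1.0, 0.5, 0.3, -0.8, 0, -999. };

   if (reader.IsStatusClean()) {
      std::cout << "MLP ghost response = " << reader.GetMvaValue( values ) << std::endl;
   }

   return 0;
}

#endif // READGHOST_UPSTREAM_EXAMPLE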