// Class: ReadMuon_Upstream_TMVA
// Automatically generated by MethodBase::MakeClass
//

/* configuration options =====================================================

#GEN -*-*-*-*-*-*-*-*-*-*-*- general info -*-*-*-*-*-*-*-*-*-*-*-

Method          : MLP::Muon_Upstream_TMVA
TMVA Release    : 4.2.0 [262656]
ROOT Release    : 6.02/01 [393729]
Creator         : jonesc
Date            : Sun Feb 7 16:58:42 2016
Host            : Linux lcgapp-slc6-physical1.cern.ch 2.6.32-431.29.2.el6.x86_64 #1 SMP Wed Sep 10 11:13:12 CEST 2014 x86_64 x86_64 x86_64 GNU/Linux
Dir             : /var/pcfst/r03/lhcb/jonesc/ANNPID/results/MC2015Sim09Dev03/TrainMixture/TrainPhysTks-EvalPhysTks-NoReweight/GhostAccFrac1.0/ProtonAccFrac1.0/KaonAccFrac1.0/TMVA-Run2-NoTkLikCDVelodEdx/MLP/Norm/ScaleF1.4/BP/NCycles750/CE/tanh/CVTest15/CVImp1e-16/NotUseRegulator/Muon/Upstream
Training events: 2319076
Analysis type   : [Classification]

#OPT -*-*-*-*-*-*-*-*-*-*-*-*- options -*-*-*-*-*-*-*-*-*-*-*-*-

# Set by User:
NCycles: "750" [Number of training cycles]
HiddenLayers: "23" [Specification of hidden layer architecture]
NeuronType: "tanh" [Neuron activation function type]
EstimatorType: "CE" [MSE (Mean Square Estimator) for Gaussian Likelihood or CE (Cross-Entropy) for Bernoulli Likelihood]
V: "True" [Verbose output (short form of "VerbosityLevel" below - overrides the latter one)]
VarTransform: "Norm" [List of variable transformations performed before training, e.g., "D_Background,P_Signal,G,N_AllClasses" for: "Decorrelation, PCA-transformation, Gaussianisation, Normalisation, each for the given class of events ('AllClasses' denotes all events of all classes, if no class indication is given, 'All' is assumed)"]
H: "True" [Print method-specific help message]
TrainingMethod: "BP" [Train with Back-Propagation (BP), BFGS Algorithm (BFGS), or Genetic Algorithm (GA - slower and worse)]
EpochMonitoring: "True" [Provide epoch-wise monitoring plots according to TestRate (caution: causes big ROOT output file!)]
ConvergenceImprove: "1.000000e-16" [Minimum improvement which counts as improvement (<0 means automatic convergence check is turned off)]
ConvergenceTests: "15" [Number of steps (without improvement) required for convergence (<0 means automatic convergence check is turned off)]
UseRegulator: "False" [Use regulator to avoid over-training]
# Default:
RandomSeed: "1" [Random seed for initial synapse weights (0 means unique seed for each run; default value '1')]
NeuronInputType: "sum" [Neuron input function type]
VerbosityLevel: "Verbose" [Verbosity level]
CreateMVAPdfs: "False" [Create PDFs for classifier outputs (signal and background)]
IgnoreNegWeightsInTraining: "False" [Events with negative weights are ignored in the training (but are included for testing and performance evaluation)]
LearningRate: "2.000000e-02" [ANN learning rate parameter]
DecayRate: "1.000000e-02" [Decay rate for learning parameter]
TestRate: "10" [Test for overtraining performed at each #th epochs]
Sampling: "1.000000e+00" [Only 'Sampling' (randomly selected) events are trained each epoch]
SamplingEpoch: "1.000000e+00" [Sampling is used for the first 'SamplingEpoch' epochs, afterwards, all events are taken for training]
SamplingImportance: "1.000000e+00" [The sampling weights of events in epochs which are successful (worse estimator than before) are multiplied with SamplingImportance, else they are divided.]
SamplingTraining: "True" [The training sample is sampled] SamplingTesting: "False" [The testing sample is sampled] ResetStep: "50" [How often BFGS should reset history] Tau: "3.000000e+00" [LineSearch "size step"] BPMode: "sequential" [Back-propagation learning mode: sequential or batch] BatchSize: "-1" [Batch size: number of events/batch, only set if in Batch Mode, -1 for BatchSize=number_of_events] UpdateLimit: "10000" [Maximum times of regulator update] CalculateErrors: "False" [Calculates inverse Hessian matrix at the end of the training to be able to calculate the uncertainties of an MVA value] WeightRange: "1.000000e+00" [Take the events for the estimator calculations from small deviations from the desired value to large deviations only over the weight range] ## #VAR -*-*-*-*-*-*-*-*-*-*-*-* variables *-*-*-*-*-*-*-*-*-*-*-*- NVar 17 TrackP TrackP TrackP TrackP 'F' [115.480003357,4998705.5] TrackPt TrackPt TrackPt TrackPt 'F' [12.8830137253,867623.6875] TrackChi2PerDof TrackChi2PerDof TrackChi2PerDof TrackChi2PerDof 'F' [0.00195923517458,3.99922418594] TrackNumDof TrackNumDof TrackNumDof TrackNumDof 'I' [4,27] TrackGhostProbability TrackGhostProbability TrackGhostProbability TrackGhostProbability 'F' [0.00192928232718,0.399999499321] TrackFitVeloChi2 TrackFitVeloChi2 TrackFitVeloChi2 TrackFitVeloChi2 'F' [7.34402965463e-06,67.3927001953] TrackFitVeloNDoF TrackFitVeloNDoF TrackFitVeloNDoF TrackFitVeloNDoF 'I' [1,23] RichUsedR1Gas RichUsedR1Gas RichUsedR1Gas RichUsedR1Gas 'I' [0,1] RichAboveMuThres RichAboveMuThres RichAboveMuThres RichAboveMuThres 'I' [0,1] RichAboveKaThres RichAboveKaThres RichAboveKaThres RichAboveKaThres 'I' [0,1] RichDLLe RichDLLe RichDLLe RichDLLe 'F' [-999,168.813705444] RichDLLmu RichDLLmu RichDLLmu RichDLLmu 'F' [-999,114.397399902] RichDLLk RichDLLk RichDLLk RichDLLk 'F' [-999,135.562103271] RichDLLp RichDLLp RichDLLp RichDLLp 'F' [-999,123.221702576] RichDLLbt RichDLLbt RichDLLbt RichDLLbt 'F' [-999,51.957901001] InAccBrem InAccBrem InAccBrem InAccBrem 'I' [0,1] BremPIDe BremPIDe BremPIDe BremPIDe 'F' [-999,4.79148387909] NSpec 0 ============================================================================ */ #include #include #include #include #ifndef IClassifierReader__def #define IClassifierReader__def class IClassifierReader { public: // constructor IClassifierReader() : fStatusIsClean( true ) {} virtual ~IClassifierReader() {} // return classifier response virtual double GetMvaValue( const std::vector& inputValues ) const = 0; // returns classifier status bool IsStatusClean() const { return fStatusIsClean; } protected: bool fStatusIsClean; }; #endif class ReadMuon_Upstream_TMVA : public IClassifierReader { public: // constructor ReadMuon_Upstream_TMVA( std::vector& theInputVars ) : IClassifierReader(), fClassName( "ReadMuon_Upstream_TMVA" ), fNvars( 17 ), fIsNormalised( false ) { // the training input variables const char* inputVars[] = { "TrackP", "TrackPt", "TrackChi2PerDof", "TrackNumDof", "TrackGhostProbability", "TrackFitVeloChi2", "TrackFitVeloNDoF", "RichUsedR1Gas", "RichAboveMuThres", "RichAboveKaThres", "RichDLLe", "RichDLLmu", "RichDLLk", "RichDLLp", "RichDLLbt", "InAccBrem", "BremPIDe" }; // sanity checks if (theInputVars.size() <= 0) { std::cout << "Problem in class \"" << fClassName << "\": empty input vector" << std::endl; fStatusIsClean = false; } if (theInputVars.size() != fNvars) { std::cout << "Problem in class \"" << fClassName << "\": mismatch in number of input values: " << theInputVars.size() << " != " << fNvars << std::endl; 
         fStatusIsClean = false;
      }

      // validate input variables
      for (size_t ivar = 0; ivar < theInputVars.size(); ivar++) {
         if (theInputVars[ivar] != inputVars[ivar]) {
            std::cout << "Problem in class \"" << fClassName << "\": mismatch in input variable names" << std::endl
                      << " for variable [" << ivar << "]: " << theInputVars[ivar].c_str() << " != " << inputVars[ivar] << std::endl;
            fStatusIsClean = false;
         }
      }

      // initialize min and max vectors (for normalisation)
      fVmin[0] = -1;   fVmax[0] = 1;
      fVmin[1] = -1;   fVmax[1] = 0.99999988079071;
      fVmin[2] = -1;   fVmax[2] = 1;
      fVmin[3] = -1;   fVmax[3] = 1;
      fVmin[4] = -1;   fVmax[4] = 0.99999988079071;
      fVmin[5] = -1;   fVmax[5] = 1;
      fVmin[6] = -1;   fVmax[6] = 1;
      fVmin[7] = -1;   fVmax[7] = 1;
      fVmin[8] = -1;   fVmax[8] = 1;
      fVmin[9] = -1;   fVmax[9] = 1;
      fVmin[10] = -1;  fVmax[10] = 1;
      fVmin[11] = -1;  fVmax[11] = 1;
      fVmin[12] = -1;  fVmax[12] = 1;
      fVmin[13] = -1;  fVmax[13] = 1;
      fVmin[14] = -1;  fVmax[14] = 1;
      fVmin[15] = -1;  fVmax[15] = 1;
      fVmin[16] = -1;  fVmax[16] = 1;

      // initialize input variable types
      fType[0] = 'F';  fType[1] = 'F';  fType[2] = 'F';  fType[3] = 'I';
      fType[4] = 'F';  fType[5] = 'F';  fType[6] = 'I';  fType[7] = 'I';
      fType[8] = 'I';  fType[9] = 'I';  fType[10] = 'F'; fType[11] = 'F';
      fType[12] = 'F'; fType[13] = 'F'; fType[14] = 'F'; fType[15] = 'I';
      fType[16] = 'F';

      // initialize constants
      Initialize();

      // initialize transformation
      InitTransform();
   }

   // destructor
   virtual ~ReadMuon_Upstream_TMVA() {
      Clear(); // method-specific
   }

   // the classifier response
   // "inputValues" is a vector of input values in the same order as the
   // variables given to the constructor
   double GetMvaValue( const std::vector<double>& inputValues ) const;

 private:

   // method-specific destructor
   void Clear();

   // input variable transformation
   double fMin_1[3][17];
   double fMax_1[3][17];
   void InitTransform_1();
   void Transform_1( std::vector<double> & iv, int sigOrBgd ) const;
   void InitTransform();
   void Transform( std::vector<double> & iv, int sigOrBgd ) const;

   // common member variables
   const char* fClassName;

   const size_t fNvars;
   size_t GetNvar()           const { return fNvars; }
   char   GetType( int ivar ) const { return fType[ivar]; }

   // normalisation of input variables
   const bool fIsNormalised;
   bool IsNormalised() const { return fIsNormalised; }
   double fVmin[17];
   double fVmax[17];
   double NormVariable( double x, double xmin, double xmax ) const {
      // normalise to output range: [-1, 1]
      return 2*(x - xmin)/(xmax - xmin) - 1.0;
   }

   // type of input variable: 'F' or 'I'
   char   fType[17];

   // initialize internal variables
   void Initialize();
   double GetMvaValue__( const std::vector<double>& inputValues ) const;

   // private members (method specific)
   double ActivationFnc(double x) const;
   double OutputActivationFnc(double x) const;

   int fLayers;
   int fLayerSize[3];
   double fWeightMatrix0to1[24][18];   // weight matrix from layer 0 to 1
   double fWeightMatrix1to2[1][24];    // weight matrix from layer 1 to 2

   double * fWeights[3];
};

inline void ReadMuon_Upstream_TMVA::Initialize()
{
   // build network structure
   fLayers = 3;
   fLayerSize[0] = 18; fWeights[0] = new double[18];
   fLayerSize[1] = 24; fWeights[1] = new double[24];
   fLayerSize[2] = 1;  fWeights[2] = new double[1];
   // weight matrix from layer 0 to 1
   fWeightMatrix0to1[0][0] = 8.27844604156071;
   fWeightMatrix0to1[1][0] = 12.4930138857967;
   fWeightMatrix0to1[2][0] = 0.292320139283172;
   fWeightMatrix0to1[3][0] = 1.37174202623769;
   fWeightMatrix0to1[4][0] = -2.22231088558501;
   fWeightMatrix0to1[5][0] = -1.40493687075812;
   fWeightMatrix0to1[6][0] = -1.30510683590076;
   fWeightMatrix0to1[7][0] = 14.9628609238625;
fWeightMatrix0to1[8][0] = -2.53037637129475; fWeightMatrix0to1[9][0] = 0.477450367510379; fWeightMatrix0to1[10][0] = -1.41017678721692; fWeightMatrix0to1[11][0] = -1.59698587942164; fWeightMatrix0to1[12][0] = -2.30035848020844; fWeightMatrix0to1[13][0] = -3.6645667099464; fWeightMatrix0to1[14][0] = 0.192463498008239; fWeightMatrix0to1[15][0] = 4.87690360948491; fWeightMatrix0to1[16][0] = -0.864311182835443; fWeightMatrix0to1[17][0] = 4.38212149036151; fWeightMatrix0to1[18][0] = -0.00545282277496099; fWeightMatrix0to1[19][0] = 7.90882023624667; fWeightMatrix0to1[20][0] = -0.312319353091476; fWeightMatrix0to1[21][0] = -1.47918262347716; fWeightMatrix0to1[22][0] = 5.47465864877851; fWeightMatrix0to1[0][1] = 7.76971286077512; fWeightMatrix0to1[1][1] = -2.42172897711482; fWeightMatrix0to1[2][1] = 0.681714087561752; fWeightMatrix0to1[3][1] = 1.09171360982114; fWeightMatrix0to1[4][1] = -1.81238954043273; fWeightMatrix0to1[5][1] = -2.22796419534448; fWeightMatrix0to1[6][1] = 0.146000789015397; fWeightMatrix0to1[7][1] = 13.1478915928417; fWeightMatrix0to1[8][1] = 2.59812493627642; fWeightMatrix0to1[9][1] = -0.0465472525688054; fWeightMatrix0to1[10][1] = -0.0246795112175639; fWeightMatrix0to1[11][1] = 0.94989769873935; fWeightMatrix0to1[12][1] = -1.86989017900336; fWeightMatrix0to1[13][1] = -4.40145019517728; fWeightMatrix0to1[14][1] = 1.63889228676698; fWeightMatrix0to1[15][1] = -1.96475473149392; fWeightMatrix0to1[16][1] = 1.91726317358645; fWeightMatrix0to1[17][1] = 1.4034518688722; fWeightMatrix0to1[18][1] = 0.699094797242323; fWeightMatrix0to1[19][1] = 0.930843909171159; fWeightMatrix0to1[20][1] = 1.76328830993545; fWeightMatrix0to1[21][1] = 1.97612588378236; fWeightMatrix0to1[22][1] = -1.52975272374944; fWeightMatrix0to1[0][2] = 0.0558818850663311; fWeightMatrix0to1[1][2] = 0.550153454478626; fWeightMatrix0to1[2][2] = 0.61652779406227; fWeightMatrix0to1[3][2] = 0.804214956433605; fWeightMatrix0to1[4][2] = 1.5954660518811; fWeightMatrix0to1[5][2] = 1.02026814142241; fWeightMatrix0to1[6][2] = -2.01007756698079; fWeightMatrix0to1[7][2] = -0.0472550182852937; fWeightMatrix0to1[8][2] = 0.609423996415693; fWeightMatrix0to1[9][2] = -0.614107070298528; fWeightMatrix0to1[10][2] = -0.272254375015121; fWeightMatrix0to1[11][2] = -2.64777823805848; fWeightMatrix0to1[12][2] = -1.15957769812559; fWeightMatrix0to1[13][2] = -1.64469520938987; fWeightMatrix0to1[14][2] = -3.03788279671327; fWeightMatrix0to1[15][2] = 0.69293285504818; fWeightMatrix0to1[16][2] = 2.73954357674412; fWeightMatrix0to1[17][2] = 0.799410494910091; fWeightMatrix0to1[18][2] = 2.08842553688671; fWeightMatrix0to1[19][2] = 0.616511465100171; fWeightMatrix0to1[20][2] = 0.178776983324494; fWeightMatrix0to1[21][2] = 0.270053363353669; fWeightMatrix0to1[22][2] = 0.032737270916426; fWeightMatrix0to1[0][3] = -0.021360692341871; fWeightMatrix0to1[1][3] = -4.09230954365723; fWeightMatrix0to1[2][3] = -2.91824753324614; fWeightMatrix0to1[3][3] = 1.77379547505791; fWeightMatrix0to1[4][3] = 1.05107663516995; fWeightMatrix0to1[5][3] = 2.7017327367253; fWeightMatrix0to1[6][3] = -4.71993015646242; fWeightMatrix0to1[7][3] = 1.03826857779098; fWeightMatrix0to1[8][3] = -5.00420325762582; fWeightMatrix0to1[9][3] = 4.7739290646645; fWeightMatrix0to1[10][3] = -1.2019594253449; fWeightMatrix0to1[11][3] = 3.01447472819111; fWeightMatrix0to1[12][3] = 3.36031873432589; fWeightMatrix0to1[13][3] = 0.121386526356351; fWeightMatrix0to1[14][3] = -2.86764394994862; fWeightMatrix0to1[15][3] = -0.757496092084928; fWeightMatrix0to1[16][3] = 0.709656617948875; 
fWeightMatrix0to1[17][3] = 0.233030005788407; fWeightMatrix0to1[18][3] = -0.822735710535691; fWeightMatrix0to1[19][3] = -4.28808385081635; fWeightMatrix0to1[20][3] = -5.08717516581476; fWeightMatrix0to1[21][3] = -2.21130799975283; fWeightMatrix0to1[22][3] = -3.65781221114911; fWeightMatrix0to1[0][4] = 1.6023532637822; fWeightMatrix0to1[1][4] = -0.259939110325997; fWeightMatrix0to1[2][4] = 0.140604360809207; fWeightMatrix0to1[3][4] = 1.70376865512201; fWeightMatrix0to1[4][4] = 0.133273022773045; fWeightMatrix0to1[5][4] = -2.07311219006572; fWeightMatrix0to1[6][4] = -0.171240181516633; fWeightMatrix0to1[7][4] = -0.359787702469201; fWeightMatrix0to1[8][4] = -0.589239562892261; fWeightMatrix0to1[9][4] = -0.511576673607275; fWeightMatrix0to1[10][4] = 0.374438902911342; fWeightMatrix0to1[11][4] = 1.64180035533432; fWeightMatrix0to1[12][4] = -0.330546141680179; fWeightMatrix0to1[13][4] = 11.5261967671856; fWeightMatrix0to1[14][4] = -0.687736900441418; fWeightMatrix0to1[15][4] = 0.189901679164845; fWeightMatrix0to1[16][4] = 0.398726595187026; fWeightMatrix0to1[17][4] = 0.438910619876599; fWeightMatrix0to1[18][4] = 0.712795263514285; fWeightMatrix0to1[19][4] = -0.269533228000266; fWeightMatrix0to1[20][4] = -0.123196443382705; fWeightMatrix0to1[21][4] = -0.670018249870702; fWeightMatrix0to1[22][4] = -0.837949355295196; fWeightMatrix0to1[0][5] = 0.200459640426408; fWeightMatrix0to1[1][5] = -1.59714712076258; fWeightMatrix0to1[2][5] = -2.39471592364855; fWeightMatrix0to1[3][5] = 1.52748280779309; fWeightMatrix0to1[4][5] = -0.592719870545992; fWeightMatrix0to1[5][5] = -2.28380612256549; fWeightMatrix0to1[6][5] = -0.883268609301212; fWeightMatrix0to1[7][5] = 0.799522291805452; fWeightMatrix0to1[8][5] = -1.24978325680404; fWeightMatrix0to1[9][5] = 0.903852177411386; fWeightMatrix0to1[10][5] = -0.207555106038755; fWeightMatrix0to1[11][5] = 0.923111783201632; fWeightMatrix0to1[12][5] = 0.356914144546428; fWeightMatrix0to1[13][5] = 1.66071677973216; fWeightMatrix0to1[14][5] = 2.10585071863447; fWeightMatrix0to1[15][5] = -1.53721183175072; fWeightMatrix0to1[16][5] = -2.37593032711293; fWeightMatrix0to1[17][5] = -1.48363293354866; fWeightMatrix0to1[18][5] = -1.27030632019651; fWeightMatrix0to1[19][5] = -1.26321348937011; fWeightMatrix0to1[20][5] = -0.115430573081817; fWeightMatrix0to1[21][5] = -0.633471068432915; fWeightMatrix0to1[22][5] = -0.197050168797235; fWeightMatrix0to1[0][6] = 0.254948687802426; fWeightMatrix0to1[1][6] = 4.70446018228277; fWeightMatrix0to1[2][6] = 3.53666005428682; fWeightMatrix0to1[3][6] = 1.26811830037061; fWeightMatrix0to1[4][6] = -1.1148341920816; fWeightMatrix0to1[5][6] = 0.440492808312253; fWeightMatrix0to1[6][6] = -2.43277984158286; fWeightMatrix0to1[7][6] = -1.3163165821786; fWeightMatrix0to1[8][6] = 1.11699417591325; fWeightMatrix0to1[9][6] = -5.7456734091468; fWeightMatrix0to1[10][6] = -2.57404303323507; fWeightMatrix0to1[11][6] = -0.375290662882221; fWeightMatrix0to1[12][6] = -0.165030675078395; fWeightMatrix0to1[13][6] = -0.120847569405356; fWeightMatrix0to1[14][6] = 1.47867845384646; fWeightMatrix0to1[15][6] = 0.061348879065846; fWeightMatrix0to1[16][6] = 2.45265222680852; fWeightMatrix0to1[17][6] = 0.801374047028574; fWeightMatrix0to1[18][6] = 1.29882616647271; fWeightMatrix0to1[19][6] = 1.58607564084611; fWeightMatrix0to1[20][6] = -0.77109644976718; fWeightMatrix0to1[21][6] = -1.93173333516321; fWeightMatrix0to1[22][6] = 4.13882424877021; fWeightMatrix0to1[0][7] = 0.109343934394787; fWeightMatrix0to1[1][7] = 0.429469886866112; fWeightMatrix0to1[2][7] = 1.04003698295856; 
fWeightMatrix0to1[3][7] = 0.646020800746719; fWeightMatrix0to1[4][7] = -1.10414123088043; fWeightMatrix0to1[5][7] = -0.64430231703683; fWeightMatrix0to1[6][7] = 1.23244813594478; fWeightMatrix0to1[7][7] = -0.53865720222091; fWeightMatrix0to1[8][7] = 0.533238948120572; fWeightMatrix0to1[9][7] = 3.08172804319531; fWeightMatrix0to1[10][7] = 1.38422937626558; fWeightMatrix0to1[11][7] = -0.580318827474502; fWeightMatrix0to1[12][7] = 2.59387505092251; fWeightMatrix0to1[13][7] = 4.60963483641308; fWeightMatrix0to1[14][7] = -2.30543956712405; fWeightMatrix0to1[15][7] = 0.145792738282646; fWeightMatrix0to1[16][7] = 0.300092696505985; fWeightMatrix0to1[17][7] = 0.520163223961437; fWeightMatrix0to1[18][7] = -2.19009949183937; fWeightMatrix0to1[19][7] = -0.979536035785639; fWeightMatrix0to1[20][7] = 1.21797286853831; fWeightMatrix0to1[21][7] = -2.96066685115528; fWeightMatrix0to1[22][7] = -1.92375326559202; fWeightMatrix0to1[0][8] = 2.6974156506256; fWeightMatrix0to1[1][8] = -0.524521237898501; fWeightMatrix0to1[2][8] = -1.61168986373948; fWeightMatrix0to1[3][8] = 1.41222601974301; fWeightMatrix0to1[4][8] = 0.0425036028030682; fWeightMatrix0to1[5][8] = -0.932008729331906; fWeightMatrix0to1[6][8] = 0.336013511111394; fWeightMatrix0to1[7][8] = 2.5704004048601; fWeightMatrix0to1[8][8] = 2.71831786064701; fWeightMatrix0to1[9][8] = 0.205894601657144; fWeightMatrix0to1[10][8] = 1.6216725301387; fWeightMatrix0to1[11][8] = -0.149196197599877; fWeightMatrix0to1[12][8] = -2.59620219456242; fWeightMatrix0to1[13][8] = -0.19020201589807; fWeightMatrix0to1[14][8] = 0.150693839121365; fWeightMatrix0to1[15][8] = -1.89661796953752; fWeightMatrix0to1[16][8] = 0.0489520363673157; fWeightMatrix0to1[17][8] = 0.340327470883727; fWeightMatrix0to1[18][8] = -1.18734899748192; fWeightMatrix0to1[19][8] = -3.52016681146738; fWeightMatrix0to1[20][8] = 1.48011280968206; fWeightMatrix0to1[21][8] = 1.2716114338585; fWeightMatrix0to1[22][8] = -0.542551404464083; fWeightMatrix0to1[0][9] = -0.569146964035778; fWeightMatrix0to1[1][9] = 0.564003650635449; fWeightMatrix0to1[2][9] = 2.31271507373822; fWeightMatrix0to1[3][9] = 0.741429232820061; fWeightMatrix0to1[4][9] = 0.780921225609781; fWeightMatrix0to1[5][9] = -1.02593118729794; fWeightMatrix0to1[6][9] = -0.649847893996072; fWeightMatrix0to1[7][9] = 0.2426878956473; fWeightMatrix0to1[8][9] = -0.135707182131085; fWeightMatrix0to1[9][9] = -0.0579005791995549; fWeightMatrix0to1[10][9] = 0.930288678518278; fWeightMatrix0to1[11][9] = 0.689516033945267; fWeightMatrix0to1[12][9] = -2.75051794630389; fWeightMatrix0to1[13][9] = 0.16018046918559; fWeightMatrix0to1[14][9] = 0.116101997898761; fWeightMatrix0to1[15][9] = -1.95990656503066; fWeightMatrix0to1[16][9] = -0.0471273635370161; fWeightMatrix0to1[17][9] = 1.72608418544214; fWeightMatrix0to1[18][9] = -1.21020800513794; fWeightMatrix0to1[19][9] = -0.82391208456918; fWeightMatrix0to1[20][9] = 1.69822360939009; fWeightMatrix0to1[21][9] = -0.726725460092839; fWeightMatrix0to1[22][9] = 0.219117562896446; fWeightMatrix0to1[0][10] = 3.83197021559303; fWeightMatrix0to1[1][10] = -1.75709735792081; fWeightMatrix0to1[2][10] = -2.29880751116655; fWeightMatrix0to1[3][10] = -1.41724482936754; fWeightMatrix0to1[4][10] = 1.86292869210975; fWeightMatrix0to1[5][10] = -1.66987984570532; fWeightMatrix0to1[6][10] = -3.15228941333419; fWeightMatrix0to1[7][10] = 8.26817713265503; fWeightMatrix0to1[8][10] = 6.83247837732025; fWeightMatrix0to1[9][10] = -10.850638303295; fWeightMatrix0to1[10][10] = -0.829000625366227; fWeightMatrix0to1[11][10] = -0.0607869439737502; 
fWeightMatrix0to1[12][10] = -1.1797959503419; fWeightMatrix0to1[13][10] = -2.49341328431632; fWeightMatrix0to1[14][10] = -2.24081197317974; fWeightMatrix0to1[15][10] = 0.352683677554339; fWeightMatrix0to1[16][10] = -2.88104983519647; fWeightMatrix0to1[17][10] = -1.72844520798586; fWeightMatrix0to1[18][10] = 1.05749073124793; fWeightMatrix0to1[19][10] = -5.33223125186186; fWeightMatrix0to1[20][10] = -3.19067803636892; fWeightMatrix0to1[21][10] = -21.9612085380141; fWeightMatrix0to1[22][10] = -12.2448585582624; fWeightMatrix0to1[0][11] = 20.4656244392294; fWeightMatrix0to1[1][11] = 0.915712970924146; fWeightMatrix0to1[2][11] = -2.55543567127084; fWeightMatrix0to1[3][11] = -1.78595549081913; fWeightMatrix0to1[4][11] = -1.21792611987465; fWeightMatrix0to1[5][11] = -1.03101037093457; fWeightMatrix0to1[6][11] = -0.0445331091709564; fWeightMatrix0to1[7][11] = 29.6695934103018; fWeightMatrix0to1[8][11] = 8.50223330268271; fWeightMatrix0to1[9][11] = 4.08891108577643; fWeightMatrix0to1[10][11] = -0.0637249366861839; fWeightMatrix0to1[11][11] = 0.0845771523312635; fWeightMatrix0to1[12][11] = -2.27722281707144; fWeightMatrix0to1[13][11] = -3.1965041443248; fWeightMatrix0to1[14][11] = 1.17256856991093; fWeightMatrix0to1[15][11] = 0.887516363005408; fWeightMatrix0to1[16][11] = 1.00609595852092; fWeightMatrix0to1[17][11] = -1.14065894280196; fWeightMatrix0to1[18][11] = 0.326794795540469; fWeightMatrix0to1[19][11] = 1.55338055330676; fWeightMatrix0to1[20][11] = 1.09488268870525; fWeightMatrix0to1[21][11] = 21.733249301976; fWeightMatrix0to1[22][11] = 24.582802937064; fWeightMatrix0to1[0][12] = 4.89525041048898; fWeightMatrix0to1[1][12] = 1.63346824779654; fWeightMatrix0to1[2][12] = 2.36637589191845; fWeightMatrix0to1[3][12] = -0.601246397694267; fWeightMatrix0to1[4][12] = -0.400627884227822; fWeightMatrix0to1[5][12] = 0.369246971931988; fWeightMatrix0to1[6][12] = -1.34374935715071; fWeightMatrix0to1[7][12] = 1.944470035356; fWeightMatrix0to1[8][12] = -7.16732347630869; fWeightMatrix0to1[9][12] = -1.00732475721663; fWeightMatrix0to1[10][12] = -1.24544828931182; fWeightMatrix0to1[11][12] = 1.82516432404908; fWeightMatrix0to1[12][12] = -2.92823777238158; fWeightMatrix0to1[13][12] = 0.800600863353131; fWeightMatrix0to1[14][12] = -1.79580215710265; fWeightMatrix0to1[15][12] = 1.44201322090794; fWeightMatrix0to1[16][12] = 0.203712777954821; fWeightMatrix0to1[17][12] = 0.723608607509791; fWeightMatrix0to1[18][12] = 0.503756571112511; fWeightMatrix0to1[19][12] = -1.57722625713562; fWeightMatrix0to1[20][12] = 0.404444751904935; fWeightMatrix0to1[21][12] = 0.619352470790272; fWeightMatrix0to1[22][12] = -2.76261882939383; fWeightMatrix0to1[0][13] = -0.252879356125427; fWeightMatrix0to1[1][13] = -0.437612065022452; fWeightMatrix0to1[2][13] = 1.04581916518244; fWeightMatrix0to1[3][13] = 1.53585786619245; fWeightMatrix0to1[4][13] = -0.811271025867375; fWeightMatrix0to1[5][13] = 0.709321365941228; fWeightMatrix0to1[6][13] = -1.32010094453533; fWeightMatrix0to1[7][13] = -0.983617247175305; fWeightMatrix0to1[8][13] = -8.72035686226833; fWeightMatrix0to1[9][13] = -0.358945303200967; fWeightMatrix0to1[10][13] = -1.11715457760152; fWeightMatrix0to1[11][13] = 0.875448571395364; fWeightMatrix0to1[12][13] = -2.46248602642156; fWeightMatrix0to1[13][13] = -1.81449279666613; fWeightMatrix0to1[14][13] = 1.54639600098854; fWeightMatrix0to1[15][13] = -0.0132155216860636; fWeightMatrix0to1[16][13] = 0.419144315040368; fWeightMatrix0to1[17][13] = 2.05935292976382; fWeightMatrix0to1[18][13] = -2.15105510592669; fWeightMatrix0to1[19][13] 
= 1.68949481634342; fWeightMatrix0to1[20][13] = 1.25097539702709; fWeightMatrix0to1[21][13] = -1.31746683288566; fWeightMatrix0to1[22][13] = -2.63589609465273; fWeightMatrix0to1[0][14] = -3.70227859107051; fWeightMatrix0to1[1][14] = -0.377161203524253; fWeightMatrix0to1[2][14] = 1.98009596089814; fWeightMatrix0to1[3][14] = 1.49770924594275; fWeightMatrix0to1[4][14] = 1.9119277327335; fWeightMatrix0to1[5][14] = 2.8373619501422; fWeightMatrix0to1[6][14] = -2.21198088872186; fWeightMatrix0to1[7][14] = -7.1827434421322; fWeightMatrix0to1[8][14] = -10.3508586221326; fWeightMatrix0to1[9][14] = 1.61079226196464; fWeightMatrix0to1[10][14] = -1.36392798650645; fWeightMatrix0to1[11][14] = -1.74748572897522; fWeightMatrix0to1[12][14] = -1.16566535308082; fWeightMatrix0to1[13][14] = 1.66861344569314; fWeightMatrix0to1[14][14] = 0.143806250743708; fWeightMatrix0to1[15][14] = -0.467595914571476; fWeightMatrix0to1[16][14] = 0.0219058275751764; fWeightMatrix0to1[17][14] = 0.932144437995619; fWeightMatrix0to1[18][14] = -1.09459512293176; fWeightMatrix0to1[19][14] = 0.532558937871113; fWeightMatrix0to1[20][14] = 2.09990755181243; fWeightMatrix0to1[21][14] = -2.45161950864974; fWeightMatrix0to1[22][14] = -5.19373970342936; fWeightMatrix0to1[0][15] = -2.04312867658086; fWeightMatrix0to1[1][15] = -4.42655557366236; fWeightMatrix0to1[2][15] = 0.499001798166936; fWeightMatrix0to1[3][15] = 0.635559732953146; fWeightMatrix0to1[4][15] = -2.24735600538785; fWeightMatrix0to1[5][15] = 1.17763843992775; fWeightMatrix0to1[6][15] = 3.28553010121297; fWeightMatrix0to1[7][15] = 0.313644042183877; fWeightMatrix0to1[8][15] = 0.160151568740332; fWeightMatrix0to1[9][15] = -1.62441947650136; fWeightMatrix0to1[10][15] = -0.896559901208501; fWeightMatrix0to1[11][15] = -1.79629567224793; fWeightMatrix0to1[12][15] = 1.31069680774303; fWeightMatrix0to1[13][15] = 0.0680866138982693; fWeightMatrix0to1[14][15] = -0.402190025991293; fWeightMatrix0to1[15][15] = 0.751969162403045; fWeightMatrix0to1[16][15] = -1.4196152181409; fWeightMatrix0to1[17][15] = -0.725535721259109; fWeightMatrix0to1[18][15] = -1.76756471809015; fWeightMatrix0to1[19][15] = 1.87673319395126; fWeightMatrix0to1[20][15] = 0.437716265597463; fWeightMatrix0to1[21][15] = -1.72138733314324; fWeightMatrix0to1[22][15] = -1.01117092675373; fWeightMatrix0to1[0][16] = -1.72762619514679; fWeightMatrix0to1[1][16] = -5.55654943604515; fWeightMatrix0to1[2][16] = 1.50988985233005; fWeightMatrix0to1[3][16] = -0.993457103084397; fWeightMatrix0to1[4][16] = -2.23208764502155; fWeightMatrix0to1[5][16] = 1.91333820235182; fWeightMatrix0to1[6][16] = 2.16271176824734; fWeightMatrix0to1[7][16] = -0.247776389796031; fWeightMatrix0to1[8][16] = 0.0882249073537904; fWeightMatrix0to1[9][16] = 0.948098805167743; fWeightMatrix0to1[10][16] = 2.26916284408841; fWeightMatrix0to1[11][16] = 0.445130305857064; fWeightMatrix0to1[12][16] = -0.613168864695872; fWeightMatrix0to1[13][16] = 0.0270572178376619; fWeightMatrix0to1[14][16] = -1.00914149720418; fWeightMatrix0to1[15][16] = 1.15126125176857; fWeightMatrix0to1[16][16] = 1.12275897247649; fWeightMatrix0to1[17][16] = 0.51004096911343; fWeightMatrix0to1[18][16] = 0.94382165415713; fWeightMatrix0to1[19][16] = 1.6908521235975; fWeightMatrix0to1[20][16] = 3.38855543515955; fWeightMatrix0to1[21][16] = -1.46336141106172; fWeightMatrix0to1[22][16] = 0.959377520577906; fWeightMatrix0to1[0][17] = -0.0498511619851799; fWeightMatrix0to1[1][17] = 0.0982594005613848; fWeightMatrix0to1[2][17] = -0.606689030007913; fWeightMatrix0to1[3][17] = -1.42012527046113; 
   fWeightMatrix0to1[4][17] = 0.0416544335585856;
   fWeightMatrix0to1[5][17] = 1.78434660514065;
   fWeightMatrix0to1[6][17] = 1.19356433817476;
   fWeightMatrix0to1[7][17] = 1.8567239072981;
   fWeightMatrix0to1[8][17] = 0.73835981039782;
   fWeightMatrix0to1[9][17] = 3.89632716942624;
   fWeightMatrix0to1[10][17] = 2.20178936868578;
   fWeightMatrix0to1[11][17] = -0.833460395468362;
   fWeightMatrix0to1[12][17] = -0.117450345220823;
   fWeightMatrix0to1[13][17] = 2.92549934514453;
   fWeightMatrix0to1[14][17] = -1.8749054347486;
   fWeightMatrix0to1[15][17] = 2.99376978749094;
   fWeightMatrix0to1[16][17] = 2.38684028995556;
   fWeightMatrix0to1[17][17] = 0.114847471855227;
   fWeightMatrix0to1[18][17] = -0.660626388980805;
   fWeightMatrix0to1[19][17] = 1.66417808675913;
   fWeightMatrix0to1[20][17] = -1.71674773855279;
   fWeightMatrix0to1[21][17] = 1.15989948362539;
   fWeightMatrix0to1[22][17] = 4.18526619273097;
   // weight matrix from layer 1 to 2
   fWeightMatrix1to2[0][0] = 0.760778583847534;
   fWeightMatrix1to2[0][1] = 2.125010518574;
   fWeightMatrix1to2[0][2] = 1.32528059287184;
   fWeightMatrix1to2[0][3] = 0.393026373936325;
   fWeightMatrix1to2[0][4] = -0.847122965512068;
   fWeightMatrix1to2[0][5] = -0.951783756915468;
   fWeightMatrix1to2[0][6] = 0.940107569682303;
   fWeightMatrix1to2[0][7] = 1.33170443522257;
   fWeightMatrix1to2[0][8] = 1.36582675742056;
   fWeightMatrix1to2[0][9] = -3.81323937228236;
   fWeightMatrix1to2[0][10] = -1.03050219512319;
   fWeightMatrix1to2[0][11] = -0.391679244433009;
   fWeightMatrix1to2[0][12] = -0.635554020816884;
   fWeightMatrix1to2[0][13] = -0.646274602840118;
   fWeightMatrix1to2[0][14] = 0.503218254498515;
   fWeightMatrix1to2[0][15] = 1.51688094663124;
   fWeightMatrix1to2[0][16] = 0.526783309455154;
   fWeightMatrix1to2[0][17] = -3.68728944088083;
   fWeightMatrix1to2[0][18] = -1.21978632671606;
   fWeightMatrix1to2[0][19] = 0.897005623992399;
   fWeightMatrix1to2[0][20] = 0.997784205608567;
   fWeightMatrix1to2[0][21] = 1.18967850417869;
   fWeightMatrix1to2[0][22] = 0.748299613883214;
   fWeightMatrix1to2[0][23] = -2.20359161677778;
}

inline double ReadMuon_Upstream_TMVA::GetMvaValue__( const std::vector<double>& inputValues ) const
{
   if (inputValues.size() != (unsigned int)fLayerSize[0]-1) {
      std::cout << "Input vector needs to be of size " << fLayerSize[0]-1 << std::endl;
      return 0;
   }

   // reset all node values and set the bias nodes to 1
   for (int l=0; l<fLayers; l++)
      for (int i=0; i<fLayerSize[l]; i++) fWeights[l][i]=0;

   for (int l=0; l<fLayers-1; l++)
      fWeights[l][fLayerSize[l]-1]=1;

   // copy the (transformed) inputs onto the input layer
   for (int i=0; i<fLayerSize[0]-1; i++)
      fWeights[0][i]=inputValues[i];

   // forward propagation: layer 0 to 1 (23 hidden tanh nodes + bias)
   for (int o=0; o<fLayerSize[1]-1; o++) {
      for (int i=0; i<fLayerSize[0]; i++) {
         double inputVal = fWeightMatrix0to1[o][i] * fWeights[0][i];
         fWeights[1][o] += inputVal;
      }
      fWeights[1][o] = ActivationFnc(fWeights[1][o]);
   }
   // forward propagation: layer 1 to 2 (single output node)
   for (int o=0; o<fLayerSize[2]; o++) {
      for (int i=0; i<fLayerSize[1]; i++) {
         double inputVal = fWeightMatrix1to2[o][i] * fWeights[1][i];
         fWeights[2][o] += inputVal;
      }
      fWeights[2][o] = OutputActivationFnc(fWeights[2][o]);
   }

   return fWeights[2][0];
}

inline double ReadMuon_Upstream_TMVA::ActivationFnc(double x) const {
   // hyperbolic tangent (NeuronType: "tanh")
   return tanh(x);
}

inline double ReadMuon_Upstream_TMVA::OutputActivationFnc(double x) const {
   // sigmoid output, matching the CE (cross-entropy) estimator
   return 1.0/(1.0+exp(-x));
}

// Clean up
inline void ReadMuon_Upstream_TMVA::Clear()
{
   // clean up the node arrays allocated in Initialize()
   for (int lIdx = 0; lIdx < 3; lIdx++) {
      delete[] fWeights[lIdx];
   }
}

inline double ReadMuon_Upstream_TMVA::GetMvaValue( const std::vector<double>& inputValues ) const
{
   // classifier response value
   double retval = 0;

   // classifier response, sanity check first
   if (!IsStatusClean()) {
      std::cout << "Problem in class \"" << fClassName << "\": cannot return classifier response"
                << " because status is dirty" << std::endl;
      retval = 0;
   }
   else {
      if (IsNormalised()) {
         // normalise variables
         std::vector<double> iV;
         iV.reserve(inputValues.size());
         int ivar = 0;
         for (std::vector<double>::const_iterator varIt = inputValues.begin();
              varIt != inputValues.end(); varIt++, ivar++) {
            iV.push_back(NormVariable( *varIt, fVmin[ivar], fVmax[ivar] ));
         }
         Transform( iV, -1 );
         retval = GetMvaValue__( iV );
      }
      else {
         std::vector<double> iV;
         int ivar = 0;
         for (std::vector<double>::const_iterator varIt = inputValues.begin();
              varIt != inputValues.end(); varIt++, ivar++) {
            iV.push_back(*varIt);
         }
         Transform( iV, -1 );
         retval = GetMvaValue__( iV );
      }
   }

   return retval;
}

//_______________________________________________________________________
inline void ReadMuon_Upstream_TMVA::InitTransform_1()
{
   // Normalization transformation, initialisation
   fMin_1[0][0] = 115.480003357;     fMax_1[0][0] = 4998705.5;
   fMin_1[1][0] = 319.540008545;     fMax_1[1][0] = 4997450.5;
   fMin_1[2][0] = 115.480003357;     fMax_1[2][0] = 4998705.5;
   fMin_1[0][1] = 12.8830137253;     fMax_1[0][1] = 867623.6875;
   fMin_1[1][1] = 27.7397117615;     fMax_1[1][1] = 303759.5;
   fMin_1[2][1] = 12.8830137253;     fMax_1[2][1] = 867623.6875;
   fMin_1[0][2] = 0.00195923517458;  fMax_1[0][2] = 3.99922418594;
   fMin_1[1][2] = 0.00685651274398;  fMax_1[1][2] = 3.98896718025;
   fMin_1[2][2] = 0.00195923517458;  fMax_1[2][2] = 3.99922418594;
   fMin_1[0][3] = 4;                 fMax_1[0][3] = 27;
   fMin_1[1][3] = 4;                 fMax_1[1][3] = 23;
   fMin_1[2][3] = 4;                 fMax_1[2][3] = 27;
   fMin_1[0][4] = 0.00192928232718;  fMax_1[0][4] = 0.399999499321;
   fMin_1[1][4] = 0.00202400749549;  fMax_1[1][4] = 0.399997442961;
   fMin_1[2][4] = 0.00192928232718;  fMax_1[2][4] = 0.399999499321;
   fMin_1[0][5] = 7.34402965463e-06; fMax_1[0][5] = 67.3927001953;
   fMin_1[1][5] = 0.00113507476635;  fMax_1[1][5] = 35.4613189697;
   fMin_1[2][5] = 7.34402965463e-06; fMax_1[2][5] = 67.3927001953;
   fMin_1[0][6] = 1;                 fMax_1[0][6] = 23;
   fMin_1[1][6] = 1;                 fMax_1[1][6] = 19;
   fMin_1[2][6] = 1;                 fMax_1[2][6] = 23;
   fMin_1[0][7] = 0;                 fMax_1[0][7] = 1;
   fMin_1[1][7] = 0;                 fMax_1[1][7] = 1;
   fMin_1[2][7] = 0;                 fMax_1[2][7] = 1;
   fMin_1[0][8] = 0;                 fMax_1[0][8] = 1;
   fMin_1[1][8] = 0;                 fMax_1[1][8] = 1;
   fMin_1[2][8] = 0;                 fMax_1[2][8] = 1;
   fMin_1[0][9] = 0;                 fMax_1[0][9] = 1;
   fMin_1[1][9] = 0;                 fMax_1[1][9] = 1;
   fMin_1[2][9] = 0;                 fMax_1[2][9] = 1;
   fMin_1[0][10] = -999;             fMax_1[0][10] = 168.813705444;
   fMin_1[1][10] = -999;             fMax_1[1][10] = 118.038902283;
   fMin_1[2][10] = -999;             fMax_1[2][10] = 168.813705444;
   fMin_1[0][11] = -999;             fMax_1[0][11] = 101.03150177;
   fMin_1[1][11] = -999;             fMax_1[1][11] = 114.397399902;
   fMin_1[2][11] = -999;             fMax_1[2][11] = 114.397399902;
   fMin_1[0][12] = -999;             fMax_1[0][12] = 135.562103271;
   fMin_1[1][12] = -999;             fMax_1[1][12] = 87.7425994873;
   fMin_1[2][12] = -999;             fMax_1[2][12] = 135.562103271;
   fMin_1[0][13] = -999;             fMax_1[0][13] = 123.221702576;
   fMin_1[1][13] = -999;             fMax_1[1][13] = 104.361602783;
   fMin_1[2][13] = -999;             fMax_1[2][13] = 123.221702576;
   fMin_1[0][14] = -999;             fMax_1[0][14] = 51.957901001;
   fMin_1[1][14] = -999;             fMax_1[1][14] = 45.4137001038;
   fMin_1[2][14] = -999;             fMax_1[2][14] = 51.957901001;
   fMin_1[0][15] = 0;                fMax_1[0][15] = 1;
   fMin_1[1][15] = 0;                fMax_1[1][15] = 1;
   fMin_1[2][15] = 0;                fMax_1[2][15] = 1;
   fMin_1[0][16] = -999;             fMax_1[0][16] = 4.79148387909;
   fMin_1[1][16] = -999;             fMax_1[1][16] = 4.49353694916;
   fMin_1[2][16] = -999;             fMax_1[2][16] = 4.79148387909;
}

//_______________________________________________________________________
inline void ReadMuon_Upstream_TMVA::Transform_1( std::vector<double>& iv, int cls) const
{
   // Normalization transformation
   // events without a valid class index use the "all classes" statistics (index 2)
   if (cls < 0 || cls > 2) {
      if (2 > 1 ) cls = 2;
      else cls = 2;
   }
   const int nVar = 17;

   // get indices of used variables
   // define the indices of the variables which are transformed by this transformation
   static std::vector<int> indicesGet;
   static std::vector<int> indicesPut;

   if ( indicesGet.empty() ) {
      indicesGet.reserve(fNvars);
      indicesGet.push_back( 0);
      indicesGet.push_back( 1);
      indicesGet.push_back( 2);
      indicesGet.push_back( 3);
      indicesGet.push_back( 4);
      indicesGet.push_back( 5);
      indicesGet.push_back( 6);
      indicesGet.push_back( 7);
      indicesGet.push_back( 8);
      indicesGet.push_back( 9);
      indicesGet.push_back( 10);
      indicesGet.push_back( 11);
      indicesGet.push_back( 12);
      indicesGet.push_back( 13);
      indicesGet.push_back( 14);
      indicesGet.push_back( 15);
      indicesGet.push_back( 16);
   }
   if ( indicesPut.empty() ) {
      indicesPut.reserve(fNvars);
      indicesPut.push_back( 0);
      indicesPut.push_back( 1);
      indicesPut.push_back( 2);
      indicesPut.push_back( 3);
      indicesPut.push_back( 4);
      indicesPut.push_back( 5);
      indicesPut.push_back( 6);
      indicesPut.push_back( 7);
      indicesPut.push_back( 8);
      indicesPut.push_back( 9);
      indicesPut.push_back( 10);
      indicesPut.push_back( 11);
      indicesPut.push_back( 12);
      indicesPut.push_back( 13);
      indicesPut.push_back( 14);
      indicesPut.push_back( 15);
      indicesPut.push_back( 16);
   }

   // copy the selected inputs, then map each one linearly onto [-1, 1]
   // using the per-class minima and maxima set in InitTransform_1()
   static std::vector<double> dv;
   dv.resize(nVar);
   for (int ivar=0; ivar<nVar; ivar++) dv[ivar] = iv[indicesGet.at(ivar)];
   for (int ivar=0; ivar<17; ivar++) {
      double offset = fMin_1[cls][ivar];
      double scale  = 1.0/(fMax_1[cls][ivar]-fMin_1[cls][ivar]);
      iv[indicesPut.at(ivar)] = (dv[ivar]-offset)*scale * 2 - 1;
   }
}

//_______________________________________________________________________
inline void ReadMuon_Upstream_TMVA::InitTransform()
{
   InitTransform_1();
}

//_______________________________________________________________________
inline void ReadMuon_Upstream_TMVA::Transform( std::vector<double>& iv, int sigOrBgd ) const
{
   Transform_1( iv, sigOrBgd );
}
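// ---------------------------------------------------------------------------
// Usage sketch (illustrative only, not part of the TMVA-generated class).
// It shows the intended calling sequence: construct the reader with the
// training variable names in the order listed in the #VAR block above, then
// evaluate one candidate track with GetMvaValue(). The example main(), the
// READMUON_UPSTREAM_TMVA_EXAMPLE guard, and the placeholder input values are
// assumptions added for demonstration, not content emitted by MakeClass.
// ---------------------------------------------------------------------------
#ifdef READMUON_UPSTREAM_TMVA_EXAMPLE
#include <iostream>
#include <string>
#include <vector>

int main()
{
   // variable names must match the training order exactly
   std::vector<std::string> names = {
      "TrackP", "TrackPt", "TrackChi2PerDof", "TrackNumDof",
      "TrackGhostProbability", "TrackFitVeloChi2", "TrackFitVeloNDoF",
      "RichUsedR1Gas", "RichAboveMuThres", "RichAboveKaThres",
      "RichDLLe", "RichDLLmu", "RichDLLk", "RichDLLp", "RichDLLbt",
      "InAccBrem", "BremPIDe" };

   ReadMuon_Upstream_TMVA reader( names );

   // one candidate's input values, in the same order (placeholder numbers)
   std::vector<double> values = { 5000., 250., 1.2, 12, 0.05, 3.4, 8,
                                  1, 1, 0, -2.5, 1.3, -0.7, -1.1, -0.4,
                                  0, -999. };

   if (reader.IsStatusClean()) {
      const double mva = reader.GetMvaValue( values ); // sigmoid output in (0,1)
      std::cout << "Muon Upstream ANN PID response = " << mva << std::endl;
   }
   return 0;
}
#endif // READMUON_UPSTREAM_TMVA_EXAMPLE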