/usr/include/root/TMVA/MethodTMlpANN.h is in libroot-tmva-dev 5.34.30-0ubuntu8.
This file is owned by root:root, with mode 0o644.
The actual contents of the file can be viewed below.
// @(#)root/tmva $Id$
// Author: Andreas Hoecker, Joerg Stelzer, Helge Voss, Kai Voss
/**********************************************************************************
* Project: TMVA - a Root-integrated toolkit for multivariate data analysis *
* Package: TMVA *
* Class : MethodTMlpANN *
* Web : http://tmva.sourceforge.net *
* *
* Description: *
* Implementation of interface for Root-integrated artificial neural *
* network: TMultiLayerPerceptron, author: Christophe.Delaere@cern.ch *
* for a manual, see *
* http://root.cern.ch/root/html/TMultiLayerPerceptron.html *
* *
* Authors (alphabetical): *
* Andreas Hoecker <Andreas.Hocker@cern.ch> - CERN, Switzerland *
* Helge Voss <Helge.Voss@cern.ch> - MPI-K Heidelberg, Germany *
* Kai Voss <Kai.Voss@cern.ch> - U. of Victoria, Canada *
* *
* Copyright (c) 2005: *
* CERN, Switzerland *
* U. of Victoria, Canada *
* MPI-K Heidelberg, Germany *
* *
* Redistribution and use in source and binary forms, with or without *
* modification, are permitted according to the terms listed in LICENSE *
* (http://tmva.sourceforge.net/LICENSE) *
**********************************************************************************/
#ifndef ROOT_TMVA_MethodTMlpANN
#define ROOT_TMVA_MethodTMlpANN
//////////////////////////////////////////////////////////////////////////
// //
// MethodTMlpANN //
// //
// Implementation of interface for Root-integrated artificial neural //
// network: TMultiLayerPerceptron //
// //
//////////////////////////////////////////////////////////////////////////
#ifndef ROOT_TMVA_MethodBase
#include "TMVA/MethodBase.h"
#endif
class TMultiLayerPerceptron;
namespace TMVA {

   class MethodTMlpANN : public MethodBase {

   public:
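
      // constructor for training
      // (the default option string "3000:N-1:N-2" presumably encodes the number of
      //  training cycles followed by the hidden-layer sizes, where N stands for the
      //  number of input variables; the named options are declared in DeclareOptions())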
      MethodTMlpANN( const TString& jobName,
                     const TString& methodTitle,
                     DataSetInfo& theData,
                     const TString& theOption = "3000:N-1:N-2",
                     TDirectory* theTargetDir = 0 );
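
      // constructor for reading the trained classifier back from its weight file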
      MethodTMlpANN( DataSetInfo& theData,
                     const TString& theWeightFile,
                     TDirectory* theTargetDir = NULL );

      virtual ~MethodTMlpANN( void );

      virtual Bool_t HasAnalysisType( Types::EAnalysisType type, UInt_t numberClasses, UInt_t numberTargets );

      // training method
      void Train( void );

      using MethodBase::ReadWeightsFromStream;

      // write weights to file
      void AddWeightsXMLTo( void* parent ) const;

      // read weights from file
      void ReadWeightsFromStream( std::istream& istr );
      void ReadWeightsFromXML(void* wghtnode);

      // calculate the MVA value ...
      // - here it is just a dummy, as it is done in the overwritten
      // - PrepareEvaluationtree... ugly but necessary due to the structure
      //   of TMultiLayerPerceptron in ROOT grr... :-(
      Double_t GetMvaValue( Double_t* err = 0, Double_t* errUpper = 0 );

      void SetHiddenLayer(TString hiddenlayer = "" ) { fHiddenLayer=hiddenlayer; }

      // ranking of input variables
      const Ranking* CreateRanking() { return 0; }

      // make ROOT-independent C++ class
      void MakeClass( const TString& classFileName = TString("") ) const;

   protected:

      // make ROOT-independent C++ class for classifier response (classifier-specific implementation)
      void MakeClassSpecific( std::ostream&, const TString& ) const;

      // get help message text
      void GetHelpMessage() const;

   private:

      // the option handling methods
      void DeclareOptions();
      void ProcessOptions();

      void CreateMLPOptions( TString );

      // option string
      TString fLayerSpec; // Layer specification option

      TMultiLayerPerceptron* fMLP; // the TMLP
      TTree* fLocalTrainingTree; // local copy of training tree
      TString fHiddenLayer; // string containing the hidden layer structure
      Int_t fNcycles; // number of training cycles
      Double_t fValidationFraction; // fraction of events in training tree used for cross validation
      TString fMLPBuildOptions; // option string to build the mlp
      TString fLearningMethod; // the learning method (given via option string)

      // default initialisation called by all constructors
      void Init( void );

      ClassDef(MethodTMlpANN,0) // Implementation of interface for TMultiLayerPerceptron
   };
} // namespace TMVA
#endif
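
For context, below is a minimal sketch of how this method is typically booked from a TMVA training macro in ROOT 5.34. It is illustrative only: the macro name, file names, and the option values passed to BookMethod are assumptions, the data-loading calls are omitted, and the authoritative option names (NCycles, HiddenLayers, ValidationFraction, LearningMethod, corresponding to the data members declared above) are defined in DeclareOptions() in MethodTMlpANN.cxx.

// Hypothetical usage sketch (not part of the header above): booking the TMlpANN
// method through the TMVA Factory in ROOT 5.34. Data-loading calls are omitted.
#include "TFile.h"
#include "TMVA/Factory.h"
#include "TMVA/Types.h"

void book_tmlpann()   // illustrative macro name
{
   TFile* outFile = TFile::Open( "TMVA_TMlpANN.root", "RECREATE" );
   TMVA::Factory factory( "TMVAClassification", outFile, "!V:AnalysisType=Classification" );

   // ... factory.AddVariable(...), factory.AddSignalTree(...), factory.AddBackgroundTree(...)
   //     and factory.PrepareTrainingAndTestTree(...) would go here ...

   // option keys assumed to match the data members above (fNcycles, fHiddenLayer,
   // fValidationFraction, fLearningMethod); see DeclareOptions() in MethodTMlpANN.cxx
   factory.BookMethod( TMVA::Types::kTMlpANN, "TMlpANN",
                       "!H:!V:NCycles=200:HiddenLayers=N+1,N:LearningMethod=BFGS:ValidationFraction=0.3" );

   factory.TrainAllMethods();
   factory.TestAllMethods();
   factory.EvaluateAllMethods();

   outFile->Close();
}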