parametric_relu.hpp — declaration of the mlpack ANN PReLU (parametric rectified linear unit) layer.
15 #ifndef MLPACK_METHODS_ANN_LAYER_PReLU_HPP
16 #define MLPACK_METHODS_ANN_LAYER_PReLU_HPP
17 
18 #include <mlpack/prereqs.hpp>
19 
20 namespace mlpack {
21 namespace ann {
22 
41 template <
42  typename InputDataType = arma::mat,
43  typename OutputDataType = arma::mat
44 >
45 class PReLU
46 {
47  public:
56  PReLU(const double userAlpha = 0.03);
57 
58  /*
59  * Reset the layer parameter.
60  */
61  void Reset();
62 
70  template<typename InputType, typename OutputType>
71  void Forward(const InputType& input, OutputType& output);
72 
82  template<typename DataType>
83  void Backward(const DataType& input, const DataType& gy, DataType& g);
84 
92  template<typename eT>
93  void Gradient(const arma::Mat<eT>& input,
94  const arma::Mat<eT>& error,
95  arma::Mat<eT>& gradient);
96 
98  OutputDataType const& Parameters() const { return alpha; }
100  OutputDataType& Parameters() { return alpha; }
101 
103  OutputDataType const& OutputParameter() const { return outputParameter; }
105  OutputDataType& OutputParameter() { return outputParameter; }
106 
108  OutputDataType const& Delta() const { return delta; }
110  OutputDataType& Delta() { return delta; }
111 
113  OutputDataType const& Gradient() const { return gradient; }
115  OutputDataType& Gradient() { return gradient; }
116 
118  double const& Alpha() const { return alpha(0); }
120  double& Alpha() { return alpha(0); }
121 
123  size_t WeightSize() const { return 1; }
124 
128  template<typename Archive>
129  void serialize(Archive& ar, const uint32_t /* version */);
130 
131  private:
133  OutputDataType delta;
134 
136  OutputDataType outputParameter;
137 
139  OutputDataType alpha;
140 
142  OutputDataType gradient;
143 
145  double userAlpha;
146 }; // class PReLU
147 
148 } // namespace ann
149 } // namespace mlpack
150 
151 // Include implementation.
152 #include "parametric_relu_impl.hpp"
153 
154 #endif
void serialize(Archive &ar, const uint32_t)
Serialize the layer.
double & Alpha()
Modify the non zero gradient.
PReLU(const double userAlpha=0.03)
Create the PReLU object using the specified parameters.
Linear algebra utility functions, generally performed on matrices or vectors.
The core includes that mlpack expects; standard C++ includes and Armadillo.
OutputDataType const & Parameters() const
Get the parameters.
OutputDataType const & Gradient() const
Get the gradient.
The PReLU activation function, defined by (where alpha is trainable)
OutputDataType const & Delta() const
Get the delta.
OutputDataType & Parameters()
Modify the parameters.
void Backward(const DataType &input, const DataType &gy, DataType &g)
Ordinary feed backward pass of a neural network, calculating the function f(x) by propagating x backwards through f, using the results from the feed forward pass.
OutputDataType & OutputParameter()
Modify the output parameter.
OutputDataType & Delta()
Modify the delta.
OutputDataType & Gradient()
Modify the gradient.
void Forward(const InputType &input, OutputType &output)
Ordinary feed forward pass of a neural network, evaluating the function f(x) by propagating the activity forward through f.
OutputDataType const & OutputParameter() const
Get the output parameter.
double const & Alpha() const
Get the non zero gradient.
size_t WeightSize() const
Get size of weights.