flexible_relu.hpp
/**
 * @file flexible_relu.hpp
 *
 * Definition of the FlexibleReLU layer.
 */
#ifndef MLPACK_METHODS_ANN_LAYER_FLEXIBLERELU_HPP
#define MLPACK_METHODS_ANN_LAYER_FLEXIBLERELU_HPP

// The core includes that mlpack expects; standard C++ includes and Armadillo.
#include <mlpack/prereqs.hpp>

namespace mlpack {
namespace ann {

/**
 * The FlexibleReLU activation function, defined by
 *
 *   f(x)  = max(0, x) + alpha
 *   f'(x) = 1 if x > 0, 0 otherwise
 *
 * where alpha is a trainable parameter.
 *
 * @tparam InputDataType Type of the input data (arma::colvec, arma::mat,
 *         arma::sp_mat or arma::cube).
 * @tparam OutputDataType Type of the output data (arma::colvec, arma::mat,
 *         arma::sp_mat or arma::cube).
 */
template <
    typename InputDataType = arma::mat,
    typename OutputDataType = arma::mat
>
class FlexibleReLU
{
 public:
  /**
   * Create the FlexibleReLU object using the specified parameters.
   *
   * @param alpha Parameter controlling the range of the relu function.
   */
  FlexibleReLU(const double alpha = 0);

  //! Reset the layer parameter.
  void Reset();

  /**
   * Ordinary feed forward pass of a neural network, evaluating the function
   * f(x) by propagating the activity forward through f.
   *
   * @param input Input data used for evaluating the specified function.
   * @param output Resulting output activation.
   */
  template<typename InputType, typename OutputType>
  void Forward(const InputType& input, OutputType& output);

  /**
   * Ordinary feed backward pass of a neural network, calculating the function
   * f(x) by propagating x backwards through f, using the results from the
   * feed forward pass.
   *
   * @param input The propagated input activation.
   * @param gy The backpropagated error.
   * @param g The calculated gradient.
   */
  template<typename DataType>
  void Backward(const DataType& input, const DataType& gy, DataType& g);

  /**
   * Calculate the gradient of the layer's alpha parameter using the
   * backpropagated error and the input activation.
   */
  template<typename eT>
  void Gradient(const arma::Mat<eT>& input,
                const arma::Mat<eT>& error,
                arma::Mat<eT>& gradient);

  //! Get the parameters.
  OutputDataType const& Parameters() const { return alpha; }
  //! Modify the parameters.
  OutputDataType& Parameters() { return alpha; }

  //! Get the output parameter.
  OutputDataType const& OutputParameter() const { return outputParameter; }
  //! Modify the output parameter.
  OutputDataType& OutputParameter() { return outputParameter; }

  //! Get the delta.
  OutputDataType const& Delta() const { return delta; }
  //! Modify the delta.
  OutputDataType& Delta() { return delta; }

  //! Get the gradient.
  OutputDataType const& Gradient() const { return gradient; }
  //! Modify the gradient.
  OutputDataType& Gradient() { return gradient; }

  //! Get the parameter controlling the range of the relu function.
  double const& Alpha() const { return userAlpha; }
  //! Modify the parameter controlling the range of the relu function.
  double& Alpha() { return userAlpha; }

  //! Serialize the layer.
  template<typename Archive>
  void serialize(Archive& ar, const uint32_t /* version */);

 private:
  //! Locally-stored delta object.
  OutputDataType delta;

  //! Locally-stored output parameter object.
  OutputDataType outputParameter;

  //! Trainable parameter (alpha), stored as a matrix.
  OutputDataType alpha;

  //! Locally-stored gradient object.
  OutputDataType gradient;

  //! User-specified value of alpha, controlling the range of the relu
  //! function.
  double userAlpha;
}; // class FlexibleReLU

} // namespace ann
} // namespace mlpack

// Include implementation.
#include "flexible_relu_impl.hpp"

#endif
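
For reference, the forward and backward passes of this layer amount to an elementwise application of f and f'. The following standalone Armadillo snippet is a minimal sketch of that arithmetic only; the variable names and values are illustrative, and it is not the implementation from flexible_relu_impl.hpp.

#include <armadillo>

int main()
{
  const double alpha = 0.1;  // example value for the trainable offset
  arma::mat input = {{-2.0, -0.5, 0.0, 1.5, 3.0}};

  // Forward: f(x) = max(0, x) + alpha, applied elementwise.
  arma::mat output = arma::clamp(input, 0.0, arma::datum::inf) + alpha;
  // output = [0.1, 0.1, 0.1, 1.6, 3.1]

  // Backward: the incoming error gy is masked by f'(x) = 1 for x > 0, else 0.
  arma::mat gy(1, 5, arma::fill::ones);
  arma::mat g = gy % arma::conv_to<arma::mat>::from(input > 0.0);
  // g = [0, 0, 0, 1, 1]

  output.print("forward output:");
  g.print("gradient with respect to the input:");
  return 0;
}

Because df(x)/d(alpha) = 1 for every element, the gradient with respect to alpha itself reduces to a sum over the backpropagated error.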
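
As a usage illustration, FlexibleReLU can be dropped into a feedforward network like any other activation layer. This is a minimal sketch assuming the mlpack 3.x ann API (FFN, Linear, LogSoftMax, NegativeLogLikelihood and their headers); the toy data, layer sizes, and alpha = 0.05 are arbitrary choices, and other mlpack versions may expose a different layer API.

#include <mlpack/core.hpp>
#include <mlpack/methods/ann/ffn.hpp>
#include <mlpack/methods/ann/layer/layer.hpp>
#include <mlpack/methods/ann/loss_functions/negative_log_likelihood.hpp>

using namespace mlpack::ann;

int main()
{
  // Toy data: 100 points with 10 features each; class labels in {1, 2, 3}.
  arma::mat data(10, 100, arma::fill::randu);
  arma::mat labels = arma::conv_to<arma::mat>::from(
      arma::randi<arma::imat>(1, 100, arma::distr_param(1, 3)));

  FFN<NegativeLogLikelihood<>> model;
  model.Add<Linear<>>(10, 8);
  model.Add<FlexibleReLU<>>(0.05);  // initial alpha; it is learned during training
  model.Add<Linear<>>(8, 3);
  model.Add<LogSoftMax<>>();

  model.Train(data, labels);
  return 0;
}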