24 #ifndef MLPACK_METHODS_ANN_LAYER_ELU_HPP 25 #define MLPACK_METHODS_ANN_LAYER_ELU_HPP 108 typename InputDataType = arma::mat,
109 typename OutputDataType = arma::mat
129 ELU(
const double alpha);
/**
 * Ordinary feed forward pass of a neural network, evaluating the function
 * f(x) by propagating the activity forward through f.
 *
 * @param input Input data used for evaluating the specified function.
 * @param output Resulting output activation.
 */
template<typename InputType, typename OutputType>
void Forward(const InputType& input, OutputType& output);
/**
 * Ordinary feed backward pass of a neural network, calculating the function
 * f(x) by propagating x backwards through f.
 *
 * @param input The propagated input activation.
 * @param gy The backpropagated error.
 * @param g The calculated gradient.
 */
template<typename DataType>
void Backward(const DataType& input, const DataType& gy, DataType& g);
159 OutputDataType
const&
Delta()
const {
return delta; }
161 OutputDataType&
Delta() {
return delta; }
164 double const&
Alpha()
const {
return alpha; }
174 double const&
Lambda()
const {
return lambda; }
/**
 * Serialize the layer.
 *
 * @param ar Archive to serialize to / deserialize from.
 */
template<typename Archive>
void serialize(Archive& ar, const uint32_t /* version */);
184 OutputDataType delta;
187 OutputDataType outputParameter;
190 arma::mat derivative;
#include "elu_impl.hpp"

double const & Alpha() const
Get the non-zero gradient.
Linear algebra utility functions, generally performed on matrices or vectors.
OutputDataType & Delta()
Modify the delta.
bool & Deterministic()
Modify the value of deterministic parameter.
The core includes that mlpack expects; standard C++ includes and Armadillo.
bool Deterministic() const
Get the value of deterministic parameter.
OutputDataType const & OutputParameter() const
Get the output parameter.
void Forward(const InputType &input, OutputType &output)
Ordinary feed forward pass of a neural network, evaluating the function f(x) by propagating the activity forward through f.
void Backward(const DataType &input, const DataType &gy, DataType &g)
Ordinary feed backward pass of a neural network, calculating the function f(x) by propagating x backwards through f.
ELU()
Create the ELU object.
void serialize(Archive &ar, const uint32_t)
Serialize the layer.
double & Alpha()
Modify the non-zero gradient.
The ELU activation function, defined by.
OutputDataType & OutputParameter()
Modify the output parameter.
double const & Lambda() const
Get the lambda parameter.
OutputDataType const & Delta() const
Get the delta.