layer_norm.hpp
#ifndef MLPACK_METHODS_ANN_LAYER_LAYERNORM_HPP
#define MLPACK_METHODS_ANN_LAYER_LAYERNORM_HPP

// The core includes that mlpack expects; standard C++ includes and Armadillo.
#include <mlpack/prereqs.hpp>

namespace mlpack {
namespace ann {

/**
 * Declaration of the Layer Normalization class. The layer normalizes each
 * training data point (each column of the input) to zero mean and unit
 * variance, and then scales and shifts the result by the learnable
 * parameters gamma and beta. Unlike Batch Normalization, the mean and
 * variance are computed across the layer dimensions of a single training
 * case rather than across the batch.
 *
 * @tparam InputDataType Type of the input data (e.g. arma::mat).
 * @tparam OutputDataType Type of the output data (e.g. arma::mat).
 */
template <
    typename InputDataType = arma::mat,
    typename OutputDataType = arma::mat
>
class LayerNorm
{
 public:
  //! Create the LayerNorm object.
  LayerNorm();

  /**
   * Create the LayerNorm object for a specified number of input units.
   *
   * @param size The number of input units.
   * @param eps The epsilon added to the variance to avoid division by zero.
   */
  LayerNorm(const size_t size, const double eps = 1e-8);

  //! Reset the layer parameters.
  void Reset();

  /**
   * Forward pass of Layer Normalization.
   *
   * @param input Input data for the layer.
   * @param output Resulting output activations.
   */
  template<typename eT>
  void Forward(const arma::Mat<eT>& input, arma::Mat<eT>& output);

  /**
   * Backward pass through the layer.
   *
   * @param input The input activations.
   * @param gy The backpropagated error.
   * @param g The calculated gradient (delta passed to the previous layer).
   */
  template<typename eT>
  void Backward(const arma::Mat<eT>& input,
                const arma::Mat<eT>& gy,
                arma::Mat<eT>& g);

  /**
   * Calculate the gradient of the parameters using the input activations
   * and the calculated error.
   *
   * @param input The input activations.
   * @param error The calculated error.
   * @param gradient The calculated gradient.
   */
  template<typename eT>
  void Gradient(const arma::Mat<eT>& input,
                const arma::Mat<eT>& error,
                arma::Mat<eT>& gradient);

  //! Get the parameters.
  OutputDataType const& Parameters() const { return weights; }
  //! Modify the parameters.
  OutputDataType& Parameters() { return weights; }

  //! Get the output parameter.
  OutputDataType const& OutputParameter() const { return outputParameter; }
  //! Modify the output parameter.
  OutputDataType& OutputParameter() { return outputParameter; }

  //! Get the delta.
  OutputDataType const& Delta() const { return delta; }
  //! Modify the delta.
  OutputDataType& Delta() { return delta; }

  //! Get the gradient.
  OutputDataType const& Gradient() const { return gradient; }
  //! Modify the gradient.
  OutputDataType& Gradient() { return gradient; }

  //! Get the mean across single training data.
  OutputDataType Mean() { return mean; }

  //! Get the variance across single training data.
  OutputDataType Variance() { return variance; }

  //! Get the number of input units.
  size_t InSize() const { return size; }

  //! Get the value of epsilon.
  double Epsilon() const { return eps; }

  //! Get the shape of the input.
  size_t InputShape() const
  {
    return size;
  }

  //! Serialize the layer.
  template<typename Archive>
  void serialize(Archive& ar, const uint32_t /* version */);

 private:
  //! Locally-stored number of input units.
  size_t size;

  //! Locally-stored epsilon value.
  double eps;

  //! Whether the layer is currently being loaded (used by serialization).
  bool loading;

  //! Locally-stored scale parameter.
  OutputDataType gamma;

  //! Locally-stored shift parameter.
  OutputDataType beta;

  //! Locally-stored parameters (gamma and beta).
  OutputDataType weights;

  //! Locally-stored mean object.
  OutputDataType mean;

  //! Locally-stored variance object.
  OutputDataType variance;

  //! Locally-stored gradient object.
  OutputDataType gradient;

  //! Locally-stored delta object.
  OutputDataType delta;

  //! Locally-stored output parameter object.
  OutputDataType outputParameter;

  //! Locally-stored normalized input.
  OutputDataType normalized;

  //! Locally-stored zero-mean input.
  OutputDataType inputMean;
}; // class LayerNorm

} // namespace ann
} // namespace mlpack

// Include the implementation.
#include "layer_norm_impl.hpp"

#endif
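For reference, the short sketch below shows one way the interface declared above is typically exercised standalone: construct the layer, call Reset() to initialize gamma and beta, then run Forward(), Backward(), and Gradient(). The input size (10 units, 5 columns) and the all-ones backpropagated error are illustrative assumptions, not part of this header; inside a full network the layer would normally be added to a model that manages its parameters instead.

// Minimal standalone sketch of the LayerNorm interface declared above.
// Dimensions and the error matrix are arbitrary illustration choices.
#include <mlpack/methods/ann/layer/layer_norm.hpp>

using namespace mlpack::ann;

int main()
{
  // Ten input units, five data points (one per column).
  arma::mat input = arma::randu<arma::mat>(10, 5);
  arma::mat output, delta, gradient;

  // Construct the layer and initialize gamma (scale) and beta (shift).
  LayerNorm<> layer(10 /* size */, 1e-8 /* eps */);
  layer.Reset();

  // Normalize each column to zero mean and unit variance, then scale/shift.
  layer.Forward(input, output);

  // Backpropagate an all-ones error through the layer and compute the
  // gradient with respect to the layer's parameters.
  arma::mat gy = arma::ones<arma::mat>(arma::size(output));
  layer.Backward(input, gy, delta);
  layer.Gradient(input, gy, gradient);

  output.print("layer-normalized output:");
  return 0;
}

Since Parameters() exposes a single weights matrix, gamma and beta are presumably stored back-to-back inside it; when the layer runs inside a network, that matrix is what the optimizer updates.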