layer_types.hpp
Go to the documentation of this file.
1 
12 #ifndef MLPACK_METHODS_ANN_LAYER_LAYER_TYPES_HPP
13 #define MLPACK_METHODS_ANN_LAYER_LAYER_TYPES_HPP
14 
15 #include <boost/variant.hpp>
16 
17 // Layer modules.
65 
66 // Convolution modules.
70 
71 // Regularizers.
73 
74 // Loss function modules.
76 
77 namespace mlpack {
78 namespace ann {
79 
80 template<typename InputDataType, typename OutputDataType> class BatchNorm;
81 template<typename InputDataType, typename OutputDataType> class DropConnect;
82 template<typename InputDataType, typename OutputDataType> class Glimpse;
83 template<typename InputDataType, typename OutputDataType> class LayerNorm;
84 template<typename InputDataType, typename OutputDataType> class LSTM;
85 template<typename InputDataType, typename OutputDataType> class GRU;
86 template<typename InputDataType, typename OutputDataType> class FastLSTM;
87 template<typename InputDataType, typename OutputDataType> class VRClassReward;
88 template<typename InputDataType, typename OutputDataType> class Concatenate;
89 template<typename InputDataType, typename OutputDataType> class Padding;
90 template<typename InputDataType, typename OutputDataType> class ReLU6;
91 
92 template<typename InputDataType,
93  typename OutputDataType,
94  typename RegularizerType>
95 class Linear;
96 
97 template<typename InputDataType,
98  typename OutputDataType,
99  typename Activation>
100 class RBF;
101 
102 template<typename InputDataType,
103  typename OutputDataType,
104  typename RegularizerType>
105 class LinearNoBias;
106 
107 template<typename InputDataType,
108  typename OutputDataType>
109 class NoisyLinear;
110 
111 template<typename InputDataType,
112  typename OutputDataType,
113  typename RegularizerType>
114 class Linear3D;
115 
116 template<typename InputDataType,
117  typename OutputDataType
118 >
120 
121 template<typename InputDataType,
122  typename OutputDataType
123 >
125 
126 template <typename InputDataType,
127  typename OutputDataType,
128  typename RegularizerType>
129 class MultiheadAttention;
130 
131 template<typename InputDataType,
132  typename OutputDataType
133 >
135 
136 template<typename InputDataType,
137  typename OutputDataType,
138  typename... CustomLayers
139 >
140 class AddMerge;
141 
142 template<typename InputDataType,
143  typename OutputDataType,
144  bool residual,
145  typename... CustomLayers
146 >
147 class Sequential;
148 
149 template<typename InputDataType,
150  typename OutputDataType,
151  typename... CustomLayers
152 >
153 class Highway;
154 
155 template<typename InputDataType,
156  typename OutputDataType,
157  typename... CustomLayers
158 >
159 class Recurrent;
160 
161 template<typename InputDataType,
162  typename OutputDataType,
163  typename... CustomLayers
164 >
165 class Concat;
166 
167 template<
168  typename OutputLayerType,
169  typename InputDataType,
170  typename OutputDataType
171 >
172 class ConcatPerformance;
173 
174 template<
175  typename ForwardConvolutionRule,
176  typename BackwardConvolutionRule,
177  typename GradientConvolutionRule,
178  typename InputDataType,
179  typename OutputDataType
180 >
181 class Convolution;
182 
183 template<
184  typename ForwardConvolutionRule,
185  typename BackwardConvolutionRule,
186  typename GradientConvolutionRule,
187  typename InputDataType,
188  typename OutputDataType
189 >
190 class TransposedConvolution;
191 
192 template<
193  typename ForwardConvolutionRule,
194  typename BackwardConvolutionRule,
195  typename GradientConvolutionRule,
196  typename InputDataType,
197  typename OutputDataType
198 >
199 class AtrousConvolution;
200 
201 template<
202  typename InputDataType,
203  typename OutputDataType
204 >
206 
207 template<typename InputDataType,
208  typename OutputDataType,
209  typename... CustomLayers
210 >
212 
213 template <typename InputDataType,
214  typename OutputDataType,
215  typename... CustomLayers
216 >
218 
219 template <typename InputDataType,
220  typename OutputDataType
221 >
222 class AdaptiveMaxPooling;
223 
224 template <typename InputDataType,
225  typename OutputDataType
226 >
227 class AdaptiveMeanPooling;
228 
229 using MoreTypes = boost::variant<
230  FlexibleReLU<arma::mat, arma::mat>*,
231  Linear3D<arma::mat, arma::mat, NoRegularizer>*,
232  LpPooling<arma::mat, arma::mat>*,
233  PixelShuffle<arma::mat, arma::mat>*,
234  ChannelShuffle<arma::mat, arma::mat>*,
235  Glimpse<arma::mat, arma::mat>*,
236  Highway<arma::mat, arma::mat>*,
237  MultiheadAttention<arma::mat, arma::mat, NoRegularizer>*,
238  Recurrent<arma::mat, arma::mat>*,
239  RecurrentAttention<arma::mat, arma::mat>*,
240  ReinforceNormal<arma::mat, arma::mat>*,
241  ReLU6<arma::mat, arma::mat>*,
242  Reparametrization<arma::mat, arma::mat>*,
243  Select<arma::mat, arma::mat>*,
244  SpatialDropout<arma::mat, arma::mat>*,
245  Subview<arma::mat, arma::mat>*,
246  VRClassReward<arma::mat, arma::mat>*,
247  VirtualBatchNorm<arma::mat, arma::mat>*,
248  RBF<arma::mat, arma::mat, GaussianFunction>*,
249  BaseLayer<GaussianFunction, arma::mat, arma::mat>*,
250  PositionalEncoding<arma::mat, arma::mat>*,
251  ISRLU<arma::mat, arma::mat>*,
252  BicubicInterpolation<arma::mat, arma::mat>*,
253  NearestInterpolation<arma::mat, arma::mat>*,
254  GroupNorm<arma::mat, arma::mat>*
255 >;
256 
257 template <typename... CustomLayers>
258 using LayerTypes = boost::variant<
267  arma::mat, arma::mat>*,
279  arma::mat, arma::mat>*,
282  NaiveConvolution<FullConvolution>,
283  NaiveConvolution<ValidConvolution>, arma::mat, arma::mat>*,
312  NaiveConvolution<ValidConvolution>,
313  NaiveConvolution<ValidConvolution>, arma::mat, arma::mat>*,
315  MoreTypes,
316  CustomLayers*...
317 >;
318 
319 } // namespace ann
320 } // namespace mlpack
321 
322 #endif
Implementation of the variance reduced classification reinforcement layer.
Definition: layer_types.hpp:87
Implementation of the Add module class.
Definition: add.hpp:34
Implementation of the AdaptiveMaxPooling layer.
Implementation of the Concatenate module class.
Definition: concatenate.hpp:36
The ISRLU activation function, defined by.
Definition: isrlu.hpp:60
Implementation of the log softmax layer.
Definition: log_softmax.hpp:36
Implementation of the AddMerge module class.
Definition: add_merge.hpp:42
Definition and Implementation of the Nearest Interpolation Layer.
Linear algebra utility functions, generally performed on matrices or vectors.
Implementation of the Padding module class.
Definition: layer_types.hpp:89
Declaration of the VirtualBatchNorm layer class.
The FlexibleReLU activation function, defined by.
Implementation of the Transposed Convolution class.
Implementation of the reinforce normal layer.
Implementation of the LPPooling.
Definition: lp_pooling.hpp:32
Implementation of the Linear layer class.
Definition: layer_types.hpp:95
Declaration of the Group Normalization class.
Definition: group_norm.hpp:50
The LeakyReLU activation function, defined by.
Definition: leaky_relu.hpp:44
This class implements the Recurrent Model for Visual Attention, using a variety of possible layer implementations.
Implementation of the Convolution class.
Definition: convolution.hpp:77
Positional Encoding injects some information about the relative or absolute position of the tokens in the sequence.
Implementation of the MeanPooling.
Implementation of the Reparametrization layer class.
Implementation of the Join module class.
Definition: join.hpp:33
Implementation of the concat performance class.
Declaration of the WeightNorm layer class.
The Hard Tanh activation function, defined by.
Definition: hard_tanh.hpp:49
The select module selects the specified column from a given input matrix.
Definition: select.hpp:32
Implementation of the negative log likelihood layer.
Implementation of the Softmax layer.
Definition: softmax.hpp:38
Multihead Attention allows the model to jointly attend to information from different representation subspaces at different positions.
The PReLU activation function, defined by (where alpha is trainable)
Implementation of the AdaptiveMeanPooling.
Implementation of the base layer.
Definition: base_layer.hpp:71
Implementation of the PixelShuffle layer.
Implementation of the Concat class.
Definition: concat.hpp:43
Implementation of the Highway layer.
Definition: highway.hpp:58
Implementation of the LSTM module class.
Definition: layer_types.hpp:84
Implementation of the Linear3D layer class.
Declaration of the Layer Normalization class.
Definition: layer_norm.hpp:65
The Lookup class stores word embeddings and retrieves them using tokens.
Definition: lookup.hpp:41
Implementation of the NoisyLinear layer class.
Implementation of the subview layer.
Definition: subview.hpp:34
Implementation of the MiniBatchDiscrimination layer.
Implementation of the MultiplyMerge module class.
Implementation of the LinearNoBias class.
A concatenated ReLU has two outputs, one ReLU and one negative ReLU, concatenated together...
Definition: c_relu.hpp:50
Computes the two-dimensional convolution.
An implementation of a gru network layer.
Definition: gru.hpp:58
The dropout layer is a regularizer that randomly with probability 'ratio' sets input values to zero a...
Definition: dropout.hpp:53
The glimpse layer returns a retina-like representation (down-scaled cropped images) of increasing sca...
Definition: glimpse.hpp:88
The DropConnect layer is a regularizer that randomly with probability ratio sets the connection value...
Definition: dropconnect.hpp:63
Definition and Implementation of the Bicubic Interpolation Layer.
Implementation of the multiply constant layer.
Definition and implementation of the Channel Shuffle Layer.
The alpha - dropout layer is a regularizer that randomly with probability 'ratio' sets input values t...
boost::variant< AdaptiveMaxPooling< arma::mat, arma::mat > *, AdaptiveMeanPooling< arma::mat, arma::mat > *, Add< arma::mat, arma::mat > *, AddMerge< arma::mat, arma::mat > *, AlphaDropout< arma::mat, arma::mat > *, AtrousConvolution< NaiveConvolution< ValidConvolution >, NaiveConvolution< FullConvolution >, NaiveConvolution< ValidConvolution >, arma::mat, arma::mat > *, BaseLayer< LogisticFunction, arma::mat, arma::mat > *, BaseLayer< IdentityFunction, arma::mat, arma::mat > *, BaseLayer< TanhFunction, arma::mat, arma::mat > *, BaseLayer< SoftplusFunction, arma::mat, arma::mat > *, BaseLayer< RectifierFunction, arma::mat, arma::mat > *, BatchNorm< arma::mat, arma::mat > *, BilinearInterpolation< arma::mat, arma::mat > *, CELU< arma::mat, arma::mat > *, Concat< arma::mat, arma::mat > *, Concatenate< arma::mat, arma::mat > *, ConcatPerformance< NegativeLogLikelihood< arma::mat, arma::mat >, arma::mat, arma::mat > *, Constant< arma::mat, arma::mat > *, Convolution< NaiveConvolution< ValidConvolution >, NaiveConvolution< FullConvolution >, NaiveConvolution< ValidConvolution >, arma::mat, arma::mat > *, CReLU< arma::mat, arma::mat > *, DropConnect< arma::mat, arma::mat > *, Dropout< arma::mat, arma::mat > *, ELU< arma::mat, arma::mat > *, FastLSTM< arma::mat, arma::mat > *, GRU< arma::mat, arma::mat > *, HardTanH< arma::mat, arma::mat > *, Join< arma::mat, arma::mat > *, LayerNorm< arma::mat, arma::mat > *, LeakyReLU< arma::mat, arma::mat > *, Linear< arma::mat, arma::mat, NoRegularizer > *, LinearNoBias< arma::mat, arma::mat, NoRegularizer > *, LogSoftMax< arma::mat, arma::mat > *, Lookup< arma::mat, arma::mat > *, LSTM< arma::mat, arma::mat > *, MaxPooling< arma::mat, arma::mat > *, MeanPooling< arma::mat, arma::mat > *, MiniBatchDiscrimination< arma::mat, arma::mat > *, MultiplyConstant< arma::mat, arma::mat > *, MultiplyMerge< arma::mat, arma::mat > *, NegativeLogLikelihood< arma::mat, arma::mat > *, NoisyLinear< arma::mat, arma::mat > *, Padding< arma::mat, 
arma::mat > *, PReLU< arma::mat, arma::mat > *, Sequential< arma::mat, arma::mat, false > *, Sequential< arma::mat, arma::mat, true > *, Softmax< arma::mat, arma::mat > *, TransposedConvolution< NaiveConvolution< ValidConvolution >, NaiveConvolution< ValidConvolution >, NaiveConvolution< ValidConvolution >, arma::mat, arma::mat > *, WeightNorm< arma::mat, arma::mat > *, MoreTypes, CustomLayers *... > LayerTypes
The CELU activation function, defined by.
Definition: celu.hpp:60
Declaration of the Batch Normalization layer class.
Definition: batch_norm.hpp:56
Implementation of the RecurrentLayer class.
Implementation of the Sequential class.
Implementation of the constant layer.
Definition: constant.hpp:34
Implementation of the MaxPooling layer.
Definition: max_pooling.hpp:52
The ELU activation function, defined by.
Definition: elu.hpp:111
Implementation of the Radial Basis Function layer.
boost::variant< FlexibleReLU< arma::mat, arma::mat > *, Linear3D< arma::mat, arma::mat, NoRegularizer > *, LpPooling< arma::mat, arma::mat > *, PixelShuffle< arma::mat, arma::mat > *, ChannelShuffle< arma::mat, arma::mat > *, Glimpse< arma::mat, arma::mat > *, Highway< arma::mat, arma::mat > *, MultiheadAttention< arma::mat, arma::mat, NoRegularizer > *, Recurrent< arma::mat, arma::mat > *, RecurrentAttention< arma::mat, arma::mat > *, ReinforceNormal< arma::mat, arma::mat > *, ReLU6< arma::mat, arma::mat > *, Reparametrization< arma::mat, arma::mat > *, Select< arma::mat, arma::mat > *, SpatialDropout< arma::mat, arma::mat > *, Subview< arma::mat, arma::mat > *, VRClassReward< arma::mat, arma::mat > *, VirtualBatchNorm< arma::mat, arma::mat > *, RBF< arma::mat, arma::mat, GaussianFunction > *, BaseLayer< GaussianFunction, arma::mat, arma::mat > *, PositionalEncoding< arma::mat, arma::mat > *, ISRLU< arma::mat, arma::mat > *, BicubicInterpolation< arma::mat, arma::mat > *, NearestInterpolation< arma::mat, arma::mat > *, GroupNorm< arma::mat, arma::mat > *> MoreTypes
Implementation of the SpatialDropout layer.
Definition and Implementation of the Bilinear Interpolation Layer.
An implementation of a faster version of the LSTM network layer.
Definition: fast_lstm.hpp:66
Implementation of the Atrous Convolution class.