ffn.hpp
/**
 * Definition of the FFN class, which implements standard feed forward
 * networks.
 */
#ifndef MLPACK_METHODS_ANN_FFN_HPP
#define MLPACK_METHODS_ANN_FFN_HPP

#include <mlpack/prereqs.hpp>

#include "visitor/copy_visitor.hpp"
#include "visitor/loss_visitor.hpp"

#include <ensmallen.hpp>

namespace mlpack {
namespace ann {

/**
 * Implementation of a standard feed forward network.
 */
template<
  typename OutputLayerType = NegativeLogLikelihood<>,
  typename InitializationRuleType = RandomInitialization,
  typename... CustomLayers
>
class FFN
{
 public:
  //! Create the FFN object.
  FFN(OutputLayerType outputLayer = OutputLayerType(),
      InitializationRuleType initializeRule = InitializationRuleType());

  //! Copy constructor.
  FFN(const FFN&);

  //! Move constructor.
  FFN(FFN&&);

  //! Copy/move assignment operator.
  FFN& operator=(FFN);

  //! Destructor to release allocated memory.
  ~FFN();

  /**
   * Check whether the optimizer has a MaxIterations() parameter; if it does,
   * warn when its value is less than the number of data points (samples).
   */
  template<typename OptimizerType>
  typename std::enable_if<
      HasMaxIterations<OptimizerType, size_t&(OptimizerType::*)()>
      ::value, void>::type
  WarnMessageMaxIterations(OptimizerType& optimizer, size_t samples) const;

  /**
   * Overload for optimizers that do not have a MaxIterations() parameter;
   * no warning is issued.
   */
  template<typename OptimizerType>
  typename std::enable_if<
      !HasMaxIterations<OptimizerType, size_t&(OptimizerType::*)()>
      ::value, void>::type
  WarnMessageMaxIterations(OptimizerType& optimizer, size_t samples) const;

  /**
   * Train the feedforward network on the given input data using the given
   * optimizer.
   */
  template<typename OptimizerType, typename... CallbackTypes>
  double Train(arma::mat predictors,
               arma::mat responses,
               OptimizerType& optimizer,
               CallbackTypes&&... callbacks);

  /**
   * Train the feedforward network on the given input data, using an
   * optimizer of the given type constructed with default parameters.
   */
  template<typename OptimizerType = ens::RMSProp, typename... CallbackTypes>
  double Train(arma::mat predictors,
               arma::mat responses,
               CallbackTypes&&... callbacks);

  //! Predict the responses to a given set of predictors.
  void Predict(arma::mat predictors, arma::mat& results);

  //! Evaluate the feedforward network with the given predictors and responses.
  template<typename PredictorsType, typename ResponsesType>
  double Evaluate(const PredictorsType& predictors,
                  const ResponsesType& responses);

  //! Evaluate the feedforward network with the given parameters.
  double Evaluate(const arma::mat& parameters);

  //! Evaluate the feedforward network with the given parameters, using only
  //! the batch of batchSize points starting at the function with index begin.
  double Evaluate(const arma::mat& parameters,
                  const size_t begin,
                  const size_t batchSize,
                  const bool deterministic);

  double Evaluate(const arma::mat& parameters,
                  const size_t begin,
                  const size_t batchSize);

  //! Evaluate the feedforward network with the given parameters and compute
  //! the gradient.
  template<typename GradType>
  double EvaluateWithGradient(const arma::mat& parameters, GradType& gradient);

  //! Evaluate the network and compute the gradient on the batch of batchSize
  //! points starting at the function with index begin.
  template<typename GradType>
  double EvaluateWithGradient(const arma::mat& parameters,
                              const size_t begin,
                              GradType& gradient,
                              const size_t batchSize);

  /**
   * Evaluate the gradient of the feedforward network with the given
   * parameters, with respect to only a subset of the data points.
   */
  void Gradient(const arma::mat& parameters,
                const size_t begin,
                arma::mat& gradient,
                const size_t batchSize);

  //! Shuffle the order of function visitation.
  void Shuffle();

  /*
   * Add a new module to the model.
   *
   * @param args The layer parameter.
   */
  template <class LayerType, class... Args>
  void Add(Args... args) { network.push_back(new LayerType(args...)); }

  /*
   * Add a new module to the model.
   *
   * @param layer The Layer to be added to the model.
   */
  void Add(LayerTypes<CustomLayers...> layer) { network.push_back(layer); }

  //! Get the network model.
  const std::vector<LayerTypes<CustomLayers...> >& Model() const
  {
    return network;
  }
  //! Modify the network model.
  std::vector<LayerTypes<CustomLayers...> >& Model() { return network; }

  //! Return the number of separable functions (the number of predictor
  //! points).
  size_t NumFunctions() const { return numFunctions; }

  //! Return the initial point for the optimization.
  const arma::mat& Parameters() const { return parameter; }
  //! Modify the initial point for the optimization.
  arma::mat& Parameters() { return parameter; }

  //! Get the matrix of responses to the input data points.
  const arma::mat& Responses() const { return responses; }
  //! Modify the matrix of responses to the input data points.
  arma::mat& Responses() { return responses; }

  //! Get the matrix of data points (predictors).
  const arma::mat& Predictors() const { return predictors; }
  //! Modify the matrix of data points (predictors).
  arma::mat& Predictors() { return predictors; }

  //! Reset the module information (weights/parameters).
  void ResetParameters();

  //! Serialize the model.
  template<typename Archive>
  void serialize(Archive& ar, const uint32_t /* version */);

  //! Perform the forward pass of the data in real batch mode.
  template<typename PredictorsType, typename ResponsesType>
  void Forward(const PredictorsType& inputs, ResponsesType& results);

  //! Perform a partial forward pass of the data, using only the layers from
  //! index begin to index end.
  template<typename PredictorsType, typename ResponsesType>
  void Forward(const PredictorsType& inputs,
               ResponsesType& results,
               const size_t begin,
               const size_t end);

  //! Perform the backward pass of the data in real batch mode.
  template<typename PredictorsType,
           typename TargetsType,
           typename GradientsType>
  double Backward(const PredictorsType& inputs,
                  const TargetsType& targets,
                  GradientsType& gradients);

 private:
  // Helper functions.

  //! Perform the forward pass of the data.
  template<typename InputType>
  void Forward(const InputType& input);

  //! Reset the training data (predictors and responses).
  void ResetData(arma::mat predictors, arma::mat responses);

  //! Perform the backward pass of the data.
  void Backward();

  //! Compute the gradient of the network for the given input.
  template<typename InputType>
  void Gradient(const InputType& input);

  //! Reset the deterministic flag of the network modules.
  void ResetDeterministic();

  //! Reset the gradients of the network modules.
  void ResetGradients(arma::mat& gradient);

  //! Swap the contents of this network with the given network.
  void Swap(FFN& network);

  //! Instantiated output layer used to evaluate the network.
  OutputLayerType outputLayer;

  //! Instantiated InitializationRule object used to initialize the network
  //! parameters.
  InitializationRuleType initializeRule;

  //! The input width.
  size_t width;

  //! The input height.
  size_t height;

  //! Indicator whether the network parameters have already been set up.
  bool reset;

  //! Locally-stored model modules.
  std::vector<LayerTypes<CustomLayers...> > network;

  //! The matrix of data points (predictors).
  arma::mat predictors;

  //! The matrix of responses to the input data points.
  arma::mat responses;

  //! The matrix of network parameters (the initial point for the
  //! optimization).
  arma::mat parameter;

  //! The number of separable functions (the number of predictor points).
  size_t numFunctions;

  //! The current error, used during the backward pass.
  arma::mat error;

  //! DeltaVisitor exposes the delta parameter of the given module.
  DeltaVisitor deltaVisitor;

  //! OutputParameterVisitor exposes the output parameter of the given module.
  OutputParameterVisitor outputParameterVisitor;

  //! WeightSizeVisitor returns the number of weights of the given module.
  WeightSizeVisitor weightSizeVisitor;

  //! OutputWidthVisitor exposes the OutputWidth() method of the given module.
  OutputWidthVisitor outputWidthVisitor;

  //! OutputHeightVisitor exposes the OutputHeight() method of the given
  //! module.
  OutputHeightVisitor outputHeightVisitor;

  //! LossVisitor exposes the Loss() method of the given module.
  LossVisitor lossVisitor;

  //! ResetVisitor executes the Reset() function.
  ResetVisitor resetVisitor;

  //! DeleteVisitor executes the destructor of the instantiated object.
  DeleteVisitor deleteVisitor;

  //! The current evaluation mode (training or deterministic prediction).
  bool deterministic;

  //! Locally-stored delta object.
  arma::mat delta;

  //! Locally-stored input parameter object.
  arma::mat inputParameter;

  //! Locally-stored output parameter object.
  arma::mat outputParameter;

  //! Locally-stored gradient parameter.
  arma::mat gradient;

  //! This visitor supports the copy constructor of the network modules.
  CopyVisitor<CustomLayers...> copyVisitor;

  // The GAN class should have access to internal members.
  template<
    typename Model,
    typename InitializerType,
    typename NoiseType,
    typename PolicyType
  >
  friend class GAN;
}; // class FFN

} // namespace ann
} // namespace mlpack

// Include implementation.
#include "ffn_impl.hpp"

#endif
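
As a rough usage sketch (not part of this header): an FFN model is typically assembled with Add(), trained with Train(), and queried with Predict(). The dataset contents, layer choices, and layer sizes below are illustrative assumptions, not taken from this file.

#include <mlpack/core.hpp>
#include <mlpack/methods/ann/ffn.hpp>
#include <mlpack/methods/ann/layer/layer.hpp>

using namespace mlpack::ann;

int main()
{
  // Illustrative data: each column is one data point; the labels must match
  // what the output layer (here NegativeLogLikelihood) expects.
  arma::mat trainData(10, 100, arma::fill::randu);
  arma::mat trainLabels = arma::ones<arma::mat>(1, 100);

  // Default template arguments: NegativeLogLikelihood<> output layer and
  // RandomInitialization.
  FFN<> model;
  model.Add<Linear<> >(trainData.n_rows, 16);
  model.Add<ReLULayer<> >();
  model.Add<Linear<> >(16, 2);
  model.Add<LogSoftMax<> >();

  // Train with an explicit ensmallen optimizer; the other Train() overload
  // default-constructs one (ens::RMSProp by default).
  ens::RMSProp optimizer;
  model.Train(trainData, trainLabels, optimizer);

  // Predict responses for a set of predictors (here, the training data).
  arma::mat predictions;
  model.Predict(trainData, predictions);
}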