#ifndef MLPACK_METHODS_ANN_FFN_HPP
#define MLPACK_METHODS_ANN_FFN_HPP

#include <ensmallen.hpp>

/**
 * Implementation of a standard feed forward network.
 */
template<
    typename OutputLayerType = NegativeLogLikelihood<>,
    typename InitializationRuleType = RandomInitialization,
    typename... CustomLayers
>
class FFN
{
 public:
  //! Create the FFN object.
  FFN(OutputLayerType outputLayer = OutputLayerType(),
      InitializationRuleType initializeRule = InitializationRuleType());
  /**
   * Check whether the optimizer exposes a MaxIterations() parameter and, if it
   * does, warn when its value is smaller than the number of data points.
   */
  template<typename OptimizerType>
  typename std::enable_if<
      HasMaxIterations<OptimizerType, size_t&(OptimizerType::*)()>
      ::value, void>::type
  WarnMessageMaxIterations(OptimizerType& optimizer, size_t samples) const;

  //! Overload selected when the optimizer does not expose MaxIterations().
  template<typename OptimizerType>
  typename std::enable_if<
      !HasMaxIterations<OptimizerType, size_t&(OptimizerType::*)()>
      ::value, void>::type
  WarnMessageMaxIterations(OptimizerType& optimizer, size_t samples) const;
  /**
   * Train the feedforward network on the given input data using the given
   * optimizer; trailing arguments are forwarded as ensmallen callbacks.
   */
  template<typename OptimizerType, typename... CallbackTypes>
  double Train(arma::mat predictors,
               arma::mat responses,
               OptimizerType& optimizer,
               CallbackTypes&&... callbacks);

  /**
   * Train the feedforward network on the given input data, constructing a
   * default optimizer (ens::RMSProp) internally.
   */
  template<typename OptimizerType = ens::RMSProp, typename... CallbackTypes>
  double Train(arma::mat predictors,
               arma::mat responses,
               CallbackTypes&&... callbacks);
  //! Predict the responses to a given set of predictors.
  void Predict(arma::mat predictors, arma::mat& results);
  //! Evaluate the feedforward network with the given predictors and responses.
  template<typename PredictorsType, typename ResponsesType>
  double Evaluate(const PredictorsType& predictors,
                  const ResponsesType& responses);

  //! Evaluate the feedforward network with the given parameters.
  double Evaluate(const arma::mat& parameters);

  //! Evaluate the network on the batch of batchSize points starting at begin.
  double Evaluate(const arma::mat& parameters,
                  const size_t begin,
                  const size_t batchSize,
                  const bool deterministic);

  double Evaluate(const arma::mat& parameters,
                  const size_t begin,
                  const size_t batchSize);

  //! Evaluate the network and compute the parameter gradient in a single pass.
  template<typename GradType>
  double EvaluateWithGradient(const arma::mat& parameters, GradType& gradient);

  template<typename GradType>
  double EvaluateWithGradient(const arma::mat& parameters,
                              const size_t begin,
                              GradType& gradient,
                              const size_t batchSize);

  /**
   * Evaluate the gradient of the feedforward network with the given
   * parameters, with respect to only the batch of points starting at begin.
   */
  void Gradient(const arma::mat& parameters,
                const size_t begin,
                arma::mat& gradient,
                const size_t batchSize);
  /**
   * Add a new module to the model; the layer is constructed in place from the
   * given arguments.
   */
  template<class LayerType, class... Args>
  void Add(Args... args) { network.push_back(new LayerType(args...)); }

  //! Get the matrix of responses to the input data points.
  const arma::mat& Responses() const { return responses; }
  template<typename Archive>
  void serialize(Archive& ar, const uint32_t /* version */);
  //! Perform the forward pass of the data in real batch mode.
  template<typename PredictorsType, typename ResponsesType>
  void Forward(const PredictorsType& inputs, ResponsesType& results);

  /**
   * Perform a partial forward pass of the data, running only the layers from
   * index begin through index end.
   */
  template<typename PredictorsType, typename ResponsesType>
  void Forward(const PredictorsType& inputs,
               ResponsesType& results,
               const size_t begin,
               const size_t end);

  /**
   * Perform the backward pass of the data in real batch mode, returning the
   * loss and filling gradients with the parameter gradient.
   */
  template<typename PredictorsType,
           typename TargetsType,
           typename GradientsType>
  double Backward(const PredictorsType& inputs,
                  const TargetsType& targets,
                  GradientsType& gradients);
  //! Forward pass of the given input through the whole network (internal
  //! helper).
  template<typename InputType>
  void Forward(const InputType& input);

  //! Reset the stored training data (predictors and responses).
  void ResetData(arma::mat predictors, arma::mat responses);

  //! Compute the gradient for the given input (internal helper).
  template<typename InputType>
  void Gradient(const InputType& input);

  //! Reset the deterministic flag of every module in the network.
  void ResetDeterministic();

  //! Reset the gradients of all modules in the network.
  void ResetGradients(arma::mat& gradient);

  //! Swap the content of this network with the given network.
  void Swap(FFN& network);
  //! Instantiated output layer used to evaluate the network.
  OutputLayerType outputLayer;

  //! Instantiated InitializationRule object used to initialize the network
  //! parameters.
  InitializationRuleType initializeRule;

  //! The network modules.
  std::vector<LayerTypes<CustomLayers...> > network;

  //! The matrix of data points (predictors).
  arma::mat predictors;

  //! The matrix of responses to the input data points.
  arma::mat responses;

  //! The current input parameter.
  arma::mat inputParameter;

  //! The current output parameter.
  arma::mat outputParameter;
  //! The GAN class is declared a friend so that it can access the internal
  //! members of the network.
  template<typename Model,
           typename InitializerType,
           typename NoiseType,
           typename PolicyType>
  friend class GAN;
}; // class FFN
526 #include "ffn_impl.hpp" std::vector< LayerTypes< CustomLayers... > > & Model()
Modify the network model.
DeleteVisitor executes the destructor of the instantiated object.
void Gradient(const arma::mat ¶meters, const size_t begin, arma::mat &gradient, const size_t batchSize)
Evaluate the gradient of the feedforward network with the given parameters, and with respect to only the batch of data points starting at index begin.
std::enable_if< HasMaxIterations< OptimizerType, size_t &(OptimizerType::*)()>::value, void >::type WarnMessageMaxIterations(OptimizerType &optimizer, size_t samples) const
Check whether the optimizer has a MaxIterations() parameter; if it does, check whether its value is less than the number of data points and warn the user if so.
OutputHeightVisitor exposes the OutputHeight() method of the given module.
arma::mat & Responses()
Modify the matrix of responses to the input data points.
size_t NumFunctions() const
Return the number of separable functions (the number of predictor points).
void Predict(arma::mat predictors, arma::mat &results)
Predict the responses to a given set of predictors.
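A brief usage sketch (not taken from the mlpack documentation): testX is a placeholder matrix with one data point per column, and model is assumed to be an already-trained classifier whose last layer is LogSoftMax.
arma::mat testX;             // filled elsewhere; one column per data point
arma::mat probabilities;
model.Predict(testX, probabilities);
// The predicted class of point i is the row index of the largest output in
// column i of the result.
arma::urowvec predictedLabels = arma::index_max(probabilities, 0);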
Linear algebra utility functions, generally performed on matrices or vectors.
LossVisitor exposes the Loss() method of the given module.
This visitor supports the copy constructor for the neural network modules.
const arma::mat & Predictors() const
Get the matrix of data points (predictors).
The core includes that mlpack expects; standard C++ includes and Armadillo.
WeightSizeVisitor returns the number of weights of the given module.
FFN & operator=(FFN)
Copy/move assignment operator.
void Shuffle()
Shuffle the order of function visitation.
~FFN()
Destructor to release allocated memory.
const arma::mat & Responses() const
Get the matrix of responses to the input data points.
void Forward(const PredictorsType &inputs, ResponsesType &results)
Perform the forward pass of the data in real batch mode.
ResetVisitor executes the Reset() function.
double EvaluateWithGradient(const arma::mat ¶meters, GradType &gradient)
Evaluate the feedforward network with the given parameters.
OutputParameterVisitor exposes the output parameter of the given module.
void Add(LayerTypes< CustomLayers... > layer)
Add a new module to the model.
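Layers are more commonly added with the variadic Add<LayerType>(args...) overload shown in the header above. A minimal sketch of assembling a 10-input, 3-class classifier (the layer sizes are illustrative, not prescribed by the API):
FFN<NegativeLogLikelihood<>, RandomInitialization> model;
model.Add<Linear<> >(10, 64);   // 10 inputs -> 64 hidden units
model.Add<ReLULayer<> >();      // element-wise ReLU activation
model.Add<Linear<> >(64, 3);    // 64 hidden units -> 3 classes
model.Add<LogSoftMax<> >();     // log-probabilities for NegativeLogLikelihood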
arma::mat & Parameters()
Modify the initial point for the optimization.
void ResetParameters()
Reset the module information (weights/parameters).
arma::mat & Predictors()
Modify the matrix of data points (predictors).
const arma::mat & Parameters() const
Return the initial point for the optimization.
DeltaVisitor exposes the delta parameter of the given module.
The implementation of the standard GAN module.
const std::vector< LayerTypes< CustomLayers... > > & Model() const
Get the network model.
Implementation of a standard feed forward network.
boost::variant< AdaptiveMaxPooling< arma::mat, arma::mat > *, AdaptiveMeanPooling< arma::mat, arma::mat > *, Add< arma::mat, arma::mat > *, AddMerge< arma::mat, arma::mat > *, AlphaDropout< arma::mat, arma::mat > *, AtrousConvolution< NaiveConvolution< ValidConvolution >, NaiveConvolution< FullConvolution >, NaiveConvolution< ValidConvolution >, arma::mat, arma::mat > *, BaseLayer< LogisticFunction, arma::mat, arma::mat > *, BaseLayer< IdentityFunction, arma::mat, arma::mat > *, BaseLayer< TanhFunction, arma::mat, arma::mat > *, BaseLayer< SoftplusFunction, arma::mat, arma::mat > *, BaseLayer< RectifierFunction, arma::mat, arma::mat > *, BatchNorm< arma::mat, arma::mat > *, BilinearInterpolation< arma::mat, arma::mat > *, CELU< arma::mat, arma::mat > *, Concat< arma::mat, arma::mat > *, Concatenate< arma::mat, arma::mat > *, ConcatPerformance< NegativeLogLikelihood< arma::mat, arma::mat >, arma::mat, arma::mat > *, Constant< arma::mat, arma::mat > *, Convolution< NaiveConvolution< ValidConvolution >, NaiveConvolution< FullConvolution >, NaiveConvolution< ValidConvolution >, arma::mat, arma::mat > *, CReLU< arma::mat, arma::mat > *, DropConnect< arma::mat, arma::mat > *, Dropout< arma::mat, arma::mat > *, ELU< arma::mat, arma::mat > *, FastLSTM< arma::mat, arma::mat > *, GRU< arma::mat, arma::mat > *, HardTanH< arma::mat, arma::mat > *, Join< arma::mat, arma::mat > *, LayerNorm< arma::mat, arma::mat > *, LeakyReLU< arma::mat, arma::mat > *, Linear< arma::mat, arma::mat, NoRegularizer > *, LinearNoBias< arma::mat, arma::mat, NoRegularizer > *, LogSoftMax< arma::mat, arma::mat > *, Lookup< arma::mat, arma::mat > *, LSTM< arma::mat, arma::mat > *, MaxPooling< arma::mat, arma::mat > *, MeanPooling< arma::mat, arma::mat > *, MiniBatchDiscrimination< arma::mat, arma::mat > *, MultiplyConstant< arma::mat, arma::mat > *, MultiplyMerge< arma::mat, arma::mat > *, NegativeLogLikelihood< arma::mat, arma::mat > *, NoisyLinear< arma::mat, arma::mat > *, Padding< arma::mat, arma::mat > *, PReLU< arma::mat, arma::mat > *, Sequential< arma::mat, arma::mat, false > *, Sequential< arma::mat, arma::mat, true > *, Softmax< arma::mat, arma::mat > *, TransposedConvolution< NaiveConvolution< ValidConvolution >, NaiveConvolution< ValidConvolution >, NaiveConvolution< ValidConvolution >, arma::mat, arma::mat > *, WeightNorm< arma::mat, arma::mat > *, MoreTypes, CustomLayers *... > LayerTypes
OutputWidthVisitor exposes the OutputWidth() method of the given module.
double Backward(const PredictorsType &inputs, const TargetsType &targets, GradientsType &gradients)
Perform the backward pass of the data in real batch mode.
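Forward() and Backward() can be combined into a custom training loop. A hedged sketch, assuming the parameters have already been initialized (for example by ResetParameters() or an earlier Train() call) and that batchX/batchY are placeholder batches:
arma::mat outputs, gradients;
model.Forward(batchX, outputs);                          // forward pass
const double loss = model.Backward(batchX, batchY, gradients);
// gradients now holds the derivative of the loss with respect to the
// parameters; a plain gradient-descent step could then be:
model.Parameters() -= 0.01 * gradients;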
void serialize(Archive &ar, const uint32_t)
Serialize the model.
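serialize() is what mlpack's data::Save() and data::Load() helpers invoke; a sketch of persisting and restoring a model (the file name and object name are arbitrary):
mlpack::data::Save("ffn.bin", "model", model);

FFN<> restored;
mlpack::data::Load("ffn.bin", "model", restored);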
double Evaluate(const PredictorsType &predictors, const ResponsesType &responses)
Evaluate the feedforward network with the given predictors and responses.
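A one-line sketch of measuring held-out loss with this overload (validX/validY are placeholder matrices with one point per column):
const double validationLoss = model.Evaluate(validX, validY);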
FFN(OutputLayerType outputLayer=OutputLayerType(), InitializationRuleType initializeRule=InitializationRuleType())
Create the FFN object.
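As a hedged illustration (assuming the mlpack::ann namespace is in scope), the two template parameters can be swapped out to change the loss and the weight initialization, for example for a regression network:
FFN<MeanSquaredError<>, GlorotInitialization> regressor;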
double Train(arma::mat predictors, arma::mat responses, OptimizerType &optimizer, CallbackTypes &&... callbacks)
Train the feedforward network on the given input data using the given optimizer.
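A sketch of training with a caller-supplied ensmallen optimizer and callback; trainX/trainY are placeholder training matrices and the hyperparameters are illustrative only:
ens::Adam opt(0.001,    // step size
              32,       // batch size
              0.9, 0.999, 1e-8,
              100000,   // maximum number of iterations
              1e-5,     // tolerance
              true);    // shuffle the data each epoch
const double finalLoss = model.Train(trainX, trainY, opt, ens::ProgressBar());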