#ifndef MLPACK_METHODS_ANN_BRNN_HPP
#define MLPACK_METHODS_ANN_BRNN_HPP

#include <mlpack/prereqs.hpp>
#include "rnn.hpp"

#include <ensmallen.hpp>

namespace mlpack {
namespace ann {

/**
 * Implementation of a standard bidirectional recurrent neural network
 * container.
 */
template<typename OutputLayerType = NegativeLogLikelihood<>,
         typename MergeLayerType = Concat<>,
         typename MergeOutputType = LogSoftMax<>,
         typename InitializationRuleType = RandomInitialization,
         typename... CustomLayers>
class BRNN
{
 public:
  //! Convenience typedef for the full network type.
  using NetworkType = BRNN<OutputLayerType,
                           MergeLayerType,
                           MergeOutputType,
                           InitializationRuleType,
                           CustomLayers...>;
  /**
   * Create the BRNN object.
   *
   * @param rho Maximum number of steps to backpropagate through time (BPTT).
   * @param single Predict only the last response of each input sequence.
   * @param outputLayer Output layer used to evaluate the network.
   * @param mergeLayer Layer used to merge the outputs of the forward and
   *     backward RNNs.
   * @param mergeOutput Output layer applied to the merged output.
   * @param initializeRule Rule used to initialize the weight matrix.
   */
  BRNN(const size_t rho,
       const bool single = false,
       OutputLayerType outputLayer = OutputLayerType(),
       MergeLayerType* mergeLayer = new MergeLayerType(),
       MergeOutputType* mergeOutput = new MergeOutputType(),
       InitializationRuleType initializeRule = InitializationRuleType());
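  // A minimal construction sketch (illustrative, not part of the original
  // header): rho bounds how many time steps BPTT unrolls, and the default
  // template arguments give a NegativeLogLikelihood output with Concat
  // merging and LogSoftMax on top.
  //
  //   const size_t rho = 10;
  //   BRNN<> net(rho, true);  // single: one response per input sequence.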
  /**
   * Check whether the optimizer has a MaxIterations() parameter; if it does,
   * warn when its value is less than the number of datapoints in the dataset.
   */
  template<typename OptimizerType>
  typename std::enable_if<
      HasMaxIterations<OptimizerType, size_t&(OptimizerType::*)()>::value,
      void>::type
  WarnMessageMaxIterations(OptimizerType& optimizer, size_t samples) const;

  /**
   * Check whether the optimizer has a MaxIterations() parameter; if it does
   * not, no warning is needed.
   */
  template<typename OptimizerType>
  typename std::enable_if<
      !HasMaxIterations<OptimizerType, size_t&(OptimizerType::*)()>::value,
      void>::type
  WarnMessageMaxIterations(OptimizerType& optimizer, size_t samples) const;
  /**
   * Train the bidirectional recurrent neural network on the given input data
   * using the given optimizer.
   */
  template<typename OptimizerType>
  double Train(arma::cube predictors,
               arma::cube responses,
               OptimizerType& optimizer);
  /**
   * Train the bidirectional recurrent neural network on the given input data,
   * using the default ens::StandardSGD optimizer.
   */
  template<typename OptimizerType = ens::StandardSGD>
  double Train(arma::cube predictors, arma::cube responses);
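  // A training sketch (illustrative; cube shapes and optimizer settings are
  // assumptions): predictors and responses are arma::cube objects laid out as
  // (dimensionality x sequences x time steps), and any ensmallen optimizer
  // can be passed in.
  //
  //   ens::Adam opt(0.01, 32);            // Step size and batch size.
  //   double loss = net.Train(trainData, trainLabels, opt);
  //   net.Train(trainData, trainLabels);  // Or default ens::StandardSGD.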
  /**
   * Predict the responses to a given set of predictors.
   *
   * @param predictors Input predictors.
   * @param results Cube to store the predicted responses in.
   * @param batchSize Number of points to process in one step.
   */
  void Predict(arma::cube predictors,
               arma::cube& results,
               const size_t batchSize = 256);
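  // A prediction sketch (illustrative): results is filled with one response
  // slice per time step for each input sequence.
  //
  //   arma::cube predictions;
  //   net.Predict(testData, predictions);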
  /**
   * Evaluate the bidirectional recurrent neural network with the given
   * parameters.
   */
  double Evaluate(const arma::mat& parameters,
                  const size_t begin,
                  const size_t batchSize,
                  const bool deterministic);
  /**
   * Evaluate the bidirectional recurrent neural network with the given
   * parameters, using the current deterministic setting.
   */
  double Evaluate(const arma::mat& parameters,
                  const size_t begin,
                  const size_t batchSize);
  /**
   * Evaluate the bidirectional recurrent neural network with the given
   * parameters, and also compute the gradient.
   */
  template<typename GradType>
  double EvaluateWithGradient(const arma::mat& parameters,
                              const size_t begin,
                              GradType& gradient,
                              const size_t batchSize);
  /**
   * Evaluate the gradient of the bidirectional recurrent neural network with
   * the given parameters.
   */
  void Gradient(const arma::mat& parameters,
                const size_t begin,
                arma::mat& gradient,
                const size_t batchSize);
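  // Note (not in the original header): Evaluate(), EvaluateWithGradient(),
  // and Gradient(), together with NumFunctions() and Shuffle(), implement the
  // separable function API that ensmallen optimizers expect; this is what
  // lets Train() accept any ensmallen OptimizerType. They are normally called
  // by the optimizer rather than by users, roughly as in this sketch:
  //
  //   for (size_t i = 0; i < net.NumFunctions(); i += batchSize)
  //     net.Gradient(net.Parameters(), i, gradient, batchSize);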
  /**
   * Add a new layer to the model; the arguments are forwarded to the layer's
   * constructor.
   */
  template<class LayerType, class... Args>
  void Add(Args... args);
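  // A layer-stacking sketch (illustrative; the layer choices, inputSize, and
  // outputSize are assumptions):
  //
  //   net.Add<Linear<>>(inputSize, 10);
  //   net.Add<LSTM<>>(10, 10, rho);
  //   net.Add<Linear<>>(10, outputSize);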
  //! Return the maximum length of backpropagation through time.
  const size_t& Rho() const { return rho; }
  //! Modify the maximum length of backpropagation through time.
  size_t& Rho() { return rho; }

  //! Get the matrix of responses to the input data points.
  const arma::cube& Responses() const { return responses; }
  //! Modify the matrix of responses to the input data points.
  arma::cube& Responses() { return responses; }

  //! Get the matrix of data points (predictors).
  const arma::cube& Predictors() const { return predictors; }
  //! Modify the matrix of data points (predictors).
  arma::cube& Predictors() { return predictors; }

  //! Reset the state of the network.
  void Reset();

  //! Reset the module information (weights/parameters).
  void ResetParameters();

  //! Return the number of separable functions (the number of predictor
  //! points).
  size_t NumFunctions() const;

  //! Return the initial point for the optimization.
  const arma::mat& Parameters() const;
  //! Modify the initial point for the optimization.
  arma::mat& Parameters();

  //! Shuffle the order of function visitation.
  void Shuffle();
  //! Serialize the model.
  template<typename Archive>
  void serialize(Archive& ar, const uint32_t /* version */);
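  // A serialization sketch (illustrative; mlpack's data::Save and data::Load
  // helpers drive serialize() through an archive):
  //
  //   mlpack::data::Save("brnn.bin", "brnn", net);
  //   mlpack::data::Load("brnn.bin", "brnn", net);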
 private:
  //! Reset the deterministic flag for each layer.
  void ResetDeterministic();

  //! Number of steps to backpropagate through time (BPTT).
  size_t rho;

  //! Instantiated output layer used to evaluate the network.
  OutputLayerType outputLayer;

  //! Only predict the last response of each input sequence.
  bool single;

  //! Instantiated InitializationRule object used to initialize the network
  //! parameters.
  InitializationRuleType initializeRule;

  //! The matrix of data points (predictors).
  arma::cube predictors;

  //! The matrix of responses to the input data points.
  arma::cube responses;

  //! Stored output parameters of the forward RNN, one entry per time step.
  std::vector<arma::mat> forwardRNNOutputParameter;

  //! Stored output parameters of the backward RNN, one entry per time step.
  std::vector<arma::mat> backwardRNNOutputParameter;

  //! Gradient accumulated over the forward pass.
  arma::mat forwardGradient;

  //! Gradient accumulated over the backward pass.
  arma::mat backwardGradient;

  //! Combined gradient of the forward and backward RNNs.
  arma::mat totalGradient;

  //! The RNN processing the sequence in the forward direction.
  RNN<OutputLayerType, InitializationRuleType, CustomLayers...> forwardRNN;

  //! The RNN processing the sequence in the backward direction.
  RNN<OutputLayerType, InitializationRuleType, CustomLayers...> backwardRNN;
}; // class BRNN
} // namespace ann
} // namespace mlpack

// Include implementation.
#include "brnn_impl.hpp"

#endif