13 #ifndef MLPACK_METHODS_ANN_LOSS_FUNCTION_KL_DIVERGENCE_HPP 14 #define MLPACK_METHODS_ANN_LOSS_FUNCTION_KL_DIVERGENCE_HPP 42 typename InputDataType = arma::mat,
43 typename OutputDataType = arma::mat
63 template<
typename PredictionType,
typename TargetType>
64 typename PredictionType::elem_type
Forward(
const PredictionType& prediction,
65 const TargetType& target);
75 template<
typename PredictionType,
typename TargetType,
typename LossType>
76 void Backward(
const PredictionType& prediction,
77 const TargetType& target,
93 template<
typename Archive>
94 void serialize(Archive& ar,
const uint32_t );
98 OutputDataType outputParameter;
108 #include "kl_divergence_impl.hpp" Linear algebra utility functions, generally performed on matrices or vectors.
The core includes that mlpack expects: standard C++ includes and Armadillo.
bool TakeMean() const
Get the value of takeMean.
OutputDataType & OutputParameter()
Modify the output parameter.
void Backward(const PredictionType &prediction, const TargetType &target, LossType &loss)
Ordinary feed backward pass of a neural network.
bool & TakeMean()
Modify the value of takeMean.
KLDivergence(const bool takeMean=false)
Create the Kullback–Leibler Divergence object with the specified parameters.
void serialize(Archive &ar, const uint32_t)
Serialize the loss function.
PredictionType::elem_type Forward(const PredictionType &prediction, const TargetType &target)
Computes the Kullback–Leibler divergence error function.
The Kullback–Leibler divergence is often used for continuous distributions (direct regression)...
OutputDataType & OutputParameter() const
Get the output parameter.