#ifndef MLPACK_METHODS_ANN_LOSS_FUNCTION_KL_DIVERGENCE_HPP
#define MLPACK_METHODS_ANN_LOSS_FUNCTION_KL_DIVERGENCE_HPP
typename InputDataType = arma::mat,
typename OutputDataType = arma::mat
template<
typename InputType,
typename TargetType>
typename InputType::elem_type
Forward(
const InputType& input,
const TargetType& target);
template<
typename InputType,
typename TargetType,
typename OutputType>
const TargetType& target,
template<
typename Archive>
OutputDataType outputParameter;
#include "kl_divergence_impl.hpp"
The Kullback–Leibler divergence is often used for continuous distributions (direct regression).
KLDivergence(const bool takeMean=false)
Create the Kullback–Leibler Divergence object with the specified parameters.
OutputDataType & OutputParameter() const
Get the output parameter.
void Backward(const InputType &input, const TargetType &target, OutputType &output)
Ordinary feed backward pass of a neural network.
typename InputType::elem_type Forward(const InputType &input, const TargetType &target)
Computes the Kullback–Leibler divergence error function.
bool TakeMean() const
Get the value of takeMean.
OutputDataType & OutputParameter()
Modify the output parameter.
bool & TakeMean()
Modify the value of takeMean.
void serialize(Archive &ar, const unsigned int)
Serialize the loss function.
Linear algebra utility functions, generally performed on matrices or vectors.
The core includes that mlpack expects; standard C++ includes and Armadillo.