#include <boost/variant/static_visitor.hpp>

// Excerpt of the LayerNameVisitor implementation: each LayerString()
// overload maps a layer type to its lowercase string name.
std::string LayerString(AdaptiveMaxPooling<>*) const { return "adaptivemaxpooling"; }
std::string LayerString(AdaptiveMeanPooling<>*) const { return "adaptivemeanpooling"; }
std::string LayerString(AtrousConvolution<>*) const { return "atrousconvolution"; }
std::string LayerString(AlphaDropout<>*) const { return "alphadropout"; }
std::string LayerString(Convolution<>*) const { return "convolution"; }
std::string LayerString(DropConnect<>*) const { return "dropconnect"; }
std::string LayerString(FlexibleReLU<>*) const { return "flexiblerelu"; }
std::string LayerString(LinearNoBias<>*) const { return "linearnobias"; }
std::string LayerString(NoisyLinear<>*) const { return "noisylinear"; }
std::string LayerString(MeanPooling<>*) const { return "meanpooling"; }
std::string LayerString(MultiplyConstant<>*) const { return "multiplyconstant"; }
std::string LayerString(TransposedConvolution<>*) const { return "transposedconvolution"; }

// Fallback for layer types without a dedicated overload.
template<typename T>
std::string LayerString(T*) const { return "unsupported"; }

// Recurse into the nested MoreTypes variant.
std::string operator()(MoreTypes layer) const { return layer.apply_visitor(*this); }

// Dispatch a layer pointer to the matching LayerString() overload.
template<typename LayerType>
std::string operator()(LayerType* layer) const { return LayerString(layer); }
Implementation of a visitor class that returns the name of a given layer as a string.
std::string LayerString(NoisyLinear<> *) const
Return the name of the given layer of type NoisyLinear as a string.
std::string LayerString(HardTanH<> *) const
Return the name of the given layer of type HardTanH as a string.
std::string LayerString(MaxPooling<> *) const
Return the name of the given layer of type MaxPooling as a string.
std::string LayerString(Glimpse<> *) const
Return the name of the given layer of type Glimpse as a string.
std::string LayerString(BatchNorm<> *) const
Return the name of the given layer of type BatchNorm as a string.
LayerNameVisitor()
Create the LayerNameVisitor object.
std::string LayerString(Convolution<> *) const
Return the name of the given layer of type Convolution as a string.
std::string LayerString(CReLU<> *) const
Return the name of the given layer of type CReLU as a string.
std::string LayerString(Highway<> *) const
Return the name of the given layer of type Highway as a string.
std::string LayerString(Dropout<> *) const
Return the name of the given layer of type Dropout as a string.
std::string LayerString(Constant<> *) const
Return the name of the given layer of type Constant as a string.
std::string LayerString(LinearNoBias<> *) const
Return the name of the given layer of type LinearNoBias as a string.
std::string LayerString(ReLULayer<> *) const
Return the name of the given layer of type ReLULayer as a string.
std::string operator()(MoreTypes layer) const
Overload of the function call operator: recurses into the nested MoreTypes variant via apply_visitor.
std::string LayerString(PReLU<> *) const
Return the name of the given layer of type PReLU as a string.
std::string LayerString(T *) const
Fallback overload: return "unsupported" for any layer type without a dedicated LayerString() overload.
std::string LayerString(TransposedConvolution<> *) const
Return the name of the given layer of type TransposedConvolution as a string.
std::string LayerString(LeakyReLU<> *) const
Return the name of the given layer of type LeakyReLU as a string.
std::string operator()(LayerType *layer) const
Overload of the function call operator: dispatches the layer pointer to the matching LayerString() overload.
std::string LayerString(IdentityLayer<> *) const
Return the name of the given layer of type IdentityLayer as a string.
std::string LayerString(LayerNorm<> *) const
Return the name of the given layer of type LayerNorm as a string.
std::string LayerString(DropConnect<> *) const
Return the name of the given layer of type DropConnect as a string.
std::string LayerString(GRU<> *) const
Return the name of the given layer of type GRU as a string.
std::string LayerString(MultiplyConstant<> *) const
Return the name of the given layer of type MultiplyConstant as a string.
std::string LayerString(SigmoidLayer<> *) const
Return the name of the given layer of type SigmoidLayer as a string.
std::string LayerString(FastLSTM<> *) const
Return the name of the given layer of type FastLSTM as a string.
std::string LayerString(AlphaDropout<> *) const
Return the name of the given layer of type AlphaDropout as a string.
std::string LayerString(Linear<> *) const
Return the name of the given layer of type Linear as a string.
std::string LayerString(FlexibleReLU<> *) const
Return the name of the given layer of type FlexibleReLU as a string.
std::string LayerString(WeightNorm<> *) const
Return the name of the given layer of type WeightNorm as a string.
std::string LayerString(ELU<> *) const
Return the name of the given layer of type ELU as a string.
std::string LayerString(MeanPooling<> *) const
Return the name of the given layer of type MeanPooling as a string.
std::string LayerString(AdaptiveMaxPooling<> *) const
Return the name of the given layer of type AdaptiveMaxPooling as a string.
std::string LayerString(AtrousConvolution<> *) const
Return the name of the given layer of type AtrousConvolution as a string.
std::string LayerString(LSTM<> *) const
Return the name of the given layer of type LSTM as a string.
std::string LayerString(TanHLayer<> *) const
Return the name of the given layer of type TanHLayer as a string.
std::string LayerString(LogSoftMax<> *) const
Return the name of the given layer of type LogSoftMax as a string.
std::string LayerString(AdaptiveMeanPooling<> *) const
Return the name of the given layer of type AdaptiveMeanPooling as a string.
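A minimal usage sketch follows. It assumes the mlpack 3.x ann module: the LayerTypes<> variant, the Linear<> layer constructor, and the header paths come from that module rather than from this page, so treat them as assumptions.

#include <iostream>
#include <string>

#include <boost/variant.hpp>
#include <mlpack/methods/ann/layer/layer.hpp>
#include <mlpack/methods/ann/layer_names.hpp>

using namespace mlpack::ann;

int main()
{
  // Hold a Linear layer (10 inputs, 5 outputs) in the layer variant.
  LayerTypes<> layer = new Linear<>(10, 5);

  // apply_visitor() routes the stored pointer to operator()(Linear<>*),
  // which forwards to LayerString(Linear<>*) and yields "linear".
  const std::string name = boost::apply_visitor(LayerNameVisitor(), layer);
  std::cout << name << std::endl;  // prints: linear

  delete boost::get<Linear<>*>(layer);
}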
Implementation of the AdaptiveMaxPooling layer.
Implementation of the AdaptiveMeanPooling layer.
The alpha-dropout layer is a regularizer that randomly, with probability 'ratio', sets input values to alphaDash.
Implementation of the Atrous Convolution class.
Implementation of the base layer.
Declaration of the Batch Normalization layer class.
A concatenated ReLU has two outputs, one ReLU and one negative ReLU, concatenated together.
Implementation of the constant layer.
Implementation of the Convolution class.
The DropConnect layer is a regularizer that randomly, with probability 'ratio', sets the connection values of a fully connected layer to zero.
The dropout layer is a regularizer that randomly, with probability 'ratio', sets input values to zero and scales the remaining elements by a factor of 1 / (1 - ratio).
The ELU activation function, defined by f(x) = x for x > 0 and f(x) = alpha * (exp(x) - 1) otherwise.
An implementation of the Fast LSTM network layer, a faster variant of the standard LSTM layer.
The FlexibleReLU activation function, defined by f(x) = max(0, x) + alpha, where alpha is a trainable parameter.
An implementation of a GRU network layer.
The glimpse layer returns a retina-like representation (down-scaled cropped images) of increasing scale around a given location in a given image.
The Hard Tanh activation function, defined by f(x) = maxValue for x > maxValue, f(x) = minValue for x < minValue, and f(x) = x otherwise.
Implementation of the Highway layer.
Implementation of the LSTM module class.
Declaration of the Layer Normalization class.
The LeakyReLU activation function, defined by f(x) = max(x, alpha * x) for a small fixed alpha.
Implementation of the LinearNoBias class.
Implementation of the Linear layer class.
Implementation of the log softmax layer.
Implementation of the MaxPooling layer.
Implementation of the MeanPooling layer.
Implementation of the multiply constant layer.
Implementation of the NoisyLinear layer class.
The PReLU activation function, defined by f(x) = max(x, alpha * x), where alpha is a trainable parameter.
Implementation of the Transposed Convolution class.
Declaration of the WeightNorm layer class.
Include all of the base components required to write mlpack methods, and the main mlpack Doxygen documentation.
Artificial Neural Network.
typedef boost::variant<
    Linear3D<arma::mat, arma::mat, NoRegularizer>*,
    Glimpse<arma::mat, arma::mat>*,
    Highway<arma::mat, arma::mat>*,
    MultiheadAttention<arma::mat, arma::mat, NoRegularizer>*,
    Recurrent<arma::mat, arma::mat>*,
    RecurrentAttention<arma::mat, arma::mat>*,
    ReinforceNormal<arma::mat, arma::mat>*,
    Reparametrization<arma::mat, arma::mat>*,
    Select<arma::mat, arma::mat>*,
    Sequential<arma::mat, arma::mat, false>*,
    Sequential<arma::mat, arma::mat, true>*,
    Subview<arma::mat, arma::mat>*,
    VRClassReward<arma::mat, arma::mat>*,
    VirtualBatchNorm<arma::mat, arma::mat>*,
    RBF<arma::mat, arma::mat, GaussianFunction>*,
    BaseLayer<GaussianFunction, arma::mat, arma::mat>*,
    PositionalEncoding<arma::mat, arma::mat>*
> MoreTypes;
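boost::variant accepts only a limited number of alternatives, so the layer list is split in two: the main LayerTypes variant carries MoreTypes as one of its entries, and operator()(MoreTypes) recurses so that names resolve transparently for layers stored in either variant. Below is a minimal self-contained sketch of this nested-variant dispatch pattern; the types A, B, and C are hypothetical stand-ins, not mlpack layers.

#include <iostream>
#include <string>

#include <boost/variant.hpp>

struct A { };
struct B { };
struct C { };

// Inner variant holding the overflow types (plays the role of MoreTypes).
typedef boost::variant<B*, C*> More;

// Outer variant: its own alternatives plus the inner variant as one entry.
typedef boost::variant<A*, More> Types;

struct NameVisitor : public boost::static_visitor<std::string>
{
  std::string operator()(A*) const { return "a"; }
  std::string operator()(B*) const { return "b"; }
  std::string operator()(C*) const { return "c"; }

  // Mirrors operator()(MoreTypes): recurse into the nested variant.
  std::string operator()(More more) const
  {
    return boost::apply_visitor(*this, more);
  }
};

int main()
{
  B b;
  Types t = More(&b);  // a type that only the inner variant can hold
  std::cout << boost::apply_visitor(NameVisitor(), t) << std::endl;  // prints: b
}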