#ifndef MLPACK_METHODS_ANN_LAYER_MULTIHEAD_ATTENTION_HPP
#define MLPACK_METHODS_ANN_LAYER_MULTIHEAD_ATTENTION_HPP
typename InputDataType = arma::mat,
typename OutputDataType = arma::mat,
typename RegularizerType = NoRegularizer
const size_t srcSeqLen,
const size_t embedDim,
const size_t numHeads);
template<typename eT>
void Forward(const arma::Mat<eT>& input, arma::Mat<eT>& output);
template<typename eT>
void Backward(const arma::Mat<eT>& /* input */,
              const arma::Mat<eT>& gy,
              arma::Mat<eT>& g);
template<typename eT>
void Gradient(const arma::Mat<eT>& input,
              const arma::Mat<eT>& error,
              arma::Mat<eT>& gradient);
template<typename Archive>
void serialize(Archive& ar, const unsigned int /* version */);
165 OutputDataType
const&
Delta()
const {
return delta; }
167 OutputDataType&
Delta() {
return delta; }
170 OutputDataType
const&
Gradient()
const {
return grad; }
181 typedef typename OutputDataType::elem_type ElemType;
199 OutputDataType attnMask;
202 OutputDataType keyPaddingMask;
205 OutputDataType queryWt;
208 OutputDataType keyWt;
211 OutputDataType valueWt;
214 OutputDataType outWt;
217 OutputDataType qBias;
220 OutputDataType kBias;
223 OutputDataType vBias;
226 OutputDataType outBias;
229 OutputDataType weights;
232 arma::Cube<ElemType> qProj;
235 arma::Cube<ElemType> kProj;
238 arma::Cube<ElemType> vProj;
241 arma::Cube<ElemType> scores;
244 arma::Cube<ElemType> attnOut;
250 OutputDataType delta;
256 OutputDataType outputParameter;
259 RegularizerType regularizer;
#include "multihead_attention_impl.hpp"
Multihead Attention allows the model to jointly attend to information from different representation subspaces at different positions.
size_t & SrcSeqLen()
Modify the source sequence length.
OutputDataType const & Delta() const
Get the delta.
size_t & NumHeads()
Modify the number of attention heads.
size_t & EmbedDim()
Modify the embedding dimension.
OutputDataType const & Parameters() const
Get the parameters.
size_t NumHeads() const
Get the number of attention heads.
size_t TgtSeqLen() const
Get the target sequence length.
void Reset()
Reset the layer parameters.
size_t & TgtSeqLen()
Modify the target sequence length.
void Forward(const arma::Mat< eT > &input, arma::Mat< eT > &output)
Ordinary feed forward pass of a neural network, evaluating the function f(x) by propagating the activity forward through f.
OutputDataType const & OutputParameter() const
Get the output parameter.
MultiheadAttention()
Default constructor.
OutputDataType const & KeyPaddingMask() const
Get Key Padding Mask.
void Gradient(const arma::Mat< eT > &input, const arma::Mat< eT > &error, arma::Mat< eT > &gradient)
Calculate the gradient using the output delta and the input activation.
MultiheadAttention(const size_t tgtSeqLen, const size_t srcSeqLen, const size_t embedDim, const size_t numHeads)
Create the MultiheadAttention object using the specified modules.
OutputDataType const & Gradient() const
Get the gradient.
OutputDataType & AttentionMask()
Modify the two dimensional Attention Mask.
OutputDataType & Gradient()
Modify the gradient.
size_t EmbedDim() const
Get the embedding dimension.
OutputDataType const & AttentionMask() const
Get the two dimensional Attention Mask.
void Backward(const arma::Mat< eT > &, const arma::Mat< eT > &gy, arma::Mat< eT > &g)
Ordinary feed backward pass of a neural network, calculating the function f(x) by propagating x backwards through f.
OutputDataType & KeyPaddingMask()
Modify the Key Padding Mask.
OutputDataType & OutputParameter()
Modify the output parameter.
size_t SrcSeqLen() const
Get the source sequence length.
void serialize(Archive &ar, const unsigned int)
Serialize the layer.
OutputDataType & Parameters()
Modify the parameters.
OutputDataType & Delta()
Modify the delta.
Implementation of the Softmax layer.
Linear algebra utility functions, generally performed on matrices or vectors.
The core includes that mlpack expects; standard C++ includes and Armadillo.