
Struct TransformerEncoderLayerOptions#


Struct Documentation#

struct TransformerEncoderLayerOptions#

Options for the TransformerEncoderLayer

Example:

auto options = TransformerEncoderLayerOptions(512, 8).dropout(0.2);
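
The options are consumed by the TransformerEncoderLayer module constructor. A minimal sketch, assuming <torch/torch.h> is included and using namespace torch::nn:

TransformerEncoderLayer layer(TransformerEncoderLayerOptions(512, 8).dropout(0.2));
torch::Tensor src = torch::rand({10, 32, 512});  // (sequence length, batch, d_model)
torch::Tensor out = layer->forward(src);         // output has the same shape as src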

Public Functions

TransformerEncoderLayerOptions(int64_t d_model, int64_t nhead)#
inline auto d_model(const int64_t &new_d_model) -> decltype(*this)#

the number of expected features in the input

inline auto d_model(int64_t &&new_d_model) -> decltype(*this)#
inline const int64_t &d_model() const noexcept#
inline int64_t &d_model() noexcept#
inline auto nhead(const int64_t &new_nhead) -> decltype(*this)#

the number of heads in the multi-head attention model

inline auto nhead(int64_t &&new_nhead) -> decltype(*this)#
inline const int64_t &nhead() const noexcept#
inline int64_t &nhead() noexcept#
inline auto dim_feedforward(const int64_t &new_dim_feedforward) -> decltype(*this)#

the dimension of the feedforward network model, default is 2048

inline auto dim_feedforward(int64_t &&new_dim_feedforward) -> decltype(*this)#
inline const int64_t &dim_feedforward() const noexcept#
inline int64_t &dim_feedforward() noexcept#
inline auto dropout(const double &new_dropout) -> decltype(*this)#

the dropout value, default is 0.1

inline auto dropout(double &&new_dropout) -> decltype(*this)#
inline const double &dropout() const noexcept#
inline double &dropout() noexcept#
inline auto activation(const activation_t &new_activation) -> decltype(*this)#

the activation function of the intermediate layer; can be torch::kReLU, torch::kGELU, or a unary callable (a usage sketch follows this listing).

Default: torch::kReLU

inline auto activation(activation_t &&new_activation) -> decltype(*this)#
inline const activation_t &activation() const noexcept#
inline activation_t &activation() noexcept#
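
A sketch that exercises each builder method above; the lambda activation is illustrative only and assumes <torch/torch.h> is included:

using namespace torch::nn;

// Enum activation (torch::kReLU or torch::kGELU).
auto options = TransformerEncoderLayerOptions(512, 8)
                   .dim_feedforward(1024)
                   .dropout(0.2)
                   .activation(torch::kGELU);

// A unary callable can be supplied instead of an enum value.
auto custom = TransformerEncoderLayerOptions(512, 8)
                  .activation([](const torch::Tensor& x) { return x * torch::sigmoid(x); });

// Either options object can then construct the module.
TransformerEncoderLayer layer(options);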