mlpack  3.1.1
 All Classes Namespaces Files Functions Variables Typedefs Enumerations Enumerator Friends Macros Pages
layer_types.hpp
Go to the documentation of this file.
1 
12 #ifndef MLPACK_METHODS_ANN_LAYER_LAYER_TYPES_HPP
13 #define MLPACK_METHODS_ANN_LAYER_LAYER_TYPES_HPP
14 
15 #include <boost/variant.hpp>
16 
17 // Layer modules.
48 
49 // Convolution modules.
53 
54 // Regularizers.
56 
57 // Loss function modules.
59 
60 namespace mlpack {
61 namespace ann {
62 
63 template<typename InputDataType, typename OutputDataType> class BatchNorm;
64 template<typename InputDataType, typename OutputDataType> class DropConnect;
65 template<typename InputDataType, typename OutputDataType> class Glimpse;
66 template<typename InputDataType, typename OutputDataType> class LayerNorm;
67 template<typename InputDataType, typename OutputDataType> class LSTM;
68 template<typename InputDataType, typename OutputDataType> class GRU;
69 template<typename InputDataType, typename OutputDataType> class FastLSTM;
70 template<typename InputDataType, typename OutputDataType> class VRClassReward;
71 template<typename InputDataType, typename OutputDataType> class Concatenate;
72 template<typename InputDataType, typename OutputDataType> class Padding;
73 
74 template<typename InputDataType,
75  typename OutputDataType,
76  typename RegularizerType>
77 class Linear;
78 
79 template<typename InputDataType,
80  typename OutputDataType,
81  typename RegularizerType>
83 
84 template<typename InputDataType,
85  typename OutputDataType
86 >
88 
89 template<typename InputDataType,
90  typename OutputDataType
91 >
93 
94 template<typename InputDataType,
95  typename OutputDataType
96 >
98 
99 template<typename InputDataType,
100  typename OutputDataType,
101  typename... CustomLayers
102 >
103 class AddMerge;
104 
105 template<typename InputDataType,
106  typename OutputDataType,
107  bool residual,
108  typename... CustomLayers
109 >
111 
112 template<typename InputDataType,
113  typename OutputDataType,
114  typename... CustomLayers
115 >
116 class Highway;
117 
118 template<typename InputDataType,
119  typename OutputDataType,
120  typename... CustomLayers
121 >
122 class Recurrent;
123 
124 template<typename InputDataType,
125  typename OutputDataType,
126  typename... CustomLayers
127 >
128 class Concat;
129 
130 template<
131  typename OutputLayerType,
132  typename InputDataType,
133  typename OutputDataType
134 >
135 class ConcatPerformance;
136 
137 template<
138  typename ForwardConvolutionRule,
139  typename BackwardConvolutionRule,
140  typename GradientConvolutionRule,
141  typename InputDataType,
142  typename OutputDataType
143 >
144 class Convolution;
145 
146 template<
147  typename ForwardConvolutionRule,
148  typename BackwardConvolutionRule,
149  typename GradientConvolutionRule,
150  typename InputDataType,
151  typename OutputDataType
152 >
154 
155 template<
156  typename ForwardConvolutionRule,
157  typename BackwardConvolutionRule,
158  typename GradientConvolutionRule,
159  typename InputDataType,
160  typename OutputDataType
161 >
162 class AtrousConvolution;
163 
164 template<
165  typename InputDataType,
166  typename OutputDataType
167 >
169 
170 template<typename InputDataType,
171  typename OutputDataType,
172  typename... CustomLayers
173 >
175 
176 template <typename InputDataType,
177  typename OutputDataType,
178  typename... CustomLayers
179 >
181 
182 using MoreTypes = boost::variant<
193 >;
194 
195 template <typename... CustomLayers>
196 using LayerTypes = boost::variant<
202  arma::mat, arma::mat>*,
213  arma::mat, arma::mat>*,
217  NaiveConvolution<ValidConvolution>, arma::mat, arma::mat>*,
220  NaiveConvolution<ValidConvolution>, arma::mat, arma::mat>*,
250  MoreTypes,
251  CustomLayers*...
252 >;
253 
254 } // namespace ann
255 } // namespace mlpack
256 
257 #endif
Implementation of the variance reduced classification reinforcement layer.
Definition: layer_types.hpp:70
Implementation of the Add module class.
Definition: add.hpp:34
boost::variant< Recurrent< arma::mat, arma::mat > *, RecurrentAttention< arma::mat, arma::mat > *, ReinforceNormal< arma::mat, arma::mat > *, Reparametrization< arma::mat, arma::mat > *, Select< arma::mat, arma::mat > *, Sequential< arma::mat, arma::mat, false > *, Sequential< arma::mat, arma::mat, true > *, Subview< arma::mat, arma::mat > *, VRClassReward< arma::mat, arma::mat > *, VirtualBatchNorm< arma::mat, arma::mat > * > MoreTypes
Implementation of the Concatenate module class.
Definition: concatenate.hpp:36
Implementation of the log softmax layer.
Definition: log_softmax.hpp:36
Implementation of the AddMerge module class.
Definition: add_merge.hpp:42
Implementation of the Padding module class.
Definition: layer_types.hpp:72
Declaration of the VirtualBatchNorm layer class.
Definition: layer_types.hpp:87
The FlexibleReLU activation function, defined by.
Implementation of the Transposed Convolution class.
Implementation of the reinforce normal layer.
Implementation of the Linear layer class.
Definition: layer_types.hpp:77
The LeakyReLU activation function, defined by.
Definition: leaky_relu.hpp:44
This class implements the Recurrent Model for Visual Attention, using a variety of possible layer imp...
Implementation of the Convolution class.
Definition: convolution.hpp:47
Implementation of the MeanPooling.
Implementation of the Reparametrization layer class.
Definition: layer_types.hpp:97
Implementation of the Join module class.
Definition: join.hpp:33
Implementation of the concat performance class.
boost::variant< Add< arma::mat, arma::mat > *, AddMerge< arma::mat, arma::mat > *, AtrousConvolution< NaiveConvolution< ValidConvolution >, NaiveConvolution< FullConvolution >, NaiveConvolution< ValidConvolution >, arma::mat, arma::mat > *, BaseLayer< LogisticFunction, arma::mat, arma::mat > *, BaseLayer< IdentityFunction, arma::mat, arma::mat > *, BaseLayer< TanhFunction, arma::mat, arma::mat > *, BaseLayer< RectifierFunction, arma::mat, arma::mat > *, BaseLayer< SoftplusFunction, arma::mat, arma::mat > *, BatchNorm< arma::mat, arma::mat > *, BilinearInterpolation< arma::mat, arma::mat > *, Concat< arma::mat, arma::mat > *, Concatenate< arma::mat, arma::mat > *, ConcatPerformance< NegativeLogLikelihood< arma::mat, arma::mat >, arma::mat, arma::mat > *, Constant< arma::mat, arma::mat > *, Convolution< NaiveConvolution< ValidConvolution >, NaiveConvolution< FullConvolution >, NaiveConvolution< ValidConvolution >, arma::mat, arma::mat > *, TransposedConvolution< NaiveConvolution< ValidConvolution >, NaiveConvolution< ValidConvolution >, NaiveConvolution< ValidConvolution >, arma::mat, arma::mat > *, DropConnect< arma::mat, arma::mat > *, Dropout< arma::mat, arma::mat > *, AlphaDropout< arma::mat, arma::mat > *, ELU< arma::mat, arma::mat > *, FlexibleReLU< arma::mat, arma::mat > *, Glimpse< arma::mat, arma::mat > *, HardTanH< arma::mat, arma::mat > *, Highway< arma::mat, arma::mat > *, Join< arma::mat, arma::mat > *, LayerNorm< arma::mat, arma::mat > *, LeakyReLU< arma::mat, arma::mat > *, CReLU< arma::mat, arma::mat > *, Linear< arma::mat, arma::mat, NoRegularizer > *, LinearNoBias< arma::mat, arma::mat, NoRegularizer > *, LogSoftMax< arma::mat, arma::mat > *, Lookup< arma::mat, arma::mat > *, LSTM< arma::mat, arma::mat > *, GRU< arma::mat, arma::mat > *, FastLSTM< arma::mat, arma::mat > *, MaxPooling< arma::mat, arma::mat > *, MeanPooling< arma::mat, arma::mat > *, MiniBatchDiscrimination< arma::mat, arma::mat > *, MultiplyConstant< arma::mat, arma::mat > *, 
MultiplyMerge< arma::mat, arma::mat > *, NegativeLogLikelihood< arma::mat, arma::mat > *, Padding< arma::mat, arma::mat > *, PReLU< arma::mat, arma::mat > *, WeightNorm< arma::mat, arma::mat > *, CELU< arma::mat, arma::mat > *, MoreTypes, CustomLayers *... > LayerTypes
Declaration of the WeightNorm layer class.
The Hard Tanh activation function, defined by.
Definition: hard_tanh.hpp:49
The select module selects the specified column from a given input matrix.
Definition: select.hpp:32
Implementation of the negative log likelihood layer.
The PReLU activation function, defined by (where alpha is trainable)
Implementation of the base layer.
Definition: base_layer.hpp:53
Implementation of the Concat class.
Definition: concat.hpp:45
Implementation of the Highway layer.
Definition: highway.hpp:60
Implementation of the LSTM module class.
Definition: layer_types.hpp:67
Declaration of the Layer Normalization class.
Definition: layer_norm.hpp:65
Implementation of the Lookup class.
Definition: lookup.hpp:35
Implementation of the subview layer.
Definition: subview.hpp:34
Implementation of the MiniBatchDiscrimination layer.
Definition: layer_types.hpp:92
Implementation of the MultiplyMerge module class.
Implementation of the LinearNoBias class.
Definition: layer_types.hpp:82
A concatenated ReLU has two outputs, one ReLU and one negative ReLU, concatenated together...
Definition: c_relu.hpp:50
Computes the two-dimensional convolution.
An implementation of a gru network layer.
Definition: gru.hpp:58
The dropout layer is a regularizer that randomly with probability 'ratio' sets input values to zero a...
Definition: dropout.hpp:53
The glimpse layer returns a retina-like representation (down-scaled cropped images) of increasing sca...
Definition: glimpse.hpp:88
The DropConnect layer is a regularizer that randomly with probability ratio sets the connection value...
Definition: dropconnect.hpp:63
Implementation of the multiply constant layer.
The alpha-dropout layer is a regularizer that randomly with probability 'ratio' sets input values t...
The CELU activation function, defined by.
Definition: celu.hpp:60
Declaration of the Batch Normalization layer class.
Definition: batch_norm.hpp:56
Implementation of the RecurrentLayer class.
Implementation of the Sequential class.
Implementation of the constant layer.
Definition: constant.hpp:34
Implementation of the MaxPooling layer.
Definition: max_pooling.hpp:52
The ELU activation function, defined by.
Definition: elu.hpp:111
Definition and Implementation of the Bilinear Interpolation Layer.
An implementation of a faster version of the Fast LSTM network layer.
Definition: fast_lstm.hpp:66
Implementation of the Atrous Convolution class.