mlpack  3.1.1
 All Classes Namespaces Files Functions Variables Typedefs Enumerations Enumerator Friends Macros Pages
recurrent_attention.hpp
Go to the documentation of this file.
1 
12 #ifndef MLPACK_METHODS_ANN_LAYER_RECURRENT_ATTENTION_HPP
13 #define MLPACK_METHODS_ANN_LAYER_RECURRENT_ATTENTION_HPP
14 
15 #include <mlpack/prereqs.hpp>
16 #include <boost/ptr_container/ptr_vector.hpp>
17 
18 #include "../visitor/delta_visitor.hpp"
19 #include "../visitor/output_parameter_visitor.hpp"
20 #include "../visitor/reset_visitor.hpp"
21 #include "../visitor/weight_size_visitor.hpp"
22 
23 #include "layer_types.hpp"
24 #include "add_merge.hpp"
25 #include "sequential.hpp"
26 
27 namespace mlpack {
28 namespace ann {
29 
/**
 * This class implements the recurrent attention layer.  It wraps a
 * user-supplied recurrent (RNN) module together with an action module and,
 * during the backward pass, accumulates the gradients of both modules across
 * rho time steps (see IntermediateGradient()).  NOTE(review): presumably this
 * corresponds to the Recurrent Model of Visual Attention architecture --
 * confirm against the implementation file.
 *
 * @tparam InputDataType Type of the input data (e.g. arma::mat).
 * @tparam OutputDataType Type of the output data (e.g. arma::mat).
 */
template <
    typename InputDataType = arma::mat,
    typename OutputDataType = arma::mat
>
class RecurrentAttention
{
 public:
  /**
   * Create the RecurrentAttention object using the specified modules.
   *
   * @param outSize The number of output units.
   * @param rnn The recurrent module to use.
   * @param action The action module to use.
   * @param rho Number of time steps to unroll / backpropagate through time.
   */
  template<typename RNNModuleType, typename ActionModuleType>
  RecurrentAttention(const size_t outSize,
                     const RNNModuleType& rnn,
                     const ActionModuleType& action,
                     const size_t rho);

  /**
   * Ordinary feed forward pass of a neural network, evaluating the function
   * f(x) by propagating the activity forward through f.
   *
   * @param input Input data used for evaluating the specified function.
   * @param output Resulting output activation.
   */
  template<typename eT>
  void Forward(const arma::Mat<eT>& input, arma::Mat<eT>& output);

  /**
   * Ordinary feed backward pass of a neural network, calculating the function
   * f(x) by propagating x backwards through f, using the results of the
   * forward pass.
   *
   * @param input The propagated input activation (unused by this layer).
   * @param gy The backpropagated error.
   * @param g The calculated gradient.
   */
  template<typename eT>
  void Backward(const arma::Mat<eT>& /* input */,
                const arma::Mat<eT>& gy,
                arma::Mat<eT>& g);

  /*
   * Calculate the gradient using the output delta and the input activation.
   *
   * @param input The input parameter used for calculating the gradient.
   * @param error The calculated error.
   * @param gradient The calculated gradient.
   */
  template<typename eT>
  void Gradient(const arma::Mat<eT>& /* input */,
                const arma::Mat<eT>& /* error */,
                arma::Mat<eT>& /* gradient */);

  //! Get the model modules.
  std::vector<LayerTypes<>>& Model() { return network; }

  //! Get the value of the deterministic parameter.
  bool Deterministic() const { return deterministic; }
  //! Modify the value of the deterministic parameter.
  bool& Deterministic() { return deterministic; }

  //! Get the parameters.
  OutputDataType const& Parameters() const { return parameters; }
  //! Modify the parameters.
  OutputDataType& Parameters() { return parameters; }

  //! Get the output parameter.
  OutputDataType const& OutputParameter() const { return outputParameter; }
  //! Modify the output parameter.
  OutputDataType& OutputParameter() { return outputParameter; }

  //! Get the delta.
  OutputDataType const& Delta() const { return delta; }
  //! Modify the delta.
  OutputDataType& Delta() { return delta; }

  //! Get the gradient.
  OutputDataType const& Gradient() const { return gradient; }
  //! Modify the gradient.
  OutputDataType& Gradient() { return gradient; }

  /**
   * Serialize the layer.
   */
  template<typename Archive>
  void serialize(Archive& ar, const unsigned int /* version */);

 private:
  /**
   * Compute the gradient of one backward (BPTT) step and accumulate it into
   * attentionGradient.  The GradientVisitor calls write their results into
   * intermediateGradient -- presumably via aliases set up in the
   * implementation file; verify there.
   */
  void IntermediateGradient()
  {
    intermediateGradient.zeros();

    // Gradient of the action module.  On the last backward step
    // (backwardStep == rho - 1) the action module's gradient is taken with
    // respect to the saved initial input rather than its stored output.
    if (backwardStep == (rho - 1))
    {
      boost::apply_visitor(GradientVisitor(initialInput, actionError),
          actionModule);
    }
    else
    {
      boost::apply_visitor(GradientVisitor(boost::apply_visitor(
          outputParameterVisitor, actionModule), actionError),
          actionModule);
    }

    // Gradient of the recurrent module.
    boost::apply_visitor(GradientVisitor(boost::apply_visitor(
        outputParameterVisitor, rnnModule), recurrentError),
        rnnModule);

    attentionGradient += intermediateGradient;
  }

  //! Locally-stored number of output units.
  size_t outSize;

  //! Locally-stored recurrent (RNN) module.
  LayerTypes<> rnnModule;

  //! Locally-stored action module.
  LayerTypes<> actionModule;

  //! Number of steps to backpropagate through time.
  size_t rho;

  //! Locally-stored current forward-pass step counter.
  size_t forwardStep;

  //! Locally-stored current backward-pass step counter.
  size_t backwardStep;

  //! Locally-stored deterministic parameter.
  bool deterministic;

  //! Locally-stored weight object.
  OutputDataType parameters;

  //! Locally-stored model modules.
  std::vector<LayerTypes<>> network;

  //! Locally-stored weight size visitor.
  WeightSizeVisitor weightSizeVisitor;

  //! Locally-stored delta visitor.
  DeltaVisitor deltaVisitor;

  //! Locally-stored output parameter visitor.
  OutputParameterVisitor outputParameterVisitor;

  //! Locally-stored feedback output parameters.
  std::vector<arma::mat> feedbackOutputParameter;

  //! Locally-stored module output parameters (saved for the backward pass).
  std::vector<arma::mat> moduleOutputParameter;

  //! Locally-stored delta object.
  OutputDataType delta;

  //! Locally-stored gradient object.
  OutputDataType gradient;

  //! Locally-stored output parameter object.
  OutputDataType outputParameter;

  //! Locally-stored error passed back into the recurrent module.
  arma::mat recurrentError;

  //! Locally-stored error passed back into the action module.
  arma::mat actionError;

  //! Locally-stored delta of the action module.
  arma::mat actionDelta;

  //! Locally-stored delta of the recurrent module.
  arma::mat rnnDelta;

  //! Locally-stored initial module input (used on the last backward step).
  arma::mat initialInput;

  //! Locally-stored reset visitor.
  ResetVisitor resetVisitor;

  //! Locally-stored gradient accumulated over all backward steps.
  arma::mat attentionGradient;

  //! Locally-stored gradient of a single backward step; accumulated into
  //! attentionGradient by IntermediateGradient().
  arma::mat intermediateGradient;
}; // class RecurrentAttention
251 
252 } // namespace ann
253 } // namespace mlpack
254 
255 // Include implementation.
256 #include "recurrent_attention_impl.hpp"
257 
258 #endif
bool & Deterministic()
Modify the value of the deterministic parameter.
bool Deterministic() const
The value of the deterministic parameter.
OutputDataType & Parameters()
Modify the parameters.
OutputDataType const & Delta() const
Get the delta.
void serialize(Archive &ar, const unsigned int)
Serialize the layer.
The core includes that mlpack expects: standard C++ includes and Armadillo.
OutputDataType & Gradient()
Modify the gradient.
OutputDataType const & OutputParameter() const
Get the output parameter.
OutputDataType & OutputParameter()
Modify the output parameter.
RecurrentAttention()
Default constructor: this will not give a usable RecurrentAttention object, so be sure to set all the parameters before using the layer.
OutputDataType & Delta()
Modify the delta.
GradientVisitor executes the Gradient() method of the given module using the input and delta parameters.
void Backward(const arma::Mat< eT > &, const arma::Mat< eT > &gy, arma::Mat< eT > &g)
Ordinary feed backward pass of a neural network, calculating the function f(x) by propagating x backwards through f, using the results of the forward pass.
OutputDataType const & Gradient() const
Get the gradient.
std::vector< LayerTypes<> > & Model()
Get the model modules.
OutputDataType const & Parameters() const
Get the parameters.
void Forward(const arma::Mat< eT > &input, arma::Mat< eT > &output)
Ordinary feed forward pass of a neural network, evaluating the function f(x) by propagating the activity forward through f.