mlpack  3.1.1
 All Classes Namespaces Files Functions Variables Typedefs Enumerations Enumerator Friends Macros Pages
ffn.hpp
Go to the documentation of this file.
1 
13 #ifndef MLPACK_METHODS_ANN_FFN_HPP
14 #define MLPACK_METHODS_ANN_FFN_HPP
15 
16 #include <mlpack/prereqs.hpp>
17 
25 #include "visitor/copy_visitor.hpp"
26 #include "visitor/loss_visitor.hpp"
27 
29 
34 #include <ensmallen.hpp>
35 
36 namespace mlpack {
37 namespace ann {
38 
47 template<
48  typename OutputLayerType = NegativeLogLikelihood<>,
49  typename InitializationRuleType = RandomInitialization,
50  typename... CustomLayers
51 >
52 class FFN
53 {
54  public:
57 
71  FFN(OutputLayerType outputLayer = OutputLayerType(),
72  InitializationRuleType initializeRule = InitializationRuleType());
73 
75  FFN(const FFN&);
76 
78  FFN(FFN&&);
79 
82 
84  ~FFN();
85 
95  template<typename OptimizerType>
96  typename std::enable_if<
97  HasMaxIterations<OptimizerType, size_t&(OptimizerType::*)()>
98  ::value, void>::type
99  WarnMessageMaxIterations(OptimizerType& optimizer, size_t samples) const;
100 
109  template<typename OptimizerType>
110  typename std::enable_if<
111  !HasMaxIterations<OptimizerType, size_t&(OptimizerType::*)()>
112  ::value, void>::type
113  WarnMessageMaxIterations(OptimizerType& optimizer, size_t samples) const;
114 
135  template<typename OptimizerType, typename... CallbackTypes>
136  double Train(arma::mat predictors,
137  arma::mat responses,
138  OptimizerType& optimizer,
139  CallbackTypes&&... callbacks);
140 
161  template<typename OptimizerType = ens::RMSProp, typename... CallbackTypes>
162  double Train(arma::mat predictors,
163  arma::mat responses,
164  CallbackTypes&&... callbacks);
165 
177  void Predict(arma::mat predictors, arma::mat& results);
178 
186  template<typename PredictorsType, typename ResponsesType>
187  double Evaluate(const PredictorsType& predictors,
188  const ResponsesType& responses);
189 
198  double Evaluate(const arma::mat& parameters);
199 
213  double Evaluate(const arma::mat& parameters,
214  const size_t begin,
215  const size_t batchSize,
216  const bool deterministic);
217 
230  double Evaluate(const arma::mat& parameters,
231  const size_t begin,
232  const size_t batchSize);
233 
242  template<typename GradType>
243  double EvaluateWithGradient(const arma::mat& parameters, GradType& gradient);
244 
257  template<typename GradType>
258  double EvaluateWithGradient(const arma::mat& parameters,
259  const size_t begin,
260  GradType& gradient,
261  const size_t batchSize);
262 
275  void Gradient(const arma::mat& parameters,
276  const size_t begin,
277  arma::mat& gradient,
278  const size_t batchSize);
279 
284  void Shuffle();
285 
286  /*
287  * Add a new module to the model.
288  *
289  * @param args The layer parameter.
290  */
291  template <class LayerType, class... Args>
292  void Add(Args... args) { network.push_back(new LayerType(args...)); }
293 
294  /*
295  * Add a new module to the model.
296  *
297  * @param layer The Layer to be added to the model.
298  */
299  void Add(LayerTypes<CustomLayers...> layer) { network.push_back(layer); }
300 
302  const std::vector<LayerTypes<CustomLayers...> >& Model() const
303  {
304  return network;
305  }
309  std::vector<LayerTypes<CustomLayers...> >& Model() { return network; }
310 
312  size_t NumFunctions() const { return numFunctions; }
313 
315  const arma::mat& Parameters() const { return parameter; }
317  arma::mat& Parameters() { return parameter; }
318 
320  const arma::mat& Responses() const { return responses; }
322  arma::mat& Responses() { return responses; }
323 
325  const arma::mat& Predictors() const { return predictors; }
327  arma::mat& Predictors() { return predictors; }
328 
332  void ResetParameters();
333 
335  template<typename Archive>
336  void serialize(Archive& ar, const unsigned int /* version */);
337 
348  template<typename PredictorsType, typename ResponsesType>
349  void Forward(const PredictorsType& inputs, ResponsesType& results);
350 
362  template<typename PredictorsType, typename ResponsesType>
363  void Forward(const PredictorsType& inputs ,
364  ResponsesType& results,
365  const size_t begin,
366  const size_t end);
367 
379  template<typename PredictorsType,
380  typename TargetsType,
381  typename GradientsType>
382  double Backward(const PredictorsType& inputs,
383  const TargetsType& targets,
384  GradientsType& gradients);
385 
386  private:
387  // Helper functions.
394  template<typename InputType>
395  void Forward(const InputType& input);
396 
404  void ResetData(arma::mat predictors, arma::mat responses);
405 
410  void Backward();
411 
416  template<typename InputType>
417  void Gradient(const InputType& input);
418 
423  void ResetDeterministic();
424 
428  void ResetGradients(arma::mat& gradient);
429 
435  void Swap(FFN& network);
436 
438  OutputLayerType outputLayer;
439 
442  InitializationRuleType initializeRule;
443 
445  size_t width;
446 
448  size_t height;
449 
451  bool reset;
452 
454  std::vector<LayerTypes<CustomLayers...> > network;
455 
457  arma::mat predictors;
458 
460  arma::mat responses;
461 
463  arma::mat parameter;
464 
466  size_t numFunctions;
467 
469  arma::mat error;
470 
472  DeltaVisitor deltaVisitor;
473 
475  OutputParameterVisitor outputParameterVisitor;
476 
478  WeightSizeVisitor weightSizeVisitor;
479 
481  OutputWidthVisitor outputWidthVisitor;
482 
484  OutputHeightVisitor outputHeightVisitor;
485 
487  LossVisitor lossVisitor;
488 
490  ResetVisitor resetVisitor;
491 
493  DeleteVisitor deleteVisitor;
494 
496  bool deterministic;
497 
499  arma::mat delta;
500 
502  arma::mat inputParameter;
503 
505  arma::mat outputParameter;
506 
508  arma::mat gradient;
509 
511  CopyVisitor<CustomLayers...> copyVisitor;
512 
513  // The GAN class should have access to internal members.
514  template<
515  typename Model,
516  typename InitializerType,
517  typename NoiseType,
518  typename PolicyType
519  >
520  friend class GAN;
521 }; // class FFN
522 
523 } // namespace ann
524 } // namespace mlpack
525 
528 namespace boost {
529 namespace serialization {
530 
531 template<typename OutputLayerType,
532  typename InitializationRuleType,
533  typename... CustomLayer>
534 struct version<
535  mlpack::ann::FFN<OutputLayerType, InitializationRuleType, CustomLayer...>>
536 {
537  BOOST_STATIC_CONSTANT(int, value = 2);
538 };
539 
540 } // namespace serialization
541 } // namespace boost
542 
543 // Include implementation.
544 #include "ffn_impl.hpp"
545 
546 #endif
std::vector< LayerTypes< CustomLayers...> > & Model()
Modify the network model.
Definition: ffn.hpp:309
DeleteVisitor executes the destructor of the instantiated object.
void Gradient(const arma::mat &parameters, const size_t begin, arma::mat &gradient, const size_t batchSize)
Evaluate the gradient of the feedforward network with the given parameters, and with respect to only the batch of points starting at the given index.
OutputHeightVisitor exposes the OutputHeight() method of the given module.
arma::mat & Responses()
Modify the matrix of responses to the input data points.
Definition: ffn.hpp:322
void Add(Args...args)
Definition: ffn.hpp:292
void serialize(Archive &ar, const unsigned int)
Serialize the model.
void Predict(arma::mat predictors, arma::mat &results)
Predict the responses to a given set of predictors.
BaseLayer< ActivationFunction, InputDataType, OutputDataType > CustomLayer
Standard Sigmoid layer.
LossVisitor exposes the Loss() method of the given module.
double Train(arma::mat predictors, arma::mat responses, OptimizerType &optimizer, CallbackTypes &&...callbacks)
Train the feedforward network on the given input data using the given optimizer.
std::enable_if< HasMaxIterations< OptimizerType, size_t &(OptimizerType::*)()>::value, void >::type WarnMessageMaxIterations(OptimizerType &optimizer, size_t samples) const
Check if the optimizer has a MaxIterations() parameter; if it does, then check whether its value is less than the number of datapoints in the dataset.
This visitor is to support copy constructor for neural network module.
The core includes that mlpack expects; standard C++ includes and Armadillo.
WeightSizeVisitor returns the number of weights of the given module.
const arma::mat & Predictors() const
Get the matrix of data points (predictors).
Definition: ffn.hpp:325
boost::variant< Add< arma::mat, arma::mat > *, AddMerge< arma::mat, arma::mat > *, AtrousConvolution< NaiveConvolution< ValidConvolution >, NaiveConvolution< FullConvolution >, NaiveConvolution< ValidConvolution >, arma::mat, arma::mat > *, BaseLayer< LogisticFunction, arma::mat, arma::mat > *, BaseLayer< IdentityFunction, arma::mat, arma::mat > *, BaseLayer< TanhFunction, arma::mat, arma::mat > *, BaseLayer< RectifierFunction, arma::mat, arma::mat > *, BaseLayer< SoftplusFunction, arma::mat, arma::mat > *, BatchNorm< arma::mat, arma::mat > *, BilinearInterpolation< arma::mat, arma::mat > *, Concat< arma::mat, arma::mat > *, Concatenate< arma::mat, arma::mat > *, ConcatPerformance< NegativeLogLikelihood< arma::mat, arma::mat >, arma::mat, arma::mat > *, Constant< arma::mat, arma::mat > *, Convolution< NaiveConvolution< ValidConvolution >, NaiveConvolution< FullConvolution >, NaiveConvolution< ValidConvolution >, arma::mat, arma::mat > *, TransposedConvolution< NaiveConvolution< ValidConvolution >, NaiveConvolution< ValidConvolution >, NaiveConvolution< ValidConvolution >, arma::mat, arma::mat > *, DropConnect< arma::mat, arma::mat > *, Dropout< arma::mat, arma::mat > *, AlphaDropout< arma::mat, arma::mat > *, ELU< arma::mat, arma::mat > *, FlexibleReLU< arma::mat, arma::mat > *, Glimpse< arma::mat, arma::mat > *, HardTanH< arma::mat, arma::mat > *, Highway< arma::mat, arma::mat > *, Join< arma::mat, arma::mat > *, LayerNorm< arma::mat, arma::mat > *, LeakyReLU< arma::mat, arma::mat > *, CReLU< arma::mat, arma::mat > *, Linear< arma::mat, arma::mat, NoRegularizer > *, LinearNoBias< arma::mat, arma::mat, NoRegularizer > *, LogSoftMax< arma::mat, arma::mat > *, Lookup< arma::mat, arma::mat > *, LSTM< arma::mat, arma::mat > *, GRU< arma::mat, arma::mat > *, FastLSTM< arma::mat, arma::mat > *, MaxPooling< arma::mat, arma::mat > *, MeanPooling< arma::mat, arma::mat > *, MiniBatchDiscrimination< arma::mat, arma::mat > *, MultiplyConstant< arma::mat, arma::mat > *, 
MultiplyMerge< arma::mat, arma::mat > *, NegativeLogLikelihood< arma::mat, arma::mat > *, Padding< arma::mat, arma::mat > *, PReLU< arma::mat, arma::mat > *, WeightNorm< arma::mat, arma::mat > *, CELU< arma::mat, arma::mat > *, MoreTypes, CustomLayers *... > LayerTypes
FFN & operator=(FFN)
Copy/move assignment operator.
void Shuffle()
Shuffle the order of function visitation.
~FFN()
Destructor to release allocated memory.
void Forward(const PredictorsType &inputs, ResponsesType &results)
Perform the forward pass of the data in real batch mode.
ResetVisitor executes the Reset() function.
double EvaluateWithGradient(const arma::mat &parameters, GradType &gradient)
Evaluate the feedforward network with the given parameters.
OutputParameterVisitor exposes the output parameter of the given module.
void Add(LayerTypes< CustomLayers...> layer)
Definition: ffn.hpp:299
const arma::mat & Parameters() const
Return the initial point for the optimization.
Definition: ffn.hpp:315
size_t NumFunctions() const
Return the number of separable functions (the number of predictor points).
Definition: ffn.hpp:312
arma::mat & Parameters()
Modify the initial point for the optimization.
Definition: ffn.hpp:317
void ResetParameters()
Reset the module information (weights/parameters).
arma::mat & Predictors()
Modify the matrix of data points (predictors).
Definition: ffn.hpp:327
DeltaVisitor exposes the delta parameter of the given module.
The implementation of the standard GAN module.
Definition: gan.hpp:63
const arma::mat & Responses() const
Get the matrix of responses to the input data points.
Definition: ffn.hpp:320
const std::vector< LayerTypes< CustomLayers...> > & Model() const
Get the network model.
Definition: ffn.hpp:302
Implementation of a standard feed forward network.
Definition: ffn.hpp:52
OutputWidthVisitor exposes the OutputWidth() method of the given module.
double Evaluate(const PredictorsType &predictors, const ResponsesType &responses)
Evaluate the feedforward network with the given predictors and responses.
FFN(OutputLayerType outputLayer=OutputLayerType(), InitializationRuleType initializeRule=InitializationRuleType())
Create the FFN object.