#ifndef MLPACK_METHODS_ANN_BRNN_HPP
#define MLPACK_METHODS_ANN_BRNN_HPP

#include <mlpack/prereqs.hpp>

#include <ensmallen.hpp>

#include "rnn.hpp"

namespace mlpack {
namespace ann /** Artificial Neural Network. */ {
/**
 * Implementation of a standard bidirectional recurrent neural network
 * container.
 */
template<typename OutputLayerType = NegativeLogLikelihood<>,
         typename MergeLayerType = Concat<>,
         typename MergeOutputType = LogSoftMax<>,
         typename InitializationRuleType = RandomInitialization,
         typename... CustomLayers>
class BRNN
{
 public:
  //! Convenience typedef for the internal model construction.
  using NetworkType = BRNN<OutputLayerType,
                           MergeLayerType,
                           MergeOutputType,
                           InitializationRuleType,
                           CustomLayers...>;
  /**
   * Create the BRNN object.
   *
   * @param rho Maximum number of steps to backpropagate through time (BPTT).
   * @param single Predict only the last element of the input sequence.
   * @param outputLayer Output layer used to evaluate the network.
   * @param mergeLayer Layer used to merge the outputs of the forward and
   *     backward RNNs.
   * @param mergeOutput Output layer applied to the merged output.
   * @param initializeRule Rule used to initialize the weights.
   */
  BRNN(const size_t rho,
       const bool single = false,
       OutputLayerType outputLayer = OutputLayerType(),
       MergeLayerType* mergeLayer = new MergeLayerType(),
       MergeOutputType* mergeOutput = new MergeOutputType(),
       InitializationRuleType initializeRule = InitializationRuleType());
  /**
   * Check if the optimizer has a MaxIterations() parameter; if it does, then
   * check if its value is less than the number of datapoints in the dataset.
   *
   * @param optimizer Optimizer used in the training process.
   * @param samples Number of datapoints in the dataset.
   */
  template<typename OptimizerType>
  typename std::enable_if<
      HasMaxIterations<OptimizerType, size_t&(OptimizerType::*)()>
      ::value, void>::type
  WarnMessageMaxIterations(OptimizerType& optimizer, size_t samples) const;
  /**
   * Check if the optimizer does not have a MaxIterations() parameter; if so,
   * no warning needs to be issued.
   *
   * @param optimizer Optimizer used in the training process.
   * @param samples Number of datapoints in the dataset.
   */
  template<typename OptimizerType>
  typename std::enable_if<
      !HasMaxIterations<OptimizerType, size_t&(OptimizerType::*)()>
      ::value, void>::type
  WarnMessageMaxIterations(OptimizerType& optimizer, size_t samples) const;
  /**
   * Train the bidirectional recurrent neural network on the given input data
   * using the given optimizer. (Usage sketches appear after the end of this
   * header.)
   *
   * @param predictors Input training variables.
   * @param responses Outputs (results) from input training variables.
   * @param optimizer Instantiated optimizer used to train the model.
   * @return The final objective of the trained model.
   */
  template<typename OptimizerType>
  double Train(arma::cube predictors,
               arma::cube responses,
               OptimizerType& optimizer);
  /**
   * Train the bidirectional recurrent neural network on the given input data.
   * By default, the ens::StandardSGD optimization algorithm is used.
   *
   * @param predictors Input training variables.
   * @param responses Outputs (results) from input training variables.
   * @return The final objective of the trained model.
   */
  template<typename OptimizerType = ens::StandardSGD>
  double Train(arma::cube predictors, arma::cube responses);
  /**
   * Predict the responses to a given set of predictors.
   *
   * @param predictors Input predictors.
   * @param results Cube to put output predictions of the responses into.
   * @param batchSize Batch size to use for prediction.
   */
  void Predict(arma::cube predictors,
               arma::cube& results,
               const size_t batchSize = 256);
  /**
   * Evaluate the bidirectional recurrent neural network with the given
   * parameters.
   *
   * @param parameters Matrix of model parameters.
   * @param begin Index of the starting point to use for objective function
   *     evaluation.
   * @param batchSize Number of points to be passed at a time for objective
   *     function evaluation.
   * @param deterministic Whether to run the network in deterministic (test)
   *     mode.
   */
  double Evaluate(const arma::mat& parameters,
                  const size_t begin,
                  const size_t batchSize,
                  const bool deterministic);
  /**
   * Evaluate the bidirectional recurrent neural network with the given
   * parameters. This overload is called by ensmallen optimizers during
   * training.
   *
   * @param parameters Matrix of model parameters.
   * @param begin Index of the starting point to use for objective function
   *     evaluation.
   * @param batchSize Number of points to be passed at a time for objective
   *     function evaluation.
   */
  double Evaluate(const arma::mat& parameters,
                  const size_t begin,
                  const size_t batchSize);
  /**
   * Evaluate the bidirectional recurrent neural network with the given
   * parameters, and also compute the gradient.
   *
   * @param parameters Matrix of model parameters.
   * @param begin Index of the starting point to use for objective function
   *     evaluation.
   * @param gradient Matrix to output the gradient into.
   * @param batchSize Number of points to be passed at a time for objective
   *     function evaluation.
   */
  template<typename GradType>
  double EvaluateWithGradient(const arma::mat& parameters,
                              const size_t begin,
                              GradType& gradient,
                              const size_t batchSize);
  /**
   * Evaluate the gradient of the bidirectional recurrent neural network with
   * the given parameters, with respect to only a number of points in the
   * dataset. This is useful for separable optimizers such as SGD.
   *
   * @param parameters Matrix of the model parameters to be optimized.
   * @param begin Index of the starting point to use for objective function
   *     gradient evaluation.
   * @param gradient Matrix to output the gradient into.
   * @param batchSize Number of points to be passed at a time for objective
   *     function gradient evaluation.
   */
  void Gradient(const arma::mat& parameters,
                const size_t begin,
                arma::mat& gradient,
                const size_t batchSize);
  /*
   * Add a new module to the model.
   *
   * @param args The layer parameters.
   */
  template <class LayerType, class... Args>
  void Add(Args... args);
  //! Reset the state of the network.
  void Reset();

  //! Reset the module information (weights/parameters).
  void ResetParameters();

  //! Shuffle the order of function visitation.
  void Shuffle();

  //! Return the number of separable functions (the number of predictor
  //! points).
  size_t NumFunctions() const;

  //! Return the maximum length of backpropagation through time.
  const size_t& Rho() const { return rho; }
  //! Modify the maximum length of backpropagation through time.
  size_t& Rho() { return rho; }

  //! Return the initial point for the optimization.
  const arma::mat& Parameters() const;
  //! Modify the initial point for the optimization.
  arma::mat& Parameters();

  //! Get the matrix of responses to the input data points.
  const arma::cube& Responses() const { return responses; }
  //! Modify the matrix of responses to the input data points.
  arma::cube& Responses();

  //! Get the matrix of data points (predictors).
  const arma::cube& Predictors() const;
  //! Modify the matrix of data points (predictors).
  arma::cube& Predictors();
  /**
   * Serialize the model.
   */
  template<typename Archive>
  void serialize(Archive& ar, const unsigned int /* version */);
 private:
  //! Reset the deterministic (train/test) state of the network.
  void ResetDeterministic();
  //! Number of steps to backpropagate through time (BPTT).
  size_t rho;

  //! The output layer used to evaluate the network.
  OutputLayerType outputLayer;

  //! The layer used to merge the outputs of the forward and backward RNNs,
  //! and the output layer applied to the merged output.
  MergeLayerType* mergeLayer;
  MergeOutputType* mergeOutput;

  //! The initialization rule used to initialize the weights.
  InitializationRuleType initializeRule;

  //! Only predict the last element of the input sequence.
  bool single;

  //! Matrix of (trained) parameters.
  arma::mat parameter;

  //! The matrix of data points (predictors).
  arma::cube predictors;

  //! The matrix of responses to the input data points.
  arma::cube responses;

  //! Locally-stored output parameters of the forward RNN.
  std::vector<arma::mat> forwardRNNOutputParameter;

  //! Locally-stored output parameters of the backward RNN.
  std::vector<arma::mat> backwardRNNOutputParameter;

  //! Locally-stored gradient of the forward RNN.
  arma::mat forwardGradient;

  //! Locally-stored gradient of the backward RNN.
  arma::mat backwardGradient;

  //! Locally-stored total gradient.
  arma::mat totalGradient;

  //! The internal forward RNN.
  RNN<OutputLayerType, InitializationRuleType, CustomLayers...> forwardRNN;

  //! The internal backward RNN.
  RNN<OutputLayerType, InitializationRuleType, CustomLayers...> backwardRNN;
}; // class BRNN

} // namespace ann
} // namespace mlpack
//! Set the serialization version of the BRNN class.
namespace boost {
namespace serialization {

template<typename OutputLayerType,
         typename InitializationRuleType,
         typename MergeLayerType,
         typename MergeOutputType,
         typename... CustomLayer>
struct version<
    mlpack::ann::BRNN<OutputLayerType, MergeLayerType, MergeOutputType,
    InitializationRuleType, CustomLayer...>>
{
  BOOST_STATIC_CONSTANT(int, value = 1);
};

} // namespace serialization
} // namespace boost
// Include implementation.
#include "brnn_impl.hpp"

#endif
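A minimal usage sketch of the API declared above (not part of the header itself): it builds a small BRNN, trains it with the default ens::StandardSGD optimizer, and predicts on the same sequences. The layer choices, sizes, and random data are hypothetical placeholders; the responses hold 1-based class labels here because the default output layer is NegativeLogLikelihood<>.

#include <mlpack/core.hpp>
#include <mlpack/methods/ann/brnn.hpp>
#include <mlpack/methods/ann/layer/layer.hpp>

using namespace mlpack::ann;

int main()
{
  // Hypothetical dimensions: 6 input features, 3 classes, sequences of
  // length rho = 5, 100 training sequences.
  const size_t inputSize = 6, numClasses = 3, rho = 5, nPoints = 100;

  // Cubes are laid out as (dimensions x points x time steps).
  arma::cube predictors(inputSize, nPoints, rho, arma::fill::randu);
  arma::cube responses = arma::randi<arma::cube>(1, nPoints, rho,
      arma::distr_param(1, numClasses));

  // rho is the maximum number of BPTT steps (see Rho() in the header).
  BRNN<> model(rho);
  model.Add<LSTM<>>(inputSize, 10, rho);
  model.Add<Linear<>>(10, numClasses);

  // Train with the default optimizer (ens::StandardSGD).
  model.Train(predictors, responses);

  // Predict responses for the same predictors.
  arma::cube results;
  model.Predict(predictors, results);

  return 0;
}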