NeuralEngine
A Game Engine with embedded Machine Learning algorithms based on Gaussian Processes.
FgNadamSolver.h
#pragma once

#include <MachineLearning/BaseGradientOptimizationMethod.h>

namespace NeuralEngine
{
    namespace MachineLearning
    {
        template<typename Scalar, LineSearchType LSType = MoreThuente>
        class NE_IMPEXP NadamSolver : public BaseGradientOptimizationMethod<Scalar, LSType>
        {
        public:

            NadamSolver(int numberOfVariables);

            NadamSolver(int numberOfVariables,
                std::function<Scalar(const af::array&, af::array&)> function);

            NadamSolver(NonlinearObjectiveFunction<Scalar>* function);

            void SetBeta1(Scalar beta1);

            void SetBeta2(Scalar beta2);

            void SetAlpha(Scalar alpha);

            void SetEpsilon(Scalar epsilon);

            void SetDecay(Scalar decay);

            Scalar GetBeta1();

            Scalar GetBeta2();

            Scalar GetAlpha();

            Scalar GetEpsilon();

            Scalar GetDecay();

        protected:

            virtual bool Optimize(int* cycle = nullptr) override;

        private:
            Scalar min_step;   // The minimum step length allowed in the line search.
            Scalar max_step;   // The maximum step length allowed in the line search.

            Scalar sAlpha;     // Learning rate.
            Scalar sBeta1;     // Exponential decay rate for the first-moment estimates (e.g. 0.9).
            Scalar sBeta2;     // Exponential decay rate for the second-moment estimates (e.g. 0.999).
            Scalar sEpsilon;   // Small constant that prevents division by zero in the update.
            Scalar sDecay;     // Initial decay rate.
            Scalar delta;
            Scalar sCumBeta1;  // Cumulative product of beta1.
        };
    }
}
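For context, Nadam is Adam with Nesterov momentum: it keeps exponentially decaying first- and second-moment estimates of the gradient and applies a look-ahead bias correction to the momentum term. The standalone sketch below is not taken from the engine; the function name, the exact bias-correction scheme, and the iteration bookkeeping are assumptions based on Dozat's Nadam formulation and the members listed above (sAlpha, sBeta1, sBeta2, sEpsilon, sCumBeta1). It only illustrates what a single update of this kind looks like with ArrayFire arrays.

#include <arrayfire.h>
#include <cmath>

// Illustrative single Nadam step (hypothetical helper, not engine code).
// 'm' and 'v' are the running first/second moments, 'cumBeta1' is the
// running product of beta1 (cf. sCumBeta1, start it at 1.0f), and 't' is
// the 1-based iteration counter.
void NadamStep(af::array& x, const af::array& grad,
               af::array& m, af::array& v, float& cumBeta1, int t,
               float alpha, float beta1, float beta2, float epsilon)
{
    // Biased first- and second-moment estimates.
    m = beta1 * m + (1.0f - beta1) * grad;
    v = beta2 * v + (1.0f - beta2) * grad * grad;

    cumBeta1 *= beta1;  // equals beta1^t after this line

    // Bias corrections; the momentum term looks one step ahead (Nesterov).
    af::array mHat = m / (1.0f - cumBeta1 * beta1);
    af::array gHat = grad / (1.0f - cumBeta1);
    af::array vHat = v / (1.0f - std::pow(beta2, static_cast<float>(t)));

    // Nesterov-accelerated Adam parameter update.
    x -= alpha * (beta1 * mHat + (1.0f - beta1) * gHat)
         / (af::sqrt(vHat) + epsilon);
}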
Scalar GetBeta1()
Gets the decay rate for the first-moment estimates.
NadamSolver(NonlinearObjectiveFunction< Scalar > *function)
Creates a new instance of the Nadam optimization algorithm.
void SetBeta2(Scalar beta2)
Sets the decay rate for the second-moment estimates.
Scalar GetAlpha()
Gets the learning rate.
virtual bool Optimize(int *cycle=nullptr) override
Implements the actual optimization algorithm. This method should try to minimize the objective function...
void SetBeta1(Scalar beta1)
Sets the decay rate for the first-moment estimates.
NadamSolver(int numberOfVariables)
Creates a new instance of the Nadam optimization algorithm.
void SetEpsilon(Scalar epsilon)
Sets an epsilon to avoid division by zero.
void SetDecay(Scalar decay)
Sets the initial decay rate.
void SetAlpha(Scalar alpha)
Sets the learning rate.
NadamSolver(int numberOfVariables, std::function< Scalar(const af::array &, af::array &)> function)
Creates a new instance of the Nadam optimization algorithm.
Scalar GetEpsilon()
Gets the epsilon.
Scalar GetBeta2()
Gets the decay rate for the second-moment estimates.
Scalar GetDecay()
Gets the initial decay rate.
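As a usage illustration, the sketch below shows only the constructor and setter calls documented above. It is a hedged example, not code from the repository: the include path is an assumption (only the BaseGradientOptimizationMethod include is visible in this header), and the call that actually starts the optimization is defined by the base class and therefore omitted.

#include <arrayfire.h>
#include <MachineLearning/FgNadamSolver.h>  // include path assumed

using namespace NeuralEngine::MachineLearning;

int main()
{
    const int n = 10;

    // Objective callback matching the documented signature: returns f(x)
    // and writes the gradient into 'grad'. Here f(x) = sum(x^2), grad = 2x.
    auto objective = [](const af::array& x, af::array& grad) -> float
    {
        grad = 2.0f * x;
        return af::sum<float>(x * x);
    };

    NadamSolver<float> solver(n, objective);

    // Hyper-parameters documented above; the values are common defaults,
    // not values prescribed by the engine.
    solver.SetAlpha(0.002f);
    solver.SetBeta1(0.9f);
    solver.SetBeta2(0.999f);
    solver.SetEpsilon(1e-8f);
    solver.SetDecay(0.0f);

    // Launching the run is part of the base class interface declared
    // elsewhere, so it is not shown here.
    return 0;
}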