#include <MachineLearning/BaseGradientOptimizationMethod.h>
namespace MachineLearning
template<typename Scalar, LineSearchType LSType = MoreThuente>
NadamSolver(int numberOfVariables, std::function<Scalar(const af::array&, af::array&)> function);
virtual bool Optimize(int* cycle = nullptr) override;
Base class for gradient-based optimization methods.
Scalar GetBeta1()
Gets the decay rate for the first moment estimates.
NadamSolver(NonlinearObjectiveFunction< Scalar > *function)
Creates a new instance of the Nadam optimization algorithm.
void SetBeta2(Scalar beta2)
Sets the decay rate for the second moment estimates.
Scalar GetAlpha()
Gets the learning rate.
virtual bool Optimize(int *cycle=nullptr) override
Implements the actual optimization algorithm. This method should try to minimize the objective function.
void SetBeta1(Scalar beta1)
Sets the decay rate for the first moment estimates.
NadamSolver(int numberOfVariables)
Creates a new instance of the Nadam optimization algorithm.
void SetEpsilon(Scalar epsilon)
Sets an epsilon to avoid division by zero.
void SetDecay(Scalar decay)
Sets the initial decay rate.
~NadamSolver()
Destructor.
void SetAlpha(Scalar alpha)
Sets the learning rate.
NadamSolver(int numberOfVariables, std::function< Scalar(const af::array &, af::array &)> function)
Creates a new instance of the Nadam optimization algorithm.
Scalar GetEpsilon()
Gets the epsilon.
Scalar GetBeta2()
Gets the decay rate for the second moment estimates.
Scalar GetDecay()
Gets the initial decay rate.
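For orientation, the hyper-parameters above correspond to the conventional Nadam update (Dozat, 2016). The sketch below is a generic single update step over a plain parameter vector; it illustrates what alpha, beta1, beta2, epsilon and decay control under that standard formulation, not the internals of NadamSolver, and the 1/(1 + decay*t) learning-rate decay is an assumption.

#include <cmath>
#include <vector>

// One Nadam step in the conventional formulation; illustrative only.
void NadamStep(std::vector<double>& theta,        // parameters being optimized
               const std::vector<double>& grad,   // gradient at theta
               std::vector<double>& m,            // first moment estimates
               std::vector<double>& v,            // second moment estimates
               int t,                             // 1-based iteration count
               double alpha   = 0.002,            // learning rate
               double beta1   = 0.9,              // first moment decay rate
               double beta2   = 0.999,            // second moment decay rate
               double epsilon = 1e-8,             // avoids division by zero
               double decay   = 0.0)              // assumed 1/t learning-rate decay
{
    const double alphaT = alpha / (1.0 + decay * t);   // decayed step size (assumption)
    const double bc1 = 1.0 - std::pow(beta1, t);       // bias corrections
    const double bc2 = 1.0 - std::pow(beta2, t);
    for (std::size_t i = 0; i < theta.size(); ++i)
    {
        m[i] = beta1 * m[i] + (1.0 - beta1) * grad[i];
        v[i] = beta2 * v[i] + (1.0 - beta2) * grad[i] * grad[i];
        const double mHat = m[i] / bc1;
        const double vHat = v[i] / bc2;
        // Nesterov-accelerated moment: look ahead using the current gradient.
        const double mBar = beta1 * mHat + (1.0 - beta1) * grad[i] / bc1;
        theta[i] -= alphaT * mBar / (std::sqrt(vHat) + epsilon);
    }
}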