13#include <MachineLearning/BaseGradientOptimizationMethod.h>
17 namespace MachineLearning
58 template<
typename Scalar, LineSearchType LSType = MoreThuente>
86 std::function<
Scalar(
const af::array&, af::array&)> function);
206 virtual bool Optimize(
int* cycle =
nullptr)
override;
void SetBeta1(Scalar beta1)
Sets decay rate for the first moment estimates.
void SetAlpha(Scalar alpha)
Sets the learning rate.
void SetDecay(Scalar decay)
Sets initial decay rate.
Scalar GetEpsilon()
Gets the epsilon.
Scalar GetAlpha()
Gets the learning rate.
Scalar GetBeta1()
Gets decay rate for the first moment estimates.
~AdaMaxSolver()
Destructor.
void SetEpsilon(Scalar epsilon)
Sets an epsilon to avoid division by zero.
Scalar GetBeta2()
Gets decay rate for the second-moment estimates.
AdaMaxSolver(NonlinearObjectiveFunction< Scalar > *function)
Creates a new instance of the AdaMax optimization algorithm.
Scalar GetDecay()
Gets the initial decay.
AdaMaxSolver(int numberOfVariables)
Creates a new instance of the AdaMax optimization algorithm.
void SetBeta2(Scalar beta2)
Sets decay rate for the second-moment estimates.
virtual bool Optimize(int *cycle=nullptr) override
Implements the actual optimization algorithm. This method should try to minimize the objective function.
AdaMaxSolver(int numberOfVariables, std::function< Scalar(const af::array &, af::array &)> function)
Creates a new instance of the AdaMax optimization algorithm.
Base class for gradient-based optimization methods.