#include <MachineLearning/BaseGradientOptimizationMethod.h>

namespace MachineLearning

template<typename Scalar, LineSearchType LSType = MoreThuente>

std::function<Scalar(const af::array&, af::array&)> function

virtual bool Optimize(int* cycle = nullptr) override;
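A minimal usage sketch, assuming the solver is instantiated as AdamSolver<float> and that ArrayFire's af::array backs the variables (both suggested by the signatures above). The objective callback returns the function value and writes the gradient into its second argument; only members documented on this page are used, so reading back the solution is omitted:

    #include <MachineLearning/AdamSolver.h>   // assumed header name
    #include <arrayfire.h>

    int main()
    {
        const int n = 10;

        // Objective f(x) = sum(x^2); the analytic gradient 2*x is
        // written into the output parameter grad.
        auto objective = [](const af::array& x, af::array& grad) -> float
        {
            grad = 2 * x;
            return af::sum<float>(x * x);
        };

        MachineLearning::AdamSolver<float> solver(n, objective);
        solver.SetAlpha(0.001f);    // learning rate
        solver.SetBeta1(0.9f);      // first-moment decay rate
        solver.SetBeta2(0.999f);    // second-moment decay rate
        solver.SetEpsilon(1e-8f);   // guards against division by zero

        int cycles = 0;
        bool converged = solver.Optimize(&cycles);
        return converged ? 0 : 1;
    }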
AdamSolver(int numberOfVariables, std::function< Scalar(const af::array &, af::array &)> function)
Creates a new instance of the Adam optimization algorithm.
Scalar GetAlpha()
Gets the learning rate.
Scalar GetBeta1()
Gets the decay rate for the first-moment estimates.
void SetDecay(Scalar decay)
Sets the initial decay rate.
void SetBeta2(Scalar beta2)
Sets the decay rate for the second-moment estimates.
void SetEpsilon(Scalar epsilon)
Sets the epsilon used to avoid division by zero.
void SetAlpha(Scalar alpha)
Sets the learning rate.
Scalar GetEpsilon()
Gets the epsilon.
AdamSolver(int numberOfVariables)
Creates a new instance of the Adam optimization algorithm.
virtual bool Optimize(int *cycle=nullptr) override
Implements the actual optimization algorithm. This method should try to minimize the objective function; see the update-rule sketch at the end of this section.
Scalar GetBeta2()
Gets the decay rate for the second-moment estimates.
void SetBeta1(Scalar beta1)
Sets the decay rate for the first-moment estimates.
Scalar GetDecay()
Gets the initial decay rate.
AdamSolver(NonlinearObjectiveFunction< Scalar > *function)
Creates a new instance of the Adam optimization algorithm.
Base class for gradient-based optimization methods.
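For reference, a sketch of the update the parameters above control, assuming the standard Adam formulation (Kingma and Ba) and that decay applies a 1/(1 + decay*t) schedule to the learning rate; the library's actual step may differ in detail:

    #include <arrayfire.h>
    #include <cmath>

    // One Adam step: m and v are running first- and second-moment
    // estimates, t is the 1-based iteration count.
    void AdamStep(af::array& x, const af::array& grad,
                  af::array& m, af::array& v, int t,
                  float alpha, float beta1, float beta2,
                  float epsilon, float decay)
    {
        float alphaT = alpha / (1 + decay * t);        // assumed decay schedule
        m = beta1 * m + (1 - beta1) * grad;            // first-moment estimate
        v = beta2 * v + (1 - beta2) * grad * grad;     // second-moment estimate
        af::array mHat = m / (1 - std::pow(beta1, t)); // bias correction
        af::array vHat = v / (1 - std::pow(beta2, t));
        x -= alphaT * mHat / (af::sqrt(vHat) + epsilon);
    }

The epsilon in the denominator is what SetEpsilon guards with: without it, a near-zero second-moment estimate would cause division by zero.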