SG++-Doxygen-Documentation
Loading...
Searching...
No Matches
sgpp::optimization::optimizer::AdaptiveGradientDescent Class Reference

Gradient descent with adaptive step size. More...

#include <AdaptiveGradientDescent.hpp>

Inheritance diagram for sgpp::optimization::optimizer::AdaptiveGradientDescent:
sgpp::optimization::optimizer::UnconstrainedOptimizer

Public Member Functions

 AdaptiveGradientDescent (const AdaptiveGradientDescent &other)
 Copy constructor.
 
 AdaptiveGradientDescent (const base::ScalarFunction &f, const base::ScalarFunctionGradient &fGradient, size_t maxItCount=DEFAULT_N, double tolerance=DEFAULT_TOLERANCE, double stepSizeIncreaseFactor=DEFAULT_STEP_SIZE_INCREASE_FACTOR, double stepSizeDecreaseFactor=DEFAULT_STEP_SIZE_DECREASE_FACTOR, double lineSearchAccuracy=DEFAULT_LINE_SEARCH_ACCURACY)
 Constructor.
 
void clone (std::unique_ptr< UnconstrainedOptimizer > &clone) const override
 
double getLineSearchAccuracy () const
 
double getStepSizeDecreaseFactor () const
 
double getStepSizeIncreaseFactor () const
 
double getTolerance () const
 
void optimize () override
 Pure virtual method for optimization of the objective function.
 
void setLineSearchAccuracy (double lineSearchAccuracy)
 
void setStepSizeDecreaseFactor (double stepSizeDecreaseFactor)
 
void setStepSizeIncreaseFactor (double stepSizeIncreaseFactor)
 
void setTolerance (double tolerance)
 
 ~AdaptiveGradientDescent () override
 Destructor.
 
- Public Member Functions inherited from sgpp::optimization::optimizer::UnconstrainedOptimizer
const base::DataMatrix & getHistoryOfOptimalPoints () const
 
const base::DataVector & getHistoryOfOptimalValues () const
 
size_t getN () const
 
base::ScalarFunction & getObjectiveFunction () const
 
base::ScalarFunctionGradient & getObjectiveGradient () const
 
base::ScalarFunctionHessian & getObjectiveHessian () const
 
const base::DataVector & getOptimalPoint () const
 
double getOptimalValue () const
 
const base::DataVector & getStartingPoint () const
 
void setN (size_t N)
 
virtual void setObjectiveFunction (const base::ScalarFunction &f)
 
virtual void setObjectiveGradient (const base::ScalarFunctionGradient *fGradient)
 
virtual void setObjectiveHessian (const base::ScalarFunctionHessian *fHessian)
 
void setStartingPoint (const base::DataVector &startingPoint)
 
 UnconstrainedOptimizer (const base::ScalarFunction &f, const base::ScalarFunctionGradient *fGradient, const base::ScalarFunctionHessian *fHessian, size_t N=DEFAULT_N)
 Constructor.
 
 UnconstrainedOptimizer (const UnconstrainedOptimizer &other)
 Copy constructor.
 
virtual ~UnconstrainedOptimizer ()
 Destructor.
 

Static Public Attributes

static constexpr double DEFAULT_LINE_SEARCH_ACCURACY = 0.01
 default line search accuracy
 
static constexpr double DEFAULT_STEP_SIZE_DECREASE_FACTOR = 0.5
 default step size decrease factor
 
static constexpr double DEFAULT_STEP_SIZE_INCREASE_FACTOR = 1.2
 default step size increase factor
 
static constexpr double DEFAULT_TOLERANCE = 1e-6
 default tolerance
 
- Static Public Attributes inherited from sgpp::optimization::optimizer::UnconstrainedOptimizer
static const size_t DEFAULT_N = 1000
 default maximal number of iterations or function evaluations
 

Protected Attributes

double rhoAlphaMinus
 step size decrease factor
 
double rhoAlphaPlus
 step size increase factor
 
double rhoLs
 line search accuracy
 
double theta
 tolerance
 
- Protected Attributes inherited from sgpp::optimization::optimizer::UnconstrainedOptimizer
std::unique_ptr< base::ScalarFunction > f
 objective function
 
std::unique_ptr< base::ScalarFunctionGradient > fGradient
 objective function gradient
 
std::unique_ptr< base::ScalarFunctionHessian > fHessian
 objective function Hessian
 
base::DataVector fHist
 search history vector (optimal values)
 
double fOpt
 result of optimization (optimal function value)
 
size_t N
 maximal number of iterations or function evaluations
 
base::DataVector x0
 starting point
 
base::DataMatrix xHist
 search history matrix (optimal points)
 
base::DataVector xOpt
 result of optimization (location of optimum)
 

Detailed Description

Gradient descent with adaptive step size.

Constructor & Destructor Documentation

◆ AdaptiveGradientDescent() [1/2]

sgpp::optimization::optimizer::AdaptiveGradientDescent::AdaptiveGradientDescent ( const base::ScalarFunction &  f,
const base::ScalarFunctionGradient &  fGradient,
size_t  maxItCount = DEFAULT_N,
double  tolerance = DEFAULT_TOLERANCE,
double  stepSizeIncreaseFactor = DEFAULT_STEP_SIZE_INCREASE_FACTOR,
double  stepSizeDecreaseFactor = DEFAULT_STEP_SIZE_DECREASE_FACTOR,
double  lineSearchAccuracy = DEFAULT_LINE_SEARCH_ACCURACY 
)

Constructor.

Parameters
f — objective function
fGradient — objective function gradient
maxItCount — maximal number of function evaluations
tolerance — tolerance
stepSizeIncreaseFactor — step size increase factor
stepSizeDecreaseFactor — step size decrease factor
lineSearchAccuracy — line search accuracy

◆ AdaptiveGradientDescent() [2/2]

sgpp::optimization::optimizer::AdaptiveGradientDescent::AdaptiveGradientDescent ( const AdaptiveGradientDescent &  other)

Copy constructor.

Parameters
other — optimizer to be copied

◆ ~AdaptiveGradientDescent()

sgpp::optimization::optimizer::AdaptiveGradientDescent::~AdaptiveGradientDescent ( )
override

Destructor.

Member Function Documentation

◆ clone()

void sgpp::optimization::optimizer::AdaptiveGradientDescent::clone ( std::unique_ptr< UnconstrainedOptimizer > &  clone) const
override virtual
Parameters
[out] clone — pointer to cloned object

Implements sgpp::optimization::optimizer::UnconstrainedOptimizer.

References clone().

Referenced by clone().

◆ getLineSearchAccuracy()

double sgpp::optimization::optimizer::AdaptiveGradientDescent::getLineSearchAccuracy ( ) const
Returns
line search accuracy

References rhoLs.

◆ getStepSizeDecreaseFactor()

double sgpp::optimization::optimizer::AdaptiveGradientDescent::getStepSizeDecreaseFactor ( ) const
Returns
step size decrease factor

References rhoAlphaMinus.

◆ getStepSizeIncreaseFactor()

double sgpp::optimization::optimizer::AdaptiveGradientDescent::getStepSizeIncreaseFactor ( ) const
Returns
step size increase factor

References rhoAlphaPlus.

◆ getTolerance()

double sgpp::optimization::optimizer::AdaptiveGradientDescent::getTolerance ( ) const
Returns
tolerance

References theta.

◆ optimize()

◆ setLineSearchAccuracy()

void sgpp::optimization::optimizer::AdaptiveGradientDescent::setLineSearchAccuracy ( double  lineSearchAccuracy)
Parameters
lineSearchAccuracy — line search accuracy

References rhoLs.

◆ setStepSizeDecreaseFactor()

void sgpp::optimization::optimizer::AdaptiveGradientDescent::setStepSizeDecreaseFactor ( double  stepSizeDecreaseFactor)
Parameters
stepSizeDecreaseFactor — step size decrease factor

References rhoAlphaMinus.

◆ setStepSizeIncreaseFactor()

void sgpp::optimization::optimizer::AdaptiveGradientDescent::setStepSizeIncreaseFactor ( double  stepSizeIncreaseFactor)
Parameters
stepSizeIncreaseFactor — step size increase factor

References rhoAlphaPlus.

◆ setTolerance()

void sgpp::optimization::optimizer::AdaptiveGradientDescent::setTolerance ( double  tolerance)
Parameters
tolerance — tolerance

References theta.

Member Data Documentation

◆ DEFAULT_LINE_SEARCH_ACCURACY

constexpr double sgpp::optimization::optimizer::AdaptiveGradientDescent::DEFAULT_LINE_SEARCH_ACCURACY = 0.01
static constexpr

default line search accuracy

◆ DEFAULT_STEP_SIZE_DECREASE_FACTOR

constexpr double sgpp::optimization::optimizer::AdaptiveGradientDescent::DEFAULT_STEP_SIZE_DECREASE_FACTOR = 0.5
static constexpr

default step size decrease factor

◆ DEFAULT_STEP_SIZE_INCREASE_FACTOR

constexpr double sgpp::optimization::optimizer::AdaptiveGradientDescent::DEFAULT_STEP_SIZE_INCREASE_FACTOR = 1.2
static constexpr

default step size increase factor

◆ DEFAULT_TOLERANCE

constexpr double sgpp::optimization::optimizer::AdaptiveGradientDescent::DEFAULT_TOLERANCE = 1e-6
static constexpr

default tolerance

◆ rhoAlphaMinus

double sgpp::optimization::optimizer::AdaptiveGradientDescent::rhoAlphaMinus
protected

step size decrease factor

Referenced by getStepSizeDecreaseFactor(), optimize(), and setStepSizeDecreaseFactor().

◆ rhoAlphaPlus

double sgpp::optimization::optimizer::AdaptiveGradientDescent::rhoAlphaPlus
protected

step size increase factor

Referenced by getStepSizeIncreaseFactor(), optimize(), and setStepSizeIncreaseFactor().

◆ rhoLs

double sgpp::optimization::optimizer::AdaptiveGradientDescent::rhoLs
protected

line search accuracy

Referenced by getLineSearchAccuracy(), optimize(), and setLineSearchAccuracy().

◆ theta

double sgpp::optimization::optimizer::AdaptiveGradientDescent::theta
protected

tolerance

Referenced by getTolerance(), optimize(), and setTolerance().


The documentation for this class was generated from the following files: