#include <itkStochasticPreconditionedGradientDescentOptimizer.h>
This class implements a gradient descent optimizer with a decaying gain and preconditioning.
If C(x) is a cost function that has to be minimized, the following iterative update is used to find the optimal parameters x, with P the precondition matrix:
\[ x(k+1) = x(k) - a(k) \, P \, \frac{dC}{dx} \]
The gain a(k) at each iteration k is defined by
\[ a(k) = \frac{a}{(A + k + 1)^{\alpha}} . \]
It is very suitable to be used in combination with a stochastic estimate of the gradient dC/dx; in elastix you can set the parameter NewSamplesEveryIteration to "true" to achieve this effect. For more information on this strategy, you may have a look at:
[1] S. Klein, M. Staring, J.P.W. Pluim, "Evaluation of Optimization Methods for Nonrigid Medical Image Registration using Mutual Information and B-Splines," IEEE Transactions on Image Processing, vol. 16 (12), December 2007.
This class also serves as a base class for other preconditioned GradientDescent type algorithms, like the AdaptiveStochasticPreconditionedGradientDescentOptimizer.
Definition at line 56 of file itkStochasticPreconditionedGradientDescentOptimizer.h.
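The update above can be written out directly. The following stand-alone C++ sketch only illustrates that formula; it is not the elastix implementation. The dense matrix P stands in for the sparse precondition matrix the class actually uses, and a, A, alpha correspond to the Param_a, Param_A, Param_alpha settings documented below.

#include <cmath>
#include <vector>

// Illustrative types local to this sketch.
using Parameters = std::vector<double>;
using Matrix     = std::vector<std::vector<double>>;

// a(k) = a / (A + k + 1)^alpha  -- the decaying gain.
double DecayingGain(double a, double A, double alpha, double k)
{
  return a / std::pow(A + k + 1.0, alpha);
}

// x(k+1) = x(k) - a(k) * P * dC/dx  -- one preconditioned gradient descent step.
void Step(Parameters & x, const Matrix & P, const Parameters & gradient,
          double a, double A, double alpha, double k)
{
  const double gain = DecayingGain(a, A, alpha, k);
  for (std::size_t i = 0; i < x.size(); ++i)
  {
    double searchDirection = 0.0;
    for (std::size_t j = 0; j < gradient.size(); ++j)
    {
      searchDirection += P[i][j] * gradient[j]; // P * dC/dx
    }
    x[i] -= gain * searchDirection;
  }
}

In a stochastic setting, the gradient passed to Step would be re-estimated from a new random subset of samples in each iteration.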
Public Member Functions | |
virtual void | AdvanceOneStep () |
virtual const char * | GetClassName () const |
virtual double | GetCurrentTime () const |
virtual double | GetInitialTime () const |
virtual double | GetParam_a () const |
virtual double | GetParam_A () const |
virtual double | GetParam_alpha () const |
ITK_DISALLOW_COPY_AND_MOVE (StochasticPreconditionedGradientDescentOptimizer) | |
virtual void | SetInitialTime (double _arg) |
virtual void | SetParam_a (double _arg) |
virtual void | SetParam_A (double _arg) |
virtual void | SetParam_alpha (double _arg) |
virtual void | StartOptimization () |
Public Member Functions inherited from itk::PreconditionedGradientDescentOptimizer | |
virtual void | AdvanceOneStep () |
const cholmod_common * | GetCholmodCommon () const |
const cholmod_factor * | GetCholmodFactor () const |
virtual const char * | GetClassName () const |
virtual double | GetConditionNumber () const |
virtual unsigned int | GetCurrentIteration () const |
virtual double | GetDiagonalWeight () const |
virtual const DerivativeType & | GetGradient () |
virtual double | GetLargestEigenValue () const |
virtual const double & | GetLearningRate () |
virtual double | GetMinimumGradientElementMagnitude () const |
virtual const unsigned long & | GetNumberOfIterations () |
virtual const DerivativeType & | GetSearchDirection () |
virtual double | GetSparsity () const |
virtual const StopConditionType & | GetStopCondition () |
virtual const double & | GetValue () |
ITK_DISALLOW_COPY_AND_MOVE (PreconditionedGradientDescentOptimizer) | |
virtual void | MetricErrorResponse (ExceptionObject &err) |
virtual void | ResumeOptimization () |
virtual void | SetDiagonalWeight (double _arg) |
virtual void | SetLearningRate (double _arg) |
virtual void | SetMinimumGradientElementMagnitude (double _arg) |
virtual void | SetNumberOfIterations (unsigned long _arg) |
virtual void | SetPreconditionMatrix (PreconditionType &precondition) |
virtual void | StartOptimization () |
virtual void | StopOptimization () |
Public Member Functions inherited from itk::ScaledSingleValuedNonLinearOptimizer | |
virtual const char * | GetClassName () const |
const ParametersType & | GetCurrentPosition () const override |
virtual bool | GetMaximize () const |
virtual const ScaledCostFunctionType * | GetScaledCostFunction () |
virtual const ParametersType & | GetScaledCurrentPosition () |
bool | GetUseScales () const |
virtual void | InitializeScales () |
ITK_DISALLOW_COPY_AND_MOVE (ScaledSingleValuedNonLinearOptimizer) | |
virtual void | MaximizeOff () |
virtual void | MaximizeOn () |
void | SetCostFunction (CostFunctionType *costFunction) override |
virtual void | SetMaximize (bool _arg) |
virtual void | SetUseScales (bool arg) |
Static Public Member Functions | |
static Pointer | New () |
Static Public Member Functions inherited from itk::PreconditionedGradientDescentOptimizer | |
static Pointer | New () |
Static Public Member Functions inherited from itk::ScaledSingleValuedNonLinearOptimizer | |
static Pointer | New () |
Protected Member Functions | |
virtual double | Compute_a (double k) const |
StochasticPreconditionedGradientDescentOptimizer () | |
virtual void | UpdateCurrentTime () |
virtual | ~StochasticPreconditionedGradientDescentOptimizer () |
Protected Member Functions inherited from itk::PreconditionedGradientDescentOptimizer | |
virtual void | CholmodSolve (const DerivativeType &gradient, DerivativeType &searchDirection, int solveType=CHOLMOD_A) |
PreconditionedGradientDescentOptimizer () | |
void | PrintSelf (std::ostream &os, Indent indent) const |
virtual | ~PreconditionedGradientDescentOptimizer () |
Protected Member Functions inherited from itk::ScaledSingleValuedNonLinearOptimizer | |
virtual void | GetScaledDerivative (const ParametersType ¶meters, DerivativeType &derivative) const |
virtual MeasureType | GetScaledValue (const ParametersType ¶meters) const |
virtual void | GetScaledValueAndDerivative (const ParametersType ¶meters, MeasureType &value, DerivativeType &derivative) const |
void | PrintSelf (std::ostream &os, Indent indent) const override |
ScaledSingleValuedNonLinearOptimizer () | |
void | SetCurrentPosition (const ParametersType ¶m) override |
virtual void | SetScaledCurrentPosition (const ParametersType ¶meters) |
~ScaledSingleValuedNonLinearOptimizer () override=default | |
Protected Attributes | |
double | m_CurrentTime { 0.0 } |
Protected Attributes inherited from itk::PreconditionedGradientDescentOptimizer | |
cholmod_common * | m_CholmodCommon |
cholmod_factor * | m_CholmodFactor { nullptr } |
cholmod_sparse * | m_CholmodGradient { nullptr } |
double | m_ConditionNumber { 1.0 } |
DerivativeType | m_Gradient |
double | m_LargestEigenValue { 1.0 } |
double | m_LearningRate { 1.0 } |
DerivativeType | m_SearchDirection |
double | m_Sparsity { 1.0 } |
StopConditionType | m_StopCondition { MaximumNumberOfIterations } |
Protected Attributes inherited from itk::ScaledSingleValuedNonLinearOptimizer | |
ScaledCostFunctionPointer | m_ScaledCostFunction |
ParametersType | m_ScaledCurrentPosition |
Private Attributes | |
double | m_InitialTime { 0.0 } |
double | m_Param_a { 1.0 } |
double | m_Param_A { 1.0 } |
double | m_Param_alpha { 0.602 } |
Additional Inherited Members | |
Protected Types inherited from itk::PreconditionedGradientDescentOptimizer | |
using | CInt = int |
using itk::StochasticPreconditionedGradientDescentOptimizer::ConstPointer = SmartPointer<const Self> |
Definition at line 66 of file itkStochasticPreconditionedGradientDescentOptimizer.h.
using itk::StochasticPreconditionedGradientDescentOptimizer::Pointer = SmartPointer<Self> |
Definition at line 65 of file itkStochasticPreconditionedGradientDescentOptimizer.h.
using itk::PreconditionedGradientDescentOptimizer::PreconditionType = vnl_sparse_matrix<PreconditionValueType> |
Definition at line 86 of file itkPreconditionedGradientDescentOptimizer.h.
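As a hedged usage sketch (assumed, not taken from the elastix sources): a matrix of this type can be filled like any vnl_sparse_matrix and then passed to SetPreconditionMatrix of an optimizer derived from PreconditionedGradientDescentOptimizer. The function and template parameter below are illustrative names only.

#include <vnl/vnl_sparse_matrix.h>

// Build a diagonal (identity) precondition matrix and hand it to the optimizer.
template <typename TOptimizer>
void
UseIdentityPrecondition(TOptimizer & optimizer, unsigned int numberOfParameters)
{
  vnl_sparse_matrix<double> precondition(numberOfParameters, numberOfParameters);
  for (unsigned int i = 0; i < numberOfParameters; ++i)
  {
    precondition(i, i) = 1.0; // only stored entries are non-zero
  }
  optimizer.SetPreconditionMatrix(precondition);
}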
using itk::PreconditionedGradientDescentOptimizer::PreconditionValueType = DerivativeType::ValueType |
Some typedefs for computing the SelfHessian
Definition at line 82 of file itkPreconditionedGradientDescentOptimizer.h.
using itk::ScaledSingleValuedNonLinearOptimizer::ScaledCostFunctionPointer = ScaledCostFunctionType::Pointer |
Definition at line 79 of file itkScaledSingleValuedNonLinearOptimizer.h.
using itk::ScaledSingleValuedNonLinearOptimizer::ScaledCostFunctionType = ScaledSingleValuedCostFunction |
Definition at line 78 of file itkScaledSingleValuedNonLinearOptimizer.h.
using itk::ScaledSingleValuedNonLinearOptimizer::ScalesType = NonLinearOptimizer::ScalesType |
Definition at line 77 of file itkScaledSingleValuedNonLinearOptimizer.h.
using itk::StochasticPreconditionedGradientDescentOptimizer::Self = StochasticPreconditionedGradientDescentOptimizer |
Standard ITK.
Definition at line 62 of file itkStochasticPreconditionedGradientDescentOptimizer.h.
using itk::StochasticPreconditionedGradientDescentOptimizer::Superclass = PreconditionedGradientDescentOptimizer |
Definition at line 63 of file itkStochasticPreconditionedGradientDescentOptimizer.h.
Codes of stopping conditions. The MinimumStepSize stop condition never occurs, but may be implemented in inheriting classes.
Definition at line 92 of file itkPreconditionedGradientDescentOptimizer.h.
itk::StochasticPreconditionedGradientDescentOptimizer::StochasticPreconditionedGradientDescentOptimizer ()
protected
virtual itk::StochasticPreconditionedGradientDescentOptimizer::~StochasticPreconditionedGradientDescentOptimizer ()
inline protected virtual
Definition at line 126 of file itkStochasticPreconditionedGradientDescentOptimizer.h.
virtual void itk::StochasticPreconditionedGradientDescentOptimizer::AdvanceOneStep ()
virtual
Sets a new LearningRate before calling the Superclass' implementation, and updates the current time.
Reimplemented from itk::PreconditionedGradientDescentOptimizer.
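Read together with the gain formula in the class description, this amounts to: compute the gain from the current time, take the preconditioned step with that learning rate, then advance the time. A minimal, hypothetical sketch of that sequence (not the elastix code):

#include <cmath>

// Hypothetical, simplified optimizer used only to illustrate the described call order.
class AdvanceOneStepSketch
{
public:
  void AdvanceOneStep()
  {
    // New learning rate from the current time: a(t) = a / (A + t + 1)^alpha.
    m_LearningRate = m_a / std::pow(m_A + m_CurrentTime + 1.0, m_alpha);
    // Here the superclass would take the preconditioned gradient step using
    // m_LearningRate (omitted in this sketch).
    // Finally the current time is updated; the base class increments it by 1.
    m_CurrentTime += 1.0;
  }

private:
  double m_CurrentTime{ 0.0 };
  double m_LearningRate{ 1.0 };
  double m_a{ 1.0 };
  double m_A{ 1.0 };
  double m_alpha{ 0.602 };
};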
virtual double itk::StochasticPreconditionedGradientDescentOptimizer::Compute_a (double k) const
protected virtual
Function to compute the parameter at time/iteration k.
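With the default values listed under Private Attributes above (a = 1.0, A = 1.0, alpha = 0.602), this gives, for example, a(0) = 1 / (1 + 0 + 1)^0.602 = 2^(-0.602) ≈ 0.66, and the computed gain decreases monotonically towards zero as k grows.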
virtual const char * itk::StochasticPreconditionedGradientDescentOptimizer::GetClassName () const
virtual
Run-time type information (and related methods).
Reimplemented from itk::PreconditionedGradientDescentOptimizer.
Reimplemented in elastix::PreconditionedGradientDescent< TElastix >, and itk::AdaptiveStochasticPreconditionedGradientDescentOptimizer.
virtual double itk::StochasticPreconditionedGradientDescentOptimizer::GetCurrentTime () const
virtual
Get the current time. This equals the CurrentIteration in this base class but may be different in inheriting classes, such as the AcceleratedGradientDescent.
virtual double itk::StochasticPreconditionedGradientDescentOptimizer::GetInitialTime () const
virtual
virtual double itk::StochasticPreconditionedGradientDescentOptimizer::GetParam_a () const
virtual
virtual double itk::StochasticPreconditionedGradientDescentOptimizer::GetParam_A () const
virtual
virtual double itk::StochasticPreconditionedGradientDescentOptimizer::GetParam_alpha () const
virtual
itk::StochasticPreconditionedGradientDescentOptimizer::ITK_DISALLOW_COPY_AND_MOVE (StochasticPreconditionedGradientDescentOptimizer)
static Pointer itk::StochasticPreconditionedGradientDescentOptimizer::New ()
static
Method for creation through the object factory.
virtual void itk::StochasticPreconditionedGradientDescentOptimizer::SetInitialTime (double _arg)
virtual
Set/Get the initial time. Should be >= 0. This function is superfluous, since Param_A effectively does the same. However, in inheriting classes, like the AcceleratedGradientDescent, the initial time may have a different function than Param_A. Default: 0.0.
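In this class the equivalence is direct: the time advances by one per iteration, so the gain depends only on A + t + 1. Starting at an initial time t0 instead of 0 produces a(k) = a / (A + t0 + k + 1)^alpha, which is exactly the gain sequence obtained with initial time 0 and Param_A set to A + t0.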
virtual void itk::StochasticPreconditionedGradientDescentOptimizer::SetParam_a (double _arg)
virtual
Set/Get a.
virtual void itk::StochasticPreconditionedGradientDescentOptimizer::SetParam_A (double _arg)
virtual
Set/Get A.
virtual void itk::StochasticPreconditionedGradientDescentOptimizer::SetParam_alpha (double _arg)
virtual
Set/Get alpha.
virtual void itk::StochasticPreconditionedGradientDescentOptimizer::StartOptimization ()
virtual
Set current time to 0 and call superclass' implementation.
Reimplemented from itk::PreconditionedGradientDescentOptimizer.
Reimplemented in elastix::PreconditionedGradientDescent< TElastix >.
virtual void itk::StochasticPreconditionedGradientDescentOptimizer::UpdateCurrentTime ()
protected virtual
Function to update the current time. This function simply increments the CurrentTime by 1. Inheriting classes may implement something smarter, for example, dependent on the progress.
Reimplemented in itk::AdaptiveStochasticPreconditionedGradientDescentOptimizer.
double itk::StochasticPreconditionedGradientDescentOptimizer::m_CurrentTime { 0.0 }
protected
The current time, which serves as input for Compute_a
Definition at line 141 of file itkStochasticPreconditionedGradientDescentOptimizer.h.
double itk::StochasticPreconditionedGradientDescentOptimizer::m_InitialTime { 0.0 }
private
Settings
Definition at line 150 of file itkStochasticPreconditionedGradientDescentOptimizer.h.
double itk::StochasticPreconditionedGradientDescentOptimizer::m_Param_a { 1.0 }
private
Parameters, as described by Spall.
Definition at line 145 of file itkStochasticPreconditionedGradientDescentOptimizer.h.
double itk::StochasticPreconditionedGradientDescentOptimizer::m_Param_A { 1.0 }
private
Definition at line 146 of file itkStochasticPreconditionedGradientDescentOptimizer.h.
double itk::StochasticPreconditionedGradientDescentOptimizer::m_Param_alpha { 0.602 }
private
Definition at line 147 of file itkStochasticPreconditionedGradientDescentOptimizer.h.