itk::StochasticVarianceReducedGradientDescentOptimizer Class Reference

#include <itkStochasticVarianceReducedGradientDescentOptimizer.h>

Detailed Description

Implements a gradient descent optimizer.

StochasticVarianceReducedGradientDescentOptimizer implements a simple gradient descent optimizer. At each iteration the current position is updated according to

\[
       p_{n+1} = p_n
               + \mbox{learningRate} \, \frac{\partial f(p_n) }{\partial p_n}
\]

The learning rate is a fixed scalar defined via SetLearningRate(). The optimizer steps through a user-defined number of iterations; no convergence checking is done.

Additionally, the user can scale each component of $\partial f / \partial p$ by setting a scaling vector using the method SetScale().

This class differs from itk::GradientDescentOptimizer in that it is based on the ScaledSingleValuedNonLinearOptimizer.
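
The following minimal usage sketch is not part of the generated documentation. It assumes a hypothetical itk::SingleValuedCostFunction subclass named MyMetric and a hypothetical initial parameter vector initialParameters:

  #include "itkStochasticVarianceReducedGradientDescentOptimizer.h"
  #include <iostream>

  // MyMetric is a hypothetical itk::SingleValuedCostFunction subclass providing
  // GetValue() and GetDerivative() for the problem at hand.
  auto metric = MyMetric::New();
  auto optimizer = itk::StochasticVarianceReducedGradientDescentOptimizer::New();

  optimizer->SetCostFunction(metric);                // inherited via ScaledSingleValuedNonLinearOptimizer
  optimizer->SetInitialPosition(initialParameters);  // inherited from itk::Optimizer
  optimizer->SetLearningRate(0.01);                  // fixed scalar step size
  optimizer->SetNumberOfIterations(500);             // no convergence check is performed

  optimizer->StartOptimization();

  const auto & result = optimizer->GetCurrentPosition();
  std::cout << "final value: " << optimizer->GetValue() << std::endl;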

See also
ScaledSingleValuedNonLinearOptimizer

Definition at line 52 of file itkStochasticVarianceReducedGradientDescentOptimizer.h.

Inheritance diagram for itk::StochasticVarianceReducedGradientDescentOptimizer:

Data Structures

struct  MultiThreaderParameterType
 

Public Types

using ConstPointer = SmartPointer< const Self >
 
using Pointer = SmartPointer< Self >
 
using ScaledCostFunctionPointer = ScaledCostFunctionType::Pointer
 
using ScaledCostFunctionType = ScaledSingleValuedCostFunction
 
using ScalesType = NonLinearOptimizer::ScalesType
 
using Self = StochasticVarianceReducedGradientDescentOptimizer
 
enum  StopConditionType {
  MaximumNumberOfIterations , MetricError , MinimumStepSize , InvalidDiagonalMatrix ,
  GradientMagnitudeTolerance , LineSearchError
}
 
using Superclass = ScaledSingleValuedNonLinearOptimizer
 
- Public Types inherited from itk::ScaledSingleValuedNonLinearOptimizer
using ConstPointer = SmartPointer< const Self >
 
using Pointer = SmartPointer< Self >
 
using ScaledCostFunctionPointer = ScaledCostFunctionType::Pointer
 
using ScaledCostFunctionType = ScaledSingleValuedCostFunction
 
using ScalesType = NonLinearOptimizer::ScalesType
 
using Self = ScaledSingleValuedNonLinearOptimizer
 
using Superclass = SingleValuedNonLinearOptimizer
 

Public Member Functions

virtual void AdvanceOneStep ()
 
virtual const char * GetClassName () const
 
virtual unsigned int GetCurrentInnerIteration () const
 
virtual unsigned int GetCurrentIteration () const
 
virtual const DerivativeType & GetGradient ()
 
virtual unsigned int GetLBFGSMemory () const
 
virtual const double & GetLearningRate ()
 
virtual const unsigned long & GetNumberOfInnerIterations ()
 
virtual const unsigned long & GetNumberOfIterations ()
 
virtual const DerivativeType & GetPreviousGradient ()
 
virtual const ParametersType & GetPreviousPosition ()
 
virtual const DerivativeType & GetSearchDir ()
 
virtual const StopConditionType & GetStopCondition ()
 
virtual const double & GetValue ()
 
 ITK_DISALLOW_COPY_AND_MOVE (StochasticVarianceReducedGradientDescentOptimizer)
 
virtual void MetricErrorResponse (ExceptionObject &err)
 
virtual void ResumeOptimization ()
 
virtual void SetLearningRate (double _arg)
 
virtual void SetNumberOfIterations (unsigned long _arg)
 
void SetNumberOfWorkUnits (ThreadIdType numberOfThreads)
 
virtual void SetPreviousGradient (DerivativeType _arg)
 
virtual void SetPreviousPosition (ParametersType _arg)
 
virtual void SetUseEigen (bool _arg)
 
virtual void SetUseMultiThread (bool _arg)
 
virtual void SetUseOpenMP (bool _arg)
 
void StartOptimization () override
 
virtual void StopOptimization ()
 
- Public Member Functions inherited from itk::ScaledSingleValuedNonLinearOptimizer
virtual const char * GetClassName () const
 
const ParametersType & GetCurrentPosition () const override
 
virtual bool GetMaximize () const
 
virtual const ScaledCostFunctionType * GetScaledCostFunction ()
 
virtual const ParametersType & GetScaledCurrentPosition ()
 
bool GetUseScales () const
 
virtual void InitializeScales ()
 
 ITK_DISALLOW_COPY_AND_MOVE (ScaledSingleValuedNonLinearOptimizer)
 
virtual void MaximizeOff ()
 
virtual void MaximizeOn ()
 
void SetCostFunction (CostFunctionType *costFunction) override
 
virtual void SetMaximize (bool _arg)
 
virtual void SetUseScales (bool arg)
 

Static Public Member Functions

static Pointer New ()
 
- Static Public Member Functions inherited from itk::ScaledSingleValuedNonLinearOptimizer
static Pointer New ()
 

Protected Types

using ThreaderType = itk::PlatformMultiThreader
 
using ThreadInfoType = ThreaderType::WorkUnitInfo
 

Protected Member Functions

void PrintSelf (std::ostream &os, Indent indent) const override
 
 StochasticVarianceReducedGradientDescentOptimizer ()
 
 ~StochasticVarianceReducedGradientDescentOptimizer () override=default
 
- Protected Member Functions inherited from itk::ScaledSingleValuedNonLinearOptimizer
virtual void GetScaledDerivative (const ParametersType &parameters, DerivativeType &derivative) const
 
virtual MeasureType GetScaledValue (const ParametersType &parameters) const
 
virtual void GetScaledValueAndDerivative (const ParametersType &parameters, MeasureType &value, DerivativeType &derivative) const
 
void PrintSelf (std::ostream &os, Indent indent) const override
 
 ScaledSingleValuedNonLinearOptimizer ()
 
void SetCurrentPosition (const ParametersType &param) override
 
virtual void SetScaledCurrentPosition (const ParametersType &parameters)
 
 ~ScaledSingleValuedNonLinearOptimizer () override=default
 

Protected Attributes

unsigned long m_CurrentInnerIteration
 
unsigned long m_CurrentIteration { 0 }
 
DerivativeType m_Gradient
 
unsigned long m_LBFGSMemory { 0 }
 
double m_LearningRate { 1.0 }
 
ParametersType m_MeanSearchDir
 
unsigned long m_NumberOfInnerIterations
 
unsigned long m_NumberOfIterations { 100 }
 
DerivativeType m_PreviousGradient
 
ParametersType m_PreviousPosition
 
ParametersType m_PreviousSearchDir
 
ParametersType m_SearchDir
 
bool m_Stop { false }
 
StopConditionType m_StopCondition { MaximumNumberOfIterations }
 
ThreaderType::Pointer m_Threader { ThreaderType::New() }
 
double m_Value { 0.0 }
 
- Protected Attributes inherited from itk::ScaledSingleValuedNonLinearOptimizer
ScaledCostFunctionPointer m_ScaledCostFunction
 
ParametersType m_ScaledCurrentPosition
 

Private Member Functions

void ThreadedAdvanceOneStep (ThreadIdType threadId, ParametersType &newPosition)
 

Static Private Member Functions

static ITK_THREAD_RETURN_FUNCTION_CALL_CONVENTION AdvanceOneStepThreaderCallback (void *arg)
 

Private Attributes

bool m_UseEigen { false }
 
bool m_UseMultiThread { false }
 
bool m_UseOpenMP { false }
 

Member Typedef Documentation

◆ ConstPointer

◆ Pointer

◆ ScaledCostFunctionPointer

Definition at line 87 of file itkScaledSingleValuedNonLinearOptimizer.h.

◆ ScaledCostFunctionType

Definition at line 86 of file itkScaledSingleValuedNonLinearOptimizer.h.

◆ ScalesType

using itk::ScaledSingleValuedNonLinearOptimizer::ScalesType = NonLinearOptimizer::ScalesType

Definition at line 85 of file itkScaledSingleValuedNonLinearOptimizer.h.

◆ Self

Standard class typedefs.

Definition at line 58 of file itkStochasticVarianceReducedGradientDescentOptimizer.h.

◆ Superclass

◆ ThreaderType

using itk::StochasticVarianceReducedGradientDescentOptimizer::ThreaderType = itk::PlatformMultiThreader
protected

Typedefs for multi-threading.

Definition at line 180 of file itkStochasticVarianceReducedGradientDescentOptimizer.h.

◆ ThreadInfoType

using itk::StochasticVarianceReducedGradientDescentOptimizer::ThreadInfoType = ThreaderType::WorkUnitInfo
protected

Member Enumeration Documentation

◆ StopConditionType

Codes of stopping conditions. The MinimumStepSize stop condition never occurs, but may be implemented in inheriting classes.

Enumerator
MaximumNumberOfIterations 
MetricError 
MinimumStepSize 
InvalidDiagonalMatrix 
GradientMagnitudeTolerance 
LineSearchError 

Definition at line 81 of file itkStochasticVarianceReducedGradientDescentOptimizer.h.
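
For illustration only (not part of the generated documentation), a sketch of inspecting the stop condition after a run, continuing the usage sketch above and using the enumerators listed here:

  using OptimizerType = itk::StochasticVarianceReducedGradientDescentOptimizer;
  switch (optimizer->GetStopCondition())
  {
    case OptimizerType::MaximumNumberOfIterations:
      std::cout << "Iteration budget exhausted." << std::endl;
      break;
    case OptimizerType::MetricError:
      std::cout << "The cost function threw an exception." << std::endl;
      break;
    default:
      std::cout << "Stopped for another reason." << std::endl;
      break;
  }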

Constructor & Destructor Documentation

◆ StochasticVarianceReducedGradientDescentOptimizer()

itk::StochasticVarianceReducedGradientDescentOptimizer::StochasticVarianceReducedGradientDescentOptimizer ( )
protected

◆ ~StochasticVarianceReducedGradientDescentOptimizer()

itk::StochasticVarianceReducedGradientDescentOptimizer::~StochasticVarianceReducedGradientDescentOptimizer ( )
override protected default

Member Function Documentation

◆ AdvanceOneStep()

virtual void itk::StochasticVarianceReducedGradientDescentOptimizer::AdvanceOneStep ( )
virtual

◆ AdvanceOneStepThreaderCallback()

static ITK_THREAD_RETURN_FUNCTION_CALL_CONVENTION itk::StochasticVarianceReducedGradientDescentOptimizer::AdvanceOneStepThreaderCallback ( void *  arg)
static private

The callback function.

◆ GetClassName()

virtual const char * itk::StochasticVarianceReducedGradientDescentOptimizer::GetClassName ( ) const
virtual

◆ GetCurrentInnerIteration()

virtual unsigned int itk::StochasticVarianceReducedGradientDescentOptimizer::GetCurrentInnerIteration ( ) const
virtual

Get the current inner iteration number.

◆ GetCurrentIteration()

virtual unsigned int itk::StochasticVarianceReducedGradientDescentOptimizer::GetCurrentIteration ( ) const
virtual

Get the current iteration number.

◆ GetGradient()

virtual const DerivativeType & itk::StochasticVarianceReducedGradientDescentOptimizer::GetGradient ( )
virtual

Get current gradient.

◆ GetLBFGSMemory()

virtual unsigned int itk::StochasticVarianceReducedGradientDescentOptimizer::GetLBFGSMemory ( ) const
virtual

Get the inner LBFGSMemory.

◆ GetLearningRate()

virtual const double & itk::StochasticVarianceReducedGradientDescentOptimizer::GetLearningRate ( )
virtual

Get the learning rate.

◆ GetNumberOfInnerIterations()

virtual const unsigned long & itk::StochasticVarianceReducedGradientDescentOptimizer::GetNumberOfInnerIterations ( )
virtual

Get the number of inner loop iterations.

◆ GetNumberOfIterations()

virtual const unsigned long & itk::StochasticVarianceReducedGradientDescentOptimizer::GetNumberOfIterations ( )
virtual

Get the number of iterations.

◆ GetPreviousGradient()

virtual const DerivativeType & itk::StochasticVarianceReducedGradientDescentOptimizer::GetPreviousGradient ( )
virtual

Get the Previous gradient.

◆ GetPreviousPosition()

virtual const ParametersType & itk::StochasticVarianceReducedGradientDescentOptimizer::GetPreviousPosition ( )
virtual

Get the Previous Position.

◆ GetSearchDir()

virtual const DerivativeType & itk::StochasticVarianceReducedGradientDescentOptimizer::GetSearchDir ( )
virtual

Get current search direction.

◆ GetStopCondition()

virtual const StopConditionType & itk::StochasticVarianceReducedGradientDescentOptimizer::GetStopCondition ( )
virtual

Get Stop condition.

◆ GetValue()

virtual const double & itk::StochasticVarianceReducedGradientDescentOptimizer::GetValue ( )
virtual

Get the current value.

◆ ITK_DISALLOW_COPY_AND_MOVE()

itk::StochasticVarianceReducedGradientDescentOptimizer::ITK_DISALLOW_COPY_AND_MOVE ( StochasticVarianceReducedGradientDescentOptimizer  )

◆ MetricErrorResponse()

virtual void itk::StochasticVarianceReducedGradientDescentOptimizer::MetricErrorResponse ( ExceptionObject &  err)
virtual

Stop optimization and pass on exception.

◆ New()

static Pointer itk::StochasticVarianceReducedGradientDescentOptimizer::New ( )
static

Method for creation through the object factory.

◆ PrintSelf()

void itk::StochasticVarianceReducedGradientDescentOptimizer::PrintSelf ( std::ostream &  os,
Indent  indent 
) const
override protected

◆ ResumeOptimization()

virtual void itk::StochasticVarianceReducedGradientDescentOptimizer::ResumeOptimization ( )
virtual

Resume previously stopped optimization with the current parameters.

See also
StopOptimization.

Reimplemented in elastix::AdaptiveStochasticVarianceReducedGradient< TElastix >.
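
As a hedged sketch of the stop/resume pattern (the exact interaction between the iteration counter and ResumeOptimization() should be checked against the implementation):

  optimizer->SetNumberOfIterations(100);
  optimizer->StartOptimization();        // stops when the budget is reached or StopOptimization() is called

  optimizer->SetNumberOfIterations(200); // enlarge the iteration budget
  optimizer->ResumeOptimization();       // continue from the current parameters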

◆ SetLearningRate()

virtual void itk::StochasticVarianceReducedGradientDescentOptimizer::SetLearningRate ( double  _arg)
virtual

Set the learning rate.

◆ SetNumberOfIterations()

virtual void itk::StochasticVarianceReducedGradientDescentOptimizer::SetNumberOfIterations ( unsigned long  _arg)
virtual

Set the number of iterations.

◆ SetNumberOfWorkUnits()

void itk::StochasticVarianceReducedGradientDescentOptimizer::SetNumberOfWorkUnits ( ThreadIdType  numberOfThreads)
inline

Set the number of threads.

Definition at line 163 of file itkStochasticVarianceReducedGradientDescentOptimizer.h.
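
A minimal sketch of enabling the threaded parameter update (the work-unit count of 4 is an arbitrary assumption):

  optimizer->SetUseMultiThread(true); // enable the threaded implementation of AdvanceOneStep()
  optimizer->SetNumberOfWorkUnits(4); // number of work units for the internal threader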

◆ SetPreviousGradient()

virtual void itk::StochasticVarianceReducedGradientDescentOptimizer::SetPreviousGradient ( DerivativeType  _arg)
virtual

Set the Previous gradient.

◆ SetPreviousPosition()

virtual void itk::StochasticVarianceReducedGradientDescentOptimizer::SetPreviousPosition ( ParametersType  _arg)
virtual

Set the Previous Position.

◆ SetUseEigen()

virtual void itk::StochasticVarianceReducedGradientDescentOptimizer::SetUseEigen ( bool  _arg)
virtual

◆ SetUseMultiThread()

virtual void itk::StochasticVarianceReducedGradientDescentOptimizer::SetUseMultiThread ( bool  _arg)
virtual

◆ SetUseOpenMP()

virtual void itk::StochasticVarianceReducedGradientDescentOptimizer::SetUseOpenMP ( bool  _arg)
virtual

◆ StartOptimization()

void itk::StochasticVarianceReducedGradientDescentOptimizer::StartOptimization ( )
override

Start optimization.

◆ StopOptimization()

virtual void itk::StochasticVarianceReducedGradientDescentOptimizer::StopOptimization ( )
virtual

◆ ThreadedAdvanceOneStep()

void itk::StochasticVarianceReducedGradientDescentOptimizer::ThreadedAdvanceOneStep ( ThreadIdType  threadId,
ParametersType &  newPosition 
)
inline private

The threaded implementation of AdvanceOneStep().

Field Documentation

◆ m_CurrentInnerIteration

unsigned long itk::StochasticVarianceReducedGradientDescentOptimizer::m_CurrentInnerIteration
protected

◆ m_CurrentIteration

unsigned long itk::StochasticVarianceReducedGradientDescentOptimizer::m_CurrentIteration { 0 }
protected

◆ m_Gradient

DerivativeType itk::StochasticVarianceReducedGradientDescentOptimizer::m_Gradient
protected

◆ m_LBFGSMemory

unsigned long itk::StochasticVarianceReducedGradientDescentOptimizer::m_LBFGSMemory { 0 }
protected

◆ m_LearningRate

double itk::StochasticVarianceReducedGradientDescentOptimizer::m_LearningRate { 1.0 }
protected

◆ m_MeanSearchDir

ParametersType itk::StochasticVarianceReducedGradientDescentOptimizer::m_MeanSearchDir
protected

◆ m_NumberOfInnerIterations

unsigned long itk::StochasticVarianceReducedGradientDescentOptimizer::m_NumberOfInnerIterations
protected

◆ m_NumberOfIterations

unsigned long itk::StochasticVarianceReducedGradientDescentOptimizer::m_NumberOfIterations { 100 }
protected

◆ m_PreviousGradient

DerivativeType itk::StochasticVarianceReducedGradientDescentOptimizer::m_PreviousGradient
protected

◆ m_PreviousPosition

ParametersType itk::StochasticVarianceReducedGradientDescentOptimizer::m_PreviousPosition
protected

◆ m_PreviousSearchDir

ParametersType itk::StochasticVarianceReducedGradientDescentOptimizer::m_PreviousSearchDir
protected

◆ m_SearchDir

ParametersType itk::StochasticVarianceReducedGradientDescentOptimizer::m_SearchDir
protected

◆ m_Stop

bool itk::StochasticVarianceReducedGradientDescentOptimizer::m_Stop { false }
protected

◆ m_StopCondition

StopConditionType itk::StochasticVarianceReducedGradientDescentOptimizer::m_StopCondition { MaximumNumberOfIterations }
protected

◆ m_Threader

ThreaderType::Pointer itk::StochasticVarianceReducedGradientDescentOptimizer::m_Threader { ThreaderType::New() }
protected

◆ m_UseEigen

bool itk::StochasticVarianceReducedGradientDescentOptimizer::m_UseEigen { false }
private

◆ m_UseMultiThread

bool itk::StochasticVarianceReducedGradientDescentOptimizer::m_UseMultiThread { false }
private

◆ m_UseOpenMP

bool itk::StochasticVarianceReducedGradientDescentOptimizer::m_UseOpenMP { false }
private

◆ m_Value

double itk::StochasticVarianceReducedGradientDescentOptimizer::m_Value { 0.0 }
protected


Generated on 2023-01-13 for elastix by doxygen 1.9.6