18#ifndef itkPreconditionedASGDOptimizer_h
19#define itkPreconditionedASGDOptimizer_h
21#include "../StandardGradientDescent/itkStandardGradientDescentOptimizer.h"
88 using Superclass::MeasureType;
89 using Superclass::ParametersType;
90 using Superclass::DerivativeType;
91 using Superclass::CostFunctionType;
98 itkSetMacro(UseAdaptiveStepSizes,
bool);
99 itkGetConstMacro(UseAdaptiveStepSizes,
bool);
103 itkSetMacro(SigmoidMax,
double);
104 itkGetConstMacro(SigmoidMax,
double);
108 itkSetMacro(SigmoidMin,
double);
109 itkGetConstMacro(SigmoidMin,
double);
113 itkSetMacro(SigmoidScale,
double);
114 itkGetConstMacro(SigmoidScale,
double);
117 itkGetConstReferenceMacro(PreconditionVector, ParametersType);
// NOTE(review): The lines below are shuffled fragments, apparently torn from
// the class body and its Doxygen comments during extraction: declarations are
// out of order, comment markers are stripped from prose sentences, and several
// statements lack trailing semicolons (e.g. the defaulted destructor and the
// member declarations). Two fragments ("A cost function that applies a
// scaling..." and "StandardGradientDescentOptimizer()") appear to come from
// *other* classes entirely. Do not edit in place — reconstruct against the
// original header first; any reordering or re-punctuation here would be
// guesswork.
This class implements a gradient descent optimizer with adaptive gain.
void UpdateCurrentTime() override
~PreconditionedASGDOptimizer() override=default
std::string m_StepSizeStrategy
SmartPointer< const Self > ConstPointer
ParametersType m_PreconditionVector
PreconditionedASGDOptimizer()
DerivativeType m_PreviousSearchDirection
ITK_DISALLOW_COPY_AND_MOVE(PreconditionedASGDOptimizer)
bool m_UseAdaptiveStepSizes
A cost function that applies a scaling to another cost function.
This class implements a gradient descent optimizer with a decaying gain.
StandardGradientDescentOptimizer()
NonLinearOptimizer::ScalesType ScalesType