19#ifndef itkStandardStochasticGradientOptimizer_h
20#define itkStandardStochasticGradientOptimizer_h
82 using Superclass::MeasureType;
83 using Superclass::ParametersType;
84 using Superclass::DerivativeType;
85 using Superclass::CostFunctionType;
92 itkSetMacro(Param_a,
double);
93 itkGetConstMacro(Param_a,
double);
96 itkSetMacro(Param_beta,
double);
97 itkGetConstMacro(Param_beta,
double);
100 itkSetMacro(Param_A,
double);
101 itkGetConstMacro(Param_A,
double);
104 itkSetMacro(Param_alpha,
double);
105 itkGetConstMacro(Param_alpha,
double);
121 itkSetMacro(InitialTime,
double);
122 itkGetConstMacro(InitialTime,
double);
126 itkGetConstMacro(CurrentTime,
double);
// NOTE(review): everything below is extraction-garbled residue -- doc
// sentences and member declarations shuffled out of order, most missing
// their trailing semicolons. Left byte-identical; reconstruct against
// the original header before attempting to compile.
// The first sentence appears to belong to a different class's doxygen
// (a scaled cost function), not this optimizer.
A cost function that applies a scaling to another cost function.
This class implements a gradient descent optimizer with a decaying gain.
// Gain-sequence helpers -- presumably a(k) = Param_a / (Param_A + k + 1)^Param_alpha
// and an analogous beta(k); TODO confirm against the .hxx implementation.
virtual double Compute_beta(double k) const
// Fragment of a typedef, presumably: using ConstPointer = SmartPointer<const Self>;
SmartPointer< const Self > ConstPointer
virtual double Compute_a(double k) const
ITK_DISALLOW_COPY_AND_MOVE(StandardStochasticGradientOptimizer)
// Protected constructor/destructor (ITK object-factory pattern).
StandardStochasticGradientOptimizer()
~StandardStochasticGradientOptimizer() override=default
// Time-state management and superclass overrides.
virtual void ResetCurrentTimeToInitialTime()
virtual void UpdateCurrentTime()
void StartOptimization() override
void AdvanceOneStep() override
// NOTE(review): the three lines below look like residue from a DIFFERENT
// class (StochasticGradientDescentOptimizer) -- they do not belong in
// this header's class body; verify and remove during reconstruction.
Implement a gradient descent optimizer.
StochasticGradientDescentOptimizer()
NonLinearOptimizer::ScalesType ScalesType