18#ifndef itkAdaptiveStochasticLBFGSOptimizer_h
19#define itkAdaptiveStochasticLBFGSOptimizer_h
21#include "../StandardStochasticGradientDescent/itkStandardStochasticGradientDescentOptimizer.h"
87 using Superclass::MeasureType;
88 using Superclass::ParametersType;
89 using Superclass::DerivativeType;
90 using Superclass::CostFunctionType;
/** Set/Get whether the adaptive step-size (gain) mechanism is used
 * instead of the plain decaying gain of the superclass.
 * NOTE(review): the default value is set in the constructor, which is
 * not visible in this chunk — confirm there. */
97 itkSetMacro(UseAdaptiveStepSizes,
bool);
98 itkGetConstMacro(UseAdaptiveStepSizes,
bool);
/** Set/Get whether the search direction (rather than the raw gradient)
 * drives the adaptive step-size update — only meaningful when
 * UseAdaptiveStepSizes is true. NOTE(review): semantics inferred from
 * the name; confirm against the UpdateCurrentTime() implementation. */
101 itkSetMacro(UseSearchDirForAdaptiveStepSize,
bool);
102 itkGetConstMacro(UseSearchDirForAdaptiveStepSize,
bool);
/** Set/Get the upper bound of the sigmoid used by the adaptive
 * step-size estimator. NOTE(review): expected to satisfy
 * SigmoidMax > SigmoidMin; no range check is visible here. */
106 itkSetMacro(SigmoidMax,
double);
107 itkGetConstMacro(SigmoidMax,
double);
/** Set/Get the lower bound of the sigmoid used by the adaptive
 * step-size estimator. NOTE(review): typically negative so the time
 * variable can decrease; default not visible in this chunk. */
111 itkSetMacro(SigmoidMin,
double);
112 itkGetConstMacro(SigmoidMin,
double);
/** Set/Get the scale (steepness) of the sigmoid used by the adaptive
 * step-size estimator. NOTE(review): default set in the constructor,
 * which is outside this chunk — confirm there. */
116 itkSetMacro(SigmoidScale,
double);
117 itkGetConstMacro(SigmoidScale,
double);
std::string m_StepSizeStrategy
SmartPointer< const Self > ConstPointer
bool m_UseAdaptiveStepSizes
unsigned long m_UpdateFrequenceL
void UpdateCurrentTime() override
~AdaptiveStochasticLBFGSOptimizer() override=default
AdaptiveStochasticLBFGSOptimizer()
bool m_UseSearchDirForAdaptiveStepSize
double m_SearchLengthScale
ITK_DISALLOW_COPY_AND_MOVE(AdaptiveStochasticLBFGSOptimizer)
A cost function that applies a scaling to another cost function.
NonLinearOptimizer::ScalesType ScalesType
This class implements a gradient descent optimizer with a decaying gain.
StandardStochasticGradientOptimizer()