go home Home | Main Page | Modules | Namespace List | Class Hierarchy | Alphabetical List | Data Structures | File List | Namespace Members | Data Fields | Globals | Related Pages
itkAdaptiveStochasticLBFGSOptimizer.h
Go to the documentation of this file.
1/*=========================================================================
2 *
3 * Copyright UMC Utrecht and contributors
4 *
5 * Licensed under the Apache License, Version 2.0 (the "License");
6 * you may not use this file except in compliance with the License.
7 * You may obtain a copy of the License at
8 *
9 * http://www.apache.org/licenses/LICENSE-2.0.txt
10 *
11 * Unless required by applicable law or agreed to in writing, software
12 * distributed under the License is distributed on an "AS IS" BASIS,
13 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
14 * See the License for the specific language governing permissions and
15 * limitations under the License.
16 *
17 *=========================================================================*/
18#ifndef itkAdaptiveStochasticLBFGSOptimizer_h
19#define itkAdaptiveStochasticLBFGSOptimizer_h
20
21#include "../StandardStochasticGradientDescent/itkStandardStochasticGradientDescentOptimizer.h"
22
23namespace itk
24{
70{
// NOTE(review): the class declaration line itself is not visible in this
// extracted fragment; the embedded Doxygen line numbers (70, 71, 73, ...)
// show that several original source lines were dropped by the extraction.
71public:
73
// Standard ITK smart-pointer aliases for this optimizer.
77 using Pointer = SmartPointer<Self>;
78 using ConstPointer = SmartPointer<const Self>;
79
// ITK object-factory constructor: enables AdaptiveStochasticLBFGSOptimizer::New().
81 itkNewMacro(Self);
82
85
// Types inherited from the stochastic gradient descent superclass.
87 using Superclass::MeasureType;
88 using Superclass::ParametersType;
89 using Superclass::DerivativeType;
90 using Superclass::CostFunctionType;
95
// Enable/disable the adaptive step-size (gain) mechanism.
97 itkSetMacro(UseAdaptiveStepSizes, bool);
98 itkGetConstMacro(UseAdaptiveStepSizes, bool);
99
// When true, presumably the search direction (rather than the raw gradient)
// drives the adaptive step-size update — confirm against the .hxx implementation.
101 itkSetMacro(UseSearchDirForAdaptiveStepSize, bool);
102 itkGetConstMacro(UseSearchDirForAdaptiveStepSize, bool);
103
// Sigmoid parameters for the adaptive gain; defaults are set in the
// private members below (SigmoidMax = 1.0).
106 itkSetMacro(SigmoidMax, double);
107 itkGetConstMacro(SigmoidMax, double);
108
// SigmoidMin default is -0.8 (see private member initializer below).
111 itkSetMacro(SigmoidMin, double);
112 itkGetConstMacro(SigmoidMin, double);
113
// SigmoidScale default is 1e-8 (see private member initializer below).
116 itkSetMacro(SigmoidScale, double);
117 itkGetConstMacro(SigmoidScale, double);
118
119protected:
122
130 void
132
134 // m_previousGradient m_PrePreviousGradient are not used, where should I put them?
135 // DerivativeType m_previousGradient;
136 // DerivativeType m_PrePreviousGradient;
// NOTE(review): "Frequence" looks like a typo for "Frequency"; renaming
// would change the class interface for subclasses/friends, so it is only
// flagged here rather than changed.
137 unsigned long m_UpdateFrequenceL;
142
143private:
// In-class default values for the sigmoid-shaped adaptive-gain parameters.
146 double m_SigmoidMax{ 1.0 };
147 double m_SigmoidMin{ -0.8 };
148 double m_SigmoidScale{ 1e-8 };
149
150}; // end class AdaptiveStochasticLBFGSOptimizer
151
152} // end namespace itk
153
154#endif
~AdaptiveStochasticLBFGSOptimizer() override = default;
ITK_DISALLOW_COPY_AND_MOVE(AdaptiveStochasticLBFGSOptimizer);
A cost function that applies a scaling to another cost function.
This class implements a gradient descent optimizer with a decaying gain.


Generated on 2023-01-13 for elastix by doxygen 1.9.6 elastix logo