Home | Main Page | Modules | Namespace List | Class Hierarchy | Alphabetical List | Data Structures | File List | Namespace Members | Data Fields | Globals | Related Pages
itkAdaptiveStochasticGradientDescentOptimizer.h
Go to the documentation of this file.
1/*=========================================================================
2 *
3 * Copyright UMC Utrecht and contributors
4 *
5 * Licensed under the Apache License, Version 2.0 (the "License");
6 * you may not use this file except in compliance with the License.
7 * You may obtain a copy of the License at
8 *
9 * http://www.apache.org/licenses/LICENSE-2.0.txt
10 *
11 * Unless required by applicable law or agreed to in writing, software
12 * distributed under the License is distributed on an "AS IS" BASIS,
13 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
14 * See the License for the specific language governing permissions and
15 * limitations under the License.
16 *
17 *=========================================================================*/
18
19#ifndef itkAdaptiveStochasticGradientDescentOptimizer_h
20#define itkAdaptiveStochasticGradientDescentOptimizer_h
21
22#include "../StandardGradientDescent/itkStandardGradientDescentOptimizer.h"
23
24namespace itk
25{
26
73{
74public:
// NOTE(review): this is a Doxygen-extracted listing — the class head and
// several declarations (type macro, constructor, method declarations) are
// elided; the leading numbers are the original header's line numbers.
76
80
// Standard ITK smart-pointer aliases.
81 using Pointer = SmartPointer<Self>;
82 using ConstPointer = SmartPointer<const Self>;
83
// Standard ITK object-factory constructor.
85 itkNewMacro(Self);
86
89
// Types re-exported from the superclass (per the include on the original
// line 22, a StandardGradientDescentOptimizer — a gradient descent
// optimizer with a decaying gain).
91 using Superclass::MeasureType;
92 using Superclass::ParametersType;
93 using Superclass::DerivativeType;
94 using Superclass::CostFunctionType;
99
// Enable/disable the adaptive step-size mechanism; when disabled the
// optimizer presumably falls back to the superclass's plain decaying-gain
// schedule — confirm against the elided implementation (.hxx).
101 itkSetMacro(UseAdaptiveStepSizes, bool);
102 itkGetConstMacro(UseAdaptiveStepSizes, bool);
103
// Upper asymptote of the sigmoid used for step-size adaptation
// (default 1.0 — see the in-class initializer below).
106 itkSetMacro(SigmoidMax, double);
107 itkGetConstMacro(SigmoidMax, double);
108
// Lower asymptote of the sigmoid (default -0.8).
111 itkSetMacro(SigmoidMin, double);
112 itkGetConstMacro(SigmoidMin, double);
113
// Scale (steepness) of the sigmoid (default 1e-8).
116 itkSetMacro(SigmoidScale, double);
117 itkGetConstMacro(SigmoidScale, double);
118
119protected:
122
// Declaration truncated by the extraction: a void member function spanned
// the original lines 130-132 — presumably the override that updates the
// "current time"/step size each iteration; TODO confirm in the full header.
130 void
132
// Gradient from the previous iteration; kept so the adaptation can compare
// it with the current gradient — TODO confirm against the implementation.
134 DerivativeType m_PreviousGradient;
135
136private:
// Sigmoid parameters with their default values (setters/getters above).
139 double m_SigmoidMax{ 1.0 };
140 double m_SigmoidMin{ -0.8 };
141 double m_SigmoidScale{ 1e-8 };
142};
143
144} // end namespace itk
145
146#endif // end #ifndef itkAdaptiveStochasticGradientDescentOptimizer_h
This class implements a gradient descent optimizer with adaptive gain.
~AdaptiveStochasticGradientDescentOptimizer() override=default
ITK_DISALLOW_COPY_AND_MOVE(AdaptiveStochasticGradientDescentOptimizer)
A cost function that applies a scaling to another cost function.
This class implements a gradient descent optimizer with a decaying gain.


Generated on 2023-01-13 for elastix by doxygen 1.9.6 elastix logo