blob: 38a8bf71ee4c106c2863d3e6daec166eaff12447 (
plain)
1
2
3
4
5
6
7
8
9
10
11
12
13
14
15
16
17
18
19
20
21
22
23
24
25
26
27
28
29
30
31
32
33
34
35
36
37
38
39
40
41
42
43
44
45
46
47
48
49
50
51
52
53
54
55
56
57
58
59
60
61
62
63
64
65
66
67
68
69
70
71
72
73
74
75
76
77
78
79
80
81
82
83
84
85
86
87
88
89
90
91
92
93
94
95
96
97
98
99
100
101
102
|
/*
* Licensed to the Apache Software Foundation (ASF) under one or more
* contributor license agreements. See the NOTICE file distributed with
* this work for additional information regarding copyright ownership.
* The ASF licenses this file to You under the Apache License, Version 2.0
* (the "License"); you may not use this file except in compliance with
* the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.apache.commons.math3.optim.nonlinear.scalar;
import org.apache.commons.math3.analysis.MultivariateVectorFunction;
import org.apache.commons.math3.optim.ConvergenceChecker;
import org.apache.commons.math3.optim.OptimizationData;
import org.apache.commons.math3.optim.PointValuePair;
import org.apache.commons.math3.exception.TooManyEvaluationsException;
/**
 * Base class for implementing optimizers for multivariate scalar
 * differentiable functions.
 * It contains boiler-plate code for dealing with gradient evaluation.
 *
 * @since 3.1
 */
public abstract class GradientMultivariateOptimizer
    extends MultivariateOptimizer {
    /** Gradient of the objective function, registered via {@link ObjectiveFunctionGradient}. */
    private MultivariateVectorFunction gradient;

    /**
     * Creates an optimizer that reports convergence through the given checker.
     *
     * @param checker Convergence checker.
     */
    protected GradientMultivariateOptimizer(ConvergenceChecker<PointValuePair> checker) {
        super(checker);
    }

    /**
     * Evaluates the gradient of the objective function.
     *
     * @param params Point at which the gradient must be evaluated.
     * @return the gradient at the specified point.
     */
    protected double[] computeObjectiveGradient(final double[] params) {
        return gradient.value(params);
    }

    /**
     * {@inheritDoc}
     *
     * @param optData Optimization data. In addition to those documented in
     * {@link MultivariateOptimizer#parseOptimizationData(OptimizationData[])
     * MultivariateOptimizer}, this method will register the following data:
     * <ul>
     *  <li>{@link ObjectiveFunctionGradient}</li>
     * </ul>
     * @return {@inheritDoc}
     * @throws TooManyEvaluationsException if the maximal number of
     * evaluations (of the objective function) is exceeded.
     */
    @Override
    public PointValuePair optimize(OptimizationData... optData)
        throws TooManyEvaluationsException {
        // All the work (including data registration) happens in the base class.
        return super.optimize(optData);
    }

    /**
     * Scans the list of (required and optional) optimization data that
     * characterize the problem.
     *
     * @param optData Optimization data.
     * The following data will be looked for:
     * <ul>
     *  <li>{@link ObjectiveFunctionGradient}</li>
     * </ul>
     */
    @Override
    protected void parseOptimizationData(OptimizationData... optData) {
        // Let the superclass consume the data items it understands first.
        super.parseOptimizationData(optData);

        // Values set by a previous call are kept unless overridden here.
        for (final OptimizationData item : optData) {
            if (item instanceof ObjectiveFunctionGradient) {
                gradient = ((ObjectiveFunctionGradient) item).getObjectiveFunctionGradient();
                // Only one data type is recognized by this class; if more
                // must be parsed later, replace this "break" with "continue".
                break;
            }
        }
    }
}
|