path: root/src/main/java/org/apache/commons/math3/optim/nonlinear/scalar/GradientMultivariateOptimizer.java
author    Karl Shaffer <karlshaffer@google.com> 2023-08-10 22:35:48 +0000
committer Automerger Merge Worker <android-build-automerger-merge-worker@system.gserviceaccount.com> 2023-08-10 22:35:48 +0000
commit    5484895ffd3d0c8337d159667cafc127c459f677 (patch)
tree      ace24ba4307d4978ee3134f7da671a77ad172da0 /src/main/java/org/apache/commons/math3/optim/nonlinear/scalar/GradientMultivariateOptimizer.java
parent    bbf9548f049f99fd8e5a593baae983532dd983f4 (diff)
parent    b3715644fba79ef08acd9a2e157d078865281767 (diff)
download  apache-commons-math-5484895ffd3d0c8337d159667cafc127c459f677.tar.gz
Check-in commons-math 3.6.1 am: 1354beaf45 am: 0018f64b87 am: b3715644fb
Original change: https://android-review.googlesource.com/c/platform/external/apache-commons-math/+/2702413
Change-Id: I5ad9b2a0822d668b5b6a62933c6d4c1f0b802001
Signed-off-by: Automerger Merge Worker <android-build-automerger-merge-worker@system.gserviceaccount.com>
Diffstat (limited to 'src/main/java/org/apache/commons/math3/optim/nonlinear/scalar/GradientMultivariateOptimizer.java')
-rw-r--r-- src/main/java/org/apache/commons/math3/optim/nonlinear/scalar/GradientMultivariateOptimizer.java | 102
1 file changed, 102 insertions, 0 deletions
diff --git a/src/main/java/org/apache/commons/math3/optim/nonlinear/scalar/GradientMultivariateOptimizer.java b/src/main/java/org/apache/commons/math3/optim/nonlinear/scalar/GradientMultivariateOptimizer.java
new file mode 100644
index 0000000..38a8bf7
--- /dev/null
+++ b/src/main/java/org/apache/commons/math3/optim/nonlinear/scalar/GradientMultivariateOptimizer.java
@@ -0,0 +1,102 @@
+/*
+ * Licensed to the Apache Software Foundation (ASF) under one or more
+ * contributor license agreements. See the NOTICE file distributed with
+ * this work for additional information regarding copyright ownership.
+ * The ASF licenses this file to You under the Apache License, Version 2.0
+ * (the "License"); you may not use this file except in compliance with
+ * the License. You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+package org.apache.commons.math3.optim.nonlinear.scalar;
+
+import org.apache.commons.math3.analysis.MultivariateVectorFunction;
+import org.apache.commons.math3.optim.ConvergenceChecker;
+import org.apache.commons.math3.optim.OptimizationData;
+import org.apache.commons.math3.optim.PointValuePair;
+import org.apache.commons.math3.exception.TooManyEvaluationsException;
+
+/**
+ * Base class for implementing optimizers for multivariate scalar
+ * differentiable functions.
+ * It contains boiler-plate code for dealing with gradient evaluation.
+ *
+ * @since 3.1
+ */
+public abstract class GradientMultivariateOptimizer
+ extends MultivariateOptimizer {
+ /**
+ * Gradient of the objective function.
+ */
+ private MultivariateVectorFunction gradient;
+
+ /**
+ * @param checker Convergence checker.
+ */
+ protected GradientMultivariateOptimizer(ConvergenceChecker<PointValuePair> checker) {
+ super(checker);
+ }
+
+ /**
+ * Compute the gradient vector.
+ *
+ * @param params Point at which the gradient must be evaluated.
+ * @return the gradient at the specified point.
+ */
+ protected double[] computeObjectiveGradient(final double[] params) {
+ return gradient.value(params);
+ }
+
+ /**
+ * {@inheritDoc}
+ *
+ * @param optData Optimization data. In addition to those documented in
+ * {@link MultivariateOptimizer#parseOptimizationData(OptimizationData[])
+ * MultivariateOptimizer}, this method will register the following data:
+ * <ul>
+ * <li>{@link ObjectiveFunctionGradient}</li>
+ * </ul>
+ * @return {@inheritDoc}
+ * @throws TooManyEvaluationsException if the maximal number of
+ * evaluations (of the objective function) is exceeded.
+ */
+ @Override
+ public PointValuePair optimize(OptimizationData... optData)
+ throws TooManyEvaluationsException {
+ // Set up base class and perform computation.
+ return super.optimize(optData);
+ }
+
+ /**
+ * Scans the list of (required and optional) optimization data that
+ * characterize the problem.
+ *
+ * @param optData Optimization data.
+ * The following data will be looked for:
+ * <ul>
+ * <li>{@link ObjectiveFunctionGradient}</li>
+ * </ul>
+ */
+ @Override
+ protected void parseOptimizationData(OptimizationData... optData) {
+ // Allow base class to register its own data.
+ super.parseOptimizationData(optData);
+
+ // The existing values (as set by the previous call) are reused if
+ // not provided in the argument list.
+ for (OptimizationData data : optData) {
+ if (data instanceof ObjectiveFunctionGradient) {
+ gradient = ((ObjectiveFunctionGradient) data).getObjectiveFunctionGradient();
+ // If more data must be parsed, this statement _must_ be
+ // changed to "continue".
+ break;
+ }
+ }
+ }
+}
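
For context, the sketch below (not part of this change) shows how an ObjectiveFunctionGradient registered through optimize(...) / parseOptimizationData(...) ends up being evaluated by computeObjectiveGradient(...). It uses NonLinearConjugateGradientOptimizer, the concrete subclass of GradientMultivariateOptimizer shipped with commons-math 3.6.1; the quadratic objective, its gradient, and the tolerance values are illustrative placeholders, not taken from this commit.

import org.apache.commons.math3.analysis.MultivariateFunction;
import org.apache.commons.math3.analysis.MultivariateVectorFunction;
import org.apache.commons.math3.optim.InitialGuess;
import org.apache.commons.math3.optim.MaxEval;
import org.apache.commons.math3.optim.PointValuePair;
import org.apache.commons.math3.optim.SimpleValueChecker;
import org.apache.commons.math3.optim.nonlinear.scalar.GoalType;
import org.apache.commons.math3.optim.nonlinear.scalar.ObjectiveFunction;
import org.apache.commons.math3.optim.nonlinear.scalar.ObjectiveFunctionGradient;
import org.apache.commons.math3.optim.nonlinear.scalar.gradient.NonLinearConjugateGradientOptimizer;

public class GradientOptimizerExample {
    public static void main(String[] args) {
        // Objective: f(x, y) = (x - 1)^2 + (y - 2)^2, minimum at (1, 2).
        MultivariateFunction f = p ->
                (p[0] - 1) * (p[0] - 1) + (p[1] - 2) * (p[1] - 2);

        // Analytical gradient of f. Wrapped in ObjectiveFunctionGradient,
        // it is the function that computeObjectiveGradient(params) calls.
        MultivariateVectorFunction grad = p ->
                new double[] { 2 * (p[0] - 1), 2 * (p[1] - 2) };

        NonLinearConjugateGradientOptimizer optimizer =
                new NonLinearConjugateGradientOptimizer(
                        NonLinearConjugateGradientOptimizer.Formula.POLAK_RIBIERE,
                        new SimpleValueChecker(1e-9, 1e-9));

        // optimize(...) forwards the OptimizationData to
        // parseOptimizationData(...), which registers the gradient.
        PointValuePair result = optimizer.optimize(
                new MaxEval(1000),
                new ObjectiveFunction(f),
                new ObjectiveFunctionGradient(grad),
                GoalType.MINIMIZE,
                new InitialGuess(new double[] { 0, 0 }));

        System.out.println("min at (" + result.getPoint()[0] + ", "
                + result.getPoint()[1] + "), value = " + result.getValue());
    }
}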