/*
 * Licensed to the Apache Software Foundation (ASF) under one or more
 * contributor license agreements. See the NOTICE file distributed with
 * this work for additional information regarding copyright ownership.
 * The ASF licenses this file to You under the Apache License, Version 2.0
 * (the "License"); you may not use this file except in compliance with
 * the License. You may obtain a copy of the License at
 *
 *      http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS,
 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 * See the License for the specific language governing permissions and
 * limitations under the License.
 */
package org.apache.commons.math3.optim.nonlinear.scalar;

import org.apache.commons.math3.analysis.MultivariateVectorFunction;
import org.apache.commons.math3.exception.TooManyEvaluationsException;
import org.apache.commons.math3.optim.ConvergenceChecker;
import org.apache.commons.math3.optim.OptimizationData;
import org.apache.commons.math3.optim.PointValuePair;

/**
 * Base class for implementing optimizers for multivariate scalar
 * differentiable functions.
 * It contains boiler-plate code for dealing with gradient evaluation.
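 * <p>
 * A minimal usage sketch, assuming the concrete subclass
 * {@link org.apache.commons.math3.optim.nonlinear.scalar.gradient.NonLinearConjugateGradientOptimizer}
 * and hypothetical user-defined functions {@code f} (the objective) and
 * {@code grad} (its gradient):
 * <pre>{@code
 * GradientMultivariateOptimizer optimizer
 *     = new NonLinearConjugateGradientOptimizer(
 *           NonLinearConjugateGradientOptimizer.Formula.FLETCHER_REEVES,
 *           new SimpleValueChecker(1e-9, 1e-9));
 * PointValuePair result
 *     = optimizer.optimize(new MaxEval(1000),
 *                          new ObjectiveFunction(f),
 *                          new ObjectiveFunctionGradient(grad),
 *                          GoalType.MINIMIZE,
 *                          new InitialGuess(new double[] { 0, 0 }));
 * }</pre>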
 *
 * @version $Id$
 * @since 3.1
 */
public abstract class GradientMultivariateOptimizer
    extends MultivariateOptimizer {
    /**
     * Gradient of the objective function.
     */
    private MultivariateVectorFunction gradient;

    /**
     * @param checker Convergence checker.
     */
    protected GradientMultivariateOptimizer(ConvergenceChecker<PointValuePair> checker) {
        super(checker);
    }

    /**
     * Compute the gradient vector.
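     * <p>
     * The gradient function itself is supplied through an
     * {@link ObjectiveFunctionGradient} argument to
     * {@link #optimize(OptimizationData[]) optimize}. A sketch, assuming the
     * hypothetical objective f(x, y) = x<sup>2</sup> + y<sup>2</sup>:
     * <pre>{@code
     * MultivariateVectorFunction grad = new MultivariateVectorFunction() {
     *     public double[] value(double[] point) {
     *         // Analytical gradient of f: (df/dx, df/dy) = (2x, 2y).
     *         return new double[] { 2 * point[0], 2 * point[1] };
     *     }
     * };
     * OptimizationData gradientData = new ObjectiveFunctionGradient(grad);
     * }</pre>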
     *
     * @param params Point at which the gradient must be evaluated.
     * @return the gradient at the specified point.
     */
    protected double[] computeObjectiveGradient(final double[] params) {
        return gradient.value(params);
    }

    /**
     * {@inheritDoc}
     *
     * @param optData Optimization data.
     * The following data will be looked for:
     * <ul>
     *  <li>{@link org.apache.commons.math3.optim.MaxEval}</li>
     *  <li>{@link org.apache.commons.math3.optim.InitialGuess}</li>
     *  <li>{@link org.apache.commons.math3.optim.SimpleBounds}</li>
     *  <li>{@link ObjectiveFunction}</li>
     *  <li>{@link GoalType}</li>
     *  <li>{@link ObjectiveFunctionGradient}</li>
     * </ul>
     * @return {@inheritDoc}
     * @throws TooManyEvaluationsException if the maximal number of
     * evaluations (of the objective function) is exceeded.
     */
    @Override
    public PointValuePair optimize(OptimizationData... optData)
        throws TooManyEvaluationsException {
        // Retrieve settings.
        parseOptimizationData(optData);
        // Set up base class and perform computation.
        return super.optimize(optData);
    }

    /**
     * Scans the list of (required and optional) optimization data that
     * characterize the problem.
     *
     * @param optData Optimization data.
     * The following data will be looked for:
     * <ul>
     *  <li>{@link ObjectiveFunctionGradient}</li>
     * </ul>
     */
    private void parseOptimizationData(OptimizationData... optData) {
        // The existing values (as set by the previous call) are reused if
        // not provided in the argument list.
        for (OptimizationData data : optData) {
            if (data instanceof ObjectiveFunctionGradient) {
                gradient = ((ObjectiveFunctionGradient) data).getObjectiveFunctionGradient();
                // If more data must be parsed, this statement _must_ be
                // changed to "continue".
                break;
            }
        }
    }
}