001    /*
002     * Licensed to the Apache Software Foundation (ASF) under one or more
003     * contributor license agreements.  See the NOTICE file distributed with
004     * this work for additional information regarding copyright ownership.
005     * The ASF licenses this file to You under the Apache License, Version 2.0
006     * (the "License"); you may not use this file except in compliance with
007     * the License.  You may obtain a copy of the License at
008     *
009     *      http://www.apache.org/licenses/LICENSE-2.0
010     *
011     * Unless required by applicable law or agreed to in writing, software
012     * distributed under the License is distributed on an "AS IS" BASIS,
013     * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
014     * See the License for the specific language governing permissions and
015     * limitations under the License.
016     */
017    package org.apache.commons.math3.optim.nonlinear.scalar.noderiv;
018    
019    import java.util.Comparator;
020    import org.apache.commons.math3.analysis.MultivariateFunction;
021    import org.apache.commons.math3.exception.NullArgumentException;
022    import org.apache.commons.math3.optim.nonlinear.scalar.GoalType;
023    import org.apache.commons.math3.optim.ConvergenceChecker;
024    import org.apache.commons.math3.optim.PointValuePair;
025    import org.apache.commons.math3.optim.SimpleValueChecker;
026    import org.apache.commons.math3.optim.OptimizationData;
027    import org.apache.commons.math3.optim.nonlinear.scalar.MultivariateOptimizer;
028    
029    /**
030     * This class implements simplex-based direct search optimization.
031     *
032     * <p>
 *  Direct search methods only use objective function values; they do
 *  not need derivatives, nor do they attempt to compute approximations
 *  of the derivatives. According to a 1996 paper by Margaret H. Wright
036     *  (<a href="http://cm.bell-labs.com/cm/cs/doc/96/4-02.ps.gz">Direct
037     *  Search Methods: Once Scorned, Now Respectable</a>), they are used
038     *  when either the computation of the derivative is impossible (noisy
039     *  functions, unpredictable discontinuities) or difficult (complexity,
040     *  computation cost). In the first cases, rather than an optimum, a
041     *  <em>not too bad</em> point is desired. In the latter cases, an
042     *  optimum is desired but cannot be reasonably found. In all cases
043     *  direct search methods can be useful.
044     * </p>
045     * <p>
046     *  Simplex-based direct search methods are based on comparison of
047     *  the objective function values at the vertices of a simplex (which is a
 *  set of n+1 points in dimension n) that is updated by the algorithm's
 *  steps.
 * </p>
051     * <p>
052     *  The simplex update procedure ({@link NelderMeadSimplex} or
053     * {@link MultiDirectionalSimplex})  must be passed to the
054     * {@code optimize} method.
055     * </p>
056     * <p>
057     *  Each call to {@code optimize} will re-use the start configuration of
058     *  the current simplex and move it such that its first vertex is at the
059     *  provided start point of the optimization.
060     *  If the {@code optimize} method is called to solve a different problem
061     *  and the number of parameters change, the simplex must be re-initialized
062     *  to one with the appropriate dimensions.
063     * </p>
064     * <p>
065     *  Convergence is checked by providing the <em>worst</em> points of
066     *  previous and current simplex to the convergence checker, not the best
067     *  ones.
068     * </p>
069     * <p>
070     *  This simplex optimizer implementation does not directly support constrained
071     *  optimization with simple bounds; so, for such optimizations, either a more
072     *  dedicated algorithm must be used like
073     *  {@link CMAESOptimizer} or {@link BOBYQAOptimizer}, or the objective
074     *  function must be wrapped in an adapter like
075     *  {@link org.apache.commons.math3.optim.nonlinear.scalar.MultivariateFunctionMappingAdapter
076     *  MultivariateFunctionMappingAdapter} or
077     *  {@link org.apache.commons.math3.optim.nonlinear.scalar.MultivariateFunctionPenaltyAdapter
078     *  MultivariateFunctionPenaltyAdapter}.
079     * </p>
080     *
081     * @version $Id: SimplexOptimizer.java 1397759 2012-10-13 01:12:58Z erans $
082     * @since 3.0
083     */
084    public class SimplexOptimizer extends MultivariateOptimizer {
085        /** Simplex update rule. */
086        private AbstractSimplex simplex;
087    
088        /**
089         * @param checker Convergence checker.
090         */
091        public SimplexOptimizer(ConvergenceChecker<PointValuePair> checker) {
092            super(checker);
093        }
094    
095        /**
096         * @param rel Relative threshold.
097         * @param abs Absolute threshold.
098         */
099        public SimplexOptimizer(double rel, double abs) {
100            this(new SimpleValueChecker(rel, abs));
101        }
102    
103        /**
104         * {@inheritDoc}
105         *
106         * @param optData Optimization data.
107         * The following data will be looked for:
108         * <ul>
109         *  <li>{@link org.apache.commons.math3.optim.MaxEval}</li>
110         *  <li>{@link org.apache.commons.math3.optim.InitialGuess}</li>
111         *  <li>{@link org.apache.commons.math3.optim.SimpleBounds}</li>
112         *  <li>{@link AbstractSimplex}</li>
113         * </ul>
114         * @return {@inheritDoc}
115         */
116        @Override
117        public PointValuePair optimize(OptimizationData... optData) {
118            // Retrieve settings
119            parseOptimizationData(optData);
120            // Set up base class and perform computation.
121            return super.optimize(optData);
122        }
123    
124        /** {@inheritDoc} */
125        @Override
126        protected PointValuePair doOptimize() {
127            if (simplex == null) {
128                throw new NullArgumentException();
129            }
130    
131            // Indirect call to "computeObjectiveValue" in order to update the
132            // evaluations counter.
133            final MultivariateFunction evalFunc
134                = new MultivariateFunction() {
135                    public double value(double[] point) {
136                        return computeObjectiveValue(point);
137                    }
138                };
139    
140            final boolean isMinim = getGoalType() == GoalType.MINIMIZE;
141            final Comparator<PointValuePair> comparator
142                = new Comparator<PointValuePair>() {
143                public int compare(final PointValuePair o1,
144                                   final PointValuePair o2) {
145                    final double v1 = o1.getValue();
146                    final double v2 = o2.getValue();
147                    return isMinim ? Double.compare(v1, v2) : Double.compare(v2, v1);
148                }
149            };
150    
151            // Initialize search.
152            simplex.build(getStartPoint());
153            simplex.evaluate(evalFunc, comparator);
154    
155            PointValuePair[] previous = null;
156            int iteration = 0;
157            final ConvergenceChecker<PointValuePair> checker = getConvergenceChecker();
158            while (true) {
159                if (iteration > 0) {
160                    boolean converged = true;
161                    for (int i = 0; i < simplex.getSize(); i++) {
162                        PointValuePair prev = previous[i];
163                        converged = converged &&
164                            checker.converged(iteration, prev, simplex.getPoint(i));
165                    }
166                    if (converged) {
167                        // We have found an optimum.
168                        return simplex.getPoint(0);
169                    }
170                }
171    
172                // We still need to search.
173                previous = simplex.getPoints();
174                simplex.iterate(evalFunc, comparator);
175                ++iteration;
176            }
177        }
178    
179        /**
180         * Scans the list of (required and optional) optimization data that
181         * characterize the problem.
182         *
183         * @param optData Optimization data.
184         * The following data will be looked for:
185         * <ul>
186         *  <li>{@link AbstractSimplex}</li>
187         * </ul>
188         */
189        private void parseOptimizationData(OptimizationData... optData) {
190            // The existing values (as set by the previous call) are reused if
191            // not provided in the argument list.
192            for (OptimizationData data : optData) {
193                if (data instanceof AbstractSimplex) {
194                    simplex = (AbstractSimplex) data;
195                    // If more data must be parsed, this statement _must_ be
196                    // changed to "continue".
197                    break;
198                }
199            }
200        }
201    }