Added: commons/proper/math/trunk/src/test/java/org/apache/commons/math3/fitting/leastsquares/StatisticalReferenceDataset.java URL: http://svn.apache.org/viewvc/commons/proper/math/trunk/src/test/java/org/apache/commons/math3/fitting/leastsquares/StatisticalReferenceDataset.java?rev=1508481&view=auto ============================================================================== --- commons/proper/math/trunk/src/test/java/org/apache/commons/math3/fitting/leastsquares/StatisticalReferenceDataset.java (added) +++ commons/proper/math/trunk/src/test/java/org/apache/commons/math3/fitting/leastsquares/StatisticalReferenceDataset.java Tue Jul 30 15:04:22 2013 @@ -0,0 +1,370 @@ +/* + * Licensed to the Apache Software Foundation (ASF) under one or more + * contributor license agreements. See the NOTICE file distributed with + * this work for additional information regarding copyright ownership. + * The ASF licenses this file to You under the Apache License, Version 2.0 + * (the "License"); you may not use this file except in compliance with + * the License. You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ +package org.apache.commons.math3.fitting.leastsquares; + +import java.io.BufferedReader; +import java.io.IOException; +import java.util.ArrayList; +import org.apache.commons.math3.analysis.MultivariateVectorFunction; +import org.apache.commons.math3.analysis.MultivariateMatrixFunction; +import org.apache.commons.math3.util.MathArrays; + +/** + * This class gives access to the statistical reference datasets provided by the + * NIST (available + * <a href="http://www.itl.nist.gov/div898/strd/general/dataarchive.html">here</a>). + * Instances of this class can be created by invocation of the + * {@link StatisticalReferenceDatasetFactory}. + */ +public abstract class StatisticalReferenceDataset { + /** The name of this dataset. */ + private final String name; + /** The total number of observations (data points). */ + private final int numObservations; + /** The total number of parameters. */ + private final int numParameters; + /** The total number of starting points for the optimizations. */ + private final int numStartingPoints; + /** The values of the predictor. */ + private final double[] x; + /** The values of the response. */ + private final double[] y; + /** + * The starting values. {@code startingValues[j][i]} is the value of the + * {@code i}-th parameter in the {@code j}-th set of starting values. + */ + private final double[][] startingValues; + /** The certified values of the parameters. */ + private final double[] a; + /** The certified values of the standard deviation of the parameters. */ + private final double[] sigA; + /** The certified value of the residual sum of squares. */ + private double residualSumOfSquares; + /** The least-squares problem. */ + private final LeastSquaresProblem problem; + + /** + * Creates a new instance of this class from the specified data file. The + * file must follow the StRD format. 
+ * + * @param in the data file + * @throws IOException if an I/O error occurs + */ + public StatisticalReferenceDataset(final BufferedReader in) + throws IOException { + + final ArrayList<String> lines = new ArrayList<String>(); + for (String line = in.readLine(); line != null; line = in.readLine()) { + lines.add(line); + } + int[] index = findLineNumbers("Data", lines); + if (index == null) { + throw new AssertionError("could not find line indices for data"); + } + this.numObservations = index[1] - index[0] + 1; + this.x = new double[this.numObservations]; + this.y = new double[this.numObservations]; + for (int i = 0; i < this.numObservations; i++) { + final String line = lines.get(index[0] + i - 1); + final String[] tokens = line.trim().split(" ++"); + // Data columns are in reverse order!!! + this.y[i] = Double.parseDouble(tokens[0]); + this.x[i] = Double.parseDouble(tokens[1]); + } + + index = findLineNumbers("Starting Values", lines); + if (index == null) { + throw new AssertionError( + "could not find line indices for starting values"); + } + this.numParameters = index[1] - index[0] + 1; + + double[][] start = null; + this.a = new double[numParameters]; + this.sigA = new double[numParameters]; + for (int i = 0; i < numParameters; i++) { + final String line = lines.get(index[0] + i - 1); + final String[] tokens = line.trim().split(" ++"); + if (start == null) { + start = new double[tokens.length - 4][numParameters]; + } + for (int j = 2; j < tokens.length - 2; j++) { + start[j - 2][i] = Double.parseDouble(tokens[j]); + } + this.a[i] = Double.parseDouble(tokens[tokens.length - 2]); + this.sigA[i] = Double.parseDouble(tokens[tokens.length - 1]); + } + if (start == null) { + throw new IOException("could not find starting values"); + } + this.numStartingPoints = start.length; + this.startingValues = start; + + double dummyDouble = Double.NaN; + String dummyString = null; + for (String line : lines) { + if (line.contains("Dataset Name:")) { + dummyString = line + .substring(line.indexOf("Dataset Name:") + 13, + line.indexOf("(")).trim(); + } + if (line.contains("Residual Sum of Squares")) { + final String[] tokens = line.split(" ++"); + dummyDouble = Double.parseDouble(tokens[4].trim()); + } + } + if (Double.isNaN(dummyDouble)) { + throw new IOException( + "could not find certified value of residual sum of squares"); + } + this.residualSumOfSquares = dummyDouble; + + if (dummyString == null) { + throw new IOException("could not find dataset name"); + } + this.name = dummyString; + + this.problem = new LeastSquaresProblem(); + } + + class LeastSquaresProblem { + public MultivariateVectorFunction getModelFunction() { + return new MultivariateVectorFunction() { + public double[] value(final double[] a) { + final int n = getNumObservations(); + final double[] yhat = new double[n]; + for (int i = 0; i < n; i++) { + yhat[i] = getModelValue(getX(i), a); + } + return yhat; + } + }; + } + + public MultivariateMatrixFunction getModelFunctionJacobian() { + return new MultivariateMatrixFunction() { + public double[][] value(final double[] a) + throws IllegalArgumentException { + final int n = getNumObservations(); + final double[][] j = new double[n][]; + for (int i = 0; i < n; i++) { + j[i] = getModelDerivatives(getX(i), a); + } + return j; + } + }; + } + } + + /** + * Returns the name of this dataset. + * + * @return the name of the dataset + */ + public String getName() { + return name; + } + + /** + * Returns the total number of observations (data points). 
+ * + * @return the number of observations + */ + public int getNumObservations() { + return numObservations; + } + + /** + * Returns a copy of the data arrays. The data is laid out as follows <li> + * {@code data[0][i] = x[i]},</li> <li>{@code data[1][i] = y[i]},</li> + * + * @return the array of data points. + */ + public double[][] getData() { + return new double[][] { + MathArrays.copyOf(x), MathArrays.copyOf(y) + }; + } + + /** + * Returns the x-value of the {@code i}-th data point. + * + * @param i the index of the data point + * @return the x-value + */ + public double getX(final int i) { + return x[i]; + } + + /** + * Returns the y-value of the {@code i}-th data point. + * + * @param i the index of the data point + * @return the y-value + */ + public double getY(final int i) { + return y[i]; + } + + /** + * Returns the total number of parameters. + * + * @return the number of parameters + */ + public int getNumParameters() { + return numParameters; + } + + /** + * Returns the certified values of the paramters. + * + * @return the values of the parameters + */ + public double[] getParameters() { + return MathArrays.copyOf(a); + } + + /** + * Returns the certified value of the {@code i}-th parameter. + * + * @param i the index of the parameter + * @return the value of the parameter + */ + public double getParameter(final int i) { + return a[i]; + } + + /** + * Reurns the certified values of the standard deviations of the parameters. + * + * @return the standard deviations of the parameters + */ + public double[] getParametersStandardDeviations() { + return MathArrays.copyOf(sigA); + } + + /** + * Returns the certified value of the standard deviation of the {@code i}-th + * parameter. + * + * @param i the index of the parameter + * @return the standard deviation of the parameter + */ + public double getParameterStandardDeviation(final int i) { + return sigA[i]; + } + + /** + * Returns the certified value of the residual sum of squares. + * + * @return the residual sum of squares + */ + public double getResidualSumOfSquares() { + return residualSumOfSquares; + } + + /** + * Returns the total number of starting points (initial guesses for the + * optimization process). + * + * @return the number of starting points + */ + public int getNumStartingPoints() { + return numStartingPoints; + } + + /** + * Returns the {@code i}-th set of initial values of the parameters. + * + * @param i the index of the starting point + * @return the starting point + */ + public double[] getStartingPoint(final int i) { + return MathArrays.copyOf(startingValues[i]); + } + + /** + * Returns the least-squares problem corresponding to fitting the model to + * the specified data. + * + * @return the least-squares problem + */ + public LeastSquaresProblem getLeastSquaresProblem() { + return problem; + } + + /** + * Returns the value of the model for the specified values of the predictor + * variable and the parameters. + * + * @param x the predictor variable + * @param a the parameters + * @return the value of the model + */ + public abstract double getModelValue(final double x, final double[] a); + + /** + * Returns the values of the partial derivatives of the model with respect + * to the parameters. 
+ * + * @param x the predictor variable + * @param a the parameters + * @return the partial derivatives + */ + public abstract double[] getModelDerivatives(final double x, + final double[] a); + + /** + * <p> + * Parses the specified text lines, and extracts the indices of the first + * and last lines of the data defined by the specified {@code key}. This key + * must be one of + * </p> + * <ul> + * <li>{@code "Starting Values"},</li> + * <li>{@code "Certified Values"},</li> + * <li>{@code "Data"}.</li> + * </ul> + * <p> + * In the NIST data files, the line indices are separated by the keywords + * {@code "lines"} and {@code "to"}. + * </p> + * + * @param lines the line of text to be parsed + * @return an array of two {@code int}s. First value is the index of the + * first line, second value is the index of the last line. + * {@code null} if the line could not be parsed. + */ + private static int[] findLineNumbers(final String key, + final Iterable<String> lines) { + for (String text : lines) { + boolean flag = text.contains(key) && text.contains("lines") && + text.contains("to") && text.contains(")"); + if (flag) { + final int[] numbers = new int[2]; + final String from = text.substring(text.indexOf("lines") + 5, + text.indexOf("to")); + numbers[0] = Integer.parseInt(from.trim()); + final String to = text.substring(text.indexOf("to") + 2, + text.indexOf(")")); + numbers[1] = Integer.parseInt(to.trim()); + return numbers; + } + } + return null; + } +}
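The accessors above can be cross-checked against the certified values parsed from the same StRD file. A minimal sketch of such a test, assuming JUnit is on the test classpath and using the Kirby2 dataset created by the factory added below; the test class name and the tolerance are illustrative only, everything else comes from the classes in this commit:

    package org.apache.commons.math3.fitting.leastsquares;

    import java.io.IOException;
    import org.junit.Assert;
    import org.junit.Test;

    public class CertifiedValuesSketchTest {
        @Test
        public void testKirby2ResidualSumOfSquares() throws IOException {
            // Parse the Kirby2 reference dataset shipped with this commit.
            final StatisticalReferenceDataset dataset =
                StatisticalReferenceDatasetFactory.createKirby2();
            // Evaluate the model at the certified parameter values...
            final double[] a = dataset.getParameters();
            double rss = 0;
            for (int i = 0; i < dataset.getNumObservations(); i++) {
                final double r = dataset.getY(i)
                               - dataset.getModelValue(dataset.getX(i), a);
                rss += r * r;
            }
            // ...and compare with the certified residual sum of squares
            // (3.9050739624E+00 for Kirby2, see Kirby2.dat below).
            Assert.assertEquals(dataset.getResidualSumOfSquares(), rss, 1e-6);
        }
    }
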
Propchange: commons/proper/math/trunk/src/test/java/org/apache/commons/math3/fitting/leastsquares/StatisticalReferenceDataset.java ------------------------------------------------------------------------------ svn:eol-style = native Propchange: commons/proper/math/trunk/src/test/java/org/apache/commons/math3/fitting/leastsquares/StatisticalReferenceDataset.java ------------------------------------------------------------------------------ svn:keywords = Id Revision Added: commons/proper/math/trunk/src/test/java/org/apache/commons/math3/fitting/leastsquares/StatisticalReferenceDatasetFactory.java URL: http://svn.apache.org/viewvc/commons/proper/math/trunk/src/test/java/org/apache/commons/math3/fitting/leastsquares/StatisticalReferenceDatasetFactory.java?rev=1508481&view=auto ============================================================================== --- commons/proper/math/trunk/src/test/java/org/apache/commons/math3/fitting/leastsquares/StatisticalReferenceDatasetFactory.java (added) +++ commons/proper/math/trunk/src/test/java/org/apache/commons/math3/fitting/leastsquares/StatisticalReferenceDatasetFactory.java Tue Jul 30 15:04:22 2013 @@ -0,0 +1,201 @@ +/* + * Licensed to the Apache Software Foundation (ASF) under one or more + * contributor license agreements. See the NOTICE file distributed with + * this work for additional information regarding copyright ownership. + * The ASF licenses this file to You under the Apache License, Version 2.0 + * (the "License"); you may not use this file except in compliance with + * the License. You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ +package org.apache.commons.math3.fitting.leastsquares; + +import java.io.BufferedReader; +import java.io.IOException; +import java.io.InputStream; +import java.io.InputStreamReader; +import org.apache.commons.math3.util.FastMath; + +/** + * A factory to create instances of {@link StatisticalReferenceDataset} from + * available resources. + */ +public class StatisticalReferenceDatasetFactory { + + private StatisticalReferenceDatasetFactory() { + // Do nothing + } + + /** + * Creates a new buffered reader from the specified resource name. 
+ * + * @param name the name of the resource + * @return a buffered reader + * @throws IOException if an I/O error occured + */ + public static BufferedReader createBufferedReaderFromResource(final String name) + throws IOException { + final InputStream resourceAsStream; + resourceAsStream = StatisticalReferenceDatasetFactory.class + .getResourceAsStream(name); + if (resourceAsStream == null) { + throw new IOException("could not find resource " + name); + } + return new BufferedReader(new InputStreamReader(resourceAsStream)); + } + + public static StatisticalReferenceDataset createKirby2() + throws IOException { + final BufferedReader in = createBufferedReaderFromResource("Kirby2.dat"); + StatisticalReferenceDataset dataset = null; + try { + dataset = new StatisticalReferenceDataset(in) { + + @Override + public double getModelValue(final double x, final double[] a) { + final double p = a[0] + x * (a[1] + x * a[2]); + final double q = 1.0 + x * (a[3] + x * a[4]); + return p / q; + } + + @Override + public double[] getModelDerivatives(final double x, + final double[] a) { + final double[] dy = new double[5]; + final double p = a[0] + x * (a[1] + x * a[2]); + final double q = 1.0 + x * (a[3] + x * a[4]); + dy[0] = 1.0 / q; + dy[1] = x / q; + dy[2] = x * dy[1]; + dy[3] = -x * p / (q * q); + dy[4] = x * dy[3]; + return dy; + } + }; + } finally { + in.close(); + } + return dataset; + } + + public static StatisticalReferenceDataset createHahn1() + throws IOException { + final BufferedReader in = createBufferedReaderFromResource("Hahn1.dat"); + StatisticalReferenceDataset dataset = null; + try { + dataset = new StatisticalReferenceDataset(in) { + + @Override + public double getModelValue(final double x, final double[] a) { + final double p = a[0] + x * (a[1] + x * (a[2] + x * a[3])); + final double q = 1.0 + x * (a[4] + x * (a[5] + x * a[6])); + return p / q; + } + + @Override + public double[] getModelDerivatives(final double x, + final double[] a) { + final double[] dy = new double[7]; + final double p = a[0] + x * (a[1] + x * (a[2] + x * a[3])); + final double q = 1.0 + x * (a[4] + x * (a[5] + x * a[6])); + dy[0] = 1.0 / q; + dy[1] = x * dy[0]; + dy[2] = x * dy[1]; + dy[3] = x * dy[2]; + dy[4] = -x * p / (q * q); + dy[5] = x * dy[4]; + dy[6] = x * dy[5]; + return dy; + } + }; + } finally { + in.close(); + } + return dataset; + } + + public static StatisticalReferenceDataset createMGH17() + throws IOException { + final BufferedReader in = createBufferedReaderFromResource("MGH17.dat"); + StatisticalReferenceDataset dataset = null; + try { + dataset = new StatisticalReferenceDataset(in) { + + @Override + public double getModelValue(final double x, final double[] a) { + return a[0] + a[1] * FastMath.exp(-a[3] * x) + a[2] * + FastMath.exp(-a[4] * x); + } + + @Override + public double[] getModelDerivatives(final double x, + final double[] a) { + final double[] dy = new double[5]; + dy[0] = 1.0; + dy[1] = FastMath.exp(-x * a[3]); + dy[2] = FastMath.exp(-x * a[4]); + dy[3] = -x * a[1] * dy[1]; + dy[4] = -x * a[2] * dy[2]; + return dy; + } + }; + } finally { + in.close(); + } + return dataset; + } + + public static StatisticalReferenceDataset createLanczos1() + throws IOException { + final BufferedReader in = + createBufferedReaderFromResource("Lanczos1.dat"); + StatisticalReferenceDataset dataset = null; + try { + dataset = new StatisticalReferenceDataset(in) { + + @Override + public double getModelValue(final double x, final double[] a) { + System.out.println(a[0]+", "+a[1]+", "+a[2]+", "+a[3]+", 
"+a[4]+", "+a[5]); + return a[0] * FastMath.exp(-a[3] * x) + + a[1] * FastMath.exp(-a[4] * x) + + a[2] * FastMath.exp(-a[5] * x); + } + + @Override + public double[] getModelDerivatives(final double x, + final double[] a) { + final double[] dy = new double[6]; + dy[0] = FastMath.exp(-x * a[3]); + dy[1] = FastMath.exp(-x * a[4]); + dy[2] = FastMath.exp(-x * a[5]); + dy[3] = -x * a[0] * dy[0]; + dy[4] = -x * a[1] * dy[1]; + dy[5] = -x * a[2] * dy[2]; + return dy; + } + }; + } finally { + in.close(); + } + return dataset; + } + + /** + * Returns an array with all available reference datasets. + * + * @return the array of datasets + * @throws IOException if an I/O error occurs + */ + public StatisticalReferenceDataset[] createAll() + throws IOException { + return new StatisticalReferenceDataset[] { + createKirby2(), createMGH17() + }; + } +} Propchange: commons/proper/math/trunk/src/test/java/org/apache/commons/math3/fitting/leastsquares/StatisticalReferenceDatasetFactory.java ------------------------------------------------------------------------------ svn:eol-style = native Propchange: commons/proper/math/trunk/src/test/java/org/apache/commons/math3/fitting/leastsquares/StatisticalReferenceDatasetFactory.java ------------------------------------------------------------------------------ svn:keywords = Id Revision Added: commons/proper/math/trunk/src/test/java/org/apache/commons/math3/fitting/leastsquares/StraightLineProblem.java URL: http://svn.apache.org/viewvc/commons/proper/math/trunk/src/test/java/org/apache/commons/math3/fitting/leastsquares/StraightLineProblem.java?rev=1508481&view=auto ============================================================================== --- commons/proper/math/trunk/src/test/java/org/apache/commons/math3/fitting/leastsquares/StraightLineProblem.java (added) +++ commons/proper/math/trunk/src/test/java/org/apache/commons/math3/fitting/leastsquares/StraightLineProblem.java Tue Jul 30 15:04:22 2013 @@ -0,0 +1,165 @@ +/* + * Licensed to the Apache Software Foundation (ASF) under one or more + * contributor license agreements. See the NOTICE file distributed with + * this work for additional information regarding copyright ownership. + * The ASF licenses this file to You under the Apache License, Version 2.0 + * (the "License"); you may not use this file except in compliance with + * the License. You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ + +package org.apache.commons.math3.fitting.leastsquares; + +import java.util.ArrayList; +import org.apache.commons.math3.analysis.MultivariateVectorFunction; +import org.apache.commons.math3.analysis.MultivariateMatrixFunction; +import org.apache.commons.math3.analysis.UnivariateFunction; +import org.apache.commons.math3.stat.regression.SimpleRegression; + +/** + * Class that models a straight line defined as {@code y = a x + b}. + * The parameters of problem are: + * <ul> + * <li>{@code a}</li> + * <li>{@code b}</li> + * </ul> + * The model functions are: + * <ul> + * <li>for each pair (a, b), the y-coordinate of the line.</li> + * </ul> + */ +class StraightLineProblem { + /** Cloud of points assumed to be fitted by a straight line. 
*/ + private final ArrayList<double[]> points; + /** Error (on the y-coordinate of the points). */ + private final double sigma; + + /** + * @param error Assumed error for the y-coordinate. + */ + public StraightLineProblem(double error) { + points = new ArrayList<double[]>(); + sigma = error; + } + + public void addPoint(double px, double py) { + points.add(new double[] { px, py }); + } + + /** + * @return the list of x-coordinates. + */ + public double[] x() { + final double[] v = new double[points.size()]; + for (int i = 0; i < points.size(); i++) { + final double[] p = points.get(i); + v[i] = p[0]; // x-coordinate. + } + + return v; + } + + /** + * @return the list of y-coordinates. + */ + public double[] y() { + final double[] v = new double[points.size()]; + for (int i = 0; i < points.size(); i++) { + final double[] p = points.get(i); + v[i] = p[1]; // y-coordinate. + } + + return v; + } + + public double[] target() { + return y(); + } + + public double[] weight() { + final double weight = 1 / (sigma * sigma); + final double[] w = new double[points.size()]; + for (int i = 0; i < points.size(); i++) { + w[i] = weight; + } + + return w; + } + + public MultivariateVectorFunction getModelFunction() { + return new MultivariateVectorFunction() { + public double[] value(double[] params) { + final Model line = new Model(params[0], params[1]); + + final double[] model = new double[points.size()]; + for (int i = 0; i < points.size(); i++) { + final double[] p = points.get(i); + model[i] = line.value(p[0]); + } + + return model; + } + }; + } + + public MultivariateMatrixFunction getModelFunctionJacobian() { + return new MultivariateMatrixFunction() { + public double[][] value(double[] point) { + return jacobian(point); + } + }; + } + + /** + * Directly solve the linear problem, using the {@link SimpleRegression} + * class. + */ + public double[] solve() { + final SimpleRegression regress = new SimpleRegression(true); + for (double[] d : points) { + regress.addData(d[0], d[1]); + } + + final double[] result = { regress.getSlope(), regress.getIntercept() }; + return result; + } + + private double[][] jacobian(double[] params) { + final double[][] jacobian = new double[points.size()][2]; + + for (int i = 0; i < points.size(); i++) { + final double[] p = points.get(i); + // Partial derivative wrt "a". + jacobian[i][0] = p[0]; + // Partial derivative wrt "b". + jacobian[i][1] = 1; + } + + return jacobian; + } + + /** + * Linear function. 
+ */ + public static class Model implements UnivariateFunction { + final double a; + final double b; + + public Model(double a, + double b) { + this.a = a; + this.b = b; + } + + public double value(double x) { + return a * x + b; + } + } +} Propchange: commons/proper/math/trunk/src/test/java/org/apache/commons/math3/fitting/leastsquares/StraightLineProblem.java ------------------------------------------------------------------------------ svn:eol-style = native Propchange: commons/proper/math/trunk/src/test/java/org/apache/commons/math3/fitting/leastsquares/StraightLineProblem.java ------------------------------------------------------------------------------ svn:keywords = Id Revision Added: commons/proper/math/trunk/src/test/resources/org/apache/commons/math3/fitting/leastsquares/Hahn1.dat URL: http://svn.apache.org/viewvc/commons/proper/math/trunk/src/test/resources/org/apache/commons/math3/fitting/leastsquares/Hahn1.dat?rev=1508481&view=auto ============================================================================== --- commons/proper/math/trunk/src/test/resources/org/apache/commons/math3/fitting/leastsquares/Hahn1.dat (added) +++ commons/proper/math/trunk/src/test/resources/org/apache/commons/math3/fitting/leastsquares/Hahn1.dat Tue Jul 30 15:04:22 2013 @@ -0,0 +1,296 @@ +NIST/ITL StRD +Dataset Name: Hahn1 (Hahn1.dat) + +File Format: ASCII + Starting Values (lines 41 to 47) + Certified Values (lines 41 to 52) + Data (lines 61 to 296) + +Procedure: Nonlinear Least Squares Regression + +Description: These data are the result of a NIST study involving + the thermal expansion of copper. The response + variable is the coefficient of thermal expansion, and + the predictor variable is temperature in degrees + kelvin. + + +Reference: Hahn, T., NIST (197?). + Copper Thermal Expansion Study. 
+ + + + + +Data: 1 Response (y = coefficient of thermal expansion) + 1 Predictor (x = temperature, degrees kelvin) + 236 Observations + Average Level of Difficulty + Observed Data + +Model: Rational Class (cubic/cubic) + 7 Parameters (b1 to b7) + + y = (b1+b2*x+b3*x**2+b4*x**3) / + (1+b5*x+b6*x**2+b7*x**3) + e + + + Starting values Certified Values + + Start 1 Start 2 Parameter Standard Deviation + b1 = 10 1 1.0776351733E+00 1.7070154742E-01 + b2 = -1 -0.1 -1.2269296921E-01 1.2000289189E-02 + b3 = 0.05 0.005 4.0863750610E-03 2.2508314937E-04 + b4 = -0.00001 -0.000001 -1.4262662514E-06 2.7578037666E-07 + b5 = -0.05 -0.005 -5.7609940901E-03 2.4712888219E-04 + b6 = 0.001 0.0001 2.4053735503E-04 1.0449373768E-05 + b7 = -0.000001 -0.0000001 -1.2314450199E-07 1.3027335327E-08 + +Residual Sum of Squares: 1.5324382854E+00 +Residual Standard Deviation: 8.1803852243E-02 +Degrees of Freedom: 229 +Number of Observations: 236 + + + + + + + +Data: y x + .591E0 24.41E0 + 1.547E0 34.82E0 + 2.902E0 44.09E0 + 2.894E0 45.07E0 + 4.703E0 54.98E0 + 6.307E0 65.51E0 + 7.03E0 70.53E0 + 7.898E0 75.70E0 + 9.470E0 89.57E0 + 9.484E0 91.14E0 + 10.072E0 96.40E0 + 10.163E0 97.19E0 + 11.615E0 114.26E0 + 12.005E0 120.25E0 + 12.478E0 127.08E0 + 12.982E0 133.55E0 + 12.970E0 133.61E0 + 13.926E0 158.67E0 + 14.452E0 172.74E0 + 14.404E0 171.31E0 + 15.190E0 202.14E0 + 15.550E0 220.55E0 + 15.528E0 221.05E0 + 15.499E0 221.39E0 + 16.131E0 250.99E0 + 16.438E0 268.99E0 + 16.387E0 271.80E0 + 16.549E0 271.97E0 + 16.872E0 321.31E0 + 16.830E0 321.69E0 + 16.926E0 330.14E0 + 16.907E0 333.03E0 + 16.966E0 333.47E0 + 17.060E0 340.77E0 + 17.122E0 345.65E0 + 17.311E0 373.11E0 + 17.355E0 373.79E0 + 17.668E0 411.82E0 + 17.767E0 419.51E0 + 17.803E0 421.59E0 + 17.765E0 422.02E0 + 17.768E0 422.47E0 + 17.736E0 422.61E0 + 17.858E0 441.75E0 + 17.877E0 447.41E0 + 17.912E0 448.7E0 + 18.046E0 472.89E0 + 18.085E0 476.69E0 + 18.291E0 522.47E0 + 18.357E0 522.62E0 + 18.426E0 524.43E0 + 18.584E0 546.75E0 + 18.610E0 549.53E0 + 18.870E0 575.29E0 + 18.795E0 576.00E0 + 19.111E0 625.55E0 + .367E0 20.15E0 + .796E0 28.78E0 + 0.892E0 29.57E0 + 1.903E0 37.41E0 + 2.150E0 39.12E0 + 3.697E0 50.24E0 + 5.870E0 61.38E0 + 6.421E0 66.25E0 + 7.422E0 73.42E0 + 9.944E0 95.52E0 + 11.023E0 107.32E0 + 11.87E0 122.04E0 + 12.786E0 134.03E0 + 14.067E0 163.19E0 + 13.974E0 163.48E0 + 14.462E0 175.70E0 + 14.464E0 179.86E0 + 15.381E0 211.27E0 + 15.483E0 217.78E0 + 15.59E0 219.14E0 + 16.075E0 262.52E0 + 16.347E0 268.01E0 + 16.181E0 268.62E0 + 16.915E0 336.25E0 + 17.003E0 337.23E0 + 16.978E0 339.33E0 + 17.756E0 427.38E0 + 17.808E0 428.58E0 + 17.868E0 432.68E0 + 18.481E0 528.99E0 + 18.486E0 531.08E0 + 19.090E0 628.34E0 + 16.062E0 253.24E0 + 16.337E0 273.13E0 + 16.345E0 273.66E0 + 16.388E0 282.10E0 + 17.159E0 346.62E0 + 17.116E0 347.19E0 + 17.164E0 348.78E0 + 17.123E0 351.18E0 + 17.979E0 450.10E0 + 17.974E0 450.35E0 + 18.007E0 451.92E0 + 17.993E0 455.56E0 + 18.523E0 552.22E0 + 18.669E0 553.56E0 + 18.617E0 555.74E0 + 19.371E0 652.59E0 + 19.330E0 656.20E0 + 0.080E0 14.13E0 + 0.248E0 20.41E0 + 1.089E0 31.30E0 + 1.418E0 33.84E0 + 2.278E0 39.70E0 + 3.624E0 48.83E0 + 4.574E0 54.50E0 + 5.556E0 60.41E0 + 7.267E0 72.77E0 + 7.695E0 75.25E0 + 9.136E0 86.84E0 + 9.959E0 94.88E0 + 9.957E0 96.40E0 + 11.600E0 117.37E0 + 13.138E0 139.08E0 + 13.564E0 147.73E0 + 13.871E0 158.63E0 + 13.994E0 161.84E0 + 14.947E0 192.11E0 + 15.473E0 206.76E0 + 15.379E0 209.07E0 + 15.455E0 213.32E0 + 15.908E0 226.44E0 + 16.114E0 237.12E0 + 17.071E0 330.90E0 + 17.135E0 358.72E0 + 17.282E0 370.77E0 + 17.368E0 372.72E0 + 17.483E0 
396.24E0 + 17.764E0 416.59E0 + 18.185E0 484.02E0 + 18.271E0 495.47E0 + 18.236E0 514.78E0 + 18.237E0 515.65E0 + 18.523E0 519.47E0 + 18.627E0 544.47E0 + 18.665E0 560.11E0 + 19.086E0 620.77E0 + 0.214E0 18.97E0 + 0.943E0 28.93E0 + 1.429E0 33.91E0 + 2.241E0 40.03E0 + 2.951E0 44.66E0 + 3.782E0 49.87E0 + 4.757E0 55.16E0 + 5.602E0 60.90E0 + 7.169E0 72.08E0 + 8.920E0 85.15E0 + 10.055E0 97.06E0 + 12.035E0 119.63E0 + 12.861E0 133.27E0 + 13.436E0 143.84E0 + 14.167E0 161.91E0 + 14.755E0 180.67E0 + 15.168E0 198.44E0 + 15.651E0 226.86E0 + 15.746E0 229.65E0 + 16.216E0 258.27E0 + 16.445E0 273.77E0 + 16.965E0 339.15E0 + 17.121E0 350.13E0 + 17.206E0 362.75E0 + 17.250E0 371.03E0 + 17.339E0 393.32E0 + 17.793E0 448.53E0 + 18.123E0 473.78E0 + 18.49E0 511.12E0 + 18.566E0 524.70E0 + 18.645E0 548.75E0 + 18.706E0 551.64E0 + 18.924E0 574.02E0 + 19.1E0 623.86E0 + 0.375E0 21.46E0 + 0.471E0 24.33E0 + 1.504E0 33.43E0 + 2.204E0 39.22E0 + 2.813E0 44.18E0 + 4.765E0 55.02E0 + 9.835E0 94.33E0 + 10.040E0 96.44E0 + 11.946E0 118.82E0 + 12.596E0 128.48E0 + 13.303E0 141.94E0 + 13.922E0 156.92E0 + 14.440E0 171.65E0 + 14.951E0 190.00E0 + 15.627E0 223.26E0 + 15.639E0 223.88E0 + 15.814E0 231.50E0 + 16.315E0 265.05E0 + 16.334E0 269.44E0 + 16.430E0 271.78E0 + 16.423E0 273.46E0 + 17.024E0 334.61E0 + 17.009E0 339.79E0 + 17.165E0 349.52E0 + 17.134E0 358.18E0 + 17.349E0 377.98E0 + 17.576E0 394.77E0 + 17.848E0 429.66E0 + 18.090E0 468.22E0 + 18.276E0 487.27E0 + 18.404E0 519.54E0 + 18.519E0 523.03E0 + 19.133E0 612.99E0 + 19.074E0 638.59E0 + 19.239E0 641.36E0 + 19.280E0 622.05E0 + 19.101E0 631.50E0 + 19.398E0 663.97E0 + 19.252E0 646.9E0 + 19.89E0 748.29E0 + 20.007E0 749.21E0 + 19.929E0 750.14E0 + 19.268E0 647.04E0 + 19.324E0 646.89E0 + 20.049E0 746.9E0 + 20.107E0 748.43E0 + 20.062E0 747.35E0 + 20.065E0 749.27E0 + 19.286E0 647.61E0 + 19.972E0 747.78E0 + 20.088E0 750.51E0 + 20.743E0 851.37E0 + 20.83E0 845.97E0 + 20.935E0 847.54E0 + 21.035E0 849.93E0 + 20.93E0 851.61E0 + 21.074E0 849.75E0 + 21.085E0 850.98E0 + 20.935E0 848.23E0 Added: commons/proper/math/trunk/src/test/resources/org/apache/commons/math3/fitting/leastsquares/Kirby2.dat URL: http://svn.apache.org/viewvc/commons/proper/math/trunk/src/test/resources/org/apache/commons/math3/fitting/leastsquares/Kirby2.dat?rev=1508481&view=auto ============================================================================== --- commons/proper/math/trunk/src/test/resources/org/apache/commons/math3/fitting/leastsquares/Kirby2.dat (added) +++ commons/proper/math/trunk/src/test/resources/org/apache/commons/math3/fitting/leastsquares/Kirby2.dat Tue Jul 30 15:04:22 2013 @@ -0,0 +1,211 @@ +NIST/ITL StRD +Dataset Name: Kirby2 (Kirby2.dat) + +File Format: ASCII + Starting Values (lines 41 to 45) + Certified Values (lines 41 to 50) + Data (lines 61 to 211) + +Procedure: Nonlinear Least Squares Regression + +Description: These data are the result of a NIST study involving + scanning electron microscope line with standards. + + +Reference: Kirby, R., NIST (197?). + Scanning electron microscope line width standards. 
+ + + + + + + + +Data: 1 Response (y) + 1 Predictor (x) + 151 Observations + Average Level of Difficulty + Observed Data + +Model: Rational Class (quadratic/quadratic) + 5 Parameters (b1 to b5) + + y = (b1 + b2*x + b3*x**2) / + (1 + b4*x + b5*x**2) + e + + + Starting values Certified Values + + Start 1 Start 2 Parameter Standard Deviation + b1 = 2 1.5 1.6745063063E+00 8.7989634338E-02 + b2 = -0.1 -0.15 -1.3927397867E-01 4.1182041386E-03 + b3 = 0.003 0.0025 2.5961181191E-03 4.1856520458E-05 + b4 = -0.001 -0.0015 -1.7241811870E-03 5.8931897355E-05 + b5 = 0.00001 0.00002 2.1664802578E-05 2.0129761919E-07 + +Residual Sum of Squares: 3.9050739624E+00 +Residual Standard Deviation: 1.6354535131E-01 +Degrees of Freedom: 146 +Number of Observations: 151 + + + + + + + + + +Data: y x + 0.0082E0 9.65E0 + 0.0112E0 10.74E0 + 0.0149E0 11.81E0 + 0.0198E0 12.88E0 + 0.0248E0 14.06E0 + 0.0324E0 15.28E0 + 0.0420E0 16.63E0 + 0.0549E0 18.19E0 + 0.0719E0 19.88E0 + 0.0963E0 21.84E0 + 0.1291E0 24.00E0 + 0.1710E0 26.25E0 + 0.2314E0 28.86E0 + 0.3227E0 31.85E0 + 0.4809E0 35.79E0 + 0.7084E0 40.18E0 + 1.0220E0 44.74E0 + 1.4580E0 49.53E0 + 1.9520E0 53.94E0 + 2.5410E0 58.29E0 + 3.2230E0 62.63E0 + 3.9990E0 67.03E0 + 4.8520E0 71.25E0 + 5.7320E0 75.22E0 + 6.7270E0 79.33E0 + 7.8350E0 83.56E0 + 9.0250E0 87.75E0 + 10.2670E0 91.93E0 + 11.5780E0 96.10E0 + 12.9440E0 100.28E0 + 14.3770E0 104.46E0 + 15.8560E0 108.66E0 + 17.3310E0 112.71E0 + 18.8850E0 116.88E0 + 20.5750E0 121.33E0 + 22.3200E0 125.79E0 + 22.3030E0 125.79E0 + 23.4600E0 128.74E0 + 24.0600E0 130.27E0 + 25.2720E0 133.33E0 + 25.8530E0 134.79E0 + 27.1100E0 137.93E0 + 27.6580E0 139.33E0 + 28.9240E0 142.46E0 + 29.5110E0 143.90E0 + 30.7100E0 146.91E0 + 31.3500E0 148.51E0 + 32.5200E0 151.41E0 + 33.2300E0 153.17E0 + 34.3300E0 155.97E0 + 35.0600E0 157.76E0 + 36.1700E0 160.56E0 + 36.8400E0 162.30E0 + 38.0100E0 165.21E0 + 38.6700E0 166.90E0 + 39.8700E0 169.92E0 + 40.0300E0 170.32E0 + 40.5000E0 171.54E0 + 41.3700E0 173.79E0 + 41.6700E0 174.57E0 + 42.3100E0 176.25E0 + 42.7300E0 177.34E0 + 43.4600E0 179.19E0 + 44.1400E0 181.02E0 + 44.5500E0 182.08E0 + 45.2200E0 183.88E0 + 45.9200E0 185.75E0 + 46.3000E0 186.80E0 + 47.0000E0 188.63E0 + 47.6800E0 190.45E0 + 48.0600E0 191.48E0 + 48.7400E0 193.35E0 + 49.4100E0 195.22E0 + 49.7600E0 196.23E0 + 50.4300E0 198.05E0 + 51.1100E0 199.97E0 + 51.5000E0 201.06E0 + 52.1200E0 202.83E0 + 52.7600E0 204.69E0 + 53.1800E0 205.86E0 + 53.7800E0 207.58E0 + 54.4600E0 209.50E0 + 54.8300E0 210.65E0 + 55.4000E0 212.33E0 + 56.4300E0 215.43E0 + 57.0300E0 217.16E0 + 58.0000E0 220.21E0 + 58.6100E0 221.98E0 + 59.5800E0 225.06E0 + 60.1100E0 226.79E0 + 61.1000E0 229.92E0 + 61.6500E0 231.69E0 + 62.5900E0 234.77E0 + 63.1200E0 236.60E0 + 64.0300E0 239.63E0 + 64.6200E0 241.50E0 + 65.4900E0 244.48E0 + 66.0300E0 246.40E0 + 66.8900E0 249.35E0 + 67.4200E0 251.32E0 + 68.2300E0 254.22E0 + 68.7700E0 256.24E0 + 69.5900E0 259.11E0 + 70.1100E0 261.18E0 + 70.8600E0 264.02E0 + 71.4300E0 266.13E0 + 72.1600E0 268.94E0 + 72.7000E0 271.09E0 + 73.4000E0 273.87E0 + 73.9300E0 276.08E0 + 74.6000E0 278.83E0 + 75.1600E0 281.08E0 + 75.8200E0 283.81E0 + 76.3400E0 286.11E0 + 76.9800E0 288.81E0 + 77.4800E0 291.08E0 + 78.0800E0 293.75E0 + 78.6000E0 295.99E0 + 79.1700E0 298.64E0 + 79.6200E0 300.84E0 + 79.8800E0 302.02E0 + 80.1900E0 303.48E0 + 80.6600E0 305.65E0 + 81.2200E0 308.27E0 + 81.6600E0 310.41E0 + 82.1600E0 313.01E0 + 82.5900E0 315.12E0 + 83.1400E0 317.71E0 + 83.5000E0 319.79E0 + 84.0000E0 322.36E0 + 84.4000E0 324.42E0 + 84.8900E0 326.98E0 + 85.2600E0 329.01E0 + 85.7400E0 331.56E0 + 86.0700E0 
333.56E0 + 86.5400E0 336.10E0 + 86.8900E0 338.08E0 + 87.3200E0 340.60E0 + 87.6500E0 342.57E0 + 88.1000E0 345.08E0 + 88.4300E0 347.02E0 + 88.8300E0 349.52E0 + 89.1200E0 351.44E0 + 89.5400E0 353.93E0 + 89.8500E0 355.83E0 + 90.2500E0 358.32E0 + 90.5500E0 360.20E0 + 90.9300E0 362.67E0 + 91.2000E0 364.53E0 + 91.5500E0 367.00E0 + 92.2000E0 371.30E0 Added: commons/proper/math/trunk/src/test/resources/org/apache/commons/math3/fitting/leastsquares/Lanczos1.dat URL: http://svn.apache.org/viewvc/commons/proper/math/trunk/src/test/resources/org/apache/commons/math3/fitting/leastsquares/Lanczos1.dat?rev=1508481&view=auto ============================================================================== --- commons/proper/math/trunk/src/test/resources/org/apache/commons/math3/fitting/leastsquares/Lanczos1.dat (added) +++ commons/proper/math/trunk/src/test/resources/org/apache/commons/math3/fitting/leastsquares/Lanczos1.dat Tue Jul 30 15:04:22 2013 @@ -0,0 +1,84 @@ +NIST/ITL StRD +Dataset Name: Lanczos1 (Lanczos1.dat) + +File Format: ASCII + Starting Values (lines 41 to 46) + Certified Values (lines 41 to 51) + Data (lines 61 to 84) + +Procedure: Nonlinear Least Squares Regression + +Description: These data are taken from an example discussed in + Lanczos (1956). The data were generated to 14-digits + of accuracy using + f(x) = 0.0951*exp(-x) + 0.8607*exp(-3*x) + + 1.5576*exp(-5*x). + + +Reference: Lanczos, C. (1956). + Applied Analysis. + Englewood Cliffs, NJ: Prentice Hall, pp. 272-280. + + + + +Data: 1 Response (y) + 1 Predictor (x) + 24 Observations + Average Level of Difficulty + Generated Data + +Model: Exponential Class + 6 Parameters (b1 to b6) + + y = b1*exp(-b2*x) + b3*exp(-b4*x) + b5*exp(-b6*x) + e + + + + Starting values Certified Values + + Start 1 Start 2 Parameter Standard Deviation + b1 = 1.2 0.5 9.5100000027E-02 5.3347304234E-11 + b2 = 0.3 0.7 1.0000000001E+00 2.7473038179E-10 + b3 = 5.6 3.6 8.6070000013E-01 1.3576062225E-10 + b4 = 5.5 4.2 3.0000000002E+00 3.3308253069E-10 + b5 = 6.5 4 1.5575999998E+00 1.8815731448E-10 + b6 = 7.6 6.3 5.0000000001E+00 1.1057500538E-10 + +Residual Sum of Squares: 1.4307867721E-25 +Residual Standard Deviation: 8.9156129349E-14 +Degrees of Freedom: 18 +Number of Observations: 24 + + + + + + + + +Data: y x + 2.513400000000E+00 0.000000000000E+00 + 2.044333373291E+00 5.000000000000E-02 + 1.668404436564E+00 1.000000000000E-01 + 1.366418021208E+00 1.500000000000E-01 + 1.123232487372E+00 2.000000000000E-01 + 9.268897180037E-01 2.500000000000E-01 + 7.679338563728E-01 3.000000000000E-01 + 6.388775523106E-01 3.500000000000E-01 + 5.337835317402E-01 4.000000000000E-01 + 4.479363617347E-01 4.500000000000E-01 + 3.775847884350E-01 5.000000000000E-01 + 3.197393199326E-01 5.500000000000E-01 + 2.720130773746E-01 6.000000000000E-01 + 2.324965529032E-01 6.500000000000E-01 + 1.996589546065E-01 7.000000000000E-01 + 1.722704126914E-01 7.500000000000E-01 + 1.493405660168E-01 8.000000000000E-01 + 1.300700206922E-01 8.500000000000E-01 + 1.138119324644E-01 9.000000000000E-01 + 1.000415587559E-01 9.500000000000E-01 + 8.833209084540E-02 1.000000000000E+00 + 7.833544019350E-02 1.050000000000E+00 + 6.976693743449E-02 1.100000000000E+00 + 6.239312536719E-02 1.150000000000E+00 Added: commons/proper/math/trunk/src/test/resources/org/apache/commons/math3/fitting/leastsquares/MGH17.dat URL: http://svn.apache.org/viewvc/commons/proper/math/trunk/src/test/resources/org/apache/commons/math3/fitting/leastsquares/MGH17.dat?rev=1508481&view=auto 
============================================================================== --- commons/proper/math/trunk/src/test/resources/org/apache/commons/math3/fitting/leastsquares/MGH17.dat (added) +++ commons/proper/math/trunk/src/test/resources/org/apache/commons/math3/fitting/leastsquares/MGH17.dat Tue Jul 30 15:04:22 2013 @@ -0,0 +1,93 @@ +NIST/ITL StRD +Dataset Name: MGH17 (MGH17.dat) + +File Format: ASCII + Starting Values (lines 41 to 45) + Certified Values (lines 41 to 50) + Data (lines 61 to 93) + +Procedure: Nonlinear Least Squares Regression + +Description: This problem was found to be difficult for some very + good algorithms. + + See More, J. J., Garbow, B. S., and Hillstrom, K. E. + (1981). Testing unconstrained optimization software. + ACM Transactions on Mathematical Software. 7(1): + pp. 17-41. + +Reference: Osborne, M. R. (1972). + Some aspects of nonlinear least squares + calculations. In Numerical Methods for Nonlinear + Optimization, Lootsma (Ed). + New York, NY: Academic Press, pp. 171-189. + +Data: 1 Response (y) + 1 Predictor (x) + 33 Observations + Average Level of Difficulty + Generated Data + +Model: Exponential Class + 5 Parameters (b1 to b5) + + y = b1 + b2*exp[-x*b4] + b3*exp[-x*b5] + e + + + + Starting values Certified Values + + Start 1 Start 2 Parameter Standard Deviation + b1 = 50 0.5 3.7541005211E-01 2.0723153551E-03 + b2 = 150 1.5 1.9358469127E+00 2.2031669222E-01 + b3 = -100 -1 -1.4646871366E+00 2.2175707739E-01 + b4 = 1 0.01 1.2867534640E-02 4.4861358114E-04 + b5 = 2 0.02 2.2122699662E-02 8.9471996575E-04 + +Residual Sum of Squares: 5.4648946975E-05 +Residual Standard Deviation: 1.3970497866E-03 +Degrees of Freedom: 28 +Number of Observations: 33 + + + + + + + + + +Data: y x + 8.440000E-01 0.000000E+00 + 9.080000E-01 1.000000E+01 + 9.320000E-01 2.000000E+01 + 9.360000E-01 3.000000E+01 + 9.250000E-01 4.000000E+01 + 9.080000E-01 5.000000E+01 + 8.810000E-01 6.000000E+01 + 8.500000E-01 7.000000E+01 + 8.180000E-01 8.000000E+01 + 7.840000E-01 9.000000E+01 + 7.510000E-01 1.000000E+02 + 7.180000E-01 1.100000E+02 + 6.850000E-01 1.200000E+02 + 6.580000E-01 1.300000E+02 + 6.280000E-01 1.400000E+02 + 6.030000E-01 1.500000E+02 + 5.800000E-01 1.600000E+02 + 5.580000E-01 1.700000E+02 + 5.380000E-01 1.800000E+02 + 5.220000E-01 1.900000E+02 + 5.060000E-01 2.000000E+02 + 4.900000E-01 2.100000E+02 + 4.780000E-01 2.200000E+02 + 4.670000E-01 2.300000E+02 + 4.570000E-01 2.400000E+02 + 4.480000E-01 2.500000E+02 + 4.380000E-01 2.600000E+02 + 4.310000E-01 2.700000E+02 + 4.240000E-01 2.800000E+02 + 4.200000E-01 2.900000E+02 + 4.140000E-01 3.000000E+02 + 4.110000E-01 3.100000E+02 + 4.060000E-01 3.200000E+02
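The analytical Jacobians hard-coded in the factory can also be sanity-checked against a central finite-difference approximation of the corresponding model value. A minimal sketch using the MGH17 dataset above at its first starting point; the test class name, step size and tolerance are illustrative assumptions, the dataset API is the one defined in this commit:

    package org.apache.commons.math3.fitting.leastsquares;

    import java.io.IOException;
    import org.junit.Assert;
    import org.junit.Test;

    public class ModelDerivativesSketchTest {
        @Test
        public void testMGH17DerivativesAgainstFiniteDifferences() throws IOException {
            final StatisticalReferenceDataset dataset =
                StatisticalReferenceDatasetFactory.createMGH17();
            // Check the partial derivatives at the first starting point and
            // the first predictor value of the dataset.
            final double[] a = dataset.getStartingPoint(0);
            final double x = dataset.getX(0);
            final double[] grad = dataset.getModelDerivatives(x, a);
            final double h = 1e-6; // illustrative central-difference step
            for (int j = 0; j < dataset.getNumParameters(); j++) {
                final double[] ap = a.clone();
                final double[] am = a.clone();
                ap[j] += h;
                am[j] -= h;
                final double fd =
                    (dataset.getModelValue(x, ap) - dataset.getModelValue(x, am)) / (2 * h);
                // Central differences are second-order accurate, so a loose
                // absolute tolerance is enough for this sanity check.
                Assert.assertEquals(grad[j], fd, 1e-6);
            }
        }
    }
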