Jeez, I guess I shouldn't have kept it short in the first post.
Guys, I'm in college, I take college level math, I'm not a moron.
Obviously you haven't made it to Stats, Physics, or higher-level math yet!
There are a variety of methods to fit a curve to a set of points. A basic algorithm is the Gauss-Newton method: you pick a model function, plug in a series of points, and it iteratively adjusts the model's parameters until the curve approximates the data in the least-squares sense. Generally, the more points you supply, the better the resulting fit.
Once the fit has converged, you can plug your own values into the fitted formula and compute the resulting points.
http://en.wikipedia.org/wiki/Gauss%E2%80%93Newton_algorithm
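If you want to see what an iteration actually does before wading into a library, here is a minimal, self-contained sketch of Gauss-Newton fitting the model y = a * exp(b * x). The sample data, the starting guesses, and the hand-rolled 2x2 solve are all just for illustration:

using System;

class GaussNewtonDemo
{
    static void Main()
    {
        // Sample points taken from y = 2 * exp(0.5 * x) (no noise, purely illustrative).
        double[] x = { 0.0, 1.0, 2.0, 3.0, 4.0 };
        double[] y = new double[x.Length];
        for (int i = 0; i < x.Length; i++) y[i] = 2.0 * Math.Exp(0.5 * x[i]);

        // Starting guesses for the parameters a and b in the model y = a * exp(b * x).
        double a = 1.8, b = 0.45;

        for (int iter = 0; iter < 10; iter++)
        {
            // Accumulate J^T * J (2x2) and J^T * r (2x1) directly.
            double jtj00 = 0, jtj01 = 0, jtj11 = 0, jtr0 = 0, jtr1 = 0;

            for (int i = 0; i < x.Length; i++)
            {
                double e = Math.Exp(b * x[i]);
                double f = a * e;                 // model value
                double r = y[i] - f;              // residual
                double dfda = e;                  // partial derivative w.r.t. a
                double dfdb = a * x[i] * e;       // partial derivative w.r.t. b

                jtj00 += dfda * dfda;
                jtj01 += dfda * dfdb;
                jtj11 += dfdb * dfdb;
                jtr0  += dfda * r;
                jtr1  += dfdb * r;
            }

            // Solve the 2x2 normal equations (J^T J) * delta = J^T r by Cramer's rule.
            double det = jtj00 * jtj11 - jtj01 * jtj01;
            double da = (jtr0 * jtj11 - jtj01 * jtr1) / det;
            double db = (jtj00 * jtr1 - jtr0 * jtj01) / det;

            a += da;
            b += db;
        }

        Console.WriteLine("a = {0}, b = {1}", a, b);   // should approach 2 and 0.5
    }
}

Each pass builds the residuals and the Jacobian, solves the normal equations (J^T J) * delta = J^T r, and nudges a and b toward the least-squares fit; with a reasonable starting guess it converges in a handful of iterations.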
The program that you want to use to do this for you is MATLAB!
Or, if you are adventurous, here you go:
using System;
using System.Diagnostics;

namespace NumericalMethods
{
    public class GaussNewton
    {
        // Working matrices rebuilt on every iteration.
        private Matrix _jacobian;
        private Matrix _residuals;
        private Matrix _regressionParameters0;
        private Derivatives _derivatives;

        // The parameters being fitted, the observed (independent) variables,
        // the model function, and the raw data points.
        private Parameter[] _regressionParameters;
        private Parameter[] _observedParameters;
        private Func<double> _regressionFunction;
        private double[,] _data;

        public GaussNewton(Func<double> regressionFunction, Parameter[] regressionParameters, Parameter[] observedParameters, double[,] data, int numberOfDerivativePoints)
        {
            // data holds one row per observed variable plus a final row of measured values.
            Debug.Assert(data.GetLength(0) == observedParameters.Length + 1);

            _data = data;
            _observedParameters = observedParameters;
            _regressionParameters = regressionParameters;
            _regressionFunction = regressionFunction;

            int numberOfParameters = _regressionParameters.Length;
            int numberOfPoints = data.GetLength(1);

            _derivatives = new Derivatives(numberOfDerivativePoints);
            _jacobian = new Matrix(numberOfPoints, numberOfParameters);
            _residuals = new Matrix(numberOfPoints, 1);
            _regressionParameters0 = new Matrix(numberOfParameters, 1);
        }

        public GaussNewton(Func<double> function, Parameter[] regressionParameters, Parameter[] observedParameters, double[,] data) :
            this(function, regressionParameters, observedParameters, data, 3)
        {
        }

        public void Iterate()
        {
            int numberOfPoints = _data.GetLength(1);
            int numberOfParameters = _regressionParameters.Length;
            double currentResidual = 0.0;

            // Build the residual vector and the Jacobian, one data point per row.
            for (int i = 0; i < numberOfPoints; i++)
            {
                for (int j = 0; j < _observedParameters.Length; j++)
                {
                    _observedParameters[j].Value = _data[j, i];
                }

                double functionValue = _regressionFunction();
                double residual = _data[_observedParameters.Length, i] - functionValue;
                _residuals[i, 0] = residual;
                currentResidual += residual * residual;   // sum of squared residuals, useful for monitoring convergence

                // Partial derivative of the model with respect to each fitted parameter.
                for (int j = 0; j < numberOfParameters; j++)
                {
                    _jacobian[i, j] = _derivatives.ComputePartialDerivative(_regressionFunction, _regressionParameters[j], 1, functionValue);
                }
            }

            // Snapshot the current parameter values as a column vector.
            for (int i = 0; i < numberOfParameters; i++)
            {
                _regressionParameters0[i, 0] = _regressionParameters[i].Value;
            }

            // Solve the normal equations (J^T J) * delta = J^T r and update the parameters.
            Matrix matJacobianTrans = _jacobian.Transpose();
            Matrix matJTranResid = matJacobianTrans * _residuals;
            Matrix matJTranJ = matJacobianTrans * _jacobian;
            Matrix matDelta = matJTranJ.SolveFor(matJTranResid);
            Matrix matNewRegPars = _regressionParameters0 + matDelta;

            for (int i = 0; i < numberOfParameters; i++)
            {
                _regressionParameters[i].Value = matNewRegPars[i, 0];
            }
        }
    }
}
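Parameter, Matrix and Derivatives in the class above come from the rest of that library and aren't shown here, so take this as a rough sketch of how you might drive it. In particular, the Parameter constructor and its implicit conversion to double inside the model function are my assumptions:

// Rough usage sketch, assuming Parameter wraps a double Value, can be
// constructed from an initial value, and converts implicitly to double.
Parameter x = new Parameter(0.0);   // observed (independent) variable
Parameter a = new Parameter(1.8);   // initial guess for parameter a
Parameter b = new Parameter(0.4);   // initial guess for parameter b

// Model to fit: y = a * exp(b * x)
Func<double> model = () => a * Math.Exp(b * x);

// One row per observed variable, plus a final row of measured y values
// (this layout is what the Debug.Assert in the constructor checks).
double[,] data =
{
    { 0.0, 1.0, 2.0, 3.0, 4.0 },    // x values
    { 2.0, 3.3, 5.4, 9.0, 14.8 }    // measured y values
};

GaussNewton fitter = new GaussNewton(model, new[] { a, b }, new[] { x }, data);
for (int i = 0; i < 10; i++)
{
    fitter.Iterate();
}

Console.WriteLine("a = {0}, b = {1}", a.Value, b.Value);

// Plugging your own values into the fitted formula: set x and evaluate the model.
x.Value = 2.5;
double predicted = model();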
P.S. Calc 3 was in 1988 for me! 