/// <summary>
///   Optimizes the defined function.
/// </summary>
///
/// <param name="values">The initial guess values for the parameters.</param>
/// <returns>The minimum value of the function, found at the solution point.</returns>
///
public unsafe double Minimize(double[] values)
{
    if (values == null)
        throw new ArgumentNullException("values");

    if (values.Length != numberOfVariables)
        throw new DimensionMismatchException("values");

    if (Function == null) throw new InvalidOperationException(
        "The function to be minimized has not been defined.");

    if (Gradient == null) throw new InvalidOperationException(
        "The gradient function has not been defined.");

    // Initialization
    x = (double[])values.Clone();
    int n = numberOfVariables, m = corrections;

    // Make initial evaluation
    f = getFunction(x);
    g = getGradient(x);

    this.iterations = 0;
    this.evaluations = 1;

    // Obtain the initial Hessian diagonal
    double[] diagonal = null;

    if (Diagonal != null)
    {
        diagonal = getDiagonal();
    }
    else
    {
        diagonal = new double[n];
        for (int i = 0; i < diagonal.Length; i++)
            diagonal[i] = 1.0;
    }

    fixed (double* w = work)
    {
        // The first N locations of the work vector are used to
        // store the gradient and other temporary information.
        double* rho = &w[n];                   // Stores the scalars rho.
        double* alpha = &w[n + m];             // Stores the alphas in computation of H*g.
        double* steps = &w[n + 2 * m];         // Stores the last M search steps.
        double* delta = &w[n + 2 * m + n * m]; // Stores the last M gradient differences.
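        // Note: this layout assumes the work vector was allocated with at
        // least n * (2 * m + 1) + 2 * m elements, the usual workspace size
        // for L-BFGS with m stored corrections.
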
        // Initialize work vector
        for (int i = 0; i < g.Length; i++)
            steps[i] = -g[i] * diagonal[i];
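        // The first search direction is plain scaled steepest descent,
        // -H0 * g, since no curvature pairs have been collected yet.
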
        // Initialize statistics
        double gnorm = Norm.Euclidean(g);
        double xnorm = Norm.Euclidean(x);
        double stp = 1.0 / gnorm;
        double stp1 = stp;
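        // The initial trial step stp = 1/||g|| gives the very first step
        // unit Euclidean length, independently of the gradient's scale.
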
        // Initialize loop
        int nfev, point = 0;
        int npt = 0, cp = 0;
        bool finish = false;

        // Make initial progress report with initialization parameters
        if (Progress != null) Progress(this, new OptimizationProgressEventArgs
            (iterations, evaluations, g, gnorm, x, xnorm, f, stp, finish));

        // Start main loop
        while (!finish)
        {
            iterations++;
            double bound = iterations - 1;

            if (iterations != 1)
            {
                if (iterations > m)
                    bound = m;

                double ys = 0;
                for (int i = 0; i < n; i++)
                    ys += delta[npt + i] * steps[npt + i];
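                // Here ys = y'*s, the curvature along the last accepted
                // step; a Wolfe-condition line search keeps it positive,
                // which is what makes the BFGS update well defined.
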
                // Compute the diagonal of the Hessian
                // or use an approximation by the user.
                if (Diagonal != null)
                {
                    diagonal = getDiagonal();
                }
                else
                {
                    double yy = 0;
                    for (int i = 0; i < n; i++)
                        yy += delta[npt + i] * delta[npt + i];

                    double d = ys / yy;
                    for (int i = 0; i < n; i++)
                        diagonal[i] = d;
                }
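                // The common scalar d = y'*s / y'*y is the standard
                // Shanno-Phua scaling for the initial Hessian approximation.
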
                // Compute -H*g using the formula given in:
                // Nocedal, J. 1980, "Updating quasi-Newton matrices with limited storage",
                // Mathematics of Computation, Vol. 35, No. 151, pp. 773-782.
                cp = (point == 0) ? m : point;
                rho[cp - 1] = 1.0 / ys;

                for (int i = 0; i < n; i++)
                    w[i] = -g[i];

                cp = point;
                for (int i = 1; i <= bound; i++)
                {
                    if (--cp == -1) cp = m - 1;

                    double sq = 0;
                    for (int j = 0; j < n; j++)
                        sq += steps[cp * n + j] * w[j];

                    double beta = alpha[cp] = rho[cp] * sq;
                    for (int j = 0; j < n; j++)
                        w[j] -= beta * delta[cp * n + j];
                }
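                // That was the backward (first) pass of the two-loop
                // recursion: pairs are visited newest to oldest, computing
                // alpha[i] = rho[i] * s_i'*q and updating q -= alpha[i] * y_i,
                // where q lives in the first n entries of w.
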
                for (int i = 0; i < diagonal.Length; i++)
                    w[i] *= diagonal[i];

                for (int i = 1; i <= bound; i++)
                {
                    double yr = 0;
                    for (int j = 0; j < n; j++)
                        yr += delta[cp * n + j] * w[j];

                    double beta = alpha[cp] - rho[cp] * yr;
                    for (int j = 0; j < n; j++)
                        w[j] += beta * steps[cp * n + j];

                    if (++cp == m) cp = 0;
                }
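                // And that was the forward (second) pass: after scaling by
                // the initial diagonal, pairs are visited oldest to newest,
                // adding (alpha[i] - rho[i] * y_i'*r) * s_i to r. The first
                // n entries of w now hold the search direction -H*g.
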
                npt = point * n;

                // Store the search direction
                for (int i = 0; i < n; i++)
                    steps[npt + i] = w[i];

                stp = 1;
            }

            // Save original gradient
            for (int i = 0; i < g.Length; i++)
                w[i] = g[i];

            // Obtain the one-dimensional minimizer of f by computing a line search
            mcsrch(x, ref f, ref g, &steps[point * n], ref stp, out nfev, diagonal);
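            // mcsrch updates x, f, g and the step length stp in place and
            // reports the number of function evaluations in nfev; it is
            // presumably the More-Thuente routine from MINPACK, which
            // enforces the strong Wolfe conditions on the step.
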
            // Register evaluations
            evaluations += nfev;

            // Compute the new step and
            // new gradient differences
            for (int i = 0; i < g.Length; i++)
            {
                steps[npt + i] *= stp;
                delta[npt + i] = g[i] - w[i];
            }
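            // The scaled step s_k = stp * d_k and the gradient difference
            // y_k = g_new - g_old now occupy slot 'point' of the circular
            // correction buffers, replacing the oldest stored pair.
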
            if (++point == m) point = 0;

            // Check for termination
            gnorm = Norm.Euclidean(g);
            xnorm = Norm.Euclidean(x);
            xnorm = Math.Max(1.0, xnorm);

            if (gnorm / xnorm <= tolerance)
                finish = true;
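            // Convergence is declared once the gradient norm, measured
            // relative to max(1, ||x||), drops below the tolerance.
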
            if (Progress != null) Progress(this, new OptimizationProgressEventArgs
                (iterations, evaluations, g, gnorm, x, xnorm, f, stp, finish));
        }
    }

    return f; // Return the minimum value found (at the solution x).
}