%minRosenBySD.m
%In this script we apply steepest descent with the
%backtracking linesearch to minimize the 2-D
%Rosenbrock function starting at the point x=(-1.9,2).
%
%Requires on the MATLAB path:
%  rosenbrock : [f,Df] = rosenbrock(x)  returns function value and gradient
%  backtrack  : [xn,fn,fcall] = backtrack(xc,d,fc,fnc,DDfnc,c,gamma,tol)
%
%Termination parameters
tol = 1.0e-4;   %smallest allowed distance between successive iterates
                %(renamed from eps, which shadows the MATLAB builtin eps)
epsf = 1.0e-6;  %tolerance on the norm of the gradient
maxit = 10000;  %maximum number of iterations
iter = 0;
%Linesearch parameters for backtracking
gamma = 0.5;    %step-size contraction factor
c = 0.01;       %sufficient-decrease (Armijo) parameter
%Initialization
xc = [-1.9;2];
fnc = 'rosenbrock';
[fc,Df] = feval(fnc,xc);
nDf = norm(Df);
%Are we already at a solution, or should we continue?
if nDf <= epsf
    disp('Termination due to small gradient.')
else
    %Initialize main loop.
    %Each row of data is [iter, ||grad||, step length, fcalls, f].
    ndiff = 1;
    data = [iter,nDf,ndiff,1,fc];
    %The main loop. Short-circuit && replaces the element-wise & of the
    %original, which needlessly evaluates every operand.
    while nDf > epsf && ndiff > tol && iter < maxit
        d = -Df/nDf;        %unit steepest-descent direction
        DDfnc = Df'*d;      %directional derivative along d (negative here)
        [xn,fn,fcall] = backtrack(xc,d,fc,fnc,DDfnc,c,gamma,tol);
        ndiff = norm(xn-xc);    %length of the step just taken
        xc = xn;
        [fc,Df] = feval(fnc,xc);
        nDf = norm(Df);
        iter = iter + 1;
        data = [data;[iter,nDf,ndiff,fcall,fc]]; %#ok<AGROW> short runs; growth cost negligible
        %Report reason for termination.
        if nDf <= epsf
            disp('Termination due to small gradient.')
            break
        elseif ndiff <= tol
            disp('Termination due to small steps.')
            break
        elseif iter == maxit
            disp('Exceeded maximum number of iterations.')
            break
        end
    end
    %Report on how the algorithm performed.
    %sum replaces the original ones-vector inner product for the column total.
    Tfcall = sum(data(:,4));
    disp('Final point is '), xc
    disp('Final function value is'), fc
    disp('Final norm of the gradient is '), nDf
    disp('Total number of function calls is '), Tfcall
    disp('Total number of gradient calls is '), iter
    %Diagnostic plots, all against the iteration counter in column 1
    %(the original plotted function calls against the raw row index).
    subplot(2,2,1), plot(data(:,1),data(:,4))
    xlabel('iteration'),ylabel('function calls')
    subplot(2,2,2), semilogy(data(:,1),data(:,2))
    xlabel('iteration'),ylabel('norm of the gradient')
    subplot(2,2,3), semilogy(data(:,1),data(:,3))
    xlabel('iteration'),ylabel('distance between iterates')
    subplot(2,2,4), semilogy(data(:,1),data(:,5))
    xlabel('iteration'),ylabel('function value')
end