function exam2017MajUppg8()
% Steepest descent with exact line search (solved by Newton's method)
% applied to f(x) = x2^2/2 - x1 + x1^2*(x1+x2)^2/4.
close all

%% Visualization only
% Defined with (x_1,x_2) = (x,y) so we can use it in the contour plot.
fXY = @(x,y) y.^2/2 - x + x.^2.*(x+y).^2/4;
[X,Y] = meshgrid(-0.5:0.005:2, -2:0.005:1);
Z = fXY(X,Y);
[C,h] = contour(X,Y,Z, -0.9157 + logspace(-5,1,40));
xlabel('x')
ylabel('y')
hold on

%% Algorithm
f = @(x) x(2).^2/2 - x(1) + x(1).^2*(x(1)+x(2))^2/4;
gradF = @(x) [-1 + (x(1)*(x(1)+x(2))^2 + x(1)^2*(x(1)+x(2)))/2;
              x(2) + x(1)^2*(x(1)+x(2))/2];
% The Hessian is needed for Newton's method on the line search problem.
% Note that it is symmetric: H(1,2) = H(2,1).
H = @(x) [3*x(1)^2 + 3*x(1)*x(2) + x(2)^2/2,  3*x(1)^2/2 + x(1)*x(2);
          3*x(1)^2/2 + x(1)*x(2),             1 + x(1)^2/2];

x{1} = [0; 0];
plot(x{1}(1), x{1}(2), 'ro', 'linewidth', 3)
pause(1.5)

iter = 10;
% Output bookkeeping; not important for the algorithm itself.
xVec = x{1};
fVec = f(x{1});
alphaVec = [];
sVec = [];

for k = 1:iter
    % Steepest descent search direction
    s{k} = -gradF(x{k});
    % Newton's method applied to the line search problem
    %   min_{alpha_k} f(x{k} + alpha_k*s{k});
    % we use 5 iterations.
    alpha{k} = 1;
    for j = 1:5
        alpha{k} = alpha{k} ...
            - gradF(x{k}+alpha{k}*s{k})'*s{k} / (s{k}'*H(x{k}+alpha{k}*s{k})*s{k});
    end
    % Steepest descent update
    x{k+1} = x{k} + alpha{k}*s{k};
    plot([x{k}(1) x{k+1}(1)], [x{k}(2) x{k+1}(2)], 'r-o', 'linewidth', 2)
    pause(1.5)
    sVec = [sVec s{k}];
    alphaVec = [alphaVec alpha{k}];
    xVec = [xVec x{k+1}];
    fVec = [fVec f(x{k+1})];
end
xVec
sVec
alphaVec
fVec
% Reference solution from MATLAB's built-in minimizer; [0;0] is the
% initial guess.
[xRef, fMinValRef] = fminsearch(f, [0; 0])
end
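
%% Optional derivative check
% A minimal added sketch, not part of the original exam solution: it
% compares the analytic gradient and Hessian above against central finite
% differences at an arbitrary test point. The local function name
% checkDerivatives, the test point x0, and the step size h are
% illustrative assumptions. As a local function it can only be called from
% inside this file (e.g. by adding a checkDerivatives() call at the top of
% exam2017MajUppg8), or it can be saved as its own .m file.
function checkDerivatives()
f = @(x) x(2).^2/2 - x(1) + x(1).^2*(x(1)+x(2))^2/4;
gradF = @(x) [-1 + (x(1)*(x(1)+x(2))^2 + x(1)^2*(x(1)+x(2)))/2;
              x(2) + x(1)^2*(x(1)+x(2))/2];
H = @(x) [3*x(1)^2 + 3*x(1)*x(2) + x(2)^2/2,  3*x(1)^2/2 + x(1)*x(2);
          3*x(1)^2/2 + x(1)*x(2),             1 + x(1)^2/2];
x0 = [0.7; -0.3];   % arbitrary test point (assumption)
h = 1e-6;           % finite-difference step (assumption)
gFD = zeros(2,1);
HFD = zeros(2,2);
for i = 1:2
    e = zeros(2,1);
    e(i) = h;
    gFD(i)   = (f(x0+e) - f(x0-e))/(2*h);           % central difference of f
    HFD(:,i) = (gradF(x0+e) - gradF(x0-e))/(2*h);   % central difference of gradF
end
fprintf('max gradient error: %.2e\n', max(abs(gFD - gradF(x0))))
fprintf('max Hessian error:  %.2e\n', max(max(abs(HFD - H(x0)))))
end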