Code for MATLAB

The document presents a numerical optimization routine that uses Newton's method to find the minimum of a quadratic function defined by parameters A, B, and C. It runs the iteration with both the exact gradient and forward-difference (numerical) gradients, and analyzes the resulting error and convergence behavior for several finite-difference step sizes. The results are visualized in surface and log-scale plots and summarized in printed output, including the line-search step lengths (alpha values) used by each method.
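For reference, the quantities the appendix script computes are the quadratic objective, its gradient and (constant) Hessian, and the damped Newton update

    f(x, y)      = A*x^2 - B*x*y + C*y^2 + x - y
    grad f(x, y) = [2*A*x - B*y + 1;  -B*x + 2*C*y - 1]
    H            = [2*A, -B;  -B, 2*C]
    [x; y]      <-  [x; y] - alpha * (H \ grad f(x, y))

where the step length alpha is chosen on [0, 1] by the golden-section line search implemented in line_search below.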

Appendix

% Problem parameters and solver settings
A = 3; B = 3; C = 4;                  % coefficients of the quadratic objective
x0 = 2; y0 = 8;                       % starting point
h_vals = [1e-2, 1e-4, 1e-6, 1e-8];    % finite-difference step sizes for Method 2
tol = 1e-7; max_iter = 100;           % convergence tolerance and iteration cap

% Objective function
f = @(x, y) A*x.^2 - B*x.*y + C*y.^2 + x - y;

% Storage for iterates, iteration counts, errors, and line-search step lengths
path_exact = []; path_num = {};
iter_counts = []; errors = zeros(1, length(h_vals));
alpha_history_method1 = []; alpha_history_method2 = [];

% Method 1: Newton's method with the exact (analytical) gradient
x = x0; y = y0;
path_exact(:,1) = [x; y];

for k = 1:max_iter
    grad = [2*A*x - B*y + 1; -B*x + 2*C*y - 1];   % exact gradient of f
    H = [2*A, -B; -B, 2*C];                       % constant Hessian of the quadratic
    delta = H \ grad;                             % Newton direction
    alpha = line_search(f, x, y, delta(1), delta(2), 0, 1, tol);
    alpha_history_method1(k) = alpha;
    x = x - alpha * delta(1);
    y = y - alpha * delta(2);
    path_exact(:,k+1) = [x; y];
    if norm(path_exact(:,end) - path_exact(:,end-1)) < tol
        break;
    end
end
min_exact = [x; y];   % minimizer found with the exact gradient (Method 1)

% Method 2: Newton's method with forward-difference (numerical) gradients
for hi = 1:length(h_vals)
    h = h_vals(hi);
    x = x0; y = y0;
    path = [x; y];
    for k = 1:max_iter
        grad_x = (f(x+h, y) - f(x, y)) / h;   % forward-difference partial derivatives
        grad_y = (f(x, y+h) - f(x, y)) / h;
        grad = [grad_x; grad_y];
        H = [2*A, -B; -B, 2*C];
        delta = H \ grad;
        alpha = line_search(f, x, y, delta(1), delta(2), 0, 1, tol);
        alpha_history_method2(hi,k) = alpha;
        x = x - alpha * delta(1);
        y = y - alpha * delta(2);
        path(:,k+1) = [x; y];
        if norm(path(:,end) - path(:,end-1)) < tol
            break;
        end
    end
    path_num{hi} = path;
    iter_counts(hi) = k;
    errors(hi) = norm([x; y] - min_exact);   % distance from the Method 1 minimizer
end

% Surface plot of f with the optimization paths overlaid
[x_grid, y_grid] = meshgrid(linspace(-2, 4, 100), linspace(-2, 4, 100));
z_grid = f(x_grid, y_grid);

figure('Name', 'Optimization Paths');
surf(x_grid, y_grid, z_grid, 'EdgeColor', 'none'); hold on;
plot3(path_exact(1,:), path_exact(2,:), f(path_exact(1,:), path_exact(2,:)), ...
    'r-', 'LineWidth', 2);
for i = 1:length(h_vals)
    p = path_num{i};
    plot3(p(1,:), p(2,:), f(p(1,:), p(2,:)), '--', 'LineWidth', 1.5);
end
xlabel('x'); ylabel('y'); zlabel('f(x,y)');
title('Newton Optimization Paths');
legend([{'Exact \nabla f'}, arrayfun(@(h) sprintf('h = %.0e', h), h_vals, ...
    'UniformOutput', false)]);
view(135, 30); grid on;

figure('Name', 'Error Analysis');
loglog(h_vals, errors, 'bo-', 'LineWidth', 2);
xlabel('Step size (h)'); ylabel('Error from exact minimum');
title('Numerical Gradient Error Analysis'); grid on;
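% Note on the error plot above: for a forward difference the truncation error grows
% roughly like O(h) while floating-point cancellation grows roughly like eps/h, so the
% error is typically smallest for an intermediate step size (near sqrt(eps), about 1e-8
% in double precision), and the loglog curve generally reflects this trade-off.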

figure('Name', 'Convergence Analysis');
semilogx(h_vals, iter_counts, 'ms--', 'LineWidth', 2);
xlabel('Step size (h)'); ylabel('Iterations to convergence');
title('Convergence Rate vs. Step Size'); grid on;

fprintf('\n=== Optimization Results ===\n');
fprintf('Analytical Minimum: (%.6f, %.6f)\n', min_exact(1), min_exact(2));
fprintf('\nNumerical Gradient Performance:\n');
for i = 1:length(h_vals)
    fprintf('h = %.0e -> %d iterations, error: %.2e\n', ...
        h_vals(i), iter_counts(i), errors(i));
end
fprintf('\nAlpha values used in Method 1 (Exact Gradient):\n');
disp(alpha_history_method1);
fprintf('Alpha values used in Method 2 (Numerical Gradient):\n');
disp(alpha_history_method2);

function alpha_opt = line_search(f, x, y, delta_x, delta_y, a, b, tol)
% Golden-section search for the step length alpha in [a, b] that minimizes
% f(x - alpha*delta_x, y - alpha*delta_y), polished by a quadratic fit.
golden_ratio = (1 + sqrt(5)) / 2;
while (b - a) > tol
    c = b - (b - a)/golden_ratio;
    d = a + (b - a)/golden_ratio;
    fc = f(x - c*delta_x, y - c*delta_y);
    fd = f(x - d*delta_x, y - d*delta_y);
    if fc < fd
        b = d;
    else
        a = c;
    end
end
% Fit a parabola through three points in the final bracket and take its vertex,
% clamped back into [a, b].
alpha_points = linspace(a, b, 3);
f_values = arrayfun(@(alpha) f(x - alpha*delta_x, y - alpha*delta_y), alpha_points);
V = [alpha_points'.^2, alpha_points', ones(3,1)];   % quadratic-fit design matrix
coeffs = V \ f_values';
alpha_opt = -coeffs(2)/(2*coeffs(1));
alpha_opt = max(a, min(b, alpha_opt));
end
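Because f is quadratic, its exact minimizer can also be obtained directly by setting the gradient to zero, i.e. by solving H*[x; y] = [-1; 1]. A minimal sanity check along those lines, assuming the script above has just been run so that A, B, C, and min_exact are still in the workspace (enter it at the command prompt, since script code cannot appear after a local function):

% Closed-form minimizer of the quadratic: grad f = 0  <=>  H*[x; y] = [-1; 1]
H = [2*A, -B; -B, 2*C];
xy_star = H \ [-1; 1];
fprintf('Closed-form minimum: (%.6f, %.6f)\n', xy_star(1), xy_star(2));
fprintf('Difference from Newton result: %.2e\n', norm(xy_star - min_exact));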
