MATLAB implementation of Newton's method and the conjugate gradient method for minimizing the Rosenbrock function
% Rosenbrock function
f = @(x) 100*(x(2) - x(1)^2)^2 + (1 - x(1))^2;
% Gradient of the Rosenbrock function
grad_f = @(x) [-400*x(1)*(x(2) - x(1)^2) - 2*(1 - x(1)); 200*(x(2) - x(1)^2)];
% Hessian of the Rosenbrock function
hess_f = @(x) [1200*x(1)^2 - 400*x(2) + 2, -400*x(1); -400*x(1), 200];
% Starting point
x0 = [-1.2; 1];
% Solve with Newton's method
[x1, fval1, ~] = newton(f, grad_f, hess_f, x0);
% Solve with the conjugate gradient method (the step size and beta inside use
% the Hessian, so hess_f must be passed in as well)
[x2, fval2, ~] = conjugate_gradient(f, grad_f, hess_f, x0);
% Print the results
fprintf('Newton method:\n');
fprintf('x = (%f, %f)\n', x1(1), x1(2));
fprintf('fval = %f\n\n', fval1);
fprintf('Conjugate gradient method:\n');
fprintf('x = (%f, %f)\n', x2(1), x2(2));
fprintf('fval = %f\n', fval2);
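For reference, the Rosenbrock function has a unique global minimum at x* = (1, 1) with f(x*) = 0, so a successful run of either routine should report a point near (1, 1) and a function value close to zero.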
% Newton's method: solve H*dx = -g for the Newton step and iterate until the
% step is smaller than the tolerance
function [x, fval, iter] = newton(f, grad_f, hess_f, x0)
    max_iter = 100;
    tol = 1e-6;
    x = x0;
    iter = 0;
    while iter < max_iter
        iter = iter + 1;
        dx = -hess_f(x) \ grad_f(x);   % Newton direction
        x = x + dx;
        fval = f(x);
        if norm(dx) < tol
            break;
        end
    end
end
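The newton routine above always takes the full Newton step, which works from many starting points but can overshoot or move uphill when the Hessian is indefinite. Below is a minimal sketch of a damped variant with an Armijo backtracking line search; the function name newton_ls and the line-search constants are illustrative choices, not part of the original code.

% Damped Newton (sketch): same Newton direction, but the step length is found
% by Armijo backtracking instead of always taking the full step.
function [x, fval, iter] = newton_ls(f, grad_f, hess_f, x0)
    max_iter = 100;
    tol = 1e-6;
    x = x0;
    fval = f(x);
    iter = 0;
    while iter < max_iter
        iter = iter + 1;
        g = grad_f(x);
        dx = -hess_f(x) \ g;            % Newton direction
        if g'*dx >= 0                   % not a descent direction: fall back to -g
            dx = -g;
        end
        t = 1;                          % start from the full Newton step
        while f(x + t*dx) > fval + 1e-4*t*(g'*dx) && t > 1e-12
            t = t/2;                    % halve the step until sufficient decrease
        end
        x = x + t*dx;
        fval = f(x);
        if norm(t*dx) < tol
            break;
        end
    end
end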
% Conjugate gradient method: the step size is the exact minimizer of the local
% quadratic model along d, and beta makes the new direction conjugate to d with
% respect to the Hessian at the new point (so the routine needs hess_f too)
function [x, fval, iter] = conjugate_gradient(f, grad_f, hess_f, x0)
    max_iter = 100;
    tol = 1e-6;
    x = x0;
    g = grad_f(x);
    d = -g;
    fval = f(x);
    iter = 0;
    while iter < max_iter
        iter = iter + 1;
        H = hess_f(x);                   % Hessian at the current point
        alpha = -(g'*d) / (d'*H*d);      % exact step for the quadratic model
        x = x + alpha*d;
        g_new = grad_f(x);
        H = hess_f(x);                   % Hessian at the new point
        beta = (g_new'*H*d) / (d'*H*d);  % conjugacy of the new direction and d w.r.t. H
        d = -g_new + beta*d;
        g = g_new;
        fval = f(x);
        if norm(g) < tol
            break;
        end
    end
end
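Because the routine above evaluates the Hessian at every iteration, it behaves more like a Newton-type method than the usual nonlinear conjugate gradient. A Hessian-free alternative replaces the exact step with a backtracking line search and uses, for example, the Fletcher-Reeves formula for beta. The sketch below is one such variant; the name conjugate_gradient_fr, the iteration budget, and the line-search constants are assumptions for illustration.

% Hessian-free nonlinear conjugate gradient (Fletcher-Reeves) with an Armijo
% backtracking line search (sketch).
function [x, fval, iter] = conjugate_gradient_fr(f, grad_f, x0)
    max_iter = 2000;
    tol = 1e-6;
    x = x0;
    g = grad_f(x);
    d = -g;
    fval = f(x);
    iter = 0;
    while iter < max_iter && norm(g) >= tol
        iter = iter + 1;
        if g'*d >= 0                    % restart with steepest descent if needed
            d = -g;
        end
        alpha = 1;                      % Armijo backtracking along d
        while f(x + alpha*d) > fval + 1e-4*alpha*(g'*d) && alpha > 1e-12
            alpha = alpha/2;
        end
        x = x + alpha*d;
        g_new = grad_f(x);
        beta = (g_new'*g_new) / (g'*g); % Fletcher-Reeves beta
        d = -g_new + beta*d;
        g = g_new;
        fval = f(x);
    end
end

To try it in the script above, call [x2, fval2, ~] = conjugate_gradient_fr(f, grad_f, x0) in place of the Hessian-based version; it typically needs more iterations per step saved on Hessian evaluations.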