Convert to MATLAB:

def _apply_linesearch_optimzation(self, update_embedding_with, grad, calc_loss, loss, **kwargs):
    self.eta = self.eta_max
    if kwargs.get('first_iter', False) and not self.linesearch_first:
        self.eta = kwargs.get('eta_first', 1)
    loss_diff = 1
    while loss_diff > 0:
        loss_diff, temp_embedding, delta = self._linesearch_once(
            update_embedding_with, grad, calc_loss, loss, **kwargs)
        if self.eta <= self.eta_min and loss_diff > 0:
            loss_diff, temp_embedding, delta = self._linesearch_once(
                update_embedding_with, grad, calc_loss, loss, **kwargs)
            loss_diff = -1
    self.eta *= 2
    update_embedding_with(new_embedding=temp_embedding)
    return delta

def _linesearch_once(self, update_embedding_with, grad, calc_loss, loss, **kwargs):
    delta = self._calc_delta(grad)
    temp_embedding = update_embedding_with(delta=delta, copy=True)
    loss_diff = calc_loss(temp_embedding) - loss
    self.eta /= 2
    return loss_diff, temp_embedding, delta
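A possible MATLAB translation is sketched below. MATLAB identifiers cannot start with an underscore, so the leading underscores are dropped from the function names. The sketch also assumes that self is a handle-class object (so updates to self.eta persist across calls) and that update_embedding_with accepts name-value arguments, mirroring the Python keyword arguments.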
function delta = apply_linesearch_optimzation(self, update_embedding_with, grad, calc_loss, loss, varargin)
    % Line-search wrapper: halve self.eta until the loss decreases, then
    % commit the accepted embedding. Optional name-value arguments mirror
    % the Python kwargs 'first_iter' and 'eta_first'.
    p = inputParser;
    p.addParameter('first_iter', false);
    p.addParameter('eta_first', 1);
    p.parse(varargin{:});

    self.eta = self.eta_max;
    if p.Results.first_iter && ~self.linesearch_first
        self.eta = p.Results.eta_first;
    end

    loss_diff = 1;
    while loss_diff > 0
        [loss_diff, temp_embedding, delta] = linesearch_once(self, update_embedding_with, grad, calc_loss, loss);
        if self.eta <= self.eta_min && loss_diff > 0
            % Step size reached its lower bound: take one more step and accept it.
            [loss_diff, temp_embedding, delta] = linesearch_once(self, update_embedding_with, grad, calc_loss, loss);
            loss_diff = -1;
        end
    end
    self.eta = self.eta * 2;   % undo the final halving of the accepted step
    update_embedding_with('new_embedding', temp_embedding);
end

function [loss_diff, temp_embedding, delta] = linesearch_once(self, update_embedding_with, grad, calc_loss, loss)
    % Try one step with the current eta and report the change in loss.
    delta = self.calc_delta(grad);
    temp_embedding = update_embedding_with('delta', delta, 'copy', true);
    loss_diff = calc_loss(temp_embedding) - loss;
    self.eta = self.eta / 2;
end
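For the self.eta updates in linesearch_once to be visible back in apply_linesearch_optimzation, self must be an instance of a handle class. A minimal sketch of such a class is shown below; the class name LinesearchOptimizer, the default property values, and the body of calc_delta are hypothetical placeholders, only the property and method names are taken from the code above.

classdef LinesearchOptimizer < handle
    properties
        eta = 1
        eta_max = 1
        eta_min = 1e-6
        linesearch_first = true
    end
    methods
        function delta = calc_delta(self, grad)
            % Hypothetical placeholder: plain gradient step scaled by the current eta.
            delta = -self.eta * grad;
        end
    end
end

The two functions above can then be added as methods of this class (either inside the classdef block or as files in an @LinesearchOptimizer folder); because handle objects are passed by reference, no explicit return of self is needed.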