# Diff of /trunk/escript/py_src/minimize.py

revision 989 by gross, Fri Oct 6 10:02:18 2006 UTC revision 990 by ksteube, Wed Feb 21 04:27:52 2007 UTC
# Line 14  A collection of general-purpose optimiza Line 14  A collection of general-purpose optimiza
14
15  fmin        ---      Nelder-Mead Simplex algorithm (uses only function calls)  fmin        ---      Nelder-Mead Simplex algorithm (uses only function calls)
16  fminBFGS    ---      Quasi-Newton method (uses function and gradient)  fminBFGS    ---      Quasi-Newton method (uses function and gradient)
17  fminNCG     ---      Line-search Newton Conjugate Gradient (uses function, gradient  fminNCG     ---      Line-search Newton Conjugate Gradient (uses function, gradient and hessian (if it's provided))
and hessian (if it's provided))
18
19  """  """
20  import numarray  import numarray
# Line 205  def zoom(a_lo, a_hi): Line 204  def zoom(a_lo, a_hi):
204
205
206  def line_search(f, fprime, xk, pk, gfk, args=(), c1=1e-4, c2=0.9, amax=50):  def line_search(f, fprime, xk, pk, gfk, args=(), c1=1e-4, c2=0.9, amax=50):
207      """alpha, fc, gc = line_search(f, xk, pk, gfk,      """alpha, fc, gc = line_search(f, xk, pk, gfk, args=(), c1=1e-4, c2=0.9, amax=1)
args=(), c1=1e-4, c2=0.9, amax=1)
208
209      minimize the function f(xk+alpha pk) using the line search algorithm of      minimize the function f(xk+alpha pk) using the line search algorithm of
210      Wright and Nocedal in 'Numerical Optimization', 1999, pg. 59-60      Wright and Nocedal in 'Numerical Optimization', 1999, pg. 59-60
# Line 256  def line_search(f, fprime, xk, pk, gfk, Line 254  def line_search(f, fprime, xk, pk, gfk,
254
255
256  def line_search_BFGS(f, xk, pk, gfk, args=(), c1=1e-4, alpha0=1):  def line_search_BFGS(f, xk, pk, gfk, args=(), c1=1e-4, alpha0=1):
257      """alpha, fc, gc = line_search(f, xk, pk, gfk,      """alpha, fc, gc = line_search(f, xk, pk, gfk, args=(), c1=1e-4, alpha0=1)
args=(), c1=1e-4, alpha0=1)
258
259      minimize over alpha, the function f(xk+alpha pk) using the interpolation      minimize over alpha, the function f(xk+alpha pk) using the interpolation
260      algorithm (Armijo backtracking) as suggested by      algorithm (Armijo backtracking) as suggested by
# Line 329  def approx_fhess_p(x0,p,fprime,*args): Line 326  def approx_fhess_p(x0,p,fprime,*args):
326
327
328  def fminBFGS(f, x0, fprime=None, args=(), avegtol=1e-5, maxiter=None, fulloutput=0, printmessg=1):  def fminBFGS(f, x0, fprime=None, args=(), avegtol=1e-5, maxiter=None, fulloutput=0, printmessg=1):
329      """xopt = fminBFGS(f, x0, fprime=None, args=(), avegtol=1e-5,      """xopt = fminBFGS(f, x0, fprime=None, args=(), avegtol=1e-5, maxiter=None, fulloutput=0, printmessg=1)
maxiter=None, fulloutput=0, printmessg=1)
330
331      Optimize the function, f, whose gradient is given by fprime using the      Optimize the function, f, whose gradient is given by fprime using the
332      quasi-Newton method of Broyden, Fletcher, Goldfarb, and Shanno (BFGS)      quasi-Newton method of Broyden, Fletcher, Goldfarb, and Shanno (BFGS)
# Line 410  def fminBFGS(f, x0, fprime=None, args=() Line 406  def fminBFGS(f, x0, fprime=None, args=()
406
407
408  def fminNCG(f, x0, fprime, fhess_p=None, fhess=None, args=(), avextol=1e-5, maxiter=None, fulloutput=0, printmessg=1):  def fminNCG(f, x0, fprime, fhess_p=None, fhess=None, args=(), avextol=1e-5, maxiter=None, fulloutput=0, printmessg=1):
409      """xopt = fminNCG(f, x0, fprime, fhess_p=None, fhess=None, args=(), avextol=1e-5,      """xopt = fminNCG(f, x0, fprime, fhess_p=None, fhess=None, args=(), avextol=1e-5, maxiter=None, fulloutput=0, printmessg=1)
maxiter=None, fulloutput=0, printmessg=1)
410
411      Optimize the function, f, whose gradient is given by fprime using the      Optimize the function, f, whose gradient is given by fprime using the
412      Newton-CG method.  fhess_p must compute the hessian times an arbitrary      Newton-CG method.  fhess_p must compute the hessian times an arbitrary

Legend:
 Removed from v.989 changed lines Added in v.990