""" Unit tests for optimization routines
"""

from numpy.testing import *

from scipy import optimize
from numpy import array, zeros, float64, dot, log, exp

class test_optimize(ScipyTestCase):
    """ Test case for a simple constrained entropy maximization problem
    (the machine translation example of Berger et al. in
    Computational Linguistics, vol. 22, num. 1, pp. 39--72, 1996.)
    """
    def setUp(self):
        self.F = array([[1,1,1],[1,1,0],[1,0,1],[1,0,0],[1,0,0]])
        self.K = array([1., 0.3, 0.5])
        self.startparams = zeros(3, float64)
        self.solution = array([0., -0.524869316, 0.487525860])
        self.maxiter = 1000    # generous iteration cap passed to every solver (assumed value)
        self.funccalls = 0     # incremented by func() to guard against runaway optimizers
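
    # The objective evaluated by func() is the maximum-entropy dual
    #     f(x) = log Z(x) - K . x,   where  Z(x) = sum_i exp(F[i] . x),
    # and grad() returns its gradient  F^T p(x) - K  with  p(x) = exp(F x) / Z(x).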

    def func(self, x):
        self.funccalls += 1
        if self.funccalls > 6000:
            raise RuntimeError("too many iterations in optimization routine")
        log_pdot = dot(self.F, x)
        logZ = log(sum(exp(log_pdot)))
        f = logZ - dot(self.K, x)
        return f

    def grad(self, x):
        log_pdot = dot(self.F, x)
        logZ = log(sum(exp(log_pdot)))
        p = exp(log_pdot - logZ)
        return dot(self.F.transpose(), p) - self.K

    def check_cg(self):
        """ conjugate gradient optimization routine
        """
        retval = optimize.fmin_cg(self.func, self.startparams, self.grad, (), \
                                  maxiter=self.maxiter, \
                                  full_output=True, disp=False, retall=False)

        (params, fopt, func_calls, grad_calls, warnflag) = retval

        err = abs(self.func(params) - self.func(self.solution))
        #print "CG: Difference is: " + str(err)
        assert err < 1e-6

    def check_bfgs(self):
        """ Broyden-Fletcher-Goldfarb-Shanno optimization routine
        """
        retval = optimize.fmin_bfgs(self.func, self.startparams, self.grad, \
                                    args=(), maxiter=self.maxiter, \
                                    full_output=True, disp=False, retall=False)

        (params, fopt, gopt, Hopt, func_calls, grad_calls, warnflag) = retval

        err = abs(self.func(params) - self.func(self.solution))
        #print "BFGS: Difference is: " + str(err)
        assert err < 1e-6

    def check_powell(self):
        """ Powell (direction set) optimization routine
        """
        retval = optimize.fmin_powell(self.func, self.startparams, \
                                      args=(), maxiter=self.maxiter, \
                                      full_output=True, disp=False, retall=False)

        (params, fopt, direc, numiter, func_calls, warnflag) = retval

        err = abs(self.func(params) - self.func(self.solution))
        #print "Powell: Difference is: " + str(err)
        assert err < 1e-6

    def check_neldermead(self):
        """ Nelder-Mead simplex algorithm
        """
        retval = optimize.fmin(self.func, self.startparams, \
                               args=(), maxiter=self.maxiter, \
                               full_output=True, disp=False, retall=False)

        (params, fopt, numiter, func_calls, warnflag) = retval

        err = abs(self.func(params) - self.func(self.solution))
        #print "Nelder-Mead: Difference is: " + str(err)
        assert err < 1e-6

    def check_ncg(self):
        """ line-search Newton conjugate gradient optimization routine
        """
        retval = optimize.fmin_ncg(self.func, self.startparams, self.grad, \
                                   args=(), maxiter=self.maxiter, \
                                   full_output=False, disp=False, retall=False)

        params = retval    # with full_output=False only the solution vector is returned

        err = abs(self.func(params) - self.func(self.solution))
        #print "NCG: Difference is: " + str(err)
        assert err < 1e-6

    def check_l_bfgs_b(self):
        """ limited-memory bound-constrained BFGS algorithm
        """
        retval = optimize.fmin_l_bfgs_b(self.func, self.startparams, self.grad, \
                                        args=(), maxfun=self.maxiter)

        (params, fopt, d) = retval

        err = abs(self.func(params) - self.func(self.solution))
        #print "LBFGSB: Difference is: " + str(err)
        assert err < 1e-6

if __name__ == "__main__":
    ScipyTest().run()    # old-style test collector, assumed to match ScipyTestCase above