
Source Code for Module parsimony.algorithms.utils

# -*- coding: utf-8 -*-
"""
The :mod:`parsimony.algorithms.utils` module contains auxiliary algorithms.

Algorithms must not store state. I.e., if they are classes, do not keep
references to objects with state in the algorithm objects. It should be
possible to copy and share algorithms between e.g. estimators, and thus they
should not depend on any state.

Created on Thu Mar 31 17:25:01 2014

Copyright (c) 2013-2014, CEA/DSV/I2BM/Neurospin. All rights reserved.

@author:  Tommy Löfstedt
@email:   lofstedt.tommy@gmail.com
@license: BSD 3-clause.
"""
import numpy as np

try:
    from . import bases  # Only works when imported as a package.
except (ImportError, ValueError):
    import parsimony.algorithms.bases as bases  # When run as a program.
import parsimony.utils.consts as consts
import parsimony.functions.penalties as penalties
import parsimony.functions.properties as properties

__all__ = ["Info",
           "Bisection", "NewtonRaphson",
           "BacktrackingLineSearch"]


# TODO: This class should be replaced with Enum.
class Info(object):
    """Enum-like class for information constants.

    Fields may _NOT_ be None!

    This class will be replaced with Enum, so do not rely on the actual values
    of the fields. Never use the string "ok", always use Info.ok.
    """
    ok = "ok"  # Did everything go well?
    converged = "converged"  # Did the algorithm converge?
    num_iter = "num_iter"  # Number of iterations.
    time = "time"  # Time of e.g. every iteration.
    fvalue = "fvalue"  # Function value at e.g. every iteration. Deprecated!
    func_val = "func_val"  # Function value at e.g. every iteration.
    smooth_func_val = "smooth_func_val"  # Smoothed function value.
    gap = "gap"  # The gap at e.g. every iteration.
    mu = "mu"  # Smoothing constant, or other parameter, at every iteration.
    parameter = "parameter"  # Parameter(s), at e.g. every iteration.
    bound = "bound"  # Upper bound at e.g. every iteration.
    beta = "beta"  # E.g. the start vector used.
    betak = "betak"  # The final found vector.
    beta_start = "beta_start"  # The start vector used.
    continuations = "continuations"  # In continuation: Number of continuations.
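

# A minimal usage sketch, not part of the original module: the Info constants
# are meant to be passed in an algorithm's ``info`` argument and read back
# after ``run``. The ``info_get`` accessor is assumed to be the one provided
# by bases.InformationAlgorithm.
def _example_info_usage(algorithm, function):
    """Run ``algorithm`` (e.g. a Bisection created with
    info=[Info.num_iter, Info.converged]) and return the collected info.
    """
    algorithm.run(function)

    num_iter = algorithm.info_get(Info.num_iter)
    converged = algorithm.info_get(Info.converged)

    return num_iter, converged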


class Bisection(bases.ExplicitAlgorithm,
                bases.IterativeAlgorithm,
                bases.InformationAlgorithm):
    """Finds a root of the function, assumed to lie on the line segment
    between two points.

    Assumes a function f(x) such that f(x) < -eps if x is too small,
    f(x) > eps if x is too large and |f(x)| <= eps if x is just right.

    Parameters
    ----------
    force_negative : Boolean. Default is False. Will try, by running more
            iterations, to make the result negative. It may fail, but that is
            unlikely.

    parameter_positive : Boolean. Default is True. Whether the root is
            allowed to be positive.

    parameter_negative : Boolean. Default is True. Whether the root is
            allowed to be negative.

    parameter_zero : Boolean. Default is True. Whether the root is allowed
            to be zero.

    eps : Positive float. A value such that |f(x)| <= eps at the returned
            point. Only guaranteed if |f(x)| <= eps is reached in fewer than
            max_iter iterations.

    info : List or tuple of utils.Info. What, if any, extra run information
            should be stored. Default is an empty list, which means that no
            run information is computed nor returned.

    max_iter : Non-negative integer. Maximum allowed number of iterations.

    min_iter : Non-negative integer less than or equal to max_iter. Minimum
            number of iterations that must be performed. Default is 1.
    """
    INTERFACES = [properties.Function]

    INFO_PROVIDED = [Info.ok,
                     Info.num_iter,
                     Info.converged]

    def __init__(self, force_negative=False,
                 parameter_positive=True,
                 parameter_negative=True,
                 parameter_zero=True,
                 eps=consts.TOLERANCE,
                 info=[], max_iter=30, min_iter=1):

        super(Bisection, self).__init__(info=info,
                                        max_iter=max_iter,
                                        min_iter=min_iter)

        self.force_negative = force_negative
        self.parameter_positive = parameter_positive
        self.parameter_negative = parameter_negative
        self.parameter_zero = parameter_zero

        self.eps = eps

    @bases.force_reset
    @bases.check_compatibility
    def run(self, function, x=None):
        """
        Parameters
        ----------
        function : Function. The function for which a root is found.

        x : A vector or tuple with two elements. The first element is the
                lower end of the interval, for which f(x[0]) < -eps. The
                second element is the upper end of the interval, for which
                f(x[1]) > eps. If x is None, these values are found
                automatically. Finding them may be slow, though, if the
                function is expensive to evaluate.
        """
        if self.info_requested(Info.ok):
            self.info_set(Info.ok, False)

        if x is not None:
            low = x[0]
            high = x[1]
        else:
            if self.parameter_negative:
                low = -1.0
            elif self.parameter_zero:
                low = 0.0
            else:
                low = consts.TOLERANCE

            if self.parameter_positive:
                high = 1.0
            elif self.parameter_zero:
                high = 0.0
            else:
                high = -consts.TOLERANCE

        # Find start values. If the low and high values are already feasible,
        # this will just break.
        for i in range(self.max_iter):
            f_low = function.f(low)
            f_high = function.f(high)
            # print("low :", low, ", f:", f_low)
            # print("high:", high, ", f:", f_high)

            if np.sign(f_low) != np.sign(f_high):
                break
            else:
                if self.parameter_positive \
                        and self.parameter_negative \
                        and self.parameter_zero:

                    low -= abs(low) * 2.0 ** i
                    high += abs(high) * 2.0 ** i

                elif self.parameter_positive \
                        and self.parameter_negative \
                        and not self.parameter_zero:

                    low -= abs(low) * 2.0 ** i
                    high += abs(high) * 2.0 ** i

                    if abs(low) < consts.TOLERANCE:
                        low -= consts.TOLERANCE
                    if abs(high) < consts.TOLERANCE:
                        high += consts.TOLERANCE

                elif self.parameter_positive \
                        and not self.parameter_negative \
                        and self.parameter_zero:

                    low /= 2.0
                    high *= 2.0

                elif self.parameter_positive \
                        and not self.parameter_negative \
                        and not self.parameter_zero:

                    low /= 2.0
                    high *= 2.0

                    if abs(low) < consts.TOLERANCE:
                        low = consts.TOLERANCE
                    if abs(high) < consts.TOLERANCE:
                        high = consts.TOLERANCE

                elif not self.parameter_positive \
                        and self.parameter_negative \
                        and self.parameter_zero:

                    low *= 2.0
                    high /= 2.0

                elif not self.parameter_positive \
                        and self.parameter_negative \
                        and not self.parameter_zero:

                    low *= 2.0
                    high /= 2.0

                    if abs(low) < consts.TOLERANCE:
                        low = -consts.TOLERANCE
                    if abs(high) < consts.TOLERANCE:
                        high = -consts.TOLERANCE

                elif not self.parameter_positive \
                        and not self.parameter_negative \
                        and self.parameter_zero:

                    low = 0.0
                    high = 0.0

                elif not self.parameter_positive \
                        and not self.parameter_negative \
                        and not self.parameter_zero:

                    raise ValueError("Parameter must be allowed to be real!")

        # Use the bisection method to find where |f(x)| <= eps.
        neg_count = 0

        mid = (low + high) / 2.0
        f_mid = function.f(mid)
        i = 0
        while True:
            if np.sign(f_mid) == np.sign(f_low):
                low = mid
                f_low = f_mid
            else:
                high = mid
                f_high = f_mid

            mid = (low + high) / 2.0
            f_mid = function.f(mid)
            # print("i:", (i + 1), ", mid: ", mid, ", f_mid:", f_mid)

            if (abs(f_high - f_low) <= self.eps and i >= self.min_iter - 1) \
                    or i >= self.max_iter - 1:
                if self.force_negative and f_mid > 0.0:
                    if neg_count < self.max_iter:
                        neg_count += 1
                    else:
                        break
                else:
                    break
            i += 1

        if self.info_requested(Info.converged):
            if abs(f_high - f_low) <= self.eps:
                self.info_set(Info.converged, True)

                if self.force_negative and f_mid > 0.0:
                    self.info_set(Info.converged, False)
        if self.info_requested(Info.num_iter):
            self.info_set(Info.num_iter, i + 1)
        if self.info_requested(Info.ok):
            self.info_set(Info.ok, True)

        self.num_iter = i + 1

        # TODO: We already have f_mid, so we can return a better
        # approximation here!
        return mid
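

# A minimal usage sketch, not part of the original module. The _CubeRoot
# class below is hypothetical; any parsimony Function whose value changes
# sign over the given bracket can be used in the same way.
def _example_bisection():
    class _CubeRoot(properties.Function):
        """f(x) = x**3 - 2, with a single real root at x = 2**(1.0 / 3.0)."""
        def f(self, x):
            return x ** 3 - 2.0

    bisection = Bisection(eps=1e-8, max_iter=100,
                          info=[Info.num_iter, Info.converged])
    root = bisection.run(_CubeRoot(), x=[0.0, 2.0])  # f(0) < 0 < f(2).

    return root  # Approximately 1.2599.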


class NewtonRaphson(bases.ExplicitAlgorithm,
                    bases.IterativeAlgorithm,
                    bases.InformationAlgorithm):
    """Finds a root of the function, assumed to lie in the vicinity of a
    given point.

    Newton's method is not guaranteed to converge, and may diverge from the
    solution if e.g. the starting point is too far from the root.

    Problems may also arise if the gradient is too small (e.g. at a
    stationary point) on the path to the root.

    Parameters
    ----------
    force_negative : Boolean. Default is False. Will try to make the result
            negative. It may fail if the function does not behave "nicely"
            around the found point.

    parameter_positive : Boolean. Default is True. Whether the root is
            allowed to be positive.

    parameter_negative : Boolean. Default is True. Whether the root is
            allowed to be negative.

    parameter_zero : Boolean. Default is True. Whether the root is allowed
            to be zero.

    eps : Positive float. A small value used in the stopping criterion. The
            stopping criterion is only guaranteed to be fulfilled if the
            method converges in fewer than max_iter iterations.

    info : List or tuple of utils.Info. What, if any, extra run information
            should be stored. Default is an empty list, which means that no
            run information is computed nor returned.

    max_iter : Non-negative integer. Maximum allowed number of iterations.

    min_iter : Non-negative integer less than or equal to max_iter. Minimum
            number of iterations that must be performed. Default is 1.
    """
    INTERFACES = [properties.Function,
                  properties.Gradient]

    INFO_PROVIDED = [Info.ok,
                     Info.num_iter,
                     Info.converged]

    def __init__(self, force_negative=False,
                 parameter_positive=True,
                 parameter_negative=True,
                 parameter_zero=True,
                 eps=consts.TOLERANCE,
                 info=[], max_iter=30, min_iter=1):

        super(NewtonRaphson, self).__init__(info=info,
                                            max_iter=max_iter,
                                            min_iter=min_iter)

        self.force_negative = force_negative
        self.parameter_positive = parameter_positive
        self.parameter_negative = parameter_negative
        self.parameter_zero = parameter_zero

        self.eps = eps

    @bases.force_reset
    @bases.check_compatibility
    def run(self, function, x=None):
        """
        Parameters
        ----------
        function : Function. The function for which a root should be found.

        x : Float. The starting point of the Newton-Raphson algorithm. Should
                be "close" to the root.
        """
        if self.info_requested(Info.ok):
            self.info_set(Info.ok, False)

        if x is None:
            if self.parameter_positive:
                x = 1.0
            elif self.parameter_negative:
                x = -1.0
            else:
                x = 0.0

        # Use the Newton-Raphson algorithm to find a root of f(x).
        i = 0
        while True:
            x_ = x
            f = function.f(x_)
            df = function.grad(x_)
            x = x_ - f / df
            # TODO: Handle the other cases!
            if not self.parameter_negative \
                    and not self.parameter_zero \
                    and self.parameter_positive \
                    and x < consts.TOLERANCE:
                x = consts.TOLERANCE
            elif not self.parameter_negative \
                    and self.parameter_zero \
                    and self.parameter_positive \
                    and x < 0.0:
                x = 0.0

            # TODO: We seek a root, i.e. where f(x) = 0. The stopping
            # criterion should (could?) thus be abs(f(x)) <= eps!
            if (abs(x - x_) <= self.eps and i >= self.min_iter - 1) \
                    or i >= self.max_iter - 1:
                if self.force_negative:
                    f = function.f(x)
                    if f > 0.0:
                        df = function.grad(x)
                        # We assume that we are within |x_opt - x| < eps of
                        # the root, i.e. that the root is within the interval
                        # [x_opt - eps, x_opt + eps]. We are at x_opt + eps or
                        # x_opt - eps. Then we go to x_opt - 0.5 * eps or
                        # x_opt + 0.5 * eps, respectively.
                        x -= 1.5 * (f / df)
                        # f = function.f(x)
                break

            i += 1

        if self.info_requested(Info.converged):
            if abs(x - x_) <= self.eps:  # TODO: Stopping criterion. See above!
                self.info_set(Info.converged, True)

                if self.force_negative:
                    f = function.f(x)
                    if f > 0.0:
                        self.info_set(Info.converged, False)
        if self.info_requested(Info.num_iter):
            self.info_set(Info.num_iter, i + 1)
        if self.info_requested(Info.ok):
            self.info_set(Info.ok, True)

        self.num_iter = i + 1

        return x
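

# A minimal usage sketch, not part of the original module. The _Square class
# is hypothetical; any parsimony Function that also implements Gradient can
# be used in the same way.
def _example_newton_raphson():
    class _Square(properties.Function, properties.Gradient):
        """f(x) = x**2 - 4, with roots at -2 and 2."""
        def f(self, x):
            return x ** 2 - 4.0

        def grad(self, x):
            return 2.0 * x

    newton = NewtonRaphson(eps=1e-8, max_iter=100,
                           parameter_negative=False)
    root = newton.run(_Square(), x=1.0)  # Iterates towards the root at x = 2.

    return root  # Approximately 2.0.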


class BacktrackingLineSearch(bases.ExplicitAlgorithm):
    """Finds a step length a that fulfills a given descent criterion.
    """
    INTERFACES = [properties.Function,
                  properties.Gradient]

    def __init__(self, condition=None,
                 output=False,
                 max_iter=30, min_iter=1,
                 eps=consts.TOLERANCE):  # Note that eps is never used!
        """
        Parameters
        ----------
        condition : The class of the descent condition. If not given, defaults
                to the SufficientDescentCondition.

        output : Boolean. Whether or not to return additional output.

        max_iter : Non-negative integer. The maximum allowed number of
                iterations.

        min_iter : Non-negative integer, min_iter <= max_iter. The minimum
                number of iterations that must be made.
        """
        self.condition = condition
        if self.condition is None:
            self.condition = penalties.SufficientDescentCondition
        self.output = output
        self.max_iter = max_iter
        self.min_iter = min_iter

    def run(self, function, x, p, rho=0.5, a=1.0, condition_params=dict()):
        """Finds the step length for a descent algorithm.

        Parameters
        ----------
        function : A loss function. The function to minimise.

        x : Numpy array. The current point.

        p : Numpy array. The descent direction.

        rho : Float, 0 < rho < 1. The rate at which to decrease a in each
                iteration. A smaller rho finishes faster, but may yield a
                smaller descent step.

        a : Float. The upper bound on the step length. Defaults to 1.0, which
                is suitable for e.g. Newton's method.

        condition_params : Dictionary. Parameters for the descent condition.
        """
        self.check_compatibility(function, self.INTERFACES)

        line_search = self.condition(function, p, **condition_params)
        it = 0
        while True:
            if line_search.feasible((x, a)):
                # print("Broke after %d iterations of %d iterations."
                #       % (it, self.max_iter))
                return a

            it += 1
            if it >= self.max_iter:
                return 0.0  # If we did not find a feasible point, don't move!

            a = a * rho
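

# A minimal usage sketch, not part of the original module. The _Quadratic
# class is hypothetical, and the condition parameter "c" is assumed to be the
# sufficient-descent constant expected by SufficientDescentCondition.
def _example_line_search():
    class _Quadratic(properties.Function, properties.Gradient):
        """f(x) = 0.5 * ||x||**2, minimised at the origin."""
        def f(self, x):
            return 0.5 * float(np.sum(x ** 2))

        def grad(self, x):
            return x

    function = _Quadratic()
    x = np.array([[1.0], [1.0]])  # The current point.
    p = -function.grad(x)  # The steepest descent direction.

    line_search = BacktrackingLineSearch()
    a = line_search.run(function, x, p, rho=0.5, a=1.0,
                        condition_params={"c": 1e-4})

    return a  # The accepted step length; here a = 1.0 lands on the origin.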


if __name__ == "__main__":
    import doctest
    doctest.testmod()