
Source Code for Module parsimony.algorithms.gradient

# -*- coding: utf-8 -*-
"""
The :mod:`parsimony.algorithms.gradient` module includes several algorithms
that minimise an explicit loss function while utilising the gradient of the
function.

Algorithms may not store state. I.e., if they are classes, do not keep
references to objects with state in the algorithm objects. It should be
possible to copy and share algorithms between e.g. estimators, and thus they
should not depend on any state.

Created on Wed Jun  4 15:22:50 2014

Copyright (c) 2013-2014, CEA/DSV/I2BM/Neurospin. All rights reserved.

@author:  Tommy Löfstedt
@email:   lofstedt.tommy@gmail.com
@license: BSD 3-clause.
"""
try:
    from . import bases  # Only works when imported as a package.
except ValueError:
    import parsimony.algorithms.bases as bases  # When run as a program.
import parsimony.utils as utils
import parsimony.utils.maths as maths
import parsimony.utils.consts as consts
from parsimony.algorithms.utils import Info
import parsimony.functions.properties as properties

__all__ = ["GradientDescent"]


class GradientDescent(bases.ExplicitAlgorithm,
                      bases.IterativeAlgorithm,
                      bases.InformationAlgorithm):
36 """The gradient descent algorithm. 37 38 Parameters 39 ---------- 40 eps : Positive float. Tolerance for the stopping criterion. 41 42 info : List or tuple of utils.consts.Info. What, if any, extra run 43 information should be stored. Default is an empty list, which means 44 that no run information is computed nor returned. 45 46 max_iter : Non-negative integer. Maximum allowed number of iterations. 47 Default is 20000. 48 49 min_iter : Non-negative integer less than or equal to max_iter. Minimum 50 number of iterations that must be performed. Default is 1. 51 52 Examples 53 -------- 54 >>> from parsimony.algorithms.gradient import GradientDescent 55 >>> from parsimony.functions.losses import RidgeRegression 56 >>> import numpy as np 57 >>> np.random.seed(42) 58 >>> X = np.random.rand(100, 50) 59 >>> y = np.random.rand(100, 1) 60 >>> gd = GradientDescent(max_iter=10000) 61 >>> function = RidgeRegression(X, y, k=0.0, mean=False) 62 >>> beta1 = gd.run(function, np.random.rand(50, 1)) 63 >>> beta2 = np.dot(np.linalg.pinv(X), y) 64 >>> round(np.linalg.norm(beta1 - beta2), 13) 65 0.0003121557633 66 """ 67 INTERFACES = [properties.Function, 68 properties.Gradient, 69 properties.StepSize] 70 71 INFO_PROVIDED = [Info.ok, 72 Info.num_iter, 73 Info.time, 74 Info.fvalue, 75 Info.converged] 76

    def __init__(self, eps=consts.TOLERANCE,
                 info=[], max_iter=20000, min_iter=1):
        super(GradientDescent, self).__init__(info=info,
                                              max_iter=max_iter,
                                              min_iter=min_iter)

        self.eps = eps

    @bases.force_reset
    @bases.check_compatibility
    def run(self, function, beta):
88 """Find the minimiser of the given function, starting at beta. 89 90 Parameters 91 ---------- 92 function : Function. The function to minimise. 93 94 beta : Numpy array. The start vector. 95 """ 96 if self.info_requested(Info.ok): 97 self.info_set(Info.ok, False) 98 99 step = function.step(beta) 100 101 betanew = betaold = beta 102 103 if self.info_requested(Info.time): 104 t = [] 105 if self.info_requested(Info.fvalue): 106 f = [] 107 if self.info_requested(Info.converged): 108 self.info_set(Info.converged, False) 109 110 for i in xrange(1, self.max_iter + 1): 111 112 if self.info_requested(Info.time): 113 tm = utils.time_cpu() 114 115 step = function.step(betanew) 116 117 betaold = betanew 118 betanew = betaold - step * function.grad(betaold) 119 120 if self.info_requested(Info.time): 121 t.append(utils.time_cpu() - tm) 122 if self.info_requested(Info.fvalue): 123 f.append(function.f(betanew)) 124 125 if maths.norm(betanew - betaold) < self.eps \ 126 and i >= self.min_iter: 127 128 if self.info_requested(Info.converged): 129 self.info_set(Info.converged, True) 130 131 break 132 133 if self.info_requested(Info.num_iter): 134 self.info_set(Info.num_iter, i) 135 if self.info_requested(Info.time): 136 self.info_set(Info.time, t) 137 if self.info_requested(Info.fvalue): 138 self.info_set(Info.fvalue, f) 139 if self.info_requested(Info.ok): 140 self.info_set(Info.ok, True) 141 142 return betanew

if __name__ == "__main__":
    import doctest
    doctest.testmod()
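
The algorithm only computes the run information that is explicitly requested
through the info argument, and only keys listed in INFO_PROVIDED may be
requested. The following is a minimal usage sketch, not part of the module
above, that combines the doctest example from the class docstring with the
run-information mechanism; it assumes that the info_get accessor inherited
from bases.InformationAlgorithm is available for reading back the collected
values.

    # Hypothetical usage sketch -- not part of the module above.
    import numpy as np

    from parsimony.algorithms.gradient import GradientDescent
    from parsimony.algorithms.utils import Info
    from parsimony.functions.losses import RidgeRegression

    np.random.seed(42)
    X = np.random.rand(100, 50)
    y = np.random.rand(100, 1)

    # Request extra run information when constructing the algorithm.
    gd = GradientDescent(max_iter=10000,
                         info=[Info.num_iter, Info.fvalue, Info.converged])
    function = RidgeRegression(X, y, k=0.0, mean=False)
    beta = gd.run(function, np.random.rand(50, 1))

    # Assumes the info_get accessor from bases.InformationAlgorithm.
    print(gd.info_get(Info.num_iter))    # Iterations actually performed.
    print(gd.info_get(Info.converged))   # True if the eps criterion was met.
    print(gd.info_get(Info.fvalue)[-1])  # Function value at the final iterate.

Requesting only the keys that are actually needed keeps the overhead low; for
instance, Info.fvalue triggers one extra function evaluation per iteration.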