# -*- coding: utf-8 -*-
"""
The :mod:`parsimony.algorithms.gradient` module includes several algorithms
that minimise an explicit loss function while utilising the gradient of the
function.

Algorithms may not store state. I.e., if they are classes, do not keep
references to objects with state in the algorithm objects. It should be
possible to copy and share algorithms between e.g. estimators, and thus they
should not depend on any state.

Created on Wed Jun 4 15:22:50 2014

Copyright (c) 2013-2014, CEA/DSV/I2BM/Neurospin. All rights reserved.

@author: Tommy Löfstedt
@email: lofstedt.tommy@gmail.com
@license: BSD 3-clause.
"""
try:
    from . import bases
except ValueError:
    import parsimony.algorithms.bases as bases
import parsimony.utils as utils
import parsimony.utils.maths as maths
import parsimony.utils.consts as consts
from parsimony.algorithms.utils import Info
import parsimony.functions.properties as properties

__all__ = ["GradientDescent"]


class GradientDescent(bases.ExplicitAlgorithm,
                      bases.IterativeAlgorithm,
                      bases.InformationAlgorithm):
36 """The gradient descent algorithm.
37
38 Parameters
39 ----------
40 eps : Positive float. Tolerance for the stopping criterion.
41
42 info : List or tuple of utils.consts.Info. What, if any, extra run
43 information should be stored. Default is an empty list, which means
44 that no run information is computed nor returned.
45
46 max_iter : Non-negative integer. Maximum allowed number of iterations.
47 Default is 20000.
48
49 min_iter : Non-negative integer less than or equal to max_iter. Minimum
50 number of iterations that must be performed. Default is 1.
51
52 Examples
53 --------
54 >>> from parsimony.algorithms.gradient import GradientDescent
55 >>> from parsimony.functions.losses import RidgeRegression
56 >>> import numpy as np
57 >>> np.random.seed(42)
58 >>> X = np.random.rand(100, 50)
59 >>> y = np.random.rand(100, 1)
60 >>> gd = GradientDescent(max_iter=10000)
61 >>> function = RidgeRegression(X, y, k=0.0, mean=False)
62 >>> beta1 = gd.run(function, np.random.rand(50, 1))
63 >>> beta2 = np.dot(np.linalg.pinv(X), y)
64 >>> round(np.linalg.norm(beta1 - beta2), 13)
65 0.0003121557633
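
    A sketch of how to request extra run information (this assumes the
    info_get accessor from bases.InformationAlgorithm):

    >>> from parsimony.algorithms.utils import Info
    >>> gd = GradientDescent(max_iter=10000, info=[Info.num_iter])
    >>> beta3 = gd.run(function, np.random.rand(50, 1))
    >>> num_iter = gd.info_get(Info.num_iter)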
66 """
    INTERFACES = [properties.Function,
                  properties.Gradient,
                  properties.StepSize]

    INFO_PROVIDED = [Info.ok,
                     Info.num_iter,
                     Info.time,
                     Info.fvalue,
                     Info.converged]

    def __init__(self, eps=consts.TOLERANCE,
                 info=[], max_iter=20000, min_iter=1):

        super(GradientDescent, self).__init__(info=info,
                                              max_iter=max_iter,
                                              min_iter=min_iter)

        self.eps = eps

    @bases.force_reset
    @bases.check_compatibility
    def run(self, function, beta):
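        """Find the minimiser of the given function, starting at beta.

        Parameters
        ----------
        function : Function. The function to minimise.

        beta : Numpy array. The start vector.
        """
        # The body below is a minimal sketch of the gradient descent loop,
        # assuming the InformationAlgorithm base provides
        # info_requested/info_set, that the function exposes f, grad and step
        # (see INTERFACES), and that utils.time_cpu is available for timing.
        if self.info_requested(Info.ok):
            self.info_set(Info.ok, False)

        if self.info_requested(Info.time):
            t = []
        if self.info_requested(Info.fvalue):
            f = []
        if self.info_requested(Info.converged):
            self.info_set(Info.converged, False)

        betanew = betaold = beta

        for i in range(1, self.max_iter + 1):

            if self.info_requested(Info.time):
                tm = utils.time_cpu()

            # Take a gradient step with the step size suggested by the
            # function.
            step = function.step(betanew)

            betaold = betanew
            betanew = betaold - step * function.grad(betaold)

            if self.info_requested(Info.time):
                t.append(utils.time_cpu() - tm)
            if self.info_requested(Info.fvalue):
                f.append(function.f(betanew))

            # Stop when the update is small enough, but never before min_iter
            # iterations have been performed.
            if maths.norm(betanew - betaold) < self.eps \
                    and i >= self.min_iter:

                if self.info_requested(Info.converged):
                    self.info_set(Info.converged, True)

                break

        if self.info_requested(Info.num_iter):
            self.info_set(Info.num_iter, i)
        if self.info_requested(Info.time):
            self.info_set(Info.time, t)
        if self.info_requested(Info.fvalue):
            self.info_set(Info.fvalue, f)
        if self.info_requested(Info.ok):
            self.info_set(Info.ok, True)

        return betanew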


if __name__ == "__main__":
    import doctest
    doctest.testmod()