
Source Code for Module parsimony.algorithms.primaldual

# -*- coding: utf-8 -*-
"""
The :mod:`parsimony.algorithms.primaldual` module includes algorithms that
exploit primal-dual techniques to minimise an explicitly given loss function.

Algorithms may not store state. I.e., if they are classes, do not keep
references to objects with state in the algorithm objects. It should be
possible to copy and share algorithms between e.g. estimators, and thus they
should not depend on any state.

Created on Wed Jun  4 15:34:42 2014

Copyright (c) 2013-2014, CEA/DSV/I2BM/Neurospin. All rights reserved.

@author:  Tommy Löfstedt
@email:   lofstedt.tommy@gmail.com
@license: BSD 3-clause.
"""
import numpy as np

try:
    from . import bases  # Only works when imported as a package.
except (ImportError, ValueError):  # ImportError on Python 3, ValueError on 2.
    import parsimony.algorithms.bases as bases  # When run as a program.
import parsimony.utils as utils
import parsimony.utils.consts as consts
from parsimony.algorithms.utils import Info
import parsimony.functions.properties as properties

__all__ = ["ExcessiveGapMethod"]


class ExcessiveGapMethod(bases.ExplicitAlgorithm,
                         bases.IterativeAlgorithm,
                         bases.InformationAlgorithm):
36 """Nesterov's excessive gap method for strongly convex functions. 37 38 Parameters 39 ---------- 40 output : Boolean. Whether or not to return extra output information. If 41 output is True, running the algorithm will return a tuple with two 42 elements. The first element is the found regression vector, and the 43 second is the extra output information. 44 45 eps : Positive float. Tolerance for the stopping criterion. 46 47 info : List or tuple of utils.consts.Info. What, if any, extra run 48 information should be stored. Default is an empty list, which means 49 that no run information is computed nor returned. 50 51 max_iter : Non-negative integer. Maximum allowed number of iterations. 52 53 min_iter : Non-negative integer less than or equal to max_iter. Minimum 54 number of iterations that must be performed. Default is 1. 55 """ 56 INTERFACES = [properties.NesterovFunction, 57 properties.GradientMap, 58 properties.DualFunction, 59 properties.StronglyConvex] 60 61 INFO_PROVIDED = [Info.ok, 62 Info.converged, 63 Info.num_iter, 64 Info.time, 65 Info.fvalue, 66 Info.mu, 67 Info.bound, 68 Info.gap, 69 Info.beta] 70
    def __init__(self, eps=consts.TOLERANCE,
                 info=[], max_iter=10000, min_iter=1,
                 simulation=False):

        super(ExcessiveGapMethod, self).__init__(info=info,
                                                 max_iter=max_iter,
                                                 min_iter=min_iter)

        self.eps = max(consts.FLOAT_EPSILON, float(eps))
        self.simulation = bool(simulation)

    @bases.force_reset
    @bases.check_compatibility
    def run(self, function, beta=None):
        """The excessive gap method for strongly convex functions.

        Parameters
        ----------
        function : The function to minimise. It contains two parts:
                function.g is the strongly convex part and function.h is
                the smoothed part of the function.

        beta : Numpy array. A start vector. This is normally not given, but
                left None, since the start vector is computed by the
                algorithm.
        """
        if self.info_requested(Info.ok):
            self.info_set(Info.ok, False)

        A = function.A()

        u = [0] * len(A)
        for i in range(len(A)):
            u[i] = np.zeros((A[i].shape[0], 1))

        # L = lambda_max(A'A) / (lambda_min(X'X) + k)
        L = function.L()
        if L < consts.TOLERANCE:
            L = consts.TOLERANCE
        mu = [2.0 * L]
        function.set_mu(mu)
        if beta is not None:
            beta0 = beta
        else:
            beta0 = function.betahat(u)  # u is zero here
        beta = beta0
        alpha = function.V(u, beta, L)  # u is zero here

        if self.info_requested(Info.time):
            t = []
        if self.info_requested(Info.fvalue):
            f = []
        if self.info_requested(Info.bound):
            bound = []
        if self.info_requested(Info.gap):
            gap = []
        if self.info_requested(Info.converged):
            self.info_set(Info.converged, False)

        k = 0
        while True:
            if self.info_requested(Info.time):
                tm = utils.time_cpu()

            tau = 2.0 / (float(k) + 3.0)

            function.set_mu(mu[k])
            alpha_hat = function.alpha(beta)
            for i in range(len(alpha_hat)):
                u[i] = (1.0 - tau) * alpha[i] + tau * alpha_hat[i]

            mu.append((1.0 - tau) * mu[k])
            betahat = function.betahat(u)
            beta = (1.0 - tau) * beta + tau * betahat
            alpha = function.V(u, betahat, L)

            # Closed form of the duality gap bound; equals 2 * M * mu[k].
            Gamma = (4.0 * function.M() * mu[0]) / ((k + 1.0) * (k + 2.0))

            if self.info_requested(Info.time):
                t.append(utils.time_cpu() - tm)
            if self.info_requested(Info.fvalue):
                mu_old = function.get_mu()
                function.set_mu(0.0)
                f.append(function.f(beta))
                function.set_mu(mu_old)
            if self.info_requested(Info.bound):
                bound.append(Gamma + function.phi(alpha, beta))
            if self.info_requested(Info.gap):
                gap.append(Gamma)

            if not self.simulation:
                if Gamma < self.eps and k >= self.min_iter - 1:

                    if self.info_requested(Info.converged):
                        self.info_set(Info.converged, True)

                    break

            if k >= self.max_iter - 1 and k >= self.min_iter - 1:
                break

            k = k + 1

        if self.info_requested(Info.num_iter):
            self.info_set(Info.num_iter, k + 1)
        if self.info_requested(Info.time):
            self.info_set(Info.time, t)
        if self.info_requested(Info.fvalue):
            self.info_set(Info.fvalue, f)
        if self.info_requested(Info.mu):
            self.info_set(Info.mu, mu)
        if self.info_requested(Info.bound):
            self.info_set(Info.bound, bound)
        if self.info_requested(Info.gap):
            self.info_set(Info.gap, gap)
        if self.info_requested(Info.beta):
            self.info_set(Info.beta, beta0)
        if self.info_requested(Info.ok):
            self.info_set(Info.ok, True)

        return beta


if __name__ == "__main__":
    import doctest
    doctest.testmod()
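
A note on the closed-form bound: inside run(), the update
mu.append((1.0 - tau) * mu[k]) with tau = 2.0 / (k + 3.0) gives, by
induction, mu[k] = 2 * mu[0] / ((k + 1) * (k + 2)), so the quantity
Gamma = 4.0 * function.M() * mu[0] / ((k + 1.0) * (k + 2.0)) computed each
iteration is exactly 2 * M * mu[k]. The following self-contained snippet
checks this identity numerically; it is an illustration, not part of the
module:

    # Verify mu[k] = 2 * mu[0] / ((k + 1) * (k + 2)) for the recursion in
    # run(): mu[k + 1] = (1 - tau_k) * mu[k], with tau_k = 2 / (k + 3).
    mu0 = 2.0  # any positive start value; the algorithm uses mu[0] = 2 * L
    mu = mu0
    for k in range(100):
        closed_form = 2.0 * mu0 / ((k + 1.0) * (k + 2.0))
        assert abs(mu - closed_form) < 1e-12
        mu *= 1.0 - 2.0 / (k + 3.0)  # the per-iteration mu update
    print("closed form verified for k = 0..99")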
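
Usage sketch: run() expects a function object implementing all of the
INTERFACES listed above; A(), L(), M(), set_mu(), get_mu(), alpha(),
betahat(), V(), phi() and f() are all called during a run. The sketch below
is one hedged way to drive the algorithm directly; build_function is a
hypothetical placeholder for constructing such a combined loss from
parsimony.functions, not a confirmed API, and info_get is assumed from
bases.InformationAlgorithm.

    from parsimony.algorithms.primaldual import ExcessiveGapMethod
    from parsimony.algorithms.utils import Info

    # Hypothetical helper: returns a loss implementing NesterovFunction,
    # GradientMap, DualFunction and StronglyConvex (see INTERFACES above).
    function = build_function(X, y)  # illustrative name, not a real API

    egm = ExcessiveGapMethod(eps=5e-8, max_iter=10000,
                             info=[Info.num_iter, Info.gap, Info.fvalue])
    beta = egm.run(function)  # the start vector is computed internally

    # info_get is assumed to be provided by bases.InformationAlgorithm.
    print(egm.info_get(Info.num_iter))  # iterations performed
    print(egm.info_get(Info.gap)[-1])   # final value of the bound Gamma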