From 8cfe1f4840bb4767c988b344ddfab593c010aa8d Mon Sep 17 00:00:00 2001
From: Alexis Mignon
Date: Tue, 23 Jun 2015 16:52:58 +0200
Subject: [PATCH] Corrected a bug leading to infinite loops in the presence of
 a flat objective function

---
 pylearn2/optimization/batch_gradient_descent.py | 10 ++++++----
 1 file changed, 6 insertions(+), 4 deletions(-)

diff --git a/pylearn2/optimization/batch_gradient_descent.py b/pylearn2/optimization/batch_gradient_descent.py
index 8a74634f84..cf2f91bee4 100644
--- a/pylearn2/optimization/batch_gradient_descent.py
+++ b/pylearn2/optimization/batch_gradient_descent.py
@@ -350,9 +350,10 @@ def minimize(self, * inputs):
                 if self.verbose:
                     logger.info('\t{0} {1}'.format(alpha, obj))
 
-                # Use <= rather than = so if there are ties
-                # the bigger step size wins
-                if obj <= best_obj:
+                # Use < rather than <=: for a flat objective, <= makes
+                # the last (biggest) step size win every tie, which keeps
+                # triggering the step-size growth condition below forever
+                if obj < best_obj:
                     best_obj = obj
                     best_alpha = alpha
                     best_alpha_ind = ind
@@ -374,7 +375,8 @@ def minimize(self, * inputs):
                 alpha_list = [alpha / 3. for alpha in alpha_list]
                 if self.verbose:
                     logger.info('shrinking the step size')
-            elif best_alpha_ind > len(alpha_list) - 2:
+            elif best_alpha_ind >= len(alpha_list) - 1:
+                # Grow the step size if the last step size was used
                 alpha_list = [alpha * 2. for alpha in alpha_list]
                 if self.verbose:
                     logger.info('growing the step size')
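
Note (not part of the patch): the sketch below is a minimal, self-contained
reconstruction of the step-size search, with hypothetical names
(line_search_step, obj_fn, strict); the real pylearn2 loop has additional
guards not shown here. It illustrates why the old `<=` comparison never
terminates on a flat objective while the patched `<` shrinks the steps
instead.

    # Hypothetical standalone sketch of the alpha search, not pylearn2 code.
    def line_search_step(obj_fn, alpha_list, strict=True):
        """One pass over candidate step sizes; returns updated alpha_list."""
        best_obj, best_alpha_ind = obj_fn(0.), -1
        for ind, alpha in enumerate(alpha_list):
            obj = obj_fn(alpha)
            # strict=True reproduces the patched `<`; False the old `<=`
            better = (obj < best_obj) if strict else (obj <= best_obj)
            if better:
                best_obj = obj
                best_alpha_ind = ind
        if best_alpha_ind < 1:
            # no step improved (or the smallest won): shrink the steps
            alpha_list = [alpha / 3. for alpha in alpha_list]
        elif best_alpha_ind >= len(alpha_list) - 1:
            # the largest step won: grow the steps
            alpha_list = [alpha * 2. for alpha in alpha_list]
        return alpha_list

    flat = lambda alpha: 0.0  # a perfectly flat objective

    alphas = [.001, .005, .01, .05, .1]
    for _ in range(3):
        alphas = line_search_step(flat, alphas, strict=True)
    print(alphas)  # shrinks toward zero: [~3.7e-05, ...]

With strict=False, every alpha ties with best_obj, so best_alpha_ind always
lands on the last index and the alphas double on every pass, never meeting a
termination threshold; with strict=True, nothing beats best_obj on a flat
objective, best_alpha_ind stays -1, and the alphas shrink instead.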