@@ -7,10 +7,12 @@
 Paper: http://www.cs.ucla.edu/~weiwang/paper/ICDM12.pdf
 """
 
-from random import choice
+from __future__ import print_function, absolute_import
 import numpy as np
 import scipy.linalg
-from base_metric import BaseMetricLearner
+from random import choice
+from six.moves import xrange
+from .base_metric import BaseMetricLearner
 
 
 class LSML(BaseMetricLearner):
@@ -63,14 +65,14 @@ def fit(self, X, constraints, weights=None, prior=None, verbose=False):
     s_best = self._total_loss(self.M, prior_inv)
     step_sizes = np.logspace(-10, 0, 10)
     if verbose:
-      print 'initial loss', s_best
+      print('initial loss', s_best)
     for it in xrange(1, self.max_iter + 1):
       grad = self._gradient(self.M, prior_inv)
       grad_norm = scipy.linalg.norm(grad)
       if grad_norm < self.tol:
         break
       if verbose:
-        print 'gradient norm', grad_norm
+        print('gradient norm', grad_norm)
       M_best = None
       for step_size in step_sizes:
         step_size /= grad_norm
@@ -83,12 +85,12 @@ def fit(self, X, constraints, weights=None, prior=None, verbose=False):
           s_best = cur_s
           M_best = new_metric
       if verbose:
-        print 'iter', it, 'cost', s_best, 'best step', l_best * grad_norm
+        print('iter', it, 'cost', s_best, 'best step', l_best * grad_norm)
       if M_best is None:
         break
       self.M = M_best
     else:
-      print "Didn't converge after %d iterations. Final loss: %f" % (it, s_best)
+      print("Didn't converge after", it, "iterations. Final loss:", s_best)
     return self
 
   def _comparison_loss(self, metric):
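
The diff above is a routine Python 2/3 compatibility migration: `print` statements become `print()` calls via `from __future__ import print_function`, `xrange` comes from `six.moves` so it resolves to the built-in `range` on Python 3, and `base_metric` is imported relatively under `absolute_import`. A minimal sketch of the same pattern in isolation, runnable on both interpreters (the `demo_loop` helper is hypothetical, not part of this commit):

```python
# Sketch of the compatibility pattern this commit applies; assumes the
# `six` package is installed. `demo_loop` is a hypothetical example helper.
from __future__ import print_function
from six.moves import xrange  # xrange on Python 2, built-in range on Python 3

def demo_loop(max_iter=3, verbose=True):
  # Same 1-based loop style as LSML.fit above
  for it in xrange(1, max_iter + 1):
    if verbose:
      print('iter', it)  # a function call under both Python 2 and 3

demo_loop()
```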
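For context, the loop being edited implements LSML's gradient descent with a crude line search: each iteration tries ten logarithmically spaced step sizes, scales each by the gradient norm, and keeps whichever candidate metric gives the lowest total loss. A hedged sketch of that search in isolation (assumptions: a plain `total_loss` callable stands in for `self._total_loss`, and any projection of the candidate metric that the full implementation performs in the lines elided between the hunks is omitted):

```python
# Sketch of the step-size search from the hunks above. `line_search` and
# `total_loss` are illustrative names, not part of the metric-learn API.
import numpy as np
import scipy.linalg

def line_search(M, grad, total_loss, s_best):
  grad_norm = scipy.linalg.norm(grad)
  M_best = l_best = None
  for step_size in np.logspace(-10, 0, 10):
    step_size /= grad_norm  # normalize steps by the gradient magnitude
    new_metric = M - step_size * grad  # plain gradient step
    cur_s = total_loss(new_metric)
    if cur_s < s_best:  # keep the best candidate seen so far
      l_best, s_best, M_best = step_size, cur_s, new_metric
  return M_best, s_best, l_best
```

If no candidate improves on `s_best`, `M_best` comes back as `None`, which is the condition the outer loop uses to stop early.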