@@ -2816,5 +2816,79 @@ def testSetInitializedWithRestore(self):
28162816 result = sess .run (var ._is_initialized_op )
28172817 self .assertEqual (True , result )
28182818
2819+ def testCountsTensor (self ):
2820+ os .environ ["TF_RECORD_FREQ" ] = "1"
2821+ checkpoint_directory = self .get_temp_dir ()
2822+ ckpt_path = os .path .join (checkpoint_directory , "model.ckpt" )
2823+ with ops .Graph ().as_default () as g , ops .device ('/cpu:0' ):
2824+ var = variable_scope .get_embedding_variable ("var_1" ,
2825+ embedding_dim = 3 )
2826+ sp1 = sparse_tensor .SparseTensor (
2827+ indices = [[0 ,0 ],[1 ,0 ],[2 ,0 ],[3 ,0 ],[4 ,0 ],[5 ,0 ]],
2828+ values = math_ops .cast ([0 ,0 ,0 ,1 ,1 ,2 ], dtypes .int64 ),
2829+ dense_shape = [6 , 1 ])
2830+ sp2 = sparse_tensor .SparseTensor (
2831+ indices = [[0 ,0 ],[1 ,0 ],[2 ,0 ],[3 ,0 ],[4 ,0 ],[5 ,0 ]],
2832+ values = math_ops .cast ([3 ,3 ,3 ,4 ,4 ,1 ], dtypes .int64 ),
2833+ dense_shape = [6 , 1 ])
2834+ emb1 = embedding_ops .embedding_lookup_sparse (var , sp1 , None )
2835+ emb2 = embedding_ops .embedding_lookup_sparse (var , sp2 , None )
2836+ emb = emb1 + emb2
2837+ fun = math_ops .multiply (emb , 2.0 , name = 'multiply' )
2838+ loss = math_ops .reduce_sum (fun , name = 'reduce_sum' )
2839+ gs = training_util .get_or_create_global_step ()
2840+ opt = adagrad_decay .AdagradDecayOptimizer (0.1 , gs )
2841+ g_v = opt .compute_gradients (loss )
2842+ train_op = opt .apply_gradients (g_v )
2843+ saver = saver_module .Saver ()
2844+ init = variables .global_variables_initializer ()
2845+ with self .test_session (graph = g ) as sess :
2846+ sess .run ([init ])
2847+ sess .run (train_op )
2848+ saver .save (sess , ckpt_path )
2849+
2850+ for name , shape in checkpoint_utils .list_variables (ckpt_path ):
2851+ if name == "var_1-freqs" :
2852+ value = checkpoint_utils .load_variable (ckpt_path , name )
2853+ self .assertAllEqual (value , [3 , 3 , 1 , 3 , 2 ])
2854+
2855+ def testCountsTensorWithGradientDescent (self ):
2856+ os .environ ["TF_RECORD_FREQ" ] = "1"
2857+ checkpoint_directory = self .get_temp_dir ()
2858+ ckpt_path = os .path .join (checkpoint_directory , "model.ckpt" )
2859+ with ops .Graph ().as_default () as g , ops .device ('/cpu:0' ):
2860+ var = variable_scope .get_embedding_variable ("var_1" ,
2861+ embedding_dim = 3 )
2862+ sp1 = sparse_tensor .SparseTensor (
2863+ indices = [[0 ,0 ],[1 ,0 ],[2 ,0 ],[3 ,0 ],[4 ,0 ],[5 ,0 ]],
2864+ values = math_ops .cast ([0 ,0 ,0 ,1 ,1 ,2 ], dtypes .int64 ),
2865+ dense_shape = [6 , 1 ])
2866+ sp2 = sparse_tensor .SparseTensor (
2867+ indices = [[0 ,0 ],[1 ,0 ],[2 ,0 ],[3 ,0 ],[4 ,0 ],[5 ,0 ]],
2868+ values = math_ops .cast ([3 ,3 ,3 ,4 ,4 ,1 ], dtypes .int64 ),
2869+ dense_shape = [6 , 1 ])
2870+ emb1 = embedding_ops .embedding_lookup_sparse (var , sp1 , None )
2871+ emb2 = embedding_ops .embedding_lookup_sparse (var , sp2 , None )
2872+ emb = emb1 + emb2
2873+ fun = math_ops .multiply (emb , 2.0 , name = 'multiply' )
2874+ loss = math_ops .reduce_sum (fun , name = 'reduce_sum' )
2875+ gs = training_util .get_or_create_global_step ()
2876+ opt = gradient_descent .GradientDescentOptimizer (0.1 )
2877+ g_v = opt .compute_gradients (loss )
2878+ train_op = opt .apply_gradients (g_v )
2879+ saver = saver_module .Saver ()
2880+ init = variables .global_variables_initializer ()
2881+ with self .test_session (graph = g ) as sess :
2882+ sess .run ([init ])
2883+ sess .run (train_op )
2884+ saver .save (sess , ckpt_path )
2885+
2886+ for name , shape in checkpoint_utils .list_variables (ckpt_path ):
2887+ if name == "var_1-freqs" :
2888+ value = checkpoint_utils .load_variable (ckpt_path , name )
2889+ self .assertAllEqual (value , [3 , 3 , 1 , 3 , 2 ])
2890+
2891+ del os .environ ["TF_RECORD_FREQ" ]
2892+
# Standard TF test entry point: run all test cases in this file.
if __name__ == "__main__":
  googletest.main()