@@ -348,9 +348,9 @@ def test_no_targets_no_thresholds():
348 348     sc._no_targets_no_thresholds(metrics, returns, h2o_model=True)
349 349     assert (
350 350         sc.score_code == f"{'':4}Classification = prediction[1][0]\n"
351   -         f"{'':4}Proba_A = prediction[1][1]\n"
352   -         f"{'':4}Proba_B = prediction[1][2]\n"
353   -         f"{'':4}Proba_C = prediction[1][3]\n\n"
351   +         f"{'':4}Proba_A = float(prediction[1][1])\n"
352   +         f"{'':4}Proba_B = float(prediction[1][2])\n"
353   +         f"{'':4}Proba_C = float(prediction[1][3])\n\n"
354 354         f"{'':4}return Classification, Proba_A, Proba_B, Proba_C"
355 355     )
356 356
@@ -441,7 +441,7 @@ def test_binary_target():
441 441     metrics = ["Classification", "Probability"]
442 442     with pytest.warns():
443 443         sc._binary_target(metrics, ["A", "B"], [], threshold=0.7, h2o_model=True)
444   -     assert sc.score_code.endswith("return prediction[1][0], prediction[1][2]")
444   +     assert sc.score_code.endswith("return prediction[1][0], float(prediction[1][2])")
445 445     sc.score_code = ""
446 446
447 447     with pytest.warns():
@@ -478,7 +478,7 @@ def test_binary_target():
478 478     metrics = ["C", "P1", "P2"]
479 479     sc._binary_target(metrics, ["A", "B"], [1, 2, "3"], h2o_model=True)
480 480     assert sc.score_code.endswith(
481   -         "return prediction[1][0], prediction[1][1], " "prediction[1][2]"
481   +         "return prediction[1][0], float(prediction[1][1]), float(prediction[1][2])"
482 482     )
483 483     sc.score_code = ""
484 484
0 commit comments