@@ -185,16 +185,19 @@ def test_patch_on_get_available_versions_for_packages(session):
185185 reason = "numpy and pandas are required" ,
186186)
187187def test_add_packages (session , local_testing_mode ):
188+ # Use numpy 2.3.1 for Python 3.13+, numpy 1.26.3 doesn't support Python 3.13
189+ numpy_version = "numpy==2.3.1" if sys.version_info >= (3, 13) else "numpy==1.26.3"
190+
188191 session .add_packages (
189192 [
190- "numpy==1.26.3",
193+ numpy_version,
191194 "pandas==2.2.3" ,
192195 "matplotlib" ,
193196 "pyyaml" ,
194197 ]
195198 )
196199 assert session .get_packages () == {
197- "numpy": "numpy==1.26.3",
200+ "numpy": numpy_version,
198201 "pandas" : "pandas==2.2.3" ,
199202 "matplotlib" : "matplotlib" ,
200203 "pyyaml" : "pyyaml" ,
@@ -210,8 +213,9 @@ def get_numpy_pandas_dateutil_version() -> str:
210213 df = session .create_dataframe ([None ]).to_df ("a" )
211214 res = df .select (call_udf (udf_name )).collect ()[0 ][0 ]
212215 # don't need to check the version of dateutil, as it can be changed on the server side
216+ expected_numpy_ver = "2.3.1" if sys.version_info >= (3, 13) else "1.26.3"
213217 assert (
214- res.startswith("1.26.3/2.2.3")
218+ res.startswith(f"{expected_numpy_ver}/2.2.3")
215219 if not local_testing_mode
216220 else res == get_numpy_pandas_dateutil_version ()
217221 )
@@ -284,7 +288,7 @@ def extract_major_minor_patch(version_string):
284288
285289@pytest .mark .udf
286290def test_add_packages_with_underscore (session ):
287- packages = ["spacy-model-en_core_web_sm", "typing_extensions"]
291+ packages = ["huggingface_hub", "typing_extensions"]
288292 count = (
289293 session .table ("information_schema.packages" )
290294 .where (col ("package_name" ).in_ (packages ))
@@ -299,10 +303,9 @@ def test_add_packages_with_underscore(session):
299303 @udf (name = udf_name , packages = packages )
300304 def check_if_package_installed () -> bool :
301305 try :
302- import spacy
306+ import huggingface_hub # noqa: F401
303307 import typing_extensions # noqa: F401
304308
305- spacy .load ("en_core_web_sm" )
306309 return True
307310 except Exception :
308311 return False
@@ -417,7 +420,7 @@ def test_add_requirements(session, resources_path, local_testing_mode):
417420
418421 session .add_requirements (test_files .test_requirements_file )
419422 assert session .get_packages () == {
420- "numpy": "numpy==1.26.3",
423+ "numpy": "numpy==2.3.1" if sys.version_info >= (3, 13) else "numpy==1.26.3",
421424 "pandas" : "pandas==2.2.3" ,
422425 }
423426
@@ -429,9 +432,10 @@ def get_numpy_pandas_version() -> str:
429432
430433 df = session .create_dataframe ([None ]).to_df ("a" )
431434 res = df .select (call_udf (udf_name ))
435+ expected_numpy_ver = "2.3.1" if sys.version_info >= (3, 13) else "1.26.3"
432436 Utils.check_answer(
433437 res,
434- [Row("1.26.3/2.2.3")]
438+ [Row(f"{expected_numpy_ver}/2.2.3")]
435439 if not local_testing_mode
436440 else [Row (f"{ numpy .__version__ } /{ pandas .__version__ } " )],
437441 )
@@ -690,7 +694,7 @@ def test_add_packages_with_native_dependency_without_force_push(session):
690694 with pytest .raises (
691695 RuntimeError , match = "Your code depends on packages that contain native code"
692696 ):
693- session.add_packages(["catboost==1.2.3"])
697+ session.add_packages(["catboost==1.2.8"])
694698
695699
696700@pytest .fixture (scope = "function" )
@@ -911,29 +915,30 @@ def test_add_requirements_with_empty_stage_as_cache_path(
911915 }
912916
913917 session .add_requirements (test_files .test_requirements_file )
918+ expected_numpy_ver = "2.3.1" if sys.version_info >= (3, 13) else "1.26.3"
914919 assert session.get_packages() == {
915- "numpy": "numpy==1.26.3",
920+ "numpy": f"numpy=={expected_numpy_ver}",
916921 "pandas" : "pandas==2.2.3" ,
917922 }
918923
919924 udf_name = Utils .random_name_for_temp_object (TempObjectType .FUNCTION )
920925
921926 # use a newer snowpark to create an old snowpark udf could lead to conflict cloudpickle.
922- # e.g. using snowpark 1.27 with cloudpickle 3.0 to create udf using snowpark 1.8, this will leads to
927+ # e.g. using snowpark 1.39 with cloudpickle 3.0 to create udf using snowpark 1.8, this will leads to
923928 # error as cloudpickle 3.0 is specified in udf creation but unsupported in snowpark 1.8
924929 # the solution is to downgrade to cloudpickle 2.2.1 in the env
925930 # TODO: SNOW-1951792, improve error experience
926- # pin cloudpickle as 1.27.0 snowpark upper bounds it to <=3.0.0
931+ # pin cloudpickle as 1.39.0 snowpark upper bounds it to <=3.0.0
927932 @udf (
928933 name = udf_name ,
929- packages=["snowflake-snowpark-python==1.27.0", "cloudpickle==3.0.0"],
934+ packages=["snowflake-snowpark-python==1.39.0", "cloudpickle==3.0.0"],
930935 )
931936 def get_numpy_pandas_version () -> str :
932937 import snowflake .snowpark as snowpark
933938
934939 return f"{ snowpark .__version__ } "
935940
936- Utils.check_answer(session.sql(f"select {udf_name}()"), [Row("1.27.0")])
941+ Utils.check_answer(session.sql(f"select {udf_name}()"), [Row("1.39.0")])
937942
938943
939944@pytest .mark .udf
0 commit comments