 from chispa.dataframe_comparer import assert_df_equality  # type: ignore
 from databricks.labs.dqx.geo.check_funcs import (
-    has_area_equal_to,
-    has_area_not_equal_to,
-    has_area_greater_than,
-    has_area_less_than,
-    has_num_points_equal_to,
-    has_num_points_not_equal_to,
-    has_num_points_greater_than,
-    has_num_points_less_than,
+    is_area_equal_to,
+    is_area_not_equal_to,
+    is_area_not_less_than,
+    is_area_not_greater_than,
+    is_num_points_equal_to,
+    is_num_points_not_equal_to,
+    is_num_points_not_less_than,
+    is_num_points_not_greater_than,
     has_dimension,
     has_x_coordinate_between,
     has_y_coordinate_between,
@@ -408,7 +408,7 @@ def test_has_y_coordinate_between(skip_if_runtime_not_geo_compatible, spark):
     assert_df_equality(actual, expected, ignore_nullable=True)
 
 
-def test_has_area_equal_to(skip_if_runtime_not_geo_compatible, spark):
+def test_is_area_equal_to(skip_if_runtime_not_geo_compatible, spark):
     test_df = spark.sql(
         """
         SELECT geom, geog FROM VALUES
@@ -422,9 +422,9 @@ def test_has_area_equal_to(skip_if_runtime_not_geo_compatible, spark):
     )
 
     actual = test_df.select(
-        has_area_equal_to("geom", 0.0).alias("basic_geometry"),
-        has_area_equal_to("geom", 1.0, srid=4326).alias("geometry_srid"),
-        has_area_equal_to("geog", 0.0, geodesic=True).alias("geography_geodesic"),
+        is_area_equal_to("geom", 0.0).alias("basic_geometry"),
+        is_area_equal_to("geom", 1.0, srid=4326).alias("geometry_srid"),
+        is_area_equal_to("geog", 0.0, geodesic=True).alias("geography_geodesic"),
     )
 
     checked_schema = "basic_geometry: string, geometry_srid: string, geography_geodesic: string"
@@ -454,7 +454,7 @@ def test_has_area_equal_to(skip_if_runtime_not_geo_compatible, spark):
     assert_df_equality(actual, expected, ignore_nullable=True)
 
 
-def test_has_area_not_equal_to(skip_if_runtime_not_geo_compatible, spark):
+def test_is_area_not_equal_to(skip_if_runtime_not_geo_compatible, spark):
     test_df = spark.sql(
         """
         SELECT geom, geog FROM VALUES
@@ -468,9 +468,9 @@ def test_has_area_not_equal_to(skip_if_runtime_not_geo_compatible, spark):
     )
 
     actual = test_df.select(
-        has_area_not_equal_to("geom", 0.0).alias("basic_geometry"),
-        has_area_not_equal_to("geom", 1.0, srid=4326).alias("geometry_srid"),
-        has_area_not_equal_to("geog", 0.0, geodesic=True).alias("geography_geodesic"),
+        is_area_not_equal_to("geom", 0.0).alias("basic_geometry"),
+        is_area_not_equal_to("geom", 1.0, srid=4326).alias("geometry_srid"),
+        is_area_not_equal_to("geog", 0.0, geodesic=True).alias("geography_geodesic"),
     )
 
     checked_schema = "basic_geometry: string, geometry_srid: string, geography_geodesic: string"
@@ -504,7 +504,7 @@ def test_has_area_not_equal_to(skip_if_runtime_not_geo_compatible, spark):
     assert_df_equality(actual, expected, ignore_nullable=True)
 
 
-def test_has_area_less_than(skip_if_runtime_not_geo_compatible, spark):
+def test_is_area_not_greater_than(skip_if_runtime_not_geo_compatible, spark):
     test_df = spark.sql(
         """
         SELECT geom, geog FROM VALUES
@@ -518,9 +518,9 @@ def test_has_area_less_than(skip_if_runtime_not_geo_compatible, spark):
     )
 
     actual = test_df.select(
-        has_area_less_than("geom", 20000.0).alias("basic_geometry"),
-        has_area_less_than("geom", 1.0, srid=4326).alias("geometry_srid"),
-        has_area_less_than("geog", 1000.0, geodesic=True).alias("geography_geodesic"),
+        is_area_not_greater_than("geom", 20000.0).alias("basic_geometry"),
+        is_area_not_greater_than("geom", 1.0, srid=4326).alias("geometry_srid"),
+        is_area_not_greater_than("geog", 1000.0, geodesic=True).alias("geography_geodesic"),
     )
 
     checked_schema = "basic_geometry: string, geometry_srid: string, geography_geodesic: string"
@@ -550,7 +550,7 @@ def test_has_area_less_than(skip_if_runtime_not_geo_compatible, spark):
     assert_df_equality(actual, expected, ignore_nullable=True)
 
 
-def test_has_area_greater_than(skip_if_runtime_not_geo_compatible, spark):
+def test_is_area_not_less_than(skip_if_runtime_not_geo_compatible, spark):
     test_df = spark.sql(
         """
         SELECT geom, geog FROM VALUES
@@ -564,9 +564,9 @@ def test_has_area_greater_than(skip_if_runtime_not_geo_compatible, spark):
     )
 
     actual = test_df.select(
-        has_area_greater_than("geom", 20000.0).alias("basic_geometry"),
-        has_area_greater_than("geom", 1.0, srid=4326).alias("geometry_srid"),
-        has_area_greater_than("geog", 20000.0, geodesic=True).alias("geography_geodesic"),
+        is_area_not_less_than("geom", 20000.0).alias("basic_geometry"),
+        is_area_not_less_than("geom", 1.0, srid=4326).alias("geometry_srid"),
+        is_area_not_less_than("geog", 20000.0, geodesic=True).alias("geography_geodesic"),
     )
 
     checked_schema = "basic_geometry: string, geometry_srid: string, geography_geodesic: string"
@@ -600,7 +600,7 @@ def test_has_area_greater_than(skip_if_runtime_not_geo_compatible, spark):
     assert_df_equality(actual, expected, ignore_nullable=True)
 
 
-def test_has_num_points_equal_to(skip_if_runtime_not_geo_compatible, spark):
+def test_is_num_points_equal_to(skip_if_runtime_not_geo_compatible, spark):
    test_df = spark.sql(
         """
         SELECT geom FROM VALUES
@@ -614,7 +614,7 @@ def test_has_num_points_equal_to(skip_if_runtime_not_geo_compatible, spark):
         """
     )
 
-    actual = test_df.select(has_num_points_equal_to("geom", 5))
+    actual = test_df.select(is_num_points_equal_to("geom", 5))
 
     checked_schema = "geom_num_points_not_equal_to_limit: string"
     expected = spark.createDataFrame(
@@ -632,7 +632,7 @@ def test_has_num_points_equal_to(skip_if_runtime_not_geo_compatible, spark):
     assert_df_equality(actual, expected, ignore_nullable=True)
 
 
-def test_has_num_points_not_equal_to(skip_if_runtime_not_geo_compatible, spark):
+def test_is_num_points_not_equal_to(skip_if_runtime_not_geo_compatible, spark):
     test_df = spark.sql(
         """
         SELECT geom FROM VALUES
@@ -646,7 +646,7 @@ def test_has_num_points_not_equal_to(skip_if_runtime_not_geo_compatible, spark):
         """
     )
 
-    actual = test_df.select(has_num_points_not_equal_to("geom", 1))
+    actual = test_df.select(is_num_points_not_equal_to("geom", 1))
 
     checked_schema = "geom_num_points_equal_to_limit: string"
     expected = spark.createDataFrame(
@@ -664,7 +664,7 @@ def test_has_num_points_not_equal_to(skip_if_runtime_not_geo_compatible, spark):
     assert_df_equality(actual, expected, ignore_nullable=True)
 
 
-def test_has_num_points_less_than(skip_if_runtime_not_geo_compatible, spark):
+def test_is_num_points_not_greater_than(skip_if_runtime_not_geo_compatible, spark):
     test_df = spark.sql(
         """
         SELECT geom FROM VALUES
@@ -678,7 +678,7 @@ def test_has_num_points_less_than(skip_if_runtime_not_geo_compatible, spark):
         """
     )
 
-    actual = test_df.select(has_num_points_less_than("geom", 3))
+    actual = test_df.select(is_num_points_not_greater_than("geom", 3))
 
     checked_schema = "geom_num_points_greater_than_limit: string"
     expected = spark.createDataFrame(
@@ -698,7 +698,7 @@ def test_has_num_points_less_than(skip_if_runtime_not_geo_compatible, spark):
     assert_df_equality(actual, expected, ignore_nullable=True)
 
 
-def test_has_num_points_greater_than(skip_if_runtime_not_geo_compatible, spark):
+def test_is_num_points_not_less_than(skip_if_runtime_not_geo_compatible, spark):
     test_df = spark.sql(
         """
         SELECT geom FROM VALUES
@@ -712,7 +712,7 @@ def test_has_num_points_greater_than(skip_if_runtime_not_geo_compatible, spark):
         """
     )
 
-    actual = test_df.select(has_num_points_greater_than("geom", 3))
+    actual = test_df.select(is_num_points_not_less_than("geom", 3))
 
     checked_schema = "geom_num_points_less_than_limit: string"
     expected = spark.createDataFrame(
0 commit comments