def center_crop_bounding_boxes(
    bounding_boxes: torch.Tensor,
    format: tv_tensors.BoundingBoxFormat,
    canvas_size: tuple[int, int],
    output_size: list[int],
    clamping_mode: CLAMPING_MODE_TYPE = "soft",
) -> tuple[torch.Tensor, tuple[int, int]]:
    """Center-crop bounding boxes to ``output_size``.

    Computes the crop anchor for a centered crop of ``canvas_size`` and
    delegates the actual box adjustment to ``crop_bounding_boxes``.

    Args:
        bounding_boxes: Plain tensor of boxes (already unwrapped from
            ``tv_tensors.BoundingBoxes`` by the dispatcher).
        format: Coordinate format of the boxes.
        canvas_size: ``(height, width)`` of the image the boxes live on.
        output_size: Requested crop size, parsed by
            ``_center_crop_parse_output_size``.
        clamping_mode: Forwarded to ``crop_bounding_boxes``; defaults to
            ``"soft"``.  # NOTE(review): exact clamping semantics are defined
            # by the clamping helpers elsewhere in this module.

    Returns:
        Tuple of the cropped boxes tensor and the new ``(height, width)``
        canvas size.
    """
    crop_height, crop_width = _center_crop_parse_output_size(output_size)
    crop_top, crop_left = _center_crop_compute_crop_anchor(crop_height, crop_width, *canvas_size)
    return crop_bounding_boxes(
        bounding_boxes,
        format,
        top=crop_top,
        left=crop_left,
        height=crop_height,
        width=crop_width,
        clamping_mode=clamping_mode,
    )
26302637
def _center_crop_bounding_boxes_dispatch(
    inpt: tv_tensors.BoundingBoxes, output_size: list[int]
) -> tv_tensors.BoundingBoxes:
    """Kernel dispatcher: unwrap a ``BoundingBoxes`` tv_tensor, run the
    center-crop kernel with the instance's metadata (format, canvas size,
    clamping mode), and re-wrap the result with the updated canvas size."""
    output, canvas_size = center_crop_bounding_boxes(
        inpt.as_subclass(torch.Tensor),
        format=inpt.format,
        canvas_size=inpt.canvas_size,
        output_size=output_size,
        clamping_mode=inpt.clamping_mode,
    )
    return tv_tensors.wrap(output, like=inpt, canvas_size=canvas_size)
def resized_crop_bounding_boxes(
    bounding_boxes: torch.Tensor,
    format: tv_tensors.BoundingBoxFormat,
    top: int,
    left: int,
    height: int,
    width: int,
    size: list[int],
    clamping_mode: CLAMPING_MODE_TYPE = "soft",
) -> tuple[torch.Tensor, tuple[int, int]]:
    """Crop bounding boxes to the given region, then resize them to ``size``.

    Composition of ``crop_bounding_boxes`` followed by
    ``resize_bounding_boxes``; ``clamping_mode`` is threaded through both
    steps so intermediate and final clamping behave consistently.

    Args:
        bounding_boxes: Plain tensor of boxes.
        format: Coordinate format of the boxes.
        top, left, height, width: Crop region within the current canvas.
        size: Target size passed to ``resize_bounding_boxes``.
        clamping_mode: Forwarded to both kernels; defaults to ``"soft"``.

    Returns:
        Tuple of the transformed boxes tensor and the new canvas size.
    """
    cropped, cropped_canvas = crop_bounding_boxes(
        bounding_boxes, format, top, left, height, width, clamping_mode=clamping_mode
    )
    return resize_bounding_boxes(
        cropped, format=format, canvas_size=cropped_canvas, size=size, clamping_mode=clamping_mode
    )
27872803
@_register_kernel_internal(resized_crop, tv_tensors.BoundingBoxes, tv_tensor_wrapper=False)
def _resized_crop_bounding_boxes_dispatch(
    inpt: tv_tensors.BoundingBoxes, top: int, left: int, height: int, width: int, size: list[int], **kwargs
) -> tv_tensors.BoundingBoxes:
    """Kernel dispatcher for ``resized_crop`` on ``BoundingBoxes``: unwrap the
    tv_tensor, invoke the crop-then-resize kernel with the instance's format
    and clamping mode, and re-wrap with the resulting canvas size.

    Extra ``**kwargs`` (e.g. image-only options such as interpolation) are
    accepted and ignored, matching the registration contract.
    """
    output, canvas_size = resized_crop_bounding_boxes(
        inpt.as_subclass(torch.Tensor),
        format=inpt.format,
        top=top,
        left=left,
        height=height,
        width=width,
        size=size,
        clamping_mode=inpt.clamping_mode,
    )
    return tv_tensors.wrap(output, like=inpt, canvas_size=canvas_size)
0 commit comments