From c46bd47e733c091b1cfe975c6851c25c9113385e Mon Sep 17 00:00:00 2001 From: Flavio Sales Truzzi Date: Mon, 15 Sep 2025 10:25:54 -0700 Subject: [PATCH] - Clean torch.check (#4871) Summary: torch._check expects a string, however this was passing a lambda function that returned a string, which was causing issues when compiling the code with PT2. Just cleaned it up to use f-string. Reviewed By: spcyppt Differential Revision: D82347053 --- fbgemm_gpu/fbgemm_gpu/quantize_comm.py | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/fbgemm_gpu/fbgemm_gpu/quantize_comm.py b/fbgemm_gpu/fbgemm_gpu/quantize_comm.py index 7db84e4bf6..4a786891b7 100644 --- a/fbgemm_gpu/fbgemm_gpu/quantize_comm.py +++ b/fbgemm_gpu/fbgemm_gpu/quantize_comm.py @@ -230,7 +230,7 @@ def calc_quantized_size( ctx = none_throws(ctx) torch._check( input_len % ctx.row_dim == 0, - lambda: f"input_len {input_len} is not a multiple of row dim {ctx.row_dim}", + f"input_len {input_len} is not a multiple of row dim {ctx.row_dim}", ) assert input_len % ctx.row_dim == 0, ( f"input_len {input_len} is not a multiple of row dim {ctx.row_dim} "