❓ Correct behavior of torch.ops.xla.write_mlir_debuginfo
What is the correct behavior of torch.ops.xla.write_mlir_debuginfo? It seems to attach the debug info to all upstream operations, not just the op that directly produces the annotated tensor. Is this the expected behavior?
import torch
import torch_xla
import torch_xla.experimental.xla_mlir_debuginfo
from torch_xla.stablehlo import (StableHLOExportOptions,
                                 exported_program_to_stablehlo)


class SampleModel(torch.nn.Module):

    def forward(self, x, y):
        x = x + y
        x = x - y
        x = torch.ops.xla.write_mlir_debuginfo(x, "MY_SUB")
        return x


model = SampleModel()
exported_program = torch.export.export(model,
                                       (torch.rand(10), torch.rand(10)))
mlir_text = exported_program_to_stablehlo(
    exported_program).get_stablehlo_text()
print(mlir_text)
#loc1 = loc("<XLA_MLIR_DEBUGINFO_BEGIN>MY_SUB<XLA_MLIR_DEBUGINFO_END>xla__device_data")
module @IrToHlo.12 attributes {mhlo.cross_program_prefetches = [], mhlo.input_output_alias = [], mhlo.is_dynamic = false, mhlo.use_auto_spmd_partitioning = false} {
  func.func @main(%arg0: tensor<10xf32> loc("<XLA_MLIR_DEBUGINFO_BEGIN>MY_SUB<XLA_MLIR_DEBUGINFO_END>xla__device_data"), %arg1: tensor<10xf32> loc("<XLA_MLIR_DEBUGINFO_BEGIN>MY_SUB<XLA_MLIR_DEBUGINFO_END>xla__device_data")) -> tensor<10xf32> {
    %0 = stablehlo.add %arg1, %arg0 : tensor<10xf32> loc("<XLA_MLIR_DEBUGINFO_BEGIN>MY_SUB<XLA_MLIR_DEBUGINFO_END>aten__add")
    %1 = stablehlo.subtract %0, %arg0 : tensor<10xf32> loc("<XLA_MLIR_DEBUGINFO_BEGIN>MY_SUB<XLA_MLIR_DEBUGINFO_END>aten__sub")
    return %1 : tensor<10xf32> loc(unknown)
  } loc(unknown)
} loc(unknown)
#loc = loc(unknown)
#loc2 = loc("<XLA_MLIR_DEBUGINFO_BEGIN>MY_SUB<XLA_MLIR_DEBUGINFO_END>aten__add")
#loc3 = loc("<XLA_MLIR_DEBUGINFO_BEGIN>MY_SUB<XLA_MLIR_DEBUGINFO_END>aten__sub")
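As shown above, the "MY_SUB" tag ends up on the add op and even on the xla__device_data arguments, not only on the subtract. For comparison, here is a minimal sketch (same API calls as the repro, with a made-up second label "MY_ADD") that tags each intermediate tensor separately, to check whether each op then picks up the nearest downstream annotation instead of every upstream op inheriting a single tag:

import torch
import torch_xla
import torch_xla.experimental.xla_mlir_debuginfo  # registers torch.ops.xla.write_mlir_debuginfo
from torch_xla.stablehlo import exported_program_to_stablehlo


class TaggedModel(torch.nn.Module):

    def forward(self, x, y):
        x = x + y
        # Tag the output of the add before it feeds into the subtract.
        x = torch.ops.xla.write_mlir_debuginfo(x, "MY_ADD")
        x = x - y
        # Tag the output of the subtract.
        x = torch.ops.xla.write_mlir_debuginfo(x, "MY_SUB")
        return x


exported_program = torch.export.export(TaggedModel(),
                                       (torch.rand(10), torch.rand(10)))
print(exported_program_to_stablehlo(exported_program).get_stablehlo_text())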