From 6e976b3fdf0b726cfb8a40cef638047d7ec07287 Mon Sep 17 00:00:00 2001
From: Anatoly Myachev
Date: Fri, 11 Oct 2024 10:10:25 +0000
Subject: [PATCH 1/2] Update PyTorch pin

Signed-off-by: Anatoly Myachev
---
 .github/pins/pytorch-upstream.txt | 2 +-
 1 file changed, 1 insertion(+), 1 deletion(-)

diff --git a/.github/pins/pytorch-upstream.txt b/.github/pins/pytorch-upstream.txt
index 67bed0a666..4cf70cf66d 100644
--- a/.github/pins/pytorch-upstream.txt
+++ b/.github/pins/pytorch-upstream.txt
@@ -1 +1 @@
-0a2685160140656e3e53818611dd2c65c4397be5
+8321eec009c8c79145ebccd51fdfc336e5f8b848


From 52a15e0a7e5a845bc27e7b099efeff26e3dc2f07 Mon Sep 17 00:00:00 2001
From: Anatoly Myachev
Date: Tue, 15 Oct 2024 19:04:59 +0000
Subject: [PATCH 2/2] Revert some changes from triton PR#4496

Signed-off-by: Anatoly Myachev
---
 python/triton/runtime/autotuner.py | 4 ++++
 1 file changed, 4 insertions(+)

diff --git a/python/triton/runtime/autotuner.py b/python/triton/runtime/autotuner.py
index 5f846de170..be02d61a43 100644
--- a/python/triton/runtime/autotuner.py
+++ b/python/triton/runtime/autotuner.py
@@ -90,6 +90,10 @@ def _post_hook(args, exception):
         while not inspect.isfunction(self.base_fn):
             self.base_fn = self.base_fn.fn
 
+        self.num_warmups = warmup
+        self.num_reps = rep
+        self.use_cuda_graph = use_cuda_graph
+
         # If we got explicitly called via the old interface, raise a warning
         # and proceed with the old behavior.
         if warmup is not None or rep is not None or use_cuda_graph:
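
A minimal sketch, not part of the patch, of why restoring these assignments matters: with the second patch applied, the warmup/rep/use_cuda_graph values passed to @triton.autotune are stored back on the Autotuner object, so code outside Triton that still reads them keeps working. The kernel body, config values, and the final attribute read are illustrative assumptions, not taken from this repository.

# Illustrative only; assumes the patch above is applied to triton.
import triton
import triton.language as tl


@triton.autotune(
    configs=[
        triton.Config({"BLOCK": 64}, num_warps=2),
        triton.Config({"BLOCK": 128}, num_warps=4),
    ],
    key=["n_elements"],
    warmup=25,   # old-interface benchmarking arguments
    rep=100,
)
@triton.jit
def add_kernel(x_ptr, y_ptr, out_ptr, n_elements, BLOCK: tl.constexpr):
    pid = tl.program_id(0)
    offsets = pid * BLOCK + tl.arange(0, BLOCK)
    mask = offsets < n_elements
    x = tl.load(x_ptr + offsets, mask=mask)
    y = tl.load(y_ptr + offsets, mask=mask)
    tl.store(out_ptr + offsets, x + y, mask=mask)


# Hypothetical downstream check: add_kernel is an Autotuner instance, so these
# attributes exist again once the __init__ assignments are restored.
print(add_kernel.num_warmups, add_kernel.num_reps, add_kernel.use_cuda_graph)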