
Commit 4c27cdf

peterbell10 authored and whitneywhtsang committed
[Tests] Don't run autotuning when running attention tutorial as a test (#6867)
1 parent dc35ad2 commit 4c27cdf

File tree

1 file changed: +6 additions, −0 deletions

python/tutorials/06-fused-attention.py

Lines changed: 6 additions & 0 deletions
@@ -15,6 +15,7 @@
 
 import pytest
 import torch
+import sys
 
 import triton
 import triton.language as tl
@@ -111,6 +112,11 @@ def _host_descriptor_pre_hook(nargs):
     for s in NUM_STAGES_OPTIONS \
     for w in [4, 8]\
 ]
+if "pytest" in sys.modules:
+    # Use a single config in testing for reproducibility
+    configs = [
+        triton.Config(dict(BLOCK_M=64, BLOCK_N=64), num_stages=4, num_warps=4, pre_hook=_host_descriptor_pre_hook),
+    ]
 
 
 def keep(conf):
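
The pattern applied here generalizes to any autotuned Triton kernel: build the full config sweep for normal runs, then overwrite it with one pinned config when the module is imported under pytest. The sketch below is not part of the commit; the kernel name `_double_kernel`, its block sizes, and the `key` choice are illustrative assumptions.

# Minimal sketch (hypothetical kernel `_double_kernel`, illustrative block
# sizes); shows the same "skip autotuning under pytest" pattern as the commit.
import sys

import torch
import triton
import triton.language as tl

# Full autotuning sweep used for normal (non-test) runs.
configs = [
    triton.Config({"BLOCK": b}, num_stages=s, num_warps=w)
    for b in [64, 128, 256]
    for s in [3, 4]
    for w in [4, 8]
]
if "pytest" in sys.modules:
    # Under pytest, pin a single config: no autotuning, reproducible runtime.
    configs = [triton.Config({"BLOCK": 64}, num_stages=4, num_warps=4)]


@triton.autotune(configs=configs, key=["n_elements"])
@triton.jit
def _double_kernel(x_ptr, y_ptr, n_elements, BLOCK: tl.constexpr):
    pid = tl.program_id(axis=0)
    offsets = pid * BLOCK + tl.arange(0, BLOCK)
    mask = offsets < n_elements
    tl.store(y_ptr + offsets, 2 * tl.load(x_ptr + offsets, mask=mask), mask=mask)


def double(x: torch.Tensor) -> torch.Tensor:
    # BLOCK is supplied by the selected triton.Config, so it is not passed here.
    y = torch.empty_like(x)
    grid = lambda meta: (triton.cdiv(x.numel(), meta["BLOCK"]),)
    _double_kernel[grid](x, y, x.numel())
    return y

Checking "pytest" in sys.modules (rather than importing pytest or passing a flag) keeps the tutorial free of test-only dependencies: the branch only fires when pytest itself imported the module.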

0 commit comments
