
Commit 09eb01f

use op_test.get_cuda_version - part (#76167)
1 parent ead676b · commit 09eb01f
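In short: the two duplicated local definitions of get_cuda_version() (a simple one in test_flash_attention.py and a fuller one in test_sparse_attention_op.py) are deleted, and all five tests now import the shared helper from op_test instead.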

File tree

5 files changed (+5, -40 lines)


test/legacy_test/test_flash_attention.py

Lines changed: 1 addition & 15 deletions

@@ -12,12 +12,10 @@
 # See the License for the specific language governing permissions and
 # limitations under the License.
 import logging
-import os
-import re
 import unittest

 import numpy as np
-from op_test import get_device_place, is_custom_device
+from op_test import get_cuda_version, get_device_place, is_custom_device

 import paddle
 import paddle.nn.functional as F

@@ -38,18 +36,6 @@
 logging.basicConfig(format="%(asctime)s - %(levelname)s - %(message)s")


-def get_cuda_version():
-    result = os.popen("nvcc --version").read()
-    regex = r'release (\S+),'
-    match = re.search(regex, result)
-    if match:
-        num = str(match.group(1))
-        integer, decimal = num.split('.')
-        return int(integer) * 1000 + int(float(decimal) * 10)
-    else:
-        return -1
-
-
 def attention_naive(q, k, v, causal=False):
     qt = paddle.transpose(q, [0, 2, 1, 3])
     kt = paddle.transpose(k, [0, 2, 1, 3])
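A note on the encoding used by the removed helper (and presumably by the shared op_test version as well): the "release X.Y" token from `nvcc --version` is packed into a single integer as X * 1000 + Y * 10, so CUDA 11.8 becomes 11 * 1000 + 8 * 10 = 11080, and -1 signals that no CUDA toolkit was detected.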

test/legacy_test/test_fused_multi_transformer_op.py

Lines changed: 1 addition & 2 deletions

@@ -16,8 +16,7 @@
 import unittest

 import numpy as np
-from op_test import OpTest, get_device_place, is_custom_device
-from test_sparse_attention_op import get_cuda_version
+from op_test import OpTest, get_cuda_version, get_device_place, is_custom_device

 import paddle
 import paddle.nn.functional as F

test/legacy_test/test_matmul_fp8_op.py

Lines changed: 1 addition & 2 deletions

@@ -14,8 +14,7 @@
 import unittest

 import numpy as np
-from op_test import is_custom_device
-from test_sparse_attention_op import get_cuda_version
+from op_test import get_cuda_version, is_custom_device

 import paddle
 from paddle.base import core

test/legacy_test/test_mul_op.py

Lines changed: 1 addition & 1 deletion

@@ -18,7 +18,7 @@
 import numpy as np

 sys.path.append("../../legacy_test")
-from test_sparse_attention_op import get_cuda_version
+from op_test import get_cuda_version

 from paddle.base import core

test/legacy_test/test_sparse_attention_op.py

Lines changed: 1 addition & 20 deletions

@@ -13,36 +13,17 @@
 # limitations under the License.

 import copy
-import os
-import re
 import unittest

 import numpy as np
-from op_test import OpTest, get_device_place, is_custom_device
+from op_test import OpTest, get_cuda_version, get_device_place, is_custom_device

 import paddle
 import paddle.nn.functional as F
 from paddle import base
 from paddle.base import core


-def get_cuda_version():
-    if paddle.is_compiled_with_cuda():
-        result = os.popen("nvcc --version").read()
-        regex = r'release (\S+),'
-        match = re.search(regex, result)
-        if match:
-            num = str(match.group(1))
-            integer, decimal = num.split('.')
-            return int(integer) * 1000 + int(float(decimal) * 10)
-        else:
-            return -1
-    elif is_custom_device():
-        return 13000
-    else:
-        return -1
-
-
 def masked_fill(x):
     row, col = x.shape[0], x.shape[1]
     for i in range(row):
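For reference, here is a minimal sketch of what the consolidated helper in op_test presumably looks like, reconstructed from the two local copies removed above (the custom-device branch and its 13000 sentinel come from the test_sparse_attention_op.py copy); the actual upstream implementation may differ:

```python
# Sketch of the shared helper, reconstructed from the removed local
# copies; the real op_test implementation may differ in details.
import os
import re

import paddle
from op_test import is_custom_device  # helper already exported by op_test


def get_cuda_version():
    """Return the CUDA toolkit version packed as an int (11.8 -> 11080), or -1."""
    if paddle.is_compiled_with_cuda():
        # Parse the "release X.Y," token from `nvcc --version` output.
        result = os.popen("nvcc --version").read()
        match = re.search(r'release (\S+),', result)
        if match:
            integer, decimal = match.group(1).split('.')
            return int(integer) * 1000 + int(float(decimal) * 10)
        return -1
    elif is_custom_device():
        # The removed test_sparse_attention_op.py copy treated custom
        # devices as CUDA 13.0-equivalent.
        return 13000
    return -1
```

Callers can then gate tests on the packed value, e.g. `get_cuda_version() >= 11030` to require CUDA 11.3 or newer.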
