Commit d28f8c7

Clean Up the Tests (#145)
1 parent 69f3b19 commit d28f8c7

7 files changed: +182/-110 lines

pyperformance/tests/__init__.py

Lines changed: 93 additions & 0 deletions
@@ -1,9 +1,18 @@
 import contextlib
 import errno
 import os
+import os.path
+import shutil
+import subprocess
+import sys
 import tempfile


+TESTS_ROOT = os.path.realpath(os.path.dirname(__file__))
+DATA_DIR = os.path.join(TESTS_ROOT, 'data')
+REPO_ROOT = os.path.dirname(os.path.dirname(TESTS_ROOT))
+
+
 @contextlib.contextmanager
 def temporary_file(**kwargs):
     tmp_filename = tempfile.mktemp(**kwargs)
@@ -15,3 +24,87 @@ def temporary_file(**kwargs):
     except OSError as exc:
         if exc.errno != errno.ENOENT:
             raise
+
+
+def run_cmd(*argv):
+    print(f"(tests) Execute: {' '.join(argv)}", flush=True)
+    proc = subprocess.Popen(argv, cwd=REPO_ROOT)
+    try:
+        proc.wait()
+    except:  # noqa
+        proc.kill()
+        proc.wait()
+        raise
+    sys.stdout.flush()
+    exitcode = proc.returncode
+    if exitcode:
+        sys.exit(exitcode)
+    print("", flush=True)
+
+
+#############################
+# functional tests
+
+class Functional:
+    """A mixin for functional tests."""
+
+    # XXX Disallow multi-proc or threaded test runs?
+    _TMPDIR = None
+    _VENV = None
+    _COUNT = 0
+
+    maxDiff = 80 * 100
+
+    @classmethod
+    def setUpClass(cls):
+        tmpdir = Functional._TMPDIR
+        if tmpdir is None:
+            tmpdir = Functional._TMPDIR = tempfile.mkdtemp()
+        venv = Functional._VENV = os.path.join(tmpdir, 'venv')
+        run_cmd(
+            sys.executable, '-u', '-m', 'pyperformance',
+            'venv', 'create',
+            '-b', 'all',
+            '--venv', venv,
+        )
+
+        egg_info = "pyperformance.egg-info"
+        print(f"(tests) Remove directory {egg_info}", flush=True)
+        try:
+            shutil.rmtree(egg_info)
+        except FileNotFoundError:
+            pass
+        Functional._COUNT += 1
+        super().setUpClass()
+
+    @classmethod
+    def tearDownClass(cls):
+        super().tearDownClass()
+        tmpdir = Functional._TMPDIR
+        venv = Functional._VENV
+        if Functional._COUNT == 1:
+            assert venv
+            run_cmd(
+                sys.executable, '-u', '-m', 'pyperformance',
+                'venv', 'remove',
+                '--venv', venv,
+            )
+            if os.path.exists(tmpdir):
+                shutil.rmtree(tmpdir)
+
+    @property
+    def venv_python(self):
+        if os.name == "nt":
+            python = os.path.basename(sys.executable)
+            return os.path.join(self._VENV, 'Scripts', python)
+        else:
+            return os.path.join(self._VENV, 'bin', 'python3')
+
+    def run_pyperformance(self, cmd, *args, invenv=True):
+        if invenv:
+            assert self._VENV
+            args += ('--venv', self._VENV)
+        run_cmd(
+            sys.executable, '-u', '-m', 'pyperformance',
+            cmd, *args,
+        )
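The mixin stores its state on the Functional class itself rather than on cls, so every test class that mixes it in shares the same temporary directory and benchmark venv. For reference, a minimal sketch of a consuming test module (the class body here is illustrative; test_list.py below follows exactly this pattern):

import unittest

from pyperformance import tests


class FunctionalTests(tests.Functional, unittest.TestCase):
    # _TMPDIR/_VENV live on the Functional mixin, not on this class, so
    # the venv set up by the first Functional subclass is reused here.

    def test_smoke(self):
        # Illustrative: run any pyperformance subcommand via the shared venv.
        self.run_pyperformance('list')


if __name__ == "__main__":
    unittest.main()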

pyperformance/tests/__main__.py

Lines changed: 17 additions & 0 deletions
@@ -0,0 +1,17 @@
+import unittest
+
+from pyperformance import tests
+
+
+def load_tests(loader, standard_tests, pattern):
+    pkgtests = loader.discover(
+        start_dir=tests.TESTS_ROOT,
+        top_level_dir=tests.TESTS_ROOT,
+        pattern=pattern or 'test*',
+    )
+    standard_tests.addTests(pkgtests)
+    return standard_tests
+
+
+if __name__ == "__main__":
+    unittest.main()
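The load_tests hook lets "python -m pyperformance.tests" discover every test_*.py module in the package. A sketch of the equivalent manual discovery, using only the standard unittest API:

import unittest

from pyperformance import tests

# Discover and run the package's tests, roughly what the __main__
# module does via the load_tests protocol.
loader = unittest.TestLoader()
suite = loader.discover(
    start_dir=tests.TESTS_ROOT,
    top_level_dir=tests.TESTS_ROOT,
    pattern='test*',
)
unittest.TextTestRunner(verbosity=2).run(suite)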

pyperformance/tests/test_compare.py

Lines changed: 3 additions & 27 deletions
@@ -9,31 +9,7 @@
 from pyperformance import tests


-DATA_DIR = os.path.realpath(os.path.join(os.path.dirname(__file__), 'data'))
-
-
-def run_cmd(cmd):
-    print("Execute: %s" % ' '.join(cmd))
-    proc = subprocess.Popen(cmd)
-    try:
-        proc.wait()
-    except:  # noqa
-        proc.kill()
-        proc.wait()
-        raise
-
-    exitcode = proc.returncode
-    if exitcode:
-        sys.exit(exitcode)
-
-
-class CompareTests(unittest.TestCase):
-    maxDiff = 80 * 100
-
-    @classmethod
-    def setUpClass(cls):
-        cmd = [sys.executable, '-m', 'pyperformance', 'venv', 'create']
-        run_cmd(cmd)
+class FunctionalTests(tests.Functional, unittest.TestCase):

     def compare(self, *args, **kw):
         dataset = kw.get('dataset', 'py')
@@ -48,8 +24,8 @@ def compare(self, *args, **kw):
         marker = file1

         cmd = [sys.executable, '-m', 'pyperformance', 'compare',
-               os.path.join(DATA_DIR, file1),
-               os.path.join(DATA_DIR, file2)]
+               os.path.join(tests.DATA_DIR, file1),
+               os.path.join(tests.DATA_DIR, file2)]
         cmd.extend(args)
         proc = subprocess.Popen(cmd,
                                 stdout=subprocess.PIPE,
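The hunk is truncated mid-call, but the surviving lines show compare() spawning the compare subcommand with its stdout piped so the report can be checked. As a hedged sketch of that capture pattern using subprocess.run (the file pairing and text-mode flag are assumptions, not necessarily what the test itself does):

import os.path
import subprocess
import sys

from pyperformance import tests

# Sketch: run `pyperformance compare` on two result files from the test
# data directory and capture the report as a string for assertions.
proc = subprocess.run(
    [sys.executable, '-m', 'pyperformance', 'compare',
     os.path.join(tests.DATA_DIR, 'py36.json'),
     os.path.join(tests.DATA_DIR, 'mem1.json')],
    stdout=subprocess.PIPE,
    text=True,
)
report = proc.stdout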

pyperformance/tests/test_list.py

Lines changed: 16 additions & 0 deletions
@@ -0,0 +1,16 @@
+import unittest
+
+from pyperformance import tests
+
+
+class FunctionalTests(tests.Functional, unittest.TestCase):
+
+    def test_list(self):
+        self.run_pyperformance('list')
+
+    def test_list_groups(self):
+        self.run_pyperformance('list_groups')
+
+
+if __name__ == "__main__":
+    unittest.main()

pyperformance/tests/test_run.py

Lines changed: 30 additions & 0 deletions
@@ -0,0 +1,30 @@
+import os.path
+import unittest
+
+from pyperformance import tests
+
+
+class FunctionalTests(tests.Functional, unittest.TestCase):
+
+    def test_run_and_show(self):
+        json = os.path.join(self._TMPDIR, 'bench.json')
+
+        # -b all: check that *all* benchmarks work.
+        #
+        # --debug-single-value: benchmark results don't matter, we only
+        # check that running the benchmarks doesn't fail.
+        self.run_pyperformance('run', '-b', 'all',
+                               '--debug-single-value',
+                               '-o', json)
+
+        # Display the slowest benchmarks.
+        tests.run_cmd(
+            self.venv_python, '-u',
+            '-m', 'pyperf',
+            'slowest',
+            json,
+        )
+
+
+if __name__ == "__main__":
+    unittest.main()
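After the run, the test shells out to "pyperf slowest" to print the slowest benchmarks from the JSON file it just wrote. For comparison, a sketch of reading the same file through pyperf's Python API (assuming pyperf's documented BenchmarkSuite interface; the path is illustrative):

import pyperf

# Load the results file and report each benchmark's mean, slowest
# first, roughly what "python -m pyperf slowest bench.json" prints.
suite = pyperf.BenchmarkSuite.load('bench.json')
for bench in sorted(suite.get_benchmarks(),
                    key=lambda b: b.mean(), reverse=True):
    print(f"{bench.get_name()}: {bench.mean():.6f}")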

pyperformance/tests/test_show.py

Lines changed: 19 additions & 0 deletions
@@ -0,0 +1,19 @@
+import os.path
+import unittest
+
+from pyperformance import tests
+
+
+class FunctionalTests(tests.Functional, unittest.TestCase):
+
+    def test_show(self):
+        for filename in (
+            os.path.join(tests.DATA_DIR, 'py36.json'),
+            os.path.join(tests.DATA_DIR, 'mem1.json'),
+        ):
+            with self.subTest(filename):
+                self.run_pyperformance('show', filename, invenv=False)
+
+
+if __name__ == "__main__":
+    unittest.main()

runtests.py

Lines changed: 4 additions & 83 deletions
@@ -1,93 +1,14 @@
 #!/usr/bin/env python3
 import os.path
-import shutil
 import subprocess
 import sys
-import tempfile
-
-
-def run_cmd(cmd):
-    print("(runtests.py) Execute: %s" % ' '.join(cmd), flush=True)
-    proc = subprocess.Popen(cmd)
-    try:
-        proc.wait()
-    except:  # noqa
-        proc.kill()
-        proc.wait()
-        raise
-    sys.stdout.flush()
-    exitcode = proc.returncode
-    if exitcode:
-        sys.exit(exitcode)
-    print("", flush=True)
-
-
-def run_tests(venv):
-    # Move to the root directly
-    root = os.path.dirname(__file__)
-    if root:
-        os.chdir(root)
-
-    python = sys.executable
-    script = 'pyperformance'
-    if os.name == "nt":
-        python_executable = os.path.basename(python)
-        venv_python = os.path.join(venv, 'Scripts', python_executable)
-    else:
-        venv_python = os.path.join(venv, 'bin', 'python')
-
-    def run_bench(*cmd):
-        cmd = cmd + ('--venv', venv)
-        run_cmd(cmd)
-
-    run_bench(python, '-u', script, 'venv', 'create', '-b', 'all')
-
-    egg_info = "pyperformance.egg-info"
-    print("(runtests.py) Remove directory %s" % egg_info, flush=True)
-    try:
-        shutil.rmtree(egg_info)
-    except FileNotFoundError:
-        pass
-
-    run_bench(python, '-u', script, 'venv', 'create')
-
-    for filename in (
-        os.path.join('pyperformance', 'tests', 'data', 'py36.json'),
-        os.path.join('pyperformance', 'tests', 'data', 'mem1.json'),
-    ):
-        run_cmd((python, script, 'show', filename))
-
-    run_bench(python, '-u', script, 'list')
-    run_bench(python, '-u', script, 'list_groups')
-
-    json = os.path.join(venv, 'bench.json')
-
-    # -b all: check that *all* benchmark work
-    #
-    # --debug-single-value: benchmark results don't matter, we only
-    # check that running benchmarks don't fail.
-    run_bench(python, '-u', script, 'run', '-b', 'all', '--debug-single-value',
-              '-o', json)
-
-    # Display slowest benchmarks
-    run_cmd((venv_python, '-u', '-m', 'pyperf', 'slowest', json))
-
-    run_bench(python, '-u', script, 'venv', 'remove')


 def main():
-    # Unit tests
-    cmd = [sys.executable, '-u',
-           os.path.join('pyperformance', 'tests', 'test_compare.py')]
-    run_cmd(cmd)
-
-    # Functional tests
-    tmpdir = tempfile.mkdtemp()
-    try:
-        run_tests(tmpdir)
-    finally:
-        if os.path.exists(tmpdir):
-            shutil.rmtree(tmpdir)
+    subprocess.run(
+        [sys.executable, '-u', '-m', 'pyperformance.tests'],
+        cwd=os.path.dirname(__file__) or None,
+    )


 if __name__ == "__main__":
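One caveat with the rewritten main(): the CompletedProcess returned by subprocess.run is discarded, so runtests.py exits 0 even when the test run fails. A hedged sketch of a variant that propagates the child's exit code (an editorial suggestion, not part of this commit):

import os.path
import subprocess
import sys


def main():
    proc = subprocess.run(
        [sys.executable, '-u', '-m', 'pyperformance.tests'],
        cwd=os.path.dirname(__file__) or None,
    )
    # Surface test failures to callers and CI instead of swallowing them.
    sys.exit(proc.returncode)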
