# This is a work in progress; the local/CI testing workflow is gradually being transferred to tox.

# Usage instructions:
# `tox` runs all tests sequentially; `tox --parallel` runs all tests in parallel (much faster).
# Run a specific selection of tests with `tox -e pretest,<list-of-tests>,posttest`, e.g. `tox -e pretest,test-api,test-launcher,posttest`.
# The `--parallel` flag can also be passed when running a specific selection.
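# For example, a specific selection can be combined with parallel mode (illustrative invocation):
# `tox -e pretest,test-api,test-launcher,posttest --parallel`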

[tox]
description = Default tox environment list and core configurations

# List all test environments to run here, in sequential or parallel mode,
# so that a bare `tox`/`tox --parallel` invocation runs the whole suite sequentially/in parallel.
envlist = pretest,test-{api,launcher,server,local_server,multi_server,remote_workflow,remote_operator,workflow,service,operators},posttest

isolated_build_env = build

[testenv]
description = Default configuration for test environments, unless overridden

pass_env =
    PACKAGE_NAME
    MODULE
    ANSYS_DPF_ACCEPT_LA
    ANSYSLMD_LICENSE_FILE
    AWP_ROOT242

package = external # To allow custom wheel builds

[testenv:build_external]
description = Environment for custom builds of package wheels, satisfying the PyDPF custom wheel building requirement

package_glob = {toxinidir}{/}dist{/}ansys_dpf_core*

# The {on_platform} substitution automatically detects the OS type.
commands =
    python .ci/build_wheel.py -p {on_platform} -w
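    # Note: the custom-built wheel is assumed to end up in the dist/ directory at the project root,
    # which is where package_glob above points the external package lookup.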

[testenv:pretest]
description = Environment to kill servers and organize test files prior to testing

deps =
    psutil

skip_install = True

commands =
    # Clear any running servers that may be locking resources
    python -c " import psutil; proc_name = 'Ans.Dpf.Grpc'; nb_procs = len([proc.kill() for proc in psutil.process_iter() if proc_name in proc.name()]); \
    print(f'Killed \{nb_procs} \{proc_name} processes.')"

    # Organize test files
    python -c " \
    import os, shutil; \
    test_data=['test_launcher','test_server','test_local_server','test_multi_server','test_workflow','test_remote_workflow','test_remote_operator','test_service','test_custom_type_field']; \
    [(os.makedirs(d, exist_ok=True), shutil.copy('tests/conftest.py', d), shutil.copy(f'tests/\{d}.py', d) if os.path.exists(f'tests/\{d}.py') else None) for d in test_data]; \
    [os.remove(f'tests/\{d}.py') for d in test_data if os.path.exists(f'tests/\{d}.py')]"
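    # After this step, the layout for each entry is expected to look roughly like (sketch):
    #   test_launcher/conftest.py
    #   test_launcher/test_launcher.py  (moved out of tests/)
    # so that each test module can be collected in isolation by its own tox environment.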

[testenv:posttest]
description = Environment to kill servers and revert test files to their original state after testing

depends = pretest, test-{api,launcher,server,local_server,multi_server,remote_workflow,remote_operator,workflow,service,operators}

deps =
    psutil

skip_install = True

commands =
    # Revert project layout to previous state
    python -c " \
    import os, shutil; \
    test_data=['test_launcher','test_server','test_local_server','test_multi_server','test_workflow','test_remote_workflow','test_remote_operator','test_service','test_custom_type_field']; \
    [shutil.move(f'\{d}/\{d}.py', f'tests/\{d}.py') for d in test_data if os.path.exists(f'\{d}/\{d}.py')]; \
    [shutil.rmtree(d) for d in test_data if os.path.exists(d)]"

    # Clear any running servers that may be locking resources
    python -c " import psutil; proc_name = 'Ans.Dpf.Grpc'; nb_procs = len([proc.kill() for proc in psutil.process_iter() if proc_name in proc.name()]); \
    print(f'Killed \{nb_procs} \{proc_name} processes.')"

[testenv:test-{api,launcher,server,local_server,multi_server,remote_workflow,remote_operator,workflow,service,operators}]
description = Environment where the project testing configuration is defined

depends = pretest

setenv =
    # Pytest extra arguments
    COVERAGE = --cov=ansys.dpf.core --cov-report=xml --cov-report=html --log-level=ERROR --cov-append
    RERUNS = --reruns=2 --reruns-delay=1
    DEBUG = -v -s --durations=10 --durations-min=1.0

    api: JUNITXML = --junitxml=tests/junit/test-results.xml
    launcher: JUNITXML = --junitxml=tests/junit/test-results2.xml
    server: JUNITXML = --junitxml=tests/junit/test-results3.xml
    local_server: JUNITXML = --junitxml=tests/junit/test-results4.xml
    multi_server: JUNITXML = --junitxml=tests/junit/test-results5.xml
    remote_workflow: JUNITXML = --junitxml=tests/junit/test-results6.xml
    remote_operator: JUNITXML = --junitxml=tests/junit/test-results7.xml
    workflow: JUNITXML = --junitxml=tests/junit/test-results8.xml
    service: JUNITXML = --junitxml=tests/junit/test-results9.xml
    operators: JUNITXML = --junitxml=../tests/junit/test-results12.xml

    # Test sets
    api: PYTEST_PYTHON_FILES = tests
    launcher: PYTEST_PYTHON_FILES = test_launcher
    server: PYTEST_PYTHON_FILES = test_server
    local_server: PYTEST_PYTHON_FILES = test_local_server
    multi_server: PYTEST_PYTHON_FILES = test_multi_server
    remote_workflow: PYTEST_PYTHON_FILES = test_remote_workflow
    remote_operator: PYTEST_PYTHON_FILES = test_remote_operator
    workflow: PYTEST_PYTHON_FILES = test_workflow
    service: PYTEST_PYTHON_FILES = test_service
    operators: PYTEST_PYTHON_FILES = tests/operators

deps =
    -r requirements/requirements_test.txt

commands =
    pytest {env:PYTEST_PYTHON_FILES} {env:DEBUG} {env:COVERAGE} {env:RERUNS} {env:JUNITXML}
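    # For the launcher factor, for instance, this roughly expands to (illustrative):
    #   pytest test_launcher -v -s --durations=10 --durations-min=1.0 \
    #     --cov=ansys.dpf.core --cov-report=xml --cov-report=html --log-level=ERROR --cov-append \
    #     --reruns=2 --reruns-delay=1 --junitxml=tests/junit/test-results2.xml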