
Commit 29bcd80

Added unit tests for runtime_profiler

1 parent cbd08e0 commit 29bcd80
2 files changed: +247 additions, -0 deletions
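As a quick usage note, the new test module is command-line runnable via its __main__ block. Below is a minimal sketch, not part of the commit, of driving it in-process; the file path is an assumption based on this commit's file layout.

# Minimal sketch: execute the new test module as a script from a nipype
# checkout. The path is assumed from this commit's layout; the module's
# own __main__ block then calls unittest.main().
import runpy

runpy.run_path('nipype/interfaces/tests/test_runtime_profiler.py',
               run_name='__main__')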
nipype/interfaces/tests/test_runtime_profiler.py

Lines changed: 185 additions & 0 deletions
@@ -0,0 +1,185 @@
# test_runtime_profiler.py
#
# Author: Daniel Clark, 2016

'''
Module to unit test the runtime_profiler in nipype
'''

# Import packages
import unittest
from nipype.interfaces.base import traits, CommandLine, CommandLineInputSpec


# UseResources inputspec
class UseResourcesInputSpec(CommandLineInputSpec):
    '''
    Input spec for the use_resources command-line interface
    '''

    # Init attributes
    num_gb = traits.Float(desc='Number of GB of RAM to use',
                          argstr='-g %f')
    num_procs = traits.Int(desc='Number of processors to use',
                           argstr='-p %d')


# UseResources interface
class UseResources(CommandLine):
    '''
    Command-line interface around the use_resources script
    '''

    # Import packages
    import os

    # Init attributes
    input_spec = UseResourcesInputSpec

    # Get path of executable
    exec_dir = os.path.dirname(os.path.realpath(__file__))
    exec_path = os.path.join(exec_dir, 'use_resources')

    # Init cmd
    _cmd = exec_path


# Test case for the run function
class RuntimeProfilerTestCase(unittest.TestCase):
    '''
    This class is a test case for the ResourceMultiProc plugin runtime
    profiler

    Inherits
    --------
    unittest.TestCase class

    Attributes (class):
    ------------------
    see unittest.TestCase documentation

    Attributes (instance):
    ----------------------
    '''

    # setUp method to initialize the resource parameters for the workflow
    def setUp(self):
        '''
        Method to instantiate TestCase

        Parameters
        ----------
        self : RuntimeProfilerTestCase
            a unittest.TestCase-inherited class
        '''

        self.num_gb = 2
        self.num_procs = 2

    # Test node
    def _run_workflow(self):
        '''
        Function to run the use_resources script in a nipype workflow
        and return the runtime stats recorded by the profiler

        Parameters
        ----------
        self : RuntimeProfilerTestCase
            a unittest.TestCase-inherited class

        Returns
        -------
        finish_str : string
            a json-compatible dictionary string containing the runtime
            statistics of the nipype node that used system resources
        '''

        # Import packages
        import logging
        import os
        import shutil
        import tempfile

        import nipype.pipeline.engine as pe
        import nipype.interfaces.utility as util
        from nipype.pipeline.plugins.callback_log import log_nodes_cb

        # Init variables
        num_gb = self.num_gb
        num_procs = self.num_procs
        base_dir = tempfile.mkdtemp()
        log_file = os.path.join(base_dir, 'callback.log')

        # Init logger
        logger = logging.getLogger('callback')
        logger.setLevel(logging.DEBUG)
        handler = logging.FileHandler(log_file)
        logger.addHandler(handler)

        # Declare workflow
        wf = pe.Workflow(name='test_runtime_prof')
        wf.base_dir = base_dir

        # Input node
        input_node = pe.Node(util.IdentityInterface(fields=['num_gb',
                                                            'num_procs']),
                             name='input_node')
        input_node.inputs.num_gb = num_gb
        input_node.inputs.num_procs = num_procs

        # Resources used node
        resource_node = pe.Node(UseResources(), name='resource_node')
        resource_node.interface.estimated_memory = num_gb
        resource_node.interface.num_threads = num_procs

        # Connect workflow
        wf.connect(input_node, 'num_gb', resource_node, 'num_gb')
        wf.connect(input_node, 'num_procs', resource_node, 'num_procs')

        # Run workflow
        plugin_args = {'n_procs': num_procs,
                       'memory': num_gb,
                       'runtime_profile': True,
                       'status_callback': log_nodes_cb}
        wf.run(plugin='ResourceMultiProc', plugin_args=plugin_args)

        # Get runtime stats from log file
        with open(log_file, 'r') as log_handle:
            finish_str = log_handle.readlines()[1].rstrip('\n')

        # Delete wf base dir
        shutil.rmtree(base_dir)

        # Return runtime stats
        return finish_str

    # Test resources were used as expected
    def test_wf_logfile(self):
        '''
        Test to see that the input resources to consume match what was
        recorded during runtime
        '''

        # Import packages
        import json

        # Init variables
        places = 1

        # Run workflow and get stats
        finish_str = self._run_workflow()
        # Get runtime stats as dictionary
        node_stats = json.loads(finish_str)

        # Read out runtime stats
        runtime_gb = float(node_stats['runtime_memory'])
        runtime_procs = int(node_stats['runtime_threads'])

        # Assert runtime stats are what was input
        mem_err = 'Input memory: %.5f is not within %d places of runtime '\
                  'memory: %.5f' % (self.num_gb, places, runtime_gb)
        self.assertAlmostEqual(self.num_gb, runtime_gb, places=places, msg=mem_err)
        procs_err = 'Input procs: %d is not equal to runtime procs: %d' \
                    % (self.num_procs, runtime_procs)
        self.assertEqual(self.num_procs, runtime_procs, msg=procs_err)


# Command-line run-able unittest module
if __name__ == '__main__':
    unittest.main()
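For clarity, test_wf_logfile depends only on the 'runtime_memory' and 'runtime_threads' keys of the second callback-log line. The following is a minimal sketch of that parsing step using an illustrative record rather than real profiler output; it is not part of the commit.

# Minimal sketch of the parsing performed in test_wf_logfile.
# The sample line is illustrative only; the real record is whatever
# log_nodes_cb writes when resource_node finishes.
import json

sample_line = '{"runtime_memory": 2.0, "runtime_threads": 2}'
node_stats = json.loads(sample_line)
runtime_gb = float(node_stats['runtime_memory'])
runtime_procs = int(node_stats['runtime_threads'])
print(runtime_gb, runtime_procs)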

nipype/interfaces/tests/use_resources

Lines changed: 62 additions & 0 deletions
@@ -0,0 +1,62 @@
#!/usr/bin/env python
#
# use_resources

'''
Python script to use a certain amount of RAM and number of
processors

Usage:
    use_resources -g <num_gb> -p <num_procs>
'''

# Function to occupy GB of memory
def use_gb_ram(num_gb):
    '''
    Function to consume GB of memory
    '''

    # Allocate num_gb GB of memory as a string, then spin the CPU briefly
    gb_str = ' ' * int(num_gb*1024.0**3)

    ctr = 0
    while ctr < 100e6:
        ctr += 1

    # Clear memory
    del ctr
    del gb_str


# Make main executable
if __name__ == '__main__':

    # Import packages
    import argparse
    from multiprocessing import Process

    # Init argparser
    parser = argparse.ArgumentParser(description=__doc__)

    # Add arguments
    parser.add_argument('-g', '--num_gb', nargs=1, required=True,
                        help='Number of GB RAM to use, can be float or int')
    parser.add_argument('-p', '--num_procs', nargs=1, required=True,
                        help='Number of processors to run in parallel')

    # Parse args
    args = parser.parse_args()

    # Init variables
    num_gb = float(args.num_gb[0])
    num_procs = int(args.num_procs[0])

    # Build proc list; each process consumes an equal share of the memory
    proc_list = []
    for idx in range(num_procs):
        proc_list.append(Process(target=use_gb_ram, args=(num_gb/num_procs,)))

    # Run in parallel across processes
    print('Using %.3f GB of memory over %d processors...' % (num_gb, num_procs))
    for proc in proc_list:
        proc.start()
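As a usage note, the UseResources interface above builds a command of the form use_resources -g <num_gb> -p <num_procs> from its argstr templates. Below is a minimal sketch, not part of the commit, of invoking the helper directly; the script path is an assumption based on this commit's file layout.

# Minimal sketch: call use_resources outside of nipype, mirroring the
# command line that the UseResources interface constructs.
# The path is assumed relative to a nipype checkout.
import os
import subprocess
import sys

script = os.path.join('nipype', 'interfaces', 'tests', 'use_resources')
subprocess.check_call([sys.executable, script, '-g', '0.5', '-p', '2'])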

0 commit comments