Skip to content

Commit 055c520

Browse files
committed
Tests working with latest toil.
1 parent 0b517dd commit 055c520

File tree

4 files changed

+52
-84
lines changed

4 files changed

+52
-84
lines changed

.travis.yml

Lines changed: 1 addition & 1 deletion
Original file line numberDiff line numberDiff line change
@@ -3,7 +3,7 @@ python:
33
- '2.7'
44
before_install:
55
- sudo apt-get update -qq
6-
- pip install toil[all]==3.16.0
6+
- virtualenv venv && . venv/bin/activate && git clone https://github.com/DataBiosphere/toil.git && cd toil && make prepare && make develop extras=[all] && cd ..
77
- pip install . --process-dependency-links
88
- pip install -r dev-requirements.txt
99
script:

test/test_integration.py

Lines changed: 24 additions & 41 deletions
Original file line numberDiff line numberDiff line change
@@ -19,12 +19,16 @@ class IntegrationTest(unittest.TestCase):
1919
"""A baseclass that's inherited for use with different cwl backends."""
2020
@classmethod
2121
def setUpClass(cls):
22-
22+
# cwl
2323
cls.cwl_dockstore_url = 'https://dockstore.org:8443/api/ga4gh/v2/tools/quay.io%2Fbriandoconnor%2Fdockstore-tool-md5sum/versions/master/plain-CWL/descriptor/%2FDockstore.cwl'
2424
cls.cwl_local_path = os.path.abspath('testdata/md5sum.cwl')
25-
cls.json_input = "file://" + os.path.abspath('testdata/md5sum.json')
26-
cls.attachments = ['file://' + os.path.abspath('testdata/md5sum.input'),
27-
'file://' + os.path.abspath('testdata/dockstore-tool-md5sum.cwl')]
25+
cls.cwl_json_input = "file://" + os.path.abspath('testdata/md5sum.json')
26+
cls.cwl_attachments = ['file://' + os.path.abspath('testdata/md5sum.input'),
27+
'file://' + os.path.abspath('testdata/dockstore-tool-md5sum.cwl')]
28+
# wdl
29+
cls.wdl_local_path = os.path.abspath('testdata/md5sum.wdl')
30+
cls.wdl_json_input = "file://" + os.path.abspath('testdata/md5sum.wdl.json')
31+
cls.wdl_attachments = ['file://' + os.path.abspath('testdata/md5sum.input')]
2832

2933
def setUp(self):
3034
"""Start a (local) wes-service server to make requests against."""
@@ -40,41 +44,32 @@ def tearDown(self):
4044
time.sleep(3)
4145
except OSError as e:
4246
print(e)
43-
if os.path.exists('workflows'):
44-
shutil.rmtree('workflows')
47+
# if os.path.exists('workflows'):
48+
# shutil.rmtree('workflows')
4549
unittest.TestCase.tearDown(self)
4650

4751
def test_dockstore_md5sum(self):
4852
"""HTTP md5sum cwl (dockstore), run it on the wes-service server, and check for the correct output."""
4953
outfile_path, _ = run_cwl_md5sum(cwl_input=self.cwl_dockstore_url,
50-
json_input=self.json_input,
51-
workflow_attachment=self.attachments)
54+
json_input=self.cwl_json_input,
55+
workflow_attachment=self.cwl_attachments)
5256
self.assertTrue(check_for_file(outfile_path), 'Output file was not found: ' + str(outfile_path))
5357

5458
def test_local_md5sum(self):
5559
"""LOCAL md5sum cwl to the wes-service server, and check for the correct output."""
5660
outfile_path, run_id = run_cwl_md5sum(cwl_input=self.cwl_local_path,
57-
json_input=self.json_input,
58-
workflow_attachment=self.attachments)
61+
json_input=self.cwl_json_input,
62+
workflow_attachment=self.cwl_attachments)
5963
self.assertTrue(check_for_file(outfile_path), 'Output file was not found: ' + str(outfile_path))
6064

61-
def test_multipart_upload(self):
62-
"""LOCAL md5sum cwl to the wes-service server, and check for uploaded file in service."""
63-
outfile_path, run_id = run_cwl_md5sum(cwl_input=self.cwl_local_path,
64-
json_input=self.json_input,
65-
workflow_attachment=self.attachments)
66-
get_response = get_log_request(run_id)["request"]
67-
self.assertTrue(check_for_file(outfile_path), 'Output file was not found: ' + get_response["workflow_attachment"])
68-
self.assertTrue(check_for_file(get_response["workflow_url"][7:]), 'Output file was not found: ' + get_response["workflow_url"][:7])
69-
7065
def test_run_attachments(self):
7166
"""LOCAL md5sum cwl to the wes-service server, check for attachments."""
7267
outfile_path, run_id = run_cwl_md5sum(cwl_input=self.cwl_local_path,
73-
json_input=self.json_input,
74-
workflow_attachment=self.attachments)
68+
json_input=self.cwl_json_input,
69+
workflow_attachment=self.cwl_attachments)
7570
get_response = get_log_request(run_id)["request"]
76-
attachment_tool_path = get_response["workflow_attachment"][7:] + "/dockstore-tool-md5sum.cwl"
7771
self.assertTrue(check_for_file(outfile_path), 'Output file was not found: ' + get_response["workflow_attachment"])
72+
attachment_tool_path = get_response["workflow_attachment"][7:] + "/dockstore-tool-md5sum.cwl"
7873
self.assertTrue(check_for_file(attachment_tool_path), 'Attachment file was not found: ' + get_response["workflow_attachment"])
7974

8075

@@ -90,25 +85,6 @@ def run_cwl_md5sum(cwl_input, json_input, workflow_attachment=None):
9085
return os.path.join(output_dir, 'md5sum.txt'), response['run_id']
9186

9287

93-
def run_wdl_md5sum(wdl_input):
94-
"""Pass a local md5sum wdl to the wes-service server, and return the path of the output file that was created."""
95-
endpoint = 'http://localhost:8080/ga4gh/wes/v1/runs'
96-
params = '{"ga4ghMd5.inputFile": "' + os.path.abspath('testdata/md5sum.input') + '"}'
97-
parts = [("workflow_params", params),
98-
("workflow_type", "WDL"),
99-
("workflow_type_version", "v1.0"),
100-
("workflow_url", wdl_input)]
101-
response = requests.post(endpoint, files=parts).json()
102-
output_dir = os.path.abspath(os.path.join('workflows', response['workflow_id'], 'outdir'))
103-
check_travis_log = os.path.join(output_dir, 'stderr')
104-
with open(check_travis_log, 'r') as f:
105-
logging.info(f.read())
106-
logging.info(subprocess.check_output(['ls', os.path.join('workflows', response['workflow_id'])]))
107-
logging.info('\n')
108-
logging.info(subprocess.check_output(['ls', output_dir]))
109-
return os.path.join(output_dir, 'md5sum.txt'), response['workflow_id']
110-
111-
11288
def get_log_request(run_id):
11389
endpoint = 'http://localhost:8080/ga4gh/wes/v1/runs/{}'.format(run_id)
11490
return requests.get(endpoint).json()
@@ -159,6 +135,13 @@ def setUp(self):
159135
shell=True)
160136
time.sleep(5)
161137

138+
def test_local_wdl(self):
139+
"""LOCAL md5sum wdl to the wes-service server, and check for the correct output."""
140+
outfile_path, run_id = run_cwl_md5sum(cwl_input=self.wdl_local_path,
141+
json_input=self.wdl_json_input,
142+
workflow_attachment=self.wdl_attachments)
143+
self.assertTrue(check_for_file(outfile_path), 'Output file was not found: ' + str(outfile_path))
144+
162145

163146
# Prevent pytest/unittest's discovery from attempting to discover the base test class.
164147
del IntegrationTest

wes_client/util.py

Lines changed: 2 additions & 0 deletions
Original file line numberDiff line numberDiff line change
@@ -49,6 +49,8 @@ def build_wes_request(workflow_file, json_path, attachments=None):
4949
if attachments:
5050
for attachment in attachments:
5151
attachment = attachment[7:] if attachment.startswith("file://") else attachment
52+
if ':' in attachment:
53+
raise TypeError('Only local files supported for attachment: %s' % attachment)
5254
parts.append(("workflow_attachment", (os.path.basename(attachment), open(attachment, "rb"))))
5355

5456
return parts

wes_service/toil_wes.py

Lines changed: 25 additions & 42 deletions
Original file line numberDiff line numberDiff line change
@@ -28,11 +28,12 @@ def __init__(self, run_id):
2828
self.endtime = os.path.join(self.workdir, 'endtime')
2929
self.pidfile = os.path.join(self.workdir, 'pid')
3030
self.cmdfile = os.path.join(self.workdir, 'cmd')
31+
self.jobstorefile = os.path.join(self.workdir, 'jobstore')
3132
self.request_json = os.path.join(self.workdir, 'request.json')
3233
self.output_json = os.path.join(self.workdir, "output.json")
3334
self.input_wf_filename = os.path.join(self.workdir, "wes_workflow.cwl")
3435
self.input_json = os.path.join(self.workdir, "wes_input.json")
35-
self.jobstore_default = os.path.join(self.workdir, 'toiljobstore')
36+
self.jobstore_default = os.path.join(self.workdir, 'file:toiljobstore')
3637
self.jobstore = None
3738

3839
def sort_toil_options(self, extra):
@@ -43,13 +44,18 @@ def sort_toil_options(self, extra):
4344
self.jobstore = e[11:]
4445
if self.jobstore.startswith(('aws', 'google', 'azure')):
4546
cloud = True
46-
if e.startswith('--outdir='):
47+
if e.startswith(('--outdir=', '-o=')):
4748
extra.remove(e)
4849
if not cloud:
4950
extra.append('--outdir=' + self.outdir)
5051
if not self.jobstore:
51-
extra.append('--jobStore=file:' + self.jobstore_default)
52+
extra.append('--jobStore=' + self.jobstore_default)
5253
self.jobstore = self.jobstore_default
54+
55+
# store the jobstore location
56+
with open(self.jobstorefile, 'w') as f:
57+
f.write(self.jobstore)
58+
5359
return extra
5460

5561
def write_workflow(self, request, opts, cwd, wftype='cwl'):
@@ -79,8 +85,8 @@ def write_workflow(self, request, opts, cwd, wftype='cwl'):
7985

8086
def write_json(self, request_dict):
8187
input_json = os.path.join(self.workdir, 'input.json')
82-
with open(input_json, 'w') as inputtemp:
83-
json.dump(request_dict['workflow_params'], inputtemp)
88+
with open(input_json, 'w') as f:
89+
json.dump(request_dict['workflow_params'], f)
8490
return input_json
8591

8692
def call_cmd(self, cmd, cwd):
@@ -128,8 +134,8 @@ def getlog(self):
128134

129135
outputobj = {}
130136
if state == "COMPLETE":
131-
with open(self.output_json, "r") as outputtemp:
132-
outputobj = json.load(outputtemp)
137+
with open(self.output_json, "r") as f:
138+
outputobj = json.load(f)
133139

134140
return {
135141
"run_id": self.run_id,
@@ -209,42 +215,19 @@ def getstate(self):
209215
state = "RUNNING"
210216
exit_code = -1
211217

212-
exitcode_file = os.path.join(self.workdir, "exit_code")
213-
pid_file = os.path.join(self.workdir, "pid")
214-
215-
if os.path.exists(exitcode_file):
216-
with open(exitcode_file) as f:
217-
exit_code = int(f.read())
218-
elif os.path.exists(pid_file):
219-
with open(pid_file, "r") as pid:
220-
pid = int(pid.read())
221-
try:
222-
(_pid, exit_status) = os.waitpid(pid, os.WNOHANG)
223-
if _pid != 0:
224-
exit_code = exit_status >> 8
225-
with open(exitcode_file, "w") as f:
226-
f.write(str(exit_code))
227-
os.unlink(pid_file)
228-
except OSError:
229-
os.unlink(pid_file)
230-
exit_code = 255
231-
232-
if exit_code == 0:
233-
state = "COMPLETE"
234-
elif exit_code != -1:
235-
state = "EXECUTOR_ERROR"
218+
with open(self.jobstorefile, 'r') as f:
219+
self.jobstore = f.read()
236220

237-
# Uncomment once https://github.com/DataBiosphere/toil/pull/2330 is merged
238-
# logs = subprocess.check_output(['toil', 'status', 'file:' + self.jobstore, '--printLogs'])
239-
# if 'ERROR:toil.worker:Exiting' in logs:
240-
# state = "EXECUTOR_ERROR"
241-
# exit_code = 255
242-
# elif 'Root job is absent. The workflow may have completed successfully.' in logs:
243-
# state = "COMPLETE"
244-
# exit_code = 0
245-
# elif 'No job store found.' in logs:
246-
# state = "INITIALIZING"
247-
# exit_code = -1
221+
logs = subprocess.check_output(['toil', 'status', 'file:' + self.jobstore, '--printLogs'])
222+
if 'ERROR:toil.worker:Exiting' in logs:
223+
state = "EXECUTOR_ERROR"
224+
exit_code = 255
225+
elif 'Root job is absent. The workflow may have completed successfully.' in logs:
226+
state = "COMPLETE"
227+
exit_code = 0
228+
elif 'No job store found.' in logs:
229+
state = "INITIALIZING"
230+
exit_code = -1
248231

249232
return state, exit_code
250233

0 commit comments

Comments (0)