
Commit 88d63eb

dianamariand92 authored and alvarolopez committed
Add deepaas-execute command
This commit adds a new deepaas-execute command that allows models to be executed directly from the command line, rather than by spawning a server and making HTTP requests. This is useful for running jobs on HPC and HTC facilities.
1 parent 0706f13 commit 88d63eb
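
As a rough illustration of the workflow described in the commit message, the installed console script could be launched from a Python job wrapper like the sketch below. This is not part of the commit; the file name "image.jpg" and the output directory "results/" are hypothetical placeholders, and only the -i and -o options introduced here are used.

# Minimal sketch (not part of the commit): calling the new command from a
# Python wrapper, e.g. inside an HPC batch job. Paths are placeholders.
import subprocess

subprocess.run(
    ["deepaas-predict", "-i", "image.jpg", "-o", "results/"],
    check=True,  # raise if the prediction command exits with an error
)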


5 files changed: +316 -0 lines changed


deepaas/cmd/execute.py

Lines changed: 158 additions & 0 deletions
@@ -0,0 +1,158 @@
#!/usr/bin/env python
# -*- coding: utf-8 -*-

# Copyright 2020 Spanish National Research Council (CSIC)
#
# Licensed under the Apache License, Version 2.0 (the "License"); you may
# not use this file except in compliance with the License. You may obtain
# a copy of the License at
#
#     http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
# License for the specific language governing permissions and limitations
# under the License.

import mimetypes
import os
import shutil
import sys

from oslo_config import cfg
from oslo_log import log

from deepaas.model import loading
from deepaas.model.v2.wrapper import UploadedFile

cli_opts = [
    cfg.StrOpt('model-name',
               help="""
Add the name of the model from which you want
to obtain the prediction.
If there are multiple models installed and you don't
specify the name of the one you want to use, the program will fail.
If there is only one model installed, that one will be used
to make the prediction.
"""),
    cfg.StrOpt('input-file',
               short="i",
               help="""
Set the local input file to predict.
"""),
    cfg.StrOpt('content-type',
               default='application/json',
               short='ct',
               help="""
Specify the content type of the output file. The selected
option must be available in the model used
(application/json by default).
"""),
    cfg.StrOpt('output',
               short="o",
               help="""
Save the result to a local file.
"""),
    cfg.BoolOpt('url',
                short='u',
                default=False,
                help="""
Use a URL as the input file.
If this option is set to True, the URL
of an image can be used as the input file.
"""),
]

CONF = cfg.CONF
CONF.register_cli_opts(cli_opts)

LOG = log.getLogger(__name__)


# Load the installed model
def get_model_name():
    model_name = CONF.model_name
    models = loading.get_available_models("v2")
    if model_name:
        model_obj = models.get(model_name)
        if model_obj is None:
            sys.stderr.write(
                "ERROR: The model {} is not available.\n".format(model_name))
            sys.exit(1)
        return model_name, model_obj
    elif len(models) == 1:
        return models.popitem()
    else:
        sys.stderr.write(
            'ERROR: There are several models available ({}); '
            'you have to choose one.\n'.format(list(models.keys())))
        sys.exit(1)


def prediction(input_file, file_type, content_type):
    model_name, model_obj = get_model_name()
    predict_data = model_obj.predict_data
    predict_url = model_obj.predict_url

    if file_type is True:
        # URL input: pass the URL straight to the model
        input_data = {'urls': [input_file], 'accept': content_type}
        output_pred = predict_url(input_data)
    else:
        # Local file: wrap it so the model receives it as an upload
        content_type_in, file_encoding = mimetypes.guess_type(input_file)
        file = UploadedFile(name=input_file,
                            filename=input_file,
                            content_type=content_type_in)
        input_data = {'files': [file], 'accept': content_type}
        output_pred = predict_data(input_data)

    return output_pred


def main():
    cfg.CONF(sys.argv[1:])
    input_file = CONF.input_file
    content_type = CONF.content_type
    file_type = CONF.url
    output = CONF.output

    # Check required arguments
    if input_file is None:
        sys.stderr.write(
            "ERROR: Option input_file is required.\n")
        sys.exit(1)

    if output is None:
        sys.stderr.write(
            "ERROR: Option output is required.\n")
        sys.exit(1)

    output_pred = prediction(input_file, file_type, content_type)
    extension = mimetypes.guess_extension(content_type)
    if extension is None or output_pred is None:
        sys.stderr.write(
            "ERROR: Content type {} not valid.\n".format(content_type))
        sys.exit(1)
    if extension == ".json":
        name_image = os.path.splitext(os.path.basename(input_file))[0]
        out_file_name = "out_" + name_image
        f = open(out_file_name + ".json", "w+")
        f.write(repr(output_pred) + '\n')
        f.close()
        if not os.path.exists(output):  # Create the path if it does not exist
            os.makedirs(output)
        dir_name = os.path.join(output, f.name)
        shutil.move(f.name, os.path.join(output, f.name))
    else:
        output_path_image = output_pred.name
        dir_name = os.path.join(output, os.path.basename(output_path_image))
        if not os.path.exists(output):  # Create the path if it does not exist
            os.makedirs(output)
        shutil.copy(output_path_image, output)

    print("Output saved at {}".format(dir_name))


if __name__ == "__main__":
    main()
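
To make the routing in prediction() above concrete, the sketch below drives the same helpers programmatically instead of through the CLI. It is only a sketch: the model name "demo_model" and the input paths are hypothetical, while the dictionary keys ('files', 'urls', 'accept') and the UploadedFile wrapper mirror what prediction() uses.

# Sketch only: exercising the helpers behind deepaas-predict without the CLI.
# "demo_model" and the file names are hypothetical placeholders.
from deepaas.model import loading
from deepaas.model.v2.wrapper import UploadedFile

models = loading.get_available_models("v2")   # models registered via deepaas.v2.models
model = models["demo_model"]                  # hypothetical model name

# Local file: wrap it in UploadedFile and call predict_data(), as prediction() does.
upload = UploadedFile(name="cat.jpg", filename="cat.jpg",
                      content_type="image/jpeg")
result = model.predict_data({"files": [upload], "accept": "application/json"})

# URL input: pass the URL list to predict_url() instead (the --url code path).
result_from_url = model.predict_url(
    {"urls": ["https://example.org/cat.jpg"], "accept": "application/json"})

print(result, result_from_url)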

deepaas/tests/test_cmd.py

Lines changed: 59 additions & 0 deletions
@@ -15,10 +15,13 @@
 # under the License.

 import os
+import shutil
 import sys
+import urllib.request

 import mock

+from deepaas.cmd import execute
 from deepaas.cmd import run
 from deepaas.cmd import wsk
 from deepaas.tests import base
@@ -101,3 +104,59 @@ def test_run(self, m_proxy_main, m_handle_signals):
         wsk.main()
         m_proxy_main.assert_called_once()
         m_handle_signals.assert_called_once()
+
+
+class TestExecute(base.TestCase):
+    @mock.patch("deepaas.cmd.execute.prediction")
+    def test_execute_data(self, m_out_pred):
+        in_file = "file"
+        out_file = "deepaas/tests/out_test/"
+        self.flags(input_file=in_file)
+        self.flags(output=out_file)
+        m_out_pred.return_value = [{
+            'value1': {'pred': 1}, 'value2': {'pred': 0.9}}]
+        with mock.patch.object(sys, 'argv', ["deepaas-predict"]):
+            execute.main()
+
+    @mock.patch("deepaas.cmd.execute.prediction")
+    def test_execute_url(self, m_out_pred):
+        in_file = "https://xxxxxxxxxx"
+        out_file = "deepaas/tests/out_test/"
+        self.flags(input_file=in_file)
+        self.flags(output=out_file)
+        self.flags(url=True)
+        m_out_pred.return_value = [{
+            'value1': {'pred': 1}, 'value2': {'pred': 0.9}}]
+        with mock.patch.object(sys, 'argv', ["deepaas-predict"]):
+            execute.main()
+
+    @mock.patch("deepaas.cmd.execute.prediction")
+    def test_execute_ct(self, m_out_pred):
+        in_file = "file"
+        out_file = "deepaas/tests/out_test/"
+        cont_type = "application/zip"
+        self.flags(input_file=in_file)
+        self.flags(output=out_file)
+        self.flags(content_type=cont_type)
+        output_dir = "deepaas/tests/tmp_dir"
+        if os.path.exists(output_dir):
+            shutil.rmtree(output_dir)
+        os.mkdir(output_dir)
+        out_json = [{
+            'value1': {'pred': 1}, 'value2': {'pred': 0.9}}]
+        f_json = open(output_dir + 'output' + ".json", "w+")
+        f_json.write(repr(out_json) + '\n')
+        f_json.close()
+        url = ('https://storage.googleapis.com/'
+               'tfjs-models/assets/posenet/frisbee.jpg')
+        urllib.request.urlretrieve(url, "example_image.jpg")
+        shutil.move(f_json.name, output_dir)
+        shutil.move("example_image.jpg", output_dir)
+        f = shutil.make_archive(base_name=output_dir,
+                                format='zip',
+                                root_dir=output_dir)
+        m_out_pred.return_value = open(f, 'rb')
+        with mock.patch.object(sys, 'argv', ["deepaas-predict"]):
+            execute.main()
+        shutil.rmtree(output_dir)
+        os.remove(output_dir + ".zip")

doc/source/cli/deepaas-predict.rst

Lines changed: 62 additions & 0 deletions
@@ -0,0 +1,62 @@
===============
deepaas-predict
===============

Synopsis
========

:program:`deepaas-predict` [options]

Description
===========

:program:`deepaas-predict` is a command that allows you to obtain, through
the command line, the prediction for a local file or for the URL of a file,
using the models that are loaded through the ``deepaas.v2.models``
entrypoint API.

Options
=======

.. option:: --input-file INPUT_FILE, -i INPUT_FILE

   Set the local input file to predict. This option is required.

.. option:: --content-type CONTENT_TYPE, -ct CONTENT_TYPE

   Specify the content type of the output file. The selected
   option must be available in the model used
   (application/json by default).

.. option:: --model-name MODEL_NAME

   Add the name of the model from which you want
   to obtain the prediction.
   If there are multiple models installed and you don't
   specify the name of the one you want to use, the program will fail.
   If there is only one model installed, that one will be used
   to make the prediction.

.. option:: --output OUTPUT_DIR, -o OUTPUT_DIR

   Save the result to a local file. This option is required.

.. option:: --url, -u

   If set to true, the input file is the URL of a file to predict.

Files
=====

None

See Also
========

Documentation: `DEEPaaS API <https://docs.deep-hybrid-datacloud.eu/projects/deepaas/>`_

Reporting Bugs
==============

Bugs are managed at `GitHub <https://github.com/indigo-dc/deepaas>`_

doc/source/user/predict.rst

Lines changed: 36 additions & 0 deletions
@@ -0,0 +1,36 @@
.. _predict:

DEEPaaS API as a command line action
====================================

Support for executing DEEPaaS from the command line. In your Dockerfile,
you must ensure that you execute ``deepaas-predict`` in one of the
following ways:

Basic form of execution:

.. code-block:: console

   (...)
   CMD ["sh", "-c", "deepaas-predict -i INPUT_FILE -o OUTPUT_DIR"]

Execution specifying a content type option:

.. code-block:: console

   (...)
   CMD ["sh", "-c", "deepaas-predict -i INPUT_FILE -ct CONTENT_TYPE -o OUTPUT_DIR"]

Execution specifying a URL as the input file:

.. code-block:: console

   (...)
   CMD ["sh", "-c", "deepaas-predict -i INPUT_FILE --url -o OUTPUT_DIR"]

Execution specifying the name of a model:

.. code-block:: console

   (...)
   CMD ["sh", "-c", "deepaas-predict -i INPUT_FILE --model-name MODEL_NAME -o OUTPUT_DIR"]

setup.cfg

Lines changed: 1 addition & 0 deletions
@@ -47,6 +47,7 @@ packages =
 console_scripts =
     deepaas-run = deepaas.cmd.run:main
     deepaas-wsk = deepaas.cmd.wsk:main
+    deepaas-predict = deepaas.cmd.execute:main

 oslo.config.opts =
     deepaas = deepaas.opts:list_opts
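
For reference, the deepaas-predict console script registered above simply calls deepaas.cmd.execute:main, so it is roughly equivalent to the sketch below (the same pattern the tests use by patching sys.argv); the argument values are hypothetical placeholders.

# Sketch: what the installed deepaas-predict wrapper effectively does.
import sys

from deepaas.cmd import execute

sys.argv = ["deepaas-predict", "-i", "image.jpg", "-o", "results/"]  # placeholders
execute.main()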
