diff --git a/Pipfile b/Pipfile new file mode 100644 index 0000000..5775a17 --- /dev/null +++ b/Pipfile @@ -0,0 +1,14 @@ +[[source]] +url = "https://pypi.org/simple" +verify_ssl = true +name = "pypi" + +[packages] +flask = "*" +requests = "*" +docker = "==5.0.3" + +[dev-packages] + +[requires] +python_version = "3.9" diff --git a/Pipfile.lock b/Pipfile.lock new file mode 100644 index 0000000..e21c8c2 --- /dev/null +++ b/Pipfile.lock @@ -0,0 +1,178 @@ +{ + "_meta": { + "hash": { + "sha256": "e07bf80edfcb694d6ca59718c31c7a2ecc1dc57d9eaa4456c285ffe8947981d3" + }, + "pipfile-spec": 6, + "requires": { + "python_version": "3.9" + }, + "sources": [ + { + "name": "pypi", + "url": "https://pypi.org/simple", + "verify_ssl": true + } + ] + }, + "default": { + "certifi": { + "hashes": [ + "sha256:78884e7c1d4b00ce3cea67b44566851c4343c120abd683433ce934a68ea58872", + "sha256:d62a0163eb4c2344ac042ab2bdf75399a71a2d8c7d47eac2e2ee91b9d6339569" + ], + "version": "==2021.10.8" + }, + "charset-normalizer": { + "hashes": [ + "sha256:2857e29ff0d34db842cd7ca3230549d1a697f96ee6d3fb071cfa6c7393832597", + "sha256:6881edbebdb17b39b4eaaa821b438bf6eddffb4468cf344f09f89def34a8b1df" + ], + "markers": "python_version >= '3'", + "version": "==2.0.12" + }, + "click": { + "hashes": [ + "sha256:24e1a4a9ec5bf6299411369b208c1df2188d9eb8d916302fe6bf03faed227f1e", + "sha256:479707fe14d9ec9a0757618b7a100a0ae4c4e236fac5b7f80ca68028141a1a72" + ], + "markers": "python_version >= '3.7'", + "version": "==8.1.2" + }, + "docker": { + "hashes": [ + "sha256:7a79bb439e3df59d0a72621775d600bc8bc8b422d285824cb37103eab91d1ce0", + "sha256:d916a26b62970e7c2f554110ed6af04c7ccff8e9f81ad17d0d40c75637e227fb" + ], + "index": "pypi", + "version": "==5.0.3" + }, + "flask": { + "hashes": [ + "sha256:8a4cf32d904cf5621db9f0c9fbcd7efabf3003f22a04e4d0ce790c7137ec5264", + "sha256:a8c9bd3e558ec99646d177a9739c41df1ded0629480b4c8d2975412f3c9519c8" + ], + "index": "pypi", + "version": "==2.1.1" + }, + "idna": { + "hashes": [ + 
"sha256:84d9dd047ffa80596e0f246e2eab0b391788b0503584e8945f2368256d2735ff", + "sha256:9d643ff0a55b762d5cdb124b8eaa99c66322e2157b69160bc32796e824360e6d" + ], + "markers": "python_version >= '3'", + "version": "==3.3" + }, + "importlib-metadata": { + "hashes": [ + "sha256:1208431ca90a8cca1a6b8af391bb53c1a2db74e5d1cef6ddced95d4b2062edc6", + "sha256:ea4c597ebf37142f827b8f39299579e31685c31d3a438b59f469406afd0f2539" + ], + "markers": "python_version < '3.10'", + "version": "==4.11.3" + }, + "itsdangerous": { + "hashes": [ + "sha256:2c2349112351b88699d8d4b6b075022c0808887cb7ad10069318a8b0bc88db44", + "sha256:5dbbc68b317e5e42f327f9021763545dc3fc3bfe22e6deb96aaf1fc38874156a" + ], + "markers": "python_version >= '3.7'", + "version": "==2.1.2" + }, + "jinja2": { + "hashes": [ + "sha256:539835f51a74a69f41b848a9645dbdc35b4f20a3b601e2d9a7e22947b15ff119", + "sha256:640bed4bb501cbd17194b3cace1dc2126f5b619cf068a726b98192a0fde74ae9" + ], + "markers": "python_version >= '3.7'", + "version": "==3.1.1" + }, + "markupsafe": { + "hashes": [ + "sha256:0212a68688482dc52b2d45013df70d169f542b7394fc744c02a57374a4207003", + "sha256:089cf3dbf0cd6c100f02945abeb18484bd1ee57a079aefd52cffd17fba910b88", + "sha256:10c1bfff05d95783da83491be968e8fe789263689c02724e0c691933c52994f5", + "sha256:33b74d289bd2f5e527beadcaa3f401e0df0a89927c1559c8566c066fa4248ab7", + "sha256:3799351e2336dc91ea70b034983ee71cf2f9533cdff7c14c90ea126bfd95d65a", + "sha256:3ce11ee3f23f79dbd06fb3d63e2f6af7b12db1d46932fe7bd8afa259a5996603", + "sha256:421be9fbf0ffe9ffd7a378aafebbf6f4602d564d34be190fc19a193232fd12b1", + "sha256:43093fb83d8343aac0b1baa75516da6092f58f41200907ef92448ecab8825135", + "sha256:46d00d6cfecdde84d40e572d63735ef81423ad31184100411e6e3388d405e247", + "sha256:4a33dea2b688b3190ee12bd7cfa29d39c9ed176bda40bfa11099a3ce5d3a7ac6", + "sha256:4b9fe39a2ccc108a4accc2676e77da025ce383c108593d65cc909add5c3bd601", + "sha256:56442863ed2b06d19c37f94d999035e15ee982988920e12a5b4ba29b62ad1f77", + 
"sha256:671cd1187ed5e62818414afe79ed29da836dde67166a9fac6d435873c44fdd02", + "sha256:694deca8d702d5db21ec83983ce0bb4b26a578e71fbdbd4fdcd387daa90e4d5e", + "sha256:6a074d34ee7a5ce3effbc526b7083ec9731bb3cbf921bbe1d3005d4d2bdb3a63", + "sha256:6d0072fea50feec76a4c418096652f2c3238eaa014b2f94aeb1d56a66b41403f", + "sha256:6fbf47b5d3728c6aea2abb0589b5d30459e369baa772e0f37a0320185e87c980", + "sha256:7f91197cc9e48f989d12e4e6fbc46495c446636dfc81b9ccf50bb0ec74b91d4b", + "sha256:86b1f75c4e7c2ac2ccdaec2b9022845dbb81880ca318bb7a0a01fbf7813e3812", + "sha256:8dc1c72a69aa7e082593c4a203dcf94ddb74bb5c8a731e4e1eb68d031e8498ff", + "sha256:8e3dcf21f367459434c18e71b2a9532d96547aef8a871872a5bd69a715c15f96", + "sha256:8e576a51ad59e4bfaac456023a78f6b5e6e7651dcd383bcc3e18d06f9b55d6d1", + "sha256:96e37a3dc86e80bf81758c152fe66dbf60ed5eca3d26305edf01892257049925", + "sha256:97a68e6ada378df82bc9f16b800ab77cbf4b2fada0081794318520138c088e4a", + "sha256:99a2a507ed3ac881b975a2976d59f38c19386d128e7a9a18b7df6fff1fd4c1d6", + "sha256:a49907dd8420c5685cfa064a1335b6754b74541bbb3706c259c02ed65b644b3e", + "sha256:b09bf97215625a311f669476f44b8b318b075847b49316d3e28c08e41a7a573f", + "sha256:b7bd98b796e2b6553da7225aeb61f447f80a1ca64f41d83612e6139ca5213aa4", + "sha256:b87db4360013327109564f0e591bd2a3b318547bcef31b468a92ee504d07ae4f", + "sha256:bcb3ed405ed3222f9904899563d6fc492ff75cce56cba05e32eff40e6acbeaa3", + "sha256:d4306c36ca495956b6d568d276ac11fdd9c30a36f1b6eb928070dc5360b22e1c", + "sha256:d5ee4f386140395a2c818d149221149c54849dfcfcb9f1debfe07a8b8bd63f9a", + "sha256:dda30ba7e87fbbb7eab1ec9f58678558fd9a6b8b853530e176eabd064da81417", + "sha256:e04e26803c9c3851c931eac40c695602c6295b8d432cbe78609649ad9bd2da8a", + "sha256:e1c0b87e09fa55a220f058d1d49d3fb8df88fbfab58558f1198e08c1e1de842a", + "sha256:e72591e9ecd94d7feb70c1cbd7be7b3ebea3f548870aa91e2732960fa4d57a37", + "sha256:e8c843bbcda3a2f1e3c2ab25913c80a3c5376cd00c6e8c4a86a89a28c8dc5452", + "sha256:efc1913fd2ca4f334418481c7e595c00aad186563bbc1ec76067848c7ca0a933", 
+ "sha256:f121a1420d4e173a5d96e47e9a0c0dcff965afdf1626d28de1460815f7c4ee7a", + "sha256:fc7b548b17d238737688817ab67deebb30e8073c95749d55538ed473130ec0c7" + ], + "markers": "python_version >= '3.7'", + "version": "==2.1.1" + }, + "requests": { + "hashes": [ + "sha256:68d7c56fd5a8999887728ef304a6d12edc7be74f1cfa47714fc8b414525c9a61", + "sha256:f22fa1e554c9ddfd16e6e41ac79759e17be9e492b3587efa038054674760e72d" + ], + "index": "pypi", + "version": "==2.27.1" + }, + "urllib3": { + "hashes": [ + "sha256:44ece4d53fb1706f667c9bd1c648f5469a2ec925fcf3a776667042d645472c14", + "sha256:aabaf16477806a5e1dd19aa41f8c2b7950dd3c746362d7e3223dbe6de6ac448e" + ], + "markers": "python_version >= '2.7' and python_version not in '3.0, 3.1, 3.2, 3.3, 3.4' and python_version < '4'", + "version": "==1.26.9" + }, + "websocket-client": { + "hashes": [ + "sha256:50b21db0058f7a953d67cc0445be4b948d7fc196ecbeb8083d68d94628e4abf6", + "sha256:722b171be00f2b90e1d4fb2f2b53146a536ca38db1da8ff49c972a4e1365d0ef" + ], + "markers": "python_version >= '3.7'", + "version": "==1.3.2" + }, + "werkzeug": { + "hashes": [ + "sha256:3c5493ece8268fecdcdc9c0b112211acd006354723b280d643ec732b6d4063d6", + "sha256:f8e89a20aeabbe8a893c24a461d3ee5dad2123b05cc6abd73ceed01d39c3ae74" + ], + "markers": "python_version >= '3.7'", + "version": "==2.1.1" + }, + "zipp": { + "hashes": [ + "sha256:56bf8aadb83c24db6c4b577e13de374ccfb67da2078beba1d037c17980bf43ad", + "sha256:c4f6e5bbf48e74f7a38e7cc5b0480ff42b0ae5178957d564d18932525d5cf099" + ], + "markers": "python_version >= '3.7'", + "version": "==3.8.0" + } + }, + "develop": {} +} diff --git a/README.md b/README.md index d593782..0e98031 100644 --- a/README.md +++ b/README.md @@ -1,66 +1,84 @@ -# Python Systems / Infrastructure Hiring Challenge +# Infra Hiring Challenge -# Introduction +This is my submissions to the Infra Hiring Challenge for the second interview for Datapane. 
-Datapane is an API-driven product for building analytics reports in Python - part of this includes running user's Python scripts and controlling their execution from a central server. This project simulates some of the necessary tasks required in developing such a system. +## Requirements +A docker environment, python3 and pipenv are all needed on the machine running this script. +## Setup -# Task +Use the package manager [pipenv] to install the necessary modules. Make sure you're in the [python-files directory] and run the following commands -For this task we'll be building a simple API server with a few endpoints that reolve around accepting arbitary Python code and running it "securely". +```bash +pipenv shell +pipenv install +``` -The system must run on Linux, and can make use of any client/server technologies of your choice. +Edit the `app.py` file and set the directory you want to save the clients' projects to. This is for both endpoints. +Edit the `python-api.py` file and set the root directory to this project directory. -## Server +## Usage -The API server supports a few endpoints. 
+``` +python3 python-api.py +``` -`/run-file/` +## Testing Uploads of files /run-file +Using any method of your choice such as curl or postman, send a POST request as shown below +```bash +curl --location --request POST 'http://localhost:5000/run-file' \ +--form 'files[]=@"/home/rentan/main.py"' \ +--form 'client="test-client"' +``` -this takes as a payload an uploaded python script containing the Python code to run -`/run-json/` +## Testing Uploads of json blob +```bash +curl --location --request POST 'http://localhost:5000/run-json' \ +--header 'Content-Type: application/json' \ +--data-raw '{ + "script_name" : "main.py", + "client" : "new-company", + "code" : + ["import flask, marshmallow, os, zipp, docker,time","print(\"hello world\")","os.chdir(\"/\")","print(os.getcwd())","print(\"goodbye world\")"] +}' +``` -as per `run-file` above, but takes a JSON blob with a field called `code` containing the Python code to run -### Results +### Output +It should show the output of the script when completed -You may decide if the `/run-*` endpoints blocks and return a status code, or whether to implement an non-blocking model with a separate `/status/` endpoint to query each run. +## Notes +### Security +As security precautions, the image and container being built and ran will have a `python` user to execute the script which has no root privileges. -Either way, the server should listen for commands from client and act upon them - it should always be able to accept new messages. +The docker container is also run using namespaces isolation to prevent them from communicating with each other, as well as having virtual environments. -## Running Code +Each client has their own project directory and container to be used. -You need to be able to run arbitrary Python code in a clean environment - i.e. each invocation should not affect the others. You will need to make decisions around venvs, installed libraries and dependencies, and more. 
+The files being updated by the clients are also passed through a module to make sure the name is secure, as well as the size of the files being uploaded. -## Securing code +There is also an option to limit the memory and cpu usage. -The uploaded Python code needs to be executed as securely as possible and handle code that may be hostile. As such you'll need to provide protections against user code that may attempt to use excess resources, e.g. time, space, cpu, etc. +For the time being only `.py` files are accepted -You can look at any collection of technologies to perform sandboxing, such as systemd slices/scopes, podman, docker, chroots, seccomp filtering, and/or anything else +### Performance +A pre-built custom image called `datapane-py-slim` derived from `python:3.10-slim` was created, which had the necessary python3 modules installed, updated linux security packages, the python user created, and the necessary directories. -## Technologies +The image being used can be changed if performance is not suitable. -- Build systems, tools, and scripts of your choice, e.g. poetry, `setup.py`, docker, etc. -- The system must run on Linux and be simple to setup and run -- Any libraries you may find useful to help your task, we prioritise using existing libraries to accomplish tasks rather than building in-house and/or writing custom code that wouldn't scale to larger use-cases +The `runner` stage which uses this image only needs to switch to the python user, set env variable if needed, copy the clients' files, install the required python packages from those files, and run the main.py script -## Requirements +Execution is fairly fast as most of the time depends on the clients' script. -- You do not need to worry about client/server service discovery - the locations of the systems can be hard-coded, provided as env vars, command-line parameters, etc. 
-- Instructions should be provided on how to build / bundle / start the system -- You should aim to use the latest Python language features, ecosystem, tooling, and libraries where possible - -### Optional Features +### Results +For the time being the results of the scripts are only shown on stdout. It wasn't clear from the task what is needed to be done. -- Defence is depth is a valid strategy, how many of the sandboxing techniques can be combined -- Consider how you would improve this approach and productise it - what issues do you foresee and how would you attempt to solve them -- How would you tackle performance, i.e. delays in spinning up a pod on kubernetes, container startup delays, Python VM startup, reusing / caching files? -- Tests +## Improvements -# Review +Upon further understanding I realised that there could be a `/status` endpoint from which the client could check their results from. This was not implemented in time but in the future a simple client ID and cookie/key could be used. This would make sure clients' couldn't access each other's results. -Please don't spend more than 2-4 hours on this - we're looking to see how you approached the problem and the decisions made rather than a complete solution. This should be a fun challenge rather than a stressful endeavour. +Smaller docker images could be used as currently each invocation takes up 500mb. -There is no right answer as such, we will mainly be looking at code quality, software architecture skills, completeness of the solution from a software engineering perspective, and clarity of thought. +Currently there is no limit to how long the python script can take to execute. -Once completed, please create a PR containing your work, send us an email, and schedule a [second follow-up interview](https://calendar.google.com/calendar/selfsched?sstoken=UU1sbG9QV1hfcHlGfGRlZmF1bHR8ODI1ZjRlZWJlZTY0ZTQ1ZTI4MzNkZThhOGQ5MjZkNzg). 
+With more time better logging could be implemented to identify why an image wasn't built or why a container failed to start such as exceeded the resource limits, failed scripts etc... diff --git a/app.py b/app.py new file mode 100755 index 0000000..78f906e --- /dev/null +++ b/app.py @@ -0,0 +1,10 @@ +from flask import Flask + +# Where the clients' projects are created +UPLOAD_FOLDER = '/home/user/datapane-infra-challenge/' + +app = Flask(__name__) +#app.secret_key = "secret key" +app.config['UPLOAD_FOLDER'] = UPLOAD_FOLDER +# Size of the allowed scripts being uploaded +app.config['MAX_CONTENT_LENGTH'] = 16 * 1024 * 1024 # 16mb diff --git a/default_dir/requirements.txt b/default_dir/requirements.txt new file mode 100644 index 0000000..1e35e33 --- /dev/null +++ b/default_dir/requirements.txt @@ -0,0 +1,20 @@ +certifi==2021.10.8 +charset-normalizer==2.0.12 +click==8.1.2 +docker==5.0.3 +docker-pycreds==0.4.0 +docopt==0.6.2 +Flask==2.1.1 +idna==3.3 +importlib-metadata==4.11.3 +itsdangerous==2.1.2 +Jinja2==3.1.1 +MarkupSafe==2.1.1 +pipreqs==0.4.11 +requests==2.27.1 +six==1.16.0 +urllib3==1.26.9 +websocket-client==1.3.2 +Werkzeug==2.1.1 +yarg==0.1.9 +zipp==3.8.0 diff --git a/python-api.py b/python-api.py new file mode 100755 index 0000000..494a502 --- /dev/null +++ b/python-api.py @@ -0,0 +1,210 @@ +#import required packages +import os, json, docker, shutil, time +from xml.dom.expatbuilder import Namespaces + +#from matplotlib.font_manager import json_load +from app import app +from flask import request, jsonify +from werkzeug.utils import secure_filename #utility which returns a secure version of the filename being uploaded + + +# Types of files allowed +ALLOWED_EXTENSIONS = set(['py']) + +# Lists used by the endpoints +run_json = [] +run_file = [] + +json_parent_directory = app.config['UPLOAD_FOLDER'] # Where the json uploads will be stored +default_dir = "default_dir" # where the requirement files are placed +root_dir = "/home/user/datapane-infra-challenge" # Root 
path of the project +client_directory = None +client_id = 0 + +def allowed_file(filename): + return '.' in filename and filename.rsplit('.', 1)[1].lower() in ALLOWED_EXTENSIONS + +class ContainerException(Exception): + def __init__(self, message='137'): + # Call the base class constructor with the parameters it needs + super(ContainerException, self).__init__(message) + +def run_client_script(client_name, client_dir): + """ + This is the default dockerfile used which will run the users' scripts + It uses the datapane-py-slim image which was custom built from python:3.10-slim + It has the required python packages, non-root user, venv and will install + all the required packages from the user + """ + dockerfile1 = f''' +FROM datapane-py-slim:1.0 AS runner +WORKDIR /usr/app +COPY --chown=python:python . . +USER 999 +# Start app +ENV PYTHONUNBUFFERED=1 +# activate virtual environment +ENV PATH="/usr/app/venv/bin:$PATH" + +RUN pipreqs . --force +RUN pip3 install --no-cache-dir -r requirements.txt +CMD ["python3", "./main.py"]''' + + #print (dockerfile1) + # Save the dockerfile value from above to a Dockerfile on the host machine. 
+ file = open(f"{client_dir}/Dockerfile", "w") + file.writelines(dockerfile1) + file.close() + + # Change to the user's directory + os.chdir(client_dir) + # Connect to the docker env on the host machine + cli = docker.from_env() + #while True: + # try: + # # Build the image to be used by the user, setting resource limits as necessary + # log = cli.images.build(path='./', dockerfile='Dockerfile', tag=f'{client_name}', container_limits={'memory': '60MB'}) + # #log = cli.images.build(path='./', dockerfile='Dockerfile', tag=f'{client_name}') + # for line in log: + # print (line) + # break + # except: + # print("Sorry, your image wasn't created, check if your cpu and memory are within the required limits") + i, log = cli.images.build(path='./', dockerfile='Dockerfile', tag=f'{client_name}', container_limits={'memory': '60MB'}) + for line in log: + print (line) + + cli = docker.from_env() + # Run a container from the image that was just built, with the same resource limits. + log = cli.containers.run(image=f'{client_name}', mem_limit='60mb', auto_remove=True) + #print(log) + +@app.route('/run-file', methods=['POST']) +def upload_file(): + """ + The API endpoint POST request for where files are uploaded + The API request needs the following 2 fields; files[], and client + files[] is a list object where the users can upload multiple files from their project + but the initial script needs to be named main.py for this case. 
+ client is simply the clients' name that they want to use + """ + # Change to the root directory + #os.chdir(root_dir) + os.chdir(os.path.expanduser(root_dir)) + # Check if any files have been uploaded and act accordingly + if 'files[]' not in request.files: + resp = jsonify({'message' : 'No file part in the request'}) + resp.status_code = 400 + return resp + files = request.files.getlist('files[]') # get list of files being uploaded + client_name = (request.form.get('client')) #get client name + + errors = {} + success = False + # Set client_dir to the upload folder and client's name + client_dir = os.path.join(app.config['UPLOAD_FOLDER'], client_name) + isExist = os.path.exists(client_dir) # check if client directory already exists + if not isExist: # if it doesn't exist, create it + print(os.getcwd()) + print(f'./{default_dir}, {client_dir}') + shutil.copytree(default_dir, client_dir) + for file in files: # go through list of files + # Check if filename is allowed + if file and allowed_file(file.filename): + # Returns a secure version of a file name + filename = secure_filename(file.filename) + # Save the client's project path + client_path = os.path.join(client_dir, filename) + file.save(client_path) + success = True + + else: + errors[file.filename] = 'File type is not allowed' + # For testing purposes, check the time it takes to build and run the container needed + start = time.time() + run_client_script(client_name, client_dir) + end = time.time() + print(end-start) + if success and errors: + errors['message'] = 'File(s) successfully uploaded' + resp = jsonify(errors) + resp.status_code = 500 + return resp + if success: + resp = jsonify({'message' : 'Files successfully uploaded'}) + resp.status_code = 201 + return resp + else: + resp = jsonify(errors) + resp.status_code = 500 + return resp + + +@app.route('/run-json', methods=['POST']) +def upload_json(): + """ + The API endpoint POST request for where jsons are sent + The API request needs the following 3 
 fields; script_name, code to be run, and client + script_name is the name of the script to be run, as of this time it needs to be main.py + client is simply the clients' name that they want to use + code is a list which the client needs to make sure is formatted properly, such as + escaping quotes, backslash, tabs etc... + """ + os.chdir(os.path.expanduser(root_dir)) + # Append the existing list with the incoming requests + run_json.append(request.get_json()) + # Convert the incoming python object into a json string + json_list = (json.dumps(run_json, indent=4, sort_keys=True)) + # Convert the above json string into a python dictionary + # Not sure if this is the most efficient way? + json_object = (json.loads(json_list)) + global client_id + + # Starting from 0, each time a new client sends their json it is saved to the json_parsed + # variable and increments the id by 1 + json_parsed = (json_object[client_id]) + client_id+=1 + # Save the value of the keys from the json sent by the client + json_extracted_code = (json_parsed['code']) + client_name = (json_parsed['client']) + json_extracted_script_name = (json_parsed['script_name']) + client_directory = client_name + + # Setting the path of the clients' project directory + path = os.path.join(json_parent_directory, client_directory) + client_directory = os.path.join(app.config['UPLOAD_FOLDER'], client_name) # setting client directory + # check if client directory already exists + isExist = os.path.exists(client_directory) + if not isExist: # if it doesn't exist, create it + print(os.getcwd()) + print(f'./{default_dir}, {client_directory}') + shutil.copytree(default_dir, client_directory) + # Store the extracted code from the json to python_code + python_code = ("\n".join(map(str, json_extracted_code))) + # Get the full path of the script to save the code to + client_path = os.path.join(path, json_extracted_script_name) + textFile = open(client_path, 'w') + # Write the extracted python code to a script + 
textFile.write(python_code) + textFile.close() + # Build and run the image and container respectively, and time the process + # for testing purposes + start = time.time() + run_client_script(client_name, client_directory) + end = time.time() + print(end-start) + + return 'OK', 200 + +# For testing purposes, no authentication is used to GET the json +@app.route('/run-json') +def get_json(): + return jsonify(run_json) + +#@app.route('/run-file') +#def get_file(): +# return jsonify(run_file) + + +if __name__ == "__main__": + app.run()