diff --git a/.circleci/README.md b/.circleci/README.md
deleted file mode 100644
index d37c13a27..000000000
--- a/.circleci/README.md
+++ /dev/null
@@ -1,79 +0,0 @@
-# Multi Model Server CircleCI build
-Model Server uses CircleCI for builds. This folder contains the config and scripts that are needed for CircleCI.
-
-## config.yml
-_config.yml_ contains MMS build logic which will be used by CircleCI.
-
-## Workflows and Jobs
-Currently, the following _workflows_ are available
-
-1. smoke
-2. nightly
-3. weekly
-
-The following _jobs_ are executed under each workflow
-
-1. **build** : Builds _frontend/model-server.jar_ and executes tests from gradle
-2. **modelarchiver** : Builds and tests the modelarchiver module
-3. **python-tests** : Executes pytests from _mms/tests/unit_tests/_
-4. **benchmark** : Executes the latency benchmark using the resnet-18 model
-5. (NEW!) **api-tests** : Executes the newman test suite for API testing
-
-The following _executors_ are available for job execution
-
-1. py27
-2. py36
-
-> Please check the _workflows_, _jobs_ and _executors_ sections in _config.yml_ for an up-to-date list
-
-## scripts
-Instead of using inline commands inside _config.yml_, job steps are configured as shell scripts.
-This is easier to maintain and reduces the chance of errors in config.yml
-
-## images
-MMS uses customized docker images for its CircleCI builds.
-To make sure MMS is compatible with both Python2 and Python3, we use two build projects.
-We have published two docker images on Docker Hub for the code build
-* prashantsail/mms-build:python2.7
-* prashantsail/mms-build:python3.6
-
-The following files in the _images_ folder are used to create the docker images
-* Dockerfile.python2.7 - Dockerfile for prashantsail/mms-build:python2.7
-* Dockerfile.python3.6 - Dockerfile for prashantsail/mms-build:python3.6
-
-## Local CircleCI cli
-To make it easy for developers to debug build issues locally, MMS supports CircleCI cli for running a job in a container on your machine.
-
-#### Dependencies
-1. CircleCI cli ([Quick Install](https://circleci.com/docs/2.0/local-cli/#quick-installation))
-2. PyYAML (pip install PyYaml)
-3. docker (installed and running)
-
-#### Command
-Developers can use the following command to run a CircleCI workflow locally:
-`./run_circleci_tests.py <workflow_name> [-j <job_name>] [-e <executor_name>]`
-
-- _workflow_name_
-This is a mandatory parameter
-
-- _-j, --job job_name_
-If specified, executes only the specified job name (along with the required parents).
-If not specified, all jobs in the workflow are executed sequentially.
-
-- _-e, --executor executor_name_
-If specified, the job is executed only on the specified executor (docker image).
-If not specified, the job is executed on all the available executors.
-
-```bash
-$ cd multi-model-server
-$ ./run_circleci_tests.py smoke
-$ ./run_circleci_tests.py smoke -j modelarchiver
-$ ./run_circleci_tests.py smoke -e py36
-$ ./run_circleci_tests.py smoke -j modelarchiver -e py36
-```
-
-###### Checklist
-> 1. Make sure docker is running before you start local execution.
-> 2. Docker containers should have **at least 4GB RAM, 2 CPUs**.
-> 3. If you are on a network with low bandwidth, we advise you to explicitly pull the docker images
->    docker pull prashantsail/mms-build:python2.7
->    docker pull prashantsail/mms-build:python3.6
-
-`To avoid Pull Request build failures on GitHub, developers should always make sure that their local builds pass.`
diff --git a/.circleci/config.yml b/.circleci/config.yml
deleted file mode 100644
index c8022a037..000000000
--- a/.circleci/config.yml
+++ /dev/null
@@ -1,189 +0,0 @@
-version: 2.1
-
-
-executors:
-  py36:
-    docker:
-      - image: prashantsail/mms-build:python3.6
-    environment:
-      _JAVA_OPTIONS: "-Xmx2048m"
-
-  py27:
-    docker:
-      - image: prashantsail/mms-build:python2.7
-    environment:
-      _JAVA_OPTIONS: "-Xmx2048m"
-
-
-commands:
-  attach-mms-workspace:
-    description: "Attach the MMS directory which was saved into workspace"
-    steps:
-      - attach_workspace:
-          at: .
-
-  install-mms-server:
-    description: "Install MMS server from a wheel"
-    steps:
-      - run:
-          name: Install MMS
-          command: pip install dist/*.whl
-
-  exeucute-api-tests:
-    description: "Execute API tests from a collection"
-    parameters:
-      collection:
-        type: enum
-        enum: [management, inference, https]
-        default: management
-    steps:
-      - run:
-          name: Start MMS, Execute << parameters.collection >> API Tests, Stop MMS
-          command: .circleci/scripts/linux_test_api.sh << parameters.collection >>
-      - store_artifacts:
-          name: Store server logs from << parameters.collection >> API tests
-          path: mms_<< parameters.collection >>.log
-      - store_artifacts:
-          name: Store << parameters.collection >> API test results
-          path: test/<< parameters.collection >>-api-report.html
-
-
-jobs:
-  build:
-    parameters:
-      executor:
-        type: executor
-    executor: << parameters.executor >>
-    steps:
-      - checkout
-      - run:
-          name: Build frontend
-          command: .circleci/scripts/linux_build.sh
-      - store_artifacts:
-          name: Store gradle testng results
-          path: frontend/server/build/reports/tests/test
-      - persist_to_workspace:
-          root: .
-          paths:
-            - .
-
-  python-tests:
-    parameters:
-      executor:
-        type: executor
-    executor: << parameters.executor >>
-    steps:
-      - attach-mms-workspace
-      - run:
-          name: Execute python unit tests
-          command: .circleci/scripts/linux_test_python.sh
-      - store_artifacts:
-          name: Store python Test results
-          path: htmlcov
-
-  api-tests:
-    parameters:
-      executor:
-        type: executor
-    executor: << parameters.executor >>
-    steps:
-      - attach-mms-workspace
-      - install-mms-server
-      - exeucute-api-tests:
-          collection: management
-      - exeucute-api-tests:
-          collection: inference
-      - exeucute-api-tests:
-          collection: https
-
-  benchmark:
-    parameters:
-      executor:
-        type: executor
-    executor: << parameters.executor >>
-    steps:
-      - attach-mms-workspace
-      - install-mms-server
-      - run:
-          name: Start MMS, Execute benchmark tests, Stop MMS
-          command: .circleci/scripts/linux_test_benchmark.sh
-      - store_artifacts:
-          name: Store server logs from benchmark tests
-          path: mms.log
-      - store_artifacts:
-          name: Store Benchmark Latency resnet-18 results
-          path: /tmp/MMSBenchmark/out/latency/resnet-18/report/
-          destination: benchmark-latency-resnet-18
-
-  modelarchiver:
-    parameters:
-      executor:
-        type: executor
-    executor: << parameters.executor >>
-    steps:
-      - checkout
-      - run:
-          name: Execute lint, unit and integration tests
-          command: .circleci/scripts/linux_test_modelarchiver.sh
-      - store_artifacts:
-          name: Store unit tests results from model archiver tests
-          path: model-archiver/results_units
-          destination: units
-
-
-workflows:
-  version: 2
-
-  smoke:
-    jobs:
-      - &build
-        build:
-          name: build-<< matrix.executor >>
-          matrix: &matrix
-            parameters:
-              executor: ["py27", "py36"]
-      - &modelarchiver
-        modelarchiver:
-          name: modelarchiver-<< matrix.executor >>
-          matrix: *matrix
-      - &python-tests
-        python-tests:
-          name: python-tests-<< matrix.executor >>
-          requires:
-            - build-<< matrix.executor >>
-          matrix: *matrix
-
-  nightly:
-    triggers:
-      - schedule:
-          cron: "0 0 * * *"
-          filters:
-            branches:
-              only:
-                - master
-    jobs:
-      - *build
-      - *modelarchiver
-      - *python-tests
-      - &api-tests
-        api-tests:
-          name: api-tests-<< matrix.executor >>
-          requires:
-            - build-<< matrix.executor >>
-          matrix: *matrix
-
-  weekly:
-    triggers:
-      - schedule:
-          cron: "0 0 * * 0"
-          filters:
-            branches:
-              only:
-                - master
-    jobs:
-      - *build
-      - benchmark:
-          name: benchmark-<< matrix.executor >>
-          requires:
-            - build-<< matrix.executor >>
-          matrix: *matrix
diff --git a/.circleci/images/Dockerfile.python2.7 b/.circleci/images/Dockerfile.python2.7
deleted file mode 100644
index a7bf0d2cd..000000000
--- a/.circleci/images/Dockerfile.python2.7
+++ /dev/null
@@ -1,61 +0,0 @@
-# Copyright 2018 Amazon.com, Inc. or its affiliates. All Rights Reserved.
-# Licensed under the Apache License, Version 2.0 (the "License").
-# You may not use this file except in compliance with the License.
-# A copy of the License is located at
-# http://www.apache.org/licenses/LICENSE-2.0
-# or in the "license" file accompanying this file. This file is distributed
-# on an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either
-# express or implied. See the License for the specific language governing
-# permissions and limitations under the License.
-#
-
-FROM awsdeeplearningteam/mms-build:python2.7@sha256:2b743d6724dead806873cce1330f7b8a0197399a35af47dfd7035251fdade122
-
-# 2020 - Updated Build and Test dependencies
-
-# Python packages for MMS Server
-RUN pip install psutil \
-    && pip install future \
-    && pip install Pillow \
-    && pip install wheel \
-    && pip install twine \
-    && pip install requests \
-    && pip install mock \
-    && pip install numpy \
-    && pip install Image \
-    && pip install mxnet==1.5.0 \
-    && pip install enum34
-
-# Python packages for pytests
-RUN pip install pytest==4.0.0 \
-    && pip install pytest-cov \
-    && pip install pytest-mock
-
-# Python packages for benchmark
-RUN pip install pandas
-
-# Install NodeJS and packages for API tests
-RUN curl -sL https://deb.nodesource.com/setup_14.x | sudo -E bash - \
-    && sudo apt-get install -y nodejs \
-    && sudo npm install -g newman newman-reporter-html
-
-# Install jmeter for benchmark
-# ToDo: Remove --no-check-certificate; temporarily added to bypass jmeter-plugins.org's expired certificate
-RUN cd /opt \
-    && wget https://archive.apache.org/dist/jmeter/binaries/apache-jmeter-5.3.tgz \
-    && tar -xzf apache-jmeter-5.3.tgz \
-    && cd apache-jmeter-5.3 \
-    && ln -s /opt/apache-jmeter-5.3/bin/jmeter /usr/local/bin/jmeter \
-    && wget --no-check-certificate https://jmeter-plugins.org/get/ -O lib/ext/jmeter-plugins-manager-1.4.jar \
-    && wget http://search.maven.org/remotecontent?filepath=kg/apc/cmdrunner/2.2/cmdrunner-2.2.jar -O lib/cmdrunner-2.2.jar \
-    && java -cp lib/ext/jmeter-plugins-manager-1.4.jar org.jmeterplugins.repository.PluginManagerCMDInstaller \
-    && bin/PluginsManagerCMD.sh install jpgc-synthesis=2.1,jpgc-filterresults=2.1,jpgc-mergeresults=2.1,jpgc-cmd=2.1,jpgc-perfmon=2.1
-
-# bzt is used for performance regression test suite
-# bzt requires python 3.6 runtime.
-# Download pyenv, use pyenv to download python 3.6.5.
-# The downloaded python 3.6.5 is isolated and doesn't interfere with default python(2.7)
-# Only before starting the performance regression suite, py 3.6.5 is locally installed (pyenv local 3.6.5) in the test dir
-# !! MMS server will continue using Python 2.7 !!
-RUN curl https://pyenv.run | bash \
-    && $HOME/.pyenv/bin/pyenv install 3.6.5
\ No newline at end of file
diff --git a/.circleci/images/Dockerfile.python3.6 b/.circleci/images/Dockerfile.python3.6
deleted file mode 100644
index bcd3b980d..000000000
--- a/.circleci/images/Dockerfile.python3.6
+++ /dev/null
@@ -1,51 +0,0 @@
-# Copyright 2018 Amazon.com, Inc. or its affiliates. All Rights Reserved.
-# Licensed under the Apache License, Version 2.0 (the "License").
-# You may not use this file except in compliance with the License.
-# A copy of the License is located at
-# http://www.apache.org/licenses/LICENSE-2.0
-# or in the "license" file accompanying this file. This file is distributed
-# on an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either
-# express or implied. See the License for the specific language governing
-# permissions and limitations under the License.
-#
-
-FROM awsdeeplearningteam/mms-build:python3.6@sha256:2c1afa8834907ceec641d254dffbf4bcc659ca2d00fd6f2872d7521f32c9fa2e
-
-# 2020 - Updated Build and Test dependencies
-
-# Python packages for MMS Server
-RUN pip install psutil \
-    && pip install future \
-    && pip install Pillow \
-    && pip install wheel \
-    && pip install twine \
-    && pip install requests \
-    && pip install mock \
-    && pip install numpy \
-    && pip install Image \
-    && pip install mxnet==1.5.0
-
-# Python packages for pytests
-RUN pip install pytest==4.0.0 \
-    && pip install pytest-cov \
-    && pip install pytest-mock
-
-# Python packages for benchmark
-RUN pip install pandas
-
-# Install NodeJS and packages for API tests
-RUN curl -sL https://deb.nodesource.com/setup_14.x | sudo -E bash - \
-    && sudo apt-get install -y nodejs \
-    && sudo npm install -g newman newman-reporter-html
-
-# Install jmeter for benchmark
-# ToDo: Remove --no-check-certificate; temporarily added to bypass jmeter-plugins.org's expired certificate
-RUN cd /opt \
-    && wget https://archive.apache.org/dist/jmeter/binaries/apache-jmeter-5.3.tgz \
-    && tar -xzf apache-jmeter-5.3.tgz \
-    && cd apache-jmeter-5.3 \
-    && ln -s /opt/apache-jmeter-5.3/bin/jmeter /usr/local/bin/jmeter \
-    && wget --no-check-certificate https://jmeter-plugins.org/get/ -O lib/ext/jmeter-plugins-manager-1.4.jar \
-    && wget http://search.maven.org/remotecontent?filepath=kg/apc/cmdrunner/2.2/cmdrunner-2.2.jar -O lib/cmdrunner-2.2.jar \
-    && java -cp lib/ext/jmeter-plugins-manager-1.4.jar org.jmeterplugins.repository.PluginManagerCMDInstaller \
-    && bin/PluginsManagerCMD.sh install jpgc-synthesis=2.1,jpgc-filterresults=2.1,jpgc-mergeresults=2.1,jpgc-cmd=2.1,jpgc-perfmon=2.1
\ No newline at end of file
diff --git a/.circleci/scripts/linux_build.sh b/.circleci/scripts/linux_build.sh
deleted file mode 100755
index 019e629f9..000000000
--- a/.circleci/scripts/linux_build.sh
+++ /dev/null
@@ -1,3 +0,0 @@
-#!/bin/bash
-
-python setup.py bdist_wheel --universal
\ No newline at end of file
diff --git a/.circleci/scripts/linux_test_api.sh b/.circleci/scripts/linux_test_api.sh
deleted file mode 100755
index 23f908abb..000000000
--- a/.circleci/scripts/linux_test_api.sh
+++ /dev/null
@@ -1,77 +0,0 @@
-#!/bin/bash
-
-MODEL_STORE_DIR='test/model_store'
-
-MMS_LOG_FILE_MANAGEMENT='mms_management.log'
-MMS_LOG_FILE_INFERENCE='mms_inference.log'
-MMS_LOG_FILE_HTTPS='mms_https.log'
-MMS_CONFIG_FILE_HTTPS='test/resources/config.properties'
-
-POSTMAN_ENV_FILE='test/postman/environment.json'
-POSTMAN_COLLECTION_MANAGEMENT='test/postman/management_api_test_collection.json'
-POSTMAN_COLLECTION_INFERENCE='test/postman/inference_api_test_collection.json'
-POSTMAN_COLLECTION_HTTPS='test/postman/https_test_collection.json'
-POSTMAN_DATA_FILE_INFERENCE='test/postman/inference_data.json'
-
-REPORT_FILE_MANAGEMENT='test/management-api-report.html'
-REPORT_FILE_INFERENCE='test/inference-api-report.html'
-REPORT_FILE_HTTPS='test/https-api-report.html'
-
-start_mms_server() {
-    multi-model-server --start --model-store $1 >> $2 2>&1
-    sleep 10
-}
-
-start_mms_secure_server() {
-    multi-model-server --start --mms-config $MMS_CONFIG_FILE_HTTPS --model-store $1 >> $2 2>&1
-    sleep 10
-}
-
-stop_mms_server() {
-    multi-model-server --stop
-}
-
-trigger_management_tests(){
-    start_mms_server $MODEL_STORE_DIR $MMS_LOG_FILE_MANAGEMENT
-    newman run -e $POSTMAN_ENV_FILE $POSTMAN_COLLECTION_MANAGEMENT \
-        -r cli,html --reporter-html-export $REPORT_FILE_MANAGEMENT --verbose
-    stop_mms_server
-}
-
-trigger_inference_tests(){
-    start_mms_server $MODEL_STORE_DIR $MMS_LOG_FILE_INFERENCE
-    newman run -e $POSTMAN_ENV_FILE $POSTMAN_COLLECTION_INFERENCE -d $POSTMAN_DATA_FILE_INFERENCE \
-        -r cli,html --reporter-html-export $REPORT_FILE_INFERENCE --verbose
-    stop_mms_server
-}
-
-trigger_https_tests(){
-    start_mms_secure_server $MODEL_STORE_DIR $MMS_LOG_FILE_HTTPS
-    newman run --insecure -e $POSTMAN_ENV_FILE $POSTMAN_COLLECTION_HTTPS \
-        -r cli,html --reporter-html-export $REPORT_FILE_HTTPS --verbose
-    stop_mms_server
-}
-
-mkdir -p $MODEL_STORE_DIR
-
-case $1 in
-    'management')
-        trigger_management_tests
-        ;;
-    'inference')
-        trigger_inference_tests
-        ;;
-    'https')
-        trigger_https_tests
-        ;;
-    'ALL')
-        trigger_management_tests
-        trigger_inference_tests
-        trigger_https_tests
-        ;;
-    *)
-        echo $1 'Invalid'
-        echo 'Please specify any one of - management | inference | https | ALL'
-        exit 1
-        ;;
-esac
\ No newline at end of file
diff --git a/.circleci/scripts/linux_test_benchmark.sh b/.circleci/scripts/linux_test_benchmark.sh
deleted file mode 100755
index 73c66f58e..000000000
--- a/.circleci/scripts/linux_test_benchmark.sh
+++ /dev/null
@@ -1,14 +0,0 @@
-#!/bin/bash
-
-# Hack needed to make it work with existing benchmark.py
-# benchmark.py expects jmeter to be present at a very specific location
-mkdir -p /home/ubuntu/.linuxbrew/Cellar/jmeter/5.3/libexec/bin/
-ln -s /opt/apache-jmeter-5.3/bin/jmeter /home/ubuntu/.linuxbrew/Cellar/jmeter/5.3/libexec/bin/jmeter
-
-multi-model-server --start >> mms.log 2>&1
-sleep 30
-
-cd benchmarks
-python benchmark.py latency
-
-multi-model-server --stop
\ No newline at end of file
diff --git a/.circleci/scripts/linux_test_modelarchiver.sh b/.circleci/scripts/linux_test_modelarchiver.sh
deleted file mode 100755
index bdae61a65..000000000
--- a/.circleci/scripts/linux_test_modelarchiver.sh
+++ /dev/null
@@ -1,17 +0,0 @@
-#!/bin/bash
-
-cd model-archiver/
-
-# Lint test
-pylint -rn --rcfile=./model_archiver/tests/pylintrc model_archiver/.
-
-# Execute python unit tests
-python -m pytest --cov-report html:results_units --cov=./ model_archiver/tests/unit_tests
-
-
-# Install model archiver module
-pip install .
-
-# Execute integration tests
-python -m pytest model_archiver/tests/integ_tests
-# ToDo - Report for Integration tests ?
\ No newline at end of file
diff --git a/.circleci/scripts/linux_test_perf_regression.sh b/.circleci/scripts/linux_test_perf_regression.sh
deleted file mode 100755
index 6d3f80764..000000000
--- a/.circleci/scripts/linux_test_perf_regression.sh
+++ /dev/null
@@ -1,28 +0,0 @@
-#!/bin/bash
-
-multi-model-server --start \
-    --models squeezenet=https://s3.amazonaws.com/model-server/model_archive_1.0/squeezenet_v1.1.mar \
-    >> mms.log 2>&1
-sleep 90
-
-cd performance_regression
-
-# Only on a python 2 environment -
-PY_MAJOR_VER=$(python -c 'import sys; major = sys.version_info.major; print(major);')
-if [ $PY_MAJOR_VER -eq 2 ]; then
-    # Hack to use python 3.6.5 for bzt installation and execution
-    export PATH="/root/.pyenv/bin:/root/.pyenv/shims:$PATH"
-    pyenv local 3.6.5
-fi
-
-# Install dependencies
-pip install bzt
-
-curl -O https://s3.amazonaws.com/model-server/inputs/kitten.jpg
-bzt -o modules.jmeter.path=/opt/apache-jmeter-5.3/bin/jmeter \
-    -o settings.artifacts-dir=/tmp/mms-performance-regression/ \
-    -o modules.console.disable=true \
-    imageInputModelPlan.jmx.yaml \
-    -report
-
-multi-model-server --stop
\ No newline at end of file
diff --git a/.circleci/scripts/linux_test_python.sh b/.circleci/scripts/linux_test_python.sh
deleted file mode 100755
index 9af6d1e5b..000000000
--- a/.circleci/scripts/linux_test_python.sh
+++ /dev/null
@@ -1,7 +0,0 @@
-#!/bin/bash
-
-# Lint Test
-pylint -rn --rcfile=./mms/tests/pylintrc mms/.
-
-# Execute python tests
-python -m pytest --cov-report html:htmlcov --cov=mms/ mms/tests/unit_tests/
\ No newline at end of file
diff --git a/run_circleci_tests.py b/run_circleci_tests.py
deleted file mode 100755
index 5001c0006..000000000
--- a/run_circleci_tests.py
+++ /dev/null
@@ -1,168 +0,0 @@
-#!/usr/bin/env python
-"""
-- This script helps to execute circleci jobs in a container on a developer's local machine.
-- The script accepts workflow (mandatory), job (optional) and executor (optional) arguments.
-- The script uses the circleci cli's process command to generate a processed yaml.
-- The processed yaml is parsed and tweaked to generate a new transformed yaml.
-- The transformed yaml contains a single job, which is a merged and ordered list of job steps
-from the specified and required parent jobs.
-"""
-
-# Make sure you have following dependencies installed on your local machine
-# 1. PyYAML (pip install PyYaml)
-# 2. CircleCI cli from - https://circleci.com/docs/2.0/local-cli/#installation
-# 3. docker
-
-from collections import OrderedDict
-from functools import reduce
-
-import subprocess
-import sys
-import copy
-import argparse
-import yaml
-
-parser = argparse.ArgumentParser(description='Execute circleci jobs in a container \
-                                              on your local machine')
-parser.add_argument('workflow', type=str, help='Workflow name from config.yml')
-parser.add_argument('-j', '--job', type=str, help='Job name from config.yml')
-parser.add_argument('-e', '--executor', type=str, help='Executor name from config.yml')
-args = parser.parse_args()
-
-workflow = args.workflow
-job = args.job
-executor = args.executor
-
-CCI_CONFIG_FILE = '.circleci/config.yml'
-PROCESSED_FILE = '.circleci/processed.yml'
-XFORMED_FILE = '.circleci/xformed.yml'
-CCI_CONFIG = {}
-PROCESSED_CONFIG = {}
-XFORMED_CONFIG = {}
-XFORMED_JOB_NAME = 'mms_xformed_job'
-BLACKLISTED_STEPS = ['persist_to_workspace', 'attach_workspace', 'store_artifacts']
-
-# Read CircleCI's config
-with open(CCI_CONFIG_FILE) as fstream:
-    try:
-        CCI_CONFIG = yaml.safe_load(fstream)
-    except yaml.YAMLError as err:
-        print(err)
-
-# Create processed YAML using circleci cli's 'config process' command
-PROCESS_CONFIG_CMD = 'circleci config process {} > {}'.format(CCI_CONFIG_FILE, PROCESSED_FILE)
-print("Executing command : ", PROCESS_CONFIG_CMD)
-subprocess.check_call(PROCESS_CONFIG_CMD, shell=True)
-
-# Read the processed config
-with open(PROCESSED_FILE) as fstream:
-    try:
-        PROCESSED_CONFIG = yaml.safe_load(fstream)
-    except yaml.YAMLError as err:
-        print(err)
-
-# All executors available in the config file
-available_executors = list(CCI_CONFIG['executors'])
-
-# All jobs available under the specified workflow
-jobs_in_workflow = PROCESSED_CONFIG['workflows'][workflow]['jobs']
-
-
-def get_processed_job_sequence(processed_job_name):
-    """ Recursively iterate over jobs in the workflow to generate an ordered list of parent jobs """
-    jobs_in_sequence = []
-
-    job_dict = next((jd for jd in jobs_in_workflow \
-                     if isinstance(jd, dict) and processed_job_name == list(jd)[0]), None)
-    if job_dict:
-        # Find all parent jobs, recurse to find their respective ancestors
-        parent_jobs = job_dict[processed_job_name].get('requires', [])
-        for pjob in parent_jobs:
-            jobs_in_sequence += get_processed_job_sequence(pjob)
-
-    return jobs_in_sequence + [processed_job_name]
-
-
-def get_jobs_to_exec(job_name):
-    """ Returns a dictionary of executors and a list of jobs to be executed in them """
-    jobs_dict = {}
-    executors = [executor] if executor else available_executors
-
-    for exectr_name in executors:
-        if job_name is None:
-            # List of all job names (as string)
-            jobs_dict[exectr_name] = map(lambda j: j if isinstance(j, str) \
-                                         else list(j)[0], jobs_in_workflow)
-            # Filter processed job names as per the executor
-            # "job_name-executor_name" is a convention set in config.yml
-            # pylint: disable=cell-var-from-loop
-            jobs_dict[exectr_name] = filter(lambda j: exectr_name in j, jobs_dict[exectr_name])
-        else:
-            # The list might contain duplicate parent jobs due to multiple fan-ins like config
-            # - Remove the duplicates
-            # "job_name-executor_name" is a convention set in config.yml
-            jobs_dict[exectr_name] = \
-                OrderedDict.fromkeys(get_processed_job_sequence(job_name + '-' + exectr_name))
-        jobs_dict[exectr_name] = list(jobs_dict[exectr_name])
-
-    return jobs_dict
-
-
-# jobs_to_exec is a dict, with executor(s) as the key and list of jobs to be executed as its value
-jobs_to_exec = get_jobs_to_exec(job)
-
-
-def get_jobs_steps(steps, job_name):
-    """ Merge all the steps from the list of jobs to execute """
-    job_steps = PROCESSED_CONFIG['jobs'][job_name]['steps']
-    filtered_job_steps = list(filter(lambda step: list(step)[0] not in BLACKLISTED_STEPS, \
-                                     job_steps))
-    return steps + filtered_job_steps
-
-
-result_dict = {}
-
-for exectr, jobs in jobs_to_exec.items():
-    merged_steps = reduce(get_jobs_steps, jobs, [])
-
-    # Create a new job, using the first job as a reference
-    # This ensures configs like executor, environment, etc are maintained from the first job
-    first_job = jobs[0]
-    xformed_job = copy.deepcopy(PROCESSED_CONFIG['jobs'][first_job])
-
-    # Add the merged steps to this newly introduced job
-    xformed_job['steps'] = merged_steps
-
-    # Create a duplicate config (transformed) with the newly introduced job as the only job in config
-    XFORMED_CONFIG = copy.deepcopy(PROCESSED_CONFIG)
-    XFORMED_CONFIG['jobs'] = {}
-    XFORMED_CONFIG['jobs'][XFORMED_JOB_NAME] = xformed_job
-
-    # Create a transformed yaml
-    with open(XFORMED_FILE, 'w+') as fstream:
-        yaml.dump(XFORMED_CONFIG, fstream)
-
-    try:
-        # Locally execute the newly created job
-        # This newly created job has all the steps (ordered and merged from steps in parent job(s))
-        LOCAL_EXECUTE_CMD = 'circleci local execute -c {} --job {}'.format(XFORMED_FILE, \
-                                                                           XFORMED_JOB_NAME)
-        print('Executing command : ', LOCAL_EXECUTE_CMD)
-        result_dict[exectr] = subprocess.check_call(LOCAL_EXECUTE_CMD, shell=True)
-    except subprocess.CalledProcessError as err:
-        result_dict[exectr] = err.returncode
-
-# Clean up, remove the processed and transformed yml files
-CLEANUP_CMD = 'rm {} {}'.format(PROCESSED_FILE, XFORMED_FILE)
-print('Executing command : ', CLEANUP_CMD)
-subprocess.check_call(CLEANUP_CMD, shell=True)
-
-# Print job execution details
-for exectr, retcode in result_dict.items():
-    colorcode, status = ('\033[0;37;42m', 'successful') if retcode == 0 \
-        else ('\033[0;37;41m', 'failed')
-    print("{} Job execution {} using {} executor \x1b[0m".format(colorcode, status, exectr))
-
-# Exit as per overall status
-SYS_EXIT_CODE = 0 if all(retcode == 0 for exectr, retcode in result_dict.items()) else 1
-sys.exit(SYS_EXIT_CODE)