From 0ce70e4abcc11aeb41ab6f2c89aeb12b34623795 Mon Sep 17 00:00:00 2001 From: BluThaitanium Date: Thu, 23 Jun 2022 00:21:34 -0400 Subject: [PATCH 01/11] python linter Signed-off-by: BluThaitanium --- Makefile | 10 ++++++++++ 1 file changed, 10 insertions(+) diff --git a/Makefile b/Makefile index 6fe70ed3..a2b5382c 100644 --- a/Makefile +++ b/Makefile @@ -36,3 +36,13 @@ check_license: ## Make sure source files have license header @git grep -L "SPDX-License-Identifier: Apache-2.0" -- *.py *.yml *.yaml *.sh *.html *.js *.css *.ts *.tsx ':!*.bundle.js' | \ grep . && echo "Missing license headers in files above. Run './tools/bash/add_license_headers.sh'" && exit 1 || \ echo "$@: OK" + +.PHONY: lint_python +lint_python: venv ## Check Python code style compliance + @which flake8 > /dev/null || pip install flake8 || pip3 install flake8 + @flake8 . --show-source --statistics \ + --select=E9,E2,E3,E5,F63,F7,F82,F4,F841,W291,W292 \ + --per-file-ignores sdk/python/tests/compiler/testdata/*:F841,F821 \ + --exclude .git,__pycache__,docs/source/conf.py,old,build,dist,venv \ + --max-line-length=140 + @echo "$@: OK" From 6e77bb241c39f0600af09beacc6de7467b31ffe3 Mon Sep 17 00:00:00 2001 From: BluThaitanium Date: Thu, 23 Jun 2022 00:35:11 -0400 Subject: [PATCH 02/11] javascript linter Signed-off-by: BluThaitanium --- Makefile | 7 +- dashboard/origin-mlx/.eslintrc.yml | 33 + dashboard/origin-mlx/package-lock.json | 10454 ++++++++++++++++++----- dashboard/origin-mlx/package.json | 18 +- 4 files changed, 8341 insertions(+), 2171 deletions(-) create mode 100644 dashboard/origin-mlx/.eslintrc.yml diff --git a/Makefile b/Makefile index a2b5382c..4e380a6d 100644 --- a/Makefile +++ b/Makefile @@ -42,7 +42,12 @@ lint_python: venv ## Check Python code style compliance @which flake8 > /dev/null || pip install flake8 || pip3 install flake8 @flake8 . 
--show-source --statistics \ --select=E9,E2,E3,E5,F63,F7,F82,F4,F841,W291,W292 \ - --per-file-ignores sdk/python/tests/compiler/testdata/*:F841,F821 \ + --per-file-ignores ./*:F841,F821 \ --exclude .git,__pycache__,docs/source/conf.py,old,build,dist,venv \ --max-line-length=140 @echo "$@: OK" + +.PHONY: lint_javascript +lint_javascript: ## Check Javascript code style compliance + @cd dashboard/origin-mlx && npm run lint -- --fix + @echo "$@: OK" diff --git a/dashboard/origin-mlx/.eslintrc.yml b/dashboard/origin-mlx/.eslintrc.yml new file mode 100644 index 00000000..2b43ec4f --- /dev/null +++ b/dashboard/origin-mlx/.eslintrc.yml @@ -0,0 +1,33 @@ +env: + browser: true + es2021: true +extends: + - plugin:react/recommended + - plugin:react/jsx-runtime + - airbnb + - airbnb-typescript +parser: '@typescript-eslint/parser' +parserOptions: + project: './tsconfig.json' + ecmaFeatures: + jsx: true + ecmaVersion: latest + sourceType: module +plugins: + - react + - '@typescript-eslint' +rules: + #=== + # Disabled, based on docs + # https://reactjs.org/blog/2020/09/22/introducing-the-new-jsx-transform.html#eslint + react/jsx-uses-react: 'off' + react/react-in-jsx-scope: 'off' + #=== + # Allow only char '_' for unused-variable convention + no-unused-vars: 'off' + '@typescript-eslint/no-unused-vars': + - warn + - argsIgnorePattern: '^_' + varsIgnorePattern: '^_' + caughtErrorsIgnorePattern: '^_' + #=== diff --git a/dashboard/origin-mlx/package-lock.json b/dashboard/origin-mlx/package-lock.json index 2938d5e4..2383b0e0 100644 --- a/dashboard/origin-mlx/package-lock.json +++ b/dashboard/origin-mlx/package-lock.json @@ -25,6 +25,7 @@ "codemirror": "^5.45.0", "dagre": "https://registry.npmjs.org/dagre/-/dagre-0.8.4.tgz", "deepmerge": "^4.0.0", + "eslint-config-airbnb": "^19.0.4", "fuzzysort": "^1.1.4", "js-cookie": "^2.2.1", "js-yaml": "^3.12.2", @@ -33,6 +34,7 @@ "react-dom": "^16.13.1", "react-gtm-module": "^2.0.11", "react-markdown": "^8.0.0", + "react-router": "^6.3.0", 
"react-router-dom": "^4.3.1", "react-scripts": "^3.0.1", "react-sidebar": "^3.0.2", @@ -46,6 +48,16 @@ "typescript": "^3.5.2", "typestyle": "^2.0.1", "yamljs": "^0.3.0" + }, + "devDependencies": { + "@typescript-eslint/eslint-plugin": "^5.28.0", + "@typescript-eslint/parser": "^5.28.0", + "eslint": "^8.17.0", + "eslint-config-airbnb-typescript": "^17.0.0", + "eslint-plugin-import": "^2.26.0", + "eslint-plugin-jsx-a11y": "^6.5.1", + "eslint-plugin-react": "^7.30.0", + "eslint-plugin-react-hooks": "^4.6.0" } }, "node_modules/@ampproject/remapping": { @@ -120,6 +132,14 @@ "url": "https://opencollective.com/babel" } }, + "node_modules/@babel/core/node_modules/semver": { + "version": "6.3.0", + "resolved": "https://registry.npmjs.org/semver/-/semver-6.3.0.tgz", + "integrity": "sha512-b39TBaTSfV6yBrapU89p5fKekE2m/NwnDocOVruQFS1/veMgdzuPcnOM34M6CwxW8jH/lxEa5rBoDeUwu5HHTw==", + "bin": { + "semver": "bin/semver.js" + } + }, "node_modules/@babel/generator": { "version": "7.18.2", "resolved": "https://registry.npmjs.org/@babel/generator/-/generator-7.18.2.tgz", @@ -173,6 +193,14 @@ "@babel/core": "^7.0.0" } }, + "node_modules/@babel/helper-compilation-targets/node_modules/semver": { + "version": "6.3.0", + "resolved": "https://registry.npmjs.org/semver/-/semver-6.3.0.tgz", + "integrity": "sha512-b39TBaTSfV6yBrapU89p5fKekE2m/NwnDocOVruQFS1/veMgdzuPcnOM34M6CwxW8jH/lxEa5rBoDeUwu5HHTw==", + "bin": { + "semver": "bin/semver.js" + } + }, "node_modules/@babel/helper-create-class-features-plugin": { "version": "7.18.0", "resolved": "https://registry.npmjs.org/@babel/helper-create-class-features-plugin/-/helper-create-class-features-plugin-7.18.0.tgz", @@ -226,6 +254,14 @@ "@babel/core": "^7.4.0-0" } }, + "node_modules/@babel/helper-define-polyfill-provider/node_modules/semver": { + "version": "6.3.0", + "resolved": "https://registry.npmjs.org/semver/-/semver-6.3.0.tgz", + "integrity": "sha512-b39TBaTSfV6yBrapU89p5fKekE2m/NwnDocOVruQFS1/veMgdzuPcnOM34M6CwxW8jH/lxEa5rBoDeUwu5HHTw==", + 
"bin": { + "semver": "bin/semver.js" + } + }, "node_modules/@babel/helper-environment-visitor": { "version": "7.18.2", "resolved": "https://registry.npmjs.org/@babel/helper-environment-visitor/-/helper-environment-visitor-7.18.2.tgz", @@ -481,6 +517,14 @@ "resolved": "https://registry.npmjs.org/color-name/-/color-name-1.1.3.tgz", "integrity": "sha512-72fSenhMw2HZMTVHeCA9KCmpEIbzWiQsjN+BHcBbS9vr1mtt+vJjPdksIBNUmKAW8TFUDPJK5SUU3QhE9NEXDw==" }, + "node_modules/@babel/highlight/node_modules/escape-string-regexp": { + "version": "1.0.5", + "resolved": "https://registry.npmjs.org/escape-string-regexp/-/escape-string-regexp-1.0.5.tgz", + "integrity": "sha512-vbRorB5FUQWvla16U8R/qgaFIya2qGzwDrNmCZuYKrbdSUMG6I1ZCGQRefkRVhuOkIGVne7BQ35DSfo1qvJqFg==", + "engines": { + "node": ">=0.8.0" + } + }, "node_modules/@babel/highlight/node_modules/has-flag": { "version": "3.0.0", "resolved": "https://registry.npmjs.org/has-flag/-/has-flag-3.0.0.tgz", @@ -1099,6 +1143,14 @@ "@babel/core": "^7.0.0-0" } }, + "node_modules/@babel/plugin-transform-classes/node_modules/globals": { + "version": "11.12.0", + "resolved": "https://registry.npmjs.org/globals/-/globals-11.12.0.tgz", + "integrity": "sha512-WOBp/EEGUiIsJSp7wcv/y6MO+lV9UoncWqxuFfm8eBwzWNgyfBd6Gz+IeKQ9jCmyhoH99g15M3T+QaVHFjizVA==", + "engines": { + "node": ">=4" + } + }, "node_modules/@babel/plugin-transform-computed-properties": { "version": "7.17.12", "resolved": "https://registry.npmjs.org/@babel/plugin-transform-computed-properties/-/plugin-transform-computed-properties-7.17.12.tgz", @@ -1737,6 +1789,14 @@ "@babel/core": "^7.0.0-0" } }, + "node_modules/@babel/preset-env/node_modules/semver": { + "version": "6.3.0", + "resolved": "https://registry.npmjs.org/semver/-/semver-6.3.0.tgz", + "integrity": "sha512-b39TBaTSfV6yBrapU89p5fKekE2m/NwnDocOVruQFS1/veMgdzuPcnOM34M6CwxW8jH/lxEa5rBoDeUwu5HHTw==", + "bin": { + "semver": "bin/semver.js" + } + }, "node_modules/@babel/preset-modules": { "version": "0.1.5", "resolved": 
"https://registry.npmjs.org/@babel/preset-modules/-/preset-modules-0.1.5.tgz", @@ -1839,6 +1899,14 @@ "node": ">=6.9.0" } }, + "node_modules/@babel/traverse/node_modules/globals": { + "version": "11.12.0", + "resolved": "https://registry.npmjs.org/globals/-/globals-11.12.0.tgz", + "integrity": "sha512-WOBp/EEGUiIsJSp7wcv/y6MO+lV9UoncWqxuFfm8eBwzWNgyfBd6Gz+IeKQ9jCmyhoH99g15M3T+QaVHFjizVA==", + "engines": { + "node": ">=4" + } + }, "node_modules/@babel/types": { "version": "7.18.4", "resolved": "https://registry.npmjs.org/@babel/types/-/types-7.18.4.tgz", @@ -1897,6 +1965,41 @@ "resolved": "https://registry.npmjs.org/@emotion/unitless/-/unitless-0.7.5.tgz", "integrity": "sha512-OWORNpfjMsSSUBVrRBVGECkhWcULOAJz9ZW8uK9qgxD+87M7jHRcvh/A96XXNhXTLmKcoYSQtBEX7lHMO7YRwg==" }, + "node_modules/@eslint/eslintrc": { + "version": "1.3.0", + "resolved": "https://registry.npmjs.org/@eslint/eslintrc/-/eslintrc-1.3.0.tgz", + "integrity": "sha512-UWW0TMTmk2d7hLcWD1/e2g5HDM/HQ3csaLSqXCfqwh4uNDuNqlaKWXmEsL4Cs41Z0KnILNvwbHAah3C2yt06kw==", + "dependencies": { + "ajv": "^6.12.4", + "debug": "^4.3.2", + "espree": "^9.3.2", + "globals": "^13.15.0", + "ignore": "^5.2.0", + "import-fresh": "^3.2.1", + "js-yaml": "^4.1.0", + "minimatch": "^3.1.2", + "strip-json-comments": "^3.1.1" + }, + "engines": { + "node": "^12.22.0 || ^14.17.0 || >=16.0.0" + } + }, + "node_modules/@eslint/eslintrc/node_modules/argparse": { + "version": "2.0.1", + "resolved": "https://registry.npmjs.org/argparse/-/argparse-2.0.1.tgz", + "integrity": "sha512-8+9WqebbFzpX9OR+Wa6O29asIogeRMzcGtAINdpMHHyAg10f05aSFVBbcEqGf/PXw1EjAZ+q2/bEBg3DvurK3Q==" + }, + "node_modules/@eslint/eslintrc/node_modules/js-yaml": { + "version": "4.1.0", + "resolved": "https://registry.npmjs.org/js-yaml/-/js-yaml-4.1.0.tgz", + "integrity": "sha512-wpxZs9NoxZaJESJGIZTyDEaYpl0FKSA+FB9aJiyemKhMwkxQg63h4T1KJgUGHpTqPDNRcmmYLugrRjJlBtWvRA==", + "dependencies": { + "argparse": "^2.0.1" + }, + "bin": { + "js-yaml": "bin/js-yaml.js" + } + }, 
"node_modules/@hapi/address": { "version": "2.1.4", "resolved": "https://registry.npmjs.org/@hapi/address/-/address-2.1.4.tgz", @@ -1936,6 +2039,24 @@ "@hapi/hoek": "^8.3.0" } }, + "node_modules/@humanwhocodes/config-array": { + "version": "0.9.5", + "resolved": "https://registry.npmjs.org/@humanwhocodes/config-array/-/config-array-0.9.5.tgz", + "integrity": "sha512-ObyMyWxZiCu/yTisA7uzx81s40xR2fD5Cg/2Kq7G02ajkNubJf6BopgDTmDyc3U7sXpNKM8cYOw7s7Tyr+DnCw==", + "dependencies": { + "@humanwhocodes/object-schema": "^1.2.1", + "debug": "^4.1.1", + "minimatch": "^3.0.4" + }, + "engines": { + "node": ">=10.10.0" + } + }, + "node_modules/@humanwhocodes/object-schema": { + "version": "1.2.1", + "resolved": "https://registry.npmjs.org/@humanwhocodes/object-schema/-/object-schema-1.2.1.tgz", + "integrity": "sha512-ZnQMnLV4e7hDlUvw8H+U8ASL02SS2Gn6+9Ac3wGGLIe7+je2AeAOxPY+izIPJDfFDb7eDjev0Us8MO1iFRN8hA==" + }, "node_modules/@hypnosphi/create-react-context": { "version": "0.3.1", "resolved": "https://registry.npmjs.org/@hypnosphi/create-react-context/-/create-react-context-0.3.1.tgz", @@ -1999,6 +2120,14 @@ "resolved": "https://registry.npmjs.org/color-name/-/color-name-1.1.3.tgz", "integrity": "sha512-72fSenhMw2HZMTVHeCA9KCmpEIbzWiQsjN+BHcBbS9vr1mtt+vJjPdksIBNUmKAW8TFUDPJK5SUU3QhE9NEXDw==" }, + "node_modules/@jest/console/node_modules/escape-string-regexp": { + "version": "1.0.5", + "resolved": "https://registry.npmjs.org/escape-string-regexp/-/escape-string-regexp-1.0.5.tgz", + "integrity": "sha512-vbRorB5FUQWvla16U8R/qgaFIya2qGzwDrNmCZuYKrbdSUMG6I1ZCGQRefkRVhuOkIGVne7BQ35DSfo1qvJqFg==", + "engines": { + "node": ">=0.8.0" + } + }, "node_modules/@jest/console/node_modules/has-flag": { "version": "3.0.0", "resolved": "https://registry.npmjs.org/has-flag/-/has-flag-3.0.0.tgz", @@ -2007,6 +2136,14 @@ "node": ">=4" } }, + "node_modules/@jest/console/node_modules/slash": { + "version": "2.0.0", + "resolved": "https://registry.npmjs.org/slash/-/slash-2.0.0.tgz", + "integrity": 
"sha512-ZYKh3Wh2z1PpEXWr0MpSBZ0V6mZHAQfYevttO11c51CaWjGTaadiKZ+wVt1PbMlDV5qhMFslpZCemhwOK7C89A==", + "engines": { + "node": ">=6" + } + }, "node_modules/@jest/console/node_modules/supports-color": { "version": "5.5.0", "resolved": "https://registry.npmjs.org/supports-color/-/supports-color-5.5.0.tgz", @@ -2056,12 +2193,12 @@ "node": ">= 6" } }, - "node_modules/@jest/core/node_modules/ansi-escapes": { - "version": "3.2.0", - "resolved": "https://registry.npmjs.org/ansi-escapes/-/ansi-escapes-3.2.0.tgz", - "integrity": "sha512-cBhpre4ma+U0T1oM5fXg7Dy1Jw7zzwv7lt/GoCpr+hDQJoYnKVPLL4dCvSEFMmQurOQvSrwT7SL/DAlhBI97RQ==", + "node_modules/@jest/core/node_modules/ansi-regex": { + "version": "4.1.1", + "resolved": "https://registry.npmjs.org/ansi-regex/-/ansi-regex-4.1.1.tgz", + "integrity": "sha512-ILlv4k/3f6vfQ4OoP2AGvirOktlQ98ZEL1k9FaQjxa3L1abBgbuTDAdPOpvbGncC0BTVQrl+OM8xZGK6tWXt7g==", "engines": { - "node": ">=4" + "node": ">=6" } }, "node_modules/@jest/core/node_modules/ansi-styles": { @@ -2075,6 +2212,37 @@ "node": ">=4" } }, + "node_modules/@jest/core/node_modules/braces": { + "version": "2.3.2", + "resolved": "https://registry.npmjs.org/braces/-/braces-2.3.2.tgz", + "integrity": "sha512-aNdbnj9P8PjdXU4ybaWLK2IF3jc/EoDYbC7AazW6to3TRsfXxscC9UXOB5iDiEQrkyIbWp2SLQda4+QAa7nc3w==", + "dependencies": { + "arr-flatten": "^1.1.0", + "array-unique": "^0.3.2", + "extend-shallow": "^2.0.1", + "fill-range": "^4.0.0", + "isobject": "^3.0.1", + "repeat-element": "^1.1.2", + "snapdragon": "^0.8.1", + "snapdragon-node": "^2.0.1", + "split-string": "^3.0.2", + "to-regex": "^3.0.1" + }, + "engines": { + "node": ">=0.10.0" + } + }, + "node_modules/@jest/core/node_modules/braces/node_modules/extend-shallow": { + "version": "2.0.1", + "resolved": "https://registry.npmjs.org/extend-shallow/-/extend-shallow-2.0.1.tgz", + "integrity": "sha512-zCnTtlxNoAiDc3gqY2aYAWFx7XWWiasuF2K8Me5WbN8otHKTUKBwjPtNpRs/rbUZm7KxWAaNj7P1a/p52GbVug==", + "dependencies": { + "is-extendable": "^0.1.0" + }, + 
"engines": { + "node": ">=0.10.0" + } + }, "node_modules/@jest/core/node_modules/chalk": { "version": "2.4.2", "resolved": "https://registry.npmjs.org/chalk/-/chalk-2.4.2.tgz", @@ -2101,6 +2269,39 @@ "resolved": "https://registry.npmjs.org/color-name/-/color-name-1.1.3.tgz", "integrity": "sha512-72fSenhMw2HZMTVHeCA9KCmpEIbzWiQsjN+BHcBbS9vr1mtt+vJjPdksIBNUmKAW8TFUDPJK5SUU3QhE9NEXDw==" }, + "node_modules/@jest/core/node_modules/escape-string-regexp": { + "version": "1.0.5", + "resolved": "https://registry.npmjs.org/escape-string-regexp/-/escape-string-regexp-1.0.5.tgz", + "integrity": "sha512-vbRorB5FUQWvla16U8R/qgaFIya2qGzwDrNmCZuYKrbdSUMG6I1ZCGQRefkRVhuOkIGVne7BQ35DSfo1qvJqFg==", + "engines": { + "node": ">=0.8.0" + } + }, + "node_modules/@jest/core/node_modules/fill-range": { + "version": "4.0.0", + "resolved": "https://registry.npmjs.org/fill-range/-/fill-range-4.0.0.tgz", + "integrity": "sha512-VcpLTWqWDiTerugjj8e3+esbg+skS3M9e54UuR3iCeIDMXCLTsAH8hTSzDQU/X6/6t3eYkOKoZSef2PlU6U1XQ==", + "dependencies": { + "extend-shallow": "^2.0.1", + "is-number": "^3.0.0", + "repeat-string": "^1.6.1", + "to-regex-range": "^2.1.0" + }, + "engines": { + "node": ">=0.10.0" + } + }, + "node_modules/@jest/core/node_modules/fill-range/node_modules/extend-shallow": { + "version": "2.0.1", + "resolved": "https://registry.npmjs.org/extend-shallow/-/extend-shallow-2.0.1.tgz", + "integrity": "sha512-zCnTtlxNoAiDc3gqY2aYAWFx7XWWiasuF2K8Me5WbN8otHKTUKBwjPtNpRs/rbUZm7KxWAaNj7P1a/p52GbVug==", + "dependencies": { + "is-extendable": "^0.1.0" + }, + "engines": { + "node": ">=0.10.0" + } + }, "node_modules/@jest/core/node_modules/has-flag": { "version": "3.0.0", "resolved": "https://registry.npmjs.org/has-flag/-/has-flag-3.0.0.tgz", @@ -2109,6 +2310,89 @@ "node": ">=4" } }, + "node_modules/@jest/core/node_modules/is-number": { + "version": "3.0.0", + "resolved": "https://registry.npmjs.org/is-number/-/is-number-3.0.0.tgz", + "integrity": 
"sha512-4cboCqIpliH+mAvFNegjZQ4kgKc3ZUhQVr3HvWbSh5q3WH2v82ct+T2Y1hdU5Gdtorx/cLifQjqCbL7bpznLTg==", + "dependencies": { + "kind-of": "^3.0.2" + }, + "engines": { + "node": ">=0.10.0" + } + }, + "node_modules/@jest/core/node_modules/is-number/node_modules/kind-of": { + "version": "3.2.2", + "resolved": "https://registry.npmjs.org/kind-of/-/kind-of-3.2.2.tgz", + "integrity": "sha512-NOW9QQXMoZGg/oqnVNoNTTIFEIid1627WCffUBJEdMxYApq7mNE7CpzucIPc+ZQg25Phej7IJSmX3hO+oblOtQ==", + "dependencies": { + "is-buffer": "^1.1.5" + }, + "engines": { + "node": ">=0.10.0" + } + }, + "node_modules/@jest/core/node_modules/kind-of": { + "version": "6.0.3", + "resolved": "https://registry.npmjs.org/kind-of/-/kind-of-6.0.3.tgz", + "integrity": "sha512-dcS1ul+9tmeD95T+x28/ehLgd9mENa3LsvDTtzm3vyBEO7RPptvAD+t44WVXaUjTBRcrpFeFlC8WCruUR456hw==", + "engines": { + "node": ">=0.10.0" + } + }, + "node_modules/@jest/core/node_modules/micromatch": { + "version": "3.1.10", + "resolved": "https://registry.npmjs.org/micromatch/-/micromatch-3.1.10.tgz", + "integrity": "sha512-MWikgl9n9M3w+bpsY3He8L+w9eF9338xRl8IAO5viDizwSzziFEyUzo2xrrloB64ADbTf8uA8vRqqttDTOmccg==", + "dependencies": { + "arr-diff": "^4.0.0", + "array-unique": "^0.3.2", + "braces": "^2.3.1", + "define-property": "^2.0.2", + "extend-shallow": "^3.0.2", + "extglob": "^2.0.4", + "fragment-cache": "^0.2.1", + "kind-of": "^6.0.2", + "nanomatch": "^1.2.9", + "object.pick": "^1.3.0", + "regex-not": "^1.0.0", + "snapdragon": "^0.8.1", + "to-regex": "^3.0.2" + }, + "engines": { + "node": ">=0.10.0" + } + }, + "node_modules/@jest/core/node_modules/rimraf": { + "version": "2.7.1", + "resolved": "https://registry.npmjs.org/rimraf/-/rimraf-2.7.1.tgz", + "integrity": "sha512-uWjbaKIK3T1OSVptzX7Nl6PvQ3qAGtKEtVRjRuazjfL3Bx5eI409VZSqgND+4UNnmzLVdPj9FqFJNPqBZFve4w==", + "dependencies": { + "glob": "^7.1.3" + }, + "bin": { + "rimraf": "bin.js" + } + }, + "node_modules/@jest/core/node_modules/slash": { + "version": "2.0.0", + "resolved": 
"https://registry.npmjs.org/slash/-/slash-2.0.0.tgz", + "integrity": "sha512-ZYKh3Wh2z1PpEXWr0MpSBZ0V6mZHAQfYevttO11c51CaWjGTaadiKZ+wVt1PbMlDV5qhMFslpZCemhwOK7C89A==", + "engines": { + "node": ">=6" + } + }, + "node_modules/@jest/core/node_modules/strip-ansi": { + "version": "5.2.0", + "resolved": "https://registry.npmjs.org/strip-ansi/-/strip-ansi-5.2.0.tgz", + "integrity": "sha512-DuRs1gKbBqsMKIZlrffwlug8MHkcnpjs5VPmL1PAh+mA30U0DTotfDZ0d2UUsXpPmPmMMJ6W773MaA3J+lbiWA==", + "dependencies": { + "ansi-regex": "^4.1.0" + }, + "engines": { + "node": ">=6" + } + }, "node_modules/@jest/core/node_modules/supports-color": { "version": "5.5.0", "resolved": "https://registry.npmjs.org/supports-color/-/supports-color-5.5.0.tgz", @@ -2120,6 +2404,18 @@ "node": ">=4" } }, + "node_modules/@jest/core/node_modules/to-regex-range": { + "version": "2.1.1", + "resolved": "https://registry.npmjs.org/to-regex-range/-/to-regex-range-2.1.1.tgz", + "integrity": "sha512-ZZWNfCjUokXXDGXFpZehJIkZqq91BcULFq/Pi7M5i4JnxXdhMKAK682z8bCW3o8Hj1wuuzoKcW3DfVzaP6VuNg==", + "dependencies": { + "is-number": "^3.0.0", + "repeat-string": "^1.6.1" + }, + "engines": { + "node": ">=0.10.0" + } + }, "node_modules/@jest/environment": { "version": "24.9.0", "resolved": "https://registry.npmjs.org/@jest/environment/-/environment-24.9.0.tgz", @@ -2215,6 +2511,14 @@ "resolved": "https://registry.npmjs.org/color-name/-/color-name-1.1.3.tgz", "integrity": "sha512-72fSenhMw2HZMTVHeCA9KCmpEIbzWiQsjN+BHcBbS9vr1mtt+vJjPdksIBNUmKAW8TFUDPJK5SUU3QhE9NEXDw==" }, + "node_modules/@jest/reporters/node_modules/escape-string-regexp": { + "version": "1.0.5", + "resolved": "https://registry.npmjs.org/escape-string-regexp/-/escape-string-regexp-1.0.5.tgz", + "integrity": "sha512-vbRorB5FUQWvla16U8R/qgaFIya2qGzwDrNmCZuYKrbdSUMG6I1ZCGQRefkRVhuOkIGVne7BQ35DSfo1qvJqFg==", + "engines": { + "node": ">=0.8.0" + } + }, "node_modules/@jest/reporters/node_modules/has-flag": { "version": "3.0.0", "resolved": 
"https://registry.npmjs.org/has-flag/-/has-flag-3.0.0.tgz", @@ -2223,6 +2527,14 @@ "node": ">=4" } }, + "node_modules/@jest/reporters/node_modules/slash": { + "version": "2.0.0", + "resolved": "https://registry.npmjs.org/slash/-/slash-2.0.0.tgz", + "integrity": "sha512-ZYKh3Wh2z1PpEXWr0MpSBZ0V6mZHAQfYevttO11c51CaWjGTaadiKZ+wVt1PbMlDV5qhMFslpZCemhwOK7C89A==", + "engines": { + "node": ">=6" + } + }, "node_modules/@jest/reporters/node_modules/supports-color": { "version": "5.5.0", "resolved": "https://registry.npmjs.org/supports-color/-/supports-color-5.5.0.tgz", @@ -2258,14 +2570,6 @@ "node": ">= 6" } }, - "node_modules/@jest/source-map/node_modules/callsites": { - "version": "3.1.0", - "resolved": "https://registry.npmjs.org/callsites/-/callsites-3.1.0.tgz", - "integrity": "sha512-P8BjAsXvZS+VIDUI11hHCQEv74YT67YUi5JJFNWIqL235sBmjX4+qx9Muvls5ivyNENctx46xQLQ3aTuE7ssaQ==", - "engines": { - "node": ">=6" - } - }, "node_modules/@jest/test-result": { "version": "24.9.0", "resolved": "https://registry.npmjs.org/@jest/test-result/-/test-result-24.9.0.tgz", @@ -2330,6 +2634,37 @@ "node": ">=4" } }, + "node_modules/@jest/transform/node_modules/braces": { + "version": "2.3.2", + "resolved": "https://registry.npmjs.org/braces/-/braces-2.3.2.tgz", + "integrity": "sha512-aNdbnj9P8PjdXU4ybaWLK2IF3jc/EoDYbC7AazW6to3TRsfXxscC9UXOB5iDiEQrkyIbWp2SLQda4+QAa7nc3w==", + "dependencies": { + "arr-flatten": "^1.1.0", + "array-unique": "^0.3.2", + "extend-shallow": "^2.0.1", + "fill-range": "^4.0.0", + "isobject": "^3.0.1", + "repeat-element": "^1.1.2", + "snapdragon": "^0.8.1", + "snapdragon-node": "^2.0.1", + "split-string": "^3.0.2", + "to-regex": "^3.0.1" + }, + "engines": { + "node": ">=0.10.0" + } + }, + "node_modules/@jest/transform/node_modules/braces/node_modules/extend-shallow": { + "version": "2.0.1", + "resolved": "https://registry.npmjs.org/extend-shallow/-/extend-shallow-2.0.1.tgz", + "integrity": 
"sha512-zCnTtlxNoAiDc3gqY2aYAWFx7XWWiasuF2K8Me5WbN8otHKTUKBwjPtNpRs/rbUZm7KxWAaNj7P1a/p52GbVug==", + "dependencies": { + "is-extendable": "^0.1.0" + }, + "engines": { + "node": ">=0.10.0" + } + }, "node_modules/@jest/transform/node_modules/chalk": { "version": "2.4.2", "resolved": "https://registry.npmjs.org/chalk/-/chalk-2.4.2.tgz", @@ -2356,6 +2691,39 @@ "resolved": "https://registry.npmjs.org/color-name/-/color-name-1.1.3.tgz", "integrity": "sha512-72fSenhMw2HZMTVHeCA9KCmpEIbzWiQsjN+BHcBbS9vr1mtt+vJjPdksIBNUmKAW8TFUDPJK5SUU3QhE9NEXDw==" }, + "node_modules/@jest/transform/node_modules/escape-string-regexp": { + "version": "1.0.5", + "resolved": "https://registry.npmjs.org/escape-string-regexp/-/escape-string-regexp-1.0.5.tgz", + "integrity": "sha512-vbRorB5FUQWvla16U8R/qgaFIya2qGzwDrNmCZuYKrbdSUMG6I1ZCGQRefkRVhuOkIGVne7BQ35DSfo1qvJqFg==", + "engines": { + "node": ">=0.8.0" + } + }, + "node_modules/@jest/transform/node_modules/fill-range": { + "version": "4.0.0", + "resolved": "https://registry.npmjs.org/fill-range/-/fill-range-4.0.0.tgz", + "integrity": "sha512-VcpLTWqWDiTerugjj8e3+esbg+skS3M9e54UuR3iCeIDMXCLTsAH8hTSzDQU/X6/6t3eYkOKoZSef2PlU6U1XQ==", + "dependencies": { + "extend-shallow": "^2.0.1", + "is-number": "^3.0.0", + "repeat-string": "^1.6.1", + "to-regex-range": "^2.1.0" + }, + "engines": { + "node": ">=0.10.0" + } + }, + "node_modules/@jest/transform/node_modules/fill-range/node_modules/extend-shallow": { + "version": "2.0.1", + "resolved": "https://registry.npmjs.org/extend-shallow/-/extend-shallow-2.0.1.tgz", + "integrity": "sha512-zCnTtlxNoAiDc3gqY2aYAWFx7XWWiasuF2K8Me5WbN8otHKTUKBwjPtNpRs/rbUZm7KxWAaNj7P1a/p52GbVug==", + "dependencies": { + "is-extendable": "^0.1.0" + }, + "engines": { + "node": ">=0.10.0" + } + }, "node_modules/@jest/transform/node_modules/has-flag": { "version": "3.0.0", "resolved": "https://registry.npmjs.org/has-flag/-/has-flag-3.0.0.tgz", @@ -2364,6 +2732,67 @@ "node": ">=4" } }, + 
"node_modules/@jest/transform/node_modules/is-number": { + "version": "3.0.0", + "resolved": "https://registry.npmjs.org/is-number/-/is-number-3.0.0.tgz", + "integrity": "sha512-4cboCqIpliH+mAvFNegjZQ4kgKc3ZUhQVr3HvWbSh5q3WH2v82ct+T2Y1hdU5Gdtorx/cLifQjqCbL7bpznLTg==", + "dependencies": { + "kind-of": "^3.0.2" + }, + "engines": { + "node": ">=0.10.0" + } + }, + "node_modules/@jest/transform/node_modules/is-number/node_modules/kind-of": { + "version": "3.2.2", + "resolved": "https://registry.npmjs.org/kind-of/-/kind-of-3.2.2.tgz", + "integrity": "sha512-NOW9QQXMoZGg/oqnVNoNTTIFEIid1627WCffUBJEdMxYApq7mNE7CpzucIPc+ZQg25Phej7IJSmX3hO+oblOtQ==", + "dependencies": { + "is-buffer": "^1.1.5" + }, + "engines": { + "node": ">=0.10.0" + } + }, + "node_modules/@jest/transform/node_modules/kind-of": { + "version": "6.0.3", + "resolved": "https://registry.npmjs.org/kind-of/-/kind-of-6.0.3.tgz", + "integrity": "sha512-dcS1ul+9tmeD95T+x28/ehLgd9mENa3LsvDTtzm3vyBEO7RPptvAD+t44WVXaUjTBRcrpFeFlC8WCruUR456hw==", + "engines": { + "node": ">=0.10.0" + } + }, + "node_modules/@jest/transform/node_modules/micromatch": { + "version": "3.1.10", + "resolved": "https://registry.npmjs.org/micromatch/-/micromatch-3.1.10.tgz", + "integrity": "sha512-MWikgl9n9M3w+bpsY3He8L+w9eF9338xRl8IAO5viDizwSzziFEyUzo2xrrloB64ADbTf8uA8vRqqttDTOmccg==", + "dependencies": { + "arr-diff": "^4.0.0", + "array-unique": "^0.3.2", + "braces": "^2.3.1", + "define-property": "^2.0.2", + "extend-shallow": "^3.0.2", + "extglob": "^2.0.4", + "fragment-cache": "^0.2.1", + "kind-of": "^6.0.2", + "nanomatch": "^1.2.9", + "object.pick": "^1.3.0", + "regex-not": "^1.0.0", + "snapdragon": "^0.8.1", + "to-regex": "^3.0.2" + }, + "engines": { + "node": ">=0.10.0" + } + }, + "node_modules/@jest/transform/node_modules/slash": { + "version": "2.0.0", + "resolved": "https://registry.npmjs.org/slash/-/slash-2.0.0.tgz", + "integrity": "sha512-ZYKh3Wh2z1PpEXWr0MpSBZ0V6mZHAQfYevttO11c51CaWjGTaadiKZ+wVt1PbMlDV5qhMFslpZCemhwOK7C89A==", + 
"engines": { + "node": ">=6" + } + }, "node_modules/@jest/transform/node_modules/supports-color": { "version": "5.5.0", "resolved": "https://registry.npmjs.org/supports-color/-/supports-color-5.5.0.tgz", @@ -2375,6 +2804,18 @@ "node": ">=4" } }, + "node_modules/@jest/transform/node_modules/to-regex-range": { + "version": "2.1.1", + "resolved": "https://registry.npmjs.org/to-regex-range/-/to-regex-range-2.1.1.tgz", + "integrity": "sha512-ZZWNfCjUokXXDGXFpZehJIkZqq91BcULFq/Pi7M5i4JnxXdhMKAK682z8bCW3o8Hj1wuuzoKcW3DfVzaP6VuNg==", + "dependencies": { + "is-number": "^3.0.0", + "repeat-string": "^1.6.1" + }, + "engines": { + "node": ">=0.10.0" + } + }, "node_modules/@jest/types": { "version": "24.9.0", "resolved": "https://registry.npmjs.org/@jest/types/-/types-24.9.0.tgz", @@ -2555,12 +2996,39 @@ "node": ">=4" } }, + "node_modules/@nodelib/fs.scandir": { + "version": "2.1.5", + "resolved": "https://registry.npmjs.org/@nodelib/fs.scandir/-/fs.scandir-2.1.5.tgz", + "integrity": "sha512-vq24Bq3ym5HEQm2NKCr3yXDwjc7vTsEThRDnkp2DK9p1uqLR+DHurm/NOTo0KG7HYHU7eppKZj3MyqYuMBf62g==", + "dev": true, + "dependencies": { + "@nodelib/fs.stat": "2.0.5", + "run-parallel": "^1.1.9" + }, + "engines": { + "node": ">= 8" + } + }, "node_modules/@nodelib/fs.stat": { - "version": "1.1.3", - "resolved": "https://registry.npmjs.org/@nodelib/fs.stat/-/fs.stat-1.1.3.tgz", - "integrity": "sha512-shAmDyaQC4H92APFoIaVDHCx5bStIocgvbwQyxPRrbUY20V1EYTbSDchWbuwlMG3V17cprZhA6+78JfB+3DTPw==", + "version": "2.0.5", + "resolved": "https://registry.npmjs.org/@nodelib/fs.stat/-/fs.stat-2.0.5.tgz", + "integrity": "sha512-RkhPPp2zrqDAQA/2jNhnztcPAlv64XdhIp7a7454A5ovI7Bukxgt7MX7udwAu3zg1DcpPU0rz3VV1SeaqvY4+A==", + "dev": true, "engines": { - "node": ">= 6" + "node": ">= 8" + } + }, + "node_modules/@nodelib/fs.walk": { + "version": "1.2.8", + "resolved": "https://registry.npmjs.org/@nodelib/fs.walk/-/fs.walk-1.2.8.tgz", + "integrity": 
"sha512-oGB+UxlgWcgQkgwo8GcEGwemoTFt3FIO9ababBmaGwXIoBKZ+GTy0pP185beGg7Llih/NSHSV2XAs1lnznocSg==", + "dev": true, + "dependencies": { + "@nodelib/fs.scandir": "2.1.5", + "fastq": "^1.6.0" + }, + "engines": { + "node": ">= 8" } }, "node_modules/@sinclair/typebox": { @@ -2864,6 +3332,11 @@ "resolved": "https://registry.npmjs.org/@types/json-schema/-/json-schema-7.0.11.tgz", "integrity": "sha512-wOuvG1SN4Us4rez+tylwwwCV1psiNVOkJeM3AUWUNWg/jDQY2+HE/444y5gc+jBmRqASOm2Oeh5c1axHobwRKQ==" }, + "node_modules/@types/json5": { + "version": "0.0.29", + "resolved": "https://registry.npmjs.org/@types/json5/-/json5-0.0.29.tgz", + "integrity": "sha512-dRLjCWHYg4oaA77cxO64oO+7JwCwnIzkZPdrrC71jQmQtlhM556pwKo5bUzqvZndkVbeFLIIi+9TC40JNF5hNQ==" + }, "node_modules/@types/jss": { "version": "9.5.8", "resolved": "https://registry.npmjs.org/@types/jss/-/jss-9.5.8.tgz", @@ -3030,25 +3503,31 @@ "integrity": "sha512-iO9ZQHkZxHn4mSakYV0vFHAVDyEOIJQrV2uZ06HxEPcx+mt8swXoZHIbaaJ2crJYFfErySgktuTZ3BeLz+XmFA==" }, "node_modules/@typescript-eslint/eslint-plugin": { - "version": "2.34.0", - "resolved": "https://registry.npmjs.org/@typescript-eslint/eslint-plugin/-/eslint-plugin-2.34.0.tgz", - "integrity": "sha512-4zY3Z88rEE99+CNvTbXSyovv2z9PNOVffTWD2W8QF5s2prBQtwN2zadqERcrHpcR7O/+KMI3fcTAmUUhK/iQcQ==", - "dependencies": { - "@typescript-eslint/experimental-utils": "2.34.0", + "version": "5.28.0", + "resolved": "https://registry.npmjs.org/@typescript-eslint/eslint-plugin/-/eslint-plugin-5.28.0.tgz", + "integrity": "sha512-DXVU6Cg29H2M6EybqSg2A+x8DgO9TCUBRp4QEXQHJceLS7ogVDP0g3Lkg/SZCqcvkAP/RruuQqK0gdlkgmhSUA==", + "dev": true, + "dependencies": { + "@typescript-eslint/scope-manager": "5.28.0", + "@typescript-eslint/type-utils": "5.28.0", + "@typescript-eslint/utils": "5.28.0", + "debug": "^4.3.4", "functional-red-black-tree": "^1.0.1", - "regexpp": "^3.0.0", - "tsutils": "^3.17.1" + "ignore": "^5.2.0", + "regexpp": "^3.2.0", + "semver": "^7.3.7", + "tsutils": "^3.21.0" }, "engines": { - "node": "^8.10.0 
|| ^10.13.0 || >=11.10.1" + "node": "^12.22.0 || ^14.17.0 || >=16.0.0" }, "funding": { "type": "opencollective", "url": "https://opencollective.com/typescript-eslint" }, "peerDependencies": { - "@typescript-eslint/parser": "^2.0.0", - "eslint": "^5.0.0 || ^6.0.0" + "@typescript-eslint/parser": "^5.0.0", + "eslint": "^6.0.0 || ^7.0.0 || ^8.0.0" }, "peerDependenciesMeta": { "typescript": { @@ -3077,15 +3556,18 @@ "eslint": "*" } }, - "node_modules/@typescript-eslint/parser": { + "node_modules/@typescript-eslint/experimental-utils/node_modules/@typescript-eslint/typescript-estree": { "version": "2.34.0", - "resolved": "https://registry.npmjs.org/@typescript-eslint/parser/-/parser-2.34.0.tgz", - "integrity": "sha512-03ilO0ucSD0EPTw2X4PntSIRFtDPWjrVq7C3/Z3VQHRC7+13YB55rcJI3Jt+YgeHbjUdJPcPa7b23rXCBokuyA==", + "resolved": "https://registry.npmjs.org/@typescript-eslint/typescript-estree/-/typescript-estree-2.34.0.tgz", + "integrity": "sha512-OMAr+nJWKdlVM9LOqCqh3pQQPwxHAN7Du8DR6dmwCrAmxtiXQnhHJ6tBNtf+cggqfo51SG/FCwnKhXCIM7hnVg==", "dependencies": { - "@types/eslint-visitor-keys": "^1.0.0", - "@typescript-eslint/experimental-utils": "2.34.0", - "@typescript-eslint/typescript-estree": "2.34.0", - "eslint-visitor-keys": "^1.1.0" + "debug": "^4.1.1", + "eslint-visitor-keys": "^1.1.0", + "glob": "^7.1.6", + "is-glob": "^4.0.1", + "lodash": "^4.17.15", + "semver": "^7.3.2", + "tsutils": "^3.17.1" }, "engines": { "node": "^8.10.0 || ^10.13.0 || >=11.10.1" @@ -3094,8 +3576,54 @@ "type": "opencollective", "url": "https://opencollective.com/typescript-eslint" }, + "peerDependenciesMeta": { + "typescript": { + "optional": true + } + } + }, + "node_modules/@typescript-eslint/experimental-utils/node_modules/eslint-utils": { + "version": "2.1.0", + "resolved": "https://registry.npmjs.org/eslint-utils/-/eslint-utils-2.1.0.tgz", + "integrity": "sha512-w94dQYoauyvlDc43XnGB8lU3Zt713vNChgt4EWwhXAP2XkBvndfxF0AgIqKOOasjPIPzj9JqgwkwbCYD0/V3Zg==", + "dependencies": { + "eslint-visitor-keys": 
"^1.1.0" + }, + "engines": { + "node": ">=6" + }, + "funding": { + "url": "https://github.com/sponsors/mysticatea" + } + }, + "node_modules/@typescript-eslint/experimental-utils/node_modules/eslint-visitor-keys": { + "version": "1.3.0", + "resolved": "https://registry.npmjs.org/eslint-visitor-keys/-/eslint-visitor-keys-1.3.0.tgz", + "integrity": "sha512-6J72N8UNa462wa/KFODt/PJ3IU60SDpC3QXC1Hjc1BXXpfL2C9R5+AU7jhe0F6GREqVMh4Juu+NY7xn+6dipUQ==", + "engines": { + "node": ">=4" + } + }, + "node_modules/@typescript-eslint/parser": { + "version": "5.28.0", + "resolved": "https://registry.npmjs.org/@typescript-eslint/parser/-/parser-5.28.0.tgz", + "integrity": "sha512-ekqoNRNK1lAcKhZESN/PdpVsWbP9jtiNqzFWkp/yAUdZvJalw2heCYuqRmM5eUJSIYEkgq5sGOjq+ZqsLMjtRA==", + "dev": true, + "dependencies": { + "@typescript-eslint/scope-manager": "5.28.0", + "@typescript-eslint/types": "5.28.0", + "@typescript-eslint/typescript-estree": "5.28.0", + "debug": "^4.3.4" + }, + "engines": { + "node": "^12.22.0 || ^14.17.0 || >=16.0.0" + }, + "funding": { + "type": "opencollective", + "url": "https://opencollective.com/typescript-eslint" + }, "peerDependencies": { - "eslint": "^5.0.0 || ^6.0.0" + "eslint": "^6.0.0 || ^7.0.0 || ^8.0.0" }, "peerDependenciesMeta": { "typescript": { @@ -3103,25 +3631,42 @@ } } }, - "node_modules/@typescript-eslint/typescript-estree": { - "version": "2.34.0", - "resolved": "https://registry.npmjs.org/@typescript-eslint/typescript-estree/-/typescript-estree-2.34.0.tgz", - "integrity": "sha512-OMAr+nJWKdlVM9LOqCqh3pQQPwxHAN7Du8DR6dmwCrAmxtiXQnhHJ6tBNtf+cggqfo51SG/FCwnKhXCIM7hnVg==", + "node_modules/@typescript-eslint/scope-manager": { + "version": "5.28.0", + "resolved": "https://registry.npmjs.org/@typescript-eslint/scope-manager/-/scope-manager-5.28.0.tgz", + "integrity": "sha512-LeBLTqF/he1Z+boRhSqnso6YrzcKMTQ8bO/YKEe+6+O/JGof9M0g3IJlIsqfrK/6K03MlFIlycbf1uQR1IjE+w==", + "dev": true, "dependencies": { - "debug": "^4.1.1", - "eslint-visitor-keys": "^1.1.0", - "glob": 
"^7.1.6", - "is-glob": "^4.0.1", - "lodash": "^4.17.15", - "semver": "^7.3.2", - "tsutils": "^3.17.1" + "@typescript-eslint/types": "5.28.0", + "@typescript-eslint/visitor-keys": "5.28.0" }, "engines": { - "node": "^8.10.0 || ^10.13.0 || >=11.10.1" + "node": "^12.22.0 || ^14.17.0 || >=16.0.0" }, "funding": { "type": "opencollective", "url": "https://opencollective.com/typescript-eslint" + } + }, + "node_modules/@typescript-eslint/type-utils": { + "version": "5.28.0", + "resolved": "https://registry.npmjs.org/@typescript-eslint/type-utils/-/type-utils-5.28.0.tgz", + "integrity": "sha512-SyKjKh4CXPglueyC6ceAFytjYWMoPHMswPQae236zqe1YbhvCVQyIawesYywGiu98L9DwrxsBN69vGIVxJ4mQQ==", + "dev": true, + "dependencies": { + "@typescript-eslint/utils": "5.28.0", + "debug": "^4.3.4", + "tsutils": "^3.21.0" + }, + "engines": { + "node": "^12.22.0 || ^14.17.0 || >=16.0.0" + }, + "funding": { + "type": "opencollective", + "url": "https://opencollective.com/typescript-eslint" + }, + "peerDependencies": { + "eslint": "*" }, "peerDependenciesMeta": { "typescript": { @@ -3129,35 +3674,86 @@ } } }, - "node_modules/@typescript-eslint/typescript-estree/node_modules/lru-cache": { - "version": "6.0.0", - "resolved": "https://registry.npmjs.org/lru-cache/-/lru-cache-6.0.0.tgz", - "integrity": "sha512-Jo6dJ04CmSjuznwJSS3pUeWmd/H0ffTlkXXgwZi+eq1UCmqQwCh+eLsYOYCwY991i2Fah4h1BEMCx4qThGbsiA==", + "node_modules/@typescript-eslint/types": { + "version": "5.28.0", + "resolved": "https://registry.npmjs.org/@typescript-eslint/types/-/types-5.28.0.tgz", + "integrity": "sha512-2OOm8ZTOQxqkPbf+DAo8oc16sDlVR5owgJfKheBkxBKg1vAfw2JsSofH9+16VPlN9PWtv8Wzhklkqw3k/zCVxA==", + "dev": true, + "engines": { + "node": "^12.22.0 || ^14.17.0 || >=16.0.0" + }, + "funding": { + "type": "opencollective", + "url": "https://opencollective.com/typescript-eslint" + } + }, + "node_modules/@typescript-eslint/typescript-estree": { + "version": "5.28.0", + "resolved": 
"https://registry.npmjs.org/@typescript-eslint/typescript-estree/-/typescript-estree-5.28.0.tgz", + "integrity": "sha512-9GX+GfpV+F4hdTtYc6OV9ZkyYilGXPmQpm6AThInpBmKJEyRSIjORJd1G9+bknb7OTFYL+Vd4FBJAO6T78OVqA==", + "dev": true, "dependencies": { - "yallist": "^4.0.0" + "@typescript-eslint/types": "5.28.0", + "@typescript-eslint/visitor-keys": "5.28.0", + "debug": "^4.3.4", + "globby": "^11.1.0", + "is-glob": "^4.0.3", + "semver": "^7.3.7", + "tsutils": "^3.21.0" }, "engines": { - "node": ">=10" + "node": "^12.22.0 || ^14.17.0 || >=16.0.0" + }, + "funding": { + "type": "opencollective", + "url": "https://opencollective.com/typescript-eslint" + }, + "peerDependenciesMeta": { + "typescript": { + "optional": true + } } }, - "node_modules/@typescript-eslint/typescript-estree/node_modules/semver": { - "version": "7.3.7", - "resolved": "https://registry.npmjs.org/semver/-/semver-7.3.7.tgz", - "integrity": "sha512-QlYTucUYOews+WeEujDoEGziz4K6c47V/Bd+LjSSYcA94p+DmINdf7ncaUinThfvZyu13lN9OY1XDxt8C0Tw0g==", + "node_modules/@typescript-eslint/utils": { + "version": "5.28.0", + "resolved": "https://registry.npmjs.org/@typescript-eslint/utils/-/utils-5.28.0.tgz", + "integrity": "sha512-E60N5L0fjv7iPJV3UGc4EC+A3Lcj4jle9zzR0gW7vXhflO7/J29kwiTGITA2RlrmPokKiZbBy2DgaclCaEUs6g==", + "dev": true, "dependencies": { - "lru-cache": "^6.0.0" - }, - "bin": { - "semver": "bin/semver.js" + "@types/json-schema": "^7.0.9", + "@typescript-eslint/scope-manager": "5.28.0", + "@typescript-eslint/types": "5.28.0", + "@typescript-eslint/typescript-estree": "5.28.0", + "eslint-scope": "^5.1.1", + "eslint-utils": "^3.0.0" }, "engines": { - "node": ">=10" + "node": "^12.22.0 || ^14.17.0 || >=16.0.0" + }, + "funding": { + "type": "opencollective", + "url": "https://opencollective.com/typescript-eslint" + }, + "peerDependencies": { + "eslint": "^6.0.0 || ^7.0.0 || ^8.0.0" } }, - "node_modules/@typescript-eslint/typescript-estree/node_modules/yallist": { - "version": "4.0.0", - "resolved": 
"https://registry.npmjs.org/yallist/-/yallist-4.0.0.tgz", - "integrity": "sha512-3wdGidZyq5PB084XLES5TpOSRA3wjXAlIWMhum2kRcv/41Sn2emQ0dycQW4uZXLejwKvg6EsvbdlVL+FYEct7A==" + "node_modules/@typescript-eslint/visitor-keys": { + "version": "5.28.0", + "resolved": "https://registry.npmjs.org/@typescript-eslint/visitor-keys/-/visitor-keys-5.28.0.tgz", + "integrity": "sha512-BtfP1vCor8cWacovzzPFOoeW4kBQxzmhxGoOpt0v1SFvG+nJ0cWaVdJk7cky1ArTcFHHKNIxyo2LLr3oNkSuXA==", + "dev": true, + "dependencies": { + "@typescript-eslint/types": "5.28.0", + "eslint-visitor-keys": "^3.3.0" + }, + "engines": { + "node": "^12.22.0 || ^14.17.0 || >=16.0.0" + }, + "funding": { + "type": "opencollective", + "url": "https://opencollective.com/typescript-eslint" + } }, "node_modules/@webassemblyjs/ast": { "version": "1.8.5", @@ -3345,9 +3941,9 @@ } }, "node_modules/acorn": { - "version": "7.4.1", - "resolved": "https://registry.npmjs.org/acorn/-/acorn-7.4.1.tgz", - "integrity": "sha512-nQyp0o1/mNdbTO1PO6kHkwSrmgZ0MT/jCCpNiwbUjGoRN4dlBhqJtoQuCnEOKzgTVwg0ZWiCoQy6SxMebQVh8A==", + "version": "8.7.1", + "resolved": "https://registry.npmjs.org/acorn/-/acorn-8.7.1.tgz", + "integrity": "sha512-Xx54uLJQZ19lKygFXOWsscKUbsBZW0CPykPhVQdhIeIwrbPmJzqeASDInc8nKBnp/JT6igTs82qPXz069H8I/A==", "bin": { "acorn": "bin/acorn" }, @@ -3481,17 +4077,11 @@ } }, "node_modules/ansi-escapes": { - "version": "4.3.2", - "resolved": "https://registry.npmjs.org/ansi-escapes/-/ansi-escapes-4.3.2.tgz", - "integrity": "sha512-gKXj5ALrKWQLsYG9jlTRmR/xKluxHV+Z9QEwNIgCfM1/uwPMCuzVVnh5mwTd+OuBZcwSIMbqssNWRm1lE51QaQ==", - "dependencies": { - "type-fest": "^0.21.3" - }, + "version": "3.2.0", + "resolved": "https://registry.npmjs.org/ansi-escapes/-/ansi-escapes-3.2.0.tgz", + "integrity": "sha512-cBhpre4ma+U0T1oM5fXg7Dy1Jw7zzwv7lt/GoCpr+hDQJoYnKVPLL4dCvSEFMmQurOQvSrwT7SL/DAlhBI97RQ==", "engines": { - "node": ">=8" - }, - "funding": { - "url": "https://github.com/sponsors/sindresorhus" + "node": ">=4" } }, "node_modules/ansi-html": { @@ 
-3536,6 +4126,115 @@ "normalize-path": "^2.1.1" } }, + "node_modules/anymatch/node_modules/braces": { + "version": "2.3.2", + "resolved": "https://registry.npmjs.org/braces/-/braces-2.3.2.tgz", + "integrity": "sha512-aNdbnj9P8PjdXU4ybaWLK2IF3jc/EoDYbC7AazW6to3TRsfXxscC9UXOB5iDiEQrkyIbWp2SLQda4+QAa7nc3w==", + "dependencies": { + "arr-flatten": "^1.1.0", + "array-unique": "^0.3.2", + "extend-shallow": "^2.0.1", + "fill-range": "^4.0.0", + "isobject": "^3.0.1", + "repeat-element": "^1.1.2", + "snapdragon": "^0.8.1", + "snapdragon-node": "^2.0.1", + "split-string": "^3.0.2", + "to-regex": "^3.0.1" + }, + "engines": { + "node": ">=0.10.0" + } + }, + "node_modules/anymatch/node_modules/braces/node_modules/extend-shallow": { + "version": "2.0.1", + "resolved": "https://registry.npmjs.org/extend-shallow/-/extend-shallow-2.0.1.tgz", + "integrity": "sha512-zCnTtlxNoAiDc3gqY2aYAWFx7XWWiasuF2K8Me5WbN8otHKTUKBwjPtNpRs/rbUZm7KxWAaNj7P1a/p52GbVug==", + "dependencies": { + "is-extendable": "^0.1.0" + }, + "engines": { + "node": ">=0.10.0" + } + }, + "node_modules/anymatch/node_modules/fill-range": { + "version": "4.0.0", + "resolved": "https://registry.npmjs.org/fill-range/-/fill-range-4.0.0.tgz", + "integrity": "sha512-VcpLTWqWDiTerugjj8e3+esbg+skS3M9e54UuR3iCeIDMXCLTsAH8hTSzDQU/X6/6t3eYkOKoZSef2PlU6U1XQ==", + "dependencies": { + "extend-shallow": "^2.0.1", + "is-number": "^3.0.0", + "repeat-string": "^1.6.1", + "to-regex-range": "^2.1.0" + }, + "engines": { + "node": ">=0.10.0" + } + }, + "node_modules/anymatch/node_modules/fill-range/node_modules/extend-shallow": { + "version": "2.0.1", + "resolved": "https://registry.npmjs.org/extend-shallow/-/extend-shallow-2.0.1.tgz", + "integrity": "sha512-zCnTtlxNoAiDc3gqY2aYAWFx7XWWiasuF2K8Me5WbN8otHKTUKBwjPtNpRs/rbUZm7KxWAaNj7P1a/p52GbVug==", + "dependencies": { + "is-extendable": "^0.1.0" + }, + "engines": { + "node": ">=0.10.0" + } + }, + "node_modules/anymatch/node_modules/is-number": { + "version": "3.0.0", + "resolved": 
"https://registry.npmjs.org/is-number/-/is-number-3.0.0.tgz", + "integrity": "sha512-4cboCqIpliH+mAvFNegjZQ4kgKc3ZUhQVr3HvWbSh5q3WH2v82ct+T2Y1hdU5Gdtorx/cLifQjqCbL7bpznLTg==", + "dependencies": { + "kind-of": "^3.0.2" + }, + "engines": { + "node": ">=0.10.0" + } + }, + "node_modules/anymatch/node_modules/is-number/node_modules/kind-of": { + "version": "3.2.2", + "resolved": "https://registry.npmjs.org/kind-of/-/kind-of-3.2.2.tgz", + "integrity": "sha512-NOW9QQXMoZGg/oqnVNoNTTIFEIid1627WCffUBJEdMxYApq7mNE7CpzucIPc+ZQg25Phej7IJSmX3hO+oblOtQ==", + "dependencies": { + "is-buffer": "^1.1.5" + }, + "engines": { + "node": ">=0.10.0" + } + }, + "node_modules/anymatch/node_modules/kind-of": { + "version": "6.0.3", + "resolved": "https://registry.npmjs.org/kind-of/-/kind-of-6.0.3.tgz", + "integrity": "sha512-dcS1ul+9tmeD95T+x28/ehLgd9mENa3LsvDTtzm3vyBEO7RPptvAD+t44WVXaUjTBRcrpFeFlC8WCruUR456hw==", + "engines": { + "node": ">=0.10.0" + } + }, + "node_modules/anymatch/node_modules/micromatch": { + "version": "3.1.10", + "resolved": "https://registry.npmjs.org/micromatch/-/micromatch-3.1.10.tgz", + "integrity": "sha512-MWikgl9n9M3w+bpsY3He8L+w9eF9338xRl8IAO5viDizwSzziFEyUzo2xrrloB64ADbTf8uA8vRqqttDTOmccg==", + "dependencies": { + "arr-diff": "^4.0.0", + "array-unique": "^0.3.2", + "braces": "^2.3.1", + "define-property": "^2.0.2", + "extend-shallow": "^3.0.2", + "extglob": "^2.0.4", + "fragment-cache": "^0.2.1", + "kind-of": "^6.0.2", + "nanomatch": "^1.2.9", + "object.pick": "^1.3.0", + "regex-not": "^1.0.0", + "snapdragon": "^0.8.1", + "to-regex": "^3.0.2" + }, + "engines": { + "node": ">=0.10.0" + } + }, "node_modules/anymatch/node_modules/normalize-path": { "version": "2.1.1", "resolved": "https://registry.npmjs.org/normalize-path/-/normalize-path-2.1.1.tgz", @@ -3547,6 +4246,18 @@ "node": ">=0.10.0" } }, + "node_modules/anymatch/node_modules/to-regex-range": { + "version": "2.1.1", + "resolved": "https://registry.npmjs.org/to-regex-range/-/to-regex-range-2.1.1.tgz", + 
"integrity": "sha512-ZZWNfCjUokXXDGXFpZehJIkZqq91BcULFq/Pi7M5i4JnxXdhMKAK682z8bCW3o8Hj1wuuzoKcW3DfVzaP6VuNg==", + "dependencies": { + "is-number": "^3.0.0", + "repeat-string": "^1.6.1" + }, + "engines": { + "node": ">=0.10.0" + } + }, "node_modules/aproba": { "version": "1.2.0", "resolved": "https://registry.npmjs.org/aproba/-/aproba-1.2.0.tgz", @@ -3561,12 +4272,15 @@ } }, "node_modules/aria-query": { - "version": "3.0.0", - "resolved": "https://registry.npmjs.org/aria-query/-/aria-query-3.0.0.tgz", - "integrity": "sha512-majUxHgLehQTeSA+hClx+DY09OVUqG3GtezWkF1krgLGNdlDu9l9V8DaqNMWbq4Eddc8wsyDA0hpDUtnYxQEXw==", + "version": "4.2.2", + "resolved": "https://registry.npmjs.org/aria-query/-/aria-query-4.2.2.tgz", + "integrity": "sha512-o/HelwhuKpTj/frsOsbNLNgnNGVIFsVP/SW2BSF14gVl7kAfMOJ6/8wUAUvG1R1NHKrfG+2sHZTu0yauT1qBrA==", "dependencies": { - "ast-types-flow": "0.0.7", - "commander": "^2.11.0" + "@babel/runtime": "^7.10.2", + "@babel/runtime-corejs3": "^7.10.2" + }, + "engines": { + "node": ">=6.0" } }, "node_modules/arity-n": { @@ -3627,14 +4341,12 @@ } }, "node_modules/array-union": { - "version": "1.0.2", - "resolved": "https://registry.npmjs.org/array-union/-/array-union-1.0.2.tgz", - "integrity": "sha512-Dxr6QJj/RdU/hCaBjOfxW+q6lyuVE6JFWIrAUpuOOhoJJoQ99cUn3igRaHVB5P9WrgFVN0FfArM3x0cueOU8ng==", - "dependencies": { - "array-uniq": "^1.0.1" - }, + "version": "2.1.0", + "resolved": "https://registry.npmjs.org/array-union/-/array-union-2.1.0.tgz", + "integrity": "sha512-HGyxoOTYUyCM6stUe6EJgnd4EoewAI7zMdfqO+kGjnlZmBDz/cR5pf8r/cR4Wq60sL/p0IkcjUEEPwS3GFrIyw==", + "dev": true, "engines": { - "node": ">=0.10.0" + "node": ">=8" } }, "node_modules/array-uniq": { @@ -3670,6 +4382,23 @@ "url": "https://github.com/sponsors/ljharb" } }, + "node_modules/array.prototype.flatmap": { + "version": "1.3.0", + "resolved": "https://registry.npmjs.org/array.prototype.flatmap/-/array.prototype.flatmap-1.3.0.tgz", + "integrity": 
"sha512-PZC9/8TKAIxcWKdyeb77EzULHPrIX/tIZebLJUQOMR1OwYosT8yggdfWScfTBCDj5utONvOuPQQumYsU2ULbkg==", + "dependencies": { + "call-bind": "^1.0.2", + "define-properties": "^1.1.3", + "es-abstract": "^1.19.2", + "es-shim-unscopables": "^1.0.0" + }, + "engines": { + "node": ">= 0.4" + }, + "funding": { + "url": "https://github.com/sponsors/ljharb" + } + }, "node_modules/array.prototype.reduce": { "version": "1.0.4", "resolved": "https://registry.npmjs.org/array.prototype.reduce/-/array.prototype.reduce-1.0.4.tgz", @@ -3849,6 +4578,14 @@ "resolved": "https://registry.npmjs.org/aws4/-/aws4-1.11.0.tgz", "integrity": "sha512-xh1Rl34h6Fi1DC2WWKfxUTVqRsNnr6LsKz2+hfwDxQJWmrx8+c7ylaqBMcHfl1U1r2dsifOvKX3LQuLNZ+XSvA==" }, + "node_modules/axe-core": { + "version": "4.4.2", + "resolved": "https://registry.npmjs.org/axe-core/-/axe-core-4.4.2.tgz", + "integrity": "sha512-LVAaGp/wkkgYJcjmHsoKx4juT1aQvJyPcW09MLCjVTh3V2cc6PnyempiLMNH5iMdfIX/zdbjUx2KDjMLCTdPeA==", + "engines": { + "node": ">=12" + } + }, "node_modules/axobject-query": { "version": "2.2.0", "resolved": "https://registry.npmjs.org/axobject-query/-/axobject-query-2.2.0.tgz", @@ -3895,6 +4632,14 @@ "node": ">=0.10.0" } }, + "node_modules/babel-code-frame/node_modules/escape-string-regexp": { + "version": "1.0.5", + "resolved": "https://registry.npmjs.org/escape-string-regexp/-/escape-string-regexp-1.0.5.tgz", + "integrity": "sha512-vbRorB5FUQWvla16U8R/qgaFIya2qGzwDrNmCZuYKrbdSUMG6I1ZCGQRefkRVhuOkIGVne7BQ35DSfo1qvJqFg==", + "engines": { + "node": ">=0.8.0" + } + }, "node_modules/babel-code-frame/node_modules/js-tokens": { "version": "3.0.2", "resolved": "https://registry.npmjs.org/js-tokens/-/js-tokens-3.0.2.tgz", @@ -3939,6 +4684,14 @@ "eslint": ">= 4.12.1" } }, + "node_modules/babel-eslint/node_modules/eslint-visitor-keys": { + "version": "1.3.0", + "resolved": "https://registry.npmjs.org/eslint-visitor-keys/-/eslint-visitor-keys-1.3.0.tgz", + "integrity": 
"sha512-6J72N8UNa462wa/KFODt/PJ3IU60SDpC3QXC1Hjc1BXXpfL2C9R5+AU7jhe0F6GREqVMh4Juu+NY7xn+6dipUQ==", + "engines": { + "node": ">=4" + } + }, "node_modules/babel-extract-comments": { "version": "1.0.0", "resolved": "https://registry.npmjs.org/babel-extract-comments/-/babel-extract-comments-1.0.0.tgz", @@ -4007,6 +4760,14 @@ "resolved": "https://registry.npmjs.org/color-name/-/color-name-1.1.3.tgz", "integrity": "sha512-72fSenhMw2HZMTVHeCA9KCmpEIbzWiQsjN+BHcBbS9vr1mtt+vJjPdksIBNUmKAW8TFUDPJK5SUU3QhE9NEXDw==" }, + "node_modules/babel-jest/node_modules/escape-string-regexp": { + "version": "1.0.5", + "resolved": "https://registry.npmjs.org/escape-string-regexp/-/escape-string-regexp-1.0.5.tgz", + "integrity": "sha512-vbRorB5FUQWvla16U8R/qgaFIya2qGzwDrNmCZuYKrbdSUMG6I1ZCGQRefkRVhuOkIGVne7BQ35DSfo1qvJqFg==", + "engines": { + "node": ">=0.8.0" + } + }, "node_modules/babel-jest/node_modules/has-flag": { "version": "3.0.0", "resolved": "https://registry.npmjs.org/has-flag/-/has-flag-3.0.0.tgz", @@ -4015,6 +4776,14 @@ "node": ">=4" } }, + "node_modules/babel-jest/node_modules/slash": { + "version": "2.0.0", + "resolved": "https://registry.npmjs.org/slash/-/slash-2.0.0.tgz", + "integrity": "sha512-ZYKh3Wh2z1PpEXWr0MpSBZ0V6mZHAQfYevttO11c51CaWjGTaadiKZ+wVt1PbMlDV5qhMFslpZCemhwOK7C89A==", + "engines": { + "node": ">=6" + } + }, "node_modules/babel-jest/node_modules/supports-color": { "version": "5.5.0", "resolved": "https://registry.npmjs.org/supports-color/-/supports-color-5.5.0.tgz", @@ -4067,6 +4836,62 @@ "node": ">=6" } }, + "node_modules/babel-plugin-istanbul/node_modules/find-up": { + "version": "3.0.0", + "resolved": "https://registry.npmjs.org/find-up/-/find-up-3.0.0.tgz", + "integrity": "sha512-1yD6RmLI1XBfxugvORwlck6f75tYL+iR0jqwsOrOxMZyGYqUuDhJ0l4AXdO1iX/FTs9cBAMEk1gWSEx1kSbylg==", + "dependencies": { + "locate-path": "^3.0.0" + }, + "engines": { + "node": ">=6" + } + }, + "node_modules/babel-plugin-istanbul/node_modules/locate-path": { + "version": "3.0.0", + 
"resolved": "https://registry.npmjs.org/locate-path/-/locate-path-3.0.0.tgz", + "integrity": "sha512-7AO748wWnIhNqAuaty2ZWHkQHRSNfPVIsPIfwEOWO22AmaoVrWavlOcMR5nzTLNYvp36X220/maaRsrec1G65A==", + "dependencies": { + "p-locate": "^3.0.0", + "path-exists": "^3.0.0" + }, + "engines": { + "node": ">=6" + } + }, + "node_modules/babel-plugin-istanbul/node_modules/p-limit": { + "version": "2.3.0", + "resolved": "https://registry.npmjs.org/p-limit/-/p-limit-2.3.0.tgz", + "integrity": "sha512-//88mFWSJx8lxCzwdAABTJL2MyWB12+eIY7MDL2SqLmAkeKU9qxRvWuSyTjm3FUmpBEMuFfckAIqEaVGUDxb6w==", + "dependencies": { + "p-try": "^2.0.0" + }, + "engines": { + "node": ">=6" + }, + "funding": { + "url": "https://github.com/sponsors/sindresorhus" + } + }, + "node_modules/babel-plugin-istanbul/node_modules/p-locate": { + "version": "3.0.0", + "resolved": "https://registry.npmjs.org/p-locate/-/p-locate-3.0.0.tgz", + "integrity": "sha512-x+12w/To+4GFfgJhBEpiDcLozRJGegY+Ei7/z0tSLkMmxGZNybVMSfWj9aJn8Z5Fc7dBUNJOOVgPv2H7IwulSQ==", + "dependencies": { + "p-limit": "^2.0.0" + }, + "engines": { + "node": ">=6" + } + }, + "node_modules/babel-plugin-istanbul/node_modules/p-try": { + "version": "2.2.0", + "resolved": "https://registry.npmjs.org/p-try/-/p-try-2.2.0.tgz", + "integrity": "sha512-R4nPAVTAU0B9D35/Gk3uJf/7XYbQcyohSKdvAxIRSNghFl4e71hVoGnBNQz9cWaXxO2I10KTC+3jMdvvoKw6dQ==", + "engines": { + "node": ">=6" + } + }, "node_modules/babel-plugin-jest-hoist": { "version": "24.9.0", "resolved": "https://registry.npmjs.org/babel-plugin-jest-hoist/-/babel-plugin-jest-hoist-24.9.0.tgz", @@ -4103,21 +4928,6 @@ "node": ">=8" } }, - "node_modules/babel-plugin-macros/node_modules/import-fresh": { - "version": "3.3.0", - "resolved": "https://registry.npmjs.org/import-fresh/-/import-fresh-3.3.0.tgz", - "integrity": "sha512-veYYhQa+D1QBKznvhUHxb8faxlrwUnxseDAbAp457E0wLNio2bOSKnjYDhMj+YiAq61xrMGhQk9iXVk5FzgQMw==", - "dependencies": { - "parent-module": "^1.0.0", - "resolve-from": "^4.0.0" - }, - "engines": { - "node": 
">=6" - }, - "funding": { - "url": "https://github.com/sponsors/sindresorhus" - } - }, "node_modules/babel-plugin-macros/node_modules/parse-json": { "version": "5.2.0", "resolved": "https://registry.npmjs.org/parse-json/-/parse-json-5.2.0.tgz", @@ -4135,22 +4945,6 @@ "url": "https://github.com/sponsors/sindresorhus" } }, - "node_modules/babel-plugin-macros/node_modules/path-type": { - "version": "4.0.0", - "resolved": "https://registry.npmjs.org/path-type/-/path-type-4.0.0.tgz", - "integrity": "sha512-gDKb8aZMDeD/tZWs9P6+q0J9Mwkdl6xMV8TjnGP3qJVJ06bdMgkbBlLU8IdfOsIsFz2BW1rNVT3XuNEl8zPAvw==", - "engines": { - "node": ">=8" - } - }, - "node_modules/babel-plugin-macros/node_modules/resolve-from": { - "version": "4.0.0", - "resolved": "https://registry.npmjs.org/resolve-from/-/resolve-from-4.0.0.tgz", - "integrity": "sha512-pb/MYmXstAkysRFx8piNI1tGFNQIFA3vkE3Gq4EuA1dF6gHp/+vgZqsCGJapvy8N3Q+4o7FwvquPJcnZ7RYy4g==", - "engines": { - "node": ">=4" - } - }, "node_modules/babel-plugin-named-asset-import": { "version": "0.3.8", "resolved": "https://registry.npmjs.org/babel-plugin-named-asset-import/-/babel-plugin-named-asset-import-0.3.8.tgz", @@ -4172,6 +4966,14 @@ "@babel/core": "^7.0.0-0" } }, + "node_modules/babel-plugin-polyfill-corejs2/node_modules/semver": { + "version": "6.3.0", + "resolved": "https://registry.npmjs.org/semver/-/semver-6.3.0.tgz", + "integrity": "sha512-b39TBaTSfV6yBrapU89p5fKekE2m/NwnDocOVruQFS1/veMgdzuPcnOM34M6CwxW8jH/lxEa5rBoDeUwu5HHTw==", + "bin": { + "semver": "bin/semver.js" + } + }, "node_modules/babel-plugin-polyfill-corejs3": { "version": "0.5.2", "resolved": "https://registry.npmjs.org/babel-plugin-polyfill-corejs3/-/babel-plugin-polyfill-corejs3-0.5.2.tgz", @@ -4694,42 +5496,14 @@ } }, "node_modules/braces": { - "version": "2.3.2", - "resolved": "https://registry.npmjs.org/braces/-/braces-2.3.2.tgz", - "integrity": "sha512-aNdbnj9P8PjdXU4ybaWLK2IF3jc/EoDYbC7AazW6to3TRsfXxscC9UXOB5iDiEQrkyIbWp2SLQda4+QAa7nc3w==", - "dependencies": { - 
"arr-flatten": "^1.1.0", - "array-unique": "^0.3.2", - "extend-shallow": "^2.0.1", - "fill-range": "^4.0.0", - "isobject": "^3.0.1", - "repeat-element": "^1.1.2", - "snapdragon": "^0.8.1", - "snapdragon-node": "^2.0.1", - "split-string": "^3.0.2", - "to-regex": "^3.0.1" - }, - "engines": { - "node": ">=0.10.0" - } - }, - "node_modules/braces/node_modules/extend-shallow": { - "version": "2.0.1", - "resolved": "https://registry.npmjs.org/extend-shallow/-/extend-shallow-2.0.1.tgz", - "integrity": "sha512-zCnTtlxNoAiDc3gqY2aYAWFx7XWWiasuF2K8Me5WbN8otHKTUKBwjPtNpRs/rbUZm7KxWAaNj7P1a/p52GbVug==", + "version": "3.0.2", + "resolved": "https://registry.npmjs.org/braces/-/braces-3.0.2.tgz", + "integrity": "sha512-b8um+L1RzM3WDSzvhm6gIz1yfTbBt6YTlcEKAvsmqCZZFw46z626lVj9j1yEPW33H5H+lBQpZMP1k8l+78Ha0A==", "dependencies": { - "is-extendable": "^0.1.0" + "fill-range": "^7.0.1" }, "engines": { - "node": ">=0.10.0" - } - }, - "node_modules/braces/node_modules/is-extendable": { - "version": "0.1.1", - "resolved": "https://registry.npmjs.org/is-extendable/-/is-extendable-0.1.1.tgz", - "integrity": "sha512-5BMULNob1vgFX6EjQw5izWDxrecWK9AM72rugNr0TFldMOi0fj6Jk+zeKIt0xGj4cEfQIJth4w3OKWOJ4f+AFw==", - "engines": { - "node": ">=0.10.0" + "node": ">=8" } }, "node_modules/brcast": { @@ -4966,6 +5740,14 @@ "node": ">= 8" } }, + "node_modules/cacache/node_modules/lru-cache": { + "version": "5.1.1", + "resolved": "https://registry.npmjs.org/lru-cache/-/lru-cache-5.1.1.tgz", + "integrity": "sha512-KpNARQA3Iwv+jTA0utUVVbrh+Jlrr1Fv0e56GGzAFOXN7dk/FviaDW8LHmK52DlcH4WP2n6gI8vN1aesBFgo9w==", + "dependencies": { + "yallist": "^3.0.2" + } + }, "node_modules/cacache/node_modules/rimraf": { "version": "2.7.1", "resolved": "https://registry.npmjs.org/rimraf/-/rimraf-2.7.1.tgz", @@ -4977,6 +5759,11 @@ "rimraf": "bin.js" } }, + "node_modules/cacache/node_modules/yallist": { + "version": "3.1.1", + "resolved": "https://registry.npmjs.org/yallist/-/yallist-3.1.1.tgz", + "integrity": 
"sha512-a4UGQaWPH59mOXUYnAG2ewncQS4i4F43Tv3JoAM+s2VDAmS9NsK8GpDMLrCHPksFT7h3K6TOoUNn2pb7RoXx4g==" + }, "node_modules/cache-base": { "version": "1.0.1", "resolved": "https://registry.npmjs.org/cache-base/-/cache-base-1.0.1.tgz", @@ -5024,6 +5811,14 @@ "node": ">=4" } }, + "node_modules/caller-callsite/node_modules/callsites": { + "version": "2.0.0", + "resolved": "https://registry.npmjs.org/callsites/-/callsites-2.0.0.tgz", + "integrity": "sha512-ksWePWBloaWPxJYQ8TL0JHvtci6G5QTKwQ95RcWAa/lzoAKuAOflGdAK92hpHXjkwb8zLxoLNUoNYZgVsaJzvQ==", + "engines": { + "node": ">=4" + } + }, "node_modules/caller-path": { "version": "2.0.0", "resolved": "https://registry.npmjs.org/caller-path/-/caller-path-2.0.0.tgz", @@ -5036,11 +5831,11 @@ } }, "node_modules/callsites": { - "version": "2.0.0", - "resolved": "https://registry.npmjs.org/callsites/-/callsites-2.0.0.tgz", - "integrity": "sha512-ksWePWBloaWPxJYQ8TL0JHvtci6G5QTKwQ95RcWAa/lzoAKuAOflGdAK92hpHXjkwb8zLxoLNUoNYZgVsaJzvQ==", + "version": "3.1.0", + "resolved": "https://registry.npmjs.org/callsites/-/callsites-3.1.0.tgz", + "integrity": "sha512-P8BjAsXvZS+VIDUI11hHCQEv74YT67YUi5JJFNWIqL235sBmjX4+qx9Muvls5ivyNENctx46xQLQ3aTuE7ssaQ==", "engines": { - "node": ">=4" + "node": ">=6" } }, "node_modules/camel-case": { @@ -5077,9 +5872,9 @@ } }, "node_modules/caniuse-lite": { - "version": "1.0.30001352", - "resolved": "https://registry.npmjs.org/caniuse-lite/-/caniuse-lite-1.0.30001352.tgz", - "integrity": "sha512-GUgH8w6YergqPQDGWhJGt8GDRnY0L/iJVQcU3eJ46GYf52R8tk0Wxp0PymuFVZboJYXGiCqwozAYZNRjVj6IcA==", + "version": "1.0.30001355", + "resolved": "https://registry.npmjs.org/caniuse-lite/-/caniuse-lite-1.0.30001355.tgz", + "integrity": "sha512-Sd6pjJHF27LzCB7pT7qs+kuX2ndurzCzkpJl6Qct7LPSZ9jn0bkOA8mdgMgmqnQAWLVOOGjLpc+66V57eLtb1g==", "funding": [ { "type": "opencollective", @@ -5196,28 +5991,6 @@ "node": ">= 8" } }, - "node_modules/chokidar/node_modules/braces": { - "version": "3.0.2", - "resolved": 
"https://registry.npmjs.org/braces/-/braces-3.0.2.tgz", - "integrity": "sha512-b8um+L1RzM3WDSzvhm6gIz1yfTbBt6YTlcEKAvsmqCZZFw46z626lVj9j1yEPW33H5H+lBQpZMP1k8l+78Ha0A==", - "dependencies": { - "fill-range": "^7.0.1" - }, - "engines": { - "node": ">=8" - } - }, - "node_modules/chokidar/node_modules/fill-range": { - "version": "7.0.1", - "resolved": "https://registry.npmjs.org/fill-range/-/fill-range-7.0.1.tgz", - "integrity": "sha512-qOo9F+dMUmC2Lcb4BbVvnKJxTPjCm+RRpe4gDuGrzkL7mEVl/djYSu2OdQ2Pa302N4oqkSg9ir6jaLWJ2USVpQ==", - "dependencies": { - "to-regex-range": "^5.0.1" - }, - "engines": { - "node": ">=8" - } - }, "node_modules/chokidar/node_modules/fsevents": { "version": "2.3.2", "resolved": "https://registry.npmjs.org/fsevents/-/fsevents-2.3.2.tgz", @@ -5231,23 +6004,15 @@ "node": "^8.16.0 || ^10.6.0 || >=11.0.0" } }, - "node_modules/chokidar/node_modules/is-number": { - "version": "7.0.0", - "resolved": "https://registry.npmjs.org/is-number/-/is-number-7.0.0.tgz", - "integrity": "sha512-41Cifkg6e8TylSpdtTpeLVMqvSBEVzTttHvERD741+pnZ8ANv0004MRL43QKPDlK9cGvNp6NZWZUBlbGXYxxng==", - "engines": { - "node": ">=0.12.0" - } - }, - "node_modules/chokidar/node_modules/to-regex-range": { - "version": "5.0.1", - "resolved": "https://registry.npmjs.org/to-regex-range/-/to-regex-range-5.0.1.tgz", - "integrity": "sha512-65P7iz6X5yEr1cwcgvQxbbIw7Uk3gOy5dIdtZ4rDveLqhrdJP+Li/Hx6tyK0NEb+2GCyneCMJiGqrADCSNk8sQ==", + "node_modules/chokidar/node_modules/glob-parent": { + "version": "5.1.2", + "resolved": "https://registry.npmjs.org/glob-parent/-/glob-parent-5.1.2.tgz", + "integrity": "sha512-AOIgSQCepiJYwP3ARnGx+5VnTu2HBYdzbGP45eLw1vr3zB3vZLeyed1sC9hnbcOc9/SrMyM5RPQrkGz4aS9Zow==", "dependencies": { - "is-number": "^7.0.0" + "is-glob": "^4.0.1" }, "engines": { - "node": ">=8.0" + "node": ">= 6" } }, "node_modules/chownr": { @@ -5381,12 +6146,9 @@ } }, "node_modules/cli-width": { - "version": "3.0.0", - "resolved": "https://registry.npmjs.org/cli-width/-/cli-width-3.0.0.tgz", - 
"integrity": "sha512-FxqpkPPwu1HjuN93Omfm4h8uIanXofW0RxVEW3k5RKx+mJJYSthzNhp32Kzxxy3YAEZ/Dc/EWN1vZRY0+kOhbw==", - "engines": { - "node": ">= 10" - } + "version": "2.2.1", + "resolved": "https://registry.npmjs.org/cli-width/-/cli-width-2.2.1.tgz", + "integrity": "sha512-GRMWDxpOB6Dgk2E5Uo+3eEBvtOOlimMmpbFiKuLFnQzYDavtLFY3K5ona41jgN/WdRZtG7utuVSVTL4HbZHGkw==" }, "node_modules/cliui": { "version": "5.0.0", @@ -5398,6 +6160,19 @@ "wrap-ansi": "^5.1.0" } }, + "node_modules/cliui/node_modules/ansi-regex": { + "version": "4.1.1", + "resolved": "https://registry.npmjs.org/ansi-regex/-/ansi-regex-4.1.1.tgz", + "integrity": "sha512-ILlv4k/3f6vfQ4OoP2AGvirOktlQ98ZEL1k9FaQjxa3L1abBgbuTDAdPOpvbGncC0BTVQrl+OM8xZGK6tWXt7g==", + "engines": { + "node": ">=6" + } + }, + "node_modules/cliui/node_modules/emoji-regex": { + "version": "7.0.3", + "resolved": "https://registry.npmjs.org/emoji-regex/-/emoji-regex-7.0.3.tgz", + "integrity": "sha512-CwBLREIQ7LvYFB0WyRvwhq5N5qPhc6PMjD6bYggFlI5YyDgl+0vxq5VHbMOFqLg7hfWzmu8T5Z1QofhmTIhItA==" + }, "node_modules/cliui/node_modules/is-fullwidth-code-point": { "version": "2.0.0", "resolved": "https://registry.npmjs.org/is-fullwidth-code-point/-/is-fullwidth-code-point-2.0.0.tgz", @@ -5419,6 +6194,17 @@ "node": ">=6" } }, + "node_modules/cliui/node_modules/strip-ansi": { + "version": "5.2.0", + "resolved": "https://registry.npmjs.org/strip-ansi/-/strip-ansi-5.2.0.tgz", + "integrity": "sha512-DuRs1gKbBqsMKIZlrffwlug8MHkcnpjs5VPmL1PAh+mA30U0DTotfDZ0d2UUsXpPmPmMMJ6W773MaA3J+lbiWA==", + "dependencies": { + "ansi-regex": "^4.1.0" + }, + "engines": { + "node": ">=6" + } + }, "node_modules/clone-deep": { "version": "0.2.4", "resolved": "https://registry.npmjs.org/clone-deep/-/clone-deep-0.2.4.tgz", @@ -5493,6 +6279,14 @@ "resolved": "https://registry.npmjs.org/color-name/-/color-name-1.1.3.tgz", "integrity": "sha512-72fSenhMw2HZMTVHeCA9KCmpEIbzWiQsjN+BHcBbS9vr1mtt+vJjPdksIBNUmKAW8TFUDPJK5SUU3QhE9NEXDw==" }, + 
"node_modules/coa/node_modules/escape-string-regexp": { + "version": "1.0.5", + "resolved": "https://registry.npmjs.org/escape-string-regexp/-/escape-string-regexp-1.0.5.tgz", + "integrity": "sha512-vbRorB5FUQWvla16U8R/qgaFIya2qGzwDrNmCZuYKrbdSUMG6I1ZCGQRefkRVhuOkIGVne7BQ35DSfo1qvJqFg==", + "engines": { + "node": ">=0.8.0" + } + }, "node_modules/coa/node_modules/has-flag": { "version": "3.0.0", "resolved": "https://registry.npmjs.org/has-flag/-/has-flag-3.0.0.tgz", @@ -5597,9 +6391,12 @@ } }, "node_modules/commander": { - "version": "2.20.3", - "resolved": "https://registry.npmjs.org/commander/-/commander-2.20.3.tgz", - "integrity": "sha512-GpVkmM8vF2vQUkj2LvZmD35JxeJOLCwJ9cUkugyk2nuhbv3+mJvpLYYt+0+USMxE+oj+ey/lJEnhZw75x/OMcQ==" + "version": "4.1.1", + "resolved": "https://registry.npmjs.org/commander/-/commander-4.1.1.tgz", + "integrity": "sha512-NOKm8xhkzAjzFx8B2v5OAHT+u5pRQc2UCa2Vq9jYL/31o2wi9mxBA7LIFs3sV5VSC49z6pEhfbMULvShKj26WA==", + "engines": { + "node": ">= 6" + } }, "node_modules/common-tags": { "version": "1.8.2", @@ -5790,6 +6587,17 @@ "run-queue": "^1.0.0" } }, + "node_modules/copy-concurrently/node_modules/rimraf": { + "version": "2.7.1", + "resolved": "https://registry.npmjs.org/rimraf/-/rimraf-2.7.1.tgz", + "integrity": "sha512-uWjbaKIK3T1OSVptzX7Nl6PvQ3qAGtKEtVRjRuazjfL3Bx5eI409VZSqgND+4UNnmzLVdPj9FqFJNPqBZFve4w==", + "dependencies": { + "glob": "^7.1.3" + }, + "bin": { + "rimraf": "bin.js" + } + }, "node_modules/copy-descriptor": { "version": "0.1.1", "resolved": "https://registry.npmjs.org/copy-descriptor/-/copy-descriptor-0.1.1.tgz", @@ -5858,6 +6666,26 @@ "node": ">=4" } }, + "node_modules/cosmiconfig/node_modules/import-fresh": { + "version": "2.0.0", + "resolved": "https://registry.npmjs.org/import-fresh/-/import-fresh-2.0.0.tgz", + "integrity": "sha512-eZ5H8rcgYazHbKC3PG4ClHNykCSxtAhxSSEM+2mb+7evD2CKF5V7c0dNum7AdpDh0ZdICwZY9sRSn8f+KH96sg==", + "dependencies": { + "caller-path": "^2.0.0", + "resolve-from": "^3.0.0" + }, + "engines": { + 
"node": ">=4" + } + }, + "node_modules/cosmiconfig/node_modules/resolve-from": { + "version": "3.0.0", + "resolved": "https://registry.npmjs.org/resolve-from/-/resolve-from-3.0.0.tgz", + "integrity": "sha512-GnlH6vxLymXJNMBo7XP1fJIzBFbdYt49CuTwmB/6N53t+kMPRMFKz783LlQ4tv28XoQfMWinAJX6WCGf2IlaIw==", + "engines": { + "node": ">=4" + } + }, "node_modules/create-ecdh": { "version": "4.0.4", "resolved": "https://registry.npmjs.org/create-ecdh/-/create-ecdh-4.0.4.tgz", @@ -5898,26 +6726,16 @@ } }, "node_modules/cross-spawn": { - "version": "6.0.5", - "resolved": "https://registry.npmjs.org/cross-spawn/-/cross-spawn-6.0.5.tgz", - "integrity": "sha512-eTVLrBSt7fjbDygz805pMnstIs2VTBNkRm0qxZd+M7A5XDdxVRWO5MxGBXZhjY4cqLYLdtrGqRf8mBPmzwSpWQ==", + "version": "7.0.3", + "resolved": "https://registry.npmjs.org/cross-spawn/-/cross-spawn-7.0.3.tgz", + "integrity": "sha512-iRDPJKUPVEND7dHPO8rkbOnPpyDygcDFtWjpeWNCgy8WP2rXcxXL8TskReQl6OrB2G7+UJrags1q15Fudc7G6w==", "dependencies": { - "nice-try": "^1.0.4", - "path-key": "^2.0.1", - "semver": "^5.5.0", - "shebang-command": "^1.2.0", - "which": "^1.2.9" + "path-key": "^3.1.0", + "shebang-command": "^2.0.0", + "which": "^2.0.1" }, "engines": { - "node": ">=4.8" - } - }, - "node_modules/cross-spawn/node_modules/semver": { - "version": "5.7.1", - "resolved": "https://registry.npmjs.org/semver/-/semver-5.7.1.tgz", - "integrity": "sha512-sauaDf/PZdVgrLTNYHRtpXa1iRiKcaebiKQ1BJdpQlWH2lCvexQdX55snPFyK7QzpudqbCI0qXFfOasHdyNDGQ==", - "bin": { - "semver": "bin/semver" + "node": ">= 8" } }, "node_modules/crypto-browserify": { @@ -6487,6 +7305,17 @@ "node": ">=6" } }, + "node_modules/del/node_modules/array-union": { + "version": "1.0.2", + "resolved": "https://registry.npmjs.org/array-union/-/array-union-1.0.2.tgz", + "integrity": "sha512-Dxr6QJj/RdU/hCaBjOfxW+q6lyuVE6JFWIrAUpuOOhoJJoQ99cUn3igRaHVB5P9WrgFVN0FfArM3x0cueOU8ng==", + "dependencies": { + "array-uniq": "^1.0.1" + }, + "engines": { + "node": ">=0.10.0" + } + }, 
"node_modules/del/node_modules/globby": { "version": "6.1.0", "resolved": "https://registry.npmjs.org/globby/-/globby-6.1.0.tgz", @@ -6518,6 +7347,17 @@ "node": ">=6" } }, + "node_modules/del/node_modules/rimraf": { + "version": "2.7.1", + "resolved": "https://registry.npmjs.org/rimraf/-/rimraf-2.7.1.tgz", + "integrity": "sha512-uWjbaKIK3T1OSVptzX7Nl6PvQ3qAGtKEtVRjRuazjfL3Bx5eI409VZSqgND+4UNnmzLVdPj9FqFJNPqBZFve4w==", + "dependencies": { + "glob": "^7.1.3" + }, + "bin": { + "rimraf": "bin.js" + } + }, "node_modules/delayed-stream": { "version": "1.0.0", "resolved": "https://registry.npmjs.org/delayed-stream/-/delayed-stream-1.0.0.tgz", @@ -6634,15 +7474,15 @@ "integrity": "sha512-c98Bf3tPniI+scsdk237ku1Dc3ujXQTSgyiPUDEOe7tRkhrqridvh8klBv0HCEso1OLOYcHuCv/cS6DNxKH+ZA==" }, "node_modules/dir-glob": { - "version": "2.0.0", - "resolved": "https://registry.npmjs.org/dir-glob/-/dir-glob-2.0.0.tgz", - "integrity": "sha512-37qirFDz8cA5fimp9feo43fSuRo2gHwaIn6dXL8Ber1dGwUosDrGZeCCXq57WnIqE4aQ+u3eQZzsk1yOzhdwag==", + "version": "3.0.1", + "resolved": "https://registry.npmjs.org/dir-glob/-/dir-glob-3.0.1.tgz", + "integrity": "sha512-WkrWp9GR4KXfKGYzOLmTuGVi1UWFfws377n9cc55/tb6DuqyF6pcQ5AbiHEshaDpY9v6oaSr2XCDidGmMwdzIA==", + "dev": true, "dependencies": { - "arrify": "^1.0.1", - "path-type": "^3.0.0" + "path-type": "^4.0.0" }, "engines": { - "node": ">=4" + "node": ">=8" } }, "node_modules/dns-equal": { @@ -6834,9 +7674,9 @@ "integrity": "sha512-WMwm9LhRUo+WUaRN+vRuETqG89IgZphVSNkdFgeb6sS/E4OrDIN7t48CAewSHXc6C8lefD8KKfr5vY61brQlow==" }, "node_modules/electron-to-chromium": { - "version": "1.4.154", - "resolved": "https://registry.npmjs.org/electron-to-chromium/-/electron-to-chromium-1.4.154.tgz", - "integrity": "sha512-GbV9djOkrnj6xmW+YYVVEI3VCQnJ0pnSTu7TW2JyjKd5cakoiSaG5R4RbEtfaD92GsY10DzbU3GYRe+IOA9kqA==" + "version": "1.4.160", + "resolved": "https://registry.npmjs.org/electron-to-chromium/-/electron-to-chromium-1.4.160.tgz", + "integrity": 
"sha512-O1Z12YfyeX2LXYO7MdHIPazGXzLzQnr1ADW55U2ARQsJBPgfpJz3u+g3Mo2l1wSyfOCdiGqaX9qtV4XKZ0HNRA==" }, "node_modules/elliptic": { "version": "6.5.4", @@ -6858,9 +7698,9 @@ "integrity": "sha512-c98Bf3tPniI+scsdk237ku1Dc3ujXQTSgyiPUDEOe7tRkhrqridvh8klBv0HCEso1OLOYcHuCv/cS6DNxKH+ZA==" }, "node_modules/emoji-regex": { - "version": "7.0.3", - "resolved": "https://registry.npmjs.org/emoji-regex/-/emoji-regex-7.0.3.tgz", - "integrity": "sha512-CwBLREIQ7LvYFB0WyRvwhq5N5qPhc6PMjD6bYggFlI5YyDgl+0vxq5VHbMOFqLg7hfWzmu8T5Z1QofhmTIhItA==" + "version": "9.2.2", + "resolved": "https://registry.npmjs.org/emoji-regex/-/emoji-regex-9.2.2.tgz", + "integrity": "sha512-L18DaJsXSUk2+42pv8mLs5jJT2hqFkFE4j21wOmgbUqsZ2hL72NsUU785g9RXgo3s0ZNgVl42TiHp3ZtOv/Vyg==" }, "node_modules/emojis-list": { "version": "3.0.0", @@ -7069,11 +7909,14 @@ "integrity": "sha512-NiSupZ4OeuGwr68lGIeym/ksIZMJodUGOSCZ/FSnTxcrekbvqrgdUxlJOMpijaKZVjAJrWrGs/6Jy8OMuyj9ow==" }, "node_modules/escape-string-regexp": { - "version": "1.0.5", - "resolved": "https://registry.npmjs.org/escape-string-regexp/-/escape-string-regexp-1.0.5.tgz", - "integrity": "sha512-vbRorB5FUQWvla16U8R/qgaFIya2qGzwDrNmCZuYKrbdSUMG6I1ZCGQRefkRVhuOkIGVne7BQ35DSfo1qvJqFg==", + "version": "4.0.0", + "resolved": "https://registry.npmjs.org/escape-string-regexp/-/escape-string-regexp-4.0.0.tgz", + "integrity": "sha512-TtpcNJ3XAzx3Gq8sWRzJaVajRs0uVxA2YAkdb1jm2YkPz4G6egUFAyA3n5vtEIZefPk5Wa4UXbKuS5fKkJWdgA==", "engines": { - "node": ">=0.8.0" + "node": ">=10" + }, + "funding": { + "url": "https://github.com/sponsors/sindresorhus" } }, "node_modules/escodegen": { @@ -7097,46 +7940,99 @@ "source-map": "~0.6.1" } }, + "node_modules/escodegen/node_modules/estraverse": { + "version": "4.3.0", + "resolved": "https://registry.npmjs.org/estraverse/-/estraverse-4.3.0.tgz", + "integrity": "sha512-39nnKffWz8xN1BU/2c79n9nB9HDzo0niYUqx6xyqUnyoAnQyyWpOTdZEeiCch8BBu515t4wp9ZmgVfVhn9EBpw==", + "engines": { + "node": ">=4.0" + } + }, + 
"node_modules/escodegen/node_modules/levn": { + "version": "0.3.0", + "resolved": "https://registry.npmjs.org/levn/-/levn-0.3.0.tgz", + "integrity": "sha512-0OO4y2iOHix2W6ujICbKIaEQXvFQHue65vUG3pb5EUomzPI90z9hsA1VsO/dbIIpC53J8gxM9Q4Oho0jrCM/yA==", + "dependencies": { + "prelude-ls": "~1.1.2", + "type-check": "~0.3.2" + }, + "engines": { + "node": ">= 0.8.0" + } + }, + "node_modules/escodegen/node_modules/optionator": { + "version": "0.8.3", + "resolved": "https://registry.npmjs.org/optionator/-/optionator-0.8.3.tgz", + "integrity": "sha512-+IW9pACdk3XWmmTXG8m3upGUJst5XRGzxMRjXzAuJ1XnIFNvfhjjIuYkDvysnPQ7qzqVzLt78BCruntqRhWQbA==", + "dependencies": { + "deep-is": "~0.1.3", + "fast-levenshtein": "~2.0.6", + "levn": "~0.3.0", + "prelude-ls": "~1.1.2", + "type-check": "~0.3.2", + "word-wrap": "~1.2.3" + }, + "engines": { + "node": ">= 0.8.0" + } + }, + "node_modules/escodegen/node_modules/prelude-ls": { + "version": "1.1.2", + "resolved": "https://registry.npmjs.org/prelude-ls/-/prelude-ls-1.1.2.tgz", + "integrity": "sha512-ESF23V4SKG6lVSGZgYNpbsiaAkdab6ZgOxe52p7+Kid3W3u3bxR4Vfd/o21dmN7jSt0IwgZ4v5MUd26FEtXE9w==", + "engines": { + "node": ">= 0.8.0" + } + }, + "node_modules/escodegen/node_modules/type-check": { + "version": "0.3.2", + "resolved": "https://registry.npmjs.org/type-check/-/type-check-0.3.2.tgz", + "integrity": "sha512-ZCmOJdvOWDBYJlzAoFkC+Q0+bUyEOS1ltgp1MGU03fqHG+dbi9tBFU2Rd9QKiDZFAYrhPh2JUf7rZRIuHRKtOg==", + "dependencies": { + "prelude-ls": "~1.1.2" + }, + "engines": { + "node": ">= 0.8.0" + } + }, "node_modules/eslint": { - "version": "6.8.0", - "resolved": "https://registry.npmjs.org/eslint/-/eslint-6.8.0.tgz", - "integrity": "sha512-K+Iayyo2LtyYhDSYwz5D5QdWw0hCacNzyq1Y821Xna2xSJj7cijoLLYmLxTQgcgZ9mC61nryMy9S7GRbYpI5Ig==", + "version": "8.17.0", + "resolved": "https://registry.npmjs.org/eslint/-/eslint-8.17.0.tgz", + "integrity": "sha512-gq0m0BTJfci60Fz4nczYxNAlED+sMcihltndR8t9t1evnU/azx53x3t2UHXC/uRjcbvRw/XctpaNygSTcQD+Iw==", "dependencies": { - 
"@babel/code-frame": "^7.0.0", + "@eslint/eslintrc": "^1.3.0", + "@humanwhocodes/config-array": "^0.9.2", "ajv": "^6.10.0", - "chalk": "^2.1.0", - "cross-spawn": "^6.0.5", - "debug": "^4.0.1", + "chalk": "^4.0.0", + "cross-spawn": "^7.0.2", + "debug": "^4.3.2", "doctrine": "^3.0.0", - "eslint-scope": "^5.0.0", - "eslint-utils": "^1.4.3", - "eslint-visitor-keys": "^1.1.0", - "espree": "^6.1.2", - "esquery": "^1.0.1", + "escape-string-regexp": "^4.0.0", + "eslint-scope": "^7.1.1", + "eslint-utils": "^3.0.0", + "eslint-visitor-keys": "^3.3.0", + "espree": "^9.3.2", + "esquery": "^1.4.0", "esutils": "^2.0.2", - "file-entry-cache": "^5.0.1", + "fast-deep-equal": "^3.1.3", + "file-entry-cache": "^6.0.1", "functional-red-black-tree": "^1.0.1", - "glob-parent": "^5.0.0", - "globals": "^12.1.0", - "ignore": "^4.0.6", + "glob-parent": "^6.0.1", + "globals": "^13.15.0", + "ignore": "^5.2.0", "import-fresh": "^3.0.0", "imurmurhash": "^0.1.4", - "inquirer": "^7.0.0", "is-glob": "^4.0.0", - "js-yaml": "^3.13.1", + "js-yaml": "^4.1.0", "json-stable-stringify-without-jsonify": "^1.0.1", - "levn": "^0.3.0", - "lodash": "^4.17.14", - "minimatch": "^3.0.4", - "mkdirp": "^0.5.1", + "levn": "^0.4.1", + "lodash.merge": "^4.6.2", + "minimatch": "^3.1.2", "natural-compare": "^1.4.0", - "optionator": "^0.8.3", - "progress": "^2.0.0", - "regexpp": "^2.0.1", - "semver": "^6.1.2", - "strip-ansi": "^5.2.0", - "strip-json-comments": "^3.0.1", - "table": "^5.2.3", + "optionator": "^0.9.1", + "regexpp": "^3.2.0", + "strip-ansi": "^6.0.1", + "strip-json-comments": "^3.1.0", "text-table": "^0.2.0", "v8-compile-cache": "^2.0.3" }, @@ -7144,29 +8040,71 @@ "eslint": "bin/eslint.js" }, "engines": { - "node": "^8.10.0 || ^10.13.0 || >=11.10.1" + "node": "^12.22.0 || ^14.17.0 || >=16.0.0" }, "funding": { "url": "https://opencollective.com/eslint" } }, - "node_modules/eslint-config-react-app": { - "version": "5.2.1", - "resolved": 
"https://registry.npmjs.org/eslint-config-react-app/-/eslint-config-react-app-5.2.1.tgz", - "integrity": "sha512-pGIZ8t0mFLcV+6ZirRgYK6RVqUIKRIi9MmgzUEmrIknsn3AdO0I32asO86dJgloHq+9ZPl8UIg8mYrvgP5u2wQ==", + "node_modules/eslint-config-airbnb": { + "version": "19.0.4", + "resolved": "https://registry.npmjs.org/eslint-config-airbnb/-/eslint-config-airbnb-19.0.4.tgz", + "integrity": "sha512-T75QYQVQX57jiNgpF9r1KegMICE94VYwoFQyMGhrvc+lB8YF2E/M/PYDaQe1AJcWaEgqLE+ErXV1Og/+6Vyzew==", "dependencies": { - "confusing-browser-globals": "^1.0.9" + "eslint-config-airbnb-base": "^15.0.0", + "object.assign": "^4.1.2", + "object.entries": "^1.1.5" + }, + "engines": { + "node": "^10.12.0 || ^12.22.0 || ^14.17.0 || >=16.0.0" }, "peerDependencies": { - "@typescript-eslint/eslint-plugin": "2.x", - "@typescript-eslint/parser": "2.x", - "babel-eslint": "10.x", - "eslint": "6.x", - "eslint-plugin-flowtype": "3.x || 4.x", - "eslint-plugin-import": "2.x", - "eslint-plugin-jsx-a11y": "6.x", - "eslint-plugin-react": "7.x", - "eslint-plugin-react-hooks": "1.x || 2.x" + "eslint": "^7.32.0 || ^8.2.0", + "eslint-plugin-import": "^2.25.3", + "eslint-plugin-jsx-a11y": "^6.5.1", + "eslint-plugin-react": "^7.28.0", + "eslint-plugin-react-hooks": "^4.3.0" + } + }, + "node_modules/eslint-config-airbnb-base": { + "version": "15.0.0", + "resolved": "https://registry.npmjs.org/eslint-config-airbnb-base/-/eslint-config-airbnb-base-15.0.0.tgz", + "integrity": "sha512-xaX3z4ZZIcFLvh2oUNvcX5oEofXda7giYmuplVxoOg5A7EXJMrUyqRgR+mhDhPK8LZ4PttFOBvCYDbX3sUoUig==", + "dependencies": { + "confusing-browser-globals": "^1.0.10", + "object.assign": "^4.1.2", + "object.entries": "^1.1.5", + "semver": "^6.3.0" + }, + "engines": { + "node": "^10.12.0 || >=12.0.0" + }, + "peerDependencies": { + "eslint": "^7.32.0 || ^8.2.0", + "eslint-plugin-import": "^2.25.2" + } + }, + "node_modules/eslint-config-airbnb-base/node_modules/semver": { + "version": "6.3.0", + "resolved": "https://registry.npmjs.org/semver/-/semver-6.3.0.tgz", 
+ "integrity": "sha512-b39TBaTSfV6yBrapU89p5fKekE2m/NwnDocOVruQFS1/veMgdzuPcnOM34M6CwxW8jH/lxEa5rBoDeUwu5HHTw==", + "bin": { + "semver": "bin/semver.js" + } + }, + "node_modules/eslint-config-airbnb-typescript": { + "version": "17.0.0", + "resolved": "https://registry.npmjs.org/eslint-config-airbnb-typescript/-/eslint-config-airbnb-typescript-17.0.0.tgz", + "integrity": "sha512-elNiuzD0kPAPTXjFWg+lE24nMdHMtuxgYoD30OyMD6yrW1AhFZPAg27VX7d3tzOErw+dgJTNWfRSDqEcXb4V0g==", + "dev": true, + "dependencies": { + "eslint-config-airbnb-base": "^15.0.0" + }, + "peerDependencies": { + "@typescript-eslint/eslint-plugin": "^5.13.0", + "@typescript-eslint/parser": "^5.0.0", + "eslint": "^7.32.0 || ^8.2.0", + "eslint-plugin-import": "^2.25.3" } }, "node_modules/eslint-import-resolver-node": { @@ -7186,42 +8124,6 @@ "ms": "^2.1.1" } }, - "node_modules/eslint-import-resolver-node/node_modules/resolve": { - "version": "1.22.0", - "resolved": "https://registry.npmjs.org/resolve/-/resolve-1.22.0.tgz", - "integrity": "sha512-Hhtrw0nLeSrFQ7phPp4OOcVjLPIeMnRlr5mcnVuMe7M/7eBn98A3hmFRLoFo3DLZkivSYwhRUJTyPyWAk56WLw==", - "dependencies": { - "is-core-module": "^2.8.1", - "path-parse": "^1.0.7", - "supports-preserve-symlinks-flag": "^1.0.0" - }, - "bin": { - "resolve": "bin/resolve" - }, - "funding": { - "url": "https://github.com/sponsors/ljharb" - } - }, - "node_modules/eslint-loader": { - "version": "3.0.3", - "resolved": "https://registry.npmjs.org/eslint-loader/-/eslint-loader-3.0.3.tgz", - "integrity": "sha512-+YRqB95PnNvxNp1HEjQmvf9KNvCin5HXYYseOXVC2U0KEcw4IkQ2IQEBG46j7+gW39bMzeu0GsUhVbBY3Votpw==", - "deprecated": "This loader has been deprecated. 
Please use eslint-webpack-plugin", - "dependencies": { - "fs-extra": "^8.1.0", - "loader-fs-cache": "^1.0.2", - "loader-utils": "^1.2.3", - "object-hash": "^2.0.1", - "schema-utils": "^2.6.1" - }, - "engines": { - "node": ">= 8.9.0" - }, - "peerDependencies": { - "eslint": "^5.0.0 || ^6.0.0", - "webpack": "^4.0.0 || ^5.0.0" - } - }, "node_modules/eslint-module-utils": { "version": "2.7.3", "resolved": "https://registry.npmjs.org/eslint-module-utils/-/eslint-module-utils-2.7.3.tgz", @@ -7242,59 +8144,6 @@ "ms": "^2.1.1" } }, - "node_modules/eslint-module-utils/node_modules/find-up": { - "version": "2.1.0", - "resolved": "https://registry.npmjs.org/find-up/-/find-up-2.1.0.tgz", - "integrity": "sha512-NWzkk0jSJtTt08+FBFMvXoeZnOJD+jTtsRmBYbAIzJdX6l7dLgR7CTubCM5/eDdPUBvLCeVasP1brfVR/9/EZQ==", - "dependencies": { - "locate-path": "^2.0.0" - }, - "engines": { - "node": ">=4" - } - }, - "node_modules/eslint-module-utils/node_modules/locate-path": { - "version": "2.0.0", - "resolved": "https://registry.npmjs.org/locate-path/-/locate-path-2.0.0.tgz", - "integrity": "sha512-NCI2kiDkyR7VeEKm27Kda/iQHyKJe1Bu0FlTbYp3CqJu+9IFe9bLyAjMxf5ZDDbEg+iMPzB5zYyUTSm8wVTKmA==", - "dependencies": { - "p-locate": "^2.0.0", - "path-exists": "^3.0.0" - }, - "engines": { - "node": ">=4" - } - }, - "node_modules/eslint-module-utils/node_modules/p-limit": { - "version": "1.3.0", - "resolved": "https://registry.npmjs.org/p-limit/-/p-limit-1.3.0.tgz", - "integrity": "sha512-vvcXsLAJ9Dr5rQOPk7toZQZJApBl2K4J6dANSsEuh6QI41JYcsS/qhTGa9ErIUUgK3WNQoJYvylxvjqmiqEA9Q==", - "dependencies": { - "p-try": "^1.0.0" - }, - "engines": { - "node": ">=4" - } - }, - "node_modules/eslint-module-utils/node_modules/p-locate": { - "version": "2.0.0", - "resolved": "https://registry.npmjs.org/p-locate/-/p-locate-2.0.0.tgz", - "integrity": "sha512-nQja7m7gSKuewoVRen45CtVfODR3crN3goVQ0DDZ9N3yHxgpkuBhZqsaiotSQRrADUrne346peY7kT3TSACykg==", - "dependencies": { - "p-limit": "^1.1.0" - }, - "engines": { - "node": ">=4" - } - }, 
- "node_modules/eslint-module-utils/node_modules/p-try": { - "version": "1.0.0", - "resolved": "https://registry.npmjs.org/p-try/-/p-try-1.0.0.tgz", - "integrity": "sha512-U1etNYuMJoIz3ZXSrrySFjsXQTWOx2/jdi86L+2pRvph/qMKL6sbcCYdH23fqsbm8TH2Gn0OybpT4eSFlCVHww==", - "engines": { - "node": ">=4" - } - }, "node_modules/eslint-plugin-flowtype": { "version": "4.6.0", "resolved": "https://registry.npmjs.org/eslint-plugin-flowtype/-/eslint-plugin-flowtype-4.6.0.tgz", @@ -7310,28 +8159,29 @@ } }, "node_modules/eslint-plugin-import": { - "version": "2.20.1", - "resolved": "https://registry.npmjs.org/eslint-plugin-import/-/eslint-plugin-import-2.20.1.tgz", - "integrity": "sha512-qQHgFOTjguR+LnYRoToeZWT62XM55MBVXObHM6SKFd1VzDcX/vqT1kAz8ssqigh5eMj8qXcRoXXGZpPP6RfdCw==", + "version": "2.26.0", + "resolved": "https://registry.npmjs.org/eslint-plugin-import/-/eslint-plugin-import-2.26.0.tgz", + "integrity": "sha512-hYfi3FXaM8WPLf4S1cikh/r4IxnO6zrhZbEGz2b660EJRbuxgpDS5gkCuYgGWg2xxh2rBuIr4Pvhve/7c31koA==", "dependencies": { - "array-includes": "^3.0.3", - "array.prototype.flat": "^1.2.1", - "contains-path": "^0.1.0", + "array-includes": "^3.1.4", + "array.prototype.flat": "^1.2.5", "debug": "^2.6.9", - "doctrine": "1.5.0", - "eslint-import-resolver-node": "^0.3.2", - "eslint-module-utils": "^2.4.1", + "doctrine": "^2.1.0", + "eslint-import-resolver-node": "^0.3.6", + "eslint-module-utils": "^2.7.3", "has": "^1.0.3", - "minimatch": "^3.0.4", - "object.values": "^1.1.0", - "read-pkg-up": "^2.0.0", - "resolve": "^1.12.0" + "is-core-module": "^2.8.1", + "is-glob": "^4.0.3", + "minimatch": "^3.1.2", + "object.values": "^1.1.5", + "resolve": "^1.22.0", + "tsconfig-paths": "^3.14.1" }, "engines": { "node": ">=4" }, "peerDependencies": { - "eslint": "2.x - 6.x" + "eslint": "^2 || ^3 || ^4 || ^5 || ^6 || ^7.2.0 || ^8" } }, "node_modules/eslint-plugin-import/node_modules/debug": { @@ -7343,83 +8193,82 @@ } }, "node_modules/eslint-plugin-import/node_modules/doctrine": { - "version": "1.5.0", - 
"resolved": "https://registry.npmjs.org/doctrine/-/doctrine-1.5.0.tgz", - "integrity": "sha512-lsGyRuYr4/PIB0txi+Fy2xOMI2dGaTguCaotzFGkVZuKR5usKfcRWIFKNM3QNrU7hh/+w2bwTW+ZeXPK5l8uVg==", + "version": "2.1.0", + "resolved": "https://registry.npmjs.org/doctrine/-/doctrine-2.1.0.tgz", + "integrity": "sha512-35mSku4ZXK0vfCuHEDAwt55dg2jNajHZ1odvF+8SSr82EsZY4QmXfuWso8oEd8zRhVObSN18aM0CjSdoBX7zIw==", "dependencies": { - "esutils": "^2.0.2", - "isarray": "^1.0.0" + "esutils": "^2.0.2" }, "engines": { "node": ">=0.10.0" } }, - "node_modules/eslint-plugin-import/node_modules/isarray": { - "version": "1.0.0", - "resolved": "https://registry.npmjs.org/isarray/-/isarray-1.0.0.tgz", - "integrity": "sha512-VLghIWNM6ELQzo7zwmcg0NmTVyWKYjvIeM83yjp0wRDTmUnrM678fQbcKBo6n2CJEF0szoG//ytg+TKla89ALQ==" - }, "node_modules/eslint-plugin-import/node_modules/ms": { "version": "2.0.0", "resolved": "https://registry.npmjs.org/ms/-/ms-2.0.0.tgz", "integrity": "sha512-Tpp60P6IUJDTuOq/5Z8cdskzJujfwqfOTkrwIwj7IRISpnkJnT6SyJ4PCPnGMoFjC9ddhal5KVIYtAt97ix05A==" }, "node_modules/eslint-plugin-jsx-a11y": { - "version": "6.2.3", - "resolved": "https://registry.npmjs.org/eslint-plugin-jsx-a11y/-/eslint-plugin-jsx-a11y-6.2.3.tgz", - "integrity": "sha512-CawzfGt9w83tyuVekn0GDPU9ytYtxyxyFZ3aSWROmnRRFQFT2BiPJd7jvRdzNDi6oLWaS2asMeYSNMjWTV4eNg==", + "version": "6.5.1", + "resolved": "https://registry.npmjs.org/eslint-plugin-jsx-a11y/-/eslint-plugin-jsx-a11y-6.5.1.tgz", + "integrity": "sha512-sVCFKX9fllURnXT2JwLN5Qgo24Ug5NF6dxhkmxsMEUZhXRcGg+X3e1JbJ84YePQKBl5E0ZjAH5Q4rkdcGY99+g==", "dependencies": { - "@babel/runtime": "^7.4.5", - "aria-query": "^3.0.0", - "array-includes": "^3.0.3", + "@babel/runtime": "^7.16.3", + "aria-query": "^4.2.2", + "array-includes": "^3.1.4", "ast-types-flow": "^0.0.7", - "axobject-query": "^2.0.2", - "damerau-levenshtein": "^1.0.4", - "emoji-regex": "^7.0.2", + "axe-core": "^4.3.5", + "axobject-query": "^2.2.0", + "damerau-levenshtein": "^1.0.7", + "emoji-regex": "^9.2.2", "has": 
"^1.0.3", - "jsx-ast-utils": "^2.2.1" + "jsx-ast-utils": "^3.2.1", + "language-tags": "^1.0.5", + "minimatch": "^3.0.4" }, "engines": { "node": ">=4.0" }, "peerDependencies": { - "eslint": "^3 || ^4 || ^5 || ^6" + "eslint": "^3 || ^4 || ^5 || ^6 || ^7 || ^8" } }, "node_modules/eslint-plugin-react": { - "version": "7.19.0", - "resolved": "https://registry.npmjs.org/eslint-plugin-react/-/eslint-plugin-react-7.19.0.tgz", - "integrity": "sha512-SPT8j72CGuAP+JFbT0sJHOB80TX/pu44gQ4vXH/cq+hQTiY2PuZ6IHkqXJV6x1b28GDdo1lbInjKUrrdUf0LOQ==", + "version": "7.30.0", + "resolved": "https://registry.npmjs.org/eslint-plugin-react/-/eslint-plugin-react-7.30.0.tgz", + "integrity": "sha512-RgwH7hjW48BleKsYyHK5vUAvxtE9SMPDKmcPRQgtRCYaZA0XQPt5FSkrU3nhz5ifzMZcA8opwmRJ2cmOO8tr5A==", "dependencies": { - "array-includes": "^3.1.1", + "array-includes": "^3.1.5", + "array.prototype.flatmap": "^1.3.0", "doctrine": "^2.1.0", - "has": "^1.0.3", - "jsx-ast-utils": "^2.2.3", - "object.entries": "^1.1.1", - "object.fromentries": "^2.0.2", - "object.values": "^1.1.1", - "prop-types": "^15.7.2", - "resolve": "^1.15.1", + "estraverse": "^5.3.0", + "jsx-ast-utils": "^2.4.1 || ^3.0.0", + "minimatch": "^3.1.2", + "object.entries": "^1.1.5", + "object.fromentries": "^2.0.5", + "object.hasown": "^1.1.1", + "object.values": "^1.1.5", + "prop-types": "^15.8.1", + "resolve": "^2.0.0-next.3", "semver": "^6.3.0", - "string.prototype.matchall": "^4.0.2", - "xregexp": "^4.3.0" + "string.prototype.matchall": "^4.0.7" }, "engines": { "node": ">=4" }, "peerDependencies": { - "eslint": "^3.0.0 || ^4.0.0 || ^5.0.0 || ^6.0.0" + "eslint": "^3 || ^4 || ^5 || ^6 || ^7 || ^8" } }, "node_modules/eslint-plugin-react-hooks": { - "version": "1.7.0", - "resolved": "https://registry.npmjs.org/eslint-plugin-react-hooks/-/eslint-plugin-react-hooks-1.7.0.tgz", - "integrity": "sha512-iXTCFcOmlWvw4+TOE8CLWj6yX1GwzT0Y6cUfHHZqWnSk144VmVIRcVGtUAzrLES7C798lmvnt02C7rxaOX1HNA==", + "version": "4.6.0", + "resolved": 
"https://registry.npmjs.org/eslint-plugin-react-hooks/-/eslint-plugin-react-hooks-4.6.0.tgz", + "integrity": "sha512-oFc7Itz9Qxh2x4gNHStv3BqJq54ExXmfC+a1NjAta66IAN87Wu0R/QArgIS9qKzX3dXKPI9H5crl9QchNMY9+g==", "engines": { - "node": ">=7" + "node": ">=10" }, "peerDependencies": { - "eslint": "^3.0.0 || ^4.0.0 || ^5.0.0 || ^6.0.0" + "eslint": "^3.0.0 || ^4.0.0 || ^5.0.0 || ^6.0.0 || ^7.0.0 || ^8.0.0-0" } }, "node_modules/eslint-plugin-react/node_modules/doctrine": { @@ -7434,24 +8283,28 @@ } }, "node_modules/eslint-plugin-react/node_modules/resolve": { - "version": "1.22.0", - "resolved": "https://registry.npmjs.org/resolve/-/resolve-1.22.0.tgz", - "integrity": "sha512-Hhtrw0nLeSrFQ7phPp4OOcVjLPIeMnRlr5mcnVuMe7M/7eBn98A3hmFRLoFo3DLZkivSYwhRUJTyPyWAk56WLw==", + "version": "2.0.0-next.3", + "resolved": "https://registry.npmjs.org/resolve/-/resolve-2.0.0-next.3.tgz", + "integrity": "sha512-W8LucSynKUIDu9ylraa7ueVZ7hc0uAgJBxVsQSKOXOyle8a93qXhcz+XAXZ8bIq2d6i4Ehddn6Evt+0/UwKk6Q==", "dependencies": { - "is-core-module": "^2.8.1", - "path-parse": "^1.0.7", - "supports-preserve-symlinks-flag": "^1.0.0" - }, - "bin": { - "resolve": "bin/resolve" + "is-core-module": "^2.2.0", + "path-parse": "^1.0.6" }, "funding": { "url": "https://github.com/sponsors/ljharb" } }, - "node_modules/eslint-scope": { - "version": "5.1.1", - "resolved": "https://registry.npmjs.org/eslint-scope/-/eslint-scope-5.1.1.tgz", + "node_modules/eslint-plugin-react/node_modules/semver": { + "version": "6.3.0", + "resolved": "https://registry.npmjs.org/semver/-/semver-6.3.0.tgz", + "integrity": "sha512-b39TBaTSfV6yBrapU89p5fKekE2m/NwnDocOVruQFS1/veMgdzuPcnOM34M6CwxW8jH/lxEa5rBoDeUwu5HHTw==", + "bin": { + "semver": "bin/semver.js" + } + }, + "node_modules/eslint-scope": { + "version": "5.1.1", + "resolved": "https://registry.npmjs.org/eslint-scope/-/eslint-scope-5.1.1.tgz", "integrity": "sha512-2NxwbF/hZ0KpepYN0cNbo+FN6XoK7GaHlQhgx/hIZl6Va0bF45RQOOwhLIy8lQDbuCiadSLCBnH2CFYquit5bw==", "dependencies": { 
"esrecurse": "^4.3.0", @@ -7461,159 +8314,86 @@ "node": ">=8.0.0" } }, - "node_modules/eslint-utils": { - "version": "2.1.0", - "resolved": "https://registry.npmjs.org/eslint-utils/-/eslint-utils-2.1.0.tgz", - "integrity": "sha512-w94dQYoauyvlDc43XnGB8lU3Zt713vNChgt4EWwhXAP2XkBvndfxF0AgIqKOOasjPIPzj9JqgwkwbCYD0/V3Zg==", - "dependencies": { - "eslint-visitor-keys": "^1.1.0" - }, - "engines": { - "node": ">=6" - }, - "funding": { - "url": "https://github.com/sponsors/mysticatea" - } - }, - "node_modules/eslint-visitor-keys": { - "version": "1.3.0", - "resolved": "https://registry.npmjs.org/eslint-visitor-keys/-/eslint-visitor-keys-1.3.0.tgz", - "integrity": "sha512-6J72N8UNa462wa/KFODt/PJ3IU60SDpC3QXC1Hjc1BXXpfL2C9R5+AU7jhe0F6GREqVMh4Juu+NY7xn+6dipUQ==", - "engines": { - "node": ">=4" - } - }, - "node_modules/eslint/node_modules/ansi-styles": { - "version": "3.2.1", - "resolved": "https://registry.npmjs.org/ansi-styles/-/ansi-styles-3.2.1.tgz", - "integrity": "sha512-VT0ZI6kZRdTh8YyJw3SMbYm/u+NqfsAxEpWO0Pf9sq8/e94WxxOpPKx9FR1FlyCtOVDNOQ+8ntlqFxiRc+r5qA==", - "dependencies": { - "color-convert": "^1.9.0" - }, - "engines": { - "node": ">=4" - } - }, - "node_modules/eslint/node_modules/chalk": { - "version": "2.4.2", - "resolved": "https://registry.npmjs.org/chalk/-/chalk-2.4.2.tgz", - "integrity": "sha512-Mti+f9lpJNcwF4tWV8/OrTTtF1gZi+f8FqlyAdouralcFWFQWF2+NgCHShjkCb+IFBLq9buZwE1xckQU4peSuQ==", - "dependencies": { - "ansi-styles": "^3.2.1", - "escape-string-regexp": "^1.0.5", - "supports-color": "^5.3.0" - }, - "engines": { - "node": ">=4" - } - }, - "node_modules/eslint/node_modules/color-convert": { - "version": "1.9.3", - "resolved": "https://registry.npmjs.org/color-convert/-/color-convert-1.9.3.tgz", - "integrity": "sha512-QfAUtd+vFdAtFQcC8CCyYt1fYWxSqAiK2cSD6zDB8N3cpsEBAvRxp9zOGg6G/SHHJYAT88/az/IuDGALsNVbGg==", - "dependencies": { - "color-name": "1.1.3" - } - }, - "node_modules/eslint/node_modules/color-name": { - "version": "1.1.3", - "resolved": 
"https://registry.npmjs.org/color-name/-/color-name-1.1.3.tgz", - "integrity": "sha512-72fSenhMw2HZMTVHeCA9KCmpEIbzWiQsjN+BHcBbS9vr1mtt+vJjPdksIBNUmKAW8TFUDPJK5SUU3QhE9NEXDw==" - }, - "node_modules/eslint/node_modules/eslint-utils": { - "version": "1.4.3", - "resolved": "https://registry.npmjs.org/eslint-utils/-/eslint-utils-1.4.3.tgz", - "integrity": "sha512-fbBN5W2xdY45KulGXmLHZ3c3FHfVYmKg0IrAKGOkT/464PQsx2UeIzfz1RmEci+KLm1bBaAzZAh8+/E+XAeZ8Q==", - "dependencies": { - "eslint-visitor-keys": "^1.1.0" - }, + "node_modules/eslint-scope/node_modules/estraverse": { + "version": "4.3.0", + "resolved": "https://registry.npmjs.org/estraverse/-/estraverse-4.3.0.tgz", + "integrity": "sha512-39nnKffWz8xN1BU/2c79n9nB9HDzo0niYUqx6xyqUnyoAnQyyWpOTdZEeiCch8BBu515t4wp9ZmgVfVhn9EBpw==", "engines": { - "node": ">=6" + "node": ">=4.0" } }, - "node_modules/eslint/node_modules/globals": { - "version": "12.4.0", - "resolved": "https://registry.npmjs.org/globals/-/globals-12.4.0.tgz", - "integrity": "sha512-BWICuzzDvDoH54NHKCseDanAhE3CeDorgDL5MT6LMXXj2WCnd9UC2szdk4AWLfjdgNBCXLUanXYcpBBKOSWGwg==", + "node_modules/eslint-utils": { + "version": "3.0.0", + "resolved": "https://registry.npmjs.org/eslint-utils/-/eslint-utils-3.0.0.tgz", + "integrity": "sha512-uuQC43IGctw68pJA1RgbQS8/NP7rch6Cwd4j3ZBtgo4/8Flj4eGE7ZYSZRN3iq5pVUv6GPdW5Z1RFleo84uLDA==", "dependencies": { - "type-fest": "^0.8.1" + "eslint-visitor-keys": "^2.0.0" }, "engines": { - "node": ">=8" + "node": "^10.0.0 || ^12.0.0 || >= 14.0.0" }, "funding": { - "url": "https://github.com/sponsors/sindresorhus" + "url": "https://github.com/sponsors/mysticatea" + }, + "peerDependencies": { + "eslint": ">=5" } }, - "node_modules/eslint/node_modules/has-flag": { - "version": "3.0.0", - "resolved": "https://registry.npmjs.org/has-flag/-/has-flag-3.0.0.tgz", - "integrity": "sha512-sKJf1+ceQBr4SMkvQnBDNDtf4TXpVhVGateu0t918bl30FnbE2m4vNLX+VWe/dpjlb+HugGYzW7uQXH98HPEYw==", + "node_modules/eslint-utils/node_modules/eslint-visitor-keys": { + 
"version": "2.1.0", + "resolved": "https://registry.npmjs.org/eslint-visitor-keys/-/eslint-visitor-keys-2.1.0.tgz", + "integrity": "sha512-0rSmRBzXgDzIsD6mGdJgevzgezI534Cer5L/vyMX0kHzT/jiB43jRhd9YUlMGYLQy2zprNmoT8qasCGtY+QaKw==", "engines": { - "node": ">=4" + "node": ">=10" } }, - "node_modules/eslint/node_modules/import-fresh": { + "node_modules/eslint-visitor-keys": { "version": "3.3.0", - "resolved": "https://registry.npmjs.org/import-fresh/-/import-fresh-3.3.0.tgz", - "integrity": "sha512-veYYhQa+D1QBKznvhUHxb8faxlrwUnxseDAbAp457E0wLNio2bOSKnjYDhMj+YiAq61xrMGhQk9iXVk5FzgQMw==", - "dependencies": { - "parent-module": "^1.0.0", - "resolve-from": "^4.0.0" - }, + "resolved": "https://registry.npmjs.org/eslint-visitor-keys/-/eslint-visitor-keys-3.3.0.tgz", + "integrity": "sha512-mQ+suqKJVyeuwGYHAdjMFqjCyfl8+Ldnxuyp3ldiMBFKkvytrXUZWaiPCEav8qDHKty44bD+qV1IP4T+w+xXRA==", "engines": { - "node": ">=6" - }, - "funding": { - "url": "https://github.com/sponsors/sindresorhus" + "node": "^12.22.0 || ^14.17.0 || >=16.0.0" } }, - "node_modules/eslint/node_modules/regexpp": { + "node_modules/eslint/node_modules/argparse": { "version": "2.0.1", - "resolved": "https://registry.npmjs.org/regexpp/-/regexpp-2.0.1.tgz", - "integrity": "sha512-lv0M6+TkDVniA3aD1Eg0DVpfU/booSu7Eev3TDO/mZKHBfVjgCGTV4t4buppESEYDtkArYFOxTJWv6S5C+iaNw==", - "engines": { - "node": ">=6.5.0" - } + "resolved": "https://registry.npmjs.org/argparse/-/argparse-2.0.1.tgz", + "integrity": "sha512-8+9WqebbFzpX9OR+Wa6O29asIogeRMzcGtAINdpMHHyAg10f05aSFVBbcEqGf/PXw1EjAZ+q2/bEBg3DvurK3Q==" }, - "node_modules/eslint/node_modules/resolve-from": { - "version": "4.0.0", - "resolved": "https://registry.npmjs.org/resolve-from/-/resolve-from-4.0.0.tgz", - "integrity": "sha512-pb/MYmXstAkysRFx8piNI1tGFNQIFA3vkE3Gq4EuA1dF6gHp/+vgZqsCGJapvy8N3Q+4o7FwvquPJcnZ7RYy4g==", - "engines": { - "node": ">=4" - } - }, - "node_modules/eslint/node_modules/supports-color": { - "version": "5.5.0", - "resolved": 
"https://registry.npmjs.org/supports-color/-/supports-color-5.5.0.tgz", - "integrity": "sha512-QjVjwdXIt408MIiAqCX4oUKsgU2EqAGzs2Ppkm4aQYbjm+ZEWEcW4SfFNTr4uMNZma0ey4f5lgLrkB0aX0QMow==", + "node_modules/eslint/node_modules/eslint-scope": { + "version": "7.1.1", + "resolved": "https://registry.npmjs.org/eslint-scope/-/eslint-scope-7.1.1.tgz", + "integrity": "sha512-QKQM/UXpIiHcLqJ5AOyIW7XZmzjkzQXYE54n1++wb0u9V/abW3l9uQnxX8Z5Xd18xyKIMTUAyQ0k1e8pz6LUrw==", "dependencies": { - "has-flag": "^3.0.0" + "esrecurse": "^4.3.0", + "estraverse": "^5.2.0" }, "engines": { - "node": ">=4" + "node": "^12.22.0 || ^14.17.0 || >=16.0.0" } }, - "node_modules/eslint/node_modules/type-fest": { - "version": "0.8.1", - "resolved": "https://registry.npmjs.org/type-fest/-/type-fest-0.8.1.tgz", - "integrity": "sha512-4dbzIzqvjtgiM5rw1k5rEHtBANKmdudhGyBEajN01fEyhaAIhsoKNy6y7+IN93IfpFtwY9iqi7kD+xwKhQsNJA==", - "engines": { - "node": ">=8" + "node_modules/eslint/node_modules/js-yaml": { + "version": "4.1.0", + "resolved": "https://registry.npmjs.org/js-yaml/-/js-yaml-4.1.0.tgz", + "integrity": "sha512-wpxZs9NoxZaJESJGIZTyDEaYpl0FKSA+FB9aJiyemKhMwkxQg63h4T1KJgUGHpTqPDNRcmmYLugrRjJlBtWvRA==", + "dependencies": { + "argparse": "^2.0.1" + }, + "bin": { + "js-yaml": "bin/js-yaml.js" } }, "node_modules/espree": { - "version": "6.2.1", - "resolved": "https://registry.npmjs.org/espree/-/espree-6.2.1.tgz", - "integrity": "sha512-ysCxRQY3WaXJz9tdbWOwuWr5Y/XrPTGX9Kiz3yoUXwW0VZ4w30HTkQLaGx/+ttFjF8i+ACbArnB4ce68a9m5hw==", + "version": "9.3.2", + "resolved": "https://registry.npmjs.org/espree/-/espree-9.3.2.tgz", + "integrity": "sha512-D211tC7ZwouTIuY5x9XnS0E9sWNChB7IYKX/Xp5eQj3nFXhqmiUDB9q27y76oFl8jTg3pXcQx/bpxMfs3CIZbA==", "dependencies": { - "acorn": "^7.1.1", - "acorn-jsx": "^5.2.0", - "eslint-visitor-keys": "^1.1.0" + "acorn": "^8.7.1", + "acorn-jsx": "^5.3.2", + "eslint-visitor-keys": "^3.3.0" }, "engines": { - "node": ">=6.0.0" + "node": "^12.22.0 || ^14.17.0 || >=16.0.0" } }, "node_modules/esprima": { 
@@ -7639,14 +8419,6 @@ "node": ">=0.10" } }, - "node_modules/esquery/node_modules/estraverse": { - "version": "5.3.0", - "resolved": "https://registry.npmjs.org/estraverse/-/estraverse-5.3.0.tgz", - "integrity": "sha512-MMdARuVEQziNTeJD8DgMqmhwR11BRQ/cBP+pLtYdSTnf3MIO8fFeiINEbX36ZdNlfU/7A9f3gUw49B3oQsvwBA==", - "engines": { - "node": ">=4.0" - } - }, "node_modules/esrecurse": { "version": "4.3.0", "resolved": "https://registry.npmjs.org/esrecurse/-/esrecurse-4.3.0.tgz", @@ -7658,7 +8430,7 @@ "node": ">=4.0" } }, - "node_modules/esrecurse/node_modules/estraverse": { + "node_modules/estraverse": { "version": "5.3.0", "resolved": "https://registry.npmjs.org/estraverse/-/estraverse-5.3.0.tgz", "integrity": "sha512-MMdARuVEQziNTeJD8DgMqmhwR11BRQ/cBP+pLtYdSTnf3MIO8fFeiINEbX36ZdNlfU/7A9f3gUw49B3oQsvwBA==", @@ -7666,14 +8438,6 @@ "node": ">=4.0" } }, - "node_modules/estraverse": { - "version": "4.3.0", - "resolved": "https://registry.npmjs.org/estraverse/-/estraverse-4.3.0.tgz", - "integrity": "sha512-39nnKffWz8xN1BU/2c79n9nB9HDzo0niYUqx6xyqUnyoAnQyyWpOTdZEeiCch8BBu515t4wp9ZmgVfVhn9EBpw==", - "engines": { - "node": ">=4.0" - } - }, "node_modules/esutils": { "version": "2.0.3", "resolved": "https://registry.npmjs.org/esutils/-/esutils-2.0.3.tgz", @@ -7742,6 +8506,67 @@ "node": ">=6" } }, + "node_modules/execa/node_modules/cross-spawn": { + "version": "6.0.5", + "resolved": "https://registry.npmjs.org/cross-spawn/-/cross-spawn-6.0.5.tgz", + "integrity": "sha512-eTVLrBSt7fjbDygz805pMnstIs2VTBNkRm0qxZd+M7A5XDdxVRWO5MxGBXZhjY4cqLYLdtrGqRf8mBPmzwSpWQ==", + "dependencies": { + "nice-try": "^1.0.4", + "path-key": "^2.0.1", + "semver": "^5.5.0", + "shebang-command": "^1.2.0", + "which": "^1.2.9" + }, + "engines": { + "node": ">=4.8" + } + }, + "node_modules/execa/node_modules/path-key": { + "version": "2.0.1", + "resolved": "https://registry.npmjs.org/path-key/-/path-key-2.0.1.tgz", + "integrity": 
"sha512-fEHGKCSmUSDPv4uoj8AlD+joPlq3peND+HRYyxFz4KPw4z926S/b8rIuFs2FYJg3BwsxJf6A9/3eIdLaYC+9Dw==", + "engines": { + "node": ">=4" + } + }, + "node_modules/execa/node_modules/semver": { + "version": "5.7.1", + "resolved": "https://registry.npmjs.org/semver/-/semver-5.7.1.tgz", + "integrity": "sha512-sauaDf/PZdVgrLTNYHRtpXa1iRiKcaebiKQ1BJdpQlWH2lCvexQdX55snPFyK7QzpudqbCI0qXFfOasHdyNDGQ==", + "bin": { + "semver": "bin/semver" + } + }, + "node_modules/execa/node_modules/shebang-command": { + "version": "1.2.0", + "resolved": "https://registry.npmjs.org/shebang-command/-/shebang-command-1.2.0.tgz", + "integrity": "sha512-EV3L1+UQWGor21OmnvojK36mhg+TyIKDh3iFBKBohr5xeXIhNBcx8oWdgkTEEQ+BEFFYdLRuqMfd5L84N1V5Vg==", + "dependencies": { + "shebang-regex": "^1.0.0" + }, + "engines": { + "node": ">=0.10.0" + } + }, + "node_modules/execa/node_modules/shebang-regex": { + "version": "1.0.0", + "resolved": "https://registry.npmjs.org/shebang-regex/-/shebang-regex-1.0.0.tgz", + "integrity": "sha512-wpoSFAxys6b2a2wHZ1XpDSgD7N9iVjg29Ph9uV/uaP9Ex/KXlkTZTeddxDPSYQpgvzKLGJke2UU0AzoGCjNIvQ==", + "engines": { + "node": ">=0.10.0" + } + }, + "node_modules/execa/node_modules/which": { + "version": "1.3.1", + "resolved": "https://registry.npmjs.org/which/-/which-1.3.1.tgz", + "integrity": "sha512-HxJdYWq1MTIQbJ3nw0cqssHoTNU267KlrDuGZ1WYlxDStUtKUhOaJmh112/TZmHxxUfuJqPXSOm7tDyas0OSIQ==", + "dependencies": { + "isexe": "^2.0.0" + }, + "bin": { + "which": "bin/which" + } + }, "node_modules/exit": { "version": "0.1.2", "resolved": "https://registry.npmjs.org/exit/-/exit-0.1.2.tgz", @@ -7840,14 +8665,6 @@ "node": ">=0.10.0" } }, - "node_modules/expand-brackets/node_modules/is-extendable": { - "version": "0.1.1", - "resolved": "https://registry.npmjs.org/is-extendable/-/is-extendable-0.1.1.tgz", - "integrity": "sha512-5BMULNob1vgFX6EjQw5izWDxrecWK9AM72rugNr0TFldMOi0fj6Jk+zeKIt0xGj4cEfQIJth4w3OKWOJ4f+AFw==", - "engines": { - "node": ">=0.10.0" - } - }, "node_modules/expand-brackets/node_modules/ms": { 
"version": "2.0.0", "resolved": "https://registry.npmjs.org/ms/-/ms-2.0.0.tgz", @@ -8028,6 +8845,17 @@ "node": ">=0.10.0" } }, + "node_modules/extend-shallow/node_modules/is-extendable": { + "version": "1.0.1", + "resolved": "https://registry.npmjs.org/is-extendable/-/is-extendable-1.0.1.tgz", + "integrity": "sha512-arnXMxT1hhoKo9k1LZdmlNyJdDDfy2v0fXjFlmok4+i8ul/6WlbVge9bhM74OpNPQPMGUToDtz+KXa1PneJxOA==", + "dependencies": { + "is-plain-object": "^2.0.4" + }, + "engines": { + "node": ">=0.10.0" + } + }, "node_modules/external-editor": { "version": "3.1.0", "resolved": "https://registry.npmjs.org/external-editor/-/external-editor-3.1.0.tgz", @@ -8081,14 +8909,6 @@ "node": ">=0.10.0" } }, - "node_modules/extglob/node_modules/is-extendable": { - "version": "0.1.1", - "resolved": "https://registry.npmjs.org/is-extendable/-/is-extendable-0.1.1.tgz", - "integrity": "sha512-5BMULNob1vgFX6EjQw5izWDxrecWK9AM72rugNr0TFldMOi0fj6Jk+zeKIt0xGj4cEfQIJth4w3OKWOJ4f+AFw==", - "engines": { - "node": ">=0.10.0" - } - }, "node_modules/extsprintf": { "version": "1.3.0", "resolved": "https://registry.npmjs.org/extsprintf/-/extsprintf-1.3.0.tgz", @@ -8103,39 +8923,31 @@ "integrity": "sha512-f3qQ9oQy9j2AhBe/H9VC91wLmKBCCU/gDOnKNAYG5hswO7BLKj09Hc5HYNz9cGI++xlpDCIgDaitVs03ATR84Q==" }, "node_modules/fast-glob": { - "version": "2.2.7", - "resolved": "https://registry.npmjs.org/fast-glob/-/fast-glob-2.2.7.tgz", - "integrity": "sha512-g1KuQwHOZAmOZMuBtHdxDtju+T2RT8jgCC9aANsbpdiDDTSnjgfuVsIBNKbUeJI3oKMRExcfNDtJl4OhbffMsw==", + "version": "3.2.11", + "resolved": "https://registry.npmjs.org/fast-glob/-/fast-glob-3.2.11.tgz", + "integrity": "sha512-xrO3+1bxSo3ZVHAnqzyuewYT6aMFHRAd4Kcs92MAonjwQZLsK9d0SF1IyQ3k5PoirxTW0Oe/RqFgMQ6TcNE5Ew==", + "dev": true, "dependencies": { - "@mrmlnc/readdir-enhanced": "^2.2.1", - "@nodelib/fs.stat": "^1.1.2", - "glob-parent": "^3.1.0", - "is-glob": "^4.0.0", - "merge2": "^1.2.3", - "micromatch": "^3.1.10" + "@nodelib/fs.stat": "^2.0.2", + "@nodelib/fs.walk": "^1.2.3", 
+ "glob-parent": "^5.1.2", + "merge2": "^1.3.0", + "micromatch": "^4.0.4" }, "engines": { - "node": ">=4.0.0" + "node": ">=8.6.0" } }, "node_modules/fast-glob/node_modules/glob-parent": { - "version": "3.1.0", - "resolved": "https://registry.npmjs.org/glob-parent/-/glob-parent-3.1.0.tgz", - "integrity": "sha512-E8Ak/2+dZY6fnzlR7+ueWvhsH1SjHr4jjss4YS/h4py44jY9MhK/VFdaZJAWDz6BbL21KeteKxFSFpq8OS5gVA==", - "dependencies": { - "is-glob": "^3.1.0", - "path-dirname": "^1.0.0" - } - }, - "node_modules/fast-glob/node_modules/glob-parent/node_modules/is-glob": { - "version": "3.1.0", - "resolved": "https://registry.npmjs.org/is-glob/-/is-glob-3.1.0.tgz", - "integrity": "sha512-UFpDDrPgM6qpnFNI+rh/p3bUaq9hKLZN8bMUWzxmcnZVS3omf4IPK+BrewlnWjO1WmUsMYuSjKh4UJuV4+Lqmw==", + "version": "5.1.2", + "resolved": "https://registry.npmjs.org/glob-parent/-/glob-parent-5.1.2.tgz", + "integrity": "sha512-AOIgSQCepiJYwP3ARnGx+5VnTu2HBYdzbGP45eLw1vr3zB3vZLeyed1sC9hnbcOc9/SrMyM5RPQrkGz4aS9Zow==", + "dev": true, "dependencies": { - "is-extglob": "^2.1.0" + "is-glob": "^4.0.1" }, "engines": { - "node": ">=0.10.0" + "node": ">= 6" } }, "node_modules/fast-json-stable-stringify": { @@ -8148,6 +8960,15 @@ "resolved": "https://registry.npmjs.org/fast-levenshtein/-/fast-levenshtein-2.0.6.tgz", "integrity": "sha512-DCXu6Ifhqcks7TZKY3Hxp3y6qphY5SJZmrWMDrKcERSOXWQdMhU9Ig/PYrzyw/ul9jOIyh0N4M0tbC5hodg8dw==" }, + "node_modules/fastq": { + "version": "1.13.0", + "resolved": "https://registry.npmjs.org/fastq/-/fastq-1.13.0.tgz", + "integrity": "sha512-YpkpUnK8od0o1hmeSc7UUs/eB/vIPWJYjKck2QKIzAf71Vm1AAQ3EbuZB3g2JIy+pg+ERD0vqI79KyZiB2e2Nw==", + "dev": true, + "dependencies": { + "reusify": "^1.0.4" + } + }, "node_modules/faye-websocket": { "version": "0.10.0", "resolved": "https://registry.npmjs.org/faye-websocket/-/faye-websocket-0.10.0.tgz", @@ -8214,15 +9035,23 @@ "url": "https://github.com/sponsors/sindresorhus" } }, + "node_modules/figures/node_modules/escape-string-regexp": { + "version": "1.0.5", + 
"resolved": "https://registry.npmjs.org/escape-string-regexp/-/escape-string-regexp-1.0.5.tgz", + "integrity": "sha512-vbRorB5FUQWvla16U8R/qgaFIya2qGzwDrNmCZuYKrbdSUMG6I1ZCGQRefkRVhuOkIGVne7BQ35DSfo1qvJqFg==", + "engines": { + "node": ">=0.8.0" + } + }, "node_modules/file-entry-cache": { - "version": "5.0.1", - "resolved": "https://registry.npmjs.org/file-entry-cache/-/file-entry-cache-5.0.1.tgz", - "integrity": "sha512-bCg29ictuBaKUwwArK4ouCaqDgLZcysCFLmM/Yn/FDoqndh/9vNuQfXRDvTuXKLxfD/JtZQGKFT8MGcJBK644g==", + "version": "6.0.1", + "resolved": "https://registry.npmjs.org/file-entry-cache/-/file-entry-cache-6.0.1.tgz", + "integrity": "sha512-7Gps/XWymbLk2QLYK4NzpMOrYjMhdIxXuIvy2QBsLE6ljuodKvdkWs/cpyJJ3CVIVpH0Oi1Hvg1ovbMzLdFBBg==", "dependencies": { - "flat-cache": "^2.0.1" + "flat-cache": "^3.0.4" }, "engines": { - "node": ">=4" + "node": "^10.12.0 || >=12.0.0" } }, "node_modules/file-loader": { @@ -8255,36 +9084,14 @@ } }, "node_modules/fill-range": { - "version": "4.0.0", - "resolved": "https://registry.npmjs.org/fill-range/-/fill-range-4.0.0.tgz", - "integrity": "sha512-VcpLTWqWDiTerugjj8e3+esbg+skS3M9e54UuR3iCeIDMXCLTsAH8hTSzDQU/X6/6t3eYkOKoZSef2PlU6U1XQ==", - "dependencies": { - "extend-shallow": "^2.0.1", - "is-number": "^3.0.0", - "repeat-string": "^1.6.1", - "to-regex-range": "^2.1.0" - }, - "engines": { - "node": ">=0.10.0" - } - }, - "node_modules/fill-range/node_modules/extend-shallow": { - "version": "2.0.1", - "resolved": "https://registry.npmjs.org/extend-shallow/-/extend-shallow-2.0.1.tgz", - "integrity": "sha512-zCnTtlxNoAiDc3gqY2aYAWFx7XWWiasuF2K8Me5WbN8otHKTUKBwjPtNpRs/rbUZm7KxWAaNj7P1a/p52GbVug==", + "version": "7.0.1", + "resolved": "https://registry.npmjs.org/fill-range/-/fill-range-7.0.1.tgz", + "integrity": "sha512-qOo9F+dMUmC2Lcb4BbVvnKJxTPjCm+RRpe4gDuGrzkL7mEVl/djYSu2OdQ2Pa302N4oqkSg9ir6jaLWJ2USVpQ==", "dependencies": { - "is-extendable": "^0.1.0" + "to-regex-range": "^5.0.1" }, "engines": { - "node": ">=0.10.0" - } - }, - 
"node_modules/fill-range/node_modules/is-extendable": { - "version": "0.1.1", - "resolved": "https://registry.npmjs.org/is-extendable/-/is-extendable-0.1.1.tgz", - "integrity": "sha512-5BMULNob1vgFX6EjQw5izWDxrecWK9AM72rugNr0TFldMOi0fj6Jk+zeKIt0xGj4cEfQIJth4w3OKWOJ4f+AFw==", - "engines": { - "node": ">=0.10.0" + "node": ">=8" } }, "node_modules/finalhandler": { @@ -8331,33 +9138,32 @@ } }, "node_modules/find-up": { - "version": "3.0.0", - "resolved": "https://registry.npmjs.org/find-up/-/find-up-3.0.0.tgz", - "integrity": "sha512-1yD6RmLI1XBfxugvORwlck6f75tYL+iR0jqwsOrOxMZyGYqUuDhJ0l4AXdO1iX/FTs9cBAMEk1gWSEx1kSbylg==", + "version": "2.1.0", + "resolved": "https://registry.npmjs.org/find-up/-/find-up-2.1.0.tgz", + "integrity": "sha512-NWzkk0jSJtTt08+FBFMvXoeZnOJD+jTtsRmBYbAIzJdX6l7dLgR7CTubCM5/eDdPUBvLCeVasP1brfVR/9/EZQ==", "dependencies": { - "locate-path": "^3.0.0" + "locate-path": "^2.0.0" }, "engines": { - "node": ">=6" + "node": ">=4" } }, "node_modules/flat-cache": { - "version": "2.0.1", - "resolved": "https://registry.npmjs.org/flat-cache/-/flat-cache-2.0.1.tgz", - "integrity": "sha512-LoQe6yDuUMDzQAEH8sgmh4Md6oZnc/7PjtwjNFSzveXqSHt6ka9fPBuso7IGf9Rz4uqnSnWiFH2B/zj24a5ReA==", + "version": "3.0.4", + "resolved": "https://registry.npmjs.org/flat-cache/-/flat-cache-3.0.4.tgz", + "integrity": "sha512-dm9s5Pw7Jc0GvMYbshN6zchCA9RgQlzzEZX3vylR9IqFfS8XciblUXOKfW6SiuJ0e13eDYZoZV5wdrev7P3Nwg==", "dependencies": { - "flatted": "^2.0.0", - "rimraf": "2.6.3", - "write": "1.0.3" + "flatted": "^3.1.0", + "rimraf": "^3.0.2" }, "engines": { - "node": ">=4" + "node": "^10.12.0 || >=12.0.0" } }, "node_modules/flatted": { - "version": "2.0.2", - "resolved": "https://registry.npmjs.org/flatted/-/flatted-2.0.2.tgz", - "integrity": "sha512-r5wGx7YeOwNWNlCA0wQ86zKyDLMQr+/RB8xy74M4hTphfmjlijTSSXGuH8rnvKZnfT9i+75zmd8jcKdMR4O6jA==" + "version": "3.2.5", + "resolved": "https://registry.npmjs.org/flatted/-/flatted-3.2.5.tgz", + "integrity": 
"sha512-WIWGi2L3DyTUvUrwRKgGi9TwxQMUEqPOPQBVi71R96jZXJdFskXEmf54BoZaS1kknGODoIGASGEzBUYdyMCBJg==" }, "node_modules/flatten": { "version": "1.0.3", @@ -8450,6 +9256,37 @@ "node": ">=4" } }, + "node_modules/fork-ts-checker-webpack-plugin/node_modules/braces": { + "version": "2.3.2", + "resolved": "https://registry.npmjs.org/braces/-/braces-2.3.2.tgz", + "integrity": "sha512-aNdbnj9P8PjdXU4ybaWLK2IF3jc/EoDYbC7AazW6to3TRsfXxscC9UXOB5iDiEQrkyIbWp2SLQda4+QAa7nc3w==", + "dependencies": { + "arr-flatten": "^1.1.0", + "array-unique": "^0.3.2", + "extend-shallow": "^2.0.1", + "fill-range": "^4.0.0", + "isobject": "^3.0.1", + "repeat-element": "^1.1.2", + "snapdragon": "^0.8.1", + "snapdragon-node": "^2.0.1", + "split-string": "^3.0.2", + "to-regex": "^3.0.1" + }, + "engines": { + "node": ">=0.10.0" + } + }, + "node_modules/fork-ts-checker-webpack-plugin/node_modules/braces/node_modules/extend-shallow": { + "version": "2.0.1", + "resolved": "https://registry.npmjs.org/extend-shallow/-/extend-shallow-2.0.1.tgz", + "integrity": "sha512-zCnTtlxNoAiDc3gqY2aYAWFx7XWWiasuF2K8Me5WbN8otHKTUKBwjPtNpRs/rbUZm7KxWAaNj7P1a/p52GbVug==", + "dependencies": { + "is-extendable": "^0.1.0" + }, + "engines": { + "node": ">=0.10.0" + } + }, "node_modules/fork-ts-checker-webpack-plugin/node_modules/chalk": { "version": "2.4.2", "resolved": "https://registry.npmjs.org/chalk/-/chalk-2.4.2.tgz", @@ -8476,6 +9313,39 @@ "resolved": "https://registry.npmjs.org/color-name/-/color-name-1.1.3.tgz", "integrity": "sha512-72fSenhMw2HZMTVHeCA9KCmpEIbzWiQsjN+BHcBbS9vr1mtt+vJjPdksIBNUmKAW8TFUDPJK5SUU3QhE9NEXDw==" }, + "node_modules/fork-ts-checker-webpack-plugin/node_modules/escape-string-regexp": { + "version": "1.0.5", + "resolved": "https://registry.npmjs.org/escape-string-regexp/-/escape-string-regexp-1.0.5.tgz", + "integrity": "sha512-vbRorB5FUQWvla16U8R/qgaFIya2qGzwDrNmCZuYKrbdSUMG6I1ZCGQRefkRVhuOkIGVne7BQ35DSfo1qvJqFg==", + "engines": { + "node": ">=0.8.0" + } + }, + 
"node_modules/fork-ts-checker-webpack-plugin/node_modules/fill-range": { + "version": "4.0.0", + "resolved": "https://registry.npmjs.org/fill-range/-/fill-range-4.0.0.tgz", + "integrity": "sha512-VcpLTWqWDiTerugjj8e3+esbg+skS3M9e54UuR3iCeIDMXCLTsAH8hTSzDQU/X6/6t3eYkOKoZSef2PlU6U1XQ==", + "dependencies": { + "extend-shallow": "^2.0.1", + "is-number": "^3.0.0", + "repeat-string": "^1.6.1", + "to-regex-range": "^2.1.0" + }, + "engines": { + "node": ">=0.10.0" + } + }, + "node_modules/fork-ts-checker-webpack-plugin/node_modules/fill-range/node_modules/extend-shallow": { + "version": "2.0.1", + "resolved": "https://registry.npmjs.org/extend-shallow/-/extend-shallow-2.0.1.tgz", + "integrity": "sha512-zCnTtlxNoAiDc3gqY2aYAWFx7XWWiasuF2K8Me5WbN8otHKTUKBwjPtNpRs/rbUZm7KxWAaNj7P1a/p52GbVug==", + "dependencies": { + "is-extendable": "^0.1.0" + }, + "engines": { + "node": ">=0.10.0" + } + }, "node_modules/fork-ts-checker-webpack-plugin/node_modules/has-flag": { "version": "3.0.0", "resolved": "https://registry.npmjs.org/has-flag/-/has-flag-3.0.0.tgz", @@ -8484,6 +9354,59 @@ "node": ">=4" } }, + "node_modules/fork-ts-checker-webpack-plugin/node_modules/is-number": { + "version": "3.0.0", + "resolved": "https://registry.npmjs.org/is-number/-/is-number-3.0.0.tgz", + "integrity": "sha512-4cboCqIpliH+mAvFNegjZQ4kgKc3ZUhQVr3HvWbSh5q3WH2v82ct+T2Y1hdU5Gdtorx/cLifQjqCbL7bpznLTg==", + "dependencies": { + "kind-of": "^3.0.2" + }, + "engines": { + "node": ">=0.10.0" + } + }, + "node_modules/fork-ts-checker-webpack-plugin/node_modules/is-number/node_modules/kind-of": { + "version": "3.2.2", + "resolved": "https://registry.npmjs.org/kind-of/-/kind-of-3.2.2.tgz", + "integrity": "sha512-NOW9QQXMoZGg/oqnVNoNTTIFEIid1627WCffUBJEdMxYApq7mNE7CpzucIPc+ZQg25Phej7IJSmX3hO+oblOtQ==", + "dependencies": { + "is-buffer": "^1.1.5" + }, + "engines": { + "node": ">=0.10.0" + } + }, + "node_modules/fork-ts-checker-webpack-plugin/node_modules/kind-of": { + "version": "6.0.3", + "resolved": 
"https://registry.npmjs.org/kind-of/-/kind-of-6.0.3.tgz", + "integrity": "sha512-dcS1ul+9tmeD95T+x28/ehLgd9mENa3LsvDTtzm3vyBEO7RPptvAD+t44WVXaUjTBRcrpFeFlC8WCruUR456hw==", + "engines": { + "node": ">=0.10.0" + } + }, + "node_modules/fork-ts-checker-webpack-plugin/node_modules/micromatch": { + "version": "3.1.10", + "resolved": "https://registry.npmjs.org/micromatch/-/micromatch-3.1.10.tgz", + "integrity": "sha512-MWikgl9n9M3w+bpsY3He8L+w9eF9338xRl8IAO5viDizwSzziFEyUzo2xrrloB64ADbTf8uA8vRqqttDTOmccg==", + "dependencies": { + "arr-diff": "^4.0.0", + "array-unique": "^0.3.2", + "braces": "^2.3.1", + "define-property": "^2.0.2", + "extend-shallow": "^3.0.2", + "extglob": "^2.0.4", + "fragment-cache": "^0.2.1", + "kind-of": "^6.0.2", + "nanomatch": "^1.2.9", + "object.pick": "^1.3.0", + "regex-not": "^1.0.0", + "snapdragon": "^0.8.1", + "to-regex": "^3.0.2" + }, + "engines": { + "node": ">=0.10.0" + } + }, "node_modules/fork-ts-checker-webpack-plugin/node_modules/semver": { "version": "5.7.1", "resolved": "https://registry.npmjs.org/semver/-/semver-5.7.1.tgz", @@ -8503,6 +9426,18 @@ "node": ">=4" } }, + "node_modules/fork-ts-checker-webpack-plugin/node_modules/to-regex-range": { + "version": "2.1.1", + "resolved": "https://registry.npmjs.org/to-regex-range/-/to-regex-range-2.1.1.tgz", + "integrity": "sha512-ZZWNfCjUokXXDGXFpZehJIkZqq91BcULFq/Pi7M5i4JnxXdhMKAK682z8bCW3o8Hj1wuuzoKcW3DfVzaP6VuNg==", + "dependencies": { + "is-number": "^3.0.0", + "repeat-string": "^1.6.1" + }, + "engines": { + "node": ">=0.10.0" + } + }, "node_modules/form-data": { "version": "2.3.3", "resolved": "https://registry.npmjs.org/form-data/-/form-data-2.3.3.tgz", @@ -8752,14 +9687,14 @@ } }, "node_modules/glob-parent": { - "version": "5.1.2", - "resolved": "https://registry.npmjs.org/glob-parent/-/glob-parent-5.1.2.tgz", - "integrity": "sha512-AOIgSQCepiJYwP3ARnGx+5VnTu2HBYdzbGP45eLw1vr3zB3vZLeyed1sC9hnbcOc9/SrMyM5RPQrkGz4aS9Zow==", + "version": "6.0.2", + "resolved": 
"https://registry.npmjs.org/glob-parent/-/glob-parent-6.0.2.tgz", + "integrity": "sha512-XxwI8EOhVQgWp6iDL+3b0r86f4d6AX6zSU55HfB4ydCEuXLXc5FcYeOu+nnGftS4TEju/11rt4KJPTMgbfmv4A==", "dependencies": { - "is-glob": "^4.0.1" + "is-glob": "^4.0.3" }, "engines": { - "node": ">= 6" + "node": ">=10.13.0" } }, "node_modules/glob-to-regexp": { @@ -8799,50 +9734,49 @@ "node": ">=0.10.0" } }, - "node_modules/globals": { - "version": "11.12.0", - "resolved": "https://registry.npmjs.org/globals/-/globals-11.12.0.tgz", - "integrity": "sha512-WOBp/EEGUiIsJSp7wcv/y6MO+lV9UoncWqxuFfm8eBwzWNgyfBd6Gz+IeKQ9jCmyhoH99g15M3T+QaVHFjizVA==", - "engines": { - "node": ">=4" - } - }, - "node_modules/globby": { - "version": "8.0.2", - "resolved": "https://registry.npmjs.org/globby/-/globby-8.0.2.tgz", - "integrity": "sha512-yTzMmKygLp8RUpG1Ymu2VXPSJQZjNAZPD4ywgYEaG7e4tBJeUQBO8OpXrf1RCNcEs5alsoJYPAMiIHP0cmeC7w==", + "node_modules/global-prefix/node_modules/which": { + "version": "1.3.1", + "resolved": "https://registry.npmjs.org/which/-/which-1.3.1.tgz", + "integrity": "sha512-HxJdYWq1MTIQbJ3nw0cqssHoTNU267KlrDuGZ1WYlxDStUtKUhOaJmh112/TZmHxxUfuJqPXSOm7tDyas0OSIQ==", "dependencies": { - "array-union": "^1.0.1", - "dir-glob": "2.0.0", - "fast-glob": "^2.0.2", - "glob": "^7.1.2", - "ignore": "^3.3.5", - "pify": "^3.0.0", - "slash": "^1.0.0" + "isexe": "^2.0.0" }, - "engines": { - "node": ">=4" + "bin": { + "which": "bin/which" } }, - "node_modules/globby/node_modules/ignore": { - "version": "3.3.10", - "resolved": "https://registry.npmjs.org/ignore/-/ignore-3.3.10.tgz", - "integrity": "sha512-Pgs951kaMm5GXP7MOvxERINe3gsaVjUWFm+UZPSq9xYriQAksyhg0csnS0KXSNRD5NmNdapXEpjxG49+AKh/ug==" - }, - "node_modules/globby/node_modules/pify": { - "version": "3.0.0", - "resolved": "https://registry.npmjs.org/pify/-/pify-3.0.0.tgz", - "integrity": "sha512-C3FsVNH1udSEX48gGX1xfvwTWfsYWj5U+8/uK15BGzIGrKoUpghX8hWZwa/OFnakBiiVNmBvemTJR5mcy7iPcg==", + "node_modules/globals": { + "version": "13.15.0", + "resolved": 
"https://registry.npmjs.org/globals/-/globals-13.15.0.tgz", + "integrity": "sha512-bpzcOlgDhMG070Av0Vy5Owklpv1I6+j96GhUI7Rh7IzDCKLzboflLrrfqMu8NquDbiR4EOQk7XzJwqVJxicxog==", + "dependencies": { + "type-fest": "^0.20.2" + }, "engines": { - "node": ">=4" + "node": ">=8" + }, + "funding": { + "url": "https://github.com/sponsors/sindresorhus" } }, - "node_modules/globby/node_modules/slash": { - "version": "1.0.0", - "resolved": "https://registry.npmjs.org/slash/-/slash-1.0.0.tgz", - "integrity": "sha512-3TYDR7xWt4dIqV2JauJr+EJeW356RXijHeUlO+8djJ+uBXPn8/2dpzBc8yQhh583sVvc9CvFAeQVgijsH+PNNg==", + "node_modules/globby": { + "version": "11.1.0", + "resolved": "https://registry.npmjs.org/globby/-/globby-11.1.0.tgz", + "integrity": "sha512-jhIXaOzy1sb8IyocaruWSn1TjmnBVs8Ayhcy83rmxNJ8q2uWKCAj3CnJY+KpGSXCueAPc0i05kVvVKtP1t9S3g==", + "dev": true, + "dependencies": { + "array-union": "^2.1.0", + "dir-glob": "^3.0.1", + "fast-glob": "^3.2.9", + "ignore": "^5.2.0", + "merge2": "^1.4.1", + "slash": "^3.0.0" + }, "engines": { - "node": ">=0.10.0" + "node": ">=10" + }, + "funding": { + "url": "https://github.com/sponsors/sindresorhus" } }, "node_modules/graceful-fs": { @@ -9018,6 +9952,28 @@ "node": ">=0.10.0" } }, + "node_modules/has-values/node_modules/is-number": { + "version": "3.0.0", + "resolved": "https://registry.npmjs.org/is-number/-/is-number-3.0.0.tgz", + "integrity": "sha512-4cboCqIpliH+mAvFNegjZQ4kgKc3ZUhQVr3HvWbSh5q3WH2v82ct+T2Y1hdU5Gdtorx/cLifQjqCbL7bpznLTg==", + "dependencies": { + "kind-of": "^3.0.2" + }, + "engines": { + "node": ">=0.10.0" + } + }, + "node_modules/has-values/node_modules/is-number/node_modules/kind-of": { + "version": "3.2.2", + "resolved": "https://registry.npmjs.org/kind-of/-/kind-of-3.2.2.tgz", + "integrity": "sha512-NOW9QQXMoZGg/oqnVNoNTTIFEIid1627WCffUBJEdMxYApq7mNE7CpzucIPc+ZQg25Phej7IJSmX3hO+oblOtQ==", + "dependencies": { + "is-buffer": "^1.1.5" + }, + "engines": { + "node": ">=0.10.0" + } + }, "node_modules/has-values/node_modules/kind-of": 
{ "version": "4.0.0", "resolved": "https://registry.npmjs.org/kind-of/-/kind-of-4.0.0.tgz", @@ -9228,14 +10184,6 @@ "node": ">=6" } }, - "node_modules/html-minifier-terser/node_modules/commander": { - "version": "4.1.1", - "resolved": "https://registry.npmjs.org/commander/-/commander-4.1.1.tgz", - "integrity": "sha512-NOKm8xhkzAjzFx8B2v5OAHT+u5pRQc2UCa2Vq9jYL/31o2wi9mxBA7LIFs3sV5VSC49z6pEhfbMULvShKj26WA==", - "engines": { - "node": ">= 6" - } - }, "node_modules/html-webpack-plugin": { "version": "4.0.0-beta.11", "resolved": "https://registry.npmjs.org/html-webpack-plugin/-/html-webpack-plugin-4.0.0-beta.11.tgz", @@ -9321,6 +10269,127 @@ "node": ">=4.0.0" } }, + "node_modules/http-proxy-middleware/node_modules/braces": { + "version": "2.3.2", + "resolved": "https://registry.npmjs.org/braces/-/braces-2.3.2.tgz", + "integrity": "sha512-aNdbnj9P8PjdXU4ybaWLK2IF3jc/EoDYbC7AazW6to3TRsfXxscC9UXOB5iDiEQrkyIbWp2SLQda4+QAa7nc3w==", + "dependencies": { + "arr-flatten": "^1.1.0", + "array-unique": "^0.3.2", + "extend-shallow": "^2.0.1", + "fill-range": "^4.0.0", + "isobject": "^3.0.1", + "repeat-element": "^1.1.2", + "snapdragon": "^0.8.1", + "snapdragon-node": "^2.0.1", + "split-string": "^3.0.2", + "to-regex": "^3.0.1" + }, + "engines": { + "node": ">=0.10.0" + } + }, + "node_modules/http-proxy-middleware/node_modules/braces/node_modules/extend-shallow": { + "version": "2.0.1", + "resolved": "https://registry.npmjs.org/extend-shallow/-/extend-shallow-2.0.1.tgz", + "integrity": "sha512-zCnTtlxNoAiDc3gqY2aYAWFx7XWWiasuF2K8Me5WbN8otHKTUKBwjPtNpRs/rbUZm7KxWAaNj7P1a/p52GbVug==", + "dependencies": { + "is-extendable": "^0.1.0" + }, + "engines": { + "node": ">=0.10.0" + } + }, + "node_modules/http-proxy-middleware/node_modules/fill-range": { + "version": "4.0.0", + "resolved": "https://registry.npmjs.org/fill-range/-/fill-range-4.0.0.tgz", + "integrity": "sha512-VcpLTWqWDiTerugjj8e3+esbg+skS3M9e54UuR3iCeIDMXCLTsAH8hTSzDQU/X6/6t3eYkOKoZSef2PlU6U1XQ==", + "dependencies": { + 
"extend-shallow": "^2.0.1", + "is-number": "^3.0.0", + "repeat-string": "^1.6.1", + "to-regex-range": "^2.1.0" + }, + "engines": { + "node": ">=0.10.0" + } + }, + "node_modules/http-proxy-middleware/node_modules/fill-range/node_modules/extend-shallow": { + "version": "2.0.1", + "resolved": "https://registry.npmjs.org/extend-shallow/-/extend-shallow-2.0.1.tgz", + "integrity": "sha512-zCnTtlxNoAiDc3gqY2aYAWFx7XWWiasuF2K8Me5WbN8otHKTUKBwjPtNpRs/rbUZm7KxWAaNj7P1a/p52GbVug==", + "dependencies": { + "is-extendable": "^0.1.0" + }, + "engines": { + "node": ">=0.10.0" + } + }, + "node_modules/http-proxy-middleware/node_modules/is-number": { + "version": "3.0.0", + "resolved": "https://registry.npmjs.org/is-number/-/is-number-3.0.0.tgz", + "integrity": "sha512-4cboCqIpliH+mAvFNegjZQ4kgKc3ZUhQVr3HvWbSh5q3WH2v82ct+T2Y1hdU5Gdtorx/cLifQjqCbL7bpznLTg==", + "dependencies": { + "kind-of": "^3.0.2" + }, + "engines": { + "node": ">=0.10.0" + } + }, + "node_modules/http-proxy-middleware/node_modules/is-number/node_modules/kind-of": { + "version": "3.2.2", + "resolved": "https://registry.npmjs.org/kind-of/-/kind-of-3.2.2.tgz", + "integrity": "sha512-NOW9QQXMoZGg/oqnVNoNTTIFEIid1627WCffUBJEdMxYApq7mNE7CpzucIPc+ZQg25Phej7IJSmX3hO+oblOtQ==", + "dependencies": { + "is-buffer": "^1.1.5" + }, + "engines": { + "node": ">=0.10.0" + } + }, + "node_modules/http-proxy-middleware/node_modules/kind-of": { + "version": "6.0.3", + "resolved": "https://registry.npmjs.org/kind-of/-/kind-of-6.0.3.tgz", + "integrity": "sha512-dcS1ul+9tmeD95T+x28/ehLgd9mENa3LsvDTtzm3vyBEO7RPptvAD+t44WVXaUjTBRcrpFeFlC8WCruUR456hw==", + "engines": { + "node": ">=0.10.0" + } + }, + "node_modules/http-proxy-middleware/node_modules/micromatch": { + "version": "3.1.10", + "resolved": "https://registry.npmjs.org/micromatch/-/micromatch-3.1.10.tgz", + "integrity": "sha512-MWikgl9n9M3w+bpsY3He8L+w9eF9338xRl8IAO5viDizwSzziFEyUzo2xrrloB64ADbTf8uA8vRqqttDTOmccg==", + "dependencies": { + "arr-diff": "^4.0.0", + "array-unique": 
"^0.3.2", + "braces": "^2.3.1", + "define-property": "^2.0.2", + "extend-shallow": "^3.0.2", + "extglob": "^2.0.4", + "fragment-cache": "^0.2.1", + "kind-of": "^6.0.2", + "nanomatch": "^1.2.9", + "object.pick": "^1.3.0", + "regex-not": "^1.0.0", + "snapdragon": "^0.8.1", + "to-regex": "^3.0.2" + }, + "engines": { + "node": ">=0.10.0" + } + }, + "node_modules/http-proxy-middleware/node_modules/to-regex-range": { + "version": "2.1.1", + "resolved": "https://registry.npmjs.org/to-regex-range/-/to-regex-range-2.1.1.tgz", + "integrity": "sha512-ZZWNfCjUokXXDGXFpZehJIkZqq91BcULFq/Pi7M5i4JnxXdhMKAK682z8bCW3o8Hj1wuuzoKcW3DfVzaP6VuNg==", + "dependencies": { + "is-number": "^3.0.0", + "repeat-string": "^1.6.1" + }, + "engines": { + "node": ">=0.10.0" + } + }, "node_modules/http-signature": { "version": "1.2.0", "resolved": "https://registry.npmjs.org/http-signature/-/http-signature-1.2.0.tgz", @@ -9403,9 +10472,9 @@ "integrity": "sha512-DUNFN5j7Tln0D+TxzloUjKB+CtVu6myn0JEFak6dG18mNt9YkQ6lzGCdafwofISZ1lLF3xRHJ98VKy9ynkcFaA==" }, "node_modules/ignore": { - "version": "4.0.6", - "resolved": "https://registry.npmjs.org/ignore/-/ignore-4.0.6.tgz", - "integrity": "sha512-cyFDKrqc/YdcWFniJhzI42+AzS+gNwmUzOSFcRCQYwySuBBBy/KjuxWLZ/FHEH6Moq1NizMOBWyTcv8O4OZIMg==", + "version": "5.2.0", + "resolved": "https://registry.npmjs.org/ignore/-/ignore-5.2.0.tgz", + "integrity": "sha512-CmxgYGiEPCLhfLnpPp1MoRmifwEIOgjcHXxOBjv7mY96c+eWScsOP9c112ZyLdWHi0FxHjI+4uVhKYp/gcdRmQ==", "engines": { "node": ">= 4" } @@ -9427,15 +10496,18 @@ } }, "node_modules/import-fresh": { - "version": "2.0.0", - "resolved": "https://registry.npmjs.org/import-fresh/-/import-fresh-2.0.0.tgz", - "integrity": "sha512-eZ5H8rcgYazHbKC3PG4ClHNykCSxtAhxSSEM+2mb+7evD2CKF5V7c0dNum7AdpDh0ZdICwZY9sRSn8f+KH96sg==", + "version": "3.3.0", + "resolved": "https://registry.npmjs.org/import-fresh/-/import-fresh-3.3.0.tgz", + "integrity": "sha512-veYYhQa+D1QBKznvhUHxb8faxlrwUnxseDAbAp457E0wLNio2bOSKnjYDhMj+YiAq61xrMGhQk9iXVk5FzgQMw==", 
"dependencies": { - "caller-path": "^2.0.0", - "resolve-from": "^3.0.0" + "parent-module": "^1.0.0", + "resolve-from": "^4.0.0" }, "engines": { - "node": ">=4" + "node": ">=6" + }, + "funding": { + "url": "https://github.com/sponsors/sindresorhus" } }, "node_modules/import-from": { @@ -9449,6 +10521,14 @@ "node": ">=4" } }, + "node_modules/import-from/node_modules/resolve-from": { + "version": "3.0.0", + "resolved": "https://registry.npmjs.org/resolve-from/-/resolve-from-3.0.0.tgz", + "integrity": "sha512-GnlH6vxLymXJNMBo7XP1fJIzBFbdYt49CuTwmB/6N53t+kMPRMFKz783LlQ4tv28XoQfMWinAJX6WCGf2IlaIw==", + "engines": { + "node": ">=4" + } + }, "node_modules/import-local": { "version": "2.0.0", "resolved": "https://registry.npmjs.org/import-local/-/import-local-2.0.0.tgz", @@ -9523,37 +10603,134 @@ "integrity": "sha512-7NXolsK4CAS5+xvdj5OMMbI962hU/wvwoxk+LWR9Ek9bVtyuuYScDN6eS0rUm6TxApFpw7CX1o4uJzcd4AyD3Q==" }, "node_modules/inquirer": { - "version": "7.3.3", - "resolved": "https://registry.npmjs.org/inquirer/-/inquirer-7.3.3.tgz", - "integrity": "sha512-JG3eIAj5V9CwcGvuOmoo6LB9kbAYT8HXffUl6memuszlwDC/qvFAJw49XJ5NROSFNPxp3iQg1GqkFhaY/CR0IA==", + "version": "7.0.4", + "resolved": "https://registry.npmjs.org/inquirer/-/inquirer-7.0.4.tgz", + "integrity": "sha512-Bu5Td5+j11sCkqfqmUTiwv+tWisMtP0L7Q8WrqA2C/BbBhy1YTdFrvjjlrKq8oagA/tLQBski2Gcx/Sqyi2qSQ==", "dependencies": { "ansi-escapes": "^4.2.1", - "chalk": "^4.1.0", + "chalk": "^2.4.2", "cli-cursor": "^3.1.0", - "cli-width": "^3.0.0", + "cli-width": "^2.0.0", "external-editor": "^3.0.3", "figures": "^3.0.0", - "lodash": "^4.17.19", + "lodash": "^4.17.15", "mute-stream": "0.0.8", - "run-async": "^2.4.0", - "rxjs": "^6.6.0", + "run-async": "^2.2.0", + "rxjs": "^6.5.3", "string-width": "^4.1.0", - "strip-ansi": "^6.0.0", + "strip-ansi": "^5.1.0", "through": "^2.3.6" }, "engines": { - "node": ">=8.0.0" + "node": ">=6.0.0" } }, - "node_modules/inquirer/node_modules/strip-ansi": { - "version": "6.0.1", - "resolved": 
"https://registry.npmjs.org/strip-ansi/-/strip-ansi-6.0.1.tgz", - "integrity": "sha512-Y38VPSHcqkFrCpFnQ9vuSXmquuv5oXOKpGeT6aGrr3o3Gc9AlVa6JBfUSOCnbxGGZF+/0ooI7KrPuUSztUdU5A==", + "node_modules/inquirer/node_modules/ansi-escapes": { + "version": "4.3.2", + "resolved": "https://registry.npmjs.org/ansi-escapes/-/ansi-escapes-4.3.2.tgz", + "integrity": "sha512-gKXj5ALrKWQLsYG9jlTRmR/xKluxHV+Z9QEwNIgCfM1/uwPMCuzVVnh5mwTd+OuBZcwSIMbqssNWRm1lE51QaQ==", "dependencies": { - "ansi-regex": "^5.0.1" + "type-fest": "^0.21.3" }, "engines": { "node": ">=8" + }, + "funding": { + "url": "https://github.com/sponsors/sindresorhus" + } + }, + "node_modules/inquirer/node_modules/ansi-regex": { + "version": "4.1.1", + "resolved": "https://registry.npmjs.org/ansi-regex/-/ansi-regex-4.1.1.tgz", + "integrity": "sha512-ILlv4k/3f6vfQ4OoP2AGvirOktlQ98ZEL1k9FaQjxa3L1abBgbuTDAdPOpvbGncC0BTVQrl+OM8xZGK6tWXt7g==", + "engines": { + "node": ">=6" + } + }, + "node_modules/inquirer/node_modules/ansi-styles": { + "version": "3.2.1", + "resolved": "https://registry.npmjs.org/ansi-styles/-/ansi-styles-3.2.1.tgz", + "integrity": "sha512-VT0ZI6kZRdTh8YyJw3SMbYm/u+NqfsAxEpWO0Pf9sq8/e94WxxOpPKx9FR1FlyCtOVDNOQ+8ntlqFxiRc+r5qA==", + "dependencies": { + "color-convert": "^1.9.0" + }, + "engines": { + "node": ">=4" + } + }, + "node_modules/inquirer/node_modules/chalk": { + "version": "2.4.2", + "resolved": "https://registry.npmjs.org/chalk/-/chalk-2.4.2.tgz", + "integrity": "sha512-Mti+f9lpJNcwF4tWV8/OrTTtF1gZi+f8FqlyAdouralcFWFQWF2+NgCHShjkCb+IFBLq9buZwE1xckQU4peSuQ==", + "dependencies": { + "ansi-styles": "^3.2.1", + "escape-string-regexp": "^1.0.5", + "supports-color": "^5.3.0" + }, + "engines": { + "node": ">=4" + } + }, + "node_modules/inquirer/node_modules/color-convert": { + "version": "1.9.3", + "resolved": "https://registry.npmjs.org/color-convert/-/color-convert-1.9.3.tgz", + "integrity": "sha512-QfAUtd+vFdAtFQcC8CCyYt1fYWxSqAiK2cSD6zDB8N3cpsEBAvRxp9zOGg6G/SHHJYAT88/az/IuDGALsNVbGg==", + 
"dependencies": { + "color-name": "1.1.3" + } + }, + "node_modules/inquirer/node_modules/color-name": { + "version": "1.1.3", + "resolved": "https://registry.npmjs.org/color-name/-/color-name-1.1.3.tgz", + "integrity": "sha512-72fSenhMw2HZMTVHeCA9KCmpEIbzWiQsjN+BHcBbS9vr1mtt+vJjPdksIBNUmKAW8TFUDPJK5SUU3QhE9NEXDw==" + }, + "node_modules/inquirer/node_modules/escape-string-regexp": { + "version": "1.0.5", + "resolved": "https://registry.npmjs.org/escape-string-regexp/-/escape-string-regexp-1.0.5.tgz", + "integrity": "sha512-vbRorB5FUQWvla16U8R/qgaFIya2qGzwDrNmCZuYKrbdSUMG6I1ZCGQRefkRVhuOkIGVne7BQ35DSfo1qvJqFg==", + "engines": { + "node": ">=0.8.0" + } + }, + "node_modules/inquirer/node_modules/has-flag": { + "version": "3.0.0", + "resolved": "https://registry.npmjs.org/has-flag/-/has-flag-3.0.0.tgz", + "integrity": "sha512-sKJf1+ceQBr4SMkvQnBDNDtf4TXpVhVGateu0t918bl30FnbE2m4vNLX+VWe/dpjlb+HugGYzW7uQXH98HPEYw==", + "engines": { + "node": ">=4" + } + }, + "node_modules/inquirer/node_modules/strip-ansi": { + "version": "5.2.0", + "resolved": "https://registry.npmjs.org/strip-ansi/-/strip-ansi-5.2.0.tgz", + "integrity": "sha512-DuRs1gKbBqsMKIZlrffwlug8MHkcnpjs5VPmL1PAh+mA30U0DTotfDZ0d2UUsXpPmPmMMJ6W773MaA3J+lbiWA==", + "dependencies": { + "ansi-regex": "^4.1.0" + }, + "engines": { + "node": ">=6" + } + }, + "node_modules/inquirer/node_modules/supports-color": { + "version": "5.5.0", + "resolved": "https://registry.npmjs.org/supports-color/-/supports-color-5.5.0.tgz", + "integrity": "sha512-QjVjwdXIt408MIiAqCX4oUKsgU2EqAGzs2Ppkm4aQYbjm+ZEWEcW4SfFNTr4uMNZma0ey4f5lgLrkB0aX0QMow==", + "dependencies": { + "has-flag": "^3.0.0" + }, + "engines": { + "node": ">=4" + } + }, + "node_modules/inquirer/node_modules/type-fest": { + "version": "0.21.3", + "resolved": "https://registry.npmjs.org/type-fest/-/type-fest-0.21.3.tgz", + "integrity": "sha512-t0rzBq87m3fVcduHDUFhKmyyX+9eo6WQjZvf51Ea/M0Q7+T374Jp1aUiyUl0GKxp8M/OETVHSDvmkyPgvX+X2w==", + "engines": { + "node": ">=10" + }, + 
"funding": { + "url": "https://github.com/sponsors/sindresorhus" } }, "node_modules/internal-ip": { @@ -9822,12 +10999,9 @@ } }, "node_modules/is-extendable": { - "version": "1.0.1", - "resolved": "https://registry.npmjs.org/is-extendable/-/is-extendable-1.0.1.tgz", - "integrity": "sha512-arnXMxT1hhoKo9k1LZdmlNyJdDDfy2v0fXjFlmok4+i8ul/6WlbVge9bhM74OpNPQPMGUToDtz+KXa1PneJxOA==", - "dependencies": { - "is-plain-object": "^2.0.4" - }, + "version": "0.1.1", + "resolved": "https://registry.npmjs.org/is-extendable/-/is-extendable-0.1.1.tgz", + "integrity": "sha512-5BMULNob1vgFX6EjQw5izWDxrecWK9AM72rugNr0TFldMOi0fj6Jk+zeKIt0xGj4cEfQIJth4w3OKWOJ4f+AFw==", "engines": { "node": ">=0.10.0" } @@ -9884,14 +11058,11 @@ } }, "node_modules/is-number": { - "version": "3.0.0", - "resolved": "https://registry.npmjs.org/is-number/-/is-number-3.0.0.tgz", - "integrity": "sha512-4cboCqIpliH+mAvFNegjZQ4kgKc3ZUhQVr3HvWbSh5q3WH2v82ct+T2Y1hdU5Gdtorx/cLifQjqCbL7bpznLTg==", - "dependencies": { - "kind-of": "^3.0.2" - }, + "version": "7.0.0", + "resolved": "https://registry.npmjs.org/is-number/-/is-number-7.0.0.tgz", + "integrity": "sha512-41Cifkg6e8TylSpdtTpeLVMqvSBEVzTttHvERD741+pnZ8ANv0004MRL43QKPDlK9cGvNp6NZWZUBlbGXYxxng==", "engines": { - "node": ">=0.10.0" + "node": ">=0.12.0" } }, "node_modules/is-number-object": { @@ -10142,6 +11313,14 @@ "node": ">=6" } }, + "node_modules/istanbul-lib-instrument/node_modules/semver": { + "version": "6.3.0", + "resolved": "https://registry.npmjs.org/semver/-/semver-6.3.0.tgz", + "integrity": "sha512-b39TBaTSfV6yBrapU89p5fKekE2m/NwnDocOVruQFS1/veMgdzuPcnOM34M6CwxW8jH/lxEa5rBoDeUwu5HHTw==", + "bin": { + "semver": "bin/semver.js" + } + }, "node_modules/istanbul-lib-report": { "version": "2.0.8", "resolved": "https://registry.npmjs.org/istanbul-lib-report/-/istanbul-lib-report-2.0.8.tgz", @@ -10189,6 +11368,17 @@ "node": ">=6" } }, + "node_modules/istanbul-lib-source-maps/node_modules/rimraf": { + "version": "2.7.1", + "resolved": 
"https://registry.npmjs.org/rimraf/-/rimraf-2.7.1.tgz", + "integrity": "sha512-uWjbaKIK3T1OSVptzX7Nl6PvQ3qAGtKEtVRjRuazjfL3Bx5eI409VZSqgND+4UNnmzLVdPj9FqFJNPqBZFve4w==", + "dependencies": { + "glob": "^7.1.3" + }, + "bin": { + "rimraf": "bin.js" + } + }, "node_modules/istanbul-reports": { "version": "2.2.7", "resolved": "https://registry.npmjs.org/istanbul-reports/-/istanbul-reports-2.2.7.tgz", @@ -10291,6 +11481,14 @@ "resolved": "https://registry.npmjs.org/color-name/-/color-name-1.1.3.tgz", "integrity": "sha512-72fSenhMw2HZMTVHeCA9KCmpEIbzWiQsjN+BHcBbS9vr1mtt+vJjPdksIBNUmKAW8TFUDPJK5SUU3QhE9NEXDw==" }, + "node_modules/jest-cli/node_modules/escape-string-regexp": { + "version": "1.0.5", + "resolved": "https://registry.npmjs.org/escape-string-regexp/-/escape-string-regexp-1.0.5.tgz", + "integrity": "sha512-vbRorB5FUQWvla16U8R/qgaFIya2qGzwDrNmCZuYKrbdSUMG6I1ZCGQRefkRVhuOkIGVne7BQ35DSfo1qvJqFg==", + "engines": { + "node": ">=0.8.0" + } + }, "node_modules/jest-cli/node_modules/has-flag": { "version": "3.0.0", "resolved": "https://registry.npmjs.org/has-flag/-/has-flag-3.0.0.tgz", @@ -10356,6 +11554,37 @@ "node": ">=4" } }, + "node_modules/jest-config/node_modules/braces": { + "version": "2.3.2", + "resolved": "https://registry.npmjs.org/braces/-/braces-2.3.2.tgz", + "integrity": "sha512-aNdbnj9P8PjdXU4ybaWLK2IF3jc/EoDYbC7AazW6to3TRsfXxscC9UXOB5iDiEQrkyIbWp2SLQda4+QAa7nc3w==", + "dependencies": { + "arr-flatten": "^1.1.0", + "array-unique": "^0.3.2", + "extend-shallow": "^2.0.1", + "fill-range": "^4.0.0", + "isobject": "^3.0.1", + "repeat-element": "^1.1.2", + "snapdragon": "^0.8.1", + "snapdragon-node": "^2.0.1", + "split-string": "^3.0.2", + "to-regex": "^3.0.1" + }, + "engines": { + "node": ">=0.10.0" + } + }, + "node_modules/jest-config/node_modules/braces/node_modules/extend-shallow": { + "version": "2.0.1", + "resolved": "https://registry.npmjs.org/extend-shallow/-/extend-shallow-2.0.1.tgz", + "integrity": 
"sha512-zCnTtlxNoAiDc3gqY2aYAWFx7XWWiasuF2K8Me5WbN8otHKTUKBwjPtNpRs/rbUZm7KxWAaNj7P1a/p52GbVug==", + "dependencies": { + "is-extendable": "^0.1.0" + }, + "engines": { + "node": ">=0.10.0" + } + }, "node_modules/jest-config/node_modules/chalk": { "version": "2.4.2", "resolved": "https://registry.npmjs.org/chalk/-/chalk-2.4.2.tgz", @@ -10382,6 +11611,39 @@ "resolved": "https://registry.npmjs.org/color-name/-/color-name-1.1.3.tgz", "integrity": "sha512-72fSenhMw2HZMTVHeCA9KCmpEIbzWiQsjN+BHcBbS9vr1mtt+vJjPdksIBNUmKAW8TFUDPJK5SUU3QhE9NEXDw==" }, + "node_modules/jest-config/node_modules/escape-string-regexp": { + "version": "1.0.5", + "resolved": "https://registry.npmjs.org/escape-string-regexp/-/escape-string-regexp-1.0.5.tgz", + "integrity": "sha512-vbRorB5FUQWvla16U8R/qgaFIya2qGzwDrNmCZuYKrbdSUMG6I1ZCGQRefkRVhuOkIGVne7BQ35DSfo1qvJqFg==", + "engines": { + "node": ">=0.8.0" + } + }, + "node_modules/jest-config/node_modules/fill-range": { + "version": "4.0.0", + "resolved": "https://registry.npmjs.org/fill-range/-/fill-range-4.0.0.tgz", + "integrity": "sha512-VcpLTWqWDiTerugjj8e3+esbg+skS3M9e54UuR3iCeIDMXCLTsAH8hTSzDQU/X6/6t3eYkOKoZSef2PlU6U1XQ==", + "dependencies": { + "extend-shallow": "^2.0.1", + "is-number": "^3.0.0", + "repeat-string": "^1.6.1", + "to-regex-range": "^2.1.0" + }, + "engines": { + "node": ">=0.10.0" + } + }, + "node_modules/jest-config/node_modules/fill-range/node_modules/extend-shallow": { + "version": "2.0.1", + "resolved": "https://registry.npmjs.org/extend-shallow/-/extend-shallow-2.0.1.tgz", + "integrity": "sha512-zCnTtlxNoAiDc3gqY2aYAWFx7XWWiasuF2K8Me5WbN8otHKTUKBwjPtNpRs/rbUZm7KxWAaNj7P1a/p52GbVug==", + "dependencies": { + "is-extendable": "^0.1.0" + }, + "engines": { + "node": ">=0.10.0" + } + }, "node_modules/jest-config/node_modules/has-flag": { "version": "3.0.0", "resolved": "https://registry.npmjs.org/has-flag/-/has-flag-3.0.0.tgz", @@ -10390,6 +11652,28 @@ "node": ">=4" } }, + "node_modules/jest-config/node_modules/is-number": { + 
"version": "3.0.0", + "resolved": "https://registry.npmjs.org/is-number/-/is-number-3.0.0.tgz", + "integrity": "sha512-4cboCqIpliH+mAvFNegjZQ4kgKc3ZUhQVr3HvWbSh5q3WH2v82ct+T2Y1hdU5Gdtorx/cLifQjqCbL7bpznLTg==", + "dependencies": { + "kind-of": "^3.0.2" + }, + "engines": { + "node": ">=0.10.0" + } + }, + "node_modules/jest-config/node_modules/is-number/node_modules/kind-of": { + "version": "3.2.2", + "resolved": "https://registry.npmjs.org/kind-of/-/kind-of-3.2.2.tgz", + "integrity": "sha512-NOW9QQXMoZGg/oqnVNoNTTIFEIid1627WCffUBJEdMxYApq7mNE7CpzucIPc+ZQg25Phej7IJSmX3hO+oblOtQ==", + "dependencies": { + "is-buffer": "^1.1.5" + }, + "engines": { + "node": ">=0.10.0" + } + }, "node_modules/jest-config/node_modules/jest-get-type": { "version": "24.9.0", "resolved": "https://registry.npmjs.org/jest-get-type/-/jest-get-type-24.9.0.tgz", @@ -10398,6 +11682,37 @@ "node": ">= 6" } }, + "node_modules/jest-config/node_modules/kind-of": { + "version": "6.0.3", + "resolved": "https://registry.npmjs.org/kind-of/-/kind-of-6.0.3.tgz", + "integrity": "sha512-dcS1ul+9tmeD95T+x28/ehLgd9mENa3LsvDTtzm3vyBEO7RPptvAD+t44WVXaUjTBRcrpFeFlC8WCruUR456hw==", + "engines": { + "node": ">=0.10.0" + } + }, + "node_modules/jest-config/node_modules/micromatch": { + "version": "3.1.10", + "resolved": "https://registry.npmjs.org/micromatch/-/micromatch-3.1.10.tgz", + "integrity": "sha512-MWikgl9n9M3w+bpsY3He8L+w9eF9338xRl8IAO5viDizwSzziFEyUzo2xrrloB64ADbTf8uA8vRqqttDTOmccg==", + "dependencies": { + "arr-diff": "^4.0.0", + "array-unique": "^0.3.2", + "braces": "^2.3.1", + "define-property": "^2.0.2", + "extend-shallow": "^3.0.2", + "extglob": "^2.0.4", + "fragment-cache": "^0.2.1", + "kind-of": "^6.0.2", + "nanomatch": "^1.2.9", + "object.pick": "^1.3.0", + "regex-not": "^1.0.0", + "snapdragon": "^0.8.1", + "to-regex": "^3.0.2" + }, + "engines": { + "node": ">=0.10.0" + } + }, "node_modules/jest-config/node_modules/pretty-format": { "version": "24.9.0", "resolved": 
"https://registry.npmjs.org/pretty-format/-/pretty-format-24.9.0.tgz", @@ -10423,6 +11738,18 @@ "node": ">=4" } }, + "node_modules/jest-config/node_modules/to-regex-range": { + "version": "2.1.1", + "resolved": "https://registry.npmjs.org/to-regex-range/-/to-regex-range-2.1.1.tgz", + "integrity": "sha512-ZZWNfCjUokXXDGXFpZehJIkZqq91BcULFq/Pi7M5i4JnxXdhMKAK682z8bCW3o8Hj1wuuzoKcW3DfVzaP6VuNg==", + "dependencies": { + "is-number": "^3.0.0", + "repeat-string": "^1.6.1" + }, + "engines": { + "node": ">=0.10.0" + } + }, "node_modules/jest-diff": { "version": "28.1.1", "resolved": "https://registry.npmjs.org/jest-diff/-/jest-diff-28.1.1.tgz", @@ -10508,6 +11835,14 @@ "resolved": "https://registry.npmjs.org/color-name/-/color-name-1.1.3.tgz", "integrity": "sha512-72fSenhMw2HZMTVHeCA9KCmpEIbzWiQsjN+BHcBbS9vr1mtt+vJjPdksIBNUmKAW8TFUDPJK5SUU3QhE9NEXDw==" }, + "node_modules/jest-each/node_modules/escape-string-regexp": { + "version": "1.0.5", + "resolved": "https://registry.npmjs.org/escape-string-regexp/-/escape-string-regexp-1.0.5.tgz", + "integrity": "sha512-vbRorB5FUQWvla16U8R/qgaFIya2qGzwDrNmCZuYKrbdSUMG6I1ZCGQRefkRVhuOkIGVne7BQ35DSfo1qvJqFg==", + "engines": { + "node": ">=0.8.0" + } + }, "node_modules/jest-each/node_modules/has-flag": { "version": "3.0.0", "resolved": "https://registry.npmjs.org/has-flag/-/has-flag-3.0.0.tgz", @@ -10695,6 +12030,62 @@ "fsevents": "^1.2.7" } }, + "node_modules/jest-haste-map/node_modules/braces": { + "version": "2.3.2", + "resolved": "https://registry.npmjs.org/braces/-/braces-2.3.2.tgz", + "integrity": "sha512-aNdbnj9P8PjdXU4ybaWLK2IF3jc/EoDYbC7AazW6to3TRsfXxscC9UXOB5iDiEQrkyIbWp2SLQda4+QAa7nc3w==", + "dependencies": { + "arr-flatten": "^1.1.0", + "array-unique": "^0.3.2", + "extend-shallow": "^2.0.1", + "fill-range": "^4.0.0", + "isobject": "^3.0.1", + "repeat-element": "^1.1.2", + "snapdragon": "^0.8.1", + "snapdragon-node": "^2.0.1", + "split-string": "^3.0.2", + "to-regex": "^3.0.1" + }, + "engines": { + "node": ">=0.10.0" + } + }, + 
"node_modules/jest-haste-map/node_modules/braces/node_modules/extend-shallow": { + "version": "2.0.1", + "resolved": "https://registry.npmjs.org/extend-shallow/-/extend-shallow-2.0.1.tgz", + "integrity": "sha512-zCnTtlxNoAiDc3gqY2aYAWFx7XWWiasuF2K8Me5WbN8otHKTUKBwjPtNpRs/rbUZm7KxWAaNj7P1a/p52GbVug==", + "dependencies": { + "is-extendable": "^0.1.0" + }, + "engines": { + "node": ">=0.10.0" + } + }, + "node_modules/jest-haste-map/node_modules/fill-range": { + "version": "4.0.0", + "resolved": "https://registry.npmjs.org/fill-range/-/fill-range-4.0.0.tgz", + "integrity": "sha512-VcpLTWqWDiTerugjj8e3+esbg+skS3M9e54UuR3iCeIDMXCLTsAH8hTSzDQU/X6/6t3eYkOKoZSef2PlU6U1XQ==", + "dependencies": { + "extend-shallow": "^2.0.1", + "is-number": "^3.0.0", + "repeat-string": "^1.6.1", + "to-regex-range": "^2.1.0" + }, + "engines": { + "node": ">=0.10.0" + } + }, + "node_modules/jest-haste-map/node_modules/fill-range/node_modules/extend-shallow": { + "version": "2.0.1", + "resolved": "https://registry.npmjs.org/extend-shallow/-/extend-shallow-2.0.1.tgz", + "integrity": "sha512-zCnTtlxNoAiDc3gqY2aYAWFx7XWWiasuF2K8Me5WbN8otHKTUKBwjPtNpRs/rbUZm7KxWAaNj7P1a/p52GbVug==", + "dependencies": { + "is-extendable": "^0.1.0" + }, + "engines": { + "node": ">=0.10.0" + } + }, "node_modules/jest-haste-map/node_modules/fsevents": { "version": "1.2.13", "resolved": "https://registry.npmjs.org/fsevents/-/fsevents-1.2.13.tgz", @@ -10713,6 +12104,71 @@ "node": ">= 4.0" } }, + "node_modules/jest-haste-map/node_modules/is-number": { + "version": "3.0.0", + "resolved": "https://registry.npmjs.org/is-number/-/is-number-3.0.0.tgz", + "integrity": "sha512-4cboCqIpliH+mAvFNegjZQ4kgKc3ZUhQVr3HvWbSh5q3WH2v82ct+T2Y1hdU5Gdtorx/cLifQjqCbL7bpznLTg==", + "dependencies": { + "kind-of": "^3.0.2" + }, + "engines": { + "node": ">=0.10.0" + } + }, + "node_modules/jest-haste-map/node_modules/is-number/node_modules/kind-of": { + "version": "3.2.2", + "resolved": "https://registry.npmjs.org/kind-of/-/kind-of-3.2.2.tgz", + 
"integrity": "sha512-NOW9QQXMoZGg/oqnVNoNTTIFEIid1627WCffUBJEdMxYApq7mNE7CpzucIPc+ZQg25Phej7IJSmX3hO+oblOtQ==", + "dependencies": { + "is-buffer": "^1.1.5" + }, + "engines": { + "node": ">=0.10.0" + } + }, + "node_modules/jest-haste-map/node_modules/kind-of": { + "version": "6.0.3", + "resolved": "https://registry.npmjs.org/kind-of/-/kind-of-6.0.3.tgz", + "integrity": "sha512-dcS1ul+9tmeD95T+x28/ehLgd9mENa3LsvDTtzm3vyBEO7RPptvAD+t44WVXaUjTBRcrpFeFlC8WCruUR456hw==", + "engines": { + "node": ">=0.10.0" + } + }, + "node_modules/jest-haste-map/node_modules/micromatch": { + "version": "3.1.10", + "resolved": "https://registry.npmjs.org/micromatch/-/micromatch-3.1.10.tgz", + "integrity": "sha512-MWikgl9n9M3w+bpsY3He8L+w9eF9338xRl8IAO5viDizwSzziFEyUzo2xrrloB64ADbTf8uA8vRqqttDTOmccg==", + "dependencies": { + "arr-diff": "^4.0.0", + "array-unique": "^0.3.2", + "braces": "^2.3.1", + "define-property": "^2.0.2", + "extend-shallow": "^3.0.2", + "extglob": "^2.0.4", + "fragment-cache": "^0.2.1", + "kind-of": "^6.0.2", + "nanomatch": "^1.2.9", + "object.pick": "^1.3.0", + "regex-not": "^1.0.0", + "snapdragon": "^0.8.1", + "to-regex": "^3.0.2" + }, + "engines": { + "node": ">=0.10.0" + } + }, + "node_modules/jest-haste-map/node_modules/to-regex-range": { + "version": "2.1.1", + "resolved": "https://registry.npmjs.org/to-regex-range/-/to-regex-range-2.1.1.tgz", + "integrity": "sha512-ZZWNfCjUokXXDGXFpZehJIkZqq91BcULFq/Pi7M5i4JnxXdhMKAK682z8bCW3o8Hj1wuuzoKcW3DfVzaP6VuNg==", + "dependencies": { + "is-number": "^3.0.0", + "repeat-string": "^1.6.1" + }, + "engines": { + "node": ">=0.10.0" + } + }, "node_modules/jest-jasmine2": { "version": "24.9.0", "resolved": "https://registry.npmjs.org/jest-jasmine2/-/jest-jasmine2-24.9.0.tgz", @@ -10784,6 +12240,14 @@ "resolved": "https://registry.npmjs.org/color-name/-/color-name-1.1.3.tgz", "integrity": "sha512-72fSenhMw2HZMTVHeCA9KCmpEIbzWiQsjN+BHcBbS9vr1mtt+vJjPdksIBNUmKAW8TFUDPJK5SUU3QhE9NEXDw==" }, + 
"node_modules/jest-jasmine2/node_modules/escape-string-regexp": { + "version": "1.0.5", + "resolved": "https://registry.npmjs.org/escape-string-regexp/-/escape-string-regexp-1.0.5.tgz", + "integrity": "sha512-vbRorB5FUQWvla16U8R/qgaFIya2qGzwDrNmCZuYKrbdSUMG6I1ZCGQRefkRVhuOkIGVne7BQ35DSfo1qvJqFg==", + "engines": { + "node": ">=0.8.0" + } + }, "node_modules/jest-jasmine2/node_modules/has-flag": { "version": "3.0.0", "resolved": "https://registry.npmjs.org/has-flag/-/has-flag-3.0.0.tgz", @@ -10950,6 +12414,14 @@ "node": ">= 6" } }, + "node_modules/jest-matcher-utils/node_modules/escape-string-regexp": { + "version": "1.0.5", + "resolved": "https://registry.npmjs.org/escape-string-regexp/-/escape-string-regexp-1.0.5.tgz", + "integrity": "sha512-vbRorB5FUQWvla16U8R/qgaFIya2qGzwDrNmCZuYKrbdSUMG6I1ZCGQRefkRVhuOkIGVne7BQ35DSfo1qvJqFg==", + "engines": { + "node": ">=0.8.0" + } + }, "node_modules/jest-matcher-utils/node_modules/has-flag": { "version": "3.0.0", "resolved": "https://registry.npmjs.org/has-flag/-/has-flag-3.0.0.tgz", @@ -11034,6 +12506,37 @@ "node": ">=4" } }, + "node_modules/jest-message-util/node_modules/braces": { + "version": "2.3.2", + "resolved": "https://registry.npmjs.org/braces/-/braces-2.3.2.tgz", + "integrity": "sha512-aNdbnj9P8PjdXU4ybaWLK2IF3jc/EoDYbC7AazW6to3TRsfXxscC9UXOB5iDiEQrkyIbWp2SLQda4+QAa7nc3w==", + "dependencies": { + "arr-flatten": "^1.1.0", + "array-unique": "^0.3.2", + "extend-shallow": "^2.0.1", + "fill-range": "^4.0.0", + "isobject": "^3.0.1", + "repeat-element": "^1.1.2", + "snapdragon": "^0.8.1", + "snapdragon-node": "^2.0.1", + "split-string": "^3.0.2", + "to-regex": "^3.0.1" + }, + "engines": { + "node": ">=0.10.0" + } + }, + "node_modules/jest-message-util/node_modules/braces/node_modules/extend-shallow": { + "version": "2.0.1", + "resolved": "https://registry.npmjs.org/extend-shallow/-/extend-shallow-2.0.1.tgz", + "integrity": "sha512-zCnTtlxNoAiDc3gqY2aYAWFx7XWWiasuF2K8Me5WbN8otHKTUKBwjPtNpRs/rbUZm7KxWAaNj7P1a/p52GbVug==", + 
"dependencies": { + "is-extendable": "^0.1.0" + }, + "engines": { + "node": ">=0.10.0" + } + }, "node_modules/jest-message-util/node_modules/chalk": { "version": "2.4.2", "resolved": "https://registry.npmjs.org/chalk/-/chalk-2.4.2.tgz", @@ -11060,6 +12563,39 @@ "resolved": "https://registry.npmjs.org/color-name/-/color-name-1.1.3.tgz", "integrity": "sha512-72fSenhMw2HZMTVHeCA9KCmpEIbzWiQsjN+BHcBbS9vr1mtt+vJjPdksIBNUmKAW8TFUDPJK5SUU3QhE9NEXDw==" }, + "node_modules/jest-message-util/node_modules/escape-string-regexp": { + "version": "1.0.5", + "resolved": "https://registry.npmjs.org/escape-string-regexp/-/escape-string-regexp-1.0.5.tgz", + "integrity": "sha512-vbRorB5FUQWvla16U8R/qgaFIya2qGzwDrNmCZuYKrbdSUMG6I1ZCGQRefkRVhuOkIGVne7BQ35DSfo1qvJqFg==", + "engines": { + "node": ">=0.8.0" + } + }, + "node_modules/jest-message-util/node_modules/fill-range": { + "version": "4.0.0", + "resolved": "https://registry.npmjs.org/fill-range/-/fill-range-4.0.0.tgz", + "integrity": "sha512-VcpLTWqWDiTerugjj8e3+esbg+skS3M9e54UuR3iCeIDMXCLTsAH8hTSzDQU/X6/6t3eYkOKoZSef2PlU6U1XQ==", + "dependencies": { + "extend-shallow": "^2.0.1", + "is-number": "^3.0.0", + "repeat-string": "^1.6.1", + "to-regex-range": "^2.1.0" + }, + "engines": { + "node": ">=0.10.0" + } + }, + "node_modules/jest-message-util/node_modules/fill-range/node_modules/extend-shallow": { + "version": "2.0.1", + "resolved": "https://registry.npmjs.org/extend-shallow/-/extend-shallow-2.0.1.tgz", + "integrity": "sha512-zCnTtlxNoAiDc3gqY2aYAWFx7XWWiasuF2K8Me5WbN8otHKTUKBwjPtNpRs/rbUZm7KxWAaNj7P1a/p52GbVug==", + "dependencies": { + "is-extendable": "^0.1.0" + }, + "engines": { + "node": ">=0.10.0" + } + }, "node_modules/jest-message-util/node_modules/has-flag": { "version": "3.0.0", "resolved": "https://registry.npmjs.org/has-flag/-/has-flag-3.0.0.tgz", @@ -11068,6 +12604,67 @@ "node": ">=4" } }, + "node_modules/jest-message-util/node_modules/is-number": { + "version": "3.0.0", + "resolved": 
"https://registry.npmjs.org/is-number/-/is-number-3.0.0.tgz", + "integrity": "sha512-4cboCqIpliH+mAvFNegjZQ4kgKc3ZUhQVr3HvWbSh5q3WH2v82ct+T2Y1hdU5Gdtorx/cLifQjqCbL7bpznLTg==", + "dependencies": { + "kind-of": "^3.0.2" + }, + "engines": { + "node": ">=0.10.0" + } + }, + "node_modules/jest-message-util/node_modules/is-number/node_modules/kind-of": { + "version": "3.2.2", + "resolved": "https://registry.npmjs.org/kind-of/-/kind-of-3.2.2.tgz", + "integrity": "sha512-NOW9QQXMoZGg/oqnVNoNTTIFEIid1627WCffUBJEdMxYApq7mNE7CpzucIPc+ZQg25Phej7IJSmX3hO+oblOtQ==", + "dependencies": { + "is-buffer": "^1.1.5" + }, + "engines": { + "node": ">=0.10.0" + } + }, + "node_modules/jest-message-util/node_modules/kind-of": { + "version": "6.0.3", + "resolved": "https://registry.npmjs.org/kind-of/-/kind-of-6.0.3.tgz", + "integrity": "sha512-dcS1ul+9tmeD95T+x28/ehLgd9mENa3LsvDTtzm3vyBEO7RPptvAD+t44WVXaUjTBRcrpFeFlC8WCruUR456hw==", + "engines": { + "node": ">=0.10.0" + } + }, + "node_modules/jest-message-util/node_modules/micromatch": { + "version": "3.1.10", + "resolved": "https://registry.npmjs.org/micromatch/-/micromatch-3.1.10.tgz", + "integrity": "sha512-MWikgl9n9M3w+bpsY3He8L+w9eF9338xRl8IAO5viDizwSzziFEyUzo2xrrloB64ADbTf8uA8vRqqttDTOmccg==", + "dependencies": { + "arr-diff": "^4.0.0", + "array-unique": "^0.3.2", + "braces": "^2.3.1", + "define-property": "^2.0.2", + "extend-shallow": "^3.0.2", + "extglob": "^2.0.4", + "fragment-cache": "^0.2.1", + "kind-of": "^6.0.2", + "nanomatch": "^1.2.9", + "object.pick": "^1.3.0", + "regex-not": "^1.0.0", + "snapdragon": "^0.8.1", + "to-regex": "^3.0.2" + }, + "engines": { + "node": ">=0.10.0" + } + }, + "node_modules/jest-message-util/node_modules/slash": { + "version": "2.0.0", + "resolved": "https://registry.npmjs.org/slash/-/slash-2.0.0.tgz", + "integrity": "sha512-ZYKh3Wh2z1PpEXWr0MpSBZ0V6mZHAQfYevttO11c51CaWjGTaadiKZ+wVt1PbMlDV5qhMFslpZCemhwOK7C89A==", + "engines": { + "node": ">=6" + } + }, 
"node_modules/jest-message-util/node_modules/supports-color": { "version": "5.5.0", "resolved": "https://registry.npmjs.org/supports-color/-/supports-color-5.5.0.tgz", @@ -11079,6 +12676,18 @@ "node": ">=4" } }, + "node_modules/jest-message-util/node_modules/to-regex-range": { + "version": "2.1.1", + "resolved": "https://registry.npmjs.org/to-regex-range/-/to-regex-range-2.1.1.tgz", + "integrity": "sha512-ZZWNfCjUokXXDGXFpZehJIkZqq91BcULFq/Pi7M5i4JnxXdhMKAK682z8bCW3o8Hj1wuuzoKcW3DfVzaP6VuNg==", + "dependencies": { + "is-number": "^3.0.0", + "repeat-string": "^1.6.1" + }, + "engines": { + "node": ">=0.10.0" + } + }, "node_modules/jest-mock": { "version": "24.9.0", "resolved": "https://registry.npmjs.org/jest-mock/-/jest-mock-24.9.0.tgz", @@ -11179,6 +12788,14 @@ "resolved": "https://registry.npmjs.org/color-name/-/color-name-1.1.3.tgz", "integrity": "sha512-72fSenhMw2HZMTVHeCA9KCmpEIbzWiQsjN+BHcBbS9vr1mtt+vJjPdksIBNUmKAW8TFUDPJK5SUU3QhE9NEXDw==" }, + "node_modules/jest-resolve/node_modules/escape-string-regexp": { + "version": "1.0.5", + "resolved": "https://registry.npmjs.org/escape-string-regexp/-/escape-string-regexp-1.0.5.tgz", + "integrity": "sha512-vbRorB5FUQWvla16U8R/qgaFIya2qGzwDrNmCZuYKrbdSUMG6I1ZCGQRefkRVhuOkIGVne7BQ35DSfo1qvJqFg==", + "engines": { + "node": ">=0.8.0" + } + }, "node_modules/jest-resolve/node_modules/has-flag": { "version": "3.0.0", "resolved": "https://registry.npmjs.org/has-flag/-/has-flag-3.0.0.tgz", @@ -11264,6 +12881,14 @@ "resolved": "https://registry.npmjs.org/color-name/-/color-name-1.1.3.tgz", "integrity": "sha512-72fSenhMw2HZMTVHeCA9KCmpEIbzWiQsjN+BHcBbS9vr1mtt+vJjPdksIBNUmKAW8TFUDPJK5SUU3QhE9NEXDw==" }, + "node_modules/jest-runner/node_modules/escape-string-regexp": { + "version": "1.0.5", + "resolved": "https://registry.npmjs.org/escape-string-regexp/-/escape-string-regexp-1.0.5.tgz", + "integrity": "sha512-vbRorB5FUQWvla16U8R/qgaFIya2qGzwDrNmCZuYKrbdSUMG6I1ZCGQRefkRVhuOkIGVne7BQ35DSfo1qvJqFg==", + "engines": { + "node": 
">=0.8.0" + } + }, "node_modules/jest-runner/node_modules/has-flag": { "version": "3.0.0", "resolved": "https://registry.npmjs.org/has-flag/-/has-flag-3.0.0.tgz", @@ -11356,6 +12981,14 @@ "resolved": "https://registry.npmjs.org/color-name/-/color-name-1.1.3.tgz", "integrity": "sha512-72fSenhMw2HZMTVHeCA9KCmpEIbzWiQsjN+BHcBbS9vr1mtt+vJjPdksIBNUmKAW8TFUDPJK5SUU3QhE9NEXDw==" }, + "node_modules/jest-runtime/node_modules/escape-string-regexp": { + "version": "1.0.5", + "resolved": "https://registry.npmjs.org/escape-string-regexp/-/escape-string-regexp-1.0.5.tgz", + "integrity": "sha512-vbRorB5FUQWvla16U8R/qgaFIya2qGzwDrNmCZuYKrbdSUMG6I1ZCGQRefkRVhuOkIGVne7BQ35DSfo1qvJqFg==", + "engines": { + "node": ">=0.8.0" + } + }, "node_modules/jest-runtime/node_modules/has-flag": { "version": "3.0.0", "resolved": "https://registry.npmjs.org/has-flag/-/has-flag-3.0.0.tgz", @@ -11364,6 +12997,14 @@ "node": ">=4" } }, + "node_modules/jest-runtime/node_modules/slash": { + "version": "2.0.0", + "resolved": "https://registry.npmjs.org/slash/-/slash-2.0.0.tgz", + "integrity": "sha512-ZYKh3Wh2z1PpEXWr0MpSBZ0V6mZHAQfYevttO11c51CaWjGTaadiKZ+wVt1PbMlDV5qhMFslpZCemhwOK7C89A==", + "engines": { + "node": ">=6" + } + }, "node_modules/jest-runtime/node_modules/supports-color": { "version": "5.5.0", "resolved": "https://registry.npmjs.org/supports-color/-/supports-color-5.5.0.tgz", @@ -11459,6 +13100,14 @@ "node": ">= 6" } }, + "node_modules/jest-snapshot/node_modules/escape-string-regexp": { + "version": "1.0.5", + "resolved": "https://registry.npmjs.org/escape-string-regexp/-/escape-string-regexp-1.0.5.tgz", + "integrity": "sha512-vbRorB5FUQWvla16U8R/qgaFIya2qGzwDrNmCZuYKrbdSUMG6I1ZCGQRefkRVhuOkIGVne7BQ35DSfo1qvJqFg==", + "engines": { + "node": ">=0.8.0" + } + }, "node_modules/jest-snapshot/node_modules/has-flag": { "version": "3.0.0", "resolved": "https://registry.npmjs.org/has-flag/-/has-flag-3.0.0.tgz", @@ -11503,6 +13152,14 @@ "node": ">= 6" } }, + 
"node_modules/jest-snapshot/node_modules/semver": { + "version": "6.3.0", + "resolved": "https://registry.npmjs.org/semver/-/semver-6.3.0.tgz", + "integrity": "sha512-b39TBaTSfV6yBrapU89p5fKekE2m/NwnDocOVruQFS1/veMgdzuPcnOM34M6CwxW8jH/lxEa5rBoDeUwu5HHTw==", + "bin": { + "semver": "bin/semver.js" + } + }, "node_modules/jest-snapshot/node_modules/supports-color": { "version": "5.5.0", "resolved": "https://registry.npmjs.org/supports-color/-/supports-color-5.5.0.tgz", @@ -11547,14 +13204,6 @@ "node": ">=4" } }, - "node_modules/jest-util/node_modules/callsites": { - "version": "3.1.0", - "resolved": "https://registry.npmjs.org/callsites/-/callsites-3.1.0.tgz", - "integrity": "sha512-P8BjAsXvZS+VIDUI11hHCQEv74YT67YUi5JJFNWIqL235sBmjX4+qx9Muvls5ivyNENctx46xQLQ3aTuE7ssaQ==", - "engines": { - "node": ">=6" - } - }, "node_modules/jest-util/node_modules/chalk": { "version": "2.4.2", "resolved": "https://registry.npmjs.org/chalk/-/chalk-2.4.2.tgz", @@ -11581,6 +13230,14 @@ "resolved": "https://registry.npmjs.org/color-name/-/color-name-1.1.3.tgz", "integrity": "sha512-72fSenhMw2HZMTVHeCA9KCmpEIbzWiQsjN+BHcBbS9vr1mtt+vJjPdksIBNUmKAW8TFUDPJK5SUU3QhE9NEXDw==" }, + "node_modules/jest-util/node_modules/escape-string-regexp": { + "version": "1.0.5", + "resolved": "https://registry.npmjs.org/escape-string-regexp/-/escape-string-regexp-1.0.5.tgz", + "integrity": "sha512-vbRorB5FUQWvla16U8R/qgaFIya2qGzwDrNmCZuYKrbdSUMG6I1ZCGQRefkRVhuOkIGVne7BQ35DSfo1qvJqFg==", + "engines": { + "node": ">=0.8.0" + } + }, "node_modules/jest-util/node_modules/has-flag": { "version": "3.0.0", "resolved": "https://registry.npmjs.org/has-flag/-/has-flag-3.0.0.tgz", @@ -11589,6 +13246,14 @@ "node": ">=4" } }, + "node_modules/jest-util/node_modules/slash": { + "version": "2.0.0", + "resolved": "https://registry.npmjs.org/slash/-/slash-2.0.0.tgz", + "integrity": "sha512-ZYKh3Wh2z1PpEXWr0MpSBZ0V6mZHAQfYevttO11c51CaWjGTaadiKZ+wVt1PbMlDV5qhMFslpZCemhwOK7C89A==", + "engines": { + "node": ">=6" + } + }, 
"node_modules/jest-util/node_modules/supports-color": { "version": "5.5.0", "resolved": "https://registry.npmjs.org/supports-color/-/supports-color-5.5.0.tgz", @@ -11661,6 +13326,14 @@ "resolved": "https://registry.npmjs.org/color-name/-/color-name-1.1.3.tgz", "integrity": "sha512-72fSenhMw2HZMTVHeCA9KCmpEIbzWiQsjN+BHcBbS9vr1mtt+vJjPdksIBNUmKAW8TFUDPJK5SUU3QhE9NEXDw==" }, + "node_modules/jest-validate/node_modules/escape-string-regexp": { + "version": "1.0.5", + "resolved": "https://registry.npmjs.org/escape-string-regexp/-/escape-string-regexp-1.0.5.tgz", + "integrity": "sha512-vbRorB5FUQWvla16U8R/qgaFIya2qGzwDrNmCZuYKrbdSUMG6I1ZCGQRefkRVhuOkIGVne7BQ35DSfo1qvJqFg==", + "engines": { + "node": ">=0.8.0" + } + }, "node_modules/jest-validate/node_modules/has-flag": { "version": "3.0.0", "resolved": "https://registry.npmjs.org/has-flag/-/has-flag-3.0.0.tgz", @@ -11716,6 +13389,28 @@ "strip-ansi": "^5.0.0" } }, + "node_modules/jest-watch-typeahead/node_modules/ansi-escapes": { + "version": "4.3.2", + "resolved": "https://registry.npmjs.org/ansi-escapes/-/ansi-escapes-4.3.2.tgz", + "integrity": "sha512-gKXj5ALrKWQLsYG9jlTRmR/xKluxHV+Z9QEwNIgCfM1/uwPMCuzVVnh5mwTd+OuBZcwSIMbqssNWRm1lE51QaQ==", + "dependencies": { + "type-fest": "^0.21.3" + }, + "engines": { + "node": ">=8" + }, + "funding": { + "url": "https://github.com/sponsors/sindresorhus" + } + }, + "node_modules/jest-watch-typeahead/node_modules/ansi-regex": { + "version": "4.1.1", + "resolved": "https://registry.npmjs.org/ansi-regex/-/ansi-regex-4.1.1.tgz", + "integrity": "sha512-ILlv4k/3f6vfQ4OoP2AGvirOktlQ98ZEL1k9FaQjxa3L1abBgbuTDAdPOpvbGncC0BTVQrl+OM8xZGK6tWXt7g==", + "engines": { + "node": ">=6" + } + }, "node_modules/jest-watch-typeahead/node_modules/ansi-styles": { "version": "3.2.1", "resolved": "https://registry.npmjs.org/ansi-styles/-/ansi-styles-3.2.1.tgz", @@ -11753,6 +13448,14 @@ "resolved": "https://registry.npmjs.org/color-name/-/color-name-1.1.3.tgz", "integrity": 
"sha512-72fSenhMw2HZMTVHeCA9KCmpEIbzWiQsjN+BHcBbS9vr1mtt+vJjPdksIBNUmKAW8TFUDPJK5SUU3QhE9NEXDw==" }, + "node_modules/jest-watch-typeahead/node_modules/escape-string-regexp": { + "version": "1.0.5", + "resolved": "https://registry.npmjs.org/escape-string-regexp/-/escape-string-regexp-1.0.5.tgz", + "integrity": "sha512-vbRorB5FUQWvla16U8R/qgaFIya2qGzwDrNmCZuYKrbdSUMG6I1ZCGQRefkRVhuOkIGVne7BQ35DSfo1qvJqFg==", + "engines": { + "node": ">=0.8.0" + } + }, "node_modules/jest-watch-typeahead/node_modules/has-flag": { "version": "3.0.0", "resolved": "https://registry.npmjs.org/has-flag/-/has-flag-3.0.0.tgz", @@ -11761,14 +13464,6 @@ "node": ">=4" } }, - "node_modules/jest-watch-typeahead/node_modules/slash": { - "version": "3.0.0", - "resolved": "https://registry.npmjs.org/slash/-/slash-3.0.0.tgz", - "integrity": "sha512-g9Q1haeby36OSStwb4ntCGGGaKsaVSjQ68fBxoQcutl5fS1vuY18H3wSt3jFyFtrkx+Kz0V1G85A4MyAdDMi2Q==", - "engines": { - "node": ">=8" - } - }, "node_modules/jest-watch-typeahead/node_modules/string-length": { "version": "3.1.0", "resolved": "https://registry.npmjs.org/string-length/-/string-length-3.1.0.tgz", @@ -11781,6 +13476,17 @@ "node": ">=8" } }, + "node_modules/jest-watch-typeahead/node_modules/strip-ansi": { + "version": "5.2.0", + "resolved": "https://registry.npmjs.org/strip-ansi/-/strip-ansi-5.2.0.tgz", + "integrity": "sha512-DuRs1gKbBqsMKIZlrffwlug8MHkcnpjs5VPmL1PAh+mA30U0DTotfDZ0d2UUsXpPmPmMMJ6W773MaA3J+lbiWA==", + "dependencies": { + "ansi-regex": "^4.1.0" + }, + "engines": { + "node": ">=6" + } + }, "node_modules/jest-watch-typeahead/node_modules/supports-color": { "version": "5.5.0", "resolved": "https://registry.npmjs.org/supports-color/-/supports-color-5.5.0.tgz", @@ -11792,6 +13498,17 @@ "node": ">=4" } }, + "node_modules/jest-watch-typeahead/node_modules/type-fest": { + "version": "0.21.3", + "resolved": "https://registry.npmjs.org/type-fest/-/type-fest-0.21.3.tgz", + "integrity": 
"sha512-t0rzBq87m3fVcduHDUFhKmyyX+9eo6WQjZvf51Ea/M0Q7+T374Jp1aUiyUl0GKxp8M/OETVHSDvmkyPgvX+X2w==", + "engines": { + "node": ">=10" + }, + "funding": { + "url": "https://github.com/sponsors/sindresorhus" + } + }, "node_modules/jest-watcher": { "version": "24.9.0", "resolved": "https://registry.npmjs.org/jest-watcher/-/jest-watcher-24.9.0.tgz", @@ -11809,14 +13526,6 @@ "node": ">= 6" } }, - "node_modules/jest-watcher/node_modules/ansi-escapes": { - "version": "3.2.0", - "resolved": "https://registry.npmjs.org/ansi-escapes/-/ansi-escapes-3.2.0.tgz", - "integrity": "sha512-cBhpre4ma+U0T1oM5fXg7Dy1Jw7zzwv7lt/GoCpr+hDQJoYnKVPLL4dCvSEFMmQurOQvSrwT7SL/DAlhBI97RQ==", - "engines": { - "node": ">=4" - } - }, "node_modules/jest-watcher/node_modules/ansi-styles": { "version": "3.2.1", "resolved": "https://registry.npmjs.org/ansi-styles/-/ansi-styles-3.2.1.tgz", @@ -11854,6 +13563,14 @@ "resolved": "https://registry.npmjs.org/color-name/-/color-name-1.1.3.tgz", "integrity": "sha512-72fSenhMw2HZMTVHeCA9KCmpEIbzWiQsjN+BHcBbS9vr1mtt+vJjPdksIBNUmKAW8TFUDPJK5SUU3QhE9NEXDw==" }, + "node_modules/jest-watcher/node_modules/escape-string-regexp": { + "version": "1.0.5", + "resolved": "https://registry.npmjs.org/escape-string-regexp/-/escape-string-regexp-1.0.5.tgz", + "integrity": "sha512-vbRorB5FUQWvla16U8R/qgaFIya2qGzwDrNmCZuYKrbdSUMG6I1ZCGQRefkRVhuOkIGVne7BQ35DSfo1qvJqFg==", + "engines": { + "node": ">=0.8.0" + } + }, "node_modules/jest-watcher/node_modules/has-flag": { "version": "3.0.0", "resolved": "https://registry.npmjs.org/has-flag/-/has-flag-3.0.0.tgz", @@ -12158,12 +13875,12 @@ } }, "node_modules/jsx-ast-utils": { - "version": "2.4.1", - "resolved": "https://registry.npmjs.org/jsx-ast-utils/-/jsx-ast-utils-2.4.1.tgz", - "integrity": "sha512-z1xSldJ6imESSzOjd3NNkieVJKRlKYSOtMG8SFyCj2FIrvSaSuli/WjpBkEzCBoR9bYYYFgqJw61Xhu7Lcgk+w==", + "version": "3.3.0", + "resolved": "https://registry.npmjs.org/jsx-ast-utils/-/jsx-ast-utils-3.3.0.tgz", + "integrity": 
"sha512-XzO9luP6L0xkxwhIJMTJQpZo/eeN60K08jHdexfD569AGxeNug6UketeHXEhROoM8aR7EcUoOQmIhcJQjcuq8Q==", "dependencies": { - "array-includes": "^3.1.1", - "object.assign": "^4.1.0" + "array-includes": "^3.1.4", + "object.assign": "^4.1.2" }, "engines": { "node": ">=4.0" @@ -12193,6 +13910,19 @@ "node": ">=6" } }, + "node_modules/language-subtag-registry": { + "version": "0.3.21", + "resolved": "https://registry.npmjs.org/language-subtag-registry/-/language-subtag-registry-0.3.21.tgz", + "integrity": "sha512-L0IqwlIXjilBVVYKFT37X9Ih11Um5NEl9cbJIuU/SwP/zEEAbBPOnEeeuxVMf45ydWQRDQN3Nqc96OgbH1K+Pg==" + }, + "node_modules/language-tags": { + "version": "1.0.5", + "resolved": "https://registry.npmjs.org/language-tags/-/language-tags-1.0.5.tgz", + "integrity": "sha512-qJhlO9cGXi6hBGKoxEG/sKZDAHD5Hnu9Hs4WbOY3pCWXDhw0N8x1NenNzm2EnNLkLkk7J2SdxAkDSbb6ftT+UQ==", + "dependencies": { + "language-subtag-registry": "~0.3.2" + } + }, "node_modules/last-call-webpack-plugin": { "version": "3.0.0", "resolved": "https://registry.npmjs.org/last-call-webpack-plugin/-/last-call-webpack-plugin-3.0.0.tgz", @@ -12236,12 +13966,12 @@ } }, "node_modules/levn": { - "version": "0.3.0", - "resolved": "https://registry.npmjs.org/levn/-/levn-0.3.0.tgz", - "integrity": "sha512-0OO4y2iOHix2W6ujICbKIaEQXvFQHue65vUG3pb5EUomzPI90z9hsA1VsO/dbIIpC53J8gxM9Q4Oho0jrCM/yA==", + "version": "0.4.1", + "resolved": "https://registry.npmjs.org/levn/-/levn-0.4.1.tgz", + "integrity": "sha512-+bT2uH4E5LGE7h/n3evcS/sQlJXCpIp6ym8OWJ5eV6+67Dsql/LaaT7qJBAt2rzfoa/5QBGBhxDix1dMt2kQKQ==", "dependencies": { - "prelude-ls": "~1.1.2", - "type-check": "~0.3.2" + "prelude-ls": "^1.2.1", + "type-check": "~0.4.0" }, "engines": { "node": ">= 0.8.0" @@ -12253,36 +13983,25 @@ "integrity": "sha512-7ylylesZQ/PV29jhEDl3Ufjo6ZX7gCqJr5F7PKrqc93v7fzSymt1BpwEU8nAUXs8qzzvqhbjhK5QZg6Mt/HkBg==" }, "node_modules/load-json-file": { - "version": "2.0.0", - "resolved": "https://registry.npmjs.org/load-json-file/-/load-json-file-2.0.0.tgz", - "integrity": 
"sha512-3p6ZOGNbiX4CdvEd1VcE6yi78UrGNpjHO33noGwHCnT/o2fyllJDepsm8+mFFv/DvtwFHht5HIHSyOy5a+ChVQ==", + "version": "4.0.0", + "resolved": "https://registry.npmjs.org/load-json-file/-/load-json-file-4.0.0.tgz", + "integrity": "sha512-Kx8hMakjX03tiGTLAIdJ+lL0htKnXjEZN6hk/tozf/WOuYGdZBJrZ+rCJRbVCugsjB3jMLn9746NsQIf5VjBMw==", "dependencies": { "graceful-fs": "^4.1.2", - "parse-json": "^2.2.0", - "pify": "^2.0.0", + "parse-json": "^4.0.0", + "pify": "^3.0.0", "strip-bom": "^3.0.0" }, "engines": { "node": ">=4" } }, - "node_modules/load-json-file/node_modules/parse-json": { - "version": "2.2.0", - "resolved": "https://registry.npmjs.org/parse-json/-/parse-json-2.2.0.tgz", - "integrity": "sha512-QR/GGaKCkhwk1ePQNYDRKYZ3mwU9ypsKhB0XyFnLQdomyEqk3e8wpW3V5Jp88zbxK4n5ST1nqo+g9juTpownhQ==", - "dependencies": { - "error-ex": "^1.2.0" - }, - "engines": { - "node": ">=0.10.0" - } - }, "node_modules/load-json-file/node_modules/pify": { - "version": "2.3.0", - "resolved": "https://registry.npmjs.org/pify/-/pify-2.3.0.tgz", - "integrity": "sha512-udgsAY+fTnvv7kI7aaxbqwWNb0AHiB0qBO89PZKPkoTmGOgdbrHDKD+0B2X4uTfJ/FT1R09r9gTsjUjNJotuog==", + "version": "3.0.0", + "resolved": "https://registry.npmjs.org/pify/-/pify-3.0.0.tgz", + "integrity": "sha512-C3FsVNH1udSEX48gGX1xfvwTWfsYWj5U+8/uK15BGzIGrKoUpghX8hWZwa/OFnakBiiVNmBvemTJR5mcy7iPcg==", "engines": { - "node": ">=0.10.0" + "node": ">=4" } }, "node_modules/loader-fs-cache": { @@ -12374,15 +14093,15 @@ } }, "node_modules/locate-path": { - "version": "3.0.0", - "resolved": "https://registry.npmjs.org/locate-path/-/locate-path-3.0.0.tgz", - "integrity": "sha512-7AO748wWnIhNqAuaty2ZWHkQHRSNfPVIsPIfwEOWO22AmaoVrWavlOcMR5nzTLNYvp36X220/maaRsrec1G65A==", + "version": "2.0.0", + "resolved": "https://registry.npmjs.org/locate-path/-/locate-path-2.0.0.tgz", + "integrity": "sha512-NCI2kiDkyR7VeEKm27Kda/iQHyKJe1Bu0FlTbYp3CqJu+9IFe9bLyAjMxf5ZDDbEg+iMPzB5zYyUTSm8wVTKmA==", "dependencies": { - "p-locate": "^3.0.0", + "p-locate": "^2.0.0", "path-exists": 
"^3.0.0" }, "engines": { - "node": ">=6" + "node": ">=4" } }, "node_modules/lodash": { @@ -12405,6 +14124,11 @@ "resolved": "https://registry.npmjs.org/lodash.memoize/-/lodash.memoize-4.1.2.tgz", "integrity": "sha512-t7j+NzmgnQzTAYXcsHYLgimltOV1MXHtlOWf6GjL9Kj8GK5FInw5JotxvbOs+IvV1/Dzo04/fCGfLVs7aXb4Ag==" }, + "node_modules/lodash.merge": { + "version": "4.6.2", + "resolved": "https://registry.npmjs.org/lodash.merge/-/lodash.merge-4.6.2.tgz", + "integrity": "sha512-0KpjqXRVvrYyCsX1swR/XTK0va6VQkQM6MNo7PqW77ByjAhoARA8EfrP1N4+KlKj8YS0ZUCtRT/YUuhyYDujIQ==" + }, "node_modules/lodash.sortby": { "version": "4.7.0", "resolved": "https://registry.npmjs.org/lodash.sortby/-/lodash.sortby-4.7.0.tgz", @@ -12473,11 +14197,14 @@ } }, "node_modules/lru-cache": { - "version": "5.1.1", - "resolved": "https://registry.npmjs.org/lru-cache/-/lru-cache-5.1.1.tgz", - "integrity": "sha512-KpNARQA3Iwv+jTA0utUVVbrh+Jlrr1Fv0e56GGzAFOXN7dk/FviaDW8LHmK52DlcH4WP2n6gI8vN1aesBFgo9w==", + "version": "6.0.0", + "resolved": "https://registry.npmjs.org/lru-cache/-/lru-cache-6.0.0.tgz", + "integrity": "sha512-Jo6dJ04CmSjuznwJSS3pUeWmd/H0ffTlkXXgwZi+eq1UCmqQwCh+eLsYOYCwY991i2Fah4h1BEMCx4qThGbsiA==", "dependencies": { - "yallist": "^3.0.2" + "yallist": "^4.0.0" + }, + "engines": { + "node": ">=10" } }, "node_modules/make-dir": { @@ -13397,34 +15124,16 @@ ] }, "node_modules/micromatch": { - "version": "3.1.10", - "resolved": "https://registry.npmjs.org/micromatch/-/micromatch-3.1.10.tgz", - "integrity": "sha512-MWikgl9n9M3w+bpsY3He8L+w9eF9338xRl8IAO5viDizwSzziFEyUzo2xrrloB64ADbTf8uA8vRqqttDTOmccg==", + "version": "4.0.5", + "resolved": "https://registry.npmjs.org/micromatch/-/micromatch-4.0.5.tgz", + "integrity": "sha512-DMy+ERcEW2q8Z2Po+WNXuw3c5YaUSFjAO5GsJqfEl7UjvtIuFKO6ZrKvcItdy98dwFI2N1tg3zNIdKaQT+aNdA==", + "dev": true, "dependencies": { - "arr-diff": "^4.0.0", - "array-unique": "^0.3.2", - "braces": "^2.3.1", - "define-property": "^2.0.2", - "extend-shallow": "^3.0.2", - "extglob": "^2.0.4", - 
"fragment-cache": "^0.2.1", - "kind-of": "^6.0.2", - "nanomatch": "^1.2.9", - "object.pick": "^1.3.0", - "regex-not": "^1.0.0", - "snapdragon": "^0.8.1", - "to-regex": "^3.0.2" + "braces": "^3.0.2", + "picomatch": "^2.3.1" }, "engines": { - "node": ">=0.10.0" - } - }, - "node_modules/micromatch/node_modules/kind-of": { - "version": "6.0.3", - "resolved": "https://registry.npmjs.org/kind-of/-/kind-of-6.0.3.tgz", - "integrity": "sha512-dcS1ul+9tmeD95T+x28/ehLgd9mENa3LsvDTtzm3vyBEO7RPptvAD+t44WVXaUjTBRcrpFeFlC8WCruUR456hw==", - "engines": { - "node": ">=0.10.0" + "node": ">=8.6" } }, "node_modules/miller-rabin": { @@ -13582,11 +15291,6 @@ "node": ">=8" } }, - "node_modules/minipass/node_modules/yallist": { - "version": "4.0.0", - "resolved": "https://registry.npmjs.org/yallist/-/yallist-4.0.0.tgz", - "integrity": "sha512-3wdGidZyq5PB084XLES5TpOSRA3wjXAlIWMhum2kRcv/41Sn2emQ0dycQW4uZXLejwKvg6EsvbdlVL+FYEct7A==" - }, "node_modules/mississippi": { "version": "3.0.0", "resolved": "https://registry.npmjs.org/mississippi/-/mississippi-3.0.0.tgz", @@ -13619,6 +15323,17 @@ "node": ">=0.10.0" } }, + "node_modules/mixin-deep/node_modules/is-extendable": { + "version": "1.0.1", + "resolved": "https://registry.npmjs.org/is-extendable/-/is-extendable-1.0.1.tgz", + "integrity": "sha512-arnXMxT1hhoKo9k1LZdmlNyJdDDfy2v0fXjFlmok4+i8ul/6WlbVge9bhM74OpNPQPMGUToDtz+KXa1PneJxOA==", + "dependencies": { + "is-plain-object": "^2.0.4" + }, + "engines": { + "node": ">=0.10.0" + } + }, "node_modules/mixin-object": { "version": "2.0.1", "resolved": "https://registry.npmjs.org/mixin-object/-/mixin-object-2.0.1.tgz", @@ -13639,14 +15354,6 @@ "node": ">=0.10.0" } }, - "node_modules/mixin-object/node_modules/is-extendable": { - "version": "0.1.1", - "resolved": "https://registry.npmjs.org/is-extendable/-/is-extendable-0.1.1.tgz", - "integrity": "sha512-5BMULNob1vgFX6EjQw5izWDxrecWK9AM72rugNr0TFldMOi0fj6Jk+zeKIt0xGj4cEfQIJth4w3OKWOJ4f+AFw==", - "engines": { - "node": ">=0.10.0" - } - }, 
"node_modules/mkdirp": { "version": "0.5.6", "resolved": "https://registry.npmjs.org/mkdirp/-/mkdirp-0.5.6.tgz", @@ -13671,6 +15378,17 @@ "run-queue": "^1.0.3" } }, + "node_modules/move-concurrently/node_modules/rimraf": { + "version": "2.7.1", + "resolved": "https://registry.npmjs.org/rimraf/-/rimraf-2.7.1.tgz", + "integrity": "sha512-uWjbaKIK3T1OSVptzX7Nl6PvQ3qAGtKEtVRjRuazjfL3Bx5eI409VZSqgND+4UNnmzLVdPj9FqFJNPqBZFve4w==", + "dependencies": { + "glob": "^7.1.3" + }, + "bin": { + "rimraf": "bin.js" + } + }, "node_modules/mri": { "version": "1.2.0", "resolved": "https://registry.npmjs.org/mri/-/mri-1.2.0.tgz", @@ -13855,6 +15573,17 @@ "semver": "bin/semver" } }, + "node_modules/node-notifier/node_modules/which": { + "version": "1.3.1", + "resolved": "https://registry.npmjs.org/which/-/which-1.3.1.tgz", + "integrity": "sha512-HxJdYWq1MTIQbJ3nw0cqssHoTNU267KlrDuGZ1WYlxDStUtKUhOaJmh112/TZmHxxUfuJqPXSOm7tDyas0OSIQ==", + "dependencies": { + "isexe": "^2.0.0" + }, + "bin": { + "which": "bin/which" + } + }, "node_modules/node-releases": { "version": "2.0.5", "resolved": "https://registry.npmjs.org/node-releases/-/node-releases-2.0.5.tgz", @@ -13925,6 +15654,14 @@ "node": ">=4" } }, + "node_modules/npm-run-path/node_modules/path-key": { + "version": "2.0.1", + "resolved": "https://registry.npmjs.org/path-key/-/path-key-2.0.1.tgz", + "integrity": "sha512-fEHGKCSmUSDPv4uoj8AlD+joPlq3peND+HRYyxFz4KPw4z926S/b8rIuFs2FYJg3BwsxJf6A9/3eIdLaYC+9Dw==", + "engines": { + "node": ">=4" + } + }, "node_modules/nth-check": { "version": "2.1.1", "resolved": "https://registry.npmjs.org/nth-check/-/nth-check-2.1.1.tgz", @@ -14142,6 +15879,18 @@ "url": "https://github.com/sponsors/ljharb" } }, + "node_modules/object.hasown": { + "version": "1.1.1", + "resolved": "https://registry.npmjs.org/object.hasown/-/object.hasown-1.1.1.tgz", + "integrity": "sha512-LYLe4tivNQzq4JdaWW6WO3HMZZJWzkkH8fnI6EebWl0VZth2wL2Lovm74ep2/gZzlaTdV62JZHEqHQ2yVn8Q/A==", + "dependencies": { + "define-properties": 
"^1.1.4", + "es-abstract": "^1.19.5" + }, + "funding": { + "url": "https://github.com/sponsors/ljharb" + } + }, "node_modules/object.pick": { "version": "1.3.0", "resolved": "https://registry.npmjs.org/object.pick/-/object.pick-1.3.0.tgz", @@ -14265,16 +16014,16 @@ } }, "node_modules/optionator": { - "version": "0.8.3", - "resolved": "https://registry.npmjs.org/optionator/-/optionator-0.8.3.tgz", - "integrity": "sha512-+IW9pACdk3XWmmTXG8m3upGUJst5XRGzxMRjXzAuJ1XnIFNvfhjjIuYkDvysnPQ7qzqVzLt78BCruntqRhWQbA==", + "version": "0.9.1", + "resolved": "https://registry.npmjs.org/optionator/-/optionator-0.9.1.tgz", + "integrity": "sha512-74RlY5FCnhq4jRxVUPKDaRwrVNXMqsGsiW6AJw4XK8hmtm10wC0ypZBLw5IIp85NZMr91+qd1RvvENwg7jjRFw==", "dependencies": { - "deep-is": "~0.1.3", - "fast-levenshtein": "~2.0.6", - "levn": "~0.3.0", - "prelude-ls": "~1.1.2", - "type-check": "~0.3.2", - "word-wrap": "~1.2.3" + "deep-is": "^0.1.3", + "fast-levenshtein": "^2.0.6", + "levn": "^0.4.1", + "prelude-ls": "^1.2.1", + "type-check": "^0.4.0", + "word-wrap": "^1.2.3" }, "engines": { "node": ">= 0.8.0" @@ -14313,28 +16062,25 @@ } }, "node_modules/p-limit": { - "version": "2.3.0", - "resolved": "https://registry.npmjs.org/p-limit/-/p-limit-2.3.0.tgz", - "integrity": "sha512-//88mFWSJx8lxCzwdAABTJL2MyWB12+eIY7MDL2SqLmAkeKU9qxRvWuSyTjm3FUmpBEMuFfckAIqEaVGUDxb6w==", + "version": "1.3.0", + "resolved": "https://registry.npmjs.org/p-limit/-/p-limit-1.3.0.tgz", + "integrity": "sha512-vvcXsLAJ9Dr5rQOPk7toZQZJApBl2K4J6dANSsEuh6QI41JYcsS/qhTGa9ErIUUgK3WNQoJYvylxvjqmiqEA9Q==", "dependencies": { - "p-try": "^2.0.0" + "p-try": "^1.0.0" }, "engines": { - "node": ">=6" - }, - "funding": { - "url": "https://github.com/sponsors/sindresorhus" + "node": ">=4" } }, "node_modules/p-locate": { - "version": "3.0.0", - "resolved": "https://registry.npmjs.org/p-locate/-/p-locate-3.0.0.tgz", - "integrity": "sha512-x+12w/To+4GFfgJhBEpiDcLozRJGegY+Ei7/z0tSLkMmxGZNybVMSfWj9aJn8Z5Fc7dBUNJOOVgPv2H7IwulSQ==", + "version": "2.0.0", + 
"resolved": "https://registry.npmjs.org/p-locate/-/p-locate-2.0.0.tgz", + "integrity": "sha512-nQja7m7gSKuewoVRen45CtVfODR3crN3goVQ0DDZ9N3yHxgpkuBhZqsaiotSQRrADUrne346peY7kT3TSACykg==", "dependencies": { - "p-limit": "^2.0.0" + "p-limit": "^1.1.0" }, "engines": { - "node": ">=6" + "node": ">=4" } }, "node_modules/p-map": { @@ -14368,11 +16114,11 @@ } }, "node_modules/p-try": { - "version": "2.2.0", - "resolved": "https://registry.npmjs.org/p-try/-/p-try-2.2.0.tgz", - "integrity": "sha512-R4nPAVTAU0B9D35/Gk3uJf/7XYbQcyohSKdvAxIRSNghFl4e71hVoGnBNQz9cWaXxO2I10KTC+3jMdvvoKw6dQ==", + "version": "1.0.0", + "resolved": "https://registry.npmjs.org/p-try/-/p-try-1.0.0.tgz", + "integrity": "sha512-U1etNYuMJoIz3ZXSrrySFjsXQTWOx2/jdi86L+2pRvph/qMKL6sbcCYdH23fqsbm8TH2Gn0OybpT4eSFlCVHww==", "engines": { - "node": ">=6" + "node": ">=4" } }, "node_modules/pako": { @@ -14410,14 +16156,6 @@ "node": ">=6" } }, - "node_modules/parent-module/node_modules/callsites": { - "version": "3.1.0", - "resolved": "https://registry.npmjs.org/callsites/-/callsites-3.1.0.tgz", - "integrity": "sha512-P8BjAsXvZS+VIDUI11hHCQEv74YT67YUi5JJFNWIqL235sBmjX4+qx9Muvls5ivyNENctx46xQLQ3aTuE7ssaQ==", - "engines": { - "node": ">=6" - } - }, "node_modules/parse-asn1": { "version": "5.1.6", "resolved": "https://registry.npmjs.org/parse-asn1/-/parse-asn1-5.1.6.tgz", @@ -14504,11 +16242,11 @@ "integrity": "sha512-DUWJr3+ULp4zXmol/SZkFf3JGsS9/SIv+Y3Rt93/UjPpDpklB5f1er4O3POIbUuUJ3FXgqte2Q7SrU6zAqwk8w==" }, "node_modules/path-key": { - "version": "2.0.1", - "resolved": "https://registry.npmjs.org/path-key/-/path-key-2.0.1.tgz", - "integrity": "sha512-fEHGKCSmUSDPv4uoj8AlD+joPlq3peND+HRYyxFz4KPw4z926S/b8rIuFs2FYJg3BwsxJf6A9/3eIdLaYC+9Dw==", + "version": "3.1.1", + "resolved": "https://registry.npmjs.org/path-key/-/path-key-3.1.1.tgz", + "integrity": "sha512-ojmeN0qd+y0jszEtoY48r0Peq5dwMEkIlCOu6Q5f41lfkswXuKtYrhgoTpLnyIcHm24Uhqx+5Tqm2InSwLhE6Q==", "engines": { - "node": ">=4" + "node": ">=8" } }, 
"node_modules/path-parse": { @@ -14525,22 +16263,11 @@ } }, "node_modules/path-type": { - "version": "3.0.0", - "resolved": "https://registry.npmjs.org/path-type/-/path-type-3.0.0.tgz", - "integrity": "sha512-T2ZUsdZFHgA3u4e5PfPbjd7HDDpxPnQb5jN0SrDsjNSuVXHJqtwTnWqG0B1jZrgmJ/7lj1EmVIByWt1gxGkWvg==", - "dependencies": { - "pify": "^3.0.0" - }, - "engines": { - "node": ">=4" - } - }, - "node_modules/path-type/node_modules/pify": { - "version": "3.0.0", - "resolved": "https://registry.npmjs.org/pify/-/pify-3.0.0.tgz", - "integrity": "sha512-C3FsVNH1udSEX48gGX1xfvwTWfsYWj5U+8/uK15BGzIGrKoUpghX8hWZwa/OFnakBiiVNmBvemTJR5mcy7iPcg==", + "version": "4.0.0", + "resolved": "https://registry.npmjs.org/path-type/-/path-type-4.0.0.tgz", + "integrity": "sha512-gDKb8aZMDeD/tZWs9P6+q0J9Mwkdl6xMV8TjnGP3qJVJ06bdMgkbBlLU8IdfOsIsFz2BW1rNVT3XuNEl8zPAvw==", "engines": { - "node": ">=4" + "node": ">=8" } }, "node_modules/pbkdf2": { @@ -14625,6 +16352,62 @@ "node": ">=6" } }, + "node_modules/pkg-dir/node_modules/find-up": { + "version": "3.0.0", + "resolved": "https://registry.npmjs.org/find-up/-/find-up-3.0.0.tgz", + "integrity": "sha512-1yD6RmLI1XBfxugvORwlck6f75tYL+iR0jqwsOrOxMZyGYqUuDhJ0l4AXdO1iX/FTs9cBAMEk1gWSEx1kSbylg==", + "dependencies": { + "locate-path": "^3.0.0" + }, + "engines": { + "node": ">=6" + } + }, + "node_modules/pkg-dir/node_modules/locate-path": { + "version": "3.0.0", + "resolved": "https://registry.npmjs.org/locate-path/-/locate-path-3.0.0.tgz", + "integrity": "sha512-7AO748wWnIhNqAuaty2ZWHkQHRSNfPVIsPIfwEOWO22AmaoVrWavlOcMR5nzTLNYvp36X220/maaRsrec1G65A==", + "dependencies": { + "p-locate": "^3.0.0", + "path-exists": "^3.0.0" + }, + "engines": { + "node": ">=6" + } + }, + "node_modules/pkg-dir/node_modules/p-limit": { + "version": "2.3.0", + "resolved": "https://registry.npmjs.org/p-limit/-/p-limit-2.3.0.tgz", + "integrity": "sha512-//88mFWSJx8lxCzwdAABTJL2MyWB12+eIY7MDL2SqLmAkeKU9qxRvWuSyTjm3FUmpBEMuFfckAIqEaVGUDxb6w==", + "dependencies": { + "p-try": "^2.0.0" + }, 
+ "engines": { + "node": ">=6" + }, + "funding": { + "url": "https://github.com/sponsors/sindresorhus" + } + }, + "node_modules/pkg-dir/node_modules/p-locate": { + "version": "3.0.0", + "resolved": "https://registry.npmjs.org/p-locate/-/p-locate-3.0.0.tgz", + "integrity": "sha512-x+12w/To+4GFfgJhBEpiDcLozRJGegY+Ei7/z0tSLkMmxGZNybVMSfWj9aJn8Z5Fc7dBUNJOOVgPv2H7IwulSQ==", + "dependencies": { + "p-limit": "^2.0.0" + }, + "engines": { + "node": ">=6" + } + }, + "node_modules/pkg-dir/node_modules/p-try": { + "version": "2.2.0", + "resolved": "https://registry.npmjs.org/p-try/-/p-try-2.2.0.tgz", + "integrity": "sha512-R4nPAVTAU0B9D35/Gk3uJf/7XYbQcyohSKdvAxIRSNghFl4e71hVoGnBNQz9cWaXxO2I10KTC+3jMdvvoKw6dQ==", + "engines": { + "node": ">=6" + } + }, "node_modules/pkg-up": { "version": "3.1.0", "resolved": "https://registry.npmjs.org/pkg-up/-/pkg-up-3.1.0.tgz", @@ -14636,6 +16419,62 @@ "node": ">=8" } }, + "node_modules/pkg-up/node_modules/find-up": { + "version": "3.0.0", + "resolved": "https://registry.npmjs.org/find-up/-/find-up-3.0.0.tgz", + "integrity": "sha512-1yD6RmLI1XBfxugvORwlck6f75tYL+iR0jqwsOrOxMZyGYqUuDhJ0l4AXdO1iX/FTs9cBAMEk1gWSEx1kSbylg==", + "dependencies": { + "locate-path": "^3.0.0" + }, + "engines": { + "node": ">=6" + } + }, + "node_modules/pkg-up/node_modules/locate-path": { + "version": "3.0.0", + "resolved": "https://registry.npmjs.org/locate-path/-/locate-path-3.0.0.tgz", + "integrity": "sha512-7AO748wWnIhNqAuaty2ZWHkQHRSNfPVIsPIfwEOWO22AmaoVrWavlOcMR5nzTLNYvp36X220/maaRsrec1G65A==", + "dependencies": { + "p-locate": "^3.0.0", + "path-exists": "^3.0.0" + }, + "engines": { + "node": ">=6" + } + }, + "node_modules/pkg-up/node_modules/p-limit": { + "version": "2.3.0", + "resolved": "https://registry.npmjs.org/p-limit/-/p-limit-2.3.0.tgz", + "integrity": "sha512-//88mFWSJx8lxCzwdAABTJL2MyWB12+eIY7MDL2SqLmAkeKU9qxRvWuSyTjm3FUmpBEMuFfckAIqEaVGUDxb6w==", + "dependencies": { + "p-try": "^2.0.0" + }, + "engines": { + "node": ">=6" + }, + "funding": { + "url": 
"https://github.com/sponsors/sindresorhus" + } + }, + "node_modules/pkg-up/node_modules/p-locate": { + "version": "3.0.0", + "resolved": "https://registry.npmjs.org/p-locate/-/p-locate-3.0.0.tgz", + "integrity": "sha512-x+12w/To+4GFfgJhBEpiDcLozRJGegY+Ei7/z0tSLkMmxGZNybVMSfWj9aJn8Z5Fc7dBUNJOOVgPv2H7IwulSQ==", + "dependencies": { + "p-limit": "^2.0.0" + }, + "engines": { + "node": ">=6" + } + }, + "node_modules/pkg-up/node_modules/p-try": { + "version": "2.2.0", + "resolved": "https://registry.npmjs.org/p-try/-/p-try-2.2.0.tgz", + "integrity": "sha512-R4nPAVTAU0B9D35/Gk3uJf/7XYbQcyohSKdvAxIRSNghFl4e71hVoGnBNQz9cWaXxO2I10KTC+3jMdvvoKw6dQ==", + "engines": { + "node": ">=6" + } + }, "node_modules/pn": { "version": "1.1.0", "resolved": "https://registry.npmjs.org/pn/-/pn-1.1.0.tgz", @@ -15787,9 +17626,9 @@ "integrity": "sha512-cMlDqaLEqfSaW8Z7N5Jw+lyIW869EzT73/F5lhtY9cLGoVxSXznfgfXMO0Z5K0o0Q2TkTXq+0KFsdnSe3jDViA==" }, "node_modules/prelude-ls": { - "version": "1.1.2", - "resolved": "https://registry.npmjs.org/prelude-ls/-/prelude-ls-1.1.2.tgz", - "integrity": "sha512-ESF23V4SKG6lVSGZgYNpbsiaAkdab6ZgOxe52p7+Kid3W3u3bxR4Vfd/o21dmN7jSt0IwgZ4v5MUd26FEtXE9w==", + "version": "1.2.1", + "resolved": "https://registry.npmjs.org/prelude-ls/-/prelude-ls-1.2.1.tgz", + "integrity": "sha512-vkcDPrRZo1QZLbn5RLGPpg/WmIQ65qoWWhcGKf/b5eplkkarX0m9z8ppCat4mlOqUsWpyNuYgO3VRyrYHSzX5g==", "engines": { "node": ">= 0.8.0" } @@ -15848,9 +17687,9 @@ } }, "node_modules/pretty-format/node_modules/react-is": { - "version": "18.1.0", - "resolved": "https://registry.npmjs.org/react-is/-/react-is-18.1.0.tgz", - "integrity": "sha512-Fl7FuabXsJnV5Q1qIOQwx/sagGF18kogb4gpfcG4gjLBWO0WDiiz1ko/ExayuxE7InyQkBLkxRFG5oxY6Uu3Kg==" + "version": "18.2.0", + "resolved": "https://registry.npmjs.org/react-is/-/react-is-18.2.0.tgz", + "integrity": "sha512-xWGDIW6x921xtzPkhiULtthJHoJvBbF3q26fzloPCK0hsvxtPVelvftw3zjbHWSkR2km9Z+4uxbDDK/6Zw9B8w==" }, "node_modules/process": { "version": "0.11.10", @@ -16044,6 +17883,26 @@ 
"resolved": "https://registry.npmjs.org/querystringify/-/querystringify-2.2.0.tgz", "integrity": "sha512-FIqgj2EUvTa7R50u0rGsyTftzjYmv/a3hO345bZNrqabNqjtgiDMgmo4mkUjd+nzU5oF3dClKqFIPUKybUyqoQ==" }, + "node_modules/queue-microtask": { + "version": "1.2.3", + "resolved": "https://registry.npmjs.org/queue-microtask/-/queue-microtask-1.2.3.tgz", + "integrity": "sha512-NuaNSa6flKT5JaSYQzJok04JzTL1CA6aGhv5rfLW3PgqA+M2ChpZQnAC8h8i4ZFkBS8X5RqkDBHA7r4hej3K9A==", + "dev": true, + "funding": [ + { + "type": "github", + "url": "https://github.com/sponsors/feross" + }, + { + "type": "patreon", + "url": "https://www.patreon.com/feross" + }, + { + "type": "consulting", + "url": "https://feross.org/support" + } + ] + }, "node_modules/raf": { "version": "3.4.1", "resolved": "https://registry.npmjs.org/raf/-/raf-3.4.1.tgz", @@ -16179,6 +18038,14 @@ "@babel/highlight": "^7.8.3" } }, + "node_modules/react-dev-utils/node_modules/@nodelib/fs.stat": { + "version": "1.1.3", + "resolved": "https://registry.npmjs.org/@nodelib/fs.stat/-/fs.stat-1.1.3.tgz", + "integrity": "sha512-shAmDyaQC4H92APFoIaVDHCx5bStIocgvbwQyxPRrbUY20V1EYTbSDchWbuwlMG3V17cprZhA6+78JfB+3DTPw==", + "engines": { + "node": ">= 6" + } + }, "node_modules/react-dev-utils/node_modules/ansi-styles": { "version": "3.2.1", "resolved": "https://registry.npmjs.org/ansi-styles/-/ansi-styles-3.2.1.tgz", @@ -16190,6 +18057,48 @@ "node": ">=4" } }, + "node_modules/react-dev-utils/node_modules/array-union": { + "version": "1.0.2", + "resolved": "https://registry.npmjs.org/array-union/-/array-union-1.0.2.tgz", + "integrity": "sha512-Dxr6QJj/RdU/hCaBjOfxW+q6lyuVE6JFWIrAUpuOOhoJJoQ99cUn3igRaHVB5P9WrgFVN0FfArM3x0cueOU8ng==", + "dependencies": { + "array-uniq": "^1.0.1" + }, + "engines": { + "node": ">=0.10.0" + } + }, + "node_modules/react-dev-utils/node_modules/braces": { + "version": "2.3.2", + "resolved": "https://registry.npmjs.org/braces/-/braces-2.3.2.tgz", + "integrity": 
"sha512-aNdbnj9P8PjdXU4ybaWLK2IF3jc/EoDYbC7AazW6to3TRsfXxscC9UXOB5iDiEQrkyIbWp2SLQda4+QAa7nc3w==", + "dependencies": { + "arr-flatten": "^1.1.0", + "array-unique": "^0.3.2", + "extend-shallow": "^2.0.1", + "fill-range": "^4.0.0", + "isobject": "^3.0.1", + "repeat-element": "^1.1.2", + "snapdragon": "^0.8.1", + "snapdragon-node": "^2.0.1", + "split-string": "^3.0.2", + "to-regex": "^3.0.1" + }, + "engines": { + "node": ">=0.10.0" + } + }, + "node_modules/react-dev-utils/node_modules/braces/node_modules/extend-shallow": { + "version": "2.0.1", + "resolved": "https://registry.npmjs.org/extend-shallow/-/extend-shallow-2.0.1.tgz", + "integrity": "sha512-zCnTtlxNoAiDc3gqY2aYAWFx7XWWiasuF2K8Me5WbN8otHKTUKBwjPtNpRs/rbUZm7KxWAaNj7P1a/p52GbVug==", + "dependencies": { + "is-extendable": "^0.1.0" + }, + "engines": { + "node": ">=0.10.0" + } + }, "node_modules/react-dev-utils/node_modules/browserslist": { "version": "4.10.0", "resolved": "https://registry.npmjs.org/browserslist/-/browserslist-4.10.0.tgz", @@ -16229,11 +18138,6 @@ "node": ">=0.8.0" } }, - "node_modules/react-dev-utils/node_modules/cli-width": { - "version": "2.2.1", - "resolved": "https://registry.npmjs.org/cli-width/-/cli-width-2.2.1.tgz", - "integrity": "sha512-GRMWDxpOB6Dgk2E5Uo+3eEBvtOOlimMmpbFiKuLFnQzYDavtLFY3K5ona41jgN/WdRZtG7utuVSVTL4HbZHGkw==" - }, "node_modules/react-dev-utils/node_modules/color-convert": { "version": "1.9.3", "resolved": "https://registry.npmjs.org/color-convert/-/color-convert-1.9.3.tgz", @@ -16260,6 +18164,18 @@ "node": ">= 8" } }, + "node_modules/react-dev-utils/node_modules/dir-glob": { + "version": "2.0.0", + "resolved": "https://registry.npmjs.org/dir-glob/-/dir-glob-2.0.0.tgz", + "integrity": "sha512-37qirFDz8cA5fimp9feo43fSuRo2gHwaIn6dXL8Ber1dGwUosDrGZeCCXq57WnIqE4aQ+u3eQZzsk1yOzhdwag==", + "dependencies": { + "arrify": "^1.0.1", + "path-type": "^3.0.0" + }, + "engines": { + "node": ">=4" + } + }, "node_modules/react-dev-utils/node_modules/emojis-list": { "version": "2.1.0", 
"resolved": "https://registry.npmjs.org/emojis-list/-/emojis-list-2.1.0.tgz", @@ -16276,6 +18192,47 @@ "node": ">=8" } }, + "node_modules/react-dev-utils/node_modules/fast-glob": { + "version": "2.2.7", + "resolved": "https://registry.npmjs.org/fast-glob/-/fast-glob-2.2.7.tgz", + "integrity": "sha512-g1KuQwHOZAmOZMuBtHdxDtju+T2RT8jgCC9aANsbpdiDDTSnjgfuVsIBNKbUeJI3oKMRExcfNDtJl4OhbffMsw==", + "dependencies": { + "@mrmlnc/readdir-enhanced": "^2.2.1", + "@nodelib/fs.stat": "^1.1.2", + "glob-parent": "^3.1.0", + "is-glob": "^4.0.0", + "merge2": "^1.2.3", + "micromatch": "^3.1.10" + }, + "engines": { + "node": ">=4.0.0" + } + }, + "node_modules/react-dev-utils/node_modules/fill-range": { + "version": "4.0.0", + "resolved": "https://registry.npmjs.org/fill-range/-/fill-range-4.0.0.tgz", + "integrity": "sha512-VcpLTWqWDiTerugjj8e3+esbg+skS3M9e54UuR3iCeIDMXCLTsAH8hTSzDQU/X6/6t3eYkOKoZSef2PlU6U1XQ==", + "dependencies": { + "extend-shallow": "^2.0.1", + "is-number": "^3.0.0", + "repeat-string": "^1.6.1", + "to-regex-range": "^2.1.0" + }, + "engines": { + "node": ">=0.10.0" + } + }, + "node_modules/react-dev-utils/node_modules/fill-range/node_modules/extend-shallow": { + "version": "2.0.1", + "resolved": "https://registry.npmjs.org/extend-shallow/-/extend-shallow-2.0.1.tgz", + "integrity": "sha512-zCnTtlxNoAiDc3gqY2aYAWFx7XWWiasuF2K8Me5WbN8otHKTUKBwjPtNpRs/rbUZm7KxWAaNj7P1a/p52GbVug==", + "dependencies": { + "is-extendable": "^0.1.0" + }, + "engines": { + "node": ">=0.10.0" + } + }, "node_modules/react-dev-utils/node_modules/find-up": { "version": "4.1.0", "resolved": "https://registry.npmjs.org/find-up/-/find-up-4.1.0.tgz", @@ -16288,6 +18245,43 @@ "node": ">=8" } }, + "node_modules/react-dev-utils/node_modules/glob-parent": { + "version": "3.1.0", + "resolved": "https://registry.npmjs.org/glob-parent/-/glob-parent-3.1.0.tgz", + "integrity": "sha512-E8Ak/2+dZY6fnzlR7+ueWvhsH1SjHr4jjss4YS/h4py44jY9MhK/VFdaZJAWDz6BbL21KeteKxFSFpq8OS5gVA==", + "dependencies": { + "is-glob": 
"^3.1.0", + "path-dirname": "^1.0.0" + } + }, + "node_modules/react-dev-utils/node_modules/glob-parent/node_modules/is-glob": { + "version": "3.1.0", + "resolved": "https://registry.npmjs.org/is-glob/-/is-glob-3.1.0.tgz", + "integrity": "sha512-UFpDDrPgM6qpnFNI+rh/p3bUaq9hKLZN8bMUWzxmcnZVS3omf4IPK+BrewlnWjO1WmUsMYuSjKh4UJuV4+Lqmw==", + "dependencies": { + "is-extglob": "^2.1.0" + }, + "engines": { + "node": ">=0.10.0" + } + }, + "node_modules/react-dev-utils/node_modules/globby": { + "version": "8.0.2", + "resolved": "https://registry.npmjs.org/globby/-/globby-8.0.2.tgz", + "integrity": "sha512-yTzMmKygLp8RUpG1Ymu2VXPSJQZjNAZPD4ywgYEaG7e4tBJeUQBO8OpXrf1RCNcEs5alsoJYPAMiIHP0cmeC7w==", + "dependencies": { + "array-union": "^1.0.1", + "dir-glob": "2.0.0", + "fast-glob": "^2.0.2", + "glob": "^7.1.2", + "ignore": "^3.3.5", + "pify": "^3.0.0", + "slash": "^1.0.0" + }, + "engines": { + "node": ">=4" + } + }, "node_modules/react-dev-utils/node_modules/has-flag": { "version": "3.0.0", "resolved": "https://registry.npmjs.org/has-flag/-/has-flag-3.0.0.tgz", @@ -16296,46 +18290,31 @@ "node": ">=4" } }, - "node_modules/react-dev-utils/node_modules/inquirer": { - "version": "7.0.4", - "resolved": "https://registry.npmjs.org/inquirer/-/inquirer-7.0.4.tgz", - "integrity": "sha512-Bu5Td5+j11sCkqfqmUTiwv+tWisMtP0L7Q8WrqA2C/BbBhy1YTdFrvjjlrKq8oagA/tLQBski2Gcx/Sqyi2qSQ==", + "node_modules/react-dev-utils/node_modules/ignore": { + "version": "3.3.10", + "resolved": "https://registry.npmjs.org/ignore/-/ignore-3.3.10.tgz", + "integrity": "sha512-Pgs951kaMm5GXP7MOvxERINe3gsaVjUWFm+UZPSq9xYriQAksyhg0csnS0KXSNRD5NmNdapXEpjxG49+AKh/ug==" + }, + "node_modules/react-dev-utils/node_modules/is-number": { + "version": "3.0.0", + "resolved": "https://registry.npmjs.org/is-number/-/is-number-3.0.0.tgz", + "integrity": "sha512-4cboCqIpliH+mAvFNegjZQ4kgKc3ZUhQVr3HvWbSh5q3WH2v82ct+T2Y1hdU5Gdtorx/cLifQjqCbL7bpznLTg==", "dependencies": { - "ansi-escapes": "^4.2.1", - "chalk": "^2.4.2", - "cli-cursor": 
"^3.1.0", - "cli-width": "^2.0.0", - "external-editor": "^3.0.3", - "figures": "^3.0.0", - "lodash": "^4.17.15", - "mute-stream": "0.0.8", - "run-async": "^2.2.0", - "rxjs": "^6.5.3", - "string-width": "^4.1.0", - "strip-ansi": "^5.1.0", - "through": "^2.3.6" + "kind-of": "^3.0.2" }, "engines": { - "node": ">=6.0.0" - } - }, - "node_modules/react-dev-utils/node_modules/inquirer/node_modules/ansi-regex": { - "version": "4.1.1", - "resolved": "https://registry.npmjs.org/ansi-regex/-/ansi-regex-4.1.1.tgz", - "integrity": "sha512-ILlv4k/3f6vfQ4OoP2AGvirOktlQ98ZEL1k9FaQjxa3L1abBgbuTDAdPOpvbGncC0BTVQrl+OM8xZGK6tWXt7g==", - "engines": { - "node": ">=6" + "node": ">=0.10.0" } }, - "node_modules/react-dev-utils/node_modules/inquirer/node_modules/strip-ansi": { - "version": "5.2.0", - "resolved": "https://registry.npmjs.org/strip-ansi/-/strip-ansi-5.2.0.tgz", - "integrity": "sha512-DuRs1gKbBqsMKIZlrffwlug8MHkcnpjs5VPmL1PAh+mA30U0DTotfDZ0d2UUsXpPmPmMMJ6W773MaA3J+lbiWA==", + "node_modules/react-dev-utils/node_modules/is-number/node_modules/kind-of": { + "version": "3.2.2", + "resolved": "https://registry.npmjs.org/kind-of/-/kind-of-3.2.2.tgz", + "integrity": "sha512-NOW9QQXMoZGg/oqnVNoNTTIFEIid1627WCffUBJEdMxYApq7mNE7CpzucIPc+ZQg25Phej7IJSmX3hO+oblOtQ==", "dependencies": { - "ansi-regex": "^4.1.0" + "is-buffer": "^1.1.5" }, "engines": { - "node": ">=6" + "node": ">=0.10.0" } }, "node_modules/react-dev-utils/node_modules/json5": { @@ -16349,6 +18328,14 @@ "json5": "lib/cli.js" } }, + "node_modules/react-dev-utils/node_modules/kind-of": { + "version": "6.0.3", + "resolved": "https://registry.npmjs.org/kind-of/-/kind-of-6.0.3.tgz", + "integrity": "sha512-dcS1ul+9tmeD95T+x28/ehLgd9mENa3LsvDTtzm3vyBEO7RPptvAD+t44WVXaUjTBRcrpFeFlC8WCruUR456hw==", + "engines": { + "node": ">=0.10.0" + } + }, "node_modules/react-dev-utils/node_modules/loader-utils": { "version": "1.2.3", "resolved": "https://registry.npmjs.org/loader-utils/-/loader-utils-1.2.3.tgz", @@ -16373,11 +18360,48 @@ "node": 
">=8" } }, + "node_modules/react-dev-utils/node_modules/micromatch": { + "version": "3.1.10", + "resolved": "https://registry.npmjs.org/micromatch/-/micromatch-3.1.10.tgz", + "integrity": "sha512-MWikgl9n9M3w+bpsY3He8L+w9eF9338xRl8IAO5viDizwSzziFEyUzo2xrrloB64ADbTf8uA8vRqqttDTOmccg==", + "dependencies": { + "arr-diff": "^4.0.0", + "array-unique": "^0.3.2", + "braces": "^2.3.1", + "define-property": "^2.0.2", + "extend-shallow": "^3.0.2", + "extglob": "^2.0.4", + "fragment-cache": "^0.2.1", + "kind-of": "^6.0.2", + "nanomatch": "^1.2.9", + "object.pick": "^1.3.0", + "regex-not": "^1.0.0", + "snapdragon": "^0.8.1", + "to-regex": "^3.0.2" + }, + "engines": { + "node": ">=0.10.0" + } + }, "node_modules/react-dev-utils/node_modules/node-releases": { "version": "1.1.77", "resolved": "https://registry.npmjs.org/node-releases/-/node-releases-1.1.77.tgz", "integrity": "sha512-rB1DUFUNAN4Gn9keO2K1efO35IDK7yKHCdCaIMvFO7yUYmmZYeDjnGKle26G4rwj+LKRQpjyUUvMkPglwGCYNQ==" }, + "node_modules/react-dev-utils/node_modules/p-limit": { + "version": "2.3.0", + "resolved": "https://registry.npmjs.org/p-limit/-/p-limit-2.3.0.tgz", + "integrity": "sha512-//88mFWSJx8lxCzwdAABTJL2MyWB12+eIY7MDL2SqLmAkeKU9qxRvWuSyTjm3FUmpBEMuFfckAIqEaVGUDxb6w==", + "dependencies": { + "p-try": "^2.0.0" + }, + "engines": { + "node": ">=6" + }, + "funding": { + "url": "https://github.com/sponsors/sindresorhus" + } + }, "node_modules/react-dev-utils/node_modules/p-locate": { "version": "4.1.0", "resolved": "https://registry.npmjs.org/p-locate/-/p-locate-4.1.0.tgz", @@ -16389,6 +18413,14 @@ "node": ">=8" } }, + "node_modules/react-dev-utils/node_modules/p-try": { + "version": "2.2.0", + "resolved": "https://registry.npmjs.org/p-try/-/p-try-2.2.0.tgz", + "integrity": "sha512-R4nPAVTAU0B9D35/Gk3uJf/7XYbQcyohSKdvAxIRSNghFl4e71hVoGnBNQz9cWaXxO2I10KTC+3jMdvvoKw6dQ==", + "engines": { + "node": ">=6" + } + }, "node_modules/react-dev-utils/node_modules/path-exists": { "version": "4.0.0", "resolved": 
"https://registry.npmjs.org/path-exists/-/path-exists-4.0.0.tgz", @@ -16397,31 +18429,31 @@ "node": ">=8" } }, - "node_modules/react-dev-utils/node_modules/path-key": { - "version": "3.1.1", - "resolved": "https://registry.npmjs.org/path-key/-/path-key-3.1.1.tgz", - "integrity": "sha512-ojmeN0qd+y0jszEtoY48r0Peq5dwMEkIlCOu6Q5f41lfkswXuKtYrhgoTpLnyIcHm24Uhqx+5Tqm2InSwLhE6Q==", - "engines": { - "node": ">=8" - } - }, - "node_modules/react-dev-utils/node_modules/shebang-command": { - "version": "2.0.0", - "resolved": "https://registry.npmjs.org/shebang-command/-/shebang-command-2.0.0.tgz", - "integrity": "sha512-kHxr2zZpYtdmrN1qDjrrX/Z1rR1kG8Dx+gkpK1G4eXmvXswmcE1hTWBWYUzlraYw1/yZp6YuDY77YtvbN0dmDA==", + "node_modules/react-dev-utils/node_modules/path-type": { + "version": "3.0.0", + "resolved": "https://registry.npmjs.org/path-type/-/path-type-3.0.0.tgz", + "integrity": "sha512-T2ZUsdZFHgA3u4e5PfPbjd7HDDpxPnQb5jN0SrDsjNSuVXHJqtwTnWqG0B1jZrgmJ/7lj1EmVIByWt1gxGkWvg==", "dependencies": { - "shebang-regex": "^3.0.0" + "pify": "^3.0.0" }, "engines": { - "node": ">=8" + "node": ">=4" } }, - "node_modules/react-dev-utils/node_modules/shebang-regex": { + "node_modules/react-dev-utils/node_modules/pify": { "version": "3.0.0", - "resolved": "https://registry.npmjs.org/shebang-regex/-/shebang-regex-3.0.0.tgz", - "integrity": "sha512-7++dFhtcx3353uBaq8DDR4NuxBetBzC7ZQOhmTQInHEd6bSrXdiEyzCvG07Z44UYdLShWUyXt5M/yhz8ekcb1A==", + "resolved": "https://registry.npmjs.org/pify/-/pify-3.0.0.tgz", + "integrity": "sha512-C3FsVNH1udSEX48gGX1xfvwTWfsYWj5U+8/uK15BGzIGrKoUpghX8hWZwa/OFnakBiiVNmBvemTJR5mcy7iPcg==", "engines": { - "node": ">=8" + "node": ">=4" + } + }, + "node_modules/react-dev-utils/node_modules/slash": { + "version": "1.0.0", + "resolved": "https://registry.npmjs.org/slash/-/slash-1.0.0.tgz", + "integrity": "sha512-3TYDR7xWt4dIqV2JauJr+EJeW356RXijHeUlO+8djJ+uBXPn8/2dpzBc8yQhh583sVvc9CvFAeQVgijsH+PNNg==", + "engines": { + "node": ">=0.10.0" } }, 
"node_modules/react-dev-utils/node_modules/strip-ansi": { @@ -16446,18 +18478,16 @@ "node": ">=4" } }, - "node_modules/react-dev-utils/node_modules/which": { - "version": "2.0.2", - "resolved": "https://registry.npmjs.org/which/-/which-2.0.2.tgz", - "integrity": "sha512-BLI3Tl1TW3Pvl70l3yq3Y64i+awpwXqsGBYWkkqMtnbXgrMD+yj7rhW0kuEDxzJaYXGjEW5ogapKNMEKNMjibA==", + "node_modules/react-dev-utils/node_modules/to-regex-range": { + "version": "2.1.1", + "resolved": "https://registry.npmjs.org/to-regex-range/-/to-regex-range-2.1.1.tgz", + "integrity": "sha512-ZZWNfCjUokXXDGXFpZehJIkZqq91BcULFq/Pi7M5i4JnxXdhMKAK682z8bCW3o8Hj1wuuzoKcW3DfVzaP6VuNg==", "dependencies": { - "isexe": "^2.0.0" - }, - "bin": { - "node-which": "bin/node-which" + "is-number": "^3.0.0", + "repeat-string": "^1.6.1" }, "engines": { - "node": ">= 8" + "node": ">=0.10.0" } }, "node_modules/react-dom": { @@ -16538,9 +18568,9 @@ } }, "node_modules/react-markdown/node_modules/react-is": { - "version": "18.1.0", - "resolved": "https://registry.npmjs.org/react-is/-/react-is-18.1.0.tgz", - "integrity": "sha512-Fl7FuabXsJnV5Q1qIOQwx/sagGF18kogb4gpfcG4gjLBWO0WDiiz1ko/ExayuxE7InyQkBLkxRFG5oxY6Uu3Kg==" + "version": "18.2.0", + "resolved": "https://registry.npmjs.org/react-is/-/react-is-18.2.0.tgz", + "integrity": "sha512-xWGDIW6x921xtzPkhiULtthJHoJvBbF3q26fzloPCK0hsvxtPVelvftw3zjbHWSkR2km9Z+4uxbDDK/6Zw9B8w==" }, "node_modules/react-popper": { "version": "1.3.11", @@ -16560,20 +18590,14 @@ } }, "node_modules/react-router": { - "version": "4.3.1", - "resolved": "https://registry.npmjs.org/react-router/-/react-router-4.3.1.tgz", - "integrity": "sha512-yrvL8AogDh2X42Dt9iknk4wF4V8bWREPirFfS9gLU1huk6qK41sg7Z/1S81jjTrGHxa3B8R3J6xIkDAA6CVarg==", + "version": "6.3.0", + "resolved": "https://registry.npmjs.org/react-router/-/react-router-6.3.0.tgz", + "integrity": "sha512-7Wh1DzVQ+tlFjkeo+ujvjSqSJmkt1+8JO+T5xklPlgrh70y7ogx75ODRW0ThWhY7S+6yEDks8TYrtQe/aoboBQ==", "dependencies": { - "history": "^4.7.2", - 
"hoist-non-react-statics": "^2.5.0", - "invariant": "^2.2.4", - "loose-envify": "^1.3.1", - "path-to-regexp": "^1.7.0", - "prop-types": "^15.6.1", - "warning": "^4.0.1" + "history": "^5.2.0" }, "peerDependencies": { - "react": ">=15" + "react": ">=16.8" } }, "node_modules/react-router-dom": { @@ -16605,24 +18629,28 @@ "value-equal": "^1.0.1" } }, - "node_modules/react-router/node_modules/history": { - "version": "4.10.1", - "resolved": "https://registry.npmjs.org/history/-/history-4.10.1.tgz", - "integrity": "sha512-36nwAD620w12kuzPAsyINPWJqlNbij+hpK1k9XRloDtym8mxzGYl2c17LnV6IAGB2Dmg4tEa7G7DlawS0+qjew==", - "dependencies": { - "@babel/runtime": "^7.1.2", - "loose-envify": "^1.2.0", - "resolve-pathname": "^3.0.0", - "tiny-invariant": "^1.0.2", - "tiny-warning": "^1.0.0", - "value-equal": "^1.0.1" - } - }, - "node_modules/react-router/node_modules/hoist-non-react-statics": { + "node_modules/react-router-dom/node_modules/hoist-non-react-statics": { "version": "2.5.5", "resolved": "https://registry.npmjs.org/hoist-non-react-statics/-/hoist-non-react-statics-2.5.5.tgz", "integrity": "sha512-rqcy4pJo55FTTLWt+bU8ukscqHeE/e9KWvsOW2b/a3afxQZhwkQdT1rPPCJ0rYXdj4vNcasY8zHTH+jF/qStxw==" }, + "node_modules/react-router-dom/node_modules/react-router": { + "version": "4.3.1", + "resolved": "https://registry.npmjs.org/react-router/-/react-router-4.3.1.tgz", + "integrity": "sha512-yrvL8AogDh2X42Dt9iknk4wF4V8bWREPirFfS9gLU1huk6qK41sg7Z/1S81jjTrGHxa3B8R3J6xIkDAA6CVarg==", + "dependencies": { + "history": "^4.7.2", + "hoist-non-react-statics": "^2.5.0", + "invariant": "^2.2.4", + "loose-envify": "^1.3.1", + "path-to-regexp": "^1.7.0", + "prop-types": "^15.6.1", + "warning": "^4.0.1" + }, + "peerDependencies": { + "react": ">=15" + } + }, "node_modules/react-scripts": { "version": "3.4.4", "resolved": "https://registry.npmjs.org/react-scripts/-/react-scripts-3.4.4.tgz", @@ -16737,6 +18765,721 @@ "semver": "bin/semver" } }, + 
"node_modules/react-scripts/node_modules/@typescript-eslint/eslint-plugin": { + "version": "2.34.0", + "resolved": "https://registry.npmjs.org/@typescript-eslint/eslint-plugin/-/eslint-plugin-2.34.0.tgz", + "integrity": "sha512-4zY3Z88rEE99+CNvTbXSyovv2z9PNOVffTWD2W8QF5s2prBQtwN2zadqERcrHpcR7O/+KMI3fcTAmUUhK/iQcQ==", + "dependencies": { + "@typescript-eslint/experimental-utils": "2.34.0", + "functional-red-black-tree": "^1.0.1", + "regexpp": "^3.0.0", + "tsutils": "^3.17.1" + }, + "engines": { + "node": "^8.10.0 || ^10.13.0 || >=11.10.1" + }, + "funding": { + "type": "opencollective", + "url": "https://opencollective.com/typescript-eslint" + }, + "peerDependencies": { + "@typescript-eslint/parser": "^2.0.0", + "eslint": "^5.0.0 || ^6.0.0" + }, + "peerDependenciesMeta": { + "typescript": { + "optional": true + } + } + }, + "node_modules/react-scripts/node_modules/@typescript-eslint/parser": { + "version": "2.34.0", + "resolved": "https://registry.npmjs.org/@typescript-eslint/parser/-/parser-2.34.0.tgz", + "integrity": "sha512-03ilO0ucSD0EPTw2X4PntSIRFtDPWjrVq7C3/Z3VQHRC7+13YB55rcJI3Jt+YgeHbjUdJPcPa7b23rXCBokuyA==", + "dependencies": { + "@types/eslint-visitor-keys": "^1.0.0", + "@typescript-eslint/experimental-utils": "2.34.0", + "@typescript-eslint/typescript-estree": "2.34.0", + "eslint-visitor-keys": "^1.1.0" + }, + "engines": { + "node": "^8.10.0 || ^10.13.0 || >=11.10.1" + }, + "funding": { + "type": "opencollective", + "url": "https://opencollective.com/typescript-eslint" + }, + "peerDependencies": { + "eslint": "^5.0.0 || ^6.0.0" + }, + "peerDependenciesMeta": { + "typescript": { + "optional": true + } + } + }, + "node_modules/react-scripts/node_modules/@typescript-eslint/typescript-estree": { + "version": "2.34.0", + "resolved": "https://registry.npmjs.org/@typescript-eslint/typescript-estree/-/typescript-estree-2.34.0.tgz", + "integrity": "sha512-OMAr+nJWKdlVM9LOqCqh3pQQPwxHAN7Du8DR6dmwCrAmxtiXQnhHJ6tBNtf+cggqfo51SG/FCwnKhXCIM7hnVg==", + "dependencies": { + 
"debug": "^4.1.1", + "eslint-visitor-keys": "^1.1.0", + "glob": "^7.1.6", + "is-glob": "^4.0.1", + "lodash": "^4.17.15", + "semver": "^7.3.2", + "tsutils": "^3.17.1" + }, + "engines": { + "node": "^8.10.0 || ^10.13.0 || >=11.10.1" + }, + "funding": { + "type": "opencollective", + "url": "https://opencollective.com/typescript-eslint" + }, + "peerDependenciesMeta": { + "typescript": { + "optional": true + } + } + }, + "node_modules/react-scripts/node_modules/@typescript-eslint/typescript-estree/node_modules/semver": { + "version": "7.3.7", + "resolved": "https://registry.npmjs.org/semver/-/semver-7.3.7.tgz", + "integrity": "sha512-QlYTucUYOews+WeEujDoEGziz4K6c47V/Bd+LjSSYcA94p+DmINdf7ncaUinThfvZyu13lN9OY1XDxt8C0Tw0g==", + "dependencies": { + "lru-cache": "^6.0.0" + }, + "bin": { + "semver": "bin/semver.js" + }, + "engines": { + "node": ">=10" + } + }, + "node_modules/react-scripts/node_modules/acorn": { + "version": "7.4.1", + "resolved": "https://registry.npmjs.org/acorn/-/acorn-7.4.1.tgz", + "integrity": "sha512-nQyp0o1/mNdbTO1PO6kHkwSrmgZ0MT/jCCpNiwbUjGoRN4dlBhqJtoQuCnEOKzgTVwg0ZWiCoQy6SxMebQVh8A==", + "bin": { + "acorn": "bin/acorn" + }, + "engines": { + "node": ">=0.4.0" + } + }, + "node_modules/react-scripts/node_modules/ansi-regex": { + "version": "4.1.1", + "resolved": "https://registry.npmjs.org/ansi-regex/-/ansi-regex-4.1.1.tgz", + "integrity": "sha512-ILlv4k/3f6vfQ4OoP2AGvirOktlQ98ZEL1k9FaQjxa3L1abBgbuTDAdPOpvbGncC0BTVQrl+OM8xZGK6tWXt7g==", + "engines": { + "node": ">=6" + } + }, + "node_modules/react-scripts/node_modules/ansi-styles": { + "version": "3.2.1", + "resolved": "https://registry.npmjs.org/ansi-styles/-/ansi-styles-3.2.1.tgz", + "integrity": "sha512-VT0ZI6kZRdTh8YyJw3SMbYm/u+NqfsAxEpWO0Pf9sq8/e94WxxOpPKx9FR1FlyCtOVDNOQ+8ntlqFxiRc+r5qA==", + "dependencies": { + "color-convert": "^1.9.0" + }, + "engines": { + "node": ">=4" + } + }, + "node_modules/react-scripts/node_modules/aria-query": { + "version": "3.0.0", + "resolved": 
"https://registry.npmjs.org/aria-query/-/aria-query-3.0.0.tgz", + "integrity": "sha512-majUxHgLehQTeSA+hClx+DY09OVUqG3GtezWkF1krgLGNdlDu9l9V8DaqNMWbq4Eddc8wsyDA0hpDUtnYxQEXw==", + "dependencies": { + "ast-types-flow": "0.0.7", + "commander": "^2.11.0" + } + }, + "node_modules/react-scripts/node_modules/chalk": { + "version": "2.4.2", + "resolved": "https://registry.npmjs.org/chalk/-/chalk-2.4.2.tgz", + "integrity": "sha512-Mti+f9lpJNcwF4tWV8/OrTTtF1gZi+f8FqlyAdouralcFWFQWF2+NgCHShjkCb+IFBLq9buZwE1xckQU4peSuQ==", + "dependencies": { + "ansi-styles": "^3.2.1", + "escape-string-regexp": "^1.0.5", + "supports-color": "^5.3.0" + }, + "engines": { + "node": ">=4" + } + }, + "node_modules/react-scripts/node_modules/color-convert": { + "version": "1.9.3", + "resolved": "https://registry.npmjs.org/color-convert/-/color-convert-1.9.3.tgz", + "integrity": "sha512-QfAUtd+vFdAtFQcC8CCyYt1fYWxSqAiK2cSD6zDB8N3cpsEBAvRxp9zOGg6G/SHHJYAT88/az/IuDGALsNVbGg==", + "dependencies": { + "color-name": "1.1.3" + } + }, + "node_modules/react-scripts/node_modules/color-name": { + "version": "1.1.3", + "resolved": "https://registry.npmjs.org/color-name/-/color-name-1.1.3.tgz", + "integrity": "sha512-72fSenhMw2HZMTVHeCA9KCmpEIbzWiQsjN+BHcBbS9vr1mtt+vJjPdksIBNUmKAW8TFUDPJK5SUU3QhE9NEXDw==" + }, + "node_modules/react-scripts/node_modules/commander": { + "version": "2.20.3", + "resolved": "https://registry.npmjs.org/commander/-/commander-2.20.3.tgz", + "integrity": "sha512-GpVkmM8vF2vQUkj2LvZmD35JxeJOLCwJ9cUkugyk2nuhbv3+mJvpLYYt+0+USMxE+oj+ey/lJEnhZw75x/OMcQ==" + }, + "node_modules/react-scripts/node_modules/cross-spawn": { + "version": "6.0.5", + "resolved": "https://registry.npmjs.org/cross-spawn/-/cross-spawn-6.0.5.tgz", + "integrity": "sha512-eTVLrBSt7fjbDygz805pMnstIs2VTBNkRm0qxZd+M7A5XDdxVRWO5MxGBXZhjY4cqLYLdtrGqRf8mBPmzwSpWQ==", + "dependencies": { + "nice-try": "^1.0.4", + "path-key": "^2.0.1", + "semver": "^5.5.0", + "shebang-command": "^1.2.0", + "which": "^1.2.9" + }, + "engines": { + 
"node": ">=4.8" + } + }, + "node_modules/react-scripts/node_modules/cross-spawn/node_modules/semver": { + "version": "5.7.1", + "resolved": "https://registry.npmjs.org/semver/-/semver-5.7.1.tgz", + "integrity": "sha512-sauaDf/PZdVgrLTNYHRtpXa1iRiKcaebiKQ1BJdpQlWH2lCvexQdX55snPFyK7QzpudqbCI0qXFfOasHdyNDGQ==", + "bin": { + "semver": "bin/semver" + } + }, + "node_modules/react-scripts/node_modules/emoji-regex": { + "version": "7.0.3", + "resolved": "https://registry.npmjs.org/emoji-regex/-/emoji-regex-7.0.3.tgz", + "integrity": "sha512-CwBLREIQ7LvYFB0WyRvwhq5N5qPhc6PMjD6bYggFlI5YyDgl+0vxq5VHbMOFqLg7hfWzmu8T5Z1QofhmTIhItA==" + }, + "node_modules/react-scripts/node_modules/escape-string-regexp": { + "version": "1.0.5", + "resolved": "https://registry.npmjs.org/escape-string-regexp/-/escape-string-regexp-1.0.5.tgz", + "integrity": "sha512-vbRorB5FUQWvla16U8R/qgaFIya2qGzwDrNmCZuYKrbdSUMG6I1ZCGQRefkRVhuOkIGVne7BQ35DSfo1qvJqFg==", + "engines": { + "node": ">=0.8.0" + } + }, + "node_modules/react-scripts/node_modules/eslint": { + "version": "6.8.0", + "resolved": "https://registry.npmjs.org/eslint/-/eslint-6.8.0.tgz", + "integrity": "sha512-K+Iayyo2LtyYhDSYwz5D5QdWw0hCacNzyq1Y821Xna2xSJj7cijoLLYmLxTQgcgZ9mC61nryMy9S7GRbYpI5Ig==", + "dependencies": { + "@babel/code-frame": "^7.0.0", + "ajv": "^6.10.0", + "chalk": "^2.1.0", + "cross-spawn": "^6.0.5", + "debug": "^4.0.1", + "doctrine": "^3.0.0", + "eslint-scope": "^5.0.0", + "eslint-utils": "^1.4.3", + "eslint-visitor-keys": "^1.1.0", + "espree": "^6.1.2", + "esquery": "^1.0.1", + "esutils": "^2.0.2", + "file-entry-cache": "^5.0.1", + "functional-red-black-tree": "^1.0.1", + "glob-parent": "^5.0.0", + "globals": "^12.1.0", + "ignore": "^4.0.6", + "import-fresh": "^3.0.0", + "imurmurhash": "^0.1.4", + "inquirer": "^7.0.0", + "is-glob": "^4.0.0", + "js-yaml": "^3.13.1", + "json-stable-stringify-without-jsonify": "^1.0.1", + "levn": "^0.3.0", + "lodash": "^4.17.14", + "minimatch": "^3.0.4", + "mkdirp": "^0.5.1", + 
"natural-compare": "^1.4.0", + "optionator": "^0.8.3", + "progress": "^2.0.0", + "regexpp": "^2.0.1", + "semver": "^6.1.2", + "strip-ansi": "^5.2.0", + "strip-json-comments": "^3.0.1", + "table": "^5.2.3", + "text-table": "^0.2.0", + "v8-compile-cache": "^2.0.3" + }, + "bin": { + "eslint": "bin/eslint.js" + }, + "engines": { + "node": "^8.10.0 || ^10.13.0 || >=11.10.1" + }, + "funding": { + "url": "https://opencollective.com/eslint" + } + }, + "node_modules/react-scripts/node_modules/eslint-config-react-app": { + "version": "5.2.1", + "resolved": "https://registry.npmjs.org/eslint-config-react-app/-/eslint-config-react-app-5.2.1.tgz", + "integrity": "sha512-pGIZ8t0mFLcV+6ZirRgYK6RVqUIKRIi9MmgzUEmrIknsn3AdO0I32asO86dJgloHq+9ZPl8UIg8mYrvgP5u2wQ==", + "dependencies": { + "confusing-browser-globals": "^1.0.9" + }, + "peerDependencies": { + "@typescript-eslint/eslint-plugin": "2.x", + "@typescript-eslint/parser": "2.x", + "babel-eslint": "10.x", + "eslint": "6.x", + "eslint-plugin-flowtype": "3.x || 4.x", + "eslint-plugin-import": "2.x", + "eslint-plugin-jsx-a11y": "6.x", + "eslint-plugin-react": "7.x", + "eslint-plugin-react-hooks": "1.x || 2.x" + } + }, + "node_modules/react-scripts/node_modules/eslint-loader": { + "version": "3.0.3", + "resolved": "https://registry.npmjs.org/eslint-loader/-/eslint-loader-3.0.3.tgz", + "integrity": "sha512-+YRqB95PnNvxNp1HEjQmvf9KNvCin5HXYYseOXVC2U0KEcw4IkQ2IQEBG46j7+gW39bMzeu0GsUhVbBY3Votpw==", + "deprecated": "This loader has been deprecated. 
Please use eslint-webpack-plugin", + "dependencies": { + "fs-extra": "^8.1.0", + "loader-fs-cache": "^1.0.2", + "loader-utils": "^1.2.3", + "object-hash": "^2.0.1", + "schema-utils": "^2.6.1" + }, + "engines": { + "node": ">= 8.9.0" + }, + "peerDependencies": { + "eslint": "^5.0.0 || ^6.0.0", + "webpack": "^4.0.0 || ^5.0.0" + } + }, + "node_modules/react-scripts/node_modules/eslint-plugin-import": { + "version": "2.20.1", + "resolved": "https://registry.npmjs.org/eslint-plugin-import/-/eslint-plugin-import-2.20.1.tgz", + "integrity": "sha512-qQHgFOTjguR+LnYRoToeZWT62XM55MBVXObHM6SKFd1VzDcX/vqT1kAz8ssqigh5eMj8qXcRoXXGZpPP6RfdCw==", + "dependencies": { + "array-includes": "^3.0.3", + "array.prototype.flat": "^1.2.1", + "contains-path": "^0.1.0", + "debug": "^2.6.9", + "doctrine": "1.5.0", + "eslint-import-resolver-node": "^0.3.2", + "eslint-module-utils": "^2.4.1", + "has": "^1.0.3", + "minimatch": "^3.0.4", + "object.values": "^1.1.0", + "read-pkg-up": "^2.0.0", + "resolve": "^1.12.0" + }, + "engines": { + "node": ">=4" + }, + "peerDependencies": { + "eslint": "2.x - 6.x" + } + }, + "node_modules/react-scripts/node_modules/eslint-plugin-import/node_modules/debug": { + "version": "2.6.9", + "resolved": "https://registry.npmjs.org/debug/-/debug-2.6.9.tgz", + "integrity": "sha512-bC7ElrdJaJnPbAP+1EotYvqZsb3ecl5wi6Bfi6BJTUcNowp6cvspg0jXznRTKDjm/E7AdgFBVeAPVMNcKGsHMA==", + "dependencies": { + "ms": "2.0.0" + } + }, + "node_modules/react-scripts/node_modules/eslint-plugin-import/node_modules/doctrine": { + "version": "1.5.0", + "resolved": "https://registry.npmjs.org/doctrine/-/doctrine-1.5.0.tgz", + "integrity": "sha512-lsGyRuYr4/PIB0txi+Fy2xOMI2dGaTguCaotzFGkVZuKR5usKfcRWIFKNM3QNrU7hh/+w2bwTW+ZeXPK5l8uVg==", + "dependencies": { + "esutils": "^2.0.2", + "isarray": "^1.0.0" + }, + "engines": { + "node": ">=0.10.0" + } + }, + "node_modules/react-scripts/node_modules/eslint-plugin-jsx-a11y": { + "version": "6.2.3", + "resolved": 
"https://registry.npmjs.org/eslint-plugin-jsx-a11y/-/eslint-plugin-jsx-a11y-6.2.3.tgz", + "integrity": "sha512-CawzfGt9w83tyuVekn0GDPU9ytYtxyxyFZ3aSWROmnRRFQFT2BiPJd7jvRdzNDi6oLWaS2asMeYSNMjWTV4eNg==", + "dependencies": { + "@babel/runtime": "^7.4.5", + "aria-query": "^3.0.0", + "array-includes": "^3.0.3", + "ast-types-flow": "^0.0.7", + "axobject-query": "^2.0.2", + "damerau-levenshtein": "^1.0.4", + "emoji-regex": "^7.0.2", + "has": "^1.0.3", + "jsx-ast-utils": "^2.2.1" + }, + "engines": { + "node": ">=4.0" + }, + "peerDependencies": { + "eslint": "^3 || ^4 || ^5 || ^6" + } + }, + "node_modules/react-scripts/node_modules/eslint-plugin-react": { + "version": "7.19.0", + "resolved": "https://registry.npmjs.org/eslint-plugin-react/-/eslint-plugin-react-7.19.0.tgz", + "integrity": "sha512-SPT8j72CGuAP+JFbT0sJHOB80TX/pu44gQ4vXH/cq+hQTiY2PuZ6IHkqXJV6x1b28GDdo1lbInjKUrrdUf0LOQ==", + "dependencies": { + "array-includes": "^3.1.1", + "doctrine": "^2.1.0", + "has": "^1.0.3", + "jsx-ast-utils": "^2.2.3", + "object.entries": "^1.1.1", + "object.fromentries": "^2.0.2", + "object.values": "^1.1.1", + "prop-types": "^15.7.2", + "resolve": "^1.15.1", + "semver": "^6.3.0", + "string.prototype.matchall": "^4.0.2", + "xregexp": "^4.3.0" + }, + "engines": { + "node": ">=4" + }, + "peerDependencies": { + "eslint": "^3.0.0 || ^4.0.0 || ^5.0.0 || ^6.0.0" + } + }, + "node_modules/react-scripts/node_modules/eslint-plugin-react-hooks": { + "version": "1.7.0", + "resolved": "https://registry.npmjs.org/eslint-plugin-react-hooks/-/eslint-plugin-react-hooks-1.7.0.tgz", + "integrity": "sha512-iXTCFcOmlWvw4+TOE8CLWj6yX1GwzT0Y6cUfHHZqWnSk144VmVIRcVGtUAzrLES7C798lmvnt02C7rxaOX1HNA==", + "engines": { + "node": ">=7" + }, + "peerDependencies": { + "eslint": "^3.0.0 || ^4.0.0 || ^5.0.0 || ^6.0.0" + } + }, + "node_modules/react-scripts/node_modules/eslint-plugin-react/node_modules/doctrine": { + "version": "2.1.0", + "resolved": "https://registry.npmjs.org/doctrine/-/doctrine-2.1.0.tgz", + 
"integrity": "sha512-35mSku4ZXK0vfCuHEDAwt55dg2jNajHZ1odvF+8SSr82EsZY4QmXfuWso8oEd8zRhVObSN18aM0CjSdoBX7zIw==", + "dependencies": { + "esutils": "^2.0.2" + }, + "engines": { + "node": ">=0.10.0" + } + }, + "node_modules/react-scripts/node_modules/eslint-plugin-react/node_modules/resolve": { + "version": "1.22.0", + "resolved": "https://registry.npmjs.org/resolve/-/resolve-1.22.0.tgz", + "integrity": "sha512-Hhtrw0nLeSrFQ7phPp4OOcVjLPIeMnRlr5mcnVuMe7M/7eBn98A3hmFRLoFo3DLZkivSYwhRUJTyPyWAk56WLw==", + "dependencies": { + "is-core-module": "^2.8.1", + "path-parse": "^1.0.7", + "supports-preserve-symlinks-flag": "^1.0.0" + }, + "bin": { + "resolve": "bin/resolve" + }, + "funding": { + "url": "https://github.com/sponsors/ljharb" + } + }, + "node_modules/react-scripts/node_modules/eslint-utils": { + "version": "1.4.3", + "resolved": "https://registry.npmjs.org/eslint-utils/-/eslint-utils-1.4.3.tgz", + "integrity": "sha512-fbBN5W2xdY45KulGXmLHZ3c3FHfVYmKg0IrAKGOkT/464PQsx2UeIzfz1RmEci+KLm1bBaAzZAh8+/E+XAeZ8Q==", + "dependencies": { + "eslint-visitor-keys": "^1.1.0" + }, + "engines": { + "node": ">=6" + } + }, + "node_modules/react-scripts/node_modules/eslint-visitor-keys": { + "version": "1.3.0", + "resolved": "https://registry.npmjs.org/eslint-visitor-keys/-/eslint-visitor-keys-1.3.0.tgz", + "integrity": "sha512-6J72N8UNa462wa/KFODt/PJ3IU60SDpC3QXC1Hjc1BXXpfL2C9R5+AU7jhe0F6GREqVMh4Juu+NY7xn+6dipUQ==", + "engines": { + "node": ">=4" + } + }, + "node_modules/react-scripts/node_modules/eslint/node_modules/regexpp": { + "version": "2.0.1", + "resolved": "https://registry.npmjs.org/regexpp/-/regexpp-2.0.1.tgz", + "integrity": "sha512-lv0M6+TkDVniA3aD1Eg0DVpfU/booSu7Eev3TDO/mZKHBfVjgCGTV4t4buppESEYDtkArYFOxTJWv6S5C+iaNw==", + "engines": { + "node": ">=6.5.0" + } + }, + "node_modules/react-scripts/node_modules/espree": { + "version": "6.2.1", + "resolved": "https://registry.npmjs.org/espree/-/espree-6.2.1.tgz", + "integrity": 
"sha512-ysCxRQY3WaXJz9tdbWOwuWr5Y/XrPTGX9Kiz3yoUXwW0VZ4w30HTkQLaGx/+ttFjF8i+ACbArnB4ce68a9m5hw==", + "dependencies": { + "acorn": "^7.1.1", + "acorn-jsx": "^5.2.0", + "eslint-visitor-keys": "^1.1.0" + }, + "engines": { + "node": ">=6.0.0" + } + }, + "node_modules/react-scripts/node_modules/file-entry-cache": { + "version": "5.0.1", + "resolved": "https://registry.npmjs.org/file-entry-cache/-/file-entry-cache-5.0.1.tgz", + "integrity": "sha512-bCg29ictuBaKUwwArK4ouCaqDgLZcysCFLmM/Yn/FDoqndh/9vNuQfXRDvTuXKLxfD/JtZQGKFT8MGcJBK644g==", + "dependencies": { + "flat-cache": "^2.0.1" + }, + "engines": { + "node": ">=4" + } + }, + "node_modules/react-scripts/node_modules/flat-cache": { + "version": "2.0.1", + "resolved": "https://registry.npmjs.org/flat-cache/-/flat-cache-2.0.1.tgz", + "integrity": "sha512-LoQe6yDuUMDzQAEH8sgmh4Md6oZnc/7PjtwjNFSzveXqSHt6ka9fPBuso7IGf9Rz4uqnSnWiFH2B/zj24a5ReA==", + "dependencies": { + "flatted": "^2.0.0", + "rimraf": "2.6.3", + "write": "1.0.3" + }, + "engines": { + "node": ">=4" + } + }, + "node_modules/react-scripts/node_modules/flatted": { + "version": "2.0.2", + "resolved": "https://registry.npmjs.org/flatted/-/flatted-2.0.2.tgz", + "integrity": "sha512-r5wGx7YeOwNWNlCA0wQ86zKyDLMQr+/RB8xy74M4hTphfmjlijTSSXGuH8rnvKZnfT9i+75zmd8jcKdMR4O6jA==" + }, + "node_modules/react-scripts/node_modules/glob-parent": { + "version": "5.1.2", + "resolved": "https://registry.npmjs.org/glob-parent/-/glob-parent-5.1.2.tgz", + "integrity": "sha512-AOIgSQCepiJYwP3ARnGx+5VnTu2HBYdzbGP45eLw1vr3zB3vZLeyed1sC9hnbcOc9/SrMyM5RPQrkGz4aS9Zow==", + "dependencies": { + "is-glob": "^4.0.1" + }, + "engines": { + "node": ">= 6" + } + }, + "node_modules/react-scripts/node_modules/globals": { + "version": "12.4.0", + "resolved": "https://registry.npmjs.org/globals/-/globals-12.4.0.tgz", + "integrity": "sha512-BWICuzzDvDoH54NHKCseDanAhE3CeDorgDL5MT6LMXXj2WCnd9UC2szdk4AWLfjdgNBCXLUanXYcpBBKOSWGwg==", + "dependencies": { + "type-fest": "^0.8.1" + }, + "engines": { + "node": 
">=8" + }, + "funding": { + "url": "https://github.com/sponsors/sindresorhus" + } + }, + "node_modules/react-scripts/node_modules/has-flag": { + "version": "3.0.0", + "resolved": "https://registry.npmjs.org/has-flag/-/has-flag-3.0.0.tgz", + "integrity": "sha512-sKJf1+ceQBr4SMkvQnBDNDtf4TXpVhVGateu0t918bl30FnbE2m4vNLX+VWe/dpjlb+HugGYzW7uQXH98HPEYw==", + "engines": { + "node": ">=4" + } + }, + "node_modules/react-scripts/node_modules/ignore": { + "version": "4.0.6", + "resolved": "https://registry.npmjs.org/ignore/-/ignore-4.0.6.tgz", + "integrity": "sha512-cyFDKrqc/YdcWFniJhzI42+AzS+gNwmUzOSFcRCQYwySuBBBy/KjuxWLZ/FHEH6Moq1NizMOBWyTcv8O4OZIMg==", + "engines": { + "node": ">= 4" + } + }, + "node_modules/react-scripts/node_modules/isarray": { + "version": "1.0.0", + "resolved": "https://registry.npmjs.org/isarray/-/isarray-1.0.0.tgz", + "integrity": "sha512-VLghIWNM6ELQzo7zwmcg0NmTVyWKYjvIeM83yjp0wRDTmUnrM678fQbcKBo6n2CJEF0szoG//ytg+TKla89ALQ==" + }, + "node_modules/react-scripts/node_modules/jsx-ast-utils": { + "version": "2.4.1", + "resolved": "https://registry.npmjs.org/jsx-ast-utils/-/jsx-ast-utils-2.4.1.tgz", + "integrity": "sha512-z1xSldJ6imESSzOjd3NNkieVJKRlKYSOtMG8SFyCj2FIrvSaSuli/WjpBkEzCBoR9bYYYFgqJw61Xhu7Lcgk+w==", + "dependencies": { + "array-includes": "^3.1.1", + "object.assign": "^4.1.0" + }, + "engines": { + "node": ">=4.0" + } + }, + "node_modules/react-scripts/node_modules/levn": { + "version": "0.3.0", + "resolved": "https://registry.npmjs.org/levn/-/levn-0.3.0.tgz", + "integrity": "sha512-0OO4y2iOHix2W6ujICbKIaEQXvFQHue65vUG3pb5EUomzPI90z9hsA1VsO/dbIIpC53J8gxM9Q4Oho0jrCM/yA==", + "dependencies": { + "prelude-ls": "~1.1.2", + "type-check": "~0.3.2" + }, + "engines": { + "node": ">= 0.8.0" + } + }, + "node_modules/react-scripts/node_modules/load-json-file": { + "version": "2.0.0", + "resolved": "https://registry.npmjs.org/load-json-file/-/load-json-file-2.0.0.tgz", + "integrity": 
"sha512-3p6ZOGNbiX4CdvEd1VcE6yi78UrGNpjHO33noGwHCnT/o2fyllJDepsm8+mFFv/DvtwFHht5HIHSyOy5a+ChVQ==", + "dependencies": { + "graceful-fs": "^4.1.2", + "parse-json": "^2.2.0", + "pify": "^2.0.0", + "strip-bom": "^3.0.0" + }, + "engines": { + "node": ">=4" + } + }, + "node_modules/react-scripts/node_modules/ms": { + "version": "2.0.0", + "resolved": "https://registry.npmjs.org/ms/-/ms-2.0.0.tgz", + "integrity": "sha512-Tpp60P6IUJDTuOq/5Z8cdskzJujfwqfOTkrwIwj7IRISpnkJnT6SyJ4PCPnGMoFjC9ddhal5KVIYtAt97ix05A==" + }, + "node_modules/react-scripts/node_modules/optionator": { + "version": "0.8.3", + "resolved": "https://registry.npmjs.org/optionator/-/optionator-0.8.3.tgz", + "integrity": "sha512-+IW9pACdk3XWmmTXG8m3upGUJst5XRGzxMRjXzAuJ1XnIFNvfhjjIuYkDvysnPQ7qzqVzLt78BCruntqRhWQbA==", + "dependencies": { + "deep-is": "~0.1.3", + "fast-levenshtein": "~2.0.6", + "levn": "~0.3.0", + "prelude-ls": "~1.1.2", + "type-check": "~0.3.2", + "word-wrap": "~1.2.3" + }, + "engines": { + "node": ">= 0.8.0" + } + }, + "node_modules/react-scripts/node_modules/parse-json": { + "version": "2.2.0", + "resolved": "https://registry.npmjs.org/parse-json/-/parse-json-2.2.0.tgz", + "integrity": "sha512-QR/GGaKCkhwk1ePQNYDRKYZ3mwU9ypsKhB0XyFnLQdomyEqk3e8wpW3V5Jp88zbxK4n5ST1nqo+g9juTpownhQ==", + "dependencies": { + "error-ex": "^1.2.0" + }, + "engines": { + "node": ">=0.10.0" + } + }, + "node_modules/react-scripts/node_modules/path-key": { + "version": "2.0.1", + "resolved": "https://registry.npmjs.org/path-key/-/path-key-2.0.1.tgz", + "integrity": "sha512-fEHGKCSmUSDPv4uoj8AlD+joPlq3peND+HRYyxFz4KPw4z926S/b8rIuFs2FYJg3BwsxJf6A9/3eIdLaYC+9Dw==", + "engines": { + "node": ">=4" + } + }, + "node_modules/react-scripts/node_modules/path-type": { + "version": "2.0.0", + "resolved": "https://registry.npmjs.org/path-type/-/path-type-2.0.0.tgz", + "integrity": "sha512-dUnb5dXUf+kzhC/W/F4e5/SkluXIFf5VUHolW1Eg1irn1hGWjPGdsRcvYJ1nD6lhk8Ir7VM0bHJKsYTx8Jx9OQ==", + "dependencies": { + "pify": "^2.0.0" + }, + 
"engines": { + "node": ">=4" + } + }, + "node_modules/react-scripts/node_modules/pify": { + "version": "2.3.0", + "resolved": "https://registry.npmjs.org/pify/-/pify-2.3.0.tgz", + "integrity": "sha512-udgsAY+fTnvv7kI7aaxbqwWNb0AHiB0qBO89PZKPkoTmGOgdbrHDKD+0B2X4uTfJ/FT1R09r9gTsjUjNJotuog==", + "engines": { + "node": ">=0.10.0" + } + }, + "node_modules/react-scripts/node_modules/prelude-ls": { + "version": "1.1.2", + "resolved": "https://registry.npmjs.org/prelude-ls/-/prelude-ls-1.1.2.tgz", + "integrity": "sha512-ESF23V4SKG6lVSGZgYNpbsiaAkdab6ZgOxe52p7+Kid3W3u3bxR4Vfd/o21dmN7jSt0IwgZ4v5MUd26FEtXE9w==", + "engines": { + "node": ">= 0.8.0" + } + }, + "node_modules/react-scripts/node_modules/read-pkg": { + "version": "2.0.0", + "resolved": "https://registry.npmjs.org/read-pkg/-/read-pkg-2.0.0.tgz", + "integrity": "sha512-eFIBOPW7FGjzBuk3hdXEuNSiTZS/xEMlH49HxMyzb0hyPfu4EhVjT2DH32K1hSSmVq4sebAWnZuuY5auISUTGA==", + "dependencies": { + "load-json-file": "^2.0.0", + "normalize-package-data": "^2.3.2", + "path-type": "^2.0.0" + }, + "engines": { + "node": ">=4" + } + }, + "node_modules/react-scripts/node_modules/read-pkg-up": { + "version": "2.0.0", + "resolved": "https://registry.npmjs.org/read-pkg-up/-/read-pkg-up-2.0.0.tgz", + "integrity": "sha512-1orxQfbWGUiTn9XsPlChs6rLie/AV9jwZTGmu2NZw/CUDJQchXJFYE0Fq5j7+n558T1JhDWLdhyd1Zj+wLY//w==", + "dependencies": { + "find-up": "^2.0.0", + "read-pkg": "^2.0.0" + }, + "engines": { + "node": ">=4" + } + }, + "node_modules/react-scripts/node_modules/resolve": { + "version": "1.15.0", + "resolved": "https://registry.npmjs.org/resolve/-/resolve-1.15.0.tgz", + "integrity": "sha512-+hTmAldEGE80U2wJJDC1lebb5jWqvTYAfm3YZ1ckk1gBr0MnCqUKlwK1e+anaFljIl+F5tR5IoZcm4ZDA1zMQw==", + "dependencies": { + "path-parse": "^1.0.6" + }, + "funding": { + "url": "https://github.com/sponsors/ljharb" + } + }, + "node_modules/react-scripts/node_modules/rimraf": { + "version": "2.6.3", + "resolved": "https://registry.npmjs.org/rimraf/-/rimraf-2.6.3.tgz", + 
"integrity": "sha512-mwqeW5XsA2qAejG46gYdENaxXjx9onRNCfn7L0duuP4hCuTIi/QO7PDK07KJfp1d+izWPrzEJDcSqBa0OZQriA==", + "dependencies": { + "glob": "^7.1.3" + }, + "bin": { + "rimraf": "bin.js" + } + }, + "node_modules/react-scripts/node_modules/semver": { + "version": "6.3.0", + "resolved": "https://registry.npmjs.org/semver/-/semver-6.3.0.tgz", + "integrity": "sha512-b39TBaTSfV6yBrapU89p5fKekE2m/NwnDocOVruQFS1/veMgdzuPcnOM34M6CwxW8jH/lxEa5rBoDeUwu5HHTw==", + "bin": { + "semver": "bin/semver.js" + } + }, + "node_modules/react-scripts/node_modules/shebang-command": { + "version": "1.2.0", + "resolved": "https://registry.npmjs.org/shebang-command/-/shebang-command-1.2.0.tgz", + "integrity": "sha512-EV3L1+UQWGor21OmnvojK36mhg+TyIKDh3iFBKBohr5xeXIhNBcx8oWdgkTEEQ+BEFFYdLRuqMfd5L84N1V5Vg==", + "dependencies": { + "shebang-regex": "^1.0.0" + }, + "engines": { + "node": ">=0.10.0" + } + }, + "node_modules/react-scripts/node_modules/shebang-regex": { + "version": "1.0.0", + "resolved": "https://registry.npmjs.org/shebang-regex/-/shebang-regex-1.0.0.tgz", + "integrity": "sha512-wpoSFAxys6b2a2wHZ1XpDSgD7N9iVjg29Ph9uV/uaP9Ex/KXlkTZTeddxDPSYQpgvzKLGJke2UU0AzoGCjNIvQ==", + "engines": { + "node": ">=0.10.0" + } + }, "node_modules/react-scripts/node_modules/source-map": { "version": "0.5.7", "resolved": "https://registry.npmjs.org/source-map/-/source-map-0.5.7.tgz", @@ -16745,6 +19488,58 @@ "node": ">=0.10.0" } }, + "node_modules/react-scripts/node_modules/strip-ansi": { + "version": "5.2.0", + "resolved": "https://registry.npmjs.org/strip-ansi/-/strip-ansi-5.2.0.tgz", + "integrity": "sha512-DuRs1gKbBqsMKIZlrffwlug8MHkcnpjs5VPmL1PAh+mA30U0DTotfDZ0d2UUsXpPmPmMMJ6W773MaA3J+lbiWA==", + "dependencies": { + "ansi-regex": "^4.1.0" + }, + "engines": { + "node": ">=6" + } + }, + "node_modules/react-scripts/node_modules/supports-color": { + "version": "5.5.0", + "resolved": "https://registry.npmjs.org/supports-color/-/supports-color-5.5.0.tgz", + "integrity": 
"sha512-QjVjwdXIt408MIiAqCX4oUKsgU2EqAGzs2Ppkm4aQYbjm+ZEWEcW4SfFNTr4uMNZma0ey4f5lgLrkB0aX0QMow==", + "dependencies": { + "has-flag": "^3.0.0" + }, + "engines": { + "node": ">=4" + } + }, + "node_modules/react-scripts/node_modules/type-check": { + "version": "0.3.2", + "resolved": "https://registry.npmjs.org/type-check/-/type-check-0.3.2.tgz", + "integrity": "sha512-ZCmOJdvOWDBYJlzAoFkC+Q0+bUyEOS1ltgp1MGU03fqHG+dbi9tBFU2Rd9QKiDZFAYrhPh2JUf7rZRIuHRKtOg==", + "dependencies": { + "prelude-ls": "~1.1.2" + }, + "engines": { + "node": ">= 0.8.0" + } + }, + "node_modules/react-scripts/node_modules/type-fest": { + "version": "0.8.1", + "resolved": "https://registry.npmjs.org/type-fest/-/type-fest-0.8.1.tgz", + "integrity": "sha512-4dbzIzqvjtgiM5rw1k5rEHtBANKmdudhGyBEajN01fEyhaAIhsoKNy6y7+IN93IfpFtwY9iqi7kD+xwKhQsNJA==", + "engines": { + "node": ">=8" + } + }, + "node_modules/react-scripts/node_modules/which": { + "version": "1.3.1", + "resolved": "https://registry.npmjs.org/which/-/which-1.3.1.tgz", + "integrity": "sha512-HxJdYWq1MTIQbJ3nw0cqssHoTNU267KlrDuGZ1WYlxDStUtKUhOaJmh112/TZmHxxUfuJqPXSOm7tDyas0OSIQ==", + "dependencies": { + "isexe": "^2.0.0" + }, + "bin": { + "which": "bin/which" + } + }, "node_modules/react-sidebar": { "version": "3.0.2", "resolved": "https://registry.npmjs.org/react-sidebar/-/react-sidebar-3.0.2.tgz", @@ -16831,100 +19626,103 @@ } }, "node_modules/read-pkg": { - "version": "2.0.0", - "resolved": "https://registry.npmjs.org/read-pkg/-/read-pkg-2.0.0.tgz", - "integrity": "sha512-eFIBOPW7FGjzBuk3hdXEuNSiTZS/xEMlH49HxMyzb0hyPfu4EhVjT2DH32K1hSSmVq4sebAWnZuuY5auISUTGA==", + "version": "3.0.0", + "resolved": "https://registry.npmjs.org/read-pkg/-/read-pkg-3.0.0.tgz", + "integrity": "sha512-BLq/cCO9two+lBgiTYNqD6GdtK8s4NpaWrl6/rCO9w0TUS8oJl7cmToOZfRYllKTISY6nt1U7jQ53brmKqY6BA==", "dependencies": { - "load-json-file": "^2.0.0", + "load-json-file": "^4.0.0", "normalize-package-data": "^2.3.2", - "path-type": "^2.0.0" + "path-type": "^3.0.0" }, "engines": { 
"node": ">=4" } }, "node_modules/read-pkg-up": { - "version": "2.0.0", - "resolved": "https://registry.npmjs.org/read-pkg-up/-/read-pkg-up-2.0.0.tgz", - "integrity": "sha512-1orxQfbWGUiTn9XsPlChs6rLie/AV9jwZTGmu2NZw/CUDJQchXJFYE0Fq5j7+n558T1JhDWLdhyd1Zj+wLY//w==", + "version": "4.0.0", + "resolved": "https://registry.npmjs.org/read-pkg-up/-/read-pkg-up-4.0.0.tgz", + "integrity": "sha512-6etQSH7nJGsK0RbG/2TeDzZFa8shjQ1um+SwQQ5cwKy0dhSXdOncEhb1CPpvQG4h7FyOV6EB6YlV0yJvZQNAkA==", "dependencies": { - "find-up": "^2.0.0", - "read-pkg": "^2.0.0" + "find-up": "^3.0.0", + "read-pkg": "^3.0.0" }, "engines": { - "node": ">=4" + "node": ">=6" } }, "node_modules/read-pkg-up/node_modules/find-up": { - "version": "2.1.0", - "resolved": "https://registry.npmjs.org/find-up/-/find-up-2.1.0.tgz", - "integrity": "sha512-NWzkk0jSJtTt08+FBFMvXoeZnOJD+jTtsRmBYbAIzJdX6l7dLgR7CTubCM5/eDdPUBvLCeVasP1brfVR/9/EZQ==", + "version": "3.0.0", + "resolved": "https://registry.npmjs.org/find-up/-/find-up-3.0.0.tgz", + "integrity": "sha512-1yD6RmLI1XBfxugvORwlck6f75tYL+iR0jqwsOrOxMZyGYqUuDhJ0l4AXdO1iX/FTs9cBAMEk1gWSEx1kSbylg==", "dependencies": { - "locate-path": "^2.0.0" + "locate-path": "^3.0.0" }, "engines": { - "node": ">=4" + "node": ">=6" } }, "node_modules/read-pkg-up/node_modules/locate-path": { - "version": "2.0.0", - "resolved": "https://registry.npmjs.org/locate-path/-/locate-path-2.0.0.tgz", - "integrity": "sha512-NCI2kiDkyR7VeEKm27Kda/iQHyKJe1Bu0FlTbYp3CqJu+9IFe9bLyAjMxf5ZDDbEg+iMPzB5zYyUTSm8wVTKmA==", + "version": "3.0.0", + "resolved": "https://registry.npmjs.org/locate-path/-/locate-path-3.0.0.tgz", + "integrity": "sha512-7AO748wWnIhNqAuaty2ZWHkQHRSNfPVIsPIfwEOWO22AmaoVrWavlOcMR5nzTLNYvp36X220/maaRsrec1G65A==", "dependencies": { - "p-locate": "^2.0.0", + "p-locate": "^3.0.0", "path-exists": "^3.0.0" }, "engines": { - "node": ">=4" + "node": ">=6" } }, "node_modules/read-pkg-up/node_modules/p-limit": { - "version": "1.3.0", - "resolved": 
"https://registry.npmjs.org/p-limit/-/p-limit-1.3.0.tgz", - "integrity": "sha512-vvcXsLAJ9Dr5rQOPk7toZQZJApBl2K4J6dANSsEuh6QI41JYcsS/qhTGa9ErIUUgK3WNQoJYvylxvjqmiqEA9Q==", + "version": "2.3.0", + "resolved": "https://registry.npmjs.org/p-limit/-/p-limit-2.3.0.tgz", + "integrity": "sha512-//88mFWSJx8lxCzwdAABTJL2MyWB12+eIY7MDL2SqLmAkeKU9qxRvWuSyTjm3FUmpBEMuFfckAIqEaVGUDxb6w==", "dependencies": { - "p-try": "^1.0.0" + "p-try": "^2.0.0" }, "engines": { - "node": ">=4" + "node": ">=6" + }, + "funding": { + "url": "https://github.com/sponsors/sindresorhus" } }, "node_modules/read-pkg-up/node_modules/p-locate": { - "version": "2.0.0", - "resolved": "https://registry.npmjs.org/p-locate/-/p-locate-2.0.0.tgz", - "integrity": "sha512-nQja7m7gSKuewoVRen45CtVfODR3crN3goVQ0DDZ9N3yHxgpkuBhZqsaiotSQRrADUrne346peY7kT3TSACykg==", + "version": "3.0.0", + "resolved": "https://registry.npmjs.org/p-locate/-/p-locate-3.0.0.tgz", + "integrity": "sha512-x+12w/To+4GFfgJhBEpiDcLozRJGegY+Ei7/z0tSLkMmxGZNybVMSfWj9aJn8Z5Fc7dBUNJOOVgPv2H7IwulSQ==", "dependencies": { - "p-limit": "^1.1.0" + "p-limit": "^2.0.0" }, "engines": { - "node": ">=4" + "node": ">=6" } }, "node_modules/read-pkg-up/node_modules/p-try": { - "version": "1.0.0", - "resolved": "https://registry.npmjs.org/p-try/-/p-try-1.0.0.tgz", - "integrity": "sha512-U1etNYuMJoIz3ZXSrrySFjsXQTWOx2/jdi86L+2pRvph/qMKL6sbcCYdH23fqsbm8TH2Gn0OybpT4eSFlCVHww==", + "version": "2.2.0", + "resolved": "https://registry.npmjs.org/p-try/-/p-try-2.2.0.tgz", + "integrity": "sha512-R4nPAVTAU0B9D35/Gk3uJf/7XYbQcyohSKdvAxIRSNghFl4e71hVoGnBNQz9cWaXxO2I10KTC+3jMdvvoKw6dQ==", "engines": { - "node": ">=4" + "node": ">=6" } }, "node_modules/read-pkg/node_modules/path-type": { - "version": "2.0.0", - "resolved": "https://registry.npmjs.org/path-type/-/path-type-2.0.0.tgz", - "integrity": "sha512-dUnb5dXUf+kzhC/W/F4e5/SkluXIFf5VUHolW1Eg1irn1hGWjPGdsRcvYJ1nD6lhk8Ir7VM0bHJKsYTx8Jx9OQ==", + "version": "3.0.0", + "resolved": 
"https://registry.npmjs.org/path-type/-/path-type-3.0.0.tgz", + "integrity": "sha512-T2ZUsdZFHgA3u4e5PfPbjd7HDDpxPnQb5jN0SrDsjNSuVXHJqtwTnWqG0B1jZrgmJ/7lj1EmVIByWt1gxGkWvg==", "dependencies": { - "pify": "^2.0.0" + "pify": "^3.0.0" }, "engines": { "node": ">=4" } }, "node_modules/read-pkg/node_modules/pify": { - "version": "2.3.0", - "resolved": "https://registry.npmjs.org/pify/-/pify-2.3.0.tgz", - "integrity": "sha512-udgsAY+fTnvv7kI7aaxbqwWNb0AHiB0qBO89PZKPkoTmGOgdbrHDKD+0B2X4uTfJ/FT1R09r9gTsjUjNJotuog==", + "version": "3.0.0", + "resolved": "https://registry.npmjs.org/pify/-/pify-3.0.0.tgz", + "integrity": "sha512-C3FsVNH1udSEX48gGX1xfvwTWfsYWj5U+8/uK15BGzIGrKoUpghX8hWZwa/OFnakBiiVNmBvemTJR5mcy7iPcg==", "engines": { - "node": ">=0.10.0" + "node": ">=4" } }, "node_modules/readable-stream": { @@ -17336,11 +20134,16 @@ "integrity": "sha512-KigOCHcocU3XODJxsu8i/j8T9tzT4adHiecwORRQ0ZZFcp7ahwXuRU1m+yuO90C5ZUyGeGfocHDI14M3L3yDAQ==" }, "node_modules/resolve": { - "version": "1.15.0", - "resolved": "https://registry.npmjs.org/resolve/-/resolve-1.15.0.tgz", - "integrity": "sha512-+hTmAldEGE80U2wJJDC1lebb5jWqvTYAfm3YZ1ckk1gBr0MnCqUKlwK1e+anaFljIl+F5tR5IoZcm4ZDA1zMQw==", + "version": "1.22.0", + "resolved": "https://registry.npmjs.org/resolve/-/resolve-1.22.0.tgz", + "integrity": "sha512-Hhtrw0nLeSrFQ7phPp4OOcVjLPIeMnRlr5mcnVuMe7M/7eBn98A3hmFRLoFo3DLZkivSYwhRUJTyPyWAk56WLw==", "dependencies": { - "path-parse": "^1.0.6" + "is-core-module": "^2.8.1", + "path-parse": "^1.0.7", + "supports-preserve-symlinks-flag": "^1.0.0" + }, + "bin": { + "resolve": "bin/resolve" }, "funding": { "url": "https://github.com/sponsors/ljharb" @@ -17357,7 +20160,7 @@ "node": ">=4" } }, - "node_modules/resolve-from": { + "node_modules/resolve-cwd/node_modules/resolve-from": { "version": "3.0.0", "resolved": "https://registry.npmjs.org/resolve-from/-/resolve-from-3.0.0.tgz", "integrity": "sha512-GnlH6vxLymXJNMBo7XP1fJIzBFbdYt49CuTwmB/6N53t+kMPRMFKz783LlQ4tv28XoQfMWinAJX6WCGf2IlaIw==", @@ -17365,6 
+20168,14 @@ "node": ">=4" } }, + "node_modules/resolve-from": { + "version": "4.0.0", + "resolved": "https://registry.npmjs.org/resolve-from/-/resolve-from-4.0.0.tgz", + "integrity": "sha512-pb/MYmXstAkysRFx8piNI1tGFNQIFA3vkE3Gq4EuA1dF6gHp/+vgZqsCGJapvy8N3Q+4o7FwvquPJcnZ7RYy4g==", + "engines": { + "node": ">=4" + } + }, "node_modules/resolve-pathname": { "version": "3.0.0", "resolved": "https://registry.npmjs.org/resolve-pathname/-/resolve-pathname-3.0.0.tgz", @@ -17460,6 +20271,14 @@ "node": ">= 0.10" } }, + "node_modules/resolve-url-loader/node_modules/escape-string-regexp": { + "version": "1.0.5", + "resolved": "https://registry.npmjs.org/escape-string-regexp/-/escape-string-regexp-1.0.5.tgz", + "integrity": "sha512-vbRorB5FUQWvla16U8R/qgaFIya2qGzwDrNmCZuYKrbdSUMG6I1ZCGQRefkRVhuOkIGVne7BQ35DSfo1qvJqFg==", + "engines": { + "node": ">=0.8.0" + } + }, "node_modules/resolve-url-loader/node_modules/has-flag": { "version": "3.0.0", "resolved": "https://registry.npmjs.org/has-flag/-/has-flag-3.0.0.tgz", @@ -17544,6 +20363,16 @@ "node": ">= 4" } }, + "node_modules/reusify": { + "version": "1.0.4", + "resolved": "https://registry.npmjs.org/reusify/-/reusify-1.0.4.tgz", + "integrity": "sha512-U9nH88a3fc/ekCF1l0/UP1IosiuIjyTh7hBvXVMHYgVcfGvt897Xguj2UOLDeI5BG2m7/uwyaLVT6fbtCwTyzw==", + "dev": true, + "engines": { + "iojs": ">=1.0.0", + "node": ">=0.10.0" + } + }, "node_modules/rework": { "version": "1.0.1", "resolved": "https://registry.npmjs.org/rework/-/rework-1.0.1.tgz", @@ -17574,14 +20403,17 @@ "integrity": "sha512-zgn5OjNQXLUTdq8m17KdaicF6w89TZs8ZU8y0AYENIU6wG8GG6LLm0yLSiPY8DmaYmHdgRW8rnApjoT0fQRfMg==" }, "node_modules/rimraf": { - "version": "2.6.3", - "resolved": "https://registry.npmjs.org/rimraf/-/rimraf-2.6.3.tgz", - "integrity": "sha512-mwqeW5XsA2qAejG46gYdENaxXjx9onRNCfn7L0duuP4hCuTIi/QO7PDK07KJfp1d+izWPrzEJDcSqBa0OZQriA==", + "version": "3.0.2", + "resolved": "https://registry.npmjs.org/rimraf/-/rimraf-3.0.2.tgz", + "integrity": 
"sha512-JZkJMZkAGFFPP2YqXZXPbMlMBgsxzE8ILs4lMIX/2o0L9UBw9O/Y3o6wFw/i9YLapcUJWwqbi3kdxIPdC62TIA==", "dependencies": { "glob": "^7.1.3" }, "bin": { "rimraf": "bin.js" + }, + "funding": { + "url": "https://github.com/sponsors/isaacs" } }, "node_modules/ripemd160": { @@ -17609,6 +20441,29 @@ "node": ">=0.12.0" } }, + "node_modules/run-parallel": { + "version": "1.2.0", + "resolved": "https://registry.npmjs.org/run-parallel/-/run-parallel-1.2.0.tgz", + "integrity": "sha512-5l4VyZR86LZ/lDxZTR6jqL8AFE2S0IFLMP26AbjsLVADxHdhB/c0GUsH+y39UfCi3dzz8OlQuPmnaJOMoDHQBA==", + "dev": true, + "funding": [ + { + "type": "github", + "url": "https://github.com/sponsors/feross" + }, + { + "type": "patreon", + "url": "https://www.patreon.com/feross" + }, + { + "type": "consulting", + "url": "https://feross.org/support" + } + ], + "dependencies": { + "queue-microtask": "^1.2.2" + } + }, "node_modules/run-queue": { "version": "1.0.3", "resolved": "https://registry.npmjs.org/run-queue/-/run-queue-1.0.3.tgz", @@ -17685,6 +20540,127 @@ "node": "6.* || 8.* || >= 10.*" } }, + "node_modules/sane/node_modules/braces": { + "version": "2.3.2", + "resolved": "https://registry.npmjs.org/braces/-/braces-2.3.2.tgz", + "integrity": "sha512-aNdbnj9P8PjdXU4ybaWLK2IF3jc/EoDYbC7AazW6to3TRsfXxscC9UXOB5iDiEQrkyIbWp2SLQda4+QAa7nc3w==", + "dependencies": { + "arr-flatten": "^1.1.0", + "array-unique": "^0.3.2", + "extend-shallow": "^2.0.1", + "fill-range": "^4.0.0", + "isobject": "^3.0.1", + "repeat-element": "^1.1.2", + "snapdragon": "^0.8.1", + "snapdragon-node": "^2.0.1", + "split-string": "^3.0.2", + "to-regex": "^3.0.1" + }, + "engines": { + "node": ">=0.10.0" + } + }, + "node_modules/sane/node_modules/braces/node_modules/extend-shallow": { + "version": "2.0.1", + "resolved": "https://registry.npmjs.org/extend-shallow/-/extend-shallow-2.0.1.tgz", + "integrity": "sha512-zCnTtlxNoAiDc3gqY2aYAWFx7XWWiasuF2K8Me5WbN8otHKTUKBwjPtNpRs/rbUZm7KxWAaNj7P1a/p52GbVug==", + "dependencies": { + "is-extendable": "^0.1.0" + 
}, + "engines": { + "node": ">=0.10.0" + } + }, + "node_modules/sane/node_modules/fill-range": { + "version": "4.0.0", + "resolved": "https://registry.npmjs.org/fill-range/-/fill-range-4.0.0.tgz", + "integrity": "sha512-VcpLTWqWDiTerugjj8e3+esbg+skS3M9e54UuR3iCeIDMXCLTsAH8hTSzDQU/X6/6t3eYkOKoZSef2PlU6U1XQ==", + "dependencies": { + "extend-shallow": "^2.0.1", + "is-number": "^3.0.0", + "repeat-string": "^1.6.1", + "to-regex-range": "^2.1.0" + }, + "engines": { + "node": ">=0.10.0" + } + }, + "node_modules/sane/node_modules/fill-range/node_modules/extend-shallow": { + "version": "2.0.1", + "resolved": "https://registry.npmjs.org/extend-shallow/-/extend-shallow-2.0.1.tgz", + "integrity": "sha512-zCnTtlxNoAiDc3gqY2aYAWFx7XWWiasuF2K8Me5WbN8otHKTUKBwjPtNpRs/rbUZm7KxWAaNj7P1a/p52GbVug==", + "dependencies": { + "is-extendable": "^0.1.0" + }, + "engines": { + "node": ">=0.10.0" + } + }, + "node_modules/sane/node_modules/is-number": { + "version": "3.0.0", + "resolved": "https://registry.npmjs.org/is-number/-/is-number-3.0.0.tgz", + "integrity": "sha512-4cboCqIpliH+mAvFNegjZQ4kgKc3ZUhQVr3HvWbSh5q3WH2v82ct+T2Y1hdU5Gdtorx/cLifQjqCbL7bpznLTg==", + "dependencies": { + "kind-of": "^3.0.2" + }, + "engines": { + "node": ">=0.10.0" + } + }, + "node_modules/sane/node_modules/is-number/node_modules/kind-of": { + "version": "3.2.2", + "resolved": "https://registry.npmjs.org/kind-of/-/kind-of-3.2.2.tgz", + "integrity": "sha512-NOW9QQXMoZGg/oqnVNoNTTIFEIid1627WCffUBJEdMxYApq7mNE7CpzucIPc+ZQg25Phej7IJSmX3hO+oblOtQ==", + "dependencies": { + "is-buffer": "^1.1.5" + }, + "engines": { + "node": ">=0.10.0" + } + }, + "node_modules/sane/node_modules/kind-of": { + "version": "6.0.3", + "resolved": "https://registry.npmjs.org/kind-of/-/kind-of-6.0.3.tgz", + "integrity": "sha512-dcS1ul+9tmeD95T+x28/ehLgd9mENa3LsvDTtzm3vyBEO7RPptvAD+t44WVXaUjTBRcrpFeFlC8WCruUR456hw==", + "engines": { + "node": ">=0.10.0" + } + }, + "node_modules/sane/node_modules/micromatch": { + "version": "3.1.10", + "resolved": 
"https://registry.npmjs.org/micromatch/-/micromatch-3.1.10.tgz", + "integrity": "sha512-MWikgl9n9M3w+bpsY3He8L+w9eF9338xRl8IAO5viDizwSzziFEyUzo2xrrloB64ADbTf8uA8vRqqttDTOmccg==", + "dependencies": { + "arr-diff": "^4.0.0", + "array-unique": "^0.3.2", + "braces": "^2.3.1", + "define-property": "^2.0.2", + "extend-shallow": "^3.0.2", + "extglob": "^2.0.4", + "fragment-cache": "^0.2.1", + "kind-of": "^6.0.2", + "nanomatch": "^1.2.9", + "object.pick": "^1.3.0", + "regex-not": "^1.0.0", + "snapdragon": "^0.8.1", + "to-regex": "^3.0.2" + }, + "engines": { + "node": ">=0.10.0" + } + }, + "node_modules/sane/node_modules/to-regex-range": { + "version": "2.1.1", + "resolved": "https://registry.npmjs.org/to-regex-range/-/to-regex-range-2.1.1.tgz", + "integrity": "sha512-ZZWNfCjUokXXDGXFpZehJIkZqq91BcULFq/Pi7M5i4JnxXdhMKAK682z8bCW3o8Hj1wuuzoKcW3DfVzaP6VuNg==", + "dependencies": { + "is-number": "^3.0.0", + "repeat-string": "^1.6.1" + }, + "engines": { + "node": ">=0.10.0" + } + }, "node_modules/sanitize.css": { "version": "10.0.0", "resolved": "https://registry.npmjs.org/sanitize.css/-/sanitize.css-10.0.0.tgz", @@ -17747,6 +20723,14 @@ "node": ">=0.10.0" } }, + "node_modules/sass-loader/node_modules/semver": { + "version": "6.3.0", + "resolved": "https://registry.npmjs.org/semver/-/semver-6.3.0.tgz", + "integrity": "sha512-b39TBaTSfV6yBrapU89p5fKekE2m/NwnDocOVruQFS1/veMgdzuPcnOM34M6CwxW8jH/lxEa5rBoDeUwu5HHTw==", + "bin": { + "semver": "bin/semver.js" + } + }, "node_modules/sass-loader/node_modules/shallow-clone": { "version": "3.0.1", "resolved": "https://registry.npmjs.org/shallow-clone/-/shallow-clone-3.0.1.tgz", @@ -17814,11 +20798,17 @@ } }, "node_modules/semver": { - "version": "6.3.0", - "resolved": "https://registry.npmjs.org/semver/-/semver-6.3.0.tgz", - "integrity": "sha512-b39TBaTSfV6yBrapU89p5fKekE2m/NwnDocOVruQFS1/veMgdzuPcnOM34M6CwxW8jH/lxEa5rBoDeUwu5HHTw==", + "version": "7.3.7", + "resolved": "https://registry.npmjs.org/semver/-/semver-7.3.7.tgz", + "integrity": 
"sha512-QlYTucUYOews+WeEujDoEGziz4K6c47V/Bd+LjSSYcA94p+DmINdf7ncaUinThfvZyu13lN9OY1XDxt8C0Tw0g==", + "dependencies": { + "lru-cache": "^6.0.0" + }, "bin": { "semver": "bin/semver.js" + }, + "engines": { + "node": ">=10" } }, "node_modules/send": { @@ -17995,14 +20985,6 @@ "node": ">=0.10.0" } }, - "node_modules/set-value/node_modules/is-extendable": { - "version": "0.1.1", - "resolved": "https://registry.npmjs.org/is-extendable/-/is-extendable-0.1.1.tgz", - "integrity": "sha512-5BMULNob1vgFX6EjQw5izWDxrecWK9AM72rugNr0TFldMOi0fj6Jk+zeKIt0xGj4cEfQIJth4w3OKWOJ4f+AFw==", - "engines": { - "node": ">=0.10.0" - } - }, "node_modules/setimmediate": { "version": "1.0.5", "resolved": "https://registry.npmjs.org/setimmediate/-/setimmediate-1.0.5.tgz", @@ -18039,14 +21021,6 @@ "node": ">=0.10.0" } }, - "node_modules/shallow-clone/node_modules/is-extendable": { - "version": "0.1.1", - "resolved": "https://registry.npmjs.org/is-extendable/-/is-extendable-0.1.1.tgz", - "integrity": "sha512-5BMULNob1vgFX6EjQw5izWDxrecWK9AM72rugNr0TFldMOi0fj6Jk+zeKIt0xGj4cEfQIJth4w3OKWOJ4f+AFw==", - "engines": { - "node": ">=0.10.0" - } - }, "node_modules/shallow-clone/node_modules/kind-of": { "version": "2.0.1", "resolved": "https://registry.npmjs.org/kind-of/-/kind-of-2.0.1.tgz", @@ -18067,22 +21041,22 @@ } }, "node_modules/shebang-command": { - "version": "1.2.0", - "resolved": "https://registry.npmjs.org/shebang-command/-/shebang-command-1.2.0.tgz", - "integrity": "sha512-EV3L1+UQWGor21OmnvojK36mhg+TyIKDh3iFBKBohr5xeXIhNBcx8oWdgkTEEQ+BEFFYdLRuqMfd5L84N1V5Vg==", + "version": "2.0.0", + "resolved": "https://registry.npmjs.org/shebang-command/-/shebang-command-2.0.0.tgz", + "integrity": "sha512-kHxr2zZpYtdmrN1qDjrrX/Z1rR1kG8Dx+gkpK1G4eXmvXswmcE1hTWBWYUzlraYw1/yZp6YuDY77YtvbN0dmDA==", "dependencies": { - "shebang-regex": "^1.0.0" + "shebang-regex": "^3.0.0" }, "engines": { - "node": ">=0.10.0" + "node": ">=8" } }, "node_modules/shebang-regex": { - "version": "1.0.0", - "resolved": 
"https://registry.npmjs.org/shebang-regex/-/shebang-regex-1.0.0.tgz", - "integrity": "sha512-wpoSFAxys6b2a2wHZ1XpDSgD7N9iVjg29Ph9uV/uaP9Ex/KXlkTZTeddxDPSYQpgvzKLGJke2UU0AzoGCjNIvQ==", + "version": "3.0.0", + "resolved": "https://registry.npmjs.org/shebang-regex/-/shebang-regex-3.0.0.tgz", + "integrity": "sha512-7++dFhtcx3353uBaq8DDR4NuxBetBzC7ZQOhmTQInHEd6bSrXdiEyzCvG07Z44UYdLShWUyXt5M/yhz8ekcb1A==", "engines": { - "node": ">=0.10.0" + "node": ">=8" } }, "node_modules/shell-quote": { @@ -18132,11 +21106,11 @@ "integrity": "sha512-bLGGlR1QxBcynn2d5YmDX4MGjlZvy2MRBDRNHLJ8VI6l6+9FUiyTFNJ0IveOSP0bcXgVDPRcfGqA0pjaqUpfVg==" }, "node_modules/slash": { - "version": "2.0.0", - "resolved": "https://registry.npmjs.org/slash/-/slash-2.0.0.tgz", - "integrity": "sha512-ZYKh3Wh2z1PpEXWr0MpSBZ0V6mZHAQfYevttO11c51CaWjGTaadiKZ+wVt1PbMlDV5qhMFslpZCemhwOK7C89A==", + "version": "3.0.0", + "resolved": "https://registry.npmjs.org/slash/-/slash-3.0.0.tgz", + "integrity": "sha512-g9Q1haeby36OSStwb4ntCGGGaKsaVSjQ68fBxoQcutl5fS1vuY18H3wSt3jFyFtrkx+Kz0V1G85A4MyAdDMi2Q==", "engines": { - "node": ">=6" + "node": ">=8" } }, "node_modules/slice-ansi": { @@ -18310,14 +21284,6 @@ "node": ">=0.10.0" } }, - "node_modules/snapdragon/node_modules/is-extendable": { - "version": "0.1.1", - "resolved": "https://registry.npmjs.org/is-extendable/-/is-extendable-0.1.1.tgz", - "integrity": "sha512-5BMULNob1vgFX6EjQw5izWDxrecWK9AM72rugNr0TFldMOi0fj6Jk+zeKIt0xGj4cEfQIJth4w3OKWOJ4f+AFw==", - "engines": { - "node": ">=0.10.0" - } - }, "node_modules/snapdragon/node_modules/ms": { "version": "2.0.0", "resolved": "https://registry.npmjs.org/ms/-/ms-2.0.0.tgz", @@ -18770,17 +21736,6 @@ "resolved": "https://registry.npmjs.org/emoji-regex/-/emoji-regex-8.0.0.tgz", "integrity": "sha512-MSjYzcWNOA0ewAHpz0MxpYFvwg6yjy1NG3xteoqz644VCo/RPgnr1/GGt+ic3iJTzQ8Eu3TdM14SawnVUmGE6A==" }, - "node_modules/string-width/node_modules/strip-ansi": { - "version": "6.0.1", - "resolved": 
"https://registry.npmjs.org/strip-ansi/-/strip-ansi-6.0.1.tgz", - "integrity": "sha512-Y38VPSHcqkFrCpFnQ9vuSXmquuv5oXOKpGeT6aGrr3o3Gc9AlVa6JBfUSOCnbxGGZF+/0ooI7KrPuUSztUdU5A==", - "dependencies": { - "ansi-regex": "^5.0.1" - }, - "engines": { - "node": ">=8" - } - }, "node_modules/string.prototype.matchall": { "version": "4.0.7", "resolved": "https://registry.npmjs.org/string.prototype.matchall/-/string.prototype.matchall-4.0.7.tgz", @@ -18839,22 +21794,14 @@ } }, "node_modules/strip-ansi": { - "version": "5.2.0", - "resolved": "https://registry.npmjs.org/strip-ansi/-/strip-ansi-5.2.0.tgz", - "integrity": "sha512-DuRs1gKbBqsMKIZlrffwlug8MHkcnpjs5VPmL1PAh+mA30U0DTotfDZ0d2UUsXpPmPmMMJ6W773MaA3J+lbiWA==", + "version": "6.0.1", + "resolved": "https://registry.npmjs.org/strip-ansi/-/strip-ansi-6.0.1.tgz", + "integrity": "sha512-Y38VPSHcqkFrCpFnQ9vuSXmquuv5oXOKpGeT6aGrr3o3Gc9AlVa6JBfUSOCnbxGGZF+/0ooI7KrPuUSztUdU5A==", "dependencies": { - "ansi-regex": "^4.1.0" + "ansi-regex": "^5.0.1" }, "engines": { - "node": ">=6" - } - }, - "node_modules/strip-ansi/node_modules/ansi-regex": { - "version": "4.1.1", - "resolved": "https://registry.npmjs.org/ansi-regex/-/ansi-regex-4.1.1.tgz", - "integrity": "sha512-ILlv4k/3f6vfQ4OoP2AGvirOktlQ98ZEL1k9FaQjxa3L1abBgbuTDAdPOpvbGncC0BTVQrl+OM8xZGK6tWXt7g==", - "engines": { - "node": ">=6" + "node": ">=8" } }, "node_modules/strip-bom": { @@ -19148,6 +22095,14 @@ "resolved": "https://registry.npmjs.org/domelementtype/-/domelementtype-1.3.1.tgz", "integrity": "sha512-BSKB+TSpMpFI/HOxCNr1O8aMOTZ8hT3pM3GQ0w/mWRmkhEDSFJkkyzz4XQsBV44BChwGkrDfMyjVD0eA2aFV3w==" }, + "node_modules/svgo/node_modules/escape-string-regexp": { + "version": "1.0.5", + "resolved": "https://registry.npmjs.org/escape-string-regexp/-/escape-string-regexp-1.0.5.tgz", + "integrity": "sha512-vbRorB5FUQWvla16U8R/qgaFIya2qGzwDrNmCZuYKrbdSUMG6I1ZCGQRefkRVhuOkIGVne7BQ35DSfo1qvJqFg==", + "engines": { + "node": ">=0.8.0" + } + }, "node_modules/svgo/node_modules/has-flag": { "version": 
"3.0.0", "resolved": "https://registry.npmjs.org/has-flag/-/has-flag-3.0.0.tgz", @@ -19202,6 +22157,19 @@ "node": ">=6.0.0" } }, + "node_modules/table/node_modules/ansi-regex": { + "version": "4.1.1", + "resolved": "https://registry.npmjs.org/ansi-regex/-/ansi-regex-4.1.1.tgz", + "integrity": "sha512-ILlv4k/3f6vfQ4OoP2AGvirOktlQ98ZEL1k9FaQjxa3L1abBgbuTDAdPOpvbGncC0BTVQrl+OM8xZGK6tWXt7g==", + "engines": { + "node": ">=6" + } + }, + "node_modules/table/node_modules/emoji-regex": { + "version": "7.0.3", + "resolved": "https://registry.npmjs.org/emoji-regex/-/emoji-regex-7.0.3.tgz", + "integrity": "sha512-CwBLREIQ7LvYFB0WyRvwhq5N5qPhc6PMjD6bYggFlI5YyDgl+0vxq5VHbMOFqLg7hfWzmu8T5Z1QofhmTIhItA==" + }, "node_modules/table/node_modules/is-fullwidth-code-point": { "version": "2.0.0", "resolved": "https://registry.npmjs.org/is-fullwidth-code-point/-/is-fullwidth-code-point-2.0.0.tgz", @@ -19223,6 +22191,17 @@ "node": ">=6" } }, + "node_modules/table/node_modules/strip-ansi": { + "version": "5.2.0", + "resolved": "https://registry.npmjs.org/strip-ansi/-/strip-ansi-5.2.0.tgz", + "integrity": "sha512-DuRs1gKbBqsMKIZlrffwlug8MHkcnpjs5VPmL1PAh+mA30U0DTotfDZ0d2UUsXpPmPmMMJ6W773MaA3J+lbiWA==", + "dependencies": { + "ansi-regex": "^4.1.0" + }, + "engines": { + "node": ">=6" + } + }, "node_modules/tapable": { "version": "1.1.3", "resolved": "https://registry.npmjs.org/tapable/-/tapable-1.1.3.tgz", @@ -19338,6 +22317,20 @@ "url": "https://github.com/sponsors/sindresorhus" } }, + "node_modules/terser-webpack-plugin/node_modules/p-limit": { + "version": "2.3.0", + "resolved": "https://registry.npmjs.org/p-limit/-/p-limit-2.3.0.tgz", + "integrity": "sha512-//88mFWSJx8lxCzwdAABTJL2MyWB12+eIY7MDL2SqLmAkeKU9qxRvWuSyTjm3FUmpBEMuFfckAIqEaVGUDxb6w==", + "dependencies": { + "p-try": "^2.0.0" + }, + "engines": { + "node": ">=6" + }, + "funding": { + "url": "https://github.com/sponsors/sindresorhus" + } + }, "node_modules/terser-webpack-plugin/node_modules/p-locate": { "version": "4.1.0", 
"resolved": "https://registry.npmjs.org/p-locate/-/p-locate-4.1.0.tgz", @@ -19349,6 +22342,14 @@ "node": ">=8" } }, + "node_modules/terser-webpack-plugin/node_modules/p-try": { + "version": "2.2.0", + "resolved": "https://registry.npmjs.org/p-try/-/p-try-2.2.0.tgz", + "integrity": "sha512-R4nPAVTAU0B9D35/Gk3uJf/7XYbQcyohSKdvAxIRSNghFl4e71hVoGnBNQz9cWaXxO2I10KTC+3jMdvvoKw6dQ==", + "engines": { + "node": ">=6" + } + }, "node_modules/terser-webpack-plugin/node_modules/path-exists": { "version": "4.0.0", "resolved": "https://registry.npmjs.org/path-exists/-/path-exists-4.0.0.tgz", @@ -19368,6 +22369,19 @@ "node": ">=8" } }, + "node_modules/terser-webpack-plugin/node_modules/semver": { + "version": "6.3.0", + "resolved": "https://registry.npmjs.org/semver/-/semver-6.3.0.tgz", + "integrity": "sha512-b39TBaTSfV6yBrapU89p5fKekE2m/NwnDocOVruQFS1/veMgdzuPcnOM34M6CwxW8jH/lxEa5rBoDeUwu5HHTw==", + "bin": { + "semver": "bin/semver.js" + } + }, + "node_modules/terser/node_modules/commander": { + "version": "2.20.3", + "resolved": "https://registry.npmjs.org/commander/-/commander-2.20.3.tgz", + "integrity": "sha512-GpVkmM8vF2vQUkj2LvZmD35JxeJOLCwJ9cUkugyk2nuhbv3+mJvpLYYt+0+USMxE+oj+ey/lJEnhZw75x/OMcQ==" + }, "node_modules/test-exclude": { "version": "5.2.3", "resolved": "https://registry.npmjs.org/test-exclude/-/test-exclude-5.2.3.tgz", @@ -19382,53 +22396,6 @@ "node": ">=6" } }, - "node_modules/test-exclude/node_modules/load-json-file": { - "version": "4.0.0", - "resolved": "https://registry.npmjs.org/load-json-file/-/load-json-file-4.0.0.tgz", - "integrity": "sha512-Kx8hMakjX03tiGTLAIdJ+lL0htKnXjEZN6hk/tozf/WOuYGdZBJrZ+rCJRbVCugsjB3jMLn9746NsQIf5VjBMw==", - "dependencies": { - "graceful-fs": "^4.1.2", - "parse-json": "^4.0.0", - "pify": "^3.0.0", - "strip-bom": "^3.0.0" - }, - "engines": { - "node": ">=4" - } - }, - "node_modules/test-exclude/node_modules/pify": { - "version": "3.0.0", - "resolved": "https://registry.npmjs.org/pify/-/pify-3.0.0.tgz", - "integrity": 
"sha512-C3FsVNH1udSEX48gGX1xfvwTWfsYWj5U+8/uK15BGzIGrKoUpghX8hWZwa/OFnakBiiVNmBvemTJR5mcy7iPcg==", - "engines": { - "node": ">=4" - } - }, - "node_modules/test-exclude/node_modules/read-pkg": { - "version": "3.0.0", - "resolved": "https://registry.npmjs.org/read-pkg/-/read-pkg-3.0.0.tgz", - "integrity": "sha512-BLq/cCO9two+lBgiTYNqD6GdtK8s4NpaWrl6/rCO9w0TUS8oJl7cmToOZfRYllKTISY6nt1U7jQ53brmKqY6BA==", - "dependencies": { - "load-json-file": "^4.0.0", - "normalize-package-data": "^2.3.2", - "path-type": "^3.0.0" - }, - "engines": { - "node": ">=4" - } - }, - "node_modules/test-exclude/node_modules/read-pkg-up": { - "version": "4.0.0", - "resolved": "https://registry.npmjs.org/read-pkg-up/-/read-pkg-up-4.0.0.tgz", - "integrity": "sha512-6etQSH7nJGsK0RbG/2TeDzZFa8shjQ1um+SwQQ5cwKy0dhSXdOncEhb1CPpvQG4h7FyOV6EB6YlV0yJvZQNAkA==", - "dependencies": { - "find-up": "^3.0.0", - "read-pkg": "^3.0.0" - }, - "engines": { - "node": ">=6" - } - }, "node_modules/text-table": { "version": "0.2.0", "resolved": "https://registry.npmjs.org/text-table/-/text-table-0.2.0.tgz", @@ -19547,15 +22514,14 @@ } }, "node_modules/to-regex-range": { - "version": "2.1.1", - "resolved": "https://registry.npmjs.org/to-regex-range/-/to-regex-range-2.1.1.tgz", - "integrity": "sha512-ZZWNfCjUokXXDGXFpZehJIkZqq91BcULFq/Pi7M5i4JnxXdhMKAK682z8bCW3o8Hj1wuuzoKcW3DfVzaP6VuNg==", + "version": "5.0.1", + "resolved": "https://registry.npmjs.org/to-regex-range/-/to-regex-range-5.0.1.tgz", + "integrity": "sha512-65P7iz6X5yEr1cwcgvQxbbIw7Uk3gOy5dIdtZ4rDveLqhrdJP+Li/Hx6tyK0NEb+2GCyneCMJiGqrADCSNk8sQ==", "dependencies": { - "is-number": "^3.0.0", - "repeat-string": "^1.6.1" + "is-number": "^7.0.0" }, "engines": { - "node": ">=0.10.0" + "node": ">=8.0" } }, "node_modules/toidentifier": { @@ -19608,6 +22574,28 @@ } } }, + "node_modules/tsconfig-paths": { + "version": "3.14.1", + "resolved": "https://registry.npmjs.org/tsconfig-paths/-/tsconfig-paths-3.14.1.tgz", + "integrity": 
"sha512-fxDhWnFSLt3VuTwtvJt5fpwxBHg5AdKWMsgcPOOIilyjymcYVZoCQF8fvFRezCNfblEXmi+PcM1eYHeOAgXCOQ==", + "dependencies": { + "@types/json5": "^0.0.29", + "json5": "^1.0.1", + "minimist": "^1.2.6", + "strip-bom": "^3.0.0" + } + }, + "node_modules/tsconfig-paths/node_modules/json5": { + "version": "1.0.1", + "resolved": "https://registry.npmjs.org/json5/-/json5-1.0.1.tgz", + "integrity": "sha512-aKS4WQjPenRxiQsC93MNfjx+nbF4PAdYzmd/1JIj8HYzqfbu86beTuNgXDzPknWk0n0uARlyewZo4s++ES36Ow==", + "dependencies": { + "minimist": "^1.2.0" + }, + "bin": { + "json5": "lib/cli.js" + } + }, "node_modules/tslib": { "version": "2.4.0", "resolved": "https://registry.npmjs.org/tslib/-/tslib-2.4.0.tgz", @@ -19659,20 +22647,20 @@ "integrity": "sha512-+5nt5AAniqsCnu2cEQQdpzCAh33kVx8n0VoFidKpB1dVVLAN/F+bgVOqOJqOnEnrhp222clB5p3vUlD+1QAnfg==" }, "node_modules/type-check": { - "version": "0.3.2", - "resolved": "https://registry.npmjs.org/type-check/-/type-check-0.3.2.tgz", - "integrity": "sha512-ZCmOJdvOWDBYJlzAoFkC+Q0+bUyEOS1ltgp1MGU03fqHG+dbi9tBFU2Rd9QKiDZFAYrhPh2JUf7rZRIuHRKtOg==", + "version": "0.4.0", + "resolved": "https://registry.npmjs.org/type-check/-/type-check-0.4.0.tgz", + "integrity": "sha512-XleUoc9uwGXqjWwXaUTZAmzMcFZ5858QA2vvx1Ur5xIcixXIP+8LnFDgRplU30us6teqdlskFfu+ae4K79Ooew==", "dependencies": { - "prelude-ls": "~1.1.2" + "prelude-ls": "^1.2.1" }, "engines": { "node": ">= 0.8.0" } }, "node_modules/type-fest": { - "version": "0.21.3", - "resolved": "https://registry.npmjs.org/type-fest/-/type-fest-0.21.3.tgz", - "integrity": "sha512-t0rzBq87m3fVcduHDUFhKmyyX+9eo6WQjZvf51Ea/M0Q7+T374Jp1aUiyUl0GKxp8M/OETVHSDvmkyPgvX+X2w==", + "version": "0.20.2", + "resolved": "https://registry.npmjs.org/type-fest/-/type-fest-0.20.2.tgz", + "integrity": "sha512-Ne+eE4r0/iWnpAxD852z3A+N0Bt5RN//NjJwRd2VFHEmrywxf5vsZlh4R6lixl6B+wz/8d+maTSAkN1FIkI3LQ==", "engines": { "node": ">=10" }, @@ -19837,9 +22825,9 @@ } }, "node_modules/unified/node_modules/is-plain-obj": { - "version": "4.0.0", - "resolved": 
"https://registry.npmjs.org/is-plain-obj/-/is-plain-obj-4.0.0.tgz", - "integrity": "sha512-NXRbBtUdBioI73y/HmOhogw/U5msYPC9DAtGkJXeFcFWSFZw0mCUsPxk/snTuJHzNKA8kLBK4rH97RMB1BfCXw==", + "version": "4.1.0", + "resolved": "https://registry.npmjs.org/is-plain-obj/-/is-plain-obj-4.1.0.tgz", + "integrity": "sha512-+Pgi+vMuUNkJyExiMBt5IlFoMyKnr5zhJ4Uspz58WOhBF5QoIZkFyNHIbBAtHwzVAgk5RtndVNsDRN61/mmDqg==", "engines": { "node": ">=12" }, @@ -19861,14 +22849,6 @@ "node": ">=0.10.0" } }, - "node_modules/union-value/node_modules/is-extendable": { - "version": "0.1.1", - "resolved": "https://registry.npmjs.org/is-extendable/-/is-extendable-0.1.1.tgz", - "integrity": "sha512-5BMULNob1vgFX6EjQw5izWDxrecWK9AM72rugNr0TFldMOi0fj6Jk+zeKIt0xGj4cEfQIJth4w3OKWOJ4f+AFw==", - "engines": { - "node": ">=0.10.0" - } - }, "node_modules/uniq": { "version": "1.0.1", "resolved": "https://registry.npmjs.org/uniq/-/uniq-1.0.1.tgz", @@ -20369,6 +23349,27 @@ "node": ">=0.10.0" } }, + "node_modules/watchpack-chokidar2/node_modules/braces": { + "version": "2.3.2", + "resolved": "https://registry.npmjs.org/braces/-/braces-2.3.2.tgz", + "integrity": "sha512-aNdbnj9P8PjdXU4ybaWLK2IF3jc/EoDYbC7AazW6to3TRsfXxscC9UXOB5iDiEQrkyIbWp2SLQda4+QAa7nc3w==", + "optional": true, + "dependencies": { + "arr-flatten": "^1.1.0", + "array-unique": "^0.3.2", + "extend-shallow": "^2.0.1", + "fill-range": "^4.0.0", + "isobject": "^3.0.1", + "repeat-element": "^1.1.2", + "snapdragon": "^0.8.1", + "snapdragon-node": "^2.0.1", + "split-string": "^3.0.2", + "to-regex": "^3.0.1" + }, + "engines": { + "node": ">=0.10.0" + } + }, "node_modules/watchpack-chokidar2/node_modules/chokidar": { "version": "2.1.8", "resolved": "https://registry.npmjs.org/chokidar/-/chokidar-2.1.8.tgz", @@ -20392,6 +23393,33 @@ "fsevents": "^1.2.7" } }, + "node_modules/watchpack-chokidar2/node_modules/extend-shallow": { + "version": "2.0.1", + "resolved": "https://registry.npmjs.org/extend-shallow/-/extend-shallow-2.0.1.tgz", + "integrity": 
"sha512-zCnTtlxNoAiDc3gqY2aYAWFx7XWWiasuF2K8Me5WbN8otHKTUKBwjPtNpRs/rbUZm7KxWAaNj7P1a/p52GbVug==", + "optional": true, + "dependencies": { + "is-extendable": "^0.1.0" + }, + "engines": { + "node": ">=0.10.0" + } + }, + "node_modules/watchpack-chokidar2/node_modules/fill-range": { + "version": "4.0.0", + "resolved": "https://registry.npmjs.org/fill-range/-/fill-range-4.0.0.tgz", + "integrity": "sha512-VcpLTWqWDiTerugjj8e3+esbg+skS3M9e54UuR3iCeIDMXCLTsAH8hTSzDQU/X6/6t3eYkOKoZSef2PlU6U1XQ==", + "optional": true, + "dependencies": { + "extend-shallow": "^2.0.1", + "is-number": "^3.0.0", + "repeat-string": "^1.6.1", + "to-regex-range": "^2.1.0" + }, + "engines": { + "node": ">=0.10.0" + } + }, "node_modules/watchpack-chokidar2/node_modules/fsevents": { "version": "1.2.13", "resolved": "https://registry.npmjs.org/fsevents/-/fsevents-1.2.13.tgz", @@ -20444,6 +23472,76 @@ "node": ">=0.10.0" } }, + "node_modules/watchpack-chokidar2/node_modules/is-number": { + "version": "3.0.0", + "resolved": "https://registry.npmjs.org/is-number/-/is-number-3.0.0.tgz", + "integrity": "sha512-4cboCqIpliH+mAvFNegjZQ4kgKc3ZUhQVr3HvWbSh5q3WH2v82ct+T2Y1hdU5Gdtorx/cLifQjqCbL7bpznLTg==", + "optional": true, + "dependencies": { + "kind-of": "^3.0.2" + }, + "engines": { + "node": ">=0.10.0" + } + }, + "node_modules/watchpack-chokidar2/node_modules/micromatch": { + "version": "3.1.10", + "resolved": "https://registry.npmjs.org/micromatch/-/micromatch-3.1.10.tgz", + "integrity": "sha512-MWikgl9n9M3w+bpsY3He8L+w9eF9338xRl8IAO5viDizwSzziFEyUzo2xrrloB64ADbTf8uA8vRqqttDTOmccg==", + "optional": true, + "dependencies": { + "arr-diff": "^4.0.0", + "array-unique": "^0.3.2", + "braces": "^2.3.1", + "define-property": "^2.0.2", + "extend-shallow": "^3.0.2", + "extglob": "^2.0.4", + "fragment-cache": "^0.2.1", + "kind-of": "^6.0.2", + "nanomatch": "^1.2.9", + "object.pick": "^1.3.0", + "regex-not": "^1.0.0", + "snapdragon": "^0.8.1", + "to-regex": "^3.0.2" + }, + "engines": { + "node": ">=0.10.0" + } + }, + 
"node_modules/watchpack-chokidar2/node_modules/micromatch/node_modules/extend-shallow": { + "version": "3.0.2", + "resolved": "https://registry.npmjs.org/extend-shallow/-/extend-shallow-3.0.2.tgz", + "integrity": "sha512-BwY5b5Ql4+qZoefgMj2NUmx+tehVTH/Kf4k1ZEtOHNFcm2wSxMRo992l6X3TIgni2eZVTZ85xMOjF31fwZAj6Q==", + "optional": true, + "dependencies": { + "assign-symbols": "^1.0.0", + "is-extendable": "^1.0.1" + }, + "engines": { + "node": ">=0.10.0" + } + }, + "node_modules/watchpack-chokidar2/node_modules/micromatch/node_modules/is-extendable": { + "version": "1.0.1", + "resolved": "https://registry.npmjs.org/is-extendable/-/is-extendable-1.0.1.tgz", + "integrity": "sha512-arnXMxT1hhoKo9k1LZdmlNyJdDDfy2v0fXjFlmok4+i8ul/6WlbVge9bhM74OpNPQPMGUToDtz+KXa1PneJxOA==", + "optional": true, + "dependencies": { + "is-plain-object": "^2.0.4" + }, + "engines": { + "node": ">=0.10.0" + } + }, + "node_modules/watchpack-chokidar2/node_modules/micromatch/node_modules/kind-of": { + "version": "6.0.3", + "resolved": "https://registry.npmjs.org/kind-of/-/kind-of-6.0.3.tgz", + "integrity": "sha512-dcS1ul+9tmeD95T+x28/ehLgd9mENa3LsvDTtzm3vyBEO7RPptvAD+t44WVXaUjTBRcrpFeFlC8WCruUR456hw==", + "optional": true, + "engines": { + "node": ">=0.10.0" + } + }, "node_modules/watchpack-chokidar2/node_modules/readdirp": { "version": "2.2.1", "resolved": "https://registry.npmjs.org/readdirp/-/readdirp-2.2.1.tgz", @@ -20458,6 +23556,19 @@ "node": ">=0.10" } }, + "node_modules/watchpack-chokidar2/node_modules/to-regex-range": { + "version": "2.1.1", + "resolved": "https://registry.npmjs.org/to-regex-range/-/to-regex-range-2.1.1.tgz", + "integrity": "sha512-ZZWNfCjUokXXDGXFpZehJIkZqq91BcULFq/Pi7M5i4JnxXdhMKAK682z8bCW3o8Hj1wuuzoKcW3DfVzaP6VuNg==", + "optional": true, + "dependencies": { + "is-number": "^3.0.0", + "repeat-string": "^1.6.1" + }, + "engines": { + "node": ">=0.10.0" + } + }, "node_modules/wbuf": { "version": "1.7.3", "resolved": "https://registry.npmjs.org/wbuf/-/wbuf-1.7.3.tgz", @@ -20599,6 
+23710,26 @@ "node": ">=0.10.0" } }, + "node_modules/webpack-dev-server/node_modules/braces": { + "version": "2.3.2", + "resolved": "https://registry.npmjs.org/braces/-/braces-2.3.2.tgz", + "integrity": "sha512-aNdbnj9P8PjdXU4ybaWLK2IF3jc/EoDYbC7AazW6to3TRsfXxscC9UXOB5iDiEQrkyIbWp2SLQda4+QAa7nc3w==", + "dependencies": { + "arr-flatten": "^1.1.0", + "array-unique": "^0.3.2", + "extend-shallow": "^2.0.1", + "fill-range": "^4.0.0", + "isobject": "^3.0.1", + "repeat-element": "^1.1.2", + "snapdragon": "^0.8.1", + "snapdragon-node": "^2.0.1", + "split-string": "^3.0.2", + "to-regex": "^3.0.1" + }, + "engines": { + "node": ">=0.10.0" + } + }, "node_modules/webpack-dev-server/node_modules/chokidar": { "version": "2.1.8", "resolved": "https://registry.npmjs.org/chokidar/-/chokidar-2.1.8.tgz", @@ -20621,6 +23752,31 @@ "fsevents": "^1.2.7" } }, + "node_modules/webpack-dev-server/node_modules/extend-shallow": { + "version": "2.0.1", + "resolved": "https://registry.npmjs.org/extend-shallow/-/extend-shallow-2.0.1.tgz", + "integrity": "sha512-zCnTtlxNoAiDc3gqY2aYAWFx7XWWiasuF2K8Me5WbN8otHKTUKBwjPtNpRs/rbUZm7KxWAaNj7P1a/p52GbVug==", + "dependencies": { + "is-extendable": "^0.1.0" + }, + "engines": { + "node": ">=0.10.0" + } + }, + "node_modules/webpack-dev-server/node_modules/fill-range": { + "version": "4.0.0", + "resolved": "https://registry.npmjs.org/fill-range/-/fill-range-4.0.0.tgz", + "integrity": "sha512-VcpLTWqWDiTerugjj8e3+esbg+skS3M9e54UuR3iCeIDMXCLTsAH8hTSzDQU/X6/6t3eYkOKoZSef2PlU6U1XQ==", + "dependencies": { + "extend-shallow": "^2.0.1", + "is-number": "^3.0.0", + "repeat-string": "^1.6.1", + "to-regex-range": "^2.1.0" + }, + "engines": { + "node": ">=0.10.0" + } + }, "node_modules/webpack-dev-server/node_modules/fsevents": { "version": "1.2.13", "resolved": "https://registry.npmjs.org/fsevents/-/fsevents-1.2.13.tgz", @@ -20686,6 +23842,71 @@ "node": ">=0.10.0" } }, + "node_modules/webpack-dev-server/node_modules/is-number": { + "version": "3.0.0", + "resolved": 
"https://registry.npmjs.org/is-number/-/is-number-3.0.0.tgz", + "integrity": "sha512-4cboCqIpliH+mAvFNegjZQ4kgKc3ZUhQVr3HvWbSh5q3WH2v82ct+T2Y1hdU5Gdtorx/cLifQjqCbL7bpznLTg==", + "dependencies": { + "kind-of": "^3.0.2" + }, + "engines": { + "node": ">=0.10.0" + } + }, + "node_modules/webpack-dev-server/node_modules/micromatch": { + "version": "3.1.10", + "resolved": "https://registry.npmjs.org/micromatch/-/micromatch-3.1.10.tgz", + "integrity": "sha512-MWikgl9n9M3w+bpsY3He8L+w9eF9338xRl8IAO5viDizwSzziFEyUzo2xrrloB64ADbTf8uA8vRqqttDTOmccg==", + "dependencies": { + "arr-diff": "^4.0.0", + "array-unique": "^0.3.2", + "braces": "^2.3.1", + "define-property": "^2.0.2", + "extend-shallow": "^3.0.2", + "extglob": "^2.0.4", + "fragment-cache": "^0.2.1", + "kind-of": "^6.0.2", + "nanomatch": "^1.2.9", + "object.pick": "^1.3.0", + "regex-not": "^1.0.0", + "snapdragon": "^0.8.1", + "to-regex": "^3.0.2" + }, + "engines": { + "node": ">=0.10.0" + } + }, + "node_modules/webpack-dev-server/node_modules/micromatch/node_modules/extend-shallow": { + "version": "3.0.2", + "resolved": "https://registry.npmjs.org/extend-shallow/-/extend-shallow-3.0.2.tgz", + "integrity": "sha512-BwY5b5Ql4+qZoefgMj2NUmx+tehVTH/Kf4k1ZEtOHNFcm2wSxMRo992l6X3TIgni2eZVTZ85xMOjF31fwZAj6Q==", + "dependencies": { + "assign-symbols": "^1.0.0", + "is-extendable": "^1.0.1" + }, + "engines": { + "node": ">=0.10.0" + } + }, + "node_modules/webpack-dev-server/node_modules/micromatch/node_modules/is-extendable": { + "version": "1.0.1", + "resolved": "https://registry.npmjs.org/is-extendable/-/is-extendable-1.0.1.tgz", + "integrity": "sha512-arnXMxT1hhoKo9k1LZdmlNyJdDDfy2v0fXjFlmok4+i8ul/6WlbVge9bhM74OpNPQPMGUToDtz+KXa1PneJxOA==", + "dependencies": { + "is-plain-object": "^2.0.4" + }, + "engines": { + "node": ">=0.10.0" + } + }, + "node_modules/webpack-dev-server/node_modules/micromatch/node_modules/kind-of": { + "version": "6.0.3", + "resolved": "https://registry.npmjs.org/kind-of/-/kind-of-6.0.3.tgz", + "integrity": 
"sha512-dcS1ul+9tmeD95T+x28/ehLgd9mENa3LsvDTtzm3vyBEO7RPptvAD+t44WVXaUjTBRcrpFeFlC8WCruUR456hw==", + "engines": { + "node": ">=0.10.0" + } + }, "node_modules/webpack-dev-server/node_modules/readdirp": { "version": "2.2.1", "resolved": "https://registry.npmjs.org/readdirp/-/readdirp-2.2.1.tgz", @@ -20712,6 +23933,14 @@ "node": ">= 4" } }, + "node_modules/webpack-dev-server/node_modules/semver": { + "version": "6.3.0", + "resolved": "https://registry.npmjs.org/semver/-/semver-6.3.0.tgz", + "integrity": "sha512-b39TBaTSfV6yBrapU89p5fKekE2m/NwnDocOVruQFS1/veMgdzuPcnOM34M6CwxW8jH/lxEa5rBoDeUwu5HHTw==", + "bin": { + "semver": "bin/semver.js" + } + }, "node_modules/webpack-dev-server/node_modules/strip-ansi": { "version": "3.0.1", "resolved": "https://registry.npmjs.org/strip-ansi/-/strip-ansi-3.0.1.tgz", @@ -20734,6 +23963,18 @@ "node": ">=6" } }, + "node_modules/webpack-dev-server/node_modules/to-regex-range": { + "version": "2.1.1", + "resolved": "https://registry.npmjs.org/to-regex-range/-/to-regex-range-2.1.1.tgz", + "integrity": "sha512-ZZWNfCjUokXXDGXFpZehJIkZqq91BcULFq/Pi7M5i4JnxXdhMKAK682z8bCW3o8Hj1wuuzoKcW3DfVzaP6VuNg==", + "dependencies": { + "is-number": "^3.0.0", + "repeat-string": "^1.6.1" + }, + "engines": { + "node": ">=0.10.0" + } + }, "node_modules/webpack-dev-server/node_modules/ws": { "version": "6.2.2", "resolved": "https://registry.npmjs.org/ws/-/ws-6.2.2.tgz", @@ -20813,6 +24054,37 @@ "node": ">=0.4.0" } }, + "node_modules/webpack/node_modules/braces": { + "version": "2.3.2", + "resolved": "https://registry.npmjs.org/braces/-/braces-2.3.2.tgz", + "integrity": "sha512-aNdbnj9P8PjdXU4ybaWLK2IF3jc/EoDYbC7AazW6to3TRsfXxscC9UXOB5iDiEQrkyIbWp2SLQda4+QAa7nc3w==", + "dependencies": { + "arr-flatten": "^1.1.0", + "array-unique": "^0.3.2", + "extend-shallow": "^2.0.1", + "fill-range": "^4.0.0", + "isobject": "^3.0.1", + "repeat-element": "^1.1.2", + "snapdragon": "^0.8.1", + "snapdragon-node": "^2.0.1", + "split-string": "^3.0.2", + "to-regex": "^3.0.1" + }, 
+ "engines": { + "node": ">=0.10.0" + } + }, + "node_modules/webpack/node_modules/braces/node_modules/extend-shallow": { + "version": "2.0.1", + "resolved": "https://registry.npmjs.org/extend-shallow/-/extend-shallow-2.0.1.tgz", + "integrity": "sha512-zCnTtlxNoAiDc3gqY2aYAWFx7XWWiasuF2K8Me5WbN8otHKTUKBwjPtNpRs/rbUZm7KxWAaNj7P1a/p52GbVug==", + "dependencies": { + "is-extendable": "^0.1.0" + }, + "engines": { + "node": ">=0.10.0" + } + }, "node_modules/webpack/node_modules/cacache": { "version": "12.0.4", "resolved": "https://registry.npmjs.org/cacache/-/cacache-12.0.4.tgz", @@ -20847,6 +24119,111 @@ "node": ">=4.0.0" } }, + "node_modules/webpack/node_modules/estraverse": { + "version": "4.3.0", + "resolved": "https://registry.npmjs.org/estraverse/-/estraverse-4.3.0.tgz", + "integrity": "sha512-39nnKffWz8xN1BU/2c79n9nB9HDzo0niYUqx6xyqUnyoAnQyyWpOTdZEeiCch8BBu515t4wp9ZmgVfVhn9EBpw==", + "engines": { + "node": ">=4.0" + } + }, + "node_modules/webpack/node_modules/fill-range": { + "version": "4.0.0", + "resolved": "https://registry.npmjs.org/fill-range/-/fill-range-4.0.0.tgz", + "integrity": "sha512-VcpLTWqWDiTerugjj8e3+esbg+skS3M9e54UuR3iCeIDMXCLTsAH8hTSzDQU/X6/6t3eYkOKoZSef2PlU6U1XQ==", + "dependencies": { + "extend-shallow": "^2.0.1", + "is-number": "^3.0.0", + "repeat-string": "^1.6.1", + "to-regex-range": "^2.1.0" + }, + "engines": { + "node": ">=0.10.0" + } + }, + "node_modules/webpack/node_modules/fill-range/node_modules/extend-shallow": { + "version": "2.0.1", + "resolved": "https://registry.npmjs.org/extend-shallow/-/extend-shallow-2.0.1.tgz", + "integrity": "sha512-zCnTtlxNoAiDc3gqY2aYAWFx7XWWiasuF2K8Me5WbN8otHKTUKBwjPtNpRs/rbUZm7KxWAaNj7P1a/p52GbVug==", + "dependencies": { + "is-extendable": "^0.1.0" + }, + "engines": { + "node": ">=0.10.0" + } + }, + "node_modules/webpack/node_modules/is-number": { + "version": "3.0.0", + "resolved": "https://registry.npmjs.org/is-number/-/is-number-3.0.0.tgz", + "integrity": 
"sha512-4cboCqIpliH+mAvFNegjZQ4kgKc3ZUhQVr3HvWbSh5q3WH2v82ct+T2Y1hdU5Gdtorx/cLifQjqCbL7bpznLTg==", + "dependencies": { + "kind-of": "^3.0.2" + }, + "engines": { + "node": ">=0.10.0" + } + }, + "node_modules/webpack/node_modules/is-number/node_modules/kind-of": { + "version": "3.2.2", + "resolved": "https://registry.npmjs.org/kind-of/-/kind-of-3.2.2.tgz", + "integrity": "sha512-NOW9QQXMoZGg/oqnVNoNTTIFEIid1627WCffUBJEdMxYApq7mNE7CpzucIPc+ZQg25Phej7IJSmX3hO+oblOtQ==", + "dependencies": { + "is-buffer": "^1.1.5" + }, + "engines": { + "node": ">=0.10.0" + } + }, + "node_modules/webpack/node_modules/kind-of": { + "version": "6.0.3", + "resolved": "https://registry.npmjs.org/kind-of/-/kind-of-6.0.3.tgz", + "integrity": "sha512-dcS1ul+9tmeD95T+x28/ehLgd9mENa3LsvDTtzm3vyBEO7RPptvAD+t44WVXaUjTBRcrpFeFlC8WCruUR456hw==", + "engines": { + "node": ">=0.10.0" + } + }, + "node_modules/webpack/node_modules/lru-cache": { + "version": "5.1.1", + "resolved": "https://registry.npmjs.org/lru-cache/-/lru-cache-5.1.1.tgz", + "integrity": "sha512-KpNARQA3Iwv+jTA0utUVVbrh+Jlrr1Fv0e56GGzAFOXN7dk/FviaDW8LHmK52DlcH4WP2n6gI8vN1aesBFgo9w==", + "dependencies": { + "yallist": "^3.0.2" + } + }, + "node_modules/webpack/node_modules/micromatch": { + "version": "3.1.10", + "resolved": "https://registry.npmjs.org/micromatch/-/micromatch-3.1.10.tgz", + "integrity": "sha512-MWikgl9n9M3w+bpsY3He8L+w9eF9338xRl8IAO5viDizwSzziFEyUzo2xrrloB64ADbTf8uA8vRqqttDTOmccg==", + "dependencies": { + "arr-diff": "^4.0.0", + "array-unique": "^0.3.2", + "braces": "^2.3.1", + "define-property": "^2.0.2", + "extend-shallow": "^3.0.2", + "extglob": "^2.0.4", + "fragment-cache": "^0.2.1", + "kind-of": "^6.0.2", + "nanomatch": "^1.2.9", + "object.pick": "^1.3.0", + "regex-not": "^1.0.0", + "snapdragon": "^0.8.1", + "to-regex": "^3.0.2" + }, + "engines": { + "node": ">=0.10.0" + } + }, + "node_modules/webpack/node_modules/rimraf": { + "version": "2.7.1", + "resolved": "https://registry.npmjs.org/rimraf/-/rimraf-2.7.1.tgz", + 
"integrity": "sha512-uWjbaKIK3T1OSVptzX7Nl6PvQ3qAGtKEtVRjRuazjfL3Bx5eI409VZSqgND+4UNnmzLVdPj9FqFJNPqBZFve4w==", + "dependencies": { + "glob": "^7.1.3" + }, + "bin": { + "rimraf": "bin.js" + } + }, "node_modules/webpack/node_modules/schema-utils": { "version": "1.0.0", "resolved": "https://registry.npmjs.org/schema-utils/-/schema-utils-1.0.0.tgz", @@ -20890,6 +24267,23 @@ "webpack": "^4.0.0" } }, + "node_modules/webpack/node_modules/to-regex-range": { + "version": "2.1.1", + "resolved": "https://registry.npmjs.org/to-regex-range/-/to-regex-range-2.1.1.tgz", + "integrity": "sha512-ZZWNfCjUokXXDGXFpZehJIkZqq91BcULFq/Pi7M5i4JnxXdhMKAK682z8bCW3o8Hj1wuuzoKcW3DfVzaP6VuNg==", + "dependencies": { + "is-number": "^3.0.0", + "repeat-string": "^1.6.1" + }, + "engines": { + "node": ">=0.10.0" + } + }, + "node_modules/webpack/node_modules/yallist": { + "version": "3.1.1", + "resolved": "https://registry.npmjs.org/yallist/-/yallist-3.1.1.tgz", + "integrity": "sha512-a4UGQaWPH59mOXUYnAG2ewncQS4i4F43Tv3JoAM+s2VDAmS9NsK8GpDMLrCHPksFT7h3K6TOoUNn2pb7RoXx4g==" + }, "node_modules/websocket-driver": { "version": "0.6.5", "resolved": "https://registry.npmjs.org/websocket-driver/-/websocket-driver-0.6.5.tgz", @@ -20938,14 +24332,17 @@ } }, "node_modules/which": { - "version": "1.3.1", - "resolved": "https://registry.npmjs.org/which/-/which-1.3.1.tgz", - "integrity": "sha512-HxJdYWq1MTIQbJ3nw0cqssHoTNU267KlrDuGZ1WYlxDStUtKUhOaJmh112/TZmHxxUfuJqPXSOm7tDyas0OSIQ==", + "version": "2.0.2", + "resolved": "https://registry.npmjs.org/which/-/which-2.0.2.tgz", + "integrity": "sha512-BLI3Tl1TW3Pvl70l3yq3Y64i+awpwXqsGBYWkkqMtnbXgrMD+yj7rhW0kuEDxzJaYXGjEW5ogapKNMEKNMjibA==", "dependencies": { "isexe": "^2.0.0" }, "bin": { - "which": "bin/which" + "node-which": "bin/node-which" + }, + "engines": { + "node": ">= 8" } }, "node_modules/which-boxed-primitive": { @@ -21173,6 +24570,14 @@ "node": ">=6" } }, + "node_modules/wrap-ansi/node_modules/ansi-regex": { + "version": "4.1.1", + "resolved": 
"https://registry.npmjs.org/ansi-regex/-/ansi-regex-4.1.1.tgz", + "integrity": "sha512-ILlv4k/3f6vfQ4OoP2AGvirOktlQ98ZEL1k9FaQjxa3L1abBgbuTDAdPOpvbGncC0BTVQrl+OM8xZGK6tWXt7g==", + "engines": { + "node": ">=6" + } + }, "node_modules/wrap-ansi/node_modules/ansi-styles": { "version": "3.2.1", "resolved": "https://registry.npmjs.org/ansi-styles/-/ansi-styles-3.2.1.tgz", @@ -21197,6 +24602,11 @@ "resolved": "https://registry.npmjs.org/color-name/-/color-name-1.1.3.tgz", "integrity": "sha512-72fSenhMw2HZMTVHeCA9KCmpEIbzWiQsjN+BHcBbS9vr1mtt+vJjPdksIBNUmKAW8TFUDPJK5SUU3QhE9NEXDw==" }, + "node_modules/wrap-ansi/node_modules/emoji-regex": { + "version": "7.0.3", + "resolved": "https://registry.npmjs.org/emoji-regex/-/emoji-regex-7.0.3.tgz", + "integrity": "sha512-CwBLREIQ7LvYFB0WyRvwhq5N5qPhc6PMjD6bYggFlI5YyDgl+0vxq5VHbMOFqLg7hfWzmu8T5Z1QofhmTIhItA==" + }, "node_modules/wrap-ansi/node_modules/is-fullwidth-code-point": { "version": "2.0.0", "resolved": "https://registry.npmjs.org/is-fullwidth-code-point/-/is-fullwidth-code-point-2.0.0.tgz", @@ -21218,6 +24628,17 @@ "node": ">=6" } }, + "node_modules/wrap-ansi/node_modules/strip-ansi": { + "version": "5.2.0", + "resolved": "https://registry.npmjs.org/strip-ansi/-/strip-ansi-5.2.0.tgz", + "integrity": "sha512-DuRs1gKbBqsMKIZlrffwlug8MHkcnpjs5VPmL1PAh+mA30U0DTotfDZ0d2UUsXpPmPmMMJ6W773MaA3J+lbiWA==", + "dependencies": { + "ansi-regex": "^4.1.0" + }, + "engines": { + "node": ">=6" + } + }, "node_modules/wrappy": { "version": "1.0.2", "resolved": "https://registry.npmjs.org/wrappy/-/wrappy-1.0.2.tgz", @@ -21284,9 +24705,9 @@ "integrity": "sha512-JKhqTOwSrqNA1NY5lSztJ1GrBiUodLMmIZuLiDaMRJ+itFd+ABVE8XBjOvIWL+rSqNDC74LCSFmlb/U4UZ4hJQ==" }, "node_modules/yallist": { - "version": "3.1.1", - "resolved": "https://registry.npmjs.org/yallist/-/yallist-3.1.1.tgz", - "integrity": "sha512-a4UGQaWPH59mOXUYnAG2ewncQS4i4F43Tv3JoAM+s2VDAmS9NsK8GpDMLrCHPksFT7h3K6TOoUNn2pb7RoXx4g==" + "version": "4.0.0", + "resolved": 
"https://registry.npmjs.org/yallist/-/yallist-4.0.0.tgz", + "integrity": "sha512-3wdGidZyq5PB084XLES5TpOSRA3wjXAlIWMhum2kRcv/41Sn2emQ0dycQW4uZXLejwKvg6EsvbdlVL+FYEct7A==" }, "node_modules/yaml": { "version": "1.10.2", @@ -21335,6 +24756,30 @@ "decamelize": "^1.2.0" } }, + "node_modules/yargs/node_modules/ansi-regex": { + "version": "4.1.1", + "resolved": "https://registry.npmjs.org/ansi-regex/-/ansi-regex-4.1.1.tgz", + "integrity": "sha512-ILlv4k/3f6vfQ4OoP2AGvirOktlQ98ZEL1k9FaQjxa3L1abBgbuTDAdPOpvbGncC0BTVQrl+OM8xZGK6tWXt7g==", + "engines": { + "node": ">=6" + } + }, + "node_modules/yargs/node_modules/emoji-regex": { + "version": "7.0.3", + "resolved": "https://registry.npmjs.org/emoji-regex/-/emoji-regex-7.0.3.tgz", + "integrity": "sha512-CwBLREIQ7LvYFB0WyRvwhq5N5qPhc6PMjD6bYggFlI5YyDgl+0vxq5VHbMOFqLg7hfWzmu8T5Z1QofhmTIhItA==" + }, + "node_modules/yargs/node_modules/find-up": { + "version": "3.0.0", + "resolved": "https://registry.npmjs.org/find-up/-/find-up-3.0.0.tgz", + "integrity": "sha512-1yD6RmLI1XBfxugvORwlck6f75tYL+iR0jqwsOrOxMZyGYqUuDhJ0l4AXdO1iX/FTs9cBAMEk1gWSEx1kSbylg==", + "dependencies": { + "locate-path": "^3.0.0" + }, + "engines": { + "node": ">=6" + } + }, "node_modules/yargs/node_modules/is-fullwidth-code-point": { "version": "2.0.0", "resolved": "https://registry.npmjs.org/is-fullwidth-code-point/-/is-fullwidth-code-point-2.0.0.tgz", @@ -21343,6 +24788,51 @@ "node": ">=4" } }, + "node_modules/yargs/node_modules/locate-path": { + "version": "3.0.0", + "resolved": "https://registry.npmjs.org/locate-path/-/locate-path-3.0.0.tgz", + "integrity": "sha512-7AO748wWnIhNqAuaty2ZWHkQHRSNfPVIsPIfwEOWO22AmaoVrWavlOcMR5nzTLNYvp36X220/maaRsrec1G65A==", + "dependencies": { + "p-locate": "^3.0.0", + "path-exists": "^3.0.0" + }, + "engines": { + "node": ">=6" + } + }, + "node_modules/yargs/node_modules/p-limit": { + "version": "2.3.0", + "resolved": "https://registry.npmjs.org/p-limit/-/p-limit-2.3.0.tgz", + "integrity": 
"sha512-//88mFWSJx8lxCzwdAABTJL2MyWB12+eIY7MDL2SqLmAkeKU9qxRvWuSyTjm3FUmpBEMuFfckAIqEaVGUDxb6w==", + "dependencies": { + "p-try": "^2.0.0" + }, + "engines": { + "node": ">=6" + }, + "funding": { + "url": "https://github.com/sponsors/sindresorhus" + } + }, + "node_modules/yargs/node_modules/p-locate": { + "version": "3.0.0", + "resolved": "https://registry.npmjs.org/p-locate/-/p-locate-3.0.0.tgz", + "integrity": "sha512-x+12w/To+4GFfgJhBEpiDcLozRJGegY+Ei7/z0tSLkMmxGZNybVMSfWj9aJn8Z5Fc7dBUNJOOVgPv2H7IwulSQ==", + "dependencies": { + "p-limit": "^2.0.0" + }, + "engines": { + "node": ">=6" + } + }, + "node_modules/yargs/node_modules/p-try": { + "version": "2.2.0", + "resolved": "https://registry.npmjs.org/p-try/-/p-try-2.2.0.tgz", + "integrity": "sha512-R4nPAVTAU0B9D35/Gk3uJf/7XYbQcyohSKdvAxIRSNghFl4e71hVoGnBNQz9cWaXxO2I10KTC+3jMdvvoKw6dQ==", + "engines": { + "node": ">=6" + } + }, "node_modules/yargs/node_modules/string-width": { "version": "3.1.0", "resolved": "https://registry.npmjs.org/string-width/-/string-width-3.1.0.tgz", @@ -21356,6 +24846,17 @@ "node": ">=6" } }, + "node_modules/yargs/node_modules/strip-ansi": { + "version": "5.2.0", + "resolved": "https://registry.npmjs.org/strip-ansi/-/strip-ansi-5.2.0.tgz", + "integrity": "sha512-DuRs1gKbBqsMKIZlrffwlug8MHkcnpjs5VPmL1PAh+mA30U0DTotfDZ0d2UUsXpPmPmMMJ6W773MaA3J+lbiWA==", + "dependencies": { + "ansi-regex": "^4.1.0" + }, + "engines": { + "node": ">=6" + } + }, "node_modules/zwitch": { "version": "2.0.2", "resolved": "https://registry.npmjs.org/zwitch/-/zwitch-2.0.2.tgz", @@ -21420,6 +24921,13 @@ "gensync": "^1.0.0-beta.2", "json5": "^2.2.1", "semver": "^6.3.0" + }, + "dependencies": { + "semver": { + "version": "6.3.0", + "resolved": "https://registry.npmjs.org/semver/-/semver-6.3.0.tgz", + "integrity": "sha512-b39TBaTSfV6yBrapU89p5fKekE2m/NwnDocOVruQFS1/veMgdzuPcnOM34M6CwxW8jH/lxEa5rBoDeUwu5HHTw==" + } } }, "@babel/generator": { @@ -21458,6 +24966,13 @@ "@babel/helper-validator-option": "^7.16.7", 
"browserslist": "^4.20.2", "semver": "^6.3.0" + }, + "dependencies": { + "semver": { + "version": "6.3.0", + "resolved": "https://registry.npmjs.org/semver/-/semver-6.3.0.tgz", + "integrity": "sha512-b39TBaTSfV6yBrapU89p5fKekE2m/NwnDocOVruQFS1/veMgdzuPcnOM34M6CwxW8jH/lxEa5rBoDeUwu5HHTw==" + } } }, "@babel/helper-create-class-features-plugin": { @@ -21496,6 +25011,13 @@ "lodash.debounce": "^4.0.8", "resolve": "^1.14.2", "semver": "^6.1.2" + }, + "dependencies": { + "semver": { + "version": "6.3.0", + "resolved": "https://registry.npmjs.org/semver/-/semver-6.3.0.tgz", + "integrity": "sha512-b39TBaTSfV6yBrapU89p5fKekE2m/NwnDocOVruQFS1/veMgdzuPcnOM34M6CwxW8jH/lxEa5rBoDeUwu5HHTw==" + } } }, "@babel/helper-environment-visitor": { @@ -21690,6 +25212,11 @@ "resolved": "https://registry.npmjs.org/color-name/-/color-name-1.1.3.tgz", "integrity": "sha512-72fSenhMw2HZMTVHeCA9KCmpEIbzWiQsjN+BHcBbS9vr1mtt+vJjPdksIBNUmKAW8TFUDPJK5SUU3QhE9NEXDw==" }, + "escape-string-regexp": { + "version": "1.0.5", + "resolved": "https://registry.npmjs.org/escape-string-regexp/-/escape-string-regexp-1.0.5.tgz", + "integrity": "sha512-vbRorB5FUQWvla16U8R/qgaFIya2qGzwDrNmCZuYKrbdSUMG6I1ZCGQRefkRVhuOkIGVne7BQ35DSfo1qvJqFg==" + }, "has-flag": { "version": "3.0.0", "resolved": "https://registry.npmjs.org/has-flag/-/has-flag-3.0.0.tgz", @@ -22080,6 +25607,13 @@ "@babel/helper-replace-supers": "^7.18.2", "@babel/helper-split-export-declaration": "^7.16.7", "globals": "^11.1.0" + }, + "dependencies": { + "globals": { + "version": "11.12.0", + "resolved": "https://registry.npmjs.org/globals/-/globals-11.12.0.tgz", + "integrity": "sha512-WOBp/EEGUiIsJSp7wcv/y6MO+lV9UoncWqxuFfm8eBwzWNgyfBd6Gz+IeKQ9jCmyhoH99g15M3T+QaVHFjizVA==" + } } }, "@babel/plugin-transform-computed-properties": { @@ -22495,6 +26029,13 @@ "babel-plugin-polyfill-regenerator": "^0.3.0", "core-js-compat": "^3.22.1", "semver": "^6.3.0" + }, + "dependencies": { + "semver": { + "version": "6.3.0", + "resolved": 
"https://registry.npmjs.org/semver/-/semver-6.3.0.tgz", + "integrity": "sha512-b39TBaTSfV6yBrapU89p5fKekE2m/NwnDocOVruQFS1/veMgdzuPcnOM34M6CwxW8jH/lxEa5rBoDeUwu5HHTw==" + } } }, "@babel/preset-modules": { @@ -22573,6 +26114,13 @@ "@babel/types": "^7.18.4", "debug": "^4.1.0", "globals": "^11.1.0" + }, + "dependencies": { + "globals": { + "version": "11.12.0", + "resolved": "https://registry.npmjs.org/globals/-/globals-11.12.0.tgz", + "integrity": "sha512-WOBp/EEGUiIsJSp7wcv/y6MO+lV9UoncWqxuFfm8eBwzWNgyfBd6Gz+IeKQ9jCmyhoH99g15M3T+QaVHFjizVA==" + } } }, "@babel/types": { @@ -22621,6 +26169,37 @@ "resolved": "https://registry.npmjs.org/@emotion/unitless/-/unitless-0.7.5.tgz", "integrity": "sha512-OWORNpfjMsSSUBVrRBVGECkhWcULOAJz9ZW8uK9qgxD+87M7jHRcvh/A96XXNhXTLmKcoYSQtBEX7lHMO7YRwg==" }, + "@eslint/eslintrc": { + "version": "1.3.0", + "resolved": "https://registry.npmjs.org/@eslint/eslintrc/-/eslintrc-1.3.0.tgz", + "integrity": "sha512-UWW0TMTmk2d7hLcWD1/e2g5HDM/HQ3csaLSqXCfqwh4uNDuNqlaKWXmEsL4Cs41Z0KnILNvwbHAah3C2yt06kw==", + "requires": { + "ajv": "^6.12.4", + "debug": "^4.3.2", + "espree": "^9.3.2", + "globals": "^13.15.0", + "ignore": "^5.2.0", + "import-fresh": "^3.2.1", + "js-yaml": "^4.1.0", + "minimatch": "^3.1.2", + "strip-json-comments": "^3.1.1" + }, + "dependencies": { + "argparse": { + "version": "2.0.1", + "resolved": "https://registry.npmjs.org/argparse/-/argparse-2.0.1.tgz", + "integrity": "sha512-8+9WqebbFzpX9OR+Wa6O29asIogeRMzcGtAINdpMHHyAg10f05aSFVBbcEqGf/PXw1EjAZ+q2/bEBg3DvurK3Q==" + }, + "js-yaml": { + "version": "4.1.0", + "resolved": "https://registry.npmjs.org/js-yaml/-/js-yaml-4.1.0.tgz", + "integrity": "sha512-wpxZs9NoxZaJESJGIZTyDEaYpl0FKSA+FB9aJiyemKhMwkxQg63h4T1KJgUGHpTqPDNRcmmYLugrRjJlBtWvRA==", + "requires": { + "argparse": "^2.0.1" + } + } + } + }, "@hapi/address": { "version": "2.1.4", "resolved": "https://registry.npmjs.org/@hapi/address/-/address-2.1.4.tgz", @@ -22655,6 +26234,21 @@ "@hapi/hoek": "^8.3.0" } }, + 
"@humanwhocodes/config-array": { + "version": "0.9.5", + "resolved": "https://registry.npmjs.org/@humanwhocodes/config-array/-/config-array-0.9.5.tgz", + "integrity": "sha512-ObyMyWxZiCu/yTisA7uzx81s40xR2fD5Cg/2Kq7G02ajkNubJf6BopgDTmDyc3U7sXpNKM8cYOw7s7Tyr+DnCw==", + "requires": { + "@humanwhocodes/object-schema": "^1.2.1", + "debug": "^4.1.1", + "minimatch": "^3.0.4" + } + }, + "@humanwhocodes/object-schema": { + "version": "1.2.1", + "resolved": "https://registry.npmjs.org/@humanwhocodes/object-schema/-/object-schema-1.2.1.tgz", + "integrity": "sha512-ZnQMnLV4e7hDlUvw8H+U8ASL02SS2Gn6+9Ac3wGGLIe7+je2AeAOxPY+izIPJDfFDb7eDjev0Us8MO1iFRN8hA==" + }, "@hypnosphi/create-react-context": { "version": "0.3.1", "resolved": "https://registry.npmjs.org/@hypnosphi/create-react-context/-/create-react-context-0.3.1.tgz", @@ -22705,11 +26299,21 @@ "resolved": "https://registry.npmjs.org/color-name/-/color-name-1.1.3.tgz", "integrity": "sha512-72fSenhMw2HZMTVHeCA9KCmpEIbzWiQsjN+BHcBbS9vr1mtt+vJjPdksIBNUmKAW8TFUDPJK5SUU3QhE9NEXDw==" }, + "escape-string-regexp": { + "version": "1.0.5", + "resolved": "https://registry.npmjs.org/escape-string-regexp/-/escape-string-regexp-1.0.5.tgz", + "integrity": "sha512-vbRorB5FUQWvla16U8R/qgaFIya2qGzwDrNmCZuYKrbdSUMG6I1ZCGQRefkRVhuOkIGVne7BQ35DSfo1qvJqFg==" + }, "has-flag": { "version": "3.0.0", "resolved": "https://registry.npmjs.org/has-flag/-/has-flag-3.0.0.tgz", "integrity": "sha512-sKJf1+ceQBr4SMkvQnBDNDtf4TXpVhVGateu0t918bl30FnbE2m4vNLX+VWe/dpjlb+HugGYzW7uQXH98HPEYw==" }, + "slash": { + "version": "2.0.0", + "resolved": "https://registry.npmjs.org/slash/-/slash-2.0.0.tgz", + "integrity": "sha512-ZYKh3Wh2z1PpEXWr0MpSBZ0V6mZHAQfYevttO11c51CaWjGTaadiKZ+wVt1PbMlDV5qhMFslpZCemhwOK7C89A==" + }, "supports-color": { "version": "5.5.0", "resolved": "https://registry.npmjs.org/supports-color/-/supports-color-5.5.0.tgz", @@ -22755,10 +26359,10 @@ "strip-ansi": "^5.0.0" }, "dependencies": { - "ansi-escapes": { - "version": "3.2.0", - "resolved": 
"https://registry.npmjs.org/ansi-escapes/-/ansi-escapes-3.2.0.tgz", - "integrity": "sha512-cBhpre4ma+U0T1oM5fXg7Dy1Jw7zzwv7lt/GoCpr+hDQJoYnKVPLL4dCvSEFMmQurOQvSrwT7SL/DAlhBI97RQ==" + "ansi-regex": { + "version": "4.1.1", + "resolved": "https://registry.npmjs.org/ansi-regex/-/ansi-regex-4.1.1.tgz", + "integrity": "sha512-ILlv4k/3f6vfQ4OoP2AGvirOktlQ98ZEL1k9FaQjxa3L1abBgbuTDAdPOpvbGncC0BTVQrl+OM8xZGK6tWXt7g==" }, "ansi-styles": { "version": "3.2.1", @@ -22768,6 +26372,33 @@ "color-convert": "^1.9.0" } }, + "braces": { + "version": "2.3.2", + "resolved": "https://registry.npmjs.org/braces/-/braces-2.3.2.tgz", + "integrity": "sha512-aNdbnj9P8PjdXU4ybaWLK2IF3jc/EoDYbC7AazW6to3TRsfXxscC9UXOB5iDiEQrkyIbWp2SLQda4+QAa7nc3w==", + "requires": { + "arr-flatten": "^1.1.0", + "array-unique": "^0.3.2", + "extend-shallow": "^2.0.1", + "fill-range": "^4.0.0", + "isobject": "^3.0.1", + "repeat-element": "^1.1.2", + "snapdragon": "^0.8.1", + "snapdragon-node": "^2.0.1", + "split-string": "^3.0.2", + "to-regex": "^3.0.1" + }, + "dependencies": { + "extend-shallow": { + "version": "2.0.1", + "resolved": "https://registry.npmjs.org/extend-shallow/-/extend-shallow-2.0.1.tgz", + "integrity": "sha512-zCnTtlxNoAiDc3gqY2aYAWFx7XWWiasuF2K8Me5WbN8otHKTUKBwjPtNpRs/rbUZm7KxWAaNj7P1a/p52GbVug==", + "requires": { + "is-extendable": "^0.1.0" + } + } + } + }, "chalk": { "version": "2.4.2", "resolved": "https://registry.npmjs.org/chalk/-/chalk-2.4.2.tgz", @@ -22791,11 +26422,101 @@ "resolved": "https://registry.npmjs.org/color-name/-/color-name-1.1.3.tgz", "integrity": "sha512-72fSenhMw2HZMTVHeCA9KCmpEIbzWiQsjN+BHcBbS9vr1mtt+vJjPdksIBNUmKAW8TFUDPJK5SUU3QhE9NEXDw==" }, + "escape-string-regexp": { + "version": "1.0.5", + "resolved": "https://registry.npmjs.org/escape-string-regexp/-/escape-string-regexp-1.0.5.tgz", + "integrity": "sha512-vbRorB5FUQWvla16U8R/qgaFIya2qGzwDrNmCZuYKrbdSUMG6I1ZCGQRefkRVhuOkIGVne7BQ35DSfo1qvJqFg==" + }, + "fill-range": { + "version": "4.0.0", + "resolved": 
"https://registry.npmjs.org/fill-range/-/fill-range-4.0.0.tgz", + "integrity": "sha512-VcpLTWqWDiTerugjj8e3+esbg+skS3M9e54UuR3iCeIDMXCLTsAH8hTSzDQU/X6/6t3eYkOKoZSef2PlU6U1XQ==", + "requires": { + "extend-shallow": "^2.0.1", + "is-number": "^3.0.0", + "repeat-string": "^1.6.1", + "to-regex-range": "^2.1.0" + }, + "dependencies": { + "extend-shallow": { + "version": "2.0.1", + "resolved": "https://registry.npmjs.org/extend-shallow/-/extend-shallow-2.0.1.tgz", + "integrity": "sha512-zCnTtlxNoAiDc3gqY2aYAWFx7XWWiasuF2K8Me5WbN8otHKTUKBwjPtNpRs/rbUZm7KxWAaNj7P1a/p52GbVug==", + "requires": { + "is-extendable": "^0.1.0" + } + } + } + }, "has-flag": { "version": "3.0.0", "resolved": "https://registry.npmjs.org/has-flag/-/has-flag-3.0.0.tgz", "integrity": "sha512-sKJf1+ceQBr4SMkvQnBDNDtf4TXpVhVGateu0t918bl30FnbE2m4vNLX+VWe/dpjlb+HugGYzW7uQXH98HPEYw==" }, + "is-number": { + "version": "3.0.0", + "resolved": "https://registry.npmjs.org/is-number/-/is-number-3.0.0.tgz", + "integrity": "sha512-4cboCqIpliH+mAvFNegjZQ4kgKc3ZUhQVr3HvWbSh5q3WH2v82ct+T2Y1hdU5Gdtorx/cLifQjqCbL7bpznLTg==", + "requires": { + "kind-of": "^3.0.2" + }, + "dependencies": { + "kind-of": { + "version": "3.2.2", + "resolved": "https://registry.npmjs.org/kind-of/-/kind-of-3.2.2.tgz", + "integrity": "sha512-NOW9QQXMoZGg/oqnVNoNTTIFEIid1627WCffUBJEdMxYApq7mNE7CpzucIPc+ZQg25Phej7IJSmX3hO+oblOtQ==", + "requires": { + "is-buffer": "^1.1.5" + } + } + } + }, + "kind-of": { + "version": "6.0.3", + "resolved": "https://registry.npmjs.org/kind-of/-/kind-of-6.0.3.tgz", + "integrity": "sha512-dcS1ul+9tmeD95T+x28/ehLgd9mENa3LsvDTtzm3vyBEO7RPptvAD+t44WVXaUjTBRcrpFeFlC8WCruUR456hw==" + }, + "micromatch": { + "version": "3.1.10", + "resolved": "https://registry.npmjs.org/micromatch/-/micromatch-3.1.10.tgz", + "integrity": "sha512-MWikgl9n9M3w+bpsY3He8L+w9eF9338xRl8IAO5viDizwSzziFEyUzo2xrrloB64ADbTf8uA8vRqqttDTOmccg==", + "requires": { + "arr-diff": "^4.0.0", + "array-unique": "^0.3.2", + "braces": "^2.3.1", + 
"define-property": "^2.0.2", + "extend-shallow": "^3.0.2", + "extglob": "^2.0.4", + "fragment-cache": "^0.2.1", + "kind-of": "^6.0.2", + "nanomatch": "^1.2.9", + "object.pick": "^1.3.0", + "regex-not": "^1.0.0", + "snapdragon": "^0.8.1", + "to-regex": "^3.0.2" + } + }, + "rimraf": { + "version": "2.7.1", + "resolved": "https://registry.npmjs.org/rimraf/-/rimraf-2.7.1.tgz", + "integrity": "sha512-uWjbaKIK3T1OSVptzX7Nl6PvQ3qAGtKEtVRjRuazjfL3Bx5eI409VZSqgND+4UNnmzLVdPj9FqFJNPqBZFve4w==", + "requires": { + "glob": "^7.1.3" + } + }, + "slash": { + "version": "2.0.0", + "resolved": "https://registry.npmjs.org/slash/-/slash-2.0.0.tgz", + "integrity": "sha512-ZYKh3Wh2z1PpEXWr0MpSBZ0V6mZHAQfYevttO11c51CaWjGTaadiKZ+wVt1PbMlDV5qhMFslpZCemhwOK7C89A==" + }, + "strip-ansi": { + "version": "5.2.0", + "resolved": "https://registry.npmjs.org/strip-ansi/-/strip-ansi-5.2.0.tgz", + "integrity": "sha512-DuRs1gKbBqsMKIZlrffwlug8MHkcnpjs5VPmL1PAh+mA30U0DTotfDZ0d2UUsXpPmPmMMJ6W773MaA3J+lbiWA==", + "requires": { + "ansi-regex": "^4.1.0" + } + }, "supports-color": { "version": "5.5.0", "resolved": "https://registry.npmjs.org/supports-color/-/supports-color-5.5.0.tgz", @@ -22803,6 +26524,15 @@ "requires": { "has-flag": "^3.0.0" } + }, + "to-regex-range": { + "version": "2.1.1", + "resolved": "https://registry.npmjs.org/to-regex-range/-/to-regex-range-2.1.1.tgz", + "integrity": "sha512-ZZWNfCjUokXXDGXFpZehJIkZqq91BcULFq/Pi7M5i4JnxXdhMKAK682z8bCW3o8Hj1wuuzoKcW3DfVzaP6VuNg==", + "requires": { + "is-number": "^3.0.0", + "repeat-string": "^1.6.1" + } } } }, @@ -22886,11 +26616,21 @@ "resolved": "https://registry.npmjs.org/color-name/-/color-name-1.1.3.tgz", "integrity": "sha512-72fSenhMw2HZMTVHeCA9KCmpEIbzWiQsjN+BHcBbS9vr1mtt+vJjPdksIBNUmKAW8TFUDPJK5SUU3QhE9NEXDw==" }, + "escape-string-regexp": { + "version": "1.0.5", + "resolved": "https://registry.npmjs.org/escape-string-regexp/-/escape-string-regexp-1.0.5.tgz", + "integrity": 
"sha512-vbRorB5FUQWvla16U8R/qgaFIya2qGzwDrNmCZuYKrbdSUMG6I1ZCGQRefkRVhuOkIGVne7BQ35DSfo1qvJqFg==" + }, "has-flag": { "version": "3.0.0", "resolved": "https://registry.npmjs.org/has-flag/-/has-flag-3.0.0.tgz", "integrity": "sha512-sKJf1+ceQBr4SMkvQnBDNDtf4TXpVhVGateu0t918bl30FnbE2m4vNLX+VWe/dpjlb+HugGYzW7uQXH98HPEYw==" }, + "slash": { + "version": "2.0.0", + "resolved": "https://registry.npmjs.org/slash/-/slash-2.0.0.tgz", + "integrity": "sha512-ZYKh3Wh2z1PpEXWr0MpSBZ0V6mZHAQfYevttO11c51CaWjGTaadiKZ+wVt1PbMlDV5qhMFslpZCemhwOK7C89A==" + }, "supports-color": { "version": "5.5.0", "resolved": "https://registry.npmjs.org/supports-color/-/supports-color-5.5.0.tgz", @@ -22917,13 +26657,6 @@ "callsites": "^3.0.0", "graceful-fs": "^4.1.15", "source-map": "^0.6.0" - }, - "dependencies": { - "callsites": { - "version": "3.1.0", - "resolved": "https://registry.npmjs.org/callsites/-/callsites-3.1.0.tgz", - "integrity": "sha512-P8BjAsXvZS+VIDUI11hHCQEv74YT67YUi5JJFNWIqL235sBmjX4+qx9Muvls5ivyNENctx46xQLQ3aTuE7ssaQ==" - } } }, "@jest/test-result": { @@ -22978,6 +26711,33 @@ "color-convert": "^1.9.0" } }, + "braces": { + "version": "2.3.2", + "resolved": "https://registry.npmjs.org/braces/-/braces-2.3.2.tgz", + "integrity": "sha512-aNdbnj9P8PjdXU4ybaWLK2IF3jc/EoDYbC7AazW6to3TRsfXxscC9UXOB5iDiEQrkyIbWp2SLQda4+QAa7nc3w==", + "requires": { + "arr-flatten": "^1.1.0", + "array-unique": "^0.3.2", + "extend-shallow": "^2.0.1", + "fill-range": "^4.0.0", + "isobject": "^3.0.1", + "repeat-element": "^1.1.2", + "snapdragon": "^0.8.1", + "snapdragon-node": "^2.0.1", + "split-string": "^3.0.2", + "to-regex": "^3.0.1" + }, + "dependencies": { + "extend-shallow": { + "version": "2.0.1", + "resolved": "https://registry.npmjs.org/extend-shallow/-/extend-shallow-2.0.1.tgz", + "integrity": "sha512-zCnTtlxNoAiDc3gqY2aYAWFx7XWWiasuF2K8Me5WbN8otHKTUKBwjPtNpRs/rbUZm7KxWAaNj7P1a/p52GbVug==", + "requires": { + "is-extendable": "^0.1.0" + } + } + } + }, "chalk": { "version": "2.4.2", "resolved": 
"https://registry.npmjs.org/chalk/-/chalk-2.4.2.tgz", @@ -23001,11 +26761,85 @@ "resolved": "https://registry.npmjs.org/color-name/-/color-name-1.1.3.tgz", "integrity": "sha512-72fSenhMw2HZMTVHeCA9KCmpEIbzWiQsjN+BHcBbS9vr1mtt+vJjPdksIBNUmKAW8TFUDPJK5SUU3QhE9NEXDw==" }, + "escape-string-regexp": { + "version": "1.0.5", + "resolved": "https://registry.npmjs.org/escape-string-regexp/-/escape-string-regexp-1.0.5.tgz", + "integrity": "sha512-vbRorB5FUQWvla16U8R/qgaFIya2qGzwDrNmCZuYKrbdSUMG6I1ZCGQRefkRVhuOkIGVne7BQ35DSfo1qvJqFg==" + }, + "fill-range": { + "version": "4.0.0", + "resolved": "https://registry.npmjs.org/fill-range/-/fill-range-4.0.0.tgz", + "integrity": "sha512-VcpLTWqWDiTerugjj8e3+esbg+skS3M9e54UuR3iCeIDMXCLTsAH8hTSzDQU/X6/6t3eYkOKoZSef2PlU6U1XQ==", + "requires": { + "extend-shallow": "^2.0.1", + "is-number": "^3.0.0", + "repeat-string": "^1.6.1", + "to-regex-range": "^2.1.0" + }, + "dependencies": { + "extend-shallow": { + "version": "2.0.1", + "resolved": "https://registry.npmjs.org/extend-shallow/-/extend-shallow-2.0.1.tgz", + "integrity": "sha512-zCnTtlxNoAiDc3gqY2aYAWFx7XWWiasuF2K8Me5WbN8otHKTUKBwjPtNpRs/rbUZm7KxWAaNj7P1a/p52GbVug==", + "requires": { + "is-extendable": "^0.1.0" + } + } + } + }, "has-flag": { "version": "3.0.0", "resolved": "https://registry.npmjs.org/has-flag/-/has-flag-3.0.0.tgz", "integrity": "sha512-sKJf1+ceQBr4SMkvQnBDNDtf4TXpVhVGateu0t918bl30FnbE2m4vNLX+VWe/dpjlb+HugGYzW7uQXH98HPEYw==" }, + "is-number": { + "version": "3.0.0", + "resolved": "https://registry.npmjs.org/is-number/-/is-number-3.0.0.tgz", + "integrity": "sha512-4cboCqIpliH+mAvFNegjZQ4kgKc3ZUhQVr3HvWbSh5q3WH2v82ct+T2Y1hdU5Gdtorx/cLifQjqCbL7bpznLTg==", + "requires": { + "kind-of": "^3.0.2" + }, + "dependencies": { + "kind-of": { + "version": "3.2.2", + "resolved": "https://registry.npmjs.org/kind-of/-/kind-of-3.2.2.tgz", + "integrity": "sha512-NOW9QQXMoZGg/oqnVNoNTTIFEIid1627WCffUBJEdMxYApq7mNE7CpzucIPc+ZQg25Phej7IJSmX3hO+oblOtQ==", + "requires": { + "is-buffer": 
"^1.1.5" + } + } + } + }, + "kind-of": { + "version": "6.0.3", + "resolved": "https://registry.npmjs.org/kind-of/-/kind-of-6.0.3.tgz", + "integrity": "sha512-dcS1ul+9tmeD95T+x28/ehLgd9mENa3LsvDTtzm3vyBEO7RPptvAD+t44WVXaUjTBRcrpFeFlC8WCruUR456hw==" + }, + "micromatch": { + "version": "3.1.10", + "resolved": "https://registry.npmjs.org/micromatch/-/micromatch-3.1.10.tgz", + "integrity": "sha512-MWikgl9n9M3w+bpsY3He8L+w9eF9338xRl8IAO5viDizwSzziFEyUzo2xrrloB64ADbTf8uA8vRqqttDTOmccg==", + "requires": { + "arr-diff": "^4.0.0", + "array-unique": "^0.3.2", + "braces": "^2.3.1", + "define-property": "^2.0.2", + "extend-shallow": "^3.0.2", + "extglob": "^2.0.4", + "fragment-cache": "^0.2.1", + "kind-of": "^6.0.2", + "nanomatch": "^1.2.9", + "object.pick": "^1.3.0", + "regex-not": "^1.0.0", + "snapdragon": "^0.8.1", + "to-regex": "^3.0.2" + } + }, + "slash": { + "version": "2.0.0", + "resolved": "https://registry.npmjs.org/slash/-/slash-2.0.0.tgz", + "integrity": "sha512-ZYKh3Wh2z1PpEXWr0MpSBZ0V6mZHAQfYevttO11c51CaWjGTaadiKZ+wVt1PbMlDV5qhMFslpZCemhwOK7C89A==" + }, "supports-color": { "version": "5.5.0", "resolved": "https://registry.npmjs.org/supports-color/-/supports-color-5.5.0.tgz", @@ -23013,6 +26847,15 @@ "requires": { "has-flag": "^3.0.0" } + }, + "to-regex-range": { + "version": "2.1.1", + "resolved": "https://registry.npmjs.org/to-regex-range/-/to-regex-range-2.1.1.tgz", + "integrity": "sha512-ZZWNfCjUokXXDGXFpZehJIkZqq91BcULFq/Pi7M5i4JnxXdhMKAK682z8bCW3o8Hj1wuuzoKcW3DfVzaP6VuNg==", + "requires": { + "is-number": "^3.0.0", + "repeat-string": "^1.6.1" + } } } }, @@ -23147,10 +26990,31 @@ "glob-to-regexp": "^0.3.0" } }, + "@nodelib/fs.scandir": { + "version": "2.1.5", + "resolved": "https://registry.npmjs.org/@nodelib/fs.scandir/-/fs.scandir-2.1.5.tgz", + "integrity": "sha512-vq24Bq3ym5HEQm2NKCr3yXDwjc7vTsEThRDnkp2DK9p1uqLR+DHurm/NOTo0KG7HYHU7eppKZj3MyqYuMBf62g==", + "dev": true, + "requires": { + "@nodelib/fs.stat": "2.0.5", + "run-parallel": "^1.1.9" + } + }, 
"@nodelib/fs.stat": { - "version": "1.1.3", - "resolved": "https://registry.npmjs.org/@nodelib/fs.stat/-/fs.stat-1.1.3.tgz", - "integrity": "sha512-shAmDyaQC4H92APFoIaVDHCx5bStIocgvbwQyxPRrbUY20V1EYTbSDchWbuwlMG3V17cprZhA6+78JfB+3DTPw==" + "version": "2.0.5", + "resolved": "https://registry.npmjs.org/@nodelib/fs.stat/-/fs.stat-2.0.5.tgz", + "integrity": "sha512-RkhPPp2zrqDAQA/2jNhnztcPAlv64XdhIp7a7454A5ovI7Bukxgt7MX7udwAu3zg1DcpPU0rz3VV1SeaqvY4+A==", + "dev": true + }, + "@nodelib/fs.walk": { + "version": "1.2.8", + "resolved": "https://registry.npmjs.org/@nodelib/fs.walk/-/fs.walk-1.2.8.tgz", + "integrity": "sha512-oGB+UxlgWcgQkgwo8GcEGwemoTFt3FIO9ababBmaGwXIoBKZ+GTy0pP185beGg7Llih/NSHSV2XAs1lnznocSg==", + "dev": true, + "requires": { + "@nodelib/fs.scandir": "2.1.5", + "fastq": "^1.6.0" + } }, "@sinclair/typebox": { "version": "0.23.5", @@ -23407,6 +27271,11 @@ "resolved": "https://registry.npmjs.org/@types/json-schema/-/json-schema-7.0.11.tgz", "integrity": "sha512-wOuvG1SN4Us4rez+tylwwwCV1psiNVOkJeM3AUWUNWg/jDQY2+HE/444y5gc+jBmRqASOm2Oeh5c1axHobwRKQ==" }, + "@types/json5": { + "version": "0.0.29", + "resolved": "https://registry.npmjs.org/@types/json5/-/json5-0.0.29.tgz", + "integrity": "sha512-dRLjCWHYg4oaA77cxO64oO+7JwCwnIzkZPdrrC71jQmQtlhM556pwKo5bUzqvZndkVbeFLIIi+9TC40JNF5hNQ==" + }, "@types/jss": { "version": "9.5.8", "resolved": "https://registry.npmjs.org/@types/jss/-/jss-9.5.8.tgz", @@ -23574,14 +27443,20 @@ "integrity": "sha512-iO9ZQHkZxHn4mSakYV0vFHAVDyEOIJQrV2uZ06HxEPcx+mt8swXoZHIbaaJ2crJYFfErySgktuTZ3BeLz+XmFA==" }, "@typescript-eslint/eslint-plugin": { - "version": "2.34.0", - "resolved": "https://registry.npmjs.org/@typescript-eslint/eslint-plugin/-/eslint-plugin-2.34.0.tgz", - "integrity": "sha512-4zY3Z88rEE99+CNvTbXSyovv2z9PNOVffTWD2W8QF5s2prBQtwN2zadqERcrHpcR7O/+KMI3fcTAmUUhK/iQcQ==", - "requires": { - "@typescript-eslint/experimental-utils": "2.34.0", + "version": "5.28.0", + "resolved": 
"https://registry.npmjs.org/@typescript-eslint/eslint-plugin/-/eslint-plugin-5.28.0.tgz", + "integrity": "sha512-DXVU6Cg29H2M6EybqSg2A+x8DgO9TCUBRp4QEXQHJceLS7ogVDP0g3Lkg/SZCqcvkAP/RruuQqK0gdlkgmhSUA==", + "dev": true, + "requires": { + "@typescript-eslint/scope-manager": "5.28.0", + "@typescript-eslint/type-utils": "5.28.0", + "@typescript-eslint/utils": "5.28.0", + "debug": "^4.3.4", "functional-red-black-tree": "^1.0.1", - "regexpp": "^3.0.0", - "tsutils": "^3.17.1" + "ignore": "^5.2.0", + "regexpp": "^3.2.0", + "semver": "^7.3.7", + "tsutils": "^3.21.0" } }, "@typescript-eslint/experimental-utils": { @@ -23593,56 +27468,115 @@ "@typescript-eslint/typescript-estree": "2.34.0", "eslint-scope": "^5.0.0", "eslint-utils": "^2.0.0" - } - }, - "@typescript-eslint/parser": { - "version": "2.34.0", - "resolved": "https://registry.npmjs.org/@typescript-eslint/parser/-/parser-2.34.0.tgz", - "integrity": "sha512-03ilO0ucSD0EPTw2X4PntSIRFtDPWjrVq7C3/Z3VQHRC7+13YB55rcJI3Jt+YgeHbjUdJPcPa7b23rXCBokuyA==", - "requires": { - "@types/eslint-visitor-keys": "^1.0.0", - "@typescript-eslint/experimental-utils": "2.34.0", - "@typescript-eslint/typescript-estree": "2.34.0", - "eslint-visitor-keys": "^1.1.0" - } - }, - "@typescript-eslint/typescript-estree": { - "version": "2.34.0", - "resolved": "https://registry.npmjs.org/@typescript-eslint/typescript-estree/-/typescript-estree-2.34.0.tgz", - "integrity": "sha512-OMAr+nJWKdlVM9LOqCqh3pQQPwxHAN7Du8DR6dmwCrAmxtiXQnhHJ6tBNtf+cggqfo51SG/FCwnKhXCIM7hnVg==", - "requires": { - "debug": "^4.1.1", - "eslint-visitor-keys": "^1.1.0", - "glob": "^7.1.6", - "is-glob": "^4.0.1", - "lodash": "^4.17.15", - "semver": "^7.3.2", - "tsutils": "^3.17.1" }, "dependencies": { - "lru-cache": { - "version": "6.0.0", - "resolved": "https://registry.npmjs.org/lru-cache/-/lru-cache-6.0.0.tgz", - "integrity": "sha512-Jo6dJ04CmSjuznwJSS3pUeWmd/H0ffTlkXXgwZi+eq1UCmqQwCh+eLsYOYCwY991i2Fah4h1BEMCx4qThGbsiA==", + "@typescript-eslint/typescript-estree": { + "version": 
"2.34.0", + "resolved": "https://registry.npmjs.org/@typescript-eslint/typescript-estree/-/typescript-estree-2.34.0.tgz", + "integrity": "sha512-OMAr+nJWKdlVM9LOqCqh3pQQPwxHAN7Du8DR6dmwCrAmxtiXQnhHJ6tBNtf+cggqfo51SG/FCwnKhXCIM7hnVg==", "requires": { - "yallist": "^4.0.0" + "debug": "^4.1.1", + "eslint-visitor-keys": "^1.1.0", + "glob": "^7.1.6", + "is-glob": "^4.0.1", + "lodash": "^4.17.15", + "semver": "^7.3.2", + "tsutils": "^3.17.1" } }, - "semver": { - "version": "7.3.7", - "resolved": "https://registry.npmjs.org/semver/-/semver-7.3.7.tgz", - "integrity": "sha512-QlYTucUYOews+WeEujDoEGziz4K6c47V/Bd+LjSSYcA94p+DmINdf7ncaUinThfvZyu13lN9OY1XDxt8C0Tw0g==", + "eslint-utils": { + "version": "2.1.0", + "resolved": "https://registry.npmjs.org/eslint-utils/-/eslint-utils-2.1.0.tgz", + "integrity": "sha512-w94dQYoauyvlDc43XnGB8lU3Zt713vNChgt4EWwhXAP2XkBvndfxF0AgIqKOOasjPIPzj9JqgwkwbCYD0/V3Zg==", "requires": { - "lru-cache": "^6.0.0" + "eslint-visitor-keys": "^1.1.0" } }, - "yallist": { - "version": "4.0.0", - "resolved": "https://registry.npmjs.org/yallist/-/yallist-4.0.0.tgz", - "integrity": "sha512-3wdGidZyq5PB084XLES5TpOSRA3wjXAlIWMhum2kRcv/41Sn2emQ0dycQW4uZXLejwKvg6EsvbdlVL+FYEct7A==" + "eslint-visitor-keys": { + "version": "1.3.0", + "resolved": "https://registry.npmjs.org/eslint-visitor-keys/-/eslint-visitor-keys-1.3.0.tgz", + "integrity": "sha512-6J72N8UNa462wa/KFODt/PJ3IU60SDpC3QXC1Hjc1BXXpfL2C9R5+AU7jhe0F6GREqVMh4Juu+NY7xn+6dipUQ==" } } }, + "@typescript-eslint/parser": { + "version": "5.28.0", + "resolved": "https://registry.npmjs.org/@typescript-eslint/parser/-/parser-5.28.0.tgz", + "integrity": "sha512-ekqoNRNK1lAcKhZESN/PdpVsWbP9jtiNqzFWkp/yAUdZvJalw2heCYuqRmM5eUJSIYEkgq5sGOjq+ZqsLMjtRA==", + "dev": true, + "requires": { + "@typescript-eslint/scope-manager": "5.28.0", + "@typescript-eslint/types": "5.28.0", + "@typescript-eslint/typescript-estree": "5.28.0", + "debug": "^4.3.4" + } + }, + "@typescript-eslint/scope-manager": { + "version": "5.28.0", + 
"resolved": "https://registry.npmjs.org/@typescript-eslint/scope-manager/-/scope-manager-5.28.0.tgz", + "integrity": "sha512-LeBLTqF/he1Z+boRhSqnso6YrzcKMTQ8bO/YKEe+6+O/JGof9M0g3IJlIsqfrK/6K03MlFIlycbf1uQR1IjE+w==", + "dev": true, + "requires": { + "@typescript-eslint/types": "5.28.0", + "@typescript-eslint/visitor-keys": "5.28.0" + } + }, + "@typescript-eslint/type-utils": { + "version": "5.28.0", + "resolved": "https://registry.npmjs.org/@typescript-eslint/type-utils/-/type-utils-5.28.0.tgz", + "integrity": "sha512-SyKjKh4CXPglueyC6ceAFytjYWMoPHMswPQae236zqe1YbhvCVQyIawesYywGiu98L9DwrxsBN69vGIVxJ4mQQ==", + "dev": true, + "requires": { + "@typescript-eslint/utils": "5.28.0", + "debug": "^4.3.4", + "tsutils": "^3.21.0" + } + }, + "@typescript-eslint/types": { + "version": "5.28.0", + "resolved": "https://registry.npmjs.org/@typescript-eslint/types/-/types-5.28.0.tgz", + "integrity": "sha512-2OOm8ZTOQxqkPbf+DAo8oc16sDlVR5owgJfKheBkxBKg1vAfw2JsSofH9+16VPlN9PWtv8Wzhklkqw3k/zCVxA==", + "dev": true + }, + "@typescript-eslint/typescript-estree": { + "version": "5.28.0", + "resolved": "https://registry.npmjs.org/@typescript-eslint/typescript-estree/-/typescript-estree-5.28.0.tgz", + "integrity": "sha512-9GX+GfpV+F4hdTtYc6OV9ZkyYilGXPmQpm6AThInpBmKJEyRSIjORJd1G9+bknb7OTFYL+Vd4FBJAO6T78OVqA==", + "dev": true, + "requires": { + "@typescript-eslint/types": "5.28.0", + "@typescript-eslint/visitor-keys": "5.28.0", + "debug": "^4.3.4", + "globby": "^11.1.0", + "is-glob": "^4.0.3", + "semver": "^7.3.7", + "tsutils": "^3.21.0" + } + }, + "@typescript-eslint/utils": { + "version": "5.28.0", + "resolved": "https://registry.npmjs.org/@typescript-eslint/utils/-/utils-5.28.0.tgz", + "integrity": "sha512-E60N5L0fjv7iPJV3UGc4EC+A3Lcj4jle9zzR0gW7vXhflO7/J29kwiTGITA2RlrmPokKiZbBy2DgaclCaEUs6g==", + "dev": true, + "requires": { + "@types/json-schema": "^7.0.9", + "@typescript-eslint/scope-manager": "5.28.0", + "@typescript-eslint/types": "5.28.0", + "@typescript-eslint/typescript-estree": 
"5.28.0", + "eslint-scope": "^5.1.1", + "eslint-utils": "^3.0.0" + } + }, + "@typescript-eslint/visitor-keys": { + "version": "5.28.0", + "resolved": "https://registry.npmjs.org/@typescript-eslint/visitor-keys/-/visitor-keys-5.28.0.tgz", + "integrity": "sha512-BtfP1vCor8cWacovzzPFOoeW4kBQxzmhxGoOpt0v1SFvG+nJ0cWaVdJk7cky1ArTcFHHKNIxyo2LLr3oNkSuXA==", + "dev": true, + "requires": { + "@typescript-eslint/types": "5.28.0", + "eslint-visitor-keys": "^3.3.0" + } + }, "@webassemblyjs/ast": { "version": "1.8.5", "resolved": "https://registry.npmjs.org/@webassemblyjs/ast/-/ast-1.8.5.tgz", @@ -23826,9 +27760,9 @@ } }, "acorn": { - "version": "7.4.1", - "resolved": "https://registry.npmjs.org/acorn/-/acorn-7.4.1.tgz", - "integrity": "sha512-nQyp0o1/mNdbTO1PO6kHkwSrmgZ0MT/jCCpNiwbUjGoRN4dlBhqJtoQuCnEOKzgTVwg0ZWiCoQy6SxMebQVh8A==" + "version": "8.7.1", + "resolved": "https://registry.npmjs.org/acorn/-/acorn-8.7.1.tgz", + "integrity": "sha512-Xx54uLJQZ19lKygFXOWsscKUbsBZW0CPykPhVQdhIeIwrbPmJzqeASDInc8nKBnp/JT6igTs82qPXz069H8I/A==" }, "acorn-globals": { "version": "4.3.4", @@ -23926,12 +27860,9 @@ "integrity": "sha512-hHUXGagefjN2iRrID63xckIvotOXOojhQKWIPUZ4mNUZ9nLZW+7FMNoE1lOkEhNWYsx/7ysGIuJYCiMAA9FnrA==" }, "ansi-escapes": { - "version": "4.3.2", - "resolved": "https://registry.npmjs.org/ansi-escapes/-/ansi-escapes-4.3.2.tgz", - "integrity": "sha512-gKXj5ALrKWQLsYG9jlTRmR/xKluxHV+Z9QEwNIgCfM1/uwPMCuzVVnh5mwTd+OuBZcwSIMbqssNWRm1lE51QaQ==", - "requires": { - "type-fest": "^0.21.3" - } + "version": "3.2.0", + "resolved": "https://registry.npmjs.org/ansi-escapes/-/ansi-escapes-3.2.0.tgz", + "integrity": "sha512-cBhpre4ma+U0T1oM5fXg7Dy1Jw7zzwv7lt/GoCpr+hDQJoYnKVPLL4dCvSEFMmQurOQvSrwT7SL/DAlhBI97RQ==" }, "ansi-html": { "version": "0.0.7", @@ -23960,6 +27891,97 @@ "normalize-path": "^2.1.1" }, "dependencies": { + "braces": { + "version": "2.3.2", + "resolved": "https://registry.npmjs.org/braces/-/braces-2.3.2.tgz", + "integrity": 
"sha512-aNdbnj9P8PjdXU4ybaWLK2IF3jc/EoDYbC7AazW6to3TRsfXxscC9UXOB5iDiEQrkyIbWp2SLQda4+QAa7nc3w==", + "requires": { + "arr-flatten": "^1.1.0", + "array-unique": "^0.3.2", + "extend-shallow": "^2.0.1", + "fill-range": "^4.0.0", + "isobject": "^3.0.1", + "repeat-element": "^1.1.2", + "snapdragon": "^0.8.1", + "snapdragon-node": "^2.0.1", + "split-string": "^3.0.2", + "to-regex": "^3.0.1" + }, + "dependencies": { + "extend-shallow": { + "version": "2.0.1", + "resolved": "https://registry.npmjs.org/extend-shallow/-/extend-shallow-2.0.1.tgz", + "integrity": "sha512-zCnTtlxNoAiDc3gqY2aYAWFx7XWWiasuF2K8Me5WbN8otHKTUKBwjPtNpRs/rbUZm7KxWAaNj7P1a/p52GbVug==", + "requires": { + "is-extendable": "^0.1.0" + } + } + } + }, + "fill-range": { + "version": "4.0.0", + "resolved": "https://registry.npmjs.org/fill-range/-/fill-range-4.0.0.tgz", + "integrity": "sha512-VcpLTWqWDiTerugjj8e3+esbg+skS3M9e54UuR3iCeIDMXCLTsAH8hTSzDQU/X6/6t3eYkOKoZSef2PlU6U1XQ==", + "requires": { + "extend-shallow": "^2.0.1", + "is-number": "^3.0.0", + "repeat-string": "^1.6.1", + "to-regex-range": "^2.1.0" + }, + "dependencies": { + "extend-shallow": { + "version": "2.0.1", + "resolved": "https://registry.npmjs.org/extend-shallow/-/extend-shallow-2.0.1.tgz", + "integrity": "sha512-zCnTtlxNoAiDc3gqY2aYAWFx7XWWiasuF2K8Me5WbN8otHKTUKBwjPtNpRs/rbUZm7KxWAaNj7P1a/p52GbVug==", + "requires": { + "is-extendable": "^0.1.0" + } + } + } + }, + "is-number": { + "version": "3.0.0", + "resolved": "https://registry.npmjs.org/is-number/-/is-number-3.0.0.tgz", + "integrity": "sha512-4cboCqIpliH+mAvFNegjZQ4kgKc3ZUhQVr3HvWbSh5q3WH2v82ct+T2Y1hdU5Gdtorx/cLifQjqCbL7bpznLTg==", + "requires": { + "kind-of": "^3.0.2" + }, + "dependencies": { + "kind-of": { + "version": "3.2.2", + "resolved": "https://registry.npmjs.org/kind-of/-/kind-of-3.2.2.tgz", + "integrity": "sha512-NOW9QQXMoZGg/oqnVNoNTTIFEIid1627WCffUBJEdMxYApq7mNE7CpzucIPc+ZQg25Phej7IJSmX3hO+oblOtQ==", + "requires": { + "is-buffer": "^1.1.5" + } + } + } + }, + "kind-of": { + 
"version": "6.0.3", + "resolved": "https://registry.npmjs.org/kind-of/-/kind-of-6.0.3.tgz", + "integrity": "sha512-dcS1ul+9tmeD95T+x28/ehLgd9mENa3LsvDTtzm3vyBEO7RPptvAD+t44WVXaUjTBRcrpFeFlC8WCruUR456hw==" + }, + "micromatch": { + "version": "3.1.10", + "resolved": "https://registry.npmjs.org/micromatch/-/micromatch-3.1.10.tgz", + "integrity": "sha512-MWikgl9n9M3w+bpsY3He8L+w9eF9338xRl8IAO5viDizwSzziFEyUzo2xrrloB64ADbTf8uA8vRqqttDTOmccg==", + "requires": { + "arr-diff": "^4.0.0", + "array-unique": "^0.3.2", + "braces": "^2.3.1", + "define-property": "^2.0.2", + "extend-shallow": "^3.0.2", + "extglob": "^2.0.4", + "fragment-cache": "^0.2.1", + "kind-of": "^6.0.2", + "nanomatch": "^1.2.9", + "object.pick": "^1.3.0", + "regex-not": "^1.0.0", + "snapdragon": "^0.8.1", + "to-regex": "^3.0.2" + } + }, "normalize-path": { "version": "2.1.1", "resolved": "https://registry.npmjs.org/normalize-path/-/normalize-path-2.1.1.tgz", @@ -23967,6 +27989,15 @@ "requires": { "remove-trailing-separator": "^1.0.1" } + }, + "to-regex-range": { + "version": "2.1.1", + "resolved": "https://registry.npmjs.org/to-regex-range/-/to-regex-range-2.1.1.tgz", + "integrity": "sha512-ZZWNfCjUokXXDGXFpZehJIkZqq91BcULFq/Pi7M5i4JnxXdhMKAK682z8bCW3o8Hj1wuuzoKcW3DfVzaP6VuNg==", + "requires": { + "is-number": "^3.0.0", + "repeat-string": "^1.6.1" + } } } }, @@ -23984,12 +28015,12 @@ } }, "aria-query": { - "version": "3.0.0", - "resolved": "https://registry.npmjs.org/aria-query/-/aria-query-3.0.0.tgz", - "integrity": "sha512-majUxHgLehQTeSA+hClx+DY09OVUqG3GtezWkF1krgLGNdlDu9l9V8DaqNMWbq4Eddc8wsyDA0hpDUtnYxQEXw==", + "version": "4.2.2", + "resolved": "https://registry.npmjs.org/aria-query/-/aria-query-4.2.2.tgz", + "integrity": "sha512-o/HelwhuKpTj/frsOsbNLNgnNGVIFsVP/SW2BSF14gVl7kAfMOJ6/8wUAUvG1R1NHKrfG+2sHZTu0yauT1qBrA==", "requires": { - "ast-types-flow": "0.0.7", - "commander": "^2.11.0" + "@babel/runtime": "^7.10.2", + "@babel/runtime-corejs3": "^7.10.2" } }, "arity-n": { @@ -24035,12 +28066,10 @@ } }, 
"array-union": { - "version": "1.0.2", - "resolved": "https://registry.npmjs.org/array-union/-/array-union-1.0.2.tgz", - "integrity": "sha512-Dxr6QJj/RdU/hCaBjOfxW+q6lyuVE6JFWIrAUpuOOhoJJoQ99cUn3igRaHVB5P9WrgFVN0FfArM3x0cueOU8ng==", - "requires": { - "array-uniq": "^1.0.1" - } + "version": "2.1.0", + "resolved": "https://registry.npmjs.org/array-union/-/array-union-2.1.0.tgz", + "integrity": "sha512-HGyxoOTYUyCM6stUe6EJgnd4EoewAI7zMdfqO+kGjnlZmBDz/cR5pf8r/cR4Wq60sL/p0IkcjUEEPwS3GFrIyw==", + "dev": true }, "array-uniq": { "version": "1.0.3", @@ -24063,6 +28092,17 @@ "es-shim-unscopables": "^1.0.0" } }, + "array.prototype.flatmap": { + "version": "1.3.0", + "resolved": "https://registry.npmjs.org/array.prototype.flatmap/-/array.prototype.flatmap-1.3.0.tgz", + "integrity": "sha512-PZC9/8TKAIxcWKdyeb77EzULHPrIX/tIZebLJUQOMR1OwYosT8yggdfWScfTBCDj5utONvOuPQQumYsU2ULbkg==", + "requires": { + "call-bind": "^1.0.2", + "define-properties": "^1.1.3", + "es-abstract": "^1.19.2", + "es-shim-unscopables": "^1.0.0" + } + }, "array.prototype.reduce": { "version": "1.0.4", "resolved": "https://registry.npmjs.org/array.prototype.reduce/-/array.prototype.reduce-1.0.4.tgz", @@ -24214,6 +28254,11 @@ "resolved": "https://registry.npmjs.org/aws4/-/aws4-1.11.0.tgz", "integrity": "sha512-xh1Rl34h6Fi1DC2WWKfxUTVqRsNnr6LsKz2+hfwDxQJWmrx8+c7ylaqBMcHfl1U1r2dsifOvKX3LQuLNZ+XSvA==" }, + "axe-core": { + "version": "4.4.2", + "resolved": "https://registry.npmjs.org/axe-core/-/axe-core-4.4.2.tgz", + "integrity": "sha512-LVAaGp/wkkgYJcjmHsoKx4juT1aQvJyPcW09MLCjVTh3V2cc6PnyempiLMNH5iMdfIX/zdbjUx2KDjMLCTdPeA==" + }, "axobject-query": { "version": "2.2.0", "resolved": "https://registry.npmjs.org/axobject-query/-/axobject-query-2.2.0.tgz", @@ -24251,6 +28296,11 @@ "supports-color": "^2.0.0" } }, + "escape-string-regexp": { + "version": "1.0.5", + "resolved": "https://registry.npmjs.org/escape-string-regexp/-/escape-string-regexp-1.0.5.tgz", + "integrity": 
"sha512-vbRorB5FUQWvla16U8R/qgaFIya2qGzwDrNmCZuYKrbdSUMG6I1ZCGQRefkRVhuOkIGVne7BQ35DSfo1qvJqFg==" + }, "js-tokens": { "version": "3.0.2", "resolved": "https://registry.npmjs.org/js-tokens/-/js-tokens-3.0.2.tgz", @@ -24282,6 +28332,13 @@ "@babel/types": "^7.7.0", "eslint-visitor-keys": "^1.0.0", "resolve": "^1.12.0" + }, + "dependencies": { + "eslint-visitor-keys": { + "version": "1.3.0", + "resolved": "https://registry.npmjs.org/eslint-visitor-keys/-/eslint-visitor-keys-1.3.0.tgz", + "integrity": "sha512-6J72N8UNa462wa/KFODt/PJ3IU60SDpC3QXC1Hjc1BXXpfL2C9R5+AU7jhe0F6GREqVMh4Juu+NY7xn+6dipUQ==" + } } }, "babel-extract-comments": { @@ -24337,11 +28394,21 @@ "resolved": "https://registry.npmjs.org/color-name/-/color-name-1.1.3.tgz", "integrity": "sha512-72fSenhMw2HZMTVHeCA9KCmpEIbzWiQsjN+BHcBbS9vr1mtt+vJjPdksIBNUmKAW8TFUDPJK5SUU3QhE9NEXDw==" }, + "escape-string-regexp": { + "version": "1.0.5", + "resolved": "https://registry.npmjs.org/escape-string-regexp/-/escape-string-regexp-1.0.5.tgz", + "integrity": "sha512-vbRorB5FUQWvla16U8R/qgaFIya2qGzwDrNmCZuYKrbdSUMG6I1ZCGQRefkRVhuOkIGVne7BQ35DSfo1qvJqFg==" + }, "has-flag": { "version": "3.0.0", "resolved": "https://registry.npmjs.org/has-flag/-/has-flag-3.0.0.tgz", "integrity": "sha512-sKJf1+ceQBr4SMkvQnBDNDtf4TXpVhVGateu0t918bl30FnbE2m4vNLX+VWe/dpjlb+HugGYzW7uQXH98HPEYw==" }, + "slash": { + "version": "2.0.0", + "resolved": "https://registry.npmjs.org/slash/-/slash-2.0.0.tgz", + "integrity": "sha512-ZYKh3Wh2z1PpEXWr0MpSBZ0V6mZHAQfYevttO11c51CaWjGTaadiKZ+wVt1PbMlDV5qhMFslpZCemhwOK7C89A==" + }, "supports-color": { "version": "5.5.0", "resolved": "https://registry.npmjs.org/supports-color/-/supports-color-5.5.0.tgz", @@ -24381,6 +28448,46 @@ "find-up": "^3.0.0", "istanbul-lib-instrument": "^3.3.0", "test-exclude": "^5.2.3" + }, + "dependencies": { + "find-up": { + "version": "3.0.0", + "resolved": "https://registry.npmjs.org/find-up/-/find-up-3.0.0.tgz", + "integrity": 
"sha512-1yD6RmLI1XBfxugvORwlck6f75tYL+iR0jqwsOrOxMZyGYqUuDhJ0l4AXdO1iX/FTs9cBAMEk1gWSEx1kSbylg==", + "requires": { + "locate-path": "^3.0.0" + } + }, + "locate-path": { + "version": "3.0.0", + "resolved": "https://registry.npmjs.org/locate-path/-/locate-path-3.0.0.tgz", + "integrity": "sha512-7AO748wWnIhNqAuaty2ZWHkQHRSNfPVIsPIfwEOWO22AmaoVrWavlOcMR5nzTLNYvp36X220/maaRsrec1G65A==", + "requires": { + "p-locate": "^3.0.0", + "path-exists": "^3.0.0" + } + }, + "p-limit": { + "version": "2.3.0", + "resolved": "https://registry.npmjs.org/p-limit/-/p-limit-2.3.0.tgz", + "integrity": "sha512-//88mFWSJx8lxCzwdAABTJL2MyWB12+eIY7MDL2SqLmAkeKU9qxRvWuSyTjm3FUmpBEMuFfckAIqEaVGUDxb6w==", + "requires": { + "p-try": "^2.0.0" + } + }, + "p-locate": { + "version": "3.0.0", + "resolved": "https://registry.npmjs.org/p-locate/-/p-locate-3.0.0.tgz", + "integrity": "sha512-x+12w/To+4GFfgJhBEpiDcLozRJGegY+Ei7/z0tSLkMmxGZNybVMSfWj9aJn8Z5Fc7dBUNJOOVgPv2H7IwulSQ==", + "requires": { + "p-limit": "^2.0.0" + } + }, + "p-try": { + "version": "2.2.0", + "resolved": "https://registry.npmjs.org/p-try/-/p-try-2.2.0.tgz", + "integrity": "sha512-R4nPAVTAU0B9D35/Gk3uJf/7XYbQcyohSKdvAxIRSNghFl4e71hVoGnBNQz9cWaXxO2I10KTC+3jMdvvoKw6dQ==" + } } }, "babel-plugin-jest-hoist": { @@ -24413,15 +28520,6 @@ "yaml": "^1.7.2" } }, - "import-fresh": { - "version": "3.3.0", - "resolved": "https://registry.npmjs.org/import-fresh/-/import-fresh-3.3.0.tgz", - "integrity": "sha512-veYYhQa+D1QBKznvhUHxb8faxlrwUnxseDAbAp457E0wLNio2bOSKnjYDhMj+YiAq61xrMGhQk9iXVk5FzgQMw==", - "requires": { - "parent-module": "^1.0.0", - "resolve-from": "^4.0.0" - } - }, "parse-json": { "version": "5.2.0", "resolved": "https://registry.npmjs.org/parse-json/-/parse-json-5.2.0.tgz", @@ -24432,16 +28530,6 @@ "json-parse-even-better-errors": "^2.3.0", "lines-and-columns": "^1.1.6" } - }, - "path-type": { - "version": "4.0.0", - "resolved": "https://registry.npmjs.org/path-type/-/path-type-4.0.0.tgz", - "integrity": 
"sha512-gDKb8aZMDeD/tZWs9P6+q0J9Mwkdl6xMV8TjnGP3qJVJ06bdMgkbBlLU8IdfOsIsFz2BW1rNVT3XuNEl8zPAvw==" - }, - "resolve-from": { - "version": "4.0.0", - "resolved": "https://registry.npmjs.org/resolve-from/-/resolve-from-4.0.0.tgz", - "integrity": "sha512-pb/MYmXstAkysRFx8piNI1tGFNQIFA3vkE3Gq4EuA1dF6gHp/+vgZqsCGJapvy8N3Q+4o7FwvquPJcnZ7RYy4g==" } } }, @@ -24459,6 +28547,13 @@ "@babel/compat-data": "^7.13.11", "@babel/helper-define-polyfill-provider": "^0.3.1", "semver": "^6.1.1" + }, + "dependencies": { + "semver": { + "version": "6.3.0", + "resolved": "https://registry.npmjs.org/semver/-/semver-6.3.0.tgz", + "integrity": "sha512-b39TBaTSfV6yBrapU89p5fKekE2m/NwnDocOVruQFS1/veMgdzuPcnOM34M6CwxW8jH/lxEa5rBoDeUwu5HHTw==" + } } }, "babel-plugin-polyfill-corejs3": { @@ -24894,35 +28989,11 @@ } }, "braces": { - "version": "2.3.2", - "resolved": "https://registry.npmjs.org/braces/-/braces-2.3.2.tgz", - "integrity": "sha512-aNdbnj9P8PjdXU4ybaWLK2IF3jc/EoDYbC7AazW6to3TRsfXxscC9UXOB5iDiEQrkyIbWp2SLQda4+QAa7nc3w==", + "version": "3.0.2", + "resolved": "https://registry.npmjs.org/braces/-/braces-3.0.2.tgz", + "integrity": "sha512-b8um+L1RzM3WDSzvhm6gIz1yfTbBt6YTlcEKAvsmqCZZFw46z626lVj9j1yEPW33H5H+lBQpZMP1k8l+78Ha0A==", "requires": { - "arr-flatten": "^1.1.0", - "array-unique": "^0.3.2", - "extend-shallow": "^2.0.1", - "fill-range": "^4.0.0", - "isobject": "^3.0.1", - "repeat-element": "^1.1.2", - "snapdragon": "^0.8.1", - "snapdragon-node": "^2.0.1", - "split-string": "^3.0.2", - "to-regex": "^3.0.1" - }, - "dependencies": { - "extend-shallow": { - "version": "2.0.1", - "resolved": "https://registry.npmjs.org/extend-shallow/-/extend-shallow-2.0.1.tgz", - "integrity": "sha512-zCnTtlxNoAiDc3gqY2aYAWFx7XWWiasuF2K8Me5WbN8otHKTUKBwjPtNpRs/rbUZm7KxWAaNj7P1a/p52GbVug==", - "requires": { - "is-extendable": "^0.1.0" - } - }, - "is-extendable": { - "version": "0.1.1", - "resolved": "https://registry.npmjs.org/is-extendable/-/is-extendable-0.1.1.tgz", - "integrity": 
"sha512-5BMULNob1vgFX6EjQw5izWDxrecWK9AM72rugNr0TFldMOi0fj6Jk+zeKIt0xGj4cEfQIJth4w3OKWOJ4f+AFw==" - } + "fill-range": "^7.0.1" } }, "brcast": { @@ -25126,6 +29197,14 @@ "unique-filename": "^1.1.1" }, "dependencies": { + "lru-cache": { + "version": "5.1.1", + "resolved": "https://registry.npmjs.org/lru-cache/-/lru-cache-5.1.1.tgz", + "integrity": "sha512-KpNARQA3Iwv+jTA0utUVVbrh+Jlrr1Fv0e56GGzAFOXN7dk/FviaDW8LHmK52DlcH4WP2n6gI8vN1aesBFgo9w==", + "requires": { + "yallist": "^3.0.2" + } + }, "rimraf": { "version": "2.7.1", "resolved": "https://registry.npmjs.org/rimraf/-/rimraf-2.7.1.tgz", @@ -25133,6 +29212,11 @@ "requires": { "glob": "^7.1.3" } + }, + "yallist": { + "version": "3.1.1", + "resolved": "https://registry.npmjs.org/yallist/-/yallist-3.1.1.tgz", + "integrity": "sha512-a4UGQaWPH59mOXUYnAG2ewncQS4i4F43Tv3JoAM+s2VDAmS9NsK8GpDMLrCHPksFT7h3K6TOoUNn2pb7RoXx4g==" } } }, @@ -25172,6 +29256,13 @@ "integrity": "sha512-JuG3qI4QOftFsZyOn1qq87fq5grLIyk1JYd5lJmdA+fG7aQ9pA/i3JIJGcO3q0MrRcHlOt1U+ZeHW8Dq9axALQ==", "requires": { "callsites": "^2.0.0" + }, + "dependencies": { + "callsites": { + "version": "2.0.0", + "resolved": "https://registry.npmjs.org/callsites/-/callsites-2.0.0.tgz", + "integrity": "sha512-ksWePWBloaWPxJYQ8TL0JHvtci6G5QTKwQ95RcWAa/lzoAKuAOflGdAK92hpHXjkwb8zLxoLNUoNYZgVsaJzvQ==" + } } }, "caller-path": { @@ -25183,9 +29274,9 @@ } }, "callsites": { - "version": "2.0.0", - "resolved": "https://registry.npmjs.org/callsites/-/callsites-2.0.0.tgz", - "integrity": "sha512-ksWePWBloaWPxJYQ8TL0JHvtci6G5QTKwQ95RcWAa/lzoAKuAOflGdAK92hpHXjkwb8zLxoLNUoNYZgVsaJzvQ==" + "version": "3.1.0", + "resolved": "https://registry.npmjs.org/callsites/-/callsites-3.1.0.tgz", + "integrity": "sha512-P8BjAsXvZS+VIDUI11hHCQEv74YT67YUi5JJFNWIqL235sBmjX4+qx9Muvls5ivyNENctx46xQLQ3aTuE7ssaQ==" }, "camel-case": { "version": "4.1.2", @@ -25218,9 +29309,9 @@ } }, "caniuse-lite": { - "version": "1.0.30001352", - "resolved": 
"https://registry.npmjs.org/caniuse-lite/-/caniuse-lite-1.0.30001352.tgz", - "integrity": "sha512-GUgH8w6YergqPQDGWhJGt8GDRnY0L/iJVQcU3eJ46GYf52R8tk0Wxp0PymuFVZboJYXGiCqwozAYZNRjVj6IcA==" + "version": "1.0.30001355", + "resolved": "https://registry.npmjs.org/caniuse-lite/-/caniuse-lite-1.0.30001355.tgz", + "integrity": "sha512-Sd6pjJHF27LzCB7pT7qs+kuX2ndurzCzkpJl6Qct7LPSZ9jn0bkOA8mdgMgmqnQAWLVOOGjLpc+66V57eLtb1g==" }, "capture-exit": { "version": "2.0.0", @@ -25293,39 +29384,18 @@ "picomatch": "^2.0.4" } }, - "braces": { - "version": "3.0.2", - "resolved": "https://registry.npmjs.org/braces/-/braces-3.0.2.tgz", - "integrity": "sha512-b8um+L1RzM3WDSzvhm6gIz1yfTbBt6YTlcEKAvsmqCZZFw46z626lVj9j1yEPW33H5H+lBQpZMP1k8l+78Ha0A==", - "requires": { - "fill-range": "^7.0.1" - } - }, - "fill-range": { - "version": "7.0.1", - "resolved": "https://registry.npmjs.org/fill-range/-/fill-range-7.0.1.tgz", - "integrity": "sha512-qOo9F+dMUmC2Lcb4BbVvnKJxTPjCm+RRpe4gDuGrzkL7mEVl/djYSu2OdQ2Pa302N4oqkSg9ir6jaLWJ2USVpQ==", - "requires": { - "to-regex-range": "^5.0.1" - } - }, "fsevents": { "version": "2.3.2", "resolved": "https://registry.npmjs.org/fsevents/-/fsevents-2.3.2.tgz", "integrity": "sha512-xiqMQR4xAeHTuB9uWm+fFRcIOgKBMiOBP+eXiyT7jsgVCq1bkVygt00oASowB7EdtpOHaaPgKt812P9ab+DDKA==", "optional": true }, - "is-number": { - "version": "7.0.0", - "resolved": "https://registry.npmjs.org/is-number/-/is-number-7.0.0.tgz", - "integrity": "sha512-41Cifkg6e8TylSpdtTpeLVMqvSBEVzTttHvERD741+pnZ8ANv0004MRL43QKPDlK9cGvNp6NZWZUBlbGXYxxng==" - }, - "to-regex-range": { - "version": "5.0.1", - "resolved": "https://registry.npmjs.org/to-regex-range/-/to-regex-range-5.0.1.tgz", - "integrity": "sha512-65P7iz6X5yEr1cwcgvQxbbIw7Uk3gOy5dIdtZ4rDveLqhrdJP+Li/Hx6tyK0NEb+2GCyneCMJiGqrADCSNk8sQ==", + "glob-parent": { + "version": "5.1.2", + "resolved": "https://registry.npmjs.org/glob-parent/-/glob-parent-5.1.2.tgz", + "integrity": 
"sha512-AOIgSQCepiJYwP3ARnGx+5VnTu2HBYdzbGP45eLw1vr3zB3vZLeyed1sC9hnbcOc9/SrMyM5RPQrkGz4aS9Zow==", "requires": { - "is-number": "^7.0.0" + "is-glob": "^4.0.1" } } } @@ -25435,9 +29505,9 @@ } }, "cli-width": { - "version": "3.0.0", - "resolved": "https://registry.npmjs.org/cli-width/-/cli-width-3.0.0.tgz", - "integrity": "sha512-FxqpkPPwu1HjuN93Omfm4h8uIanXofW0RxVEW3k5RKx+mJJYSthzNhp32Kzxxy3YAEZ/Dc/EWN1vZRY0+kOhbw==" + "version": "2.2.1", + "resolved": "https://registry.npmjs.org/cli-width/-/cli-width-2.2.1.tgz", + "integrity": "sha512-GRMWDxpOB6Dgk2E5Uo+3eEBvtOOlimMmpbFiKuLFnQzYDavtLFY3K5ona41jgN/WdRZtG7utuVSVTL4HbZHGkw==" }, "cliui": { "version": "5.0.0", @@ -25449,6 +29519,16 @@ "wrap-ansi": "^5.1.0" }, "dependencies": { + "ansi-regex": { + "version": "4.1.1", + "resolved": "https://registry.npmjs.org/ansi-regex/-/ansi-regex-4.1.1.tgz", + "integrity": "sha512-ILlv4k/3f6vfQ4OoP2AGvirOktlQ98ZEL1k9FaQjxa3L1abBgbuTDAdPOpvbGncC0BTVQrl+OM8xZGK6tWXt7g==" + }, + "emoji-regex": { + "version": "7.0.3", + "resolved": "https://registry.npmjs.org/emoji-regex/-/emoji-regex-7.0.3.tgz", + "integrity": "sha512-CwBLREIQ7LvYFB0WyRvwhq5N5qPhc6PMjD6bYggFlI5YyDgl+0vxq5VHbMOFqLg7hfWzmu8T5Z1QofhmTIhItA==" + }, "is-fullwidth-code-point": { "version": "2.0.0", "resolved": "https://registry.npmjs.org/is-fullwidth-code-point/-/is-fullwidth-code-point-2.0.0.tgz", @@ -25463,6 +29543,14 @@ "is-fullwidth-code-point": "^2.0.0", "strip-ansi": "^5.1.0" } + }, + "strip-ansi": { + "version": "5.2.0", + "resolved": "https://registry.npmjs.org/strip-ansi/-/strip-ansi-5.2.0.tgz", + "integrity": "sha512-DuRs1gKbBqsMKIZlrffwlug8MHkcnpjs5VPmL1PAh+mA30U0DTotfDZ0d2UUsXpPmPmMMJ6W773MaA3J+lbiWA==", + "requires": { + "ansi-regex": "^4.1.0" + } } } }, @@ -25524,6 +29612,11 @@ "resolved": "https://registry.npmjs.org/color-name/-/color-name-1.1.3.tgz", "integrity": "sha512-72fSenhMw2HZMTVHeCA9KCmpEIbzWiQsjN+BHcBbS9vr1mtt+vJjPdksIBNUmKAW8TFUDPJK5SUU3QhE9NEXDw==" }, + "escape-string-regexp": { + "version": "1.0.5", 
+ "resolved": "https://registry.npmjs.org/escape-string-regexp/-/escape-string-regexp-1.0.5.tgz", + "integrity": "sha512-vbRorB5FUQWvla16U8R/qgaFIya2qGzwDrNmCZuYKrbdSUMG6I1ZCGQRefkRVhuOkIGVne7BQ35DSfo1qvJqFg==" + }, "has-flag": { "version": "3.0.0", "resolved": "https://registry.npmjs.org/has-flag/-/has-flag-3.0.0.tgz", @@ -25613,9 +29706,9 @@ "integrity": "sha512-G5yTt3KQN4Yn7Yk4ed73hlZ1evrFKXeUW3086p3PRFNp7m2vIjI6Pg+Kgb+oyzhd9F2qdcoj67+y3SdxL5XWsg==" }, "commander": { - "version": "2.20.3", - "resolved": "https://registry.npmjs.org/commander/-/commander-2.20.3.tgz", - "integrity": "sha512-GpVkmM8vF2vQUkj2LvZmD35JxeJOLCwJ9cUkugyk2nuhbv3+mJvpLYYt+0+USMxE+oj+ey/lJEnhZw75x/OMcQ==" + "version": "4.1.1", + "resolved": "https://registry.npmjs.org/commander/-/commander-4.1.1.tgz", + "integrity": "sha512-NOKm8xhkzAjzFx8B2v5OAHT+u5pRQc2UCa2Vq9jYL/31o2wi9mxBA7LIFs3sV5VSC49z6pEhfbMULvShKj26WA==" }, "common-tags": { "version": "1.8.2", @@ -25767,6 +29860,16 @@ "mkdirp": "^0.5.1", "rimraf": "^2.5.4", "run-queue": "^1.0.0" + }, + "dependencies": { + "rimraf": { + "version": "2.7.1", + "resolved": "https://registry.npmjs.org/rimraf/-/rimraf-2.7.1.tgz", + "integrity": "sha512-uWjbaKIK3T1OSVptzX7Nl6PvQ3qAGtKEtVRjRuazjfL3Bx5eI409VZSqgND+4UNnmzLVdPj9FqFJNPqBZFve4w==", + "requires": { + "glob": "^7.1.3" + } + } } }, "copy-descriptor": { @@ -25814,6 +29917,22 @@ "is-directory": "^0.3.1", "js-yaml": "^3.13.1", "parse-json": "^4.0.0" + }, + "dependencies": { + "import-fresh": { + "version": "2.0.0", + "resolved": "https://registry.npmjs.org/import-fresh/-/import-fresh-2.0.0.tgz", + "integrity": "sha512-eZ5H8rcgYazHbKC3PG4ClHNykCSxtAhxSSEM+2mb+7evD2CKF5V7c0dNum7AdpDh0ZdICwZY9sRSn8f+KH96sg==", + "requires": { + "caller-path": "^2.0.0", + "resolve-from": "^3.0.0" + } + }, + "resolve-from": { + "version": "3.0.0", + "resolved": "https://registry.npmjs.org/resolve-from/-/resolve-from-3.0.0.tgz", + "integrity": 
"sha512-GnlH6vxLymXJNMBo7XP1fJIzBFbdYt49CuTwmB/6N53t+kMPRMFKz783LlQ4tv28XoQfMWinAJX6WCGf2IlaIw==" + } } }, "create-ecdh": { @@ -25858,22 +29977,13 @@ } }, "cross-spawn": { - "version": "6.0.5", - "resolved": "https://registry.npmjs.org/cross-spawn/-/cross-spawn-6.0.5.tgz", - "integrity": "sha512-eTVLrBSt7fjbDygz805pMnstIs2VTBNkRm0qxZd+M7A5XDdxVRWO5MxGBXZhjY4cqLYLdtrGqRf8mBPmzwSpWQ==", + "version": "7.0.3", + "resolved": "https://registry.npmjs.org/cross-spawn/-/cross-spawn-7.0.3.tgz", + "integrity": "sha512-iRDPJKUPVEND7dHPO8rkbOnPpyDygcDFtWjpeWNCgy8WP2rXcxXL8TskReQl6OrB2G7+UJrags1q15Fudc7G6w==", "requires": { - "nice-try": "^1.0.4", - "path-key": "^2.0.1", - "semver": "^5.5.0", - "shebang-command": "^1.2.0", - "which": "^1.2.9" - }, - "dependencies": { - "semver": { - "version": "5.7.1", - "resolved": "https://registry.npmjs.org/semver/-/semver-5.7.1.tgz", - "integrity": "sha512-sauaDf/PZdVgrLTNYHRtpXa1iRiKcaebiKQ1BJdpQlWH2lCvexQdX55snPFyK7QzpudqbCI0qXFfOasHdyNDGQ==" - } + "path-key": "^3.1.0", + "shebang-command": "^2.0.0", + "which": "^2.0.1" } }, "crypto-browserify": { @@ -26316,6 +30426,14 @@ "rimraf": "^2.6.3" }, "dependencies": { + "array-union": { + "version": "1.0.2", + "resolved": "https://registry.npmjs.org/array-union/-/array-union-1.0.2.tgz", + "integrity": "sha512-Dxr6QJj/RdU/hCaBjOfxW+q6lyuVE6JFWIrAUpuOOhoJJoQ99cUn3igRaHVB5P9WrgFVN0FfArM3x0cueOU8ng==", + "requires": { + "array-uniq": "^1.0.1" + } + }, "globby": { "version": "6.1.0", "resolved": "https://registry.npmjs.org/globby/-/globby-6.1.0.tgz", @@ -26339,6 +30457,14 @@ "version": "2.1.0", "resolved": "https://registry.npmjs.org/p-map/-/p-map-2.1.0.tgz", "integrity": "sha512-y3b8Kpd8OAN444hxfBbFfj1FY/RjtTd8tzYwhUqNYXx0fXx2iX4maP4Qr6qhIKbQXI02wTLAda4fYUbDagTUFw==" + }, + "rimraf": { + "version": "2.7.1", + "resolved": "https://registry.npmjs.org/rimraf/-/rimraf-2.7.1.tgz", + "integrity": "sha512-uWjbaKIK3T1OSVptzX7Nl6PvQ3qAGtKEtVRjRuazjfL3Bx5eI409VZSqgND+4UNnmzLVdPj9FqFJNPqBZFve4w==", + 
"requires": { + "glob": "^7.1.3" + } } } }, @@ -26433,12 +30559,12 @@ } }, "dir-glob": { - "version": "2.0.0", - "resolved": "https://registry.npmjs.org/dir-glob/-/dir-glob-2.0.0.tgz", - "integrity": "sha512-37qirFDz8cA5fimp9feo43fSuRo2gHwaIn6dXL8Ber1dGwUosDrGZeCCXq57WnIqE4aQ+u3eQZzsk1yOzhdwag==", + "version": "3.0.1", + "resolved": "https://registry.npmjs.org/dir-glob/-/dir-glob-3.0.1.tgz", + "integrity": "sha512-WkrWp9GR4KXfKGYzOLmTuGVi1UWFfws377n9cc55/tb6DuqyF6pcQ5AbiHEshaDpY9v6oaSr2XCDidGmMwdzIA==", + "dev": true, "requires": { - "arrify": "^1.0.1", - "path-type": "^3.0.0" + "path-type": "^4.0.0" } }, "dns-equal": { @@ -26598,9 +30724,9 @@ "integrity": "sha512-WMwm9LhRUo+WUaRN+vRuETqG89IgZphVSNkdFgeb6sS/E4OrDIN7t48CAewSHXc6C8lefD8KKfr5vY61brQlow==" }, "electron-to-chromium": { - "version": "1.4.154", - "resolved": "https://registry.npmjs.org/electron-to-chromium/-/electron-to-chromium-1.4.154.tgz", - "integrity": "sha512-GbV9djOkrnj6xmW+YYVVEI3VCQnJ0pnSTu7TW2JyjKd5cakoiSaG5R4RbEtfaD92GsY10DzbU3GYRe+IOA9kqA==" + "version": "1.4.160", + "resolved": "https://registry.npmjs.org/electron-to-chromium/-/electron-to-chromium-1.4.160.tgz", + "integrity": "sha512-O1Z12YfyeX2LXYO7MdHIPazGXzLzQnr1ADW55U2ARQsJBPgfpJz3u+g3Mo2l1wSyfOCdiGqaX9qtV4XKZ0HNRA==" }, "elliptic": { "version": "6.5.4", @@ -26624,9 +30750,9 @@ } }, "emoji-regex": { - "version": "7.0.3", - "resolved": "https://registry.npmjs.org/emoji-regex/-/emoji-regex-7.0.3.tgz", - "integrity": "sha512-CwBLREIQ7LvYFB0WyRvwhq5N5qPhc6PMjD6bYggFlI5YyDgl+0vxq5VHbMOFqLg7hfWzmu8T5Z1QofhmTIhItA==" + "version": "9.2.2", + "resolved": "https://registry.npmjs.org/emoji-regex/-/emoji-regex-9.2.2.tgz", + "integrity": "sha512-L18DaJsXSUk2+42pv8mLs5jJT2hqFkFE4j21wOmgbUqsZ2hL72NsUU785g9RXgo3s0ZNgVl42TiHp3ZtOv/Vyg==" }, "emojis-list": { "version": "3.0.0", @@ -26799,9 +30925,9 @@ "integrity": "sha512-NiSupZ4OeuGwr68lGIeym/ksIZMJodUGOSCZ/FSnTxcrekbvqrgdUxlJOMpijaKZVjAJrWrGs/6Jy8OMuyj9ow==" }, "escape-string-regexp": { - "version": 
"1.0.5", - "resolved": "https://registry.npmjs.org/escape-string-regexp/-/escape-string-regexp-1.0.5.tgz", - "integrity": "sha512-vbRorB5FUQWvla16U8R/qgaFIya2qGzwDrNmCZuYKrbdSUMG6I1ZCGQRefkRVhuOkIGVne7BQ35DSfo1qvJqFg==" + "version": "4.0.0", + "resolved": "https://registry.npmjs.org/escape-string-regexp/-/escape-string-regexp-4.0.0.tgz", + "integrity": "sha512-TtpcNJ3XAzx3Gq8sWRzJaVajRs0uVxA2YAkdb1jm2YkPz4G6egUFAyA3n5vtEIZefPk5Wa4UXbKuS5fKkJWdgA==" }, "escodegen": { "version": "1.14.3", @@ -26813,144 +30939,151 @@ "esutils": "^2.0.2", "optionator": "^0.8.1", "source-map": "~0.6.1" + }, + "dependencies": { + "estraverse": { + "version": "4.3.0", + "resolved": "https://registry.npmjs.org/estraverse/-/estraverse-4.3.0.tgz", + "integrity": "sha512-39nnKffWz8xN1BU/2c79n9nB9HDzo0niYUqx6xyqUnyoAnQyyWpOTdZEeiCch8BBu515t4wp9ZmgVfVhn9EBpw==" + }, + "levn": { + "version": "0.3.0", + "resolved": "https://registry.npmjs.org/levn/-/levn-0.3.0.tgz", + "integrity": "sha512-0OO4y2iOHix2W6ujICbKIaEQXvFQHue65vUG3pb5EUomzPI90z9hsA1VsO/dbIIpC53J8gxM9Q4Oho0jrCM/yA==", + "requires": { + "prelude-ls": "~1.1.2", + "type-check": "~0.3.2" + } + }, + "optionator": { + "version": "0.8.3", + "resolved": "https://registry.npmjs.org/optionator/-/optionator-0.8.3.tgz", + "integrity": "sha512-+IW9pACdk3XWmmTXG8m3upGUJst5XRGzxMRjXzAuJ1XnIFNvfhjjIuYkDvysnPQ7qzqVzLt78BCruntqRhWQbA==", + "requires": { + "deep-is": "~0.1.3", + "fast-levenshtein": "~2.0.6", + "levn": "~0.3.0", + "prelude-ls": "~1.1.2", + "type-check": "~0.3.2", + "word-wrap": "~1.2.3" + } + }, + "prelude-ls": { + "version": "1.1.2", + "resolved": "https://registry.npmjs.org/prelude-ls/-/prelude-ls-1.1.2.tgz", + "integrity": "sha512-ESF23V4SKG6lVSGZgYNpbsiaAkdab6ZgOxe52p7+Kid3W3u3bxR4Vfd/o21dmN7jSt0IwgZ4v5MUd26FEtXE9w==" + }, + "type-check": { + "version": "0.3.2", + "resolved": "https://registry.npmjs.org/type-check/-/type-check-0.3.2.tgz", + "integrity": 
"sha512-ZCmOJdvOWDBYJlzAoFkC+Q0+bUyEOS1ltgp1MGU03fqHG+dbi9tBFU2Rd9QKiDZFAYrhPh2JUf7rZRIuHRKtOg==", + "requires": { + "prelude-ls": "~1.1.2" + } + } } }, "eslint": { - "version": "6.8.0", - "resolved": "https://registry.npmjs.org/eslint/-/eslint-6.8.0.tgz", - "integrity": "sha512-K+Iayyo2LtyYhDSYwz5D5QdWw0hCacNzyq1Y821Xna2xSJj7cijoLLYmLxTQgcgZ9mC61nryMy9S7GRbYpI5Ig==", + "version": "8.17.0", + "resolved": "https://registry.npmjs.org/eslint/-/eslint-8.17.0.tgz", + "integrity": "sha512-gq0m0BTJfci60Fz4nczYxNAlED+sMcihltndR8t9t1evnU/azx53x3t2UHXC/uRjcbvRw/XctpaNygSTcQD+Iw==", "requires": { - "@babel/code-frame": "^7.0.0", + "@eslint/eslintrc": "^1.3.0", + "@humanwhocodes/config-array": "^0.9.2", "ajv": "^6.10.0", - "chalk": "^2.1.0", - "cross-spawn": "^6.0.5", - "debug": "^4.0.1", + "chalk": "^4.0.0", + "cross-spawn": "^7.0.2", + "debug": "^4.3.2", "doctrine": "^3.0.0", - "eslint-scope": "^5.0.0", - "eslint-utils": "^1.4.3", - "eslint-visitor-keys": "^1.1.0", - "espree": "^6.1.2", - "esquery": "^1.0.1", + "escape-string-regexp": "^4.0.0", + "eslint-scope": "^7.1.1", + "eslint-utils": "^3.0.0", + "eslint-visitor-keys": "^3.3.0", + "espree": "^9.3.2", + "esquery": "^1.4.0", "esutils": "^2.0.2", - "file-entry-cache": "^5.0.1", + "fast-deep-equal": "^3.1.3", + "file-entry-cache": "^6.0.1", "functional-red-black-tree": "^1.0.1", - "glob-parent": "^5.0.0", - "globals": "^12.1.0", - "ignore": "^4.0.6", + "glob-parent": "^6.0.1", + "globals": "^13.15.0", + "ignore": "^5.2.0", "import-fresh": "^3.0.0", "imurmurhash": "^0.1.4", - "inquirer": "^7.0.0", "is-glob": "^4.0.0", - "js-yaml": "^3.13.1", + "js-yaml": "^4.1.0", "json-stable-stringify-without-jsonify": "^1.0.1", - "levn": "^0.3.0", - "lodash": "^4.17.14", - "minimatch": "^3.0.4", - "mkdirp": "^0.5.1", + "levn": "^0.4.1", + "lodash.merge": "^4.6.2", + "minimatch": "^3.1.2", "natural-compare": "^1.4.0", - "optionator": "^0.8.3", - "progress": "^2.0.0", - "regexpp": "^2.0.1", - "semver": "^6.1.2", - "strip-ansi": "^5.2.0", - 
"strip-json-comments": "^3.0.1", - "table": "^5.2.3", + "optionator": "^0.9.1", + "regexpp": "^3.2.0", + "strip-ansi": "^6.0.1", + "strip-json-comments": "^3.1.0", "text-table": "^0.2.0", "v8-compile-cache": "^2.0.3" }, "dependencies": { - "ansi-styles": { - "version": "3.2.1", - "resolved": "https://registry.npmjs.org/ansi-styles/-/ansi-styles-3.2.1.tgz", - "integrity": "sha512-VT0ZI6kZRdTh8YyJw3SMbYm/u+NqfsAxEpWO0Pf9sq8/e94WxxOpPKx9FR1FlyCtOVDNOQ+8ntlqFxiRc+r5qA==", - "requires": { - "color-convert": "^1.9.0" - } - }, - "chalk": { - "version": "2.4.2", - "resolved": "https://registry.npmjs.org/chalk/-/chalk-2.4.2.tgz", - "integrity": "sha512-Mti+f9lpJNcwF4tWV8/OrTTtF1gZi+f8FqlyAdouralcFWFQWF2+NgCHShjkCb+IFBLq9buZwE1xckQU4peSuQ==", - "requires": { - "ansi-styles": "^3.2.1", - "escape-string-regexp": "^1.0.5", - "supports-color": "^5.3.0" - } - }, - "color-convert": { - "version": "1.9.3", - "resolved": "https://registry.npmjs.org/color-convert/-/color-convert-1.9.3.tgz", - "integrity": "sha512-QfAUtd+vFdAtFQcC8CCyYt1fYWxSqAiK2cSD6zDB8N3cpsEBAvRxp9zOGg6G/SHHJYAT88/az/IuDGALsNVbGg==", - "requires": { - "color-name": "1.1.3" - } - }, - "color-name": { - "version": "1.1.3", - "resolved": "https://registry.npmjs.org/color-name/-/color-name-1.1.3.tgz", - "integrity": "sha512-72fSenhMw2HZMTVHeCA9KCmpEIbzWiQsjN+BHcBbS9vr1mtt+vJjPdksIBNUmKAW8TFUDPJK5SUU3QhE9NEXDw==" - }, - "eslint-utils": { - "version": "1.4.3", - "resolved": "https://registry.npmjs.org/eslint-utils/-/eslint-utils-1.4.3.tgz", - "integrity": "sha512-fbBN5W2xdY45KulGXmLHZ3c3FHfVYmKg0IrAKGOkT/464PQsx2UeIzfz1RmEci+KLm1bBaAzZAh8+/E+XAeZ8Q==", - "requires": { - "eslint-visitor-keys": "^1.1.0" - } - }, - "globals": { - "version": "12.4.0", - "resolved": "https://registry.npmjs.org/globals/-/globals-12.4.0.tgz", - "integrity": "sha512-BWICuzzDvDoH54NHKCseDanAhE3CeDorgDL5MT6LMXXj2WCnd9UC2szdk4AWLfjdgNBCXLUanXYcpBBKOSWGwg==", - "requires": { - "type-fest": "^0.8.1" - } - }, - "has-flag": { - "version": "3.0.0", - 
"resolved": "https://registry.npmjs.org/has-flag/-/has-flag-3.0.0.tgz", - "integrity": "sha512-sKJf1+ceQBr4SMkvQnBDNDtf4TXpVhVGateu0t918bl30FnbE2m4vNLX+VWe/dpjlb+HugGYzW7uQXH98HPEYw==" + "argparse": { + "version": "2.0.1", + "resolved": "https://registry.npmjs.org/argparse/-/argparse-2.0.1.tgz", + "integrity": "sha512-8+9WqebbFzpX9OR+Wa6O29asIogeRMzcGtAINdpMHHyAg10f05aSFVBbcEqGf/PXw1EjAZ+q2/bEBg3DvurK3Q==" }, - "import-fresh": { - "version": "3.3.0", - "resolved": "https://registry.npmjs.org/import-fresh/-/import-fresh-3.3.0.tgz", - "integrity": "sha512-veYYhQa+D1QBKznvhUHxb8faxlrwUnxseDAbAp457E0wLNio2bOSKnjYDhMj+YiAq61xrMGhQk9iXVk5FzgQMw==", + "eslint-scope": { + "version": "7.1.1", + "resolved": "https://registry.npmjs.org/eslint-scope/-/eslint-scope-7.1.1.tgz", + "integrity": "sha512-QKQM/UXpIiHcLqJ5AOyIW7XZmzjkzQXYE54n1++wb0u9V/abW3l9uQnxX8Z5Xd18xyKIMTUAyQ0k1e8pz6LUrw==", "requires": { - "parent-module": "^1.0.0", - "resolve-from": "^4.0.0" + "esrecurse": "^4.3.0", + "estraverse": "^5.2.0" } }, - "regexpp": { - "version": "2.0.1", - "resolved": "https://registry.npmjs.org/regexpp/-/regexpp-2.0.1.tgz", - "integrity": "sha512-lv0M6+TkDVniA3aD1Eg0DVpfU/booSu7Eev3TDO/mZKHBfVjgCGTV4t4buppESEYDtkArYFOxTJWv6S5C+iaNw==" - }, - "resolve-from": { - "version": "4.0.0", - "resolved": "https://registry.npmjs.org/resolve-from/-/resolve-from-4.0.0.tgz", - "integrity": "sha512-pb/MYmXstAkysRFx8piNI1tGFNQIFA3vkE3Gq4EuA1dF6gHp/+vgZqsCGJapvy8N3Q+4o7FwvquPJcnZ7RYy4g==" - }, - "supports-color": { - "version": "5.5.0", - "resolved": "https://registry.npmjs.org/supports-color/-/supports-color-5.5.0.tgz", - "integrity": "sha512-QjVjwdXIt408MIiAqCX4oUKsgU2EqAGzs2Ppkm4aQYbjm+ZEWEcW4SfFNTr4uMNZma0ey4f5lgLrkB0aX0QMow==", + "js-yaml": { + "version": "4.1.0", + "resolved": "https://registry.npmjs.org/js-yaml/-/js-yaml-4.1.0.tgz", + "integrity": "sha512-wpxZs9NoxZaJESJGIZTyDEaYpl0FKSA+FB9aJiyemKhMwkxQg63h4T1KJgUGHpTqPDNRcmmYLugrRjJlBtWvRA==", "requires": { - "has-flag": "^3.0.0" + 
"argparse": "^2.0.1" } - }, - "type-fest": { - "version": "0.8.1", - "resolved": "https://registry.npmjs.org/type-fest/-/type-fest-0.8.1.tgz", - "integrity": "sha512-4dbzIzqvjtgiM5rw1k5rEHtBANKmdudhGyBEajN01fEyhaAIhsoKNy6y7+IN93IfpFtwY9iqi7kD+xwKhQsNJA==" } } }, - "eslint-config-react-app": { - "version": "5.2.1", - "resolved": "https://registry.npmjs.org/eslint-config-react-app/-/eslint-config-react-app-5.2.1.tgz", - "integrity": "sha512-pGIZ8t0mFLcV+6ZirRgYK6RVqUIKRIi9MmgzUEmrIknsn3AdO0I32asO86dJgloHq+9ZPl8UIg8mYrvgP5u2wQ==", + "eslint-config-airbnb": { + "version": "19.0.4", + "resolved": "https://registry.npmjs.org/eslint-config-airbnb/-/eslint-config-airbnb-19.0.4.tgz", + "integrity": "sha512-T75QYQVQX57jiNgpF9r1KegMICE94VYwoFQyMGhrvc+lB8YF2E/M/PYDaQe1AJcWaEgqLE+ErXV1Og/+6Vyzew==", "requires": { - "confusing-browser-globals": "^1.0.9" + "eslint-config-airbnb-base": "^15.0.0", + "object.assign": "^4.1.2", + "object.entries": "^1.1.5" + } + }, + "eslint-config-airbnb-base": { + "version": "15.0.0", + "resolved": "https://registry.npmjs.org/eslint-config-airbnb-base/-/eslint-config-airbnb-base-15.0.0.tgz", + "integrity": "sha512-xaX3z4ZZIcFLvh2oUNvcX5oEofXda7giYmuplVxoOg5A7EXJMrUyqRgR+mhDhPK8LZ4PttFOBvCYDbX3sUoUig==", + "requires": { + "confusing-browser-globals": "^1.0.10", + "object.assign": "^4.1.2", + "object.entries": "^1.1.5", + "semver": "^6.3.0" + }, + "dependencies": { + "semver": { + "version": "6.3.0", + "resolved": "https://registry.npmjs.org/semver/-/semver-6.3.0.tgz", + "integrity": "sha512-b39TBaTSfV6yBrapU89p5fKekE2m/NwnDocOVruQFS1/veMgdzuPcnOM34M6CwxW8jH/lxEa5rBoDeUwu5HHTw==" + } + } + }, + "eslint-config-airbnb-typescript": { + "version": "17.0.0", + "resolved": "https://registry.npmjs.org/eslint-config-airbnb-typescript/-/eslint-config-airbnb-typescript-17.0.0.tgz", + "integrity": "sha512-elNiuzD0kPAPTXjFWg+lE24nMdHMtuxgYoD30OyMD6yrW1AhFZPAg27VX7d3tzOErw+dgJTNWfRSDqEcXb4V0g==", + "dev": true, + "requires": { + "eslint-config-airbnb-base": 
"^15.0.0" } }, "eslint-import-resolver-node": { @@ -26969,31 +31102,9 @@ "requires": { "ms": "^2.1.1" } - }, - "resolve": { - "version": "1.22.0", - "resolved": "https://registry.npmjs.org/resolve/-/resolve-1.22.0.tgz", - "integrity": "sha512-Hhtrw0nLeSrFQ7phPp4OOcVjLPIeMnRlr5mcnVuMe7M/7eBn98A3hmFRLoFo3DLZkivSYwhRUJTyPyWAk56WLw==", - "requires": { - "is-core-module": "^2.8.1", - "path-parse": "^1.0.7", - "supports-preserve-symlinks-flag": "^1.0.0" - } } } }, - "eslint-loader": { - "version": "3.0.3", - "resolved": "https://registry.npmjs.org/eslint-loader/-/eslint-loader-3.0.3.tgz", - "integrity": "sha512-+YRqB95PnNvxNp1HEjQmvf9KNvCin5HXYYseOXVC2U0KEcw4IkQ2IQEBG46j7+gW39bMzeu0GsUhVbBY3Votpw==", - "requires": { - "fs-extra": "^8.1.0", - "loader-fs-cache": "^1.0.2", - "loader-utils": "^1.2.3", - "object-hash": "^2.0.1", - "schema-utils": "^2.6.1" - } - }, "eslint-module-utils": { "version": "2.7.3", "resolved": "https://registry.npmjs.org/eslint-module-utils/-/eslint-module-utils-2.7.3.tgz", @@ -27010,44 +31121,6 @@ "requires": { "ms": "^2.1.1" } - }, - "find-up": { - "version": "2.1.0", - "resolved": "https://registry.npmjs.org/find-up/-/find-up-2.1.0.tgz", - "integrity": "sha512-NWzkk0jSJtTt08+FBFMvXoeZnOJD+jTtsRmBYbAIzJdX6l7dLgR7CTubCM5/eDdPUBvLCeVasP1brfVR/9/EZQ==", - "requires": { - "locate-path": "^2.0.0" - } - }, - "locate-path": { - "version": "2.0.0", - "resolved": "https://registry.npmjs.org/locate-path/-/locate-path-2.0.0.tgz", - "integrity": "sha512-NCI2kiDkyR7VeEKm27Kda/iQHyKJe1Bu0FlTbYp3CqJu+9IFe9bLyAjMxf5ZDDbEg+iMPzB5zYyUTSm8wVTKmA==", - "requires": { - "p-locate": "^2.0.0", - "path-exists": "^3.0.0" - } - }, - "p-limit": { - "version": "1.3.0", - "resolved": "https://registry.npmjs.org/p-limit/-/p-limit-1.3.0.tgz", - "integrity": "sha512-vvcXsLAJ9Dr5rQOPk7toZQZJApBl2K4J6dANSsEuh6QI41JYcsS/qhTGa9ErIUUgK3WNQoJYvylxvjqmiqEA9Q==", - "requires": { - "p-try": "^1.0.0" - } - }, - "p-locate": { - "version": "2.0.0", - "resolved": 
"https://registry.npmjs.org/p-locate/-/p-locate-2.0.0.tgz", - "integrity": "sha512-nQja7m7gSKuewoVRen45CtVfODR3crN3goVQ0DDZ9N3yHxgpkuBhZqsaiotSQRrADUrne346peY7kT3TSACykg==", - "requires": { - "p-limit": "^1.1.0" - } - }, - "p-try": { - "version": "1.0.0", - "resolved": "https://registry.npmjs.org/p-try/-/p-try-1.0.0.tgz", - "integrity": "sha512-U1etNYuMJoIz3ZXSrrySFjsXQTWOx2/jdi86L+2pRvph/qMKL6sbcCYdH23fqsbm8TH2Gn0OybpT4eSFlCVHww==" } } }, @@ -27060,22 +31133,23 @@ } }, "eslint-plugin-import": { - "version": "2.20.1", - "resolved": "https://registry.npmjs.org/eslint-plugin-import/-/eslint-plugin-import-2.20.1.tgz", - "integrity": "sha512-qQHgFOTjguR+LnYRoToeZWT62XM55MBVXObHM6SKFd1VzDcX/vqT1kAz8ssqigh5eMj8qXcRoXXGZpPP6RfdCw==", + "version": "2.26.0", + "resolved": "https://registry.npmjs.org/eslint-plugin-import/-/eslint-plugin-import-2.26.0.tgz", + "integrity": "sha512-hYfi3FXaM8WPLf4S1cikh/r4IxnO6zrhZbEGz2b660EJRbuxgpDS5gkCuYgGWg2xxh2rBuIr4Pvhve/7c31koA==", "requires": { - "array-includes": "^3.0.3", - "array.prototype.flat": "^1.2.1", - "contains-path": "^0.1.0", + "array-includes": "^3.1.4", + "array.prototype.flat": "^1.2.5", "debug": "^2.6.9", - "doctrine": "1.5.0", - "eslint-import-resolver-node": "^0.3.2", - "eslint-module-utils": "^2.4.1", + "doctrine": "^2.1.0", + "eslint-import-resolver-node": "^0.3.6", + "eslint-module-utils": "^2.7.3", "has": "^1.0.3", - "minimatch": "^3.0.4", - "object.values": "^1.1.0", - "read-pkg-up": "^2.0.0", - "resolve": "^1.12.0" + "is-core-module": "^2.8.1", + "is-glob": "^4.0.3", + "minimatch": "^3.1.2", + "object.values": "^1.1.5", + "resolve": "^1.22.0", + "tsconfig-paths": "^3.14.1" }, "dependencies": { "debug": { @@ -27087,19 +31161,13 @@ } }, "doctrine": { - "version": "1.5.0", - "resolved": "https://registry.npmjs.org/doctrine/-/doctrine-1.5.0.tgz", - "integrity": "sha512-lsGyRuYr4/PIB0txi+Fy2xOMI2dGaTguCaotzFGkVZuKR5usKfcRWIFKNM3QNrU7hh/+w2bwTW+ZeXPK5l8uVg==", + "version": "2.1.0", + "resolved": 
"https://registry.npmjs.org/doctrine/-/doctrine-2.1.0.tgz", + "integrity": "sha512-35mSku4ZXK0vfCuHEDAwt55dg2jNajHZ1odvF+8SSr82EsZY4QmXfuWso8oEd8zRhVObSN18aM0CjSdoBX7zIw==", "requires": { - "esutils": "^2.0.2", - "isarray": "^1.0.0" + "esutils": "^2.0.2" } }, - "isarray": { - "version": "1.0.0", - "resolved": "https://registry.npmjs.org/isarray/-/isarray-1.0.0.tgz", - "integrity": "sha512-VLghIWNM6ELQzo7zwmcg0NmTVyWKYjvIeM83yjp0wRDTmUnrM678fQbcKBo6n2CJEF0szoG//ytg+TKla89ALQ==" - }, "ms": { "version": "2.0.0", "resolved": "https://registry.npmjs.org/ms/-/ms-2.0.0.tgz", @@ -27108,38 +31176,43 @@ } }, "eslint-plugin-jsx-a11y": { - "version": "6.2.3", - "resolved": "https://registry.npmjs.org/eslint-plugin-jsx-a11y/-/eslint-plugin-jsx-a11y-6.2.3.tgz", - "integrity": "sha512-CawzfGt9w83tyuVekn0GDPU9ytYtxyxyFZ3aSWROmnRRFQFT2BiPJd7jvRdzNDi6oLWaS2asMeYSNMjWTV4eNg==", + "version": "6.5.1", + "resolved": "https://registry.npmjs.org/eslint-plugin-jsx-a11y/-/eslint-plugin-jsx-a11y-6.5.1.tgz", + "integrity": "sha512-sVCFKX9fllURnXT2JwLN5Qgo24Ug5NF6dxhkmxsMEUZhXRcGg+X3e1JbJ84YePQKBl5E0ZjAH5Q4rkdcGY99+g==", "requires": { - "@babel/runtime": "^7.4.5", - "aria-query": "^3.0.0", - "array-includes": "^3.0.3", + "@babel/runtime": "^7.16.3", + "aria-query": "^4.2.2", + "array-includes": "^3.1.4", "ast-types-flow": "^0.0.7", - "axobject-query": "^2.0.2", - "damerau-levenshtein": "^1.0.4", - "emoji-regex": "^7.0.2", + "axe-core": "^4.3.5", + "axobject-query": "^2.2.0", + "damerau-levenshtein": "^1.0.7", + "emoji-regex": "^9.2.2", "has": "^1.0.3", - "jsx-ast-utils": "^2.2.1" + "jsx-ast-utils": "^3.2.1", + "language-tags": "^1.0.5", + "minimatch": "^3.0.4" } }, "eslint-plugin-react": { - "version": "7.19.0", - "resolved": "https://registry.npmjs.org/eslint-plugin-react/-/eslint-plugin-react-7.19.0.tgz", - "integrity": "sha512-SPT8j72CGuAP+JFbT0sJHOB80TX/pu44gQ4vXH/cq+hQTiY2PuZ6IHkqXJV6x1b28GDdo1lbInjKUrrdUf0LOQ==", + "version": "7.30.0", + "resolved": 
"https://registry.npmjs.org/eslint-plugin-react/-/eslint-plugin-react-7.30.0.tgz", + "integrity": "sha512-RgwH7hjW48BleKsYyHK5vUAvxtE9SMPDKmcPRQgtRCYaZA0XQPt5FSkrU3nhz5ifzMZcA8opwmRJ2cmOO8tr5A==", "requires": { - "array-includes": "^3.1.1", + "array-includes": "^3.1.5", + "array.prototype.flatmap": "^1.3.0", "doctrine": "^2.1.0", - "has": "^1.0.3", - "jsx-ast-utils": "^2.2.3", - "object.entries": "^1.1.1", - "object.fromentries": "^2.0.2", - "object.values": "^1.1.1", - "prop-types": "^15.7.2", - "resolve": "^1.15.1", + "estraverse": "^5.3.0", + "jsx-ast-utils": "^2.4.1 || ^3.0.0", + "minimatch": "^3.1.2", + "object.entries": "^1.1.5", + "object.fromentries": "^2.0.5", + "object.hasown": "^1.1.1", + "object.values": "^1.1.5", + "prop-types": "^15.8.1", + "resolve": "^2.0.0-next.3", "semver": "^6.3.0", - "string.prototype.matchall": "^4.0.2", - "xregexp": "^4.3.0" + "string.prototype.matchall": "^4.0.7" }, "dependencies": { "doctrine": { @@ -27151,21 +31224,25 @@ } }, "resolve": { - "version": "1.22.0", - "resolved": "https://registry.npmjs.org/resolve/-/resolve-1.22.0.tgz", - "integrity": "sha512-Hhtrw0nLeSrFQ7phPp4OOcVjLPIeMnRlr5mcnVuMe7M/7eBn98A3hmFRLoFo3DLZkivSYwhRUJTyPyWAk56WLw==", + "version": "2.0.0-next.3", + "resolved": "https://registry.npmjs.org/resolve/-/resolve-2.0.0-next.3.tgz", + "integrity": "sha512-W8LucSynKUIDu9ylraa7ueVZ7hc0uAgJBxVsQSKOXOyle8a93qXhcz+XAXZ8bIq2d6i4Ehddn6Evt+0/UwKk6Q==", "requires": { - "is-core-module": "^2.8.1", - "path-parse": "^1.0.7", - "supports-preserve-symlinks-flag": "^1.0.0" + "is-core-module": "^2.2.0", + "path-parse": "^1.0.6" } + }, + "semver": { + "version": "6.3.0", + "resolved": "https://registry.npmjs.org/semver/-/semver-6.3.0.tgz", + "integrity": "sha512-b39TBaTSfV6yBrapU89p5fKekE2m/NwnDocOVruQFS1/veMgdzuPcnOM34M6CwxW8jH/lxEa5rBoDeUwu5HHTw==" } } }, "eslint-plugin-react-hooks": { - "version": "1.7.0", - "resolved": "https://registry.npmjs.org/eslint-plugin-react-hooks/-/eslint-plugin-react-hooks-1.7.0.tgz", - 
"integrity": "sha512-iXTCFcOmlWvw4+TOE8CLWj6yX1GwzT0Y6cUfHHZqWnSk144VmVIRcVGtUAzrLES7C798lmvnt02C7rxaOX1HNA==", + "version": "4.6.0", + "resolved": "https://registry.npmjs.org/eslint-plugin-react-hooks/-/eslint-plugin-react-hooks-4.6.0.tgz", + "integrity": "sha512-oFc7Itz9Qxh2x4gNHStv3BqJq54ExXmfC+a1NjAta66IAN87Wu0R/QArgIS9qKzX3dXKPI9H5crl9QchNMY9+g==", "requires": {} }, "eslint-scope": { @@ -27175,29 +31252,43 @@ "requires": { "esrecurse": "^4.3.0", "estraverse": "^4.1.1" + }, + "dependencies": { + "estraverse": { + "version": "4.3.0", + "resolved": "https://registry.npmjs.org/estraverse/-/estraverse-4.3.0.tgz", + "integrity": "sha512-39nnKffWz8xN1BU/2c79n9nB9HDzo0niYUqx6xyqUnyoAnQyyWpOTdZEeiCch8BBu515t4wp9ZmgVfVhn9EBpw==" + } } }, "eslint-utils": { - "version": "2.1.0", - "resolved": "https://registry.npmjs.org/eslint-utils/-/eslint-utils-2.1.0.tgz", - "integrity": "sha512-w94dQYoauyvlDc43XnGB8lU3Zt713vNChgt4EWwhXAP2XkBvndfxF0AgIqKOOasjPIPzj9JqgwkwbCYD0/V3Zg==", + "version": "3.0.0", + "resolved": "https://registry.npmjs.org/eslint-utils/-/eslint-utils-3.0.0.tgz", + "integrity": "sha512-uuQC43IGctw68pJA1RgbQS8/NP7rch6Cwd4j3ZBtgo4/8Flj4eGE7ZYSZRN3iq5pVUv6GPdW5Z1RFleo84uLDA==", "requires": { - "eslint-visitor-keys": "^1.1.0" + "eslint-visitor-keys": "^2.0.0" + }, + "dependencies": { + "eslint-visitor-keys": { + "version": "2.1.0", + "resolved": "https://registry.npmjs.org/eslint-visitor-keys/-/eslint-visitor-keys-2.1.0.tgz", + "integrity": "sha512-0rSmRBzXgDzIsD6mGdJgevzgezI534Cer5L/vyMX0kHzT/jiB43jRhd9YUlMGYLQy2zprNmoT8qasCGtY+QaKw==" + } } }, "eslint-visitor-keys": { - "version": "1.3.0", - "resolved": "https://registry.npmjs.org/eslint-visitor-keys/-/eslint-visitor-keys-1.3.0.tgz", - "integrity": "sha512-6J72N8UNa462wa/KFODt/PJ3IU60SDpC3QXC1Hjc1BXXpfL2C9R5+AU7jhe0F6GREqVMh4Juu+NY7xn+6dipUQ==" + "version": "3.3.0", + "resolved": "https://registry.npmjs.org/eslint-visitor-keys/-/eslint-visitor-keys-3.3.0.tgz", + "integrity": 
"sha512-mQ+suqKJVyeuwGYHAdjMFqjCyfl8+Ldnxuyp3ldiMBFKkvytrXUZWaiPCEav8qDHKty44bD+qV1IP4T+w+xXRA==" }, "espree": { - "version": "6.2.1", - "resolved": "https://registry.npmjs.org/espree/-/espree-6.2.1.tgz", - "integrity": "sha512-ysCxRQY3WaXJz9tdbWOwuWr5Y/XrPTGX9Kiz3yoUXwW0VZ4w30HTkQLaGx/+ttFjF8i+ACbArnB4ce68a9m5hw==", + "version": "9.3.2", + "resolved": "https://registry.npmjs.org/espree/-/espree-9.3.2.tgz", + "integrity": "sha512-D211tC7ZwouTIuY5x9XnS0E9sWNChB7IYKX/Xp5eQj3nFXhqmiUDB9q27y76oFl8jTg3pXcQx/bpxMfs3CIZbA==", "requires": { - "acorn": "^7.1.1", - "acorn-jsx": "^5.2.0", - "eslint-visitor-keys": "^1.1.0" + "acorn": "^8.7.1", + "acorn-jsx": "^5.3.2", + "eslint-visitor-keys": "^3.3.0" } }, "esprima": { @@ -27211,13 +31302,6 @@ "integrity": "sha512-cCDispWt5vHHtwMY2YrAQ4ibFkAL8RbH5YGBnZBc90MolvvfkkQcJro/aZiAQUlQ3qgrYS6D6v8Gc5G5CQsc9w==", "requires": { "estraverse": "^5.1.0" - }, - "dependencies": { - "estraverse": { - "version": "5.3.0", - "resolved": "https://registry.npmjs.org/estraverse/-/estraverse-5.3.0.tgz", - "integrity": "sha512-MMdARuVEQziNTeJD8DgMqmhwR11BRQ/cBP+pLtYdSTnf3MIO8fFeiINEbX36ZdNlfU/7A9f3gUw49B3oQsvwBA==" - } } }, "esrecurse": { @@ -27226,19 +31310,12 @@ "integrity": "sha512-KmfKL3b6G+RXvP8N1vr3Tq1kL/oCFgn2NYXEtqP8/L3pKapUA4G8cFVaoF3SU323CD4XypR/ffioHmkti6/Tag==", "requires": { "estraverse": "^5.2.0" - }, - "dependencies": { - "estraverse": { - "version": "5.3.0", - "resolved": "https://registry.npmjs.org/estraverse/-/estraverse-5.3.0.tgz", - "integrity": "sha512-MMdARuVEQziNTeJD8DgMqmhwR11BRQ/cBP+pLtYdSTnf3MIO8fFeiINEbX36ZdNlfU/7A9f3gUw49B3oQsvwBA==" - } } }, "estraverse": { - "version": "4.3.0", - "resolved": "https://registry.npmjs.org/estraverse/-/estraverse-4.3.0.tgz", - "integrity": "sha512-39nnKffWz8xN1BU/2c79n9nB9HDzo0niYUqx6xyqUnyoAnQyyWpOTdZEeiCch8BBu515t4wp9ZmgVfVhn9EBpw==" + "version": "5.3.0", + "resolved": "https://registry.npmjs.org/estraverse/-/estraverse-5.3.0.tgz", + "integrity": 
"sha512-MMdARuVEQziNTeJD8DgMqmhwR11BRQ/cBP+pLtYdSTnf3MIO8fFeiINEbX36ZdNlfU/7A9f3gUw49B3oQsvwBA==" }, "esutils": { "version": "2.0.3", @@ -27291,6 +31368,51 @@ "p-finally": "^1.0.0", "signal-exit": "^3.0.0", "strip-eof": "^1.0.0" + }, + "dependencies": { + "cross-spawn": { + "version": "6.0.5", + "resolved": "https://registry.npmjs.org/cross-spawn/-/cross-spawn-6.0.5.tgz", + "integrity": "sha512-eTVLrBSt7fjbDygz805pMnstIs2VTBNkRm0qxZd+M7A5XDdxVRWO5MxGBXZhjY4cqLYLdtrGqRf8mBPmzwSpWQ==", + "requires": { + "nice-try": "^1.0.4", + "path-key": "^2.0.1", + "semver": "^5.5.0", + "shebang-command": "^1.2.0", + "which": "^1.2.9" + } + }, + "path-key": { + "version": "2.0.1", + "resolved": "https://registry.npmjs.org/path-key/-/path-key-2.0.1.tgz", + "integrity": "sha512-fEHGKCSmUSDPv4uoj8AlD+joPlq3peND+HRYyxFz4KPw4z926S/b8rIuFs2FYJg3BwsxJf6A9/3eIdLaYC+9Dw==" + }, + "semver": { + "version": "5.7.1", + "resolved": "https://registry.npmjs.org/semver/-/semver-5.7.1.tgz", + "integrity": "sha512-sauaDf/PZdVgrLTNYHRtpXa1iRiKcaebiKQ1BJdpQlWH2lCvexQdX55snPFyK7QzpudqbCI0qXFfOasHdyNDGQ==" + }, + "shebang-command": { + "version": "1.2.0", + "resolved": "https://registry.npmjs.org/shebang-command/-/shebang-command-1.2.0.tgz", + "integrity": "sha512-EV3L1+UQWGor21OmnvojK36mhg+TyIKDh3iFBKBohr5xeXIhNBcx8oWdgkTEEQ+BEFFYdLRuqMfd5L84N1V5Vg==", + "requires": { + "shebang-regex": "^1.0.0" + } + }, + "shebang-regex": { + "version": "1.0.0", + "resolved": "https://registry.npmjs.org/shebang-regex/-/shebang-regex-1.0.0.tgz", + "integrity": "sha512-wpoSFAxys6b2a2wHZ1XpDSgD7N9iVjg29Ph9uV/uaP9Ex/KXlkTZTeddxDPSYQpgvzKLGJke2UU0AzoGCjNIvQ==" + }, + "which": { + "version": "1.3.1", + "resolved": "https://registry.npmjs.org/which/-/which-1.3.1.tgz", + "integrity": "sha512-HxJdYWq1MTIQbJ3nw0cqssHoTNU267KlrDuGZ1WYlxDStUtKUhOaJmh112/TZmHxxUfuJqPXSOm7tDyas0OSIQ==", + "requires": { + "isexe": "^2.0.0" + } + } } }, "exit": { @@ -27369,11 +31491,6 @@ } } }, - "is-extendable": { - "version": "0.1.1", - "resolved": 
"https://registry.npmjs.org/is-extendable/-/is-extendable-0.1.1.tgz", - "integrity": "sha512-5BMULNob1vgFX6EjQw5izWDxrecWK9AM72rugNr0TFldMOi0fj6Jk+zeKIt0xGj4cEfQIJth4w3OKWOJ4f+AFw==" - }, "ms": { "version": "2.0.0", "resolved": "https://registry.npmjs.org/ms/-/ms-2.0.0.tgz", @@ -27525,6 +31642,16 @@ "requires": { "assign-symbols": "^1.0.0", "is-extendable": "^1.0.1" + }, + "dependencies": { + "is-extendable": { + "version": "1.0.1", + "resolved": "https://registry.npmjs.org/is-extendable/-/is-extendable-1.0.1.tgz", + "integrity": "sha512-arnXMxT1hhoKo9k1LZdmlNyJdDDfy2v0fXjFlmok4+i8ul/6WlbVge9bhM74OpNPQPMGUToDtz+KXa1PneJxOA==", + "requires": { + "is-plain-object": "^2.0.4" + } + } } }, "external-editor": { @@ -27567,11 +31694,6 @@ "requires": { "is-extendable": "^0.1.0" } - }, - "is-extendable": { - "version": "0.1.1", - "resolved": "https://registry.npmjs.org/is-extendable/-/is-extendable-0.1.1.tgz", - "integrity": "sha512-5BMULNob1vgFX6EjQw5izWDxrecWK9AM72rugNr0TFldMOi0fj6Jk+zeKIt0xGj4cEfQIJth4w3OKWOJ4f+AFw==" } } }, @@ -27586,35 +31708,25 @@ "integrity": "sha512-f3qQ9oQy9j2AhBe/H9VC91wLmKBCCU/gDOnKNAYG5hswO7BLKj09Hc5HYNz9cGI++xlpDCIgDaitVs03ATR84Q==" }, "fast-glob": { - "version": "2.2.7", - "resolved": "https://registry.npmjs.org/fast-glob/-/fast-glob-2.2.7.tgz", - "integrity": "sha512-g1KuQwHOZAmOZMuBtHdxDtju+T2RT8jgCC9aANsbpdiDDTSnjgfuVsIBNKbUeJI3oKMRExcfNDtJl4OhbffMsw==", + "version": "3.2.11", + "resolved": "https://registry.npmjs.org/fast-glob/-/fast-glob-3.2.11.tgz", + "integrity": "sha512-xrO3+1bxSo3ZVHAnqzyuewYT6aMFHRAd4Kcs92MAonjwQZLsK9d0SF1IyQ3k5PoirxTW0Oe/RqFgMQ6TcNE5Ew==", + "dev": true, "requires": { - "@mrmlnc/readdir-enhanced": "^2.2.1", - "@nodelib/fs.stat": "^1.1.2", - "glob-parent": "^3.1.0", - "is-glob": "^4.0.0", - "merge2": "^1.2.3", - "micromatch": "^3.1.10" + "@nodelib/fs.stat": "^2.0.2", + "@nodelib/fs.walk": "^1.2.3", + "glob-parent": "^5.1.2", + "merge2": "^1.3.0", + "micromatch": "^4.0.4" }, "dependencies": { "glob-parent": { - 
"version": "3.1.0", - "resolved": "https://registry.npmjs.org/glob-parent/-/glob-parent-3.1.0.tgz", - "integrity": "sha512-E8Ak/2+dZY6fnzlR7+ueWvhsH1SjHr4jjss4YS/h4py44jY9MhK/VFdaZJAWDz6BbL21KeteKxFSFpq8OS5gVA==", + "version": "5.1.2", + "resolved": "https://registry.npmjs.org/glob-parent/-/glob-parent-5.1.2.tgz", + "integrity": "sha512-AOIgSQCepiJYwP3ARnGx+5VnTu2HBYdzbGP45eLw1vr3zB3vZLeyed1sC9hnbcOc9/SrMyM5RPQrkGz4aS9Zow==", + "dev": true, "requires": { - "is-glob": "^3.1.0", - "path-dirname": "^1.0.0" - }, - "dependencies": { - "is-glob": { - "version": "3.1.0", - "resolved": "https://registry.npmjs.org/is-glob/-/is-glob-3.1.0.tgz", - "integrity": "sha512-UFpDDrPgM6qpnFNI+rh/p3bUaq9hKLZN8bMUWzxmcnZVS3omf4IPK+BrewlnWjO1WmUsMYuSjKh4UJuV4+Lqmw==", - "requires": { - "is-extglob": "^2.1.0" - } - } + "is-glob": "^4.0.1" } } } @@ -27629,6 +31741,15 @@ "resolved": "https://registry.npmjs.org/fast-levenshtein/-/fast-levenshtein-2.0.6.tgz", "integrity": "sha512-DCXu6Ifhqcks7TZKY3Hxp3y6qphY5SJZmrWMDrKcERSOXWQdMhU9Ig/PYrzyw/ul9jOIyh0N4M0tbC5hodg8dw==" }, + "fastq": { + "version": "1.13.0", + "resolved": "https://registry.npmjs.org/fastq/-/fastq-1.13.0.tgz", + "integrity": "sha512-YpkpUnK8od0o1hmeSc7UUs/eB/vIPWJYjKck2QKIzAf71Vm1AAQ3EbuZB3g2JIy+pg+ERD0vqI79KyZiB2e2Nw==", + "dev": true, + "requires": { + "reusify": "^1.0.4" + } + }, "faye-websocket": { "version": "0.10.0", "resolved": "https://registry.npmjs.org/faye-websocket/-/faye-websocket-0.10.0.tgz", @@ -27685,14 +31806,21 @@ "integrity": "sha512-yaduQFRKLXYOGgEn6AZau90j3ggSOyiqXU0F9JZfeXYhNa+Jk4X+s45A2zg5jns87GAFa34BBm2kXw4XpNcbdg==", "requires": { "escape-string-regexp": "^1.0.5" + }, + "dependencies": { + "escape-string-regexp": { + "version": "1.0.5", + "resolved": "https://registry.npmjs.org/escape-string-regexp/-/escape-string-regexp-1.0.5.tgz", + "integrity": "sha512-vbRorB5FUQWvla16U8R/qgaFIya2qGzwDrNmCZuYKrbdSUMG6I1ZCGQRefkRVhuOkIGVne7BQ35DSfo1qvJqFg==" + } } }, "file-entry-cache": { - "version": "5.0.1", - 
"resolved": "https://registry.npmjs.org/file-entry-cache/-/file-entry-cache-5.0.1.tgz", - "integrity": "sha512-bCg29ictuBaKUwwArK4ouCaqDgLZcysCFLmM/Yn/FDoqndh/9vNuQfXRDvTuXKLxfD/JtZQGKFT8MGcJBK644g==", + "version": "6.0.1", + "resolved": "https://registry.npmjs.org/file-entry-cache/-/file-entry-cache-6.0.1.tgz", + "integrity": "sha512-7Gps/XWymbLk2QLYK4NzpMOrYjMhdIxXuIvy2QBsLE6ljuodKvdkWs/cpyJJ3CVIVpH0Oi1Hvg1ovbMzLdFBBg==", "requires": { - "flat-cache": "^2.0.1" + "flat-cache": "^3.0.4" } }, "file-loader": { @@ -27716,29 +31844,11 @@ "integrity": "sha512-u4AYWPgbI5GBhs6id1KdImZWn5yfyFrrQ8OWZdN7ZMfA8Bf4HcO0BGo9bmUIEV8yrp8I1xVfJ/dn90GtFNNJcg==" }, "fill-range": { - "version": "4.0.0", - "resolved": "https://registry.npmjs.org/fill-range/-/fill-range-4.0.0.tgz", - "integrity": "sha512-VcpLTWqWDiTerugjj8e3+esbg+skS3M9e54UuR3iCeIDMXCLTsAH8hTSzDQU/X6/6t3eYkOKoZSef2PlU6U1XQ==", + "version": "7.0.1", + "resolved": "https://registry.npmjs.org/fill-range/-/fill-range-7.0.1.tgz", + "integrity": "sha512-qOo9F+dMUmC2Lcb4BbVvnKJxTPjCm+RRpe4gDuGrzkL7mEVl/djYSu2OdQ2Pa302N4oqkSg9ir6jaLWJ2USVpQ==", "requires": { - "extend-shallow": "^2.0.1", - "is-number": "^3.0.0", - "repeat-string": "^1.6.1", - "to-regex-range": "^2.1.0" - }, - "dependencies": { - "extend-shallow": { - "version": "2.0.1", - "resolved": "https://registry.npmjs.org/extend-shallow/-/extend-shallow-2.0.1.tgz", - "integrity": "sha512-zCnTtlxNoAiDc3gqY2aYAWFx7XWWiasuF2K8Me5WbN8otHKTUKBwjPtNpRs/rbUZm7KxWAaNj7P1a/p52GbVug==", - "requires": { - "is-extendable": "^0.1.0" - } - }, - "is-extendable": { - "version": "0.1.1", - "resolved": "https://registry.npmjs.org/is-extendable/-/is-extendable-0.1.1.tgz", - "integrity": "sha512-5BMULNob1vgFX6EjQw5izWDxrecWK9AM72rugNr0TFldMOi0fj6Jk+zeKIt0xGj4cEfQIJth4w3OKWOJ4f+AFw==" - } + "to-regex-range": "^5.0.1" } }, "finalhandler": { @@ -27781,27 +31891,26 @@ } }, "find-up": { - "version": "3.0.0", - "resolved": "https://registry.npmjs.org/find-up/-/find-up-3.0.0.tgz", - "integrity": 
"sha512-1yD6RmLI1XBfxugvORwlck6f75tYL+iR0jqwsOrOxMZyGYqUuDhJ0l4AXdO1iX/FTs9cBAMEk1gWSEx1kSbylg==", + "version": "2.1.0", + "resolved": "https://registry.npmjs.org/find-up/-/find-up-2.1.0.tgz", + "integrity": "sha512-NWzkk0jSJtTt08+FBFMvXoeZnOJD+jTtsRmBYbAIzJdX6l7dLgR7CTubCM5/eDdPUBvLCeVasP1brfVR/9/EZQ==", "requires": { - "locate-path": "^3.0.0" + "locate-path": "^2.0.0" } }, "flat-cache": { - "version": "2.0.1", - "resolved": "https://registry.npmjs.org/flat-cache/-/flat-cache-2.0.1.tgz", - "integrity": "sha512-LoQe6yDuUMDzQAEH8sgmh4Md6oZnc/7PjtwjNFSzveXqSHt6ka9fPBuso7IGf9Rz4uqnSnWiFH2B/zj24a5ReA==", + "version": "3.0.4", + "resolved": "https://registry.npmjs.org/flat-cache/-/flat-cache-3.0.4.tgz", + "integrity": "sha512-dm9s5Pw7Jc0GvMYbshN6zchCA9RgQlzzEZX3vylR9IqFfS8XciblUXOKfW6SiuJ0e13eDYZoZV5wdrev7P3Nwg==", "requires": { - "flatted": "^2.0.0", - "rimraf": "2.6.3", - "write": "1.0.3" + "flatted": "^3.1.0", + "rimraf": "^3.0.2" } }, "flatted": { - "version": "2.0.2", - "resolved": "https://registry.npmjs.org/flatted/-/flatted-2.0.2.tgz", - "integrity": "sha512-r5wGx7YeOwNWNlCA0wQ86zKyDLMQr+/RB8xy74M4hTphfmjlijTSSXGuH8rnvKZnfT9i+75zmd8jcKdMR4O6jA==" + "version": "3.2.5", + "resolved": "https://registry.npmjs.org/flatted/-/flatted-3.2.5.tgz", + "integrity": "sha512-WIWGi2L3DyTUvUrwRKgGi9TwxQMUEqPOPQBVi71R96jZXJdFskXEmf54BoZaS1kknGODoIGASGEzBUYdyMCBJg==" }, "flatten": { "version": "1.0.3", @@ -27863,6 +31972,33 @@ "color-convert": "^1.9.0" } }, + "braces": { + "version": "2.3.2", + "resolved": "https://registry.npmjs.org/braces/-/braces-2.3.2.tgz", + "integrity": "sha512-aNdbnj9P8PjdXU4ybaWLK2IF3jc/EoDYbC7AazW6to3TRsfXxscC9UXOB5iDiEQrkyIbWp2SLQda4+QAa7nc3w==", + "requires": { + "arr-flatten": "^1.1.0", + "array-unique": "^0.3.2", + "extend-shallow": "^2.0.1", + "fill-range": "^4.0.0", + "isobject": "^3.0.1", + "repeat-element": "^1.1.2", + "snapdragon": "^0.8.1", + "snapdragon-node": "^2.0.1", + "split-string": "^3.0.2", + "to-regex": "^3.0.1" + }, + "dependencies": 
{ + "extend-shallow": { + "version": "2.0.1", + "resolved": "https://registry.npmjs.org/extend-shallow/-/extend-shallow-2.0.1.tgz", + "integrity": "sha512-zCnTtlxNoAiDc3gqY2aYAWFx7XWWiasuF2K8Me5WbN8otHKTUKBwjPtNpRs/rbUZm7KxWAaNj7P1a/p52GbVug==", + "requires": { + "is-extendable": "^0.1.0" + } + } + } + }, "chalk": { "version": "2.4.2", "resolved": "https://registry.npmjs.org/chalk/-/chalk-2.4.2.tgz", @@ -27886,11 +32022,80 @@ "resolved": "https://registry.npmjs.org/color-name/-/color-name-1.1.3.tgz", "integrity": "sha512-72fSenhMw2HZMTVHeCA9KCmpEIbzWiQsjN+BHcBbS9vr1mtt+vJjPdksIBNUmKAW8TFUDPJK5SUU3QhE9NEXDw==" }, + "escape-string-regexp": { + "version": "1.0.5", + "resolved": "https://registry.npmjs.org/escape-string-regexp/-/escape-string-regexp-1.0.5.tgz", + "integrity": "sha512-vbRorB5FUQWvla16U8R/qgaFIya2qGzwDrNmCZuYKrbdSUMG6I1ZCGQRefkRVhuOkIGVne7BQ35DSfo1qvJqFg==" + }, + "fill-range": { + "version": "4.0.0", + "resolved": "https://registry.npmjs.org/fill-range/-/fill-range-4.0.0.tgz", + "integrity": "sha512-VcpLTWqWDiTerugjj8e3+esbg+skS3M9e54UuR3iCeIDMXCLTsAH8hTSzDQU/X6/6t3eYkOKoZSef2PlU6U1XQ==", + "requires": { + "extend-shallow": "^2.0.1", + "is-number": "^3.0.0", + "repeat-string": "^1.6.1", + "to-regex-range": "^2.1.0" + }, + "dependencies": { + "extend-shallow": { + "version": "2.0.1", + "resolved": "https://registry.npmjs.org/extend-shallow/-/extend-shallow-2.0.1.tgz", + "integrity": "sha512-zCnTtlxNoAiDc3gqY2aYAWFx7XWWiasuF2K8Me5WbN8otHKTUKBwjPtNpRs/rbUZm7KxWAaNj7P1a/p52GbVug==", + "requires": { + "is-extendable": "^0.1.0" + } + } + } + }, "has-flag": { "version": "3.0.0", "resolved": "https://registry.npmjs.org/has-flag/-/has-flag-3.0.0.tgz", "integrity": "sha512-sKJf1+ceQBr4SMkvQnBDNDtf4TXpVhVGateu0t918bl30FnbE2m4vNLX+VWe/dpjlb+HugGYzW7uQXH98HPEYw==" }, + "is-number": { + "version": "3.0.0", + "resolved": "https://registry.npmjs.org/is-number/-/is-number-3.0.0.tgz", + "integrity": 
"sha512-4cboCqIpliH+mAvFNegjZQ4kgKc3ZUhQVr3HvWbSh5q3WH2v82ct+T2Y1hdU5Gdtorx/cLifQjqCbL7bpznLTg==", + "requires": { + "kind-of": "^3.0.2" + }, + "dependencies": { + "kind-of": { + "version": "3.2.2", + "resolved": "https://registry.npmjs.org/kind-of/-/kind-of-3.2.2.tgz", + "integrity": "sha512-NOW9QQXMoZGg/oqnVNoNTTIFEIid1627WCffUBJEdMxYApq7mNE7CpzucIPc+ZQg25Phej7IJSmX3hO+oblOtQ==", + "requires": { + "is-buffer": "^1.1.5" + } + } + } + }, + "kind-of": { + "version": "6.0.3", + "resolved": "https://registry.npmjs.org/kind-of/-/kind-of-6.0.3.tgz", + "integrity": "sha512-dcS1ul+9tmeD95T+x28/ehLgd9mENa3LsvDTtzm3vyBEO7RPptvAD+t44WVXaUjTBRcrpFeFlC8WCruUR456hw==" + }, + "micromatch": { + "version": "3.1.10", + "resolved": "https://registry.npmjs.org/micromatch/-/micromatch-3.1.10.tgz", + "integrity": "sha512-MWikgl9n9M3w+bpsY3He8L+w9eF9338xRl8IAO5viDizwSzziFEyUzo2xrrloB64ADbTf8uA8vRqqttDTOmccg==", + "requires": { + "arr-diff": "^4.0.0", + "array-unique": "^0.3.2", + "braces": "^2.3.1", + "define-property": "^2.0.2", + "extend-shallow": "^3.0.2", + "extglob": "^2.0.4", + "fragment-cache": "^0.2.1", + "kind-of": "^6.0.2", + "nanomatch": "^1.2.9", + "object.pick": "^1.3.0", + "regex-not": "^1.0.0", + "snapdragon": "^0.8.1", + "to-regex": "^3.0.2" + } + }, "semver": { "version": "5.7.1", "resolved": "https://registry.npmjs.org/semver/-/semver-5.7.1.tgz", @@ -27903,6 +32108,15 @@ "requires": { "has-flag": "^3.0.0" } + }, + "to-regex-range": { + "version": "2.1.1", + "resolved": "https://registry.npmjs.org/to-regex-range/-/to-regex-range-2.1.1.tgz", + "integrity": "sha512-ZZWNfCjUokXXDGXFpZehJIkZqq91BcULFq/Pi7M5i4JnxXdhMKAK682z8bCW3o8Hj1wuuzoKcW3DfVzaP6VuNg==", + "requires": { + "is-number": "^3.0.0", + "repeat-string": "^1.6.1" + } } } }, @@ -28093,11 +32307,11 @@ } }, "glob-parent": { - "version": "5.1.2", - "resolved": "https://registry.npmjs.org/glob-parent/-/glob-parent-5.1.2.tgz", - "integrity": 
"sha512-AOIgSQCepiJYwP3ARnGx+5VnTu2HBYdzbGP45eLw1vr3zB3vZLeyed1sC9hnbcOc9/SrMyM5RPQrkGz4aS9Zow==", + "version": "6.0.2", + "resolved": "https://registry.npmjs.org/glob-parent/-/glob-parent-6.0.2.tgz", + "integrity": "sha512-XxwI8EOhVQgWp6iDL+3b0r86f4d6AX6zSU55HfB4ydCEuXLXc5FcYeOu+nnGftS4TEju/11rt4KJPTMgbfmv4A==", "requires": { - "is-glob": "^4.0.1" + "is-glob": "^4.0.3" } }, "glob-to-regexp": { @@ -28127,43 +32341,37 @@ "version": "6.0.3", "resolved": "https://registry.npmjs.org/kind-of/-/kind-of-6.0.3.tgz", "integrity": "sha512-dcS1ul+9tmeD95T+x28/ehLgd9mENa3LsvDTtzm3vyBEO7RPptvAD+t44WVXaUjTBRcrpFeFlC8WCruUR456hw==" + }, + "which": { + "version": "1.3.1", + "resolved": "https://registry.npmjs.org/which/-/which-1.3.1.tgz", + "integrity": "sha512-HxJdYWq1MTIQbJ3nw0cqssHoTNU267KlrDuGZ1WYlxDStUtKUhOaJmh112/TZmHxxUfuJqPXSOm7tDyas0OSIQ==", + "requires": { + "isexe": "^2.0.0" + } } } }, "globals": { - "version": "11.12.0", - "resolved": "https://registry.npmjs.org/globals/-/globals-11.12.0.tgz", - "integrity": "sha512-WOBp/EEGUiIsJSp7wcv/y6MO+lV9UoncWqxuFfm8eBwzWNgyfBd6Gz+IeKQ9jCmyhoH99g15M3T+QaVHFjizVA==" + "version": "13.15.0", + "resolved": "https://registry.npmjs.org/globals/-/globals-13.15.0.tgz", + "integrity": "sha512-bpzcOlgDhMG070Av0Vy5Owklpv1I6+j96GhUI7Rh7IzDCKLzboflLrrfqMu8NquDbiR4EOQk7XzJwqVJxicxog==", + "requires": { + "type-fest": "^0.20.2" + } }, "globby": { - "version": "8.0.2", - "resolved": "https://registry.npmjs.org/globby/-/globby-8.0.2.tgz", - "integrity": "sha512-yTzMmKygLp8RUpG1Ymu2VXPSJQZjNAZPD4ywgYEaG7e4tBJeUQBO8OpXrf1RCNcEs5alsoJYPAMiIHP0cmeC7w==", + "version": "11.1.0", + "resolved": "https://registry.npmjs.org/globby/-/globby-11.1.0.tgz", + "integrity": "sha512-jhIXaOzy1sb8IyocaruWSn1TjmnBVs8Ayhcy83rmxNJ8q2uWKCAj3CnJY+KpGSXCueAPc0i05kVvVKtP1t9S3g==", + "dev": true, "requires": { - "array-union": "^1.0.1", - "dir-glob": "2.0.0", - "fast-glob": "^2.0.2", - "glob": "^7.1.2", - "ignore": "^3.3.5", - "pify": "^3.0.0", - "slash": "^1.0.0" - }, - 
"dependencies": { - "ignore": { - "version": "3.3.10", - "resolved": "https://registry.npmjs.org/ignore/-/ignore-3.3.10.tgz", - "integrity": "sha512-Pgs951kaMm5GXP7MOvxERINe3gsaVjUWFm+UZPSq9xYriQAksyhg0csnS0KXSNRD5NmNdapXEpjxG49+AKh/ug==" - }, - "pify": { - "version": "3.0.0", - "resolved": "https://registry.npmjs.org/pify/-/pify-3.0.0.tgz", - "integrity": "sha512-C3FsVNH1udSEX48gGX1xfvwTWfsYWj5U+8/uK15BGzIGrKoUpghX8hWZwa/OFnakBiiVNmBvemTJR5mcy7iPcg==" - }, - "slash": { - "version": "1.0.0", - "resolved": "https://registry.npmjs.org/slash/-/slash-1.0.0.tgz", - "integrity": "sha512-3TYDR7xWt4dIqV2JauJr+EJeW356RXijHeUlO+8djJ+uBXPn8/2dpzBc8yQhh583sVvc9CvFAeQVgijsH+PNNg==" - } + "array-union": "^2.1.0", + "dir-glob": "^3.0.1", + "fast-glob": "^3.2.9", + "ignore": "^5.2.0", + "merge2": "^1.4.1", + "slash": "^3.0.0" } }, "graceful-fs": { @@ -28295,6 +32503,24 @@ "kind-of": "^4.0.0" }, "dependencies": { + "is-number": { + "version": "3.0.0", + "resolved": "https://registry.npmjs.org/is-number/-/is-number-3.0.0.tgz", + "integrity": "sha512-4cboCqIpliH+mAvFNegjZQ4kgKc3ZUhQVr3HvWbSh5q3WH2v82ct+T2Y1hdU5Gdtorx/cLifQjqCbL7bpznLTg==", + "requires": { + "kind-of": "^3.0.2" + }, + "dependencies": { + "kind-of": { + "version": "3.2.2", + "resolved": "https://registry.npmjs.org/kind-of/-/kind-of-3.2.2.tgz", + "integrity": "sha512-NOW9QQXMoZGg/oqnVNoNTTIFEIid1627WCffUBJEdMxYApq7mNE7CpzucIPc+ZQg25Phej7IJSmX3hO+oblOtQ==", + "requires": { + "is-buffer": "^1.1.5" + } + } + } + }, "kind-of": { "version": "4.0.0", "resolved": "https://registry.npmjs.org/kind-of/-/kind-of-4.0.0.tgz", @@ -28459,13 +32685,6 @@ "param-case": "^3.0.3", "relateurl": "^0.2.7", "terser": "^4.6.3" - }, - "dependencies": { - "commander": { - "version": "4.1.1", - "resolved": "https://registry.npmjs.org/commander/-/commander-4.1.1.tgz", - "integrity": "sha512-NOKm8xhkzAjzFx8B2v5OAHT+u5pRQc2UCa2Vq9jYL/31o2wi9mxBA7LIFs3sV5VSC49z6pEhfbMULvShKj26WA==" - } } }, "html-webpack-plugin": { @@ -28528,6 +32747,108 @@ "is-glob": 
"^4.0.0", "lodash": "^4.17.11", "micromatch": "^3.1.10" + }, + "dependencies": { + "braces": { + "version": "2.3.2", + "resolved": "https://registry.npmjs.org/braces/-/braces-2.3.2.tgz", + "integrity": "sha512-aNdbnj9P8PjdXU4ybaWLK2IF3jc/EoDYbC7AazW6to3TRsfXxscC9UXOB5iDiEQrkyIbWp2SLQda4+QAa7nc3w==", + "requires": { + "arr-flatten": "^1.1.0", + "array-unique": "^0.3.2", + "extend-shallow": "^2.0.1", + "fill-range": "^4.0.0", + "isobject": "^3.0.1", + "repeat-element": "^1.1.2", + "snapdragon": "^0.8.1", + "snapdragon-node": "^2.0.1", + "split-string": "^3.0.2", + "to-regex": "^3.0.1" + }, + "dependencies": { + "extend-shallow": { + "version": "2.0.1", + "resolved": "https://registry.npmjs.org/extend-shallow/-/extend-shallow-2.0.1.tgz", + "integrity": "sha512-zCnTtlxNoAiDc3gqY2aYAWFx7XWWiasuF2K8Me5WbN8otHKTUKBwjPtNpRs/rbUZm7KxWAaNj7P1a/p52GbVug==", + "requires": { + "is-extendable": "^0.1.0" + } + } + } + }, + "fill-range": { + "version": "4.0.0", + "resolved": "https://registry.npmjs.org/fill-range/-/fill-range-4.0.0.tgz", + "integrity": "sha512-VcpLTWqWDiTerugjj8e3+esbg+skS3M9e54UuR3iCeIDMXCLTsAH8hTSzDQU/X6/6t3eYkOKoZSef2PlU6U1XQ==", + "requires": { + "extend-shallow": "^2.0.1", + "is-number": "^3.0.0", + "repeat-string": "^1.6.1", + "to-regex-range": "^2.1.0" + }, + "dependencies": { + "extend-shallow": { + "version": "2.0.1", + "resolved": "https://registry.npmjs.org/extend-shallow/-/extend-shallow-2.0.1.tgz", + "integrity": "sha512-zCnTtlxNoAiDc3gqY2aYAWFx7XWWiasuF2K8Me5WbN8otHKTUKBwjPtNpRs/rbUZm7KxWAaNj7P1a/p52GbVug==", + "requires": { + "is-extendable": "^0.1.0" + } + } + } + }, + "is-number": { + "version": "3.0.0", + "resolved": "https://registry.npmjs.org/is-number/-/is-number-3.0.0.tgz", + "integrity": "sha512-4cboCqIpliH+mAvFNegjZQ4kgKc3ZUhQVr3HvWbSh5q3WH2v82ct+T2Y1hdU5Gdtorx/cLifQjqCbL7bpznLTg==", + "requires": { + "kind-of": "^3.0.2" + }, + "dependencies": { + "kind-of": { + "version": "3.2.2", + "resolved": 
"https://registry.npmjs.org/kind-of/-/kind-of-3.2.2.tgz", + "integrity": "sha512-NOW9QQXMoZGg/oqnVNoNTTIFEIid1627WCffUBJEdMxYApq7mNE7CpzucIPc+ZQg25Phej7IJSmX3hO+oblOtQ==", + "requires": { + "is-buffer": "^1.1.5" + } + } + } + }, + "kind-of": { + "version": "6.0.3", + "resolved": "https://registry.npmjs.org/kind-of/-/kind-of-6.0.3.tgz", + "integrity": "sha512-dcS1ul+9tmeD95T+x28/ehLgd9mENa3LsvDTtzm3vyBEO7RPptvAD+t44WVXaUjTBRcrpFeFlC8WCruUR456hw==" + }, + "micromatch": { + "version": "3.1.10", + "resolved": "https://registry.npmjs.org/micromatch/-/micromatch-3.1.10.tgz", + "integrity": "sha512-MWikgl9n9M3w+bpsY3He8L+w9eF9338xRl8IAO5viDizwSzziFEyUzo2xrrloB64ADbTf8uA8vRqqttDTOmccg==", + "requires": { + "arr-diff": "^4.0.0", + "array-unique": "^0.3.2", + "braces": "^2.3.1", + "define-property": "^2.0.2", + "extend-shallow": "^3.0.2", + "extglob": "^2.0.4", + "fragment-cache": "^0.2.1", + "kind-of": "^6.0.2", + "nanomatch": "^1.2.9", + "object.pick": "^1.3.0", + "regex-not": "^1.0.0", + "snapdragon": "^0.8.1", + "to-regex": "^3.0.2" + } + }, + "to-regex-range": { + "version": "2.1.1", + "resolved": "https://registry.npmjs.org/to-regex-range/-/to-regex-range-2.1.1.tgz", + "integrity": "sha512-ZZWNfCjUokXXDGXFpZehJIkZqq91BcULFq/Pi7M5i4JnxXdhMKAK682z8bCW3o8Hj1wuuzoKcW3DfVzaP6VuNg==", + "requires": { + "is-number": "^3.0.0", + "repeat-string": "^1.6.1" + } + } } }, "http-signature": { @@ -28585,9 +32906,9 @@ "integrity": "sha512-DUNFN5j7Tln0D+TxzloUjKB+CtVu6myn0JEFak6dG18mNt9YkQ6lzGCdafwofISZ1lLF3xRHJ98VKy9ynkcFaA==" }, "ignore": { - "version": "4.0.6", - "resolved": "https://registry.npmjs.org/ignore/-/ignore-4.0.6.tgz", - "integrity": "sha512-cyFDKrqc/YdcWFniJhzI42+AzS+gNwmUzOSFcRCQYwySuBBBy/KjuxWLZ/FHEH6Moq1NizMOBWyTcv8O4OZIMg==" + "version": "5.2.0", + "resolved": "https://registry.npmjs.org/ignore/-/ignore-5.2.0.tgz", + "integrity": "sha512-CmxgYGiEPCLhfLnpPp1MoRmifwEIOgjcHXxOBjv7mY96c+eWScsOP9c112ZyLdWHi0FxHjI+4uVhKYp/gcdRmQ==" }, "immer": { "version": "1.10.0", @@ 
-28603,12 +32924,12 @@ } }, "import-fresh": { - "version": "2.0.0", - "resolved": "https://registry.npmjs.org/import-fresh/-/import-fresh-2.0.0.tgz", - "integrity": "sha512-eZ5H8rcgYazHbKC3PG4ClHNykCSxtAhxSSEM+2mb+7evD2CKF5V7c0dNum7AdpDh0ZdICwZY9sRSn8f+KH96sg==", + "version": "3.3.0", + "resolved": "https://registry.npmjs.org/import-fresh/-/import-fresh-3.3.0.tgz", + "integrity": "sha512-veYYhQa+D1QBKznvhUHxb8faxlrwUnxseDAbAp457E0wLNio2bOSKnjYDhMj+YiAq61xrMGhQk9iXVk5FzgQMw==", "requires": { - "caller-path": "^2.0.0", - "resolve-from": "^3.0.0" + "parent-module": "^1.0.0", + "resolve-from": "^4.0.0" } }, "import-from": { @@ -28617,6 +32938,13 @@ "integrity": "sha512-0vdnLL2wSGnhlRmzHJAg5JHjt1l2vYhzJ7tNLGbeVg0fse56tpGaH0uzH+r9Slej+BSXXEHvBKDEnVSLLE9/+w==", "requires": { "resolve-from": "^3.0.0" + }, + "dependencies": { + "resolve-from": { + "version": "3.0.0", + "resolved": "https://registry.npmjs.org/resolve-from/-/resolve-from-3.0.0.tgz", + "integrity": "sha512-GnlH6vxLymXJNMBo7XP1fJIzBFbdYt49CuTwmB/6N53t+kMPRMFKz783LlQ4tv28XoQfMWinAJX6WCGf2IlaIw==" + } } }, "import-local": { @@ -28681,32 +33009,99 @@ "integrity": "sha512-7NXolsK4CAS5+xvdj5OMMbI962hU/wvwoxk+LWR9Ek9bVtyuuYScDN6eS0rUm6TxApFpw7CX1o4uJzcd4AyD3Q==" }, "inquirer": { - "version": "7.3.3", - "resolved": "https://registry.npmjs.org/inquirer/-/inquirer-7.3.3.tgz", - "integrity": "sha512-JG3eIAj5V9CwcGvuOmoo6LB9kbAYT8HXffUl6memuszlwDC/qvFAJw49XJ5NROSFNPxp3iQg1GqkFhaY/CR0IA==", + "version": "7.0.4", + "resolved": "https://registry.npmjs.org/inquirer/-/inquirer-7.0.4.tgz", + "integrity": "sha512-Bu5Td5+j11sCkqfqmUTiwv+tWisMtP0L7Q8WrqA2C/BbBhy1YTdFrvjjlrKq8oagA/tLQBski2Gcx/Sqyi2qSQ==", "requires": { "ansi-escapes": "^4.2.1", - "chalk": "^4.1.0", + "chalk": "^2.4.2", "cli-cursor": "^3.1.0", - "cli-width": "^3.0.0", + "cli-width": "^2.0.0", "external-editor": "^3.0.3", "figures": "^3.0.0", - "lodash": "^4.17.19", + "lodash": "^4.17.15", "mute-stream": "0.0.8", - "run-async": "^2.4.0", - "rxjs": "^6.6.0", + 
"run-async": "^2.2.0", + "rxjs": "^6.5.3", "string-width": "^4.1.0", - "strip-ansi": "^6.0.0", + "strip-ansi": "^5.1.0", "through": "^2.3.6" }, "dependencies": { + "ansi-escapes": { + "version": "4.3.2", + "resolved": "https://registry.npmjs.org/ansi-escapes/-/ansi-escapes-4.3.2.tgz", + "integrity": "sha512-gKXj5ALrKWQLsYG9jlTRmR/xKluxHV+Z9QEwNIgCfM1/uwPMCuzVVnh5mwTd+OuBZcwSIMbqssNWRm1lE51QaQ==", + "requires": { + "type-fest": "^0.21.3" + } + }, + "ansi-regex": { + "version": "4.1.1", + "resolved": "https://registry.npmjs.org/ansi-regex/-/ansi-regex-4.1.1.tgz", + "integrity": "sha512-ILlv4k/3f6vfQ4OoP2AGvirOktlQ98ZEL1k9FaQjxa3L1abBgbuTDAdPOpvbGncC0BTVQrl+OM8xZGK6tWXt7g==" + }, + "ansi-styles": { + "version": "3.2.1", + "resolved": "https://registry.npmjs.org/ansi-styles/-/ansi-styles-3.2.1.tgz", + "integrity": "sha512-VT0ZI6kZRdTh8YyJw3SMbYm/u+NqfsAxEpWO0Pf9sq8/e94WxxOpPKx9FR1FlyCtOVDNOQ+8ntlqFxiRc+r5qA==", + "requires": { + "color-convert": "^1.9.0" + } + }, + "chalk": { + "version": "2.4.2", + "resolved": "https://registry.npmjs.org/chalk/-/chalk-2.4.2.tgz", + "integrity": "sha512-Mti+f9lpJNcwF4tWV8/OrTTtF1gZi+f8FqlyAdouralcFWFQWF2+NgCHShjkCb+IFBLq9buZwE1xckQU4peSuQ==", + "requires": { + "ansi-styles": "^3.2.1", + "escape-string-regexp": "^1.0.5", + "supports-color": "^5.3.0" + } + }, + "color-convert": { + "version": "1.9.3", + "resolved": "https://registry.npmjs.org/color-convert/-/color-convert-1.9.3.tgz", + "integrity": "sha512-QfAUtd+vFdAtFQcC8CCyYt1fYWxSqAiK2cSD6zDB8N3cpsEBAvRxp9zOGg6G/SHHJYAT88/az/IuDGALsNVbGg==", + "requires": { + "color-name": "1.1.3" + } + }, + "color-name": { + "version": "1.1.3", + "resolved": "https://registry.npmjs.org/color-name/-/color-name-1.1.3.tgz", + "integrity": "sha512-72fSenhMw2HZMTVHeCA9KCmpEIbzWiQsjN+BHcBbS9vr1mtt+vJjPdksIBNUmKAW8TFUDPJK5SUU3QhE9NEXDw==" + }, + "escape-string-regexp": { + "version": "1.0.5", + "resolved": "https://registry.npmjs.org/escape-string-regexp/-/escape-string-regexp-1.0.5.tgz", + "integrity": 
"sha512-vbRorB5FUQWvla16U8R/qgaFIya2qGzwDrNmCZuYKrbdSUMG6I1ZCGQRefkRVhuOkIGVne7BQ35DSfo1qvJqFg==" + }, + "has-flag": { + "version": "3.0.0", + "resolved": "https://registry.npmjs.org/has-flag/-/has-flag-3.0.0.tgz", + "integrity": "sha512-sKJf1+ceQBr4SMkvQnBDNDtf4TXpVhVGateu0t918bl30FnbE2m4vNLX+VWe/dpjlb+HugGYzW7uQXH98HPEYw==" + }, "strip-ansi": { - "version": "6.0.1", - "resolved": "https://registry.npmjs.org/strip-ansi/-/strip-ansi-6.0.1.tgz", - "integrity": "sha512-Y38VPSHcqkFrCpFnQ9vuSXmquuv5oXOKpGeT6aGrr3o3Gc9AlVa6JBfUSOCnbxGGZF+/0ooI7KrPuUSztUdU5A==", + "version": "5.2.0", + "resolved": "https://registry.npmjs.org/strip-ansi/-/strip-ansi-5.2.0.tgz", + "integrity": "sha512-DuRs1gKbBqsMKIZlrffwlug8MHkcnpjs5VPmL1PAh+mA30U0DTotfDZ0d2UUsXpPmPmMMJ6W773MaA3J+lbiWA==", "requires": { - "ansi-regex": "^5.0.1" + "ansi-regex": "^4.1.0" + } + }, + "supports-color": { + "version": "5.5.0", + "resolved": "https://registry.npmjs.org/supports-color/-/supports-color-5.5.0.tgz", + "integrity": "sha512-QjVjwdXIt408MIiAqCX4oUKsgU2EqAGzs2Ppkm4aQYbjm+ZEWEcW4SfFNTr4uMNZma0ey4f5lgLrkB0aX0QMow==", + "requires": { + "has-flag": "^3.0.0" } + }, + "type-fest": { + "version": "0.21.3", + "resolved": "https://registry.npmjs.org/type-fest/-/type-fest-0.21.3.tgz", + "integrity": "sha512-t0rzBq87m3fVcduHDUFhKmyyX+9eo6WQjZvf51Ea/M0Q7+T374Jp1aUiyUl0GKxp8M/OETVHSDvmkyPgvX+X2w==" } } }, @@ -28901,12 +33296,9 @@ "integrity": "sha512-F+i2BKsFrH66iaUFc0woD8sLy8getkwTwtOBjvs56Cx4CgJDeKQeqfz8wAYiSb8JOprWhHH5p77PbmYCvvUuXQ==" }, "is-extendable": { - "version": "1.0.1", - "resolved": "https://registry.npmjs.org/is-extendable/-/is-extendable-1.0.1.tgz", - "integrity": "sha512-arnXMxT1hhoKo9k1LZdmlNyJdDDfy2v0fXjFlmok4+i8ul/6WlbVge9bhM74OpNPQPMGUToDtz+KXa1PneJxOA==", - "requires": { - "is-plain-object": "^2.0.4" - } + "version": "0.1.1", + "resolved": "https://registry.npmjs.org/is-extendable/-/is-extendable-0.1.1.tgz", + "integrity": 
"sha512-5BMULNob1vgFX6EjQw5izWDxrecWK9AM72rugNr0TFldMOi0fj6Jk+zeKIt0xGj4cEfQIJth4w3OKWOJ4f+AFw==" }, "is-extglob": { "version": "2.1.1", @@ -28942,12 +33334,9 @@ "integrity": "sha512-dqJvarLawXsFbNDeJW7zAz8ItJ9cd28YufuuFzh0G8pNHjJMnY08Dv7sYX2uF5UpQOwieAeOExEYAWWfu7ZZUA==" }, "is-number": { - "version": "3.0.0", - "resolved": "https://registry.npmjs.org/is-number/-/is-number-3.0.0.tgz", - "integrity": "sha512-4cboCqIpliH+mAvFNegjZQ4kgKc3ZUhQVr3HvWbSh5q3WH2v82ct+T2Y1hdU5Gdtorx/cLifQjqCbL7bpznLTg==", - "requires": { - "kind-of": "^3.0.2" - } + "version": "7.0.0", + "resolved": "https://registry.npmjs.org/is-number/-/is-number-7.0.0.tgz", + "integrity": "sha512-41Cifkg6e8TylSpdtTpeLVMqvSBEVzTttHvERD741+pnZ8ANv0004MRL43QKPDlK9cGvNp6NZWZUBlbGXYxxng==" }, "is-number-object": { "version": "1.0.7", @@ -29123,6 +33512,13 @@ "@babel/types": "^7.4.0", "istanbul-lib-coverage": "^2.0.5", "semver": "^6.0.0" + }, + "dependencies": { + "semver": { + "version": "6.3.0", + "resolved": "https://registry.npmjs.org/semver/-/semver-6.3.0.tgz", + "integrity": "sha512-b39TBaTSfV6yBrapU89p5fKekE2m/NwnDocOVruQFS1/veMgdzuPcnOM34M6CwxW8jH/lxEa5rBoDeUwu5HHTw==" + } } }, "istanbul-lib-report": { @@ -29160,6 +33556,16 @@ "make-dir": "^2.1.0", "rimraf": "^2.6.3", "source-map": "^0.6.1" + }, + "dependencies": { + "rimraf": { + "version": "2.7.1", + "resolved": "https://registry.npmjs.org/rimraf/-/rimraf-2.7.1.tgz", + "integrity": "sha512-uWjbaKIK3T1OSVptzX7Nl6PvQ3qAGtKEtVRjRuazjfL3Bx5eI409VZSqgND+4UNnmzLVdPj9FqFJNPqBZFve4w==", + "requires": { + "glob": "^7.1.3" + } + } } }, "istanbul-reports": { @@ -29240,6 +33646,11 @@ "resolved": "https://registry.npmjs.org/color-name/-/color-name-1.1.3.tgz", "integrity": "sha512-72fSenhMw2HZMTVHeCA9KCmpEIbzWiQsjN+BHcBbS9vr1mtt+vJjPdksIBNUmKAW8TFUDPJK5SUU3QhE9NEXDw==" }, + "escape-string-regexp": { + "version": "1.0.5", + "resolved": "https://registry.npmjs.org/escape-string-regexp/-/escape-string-regexp-1.0.5.tgz", + "integrity": 
"sha512-vbRorB5FUQWvla16U8R/qgaFIya2qGzwDrNmCZuYKrbdSUMG6I1ZCGQRefkRVhuOkIGVne7BQ35DSfo1qvJqFg==" + }, "has-flag": { "version": "3.0.0", "resolved": "https://registry.npmjs.org/has-flag/-/has-flag-3.0.0.tgz", @@ -29292,6 +33703,33 @@ "color-convert": "^1.9.0" } }, + "braces": { + "version": "2.3.2", + "resolved": "https://registry.npmjs.org/braces/-/braces-2.3.2.tgz", + "integrity": "sha512-aNdbnj9P8PjdXU4ybaWLK2IF3jc/EoDYbC7AazW6to3TRsfXxscC9UXOB5iDiEQrkyIbWp2SLQda4+QAa7nc3w==", + "requires": { + "arr-flatten": "^1.1.0", + "array-unique": "^0.3.2", + "extend-shallow": "^2.0.1", + "fill-range": "^4.0.0", + "isobject": "^3.0.1", + "repeat-element": "^1.1.2", + "snapdragon": "^0.8.1", + "snapdragon-node": "^2.0.1", + "split-string": "^3.0.2", + "to-regex": "^3.0.1" + }, + "dependencies": { + "extend-shallow": { + "version": "2.0.1", + "resolved": "https://registry.npmjs.org/extend-shallow/-/extend-shallow-2.0.1.tgz", + "integrity": "sha512-zCnTtlxNoAiDc3gqY2aYAWFx7XWWiasuF2K8Me5WbN8otHKTUKBwjPtNpRs/rbUZm7KxWAaNj7P1a/p52GbVug==", + "requires": { + "is-extendable": "^0.1.0" + } + } + } + }, "chalk": { "version": "2.4.2", "resolved": "https://registry.npmjs.org/chalk/-/chalk-2.4.2.tgz", @@ -29315,16 +33753,85 @@ "resolved": "https://registry.npmjs.org/color-name/-/color-name-1.1.3.tgz", "integrity": "sha512-72fSenhMw2HZMTVHeCA9KCmpEIbzWiQsjN+BHcBbS9vr1mtt+vJjPdksIBNUmKAW8TFUDPJK5SUU3QhE9NEXDw==" }, + "escape-string-regexp": { + "version": "1.0.5", + "resolved": "https://registry.npmjs.org/escape-string-regexp/-/escape-string-regexp-1.0.5.tgz", + "integrity": "sha512-vbRorB5FUQWvla16U8R/qgaFIya2qGzwDrNmCZuYKrbdSUMG6I1ZCGQRefkRVhuOkIGVne7BQ35DSfo1qvJqFg==" + }, + "fill-range": { + "version": "4.0.0", + "resolved": "https://registry.npmjs.org/fill-range/-/fill-range-4.0.0.tgz", + "integrity": "sha512-VcpLTWqWDiTerugjj8e3+esbg+skS3M9e54UuR3iCeIDMXCLTsAH8hTSzDQU/X6/6t3eYkOKoZSef2PlU6U1XQ==", + "requires": { + "extend-shallow": "^2.0.1", + "is-number": "^3.0.0", + 
"repeat-string": "^1.6.1", + "to-regex-range": "^2.1.0" + }, + "dependencies": { + "extend-shallow": { + "version": "2.0.1", + "resolved": "https://registry.npmjs.org/extend-shallow/-/extend-shallow-2.0.1.tgz", + "integrity": "sha512-zCnTtlxNoAiDc3gqY2aYAWFx7XWWiasuF2K8Me5WbN8otHKTUKBwjPtNpRs/rbUZm7KxWAaNj7P1a/p52GbVug==", + "requires": { + "is-extendable": "^0.1.0" + } + } + } + }, "has-flag": { "version": "3.0.0", "resolved": "https://registry.npmjs.org/has-flag/-/has-flag-3.0.0.tgz", "integrity": "sha512-sKJf1+ceQBr4SMkvQnBDNDtf4TXpVhVGateu0t918bl30FnbE2m4vNLX+VWe/dpjlb+HugGYzW7uQXH98HPEYw==" }, + "is-number": { + "version": "3.0.0", + "resolved": "https://registry.npmjs.org/is-number/-/is-number-3.0.0.tgz", + "integrity": "sha512-4cboCqIpliH+mAvFNegjZQ4kgKc3ZUhQVr3HvWbSh5q3WH2v82ct+T2Y1hdU5Gdtorx/cLifQjqCbL7bpznLTg==", + "requires": { + "kind-of": "^3.0.2" + }, + "dependencies": { + "kind-of": { + "version": "3.2.2", + "resolved": "https://registry.npmjs.org/kind-of/-/kind-of-3.2.2.tgz", + "integrity": "sha512-NOW9QQXMoZGg/oqnVNoNTTIFEIid1627WCffUBJEdMxYApq7mNE7CpzucIPc+ZQg25Phej7IJSmX3hO+oblOtQ==", + "requires": { + "is-buffer": "^1.1.5" + } + } + } + }, "jest-get-type": { "version": "24.9.0", "resolved": "https://registry.npmjs.org/jest-get-type/-/jest-get-type-24.9.0.tgz", "integrity": "sha512-lUseMzAley4LhIcpSP9Jf+fTrQ4a1yHQwLNeeVa2cEmbCGeoZAtYPOIv8JaxLD/sUpKxetKGP+gsHl8f8TSj8Q==" }, + "kind-of": { + "version": "6.0.3", + "resolved": "https://registry.npmjs.org/kind-of/-/kind-of-6.0.3.tgz", + "integrity": "sha512-dcS1ul+9tmeD95T+x28/ehLgd9mENa3LsvDTtzm3vyBEO7RPptvAD+t44WVXaUjTBRcrpFeFlC8WCruUR456hw==" + }, + "micromatch": { + "version": "3.1.10", + "resolved": "https://registry.npmjs.org/micromatch/-/micromatch-3.1.10.tgz", + "integrity": "sha512-MWikgl9n9M3w+bpsY3He8L+w9eF9338xRl8IAO5viDizwSzziFEyUzo2xrrloB64ADbTf8uA8vRqqttDTOmccg==", + "requires": { + "arr-diff": "^4.0.0", + "array-unique": "^0.3.2", + "braces": "^2.3.1", + "define-property": "^2.0.2", + 
"extend-shallow": "^3.0.2", + "extglob": "^2.0.4", + "fragment-cache": "^0.2.1", + "kind-of": "^6.0.2", + "nanomatch": "^1.2.9", + "object.pick": "^1.3.0", + "regex-not": "^1.0.0", + "snapdragon": "^0.8.1", + "to-regex": "^3.0.2" + } + }, "pretty-format": { "version": "24.9.0", "resolved": "https://registry.npmjs.org/pretty-format/-/pretty-format-24.9.0.tgz", @@ -29343,6 +33850,15 @@ "requires": { "has-flag": "^3.0.0" } + }, + "to-regex-range": { + "version": "2.1.1", + "resolved": "https://registry.npmjs.org/to-regex-range/-/to-regex-range-2.1.1.tgz", + "integrity": "sha512-ZZWNfCjUokXXDGXFpZehJIkZqq91BcULFq/Pi7M5i4JnxXdhMKAK682z8bCW3o8Hj1wuuzoKcW3DfVzaP6VuNg==", + "requires": { + "is-number": "^3.0.0", + "repeat-string": "^1.6.1" + } } } }, @@ -29413,6 +33929,11 @@ "resolved": "https://registry.npmjs.org/color-name/-/color-name-1.1.3.tgz", "integrity": "sha512-72fSenhMw2HZMTVHeCA9KCmpEIbzWiQsjN+BHcBbS9vr1mtt+vJjPdksIBNUmKAW8TFUDPJK5SUU3QhE9NEXDw==" }, + "escape-string-regexp": { + "version": "1.0.5", + "resolved": "https://registry.npmjs.org/escape-string-regexp/-/escape-string-regexp-1.0.5.tgz", + "integrity": "sha512-vbRorB5FUQWvla16U8R/qgaFIya2qGzwDrNmCZuYKrbdSUMG6I1ZCGQRefkRVhuOkIGVne7BQ35DSfo1qvJqFg==" + }, "has-flag": { "version": "3.0.0", "resolved": "https://registry.npmjs.org/has-flag/-/has-flag-3.0.0.tgz", @@ -29569,6 +34090,54 @@ "walker": "^1.0.7" }, "dependencies": { + "braces": { + "version": "2.3.2", + "resolved": "https://registry.npmjs.org/braces/-/braces-2.3.2.tgz", + "integrity": "sha512-aNdbnj9P8PjdXU4ybaWLK2IF3jc/EoDYbC7AazW6to3TRsfXxscC9UXOB5iDiEQrkyIbWp2SLQda4+QAa7nc3w==", + "requires": { + "arr-flatten": "^1.1.0", + "array-unique": "^0.3.2", + "extend-shallow": "^2.0.1", + "fill-range": "^4.0.0", + "isobject": "^3.0.1", + "repeat-element": "^1.1.2", + "snapdragon": "^0.8.1", + "snapdragon-node": "^2.0.1", + "split-string": "^3.0.2", + "to-regex": "^3.0.1" + }, + "dependencies": { + "extend-shallow": { + "version": "2.0.1", + "resolved": 
"https://registry.npmjs.org/extend-shallow/-/extend-shallow-2.0.1.tgz", + "integrity": "sha512-zCnTtlxNoAiDc3gqY2aYAWFx7XWWiasuF2K8Me5WbN8otHKTUKBwjPtNpRs/rbUZm7KxWAaNj7P1a/p52GbVug==", + "requires": { + "is-extendable": "^0.1.0" + } + } + } + }, + "fill-range": { + "version": "4.0.0", + "resolved": "https://registry.npmjs.org/fill-range/-/fill-range-4.0.0.tgz", + "integrity": "sha512-VcpLTWqWDiTerugjj8e3+esbg+skS3M9e54UuR3iCeIDMXCLTsAH8hTSzDQU/X6/6t3eYkOKoZSef2PlU6U1XQ==", + "requires": { + "extend-shallow": "^2.0.1", + "is-number": "^3.0.0", + "repeat-string": "^1.6.1", + "to-regex-range": "^2.1.0" + }, + "dependencies": { + "extend-shallow": { + "version": "2.0.1", + "resolved": "https://registry.npmjs.org/extend-shallow/-/extend-shallow-2.0.1.tgz", + "integrity": "sha512-zCnTtlxNoAiDc3gqY2aYAWFx7XWWiasuF2K8Me5WbN8otHKTUKBwjPtNpRs/rbUZm7KxWAaNj7P1a/p52GbVug==", + "requires": { + "is-extendable": "^0.1.0" + } + } + } + }, "fsevents": { "version": "1.2.13", "resolved": "https://registry.npmjs.org/fsevents/-/fsevents-1.2.13.tgz", @@ -29578,6 +34147,58 @@ "bindings": "^1.5.0", "nan": "^2.12.1" } + }, + "is-number": { + "version": "3.0.0", + "resolved": "https://registry.npmjs.org/is-number/-/is-number-3.0.0.tgz", + "integrity": "sha512-4cboCqIpliH+mAvFNegjZQ4kgKc3ZUhQVr3HvWbSh5q3WH2v82ct+T2Y1hdU5Gdtorx/cLifQjqCbL7bpznLTg==", + "requires": { + "kind-of": "^3.0.2" + }, + "dependencies": { + "kind-of": { + "version": "3.2.2", + "resolved": "https://registry.npmjs.org/kind-of/-/kind-of-3.2.2.tgz", + "integrity": "sha512-NOW9QQXMoZGg/oqnVNoNTTIFEIid1627WCffUBJEdMxYApq7mNE7CpzucIPc+ZQg25Phej7IJSmX3hO+oblOtQ==", + "requires": { + "is-buffer": "^1.1.5" + } + } + } + }, + "kind-of": { + "version": "6.0.3", + "resolved": "https://registry.npmjs.org/kind-of/-/kind-of-6.0.3.tgz", + "integrity": "sha512-dcS1ul+9tmeD95T+x28/ehLgd9mENa3LsvDTtzm3vyBEO7RPptvAD+t44WVXaUjTBRcrpFeFlC8WCruUR456hw==" + }, + "micromatch": { + "version": "3.1.10", + "resolved": 
"https://registry.npmjs.org/micromatch/-/micromatch-3.1.10.tgz", + "integrity": "sha512-MWikgl9n9M3w+bpsY3He8L+w9eF9338xRl8IAO5viDizwSzziFEyUzo2xrrloB64ADbTf8uA8vRqqttDTOmccg==", + "requires": { + "arr-diff": "^4.0.0", + "array-unique": "^0.3.2", + "braces": "^2.3.1", + "define-property": "^2.0.2", + "extend-shallow": "^3.0.2", + "extglob": "^2.0.4", + "fragment-cache": "^0.2.1", + "kind-of": "^6.0.2", + "nanomatch": "^1.2.9", + "object.pick": "^1.3.0", + "regex-not": "^1.0.0", + "snapdragon": "^0.8.1", + "to-regex": "^3.0.2" + } + }, + "to-regex-range": { + "version": "2.1.1", + "resolved": "https://registry.npmjs.org/to-regex-range/-/to-regex-range-2.1.1.tgz", + "integrity": "sha512-ZZWNfCjUokXXDGXFpZehJIkZqq91BcULFq/Pi7M5i4JnxXdhMKAK682z8bCW3o8Hj1wuuzoKcW3DfVzaP6VuNg==", + "requires": { + "is-number": "^3.0.0", + "repeat-string": "^1.6.1" + } } } }, @@ -29640,6 +34261,11 @@ "resolved": "https://registry.npmjs.org/color-name/-/color-name-1.1.3.tgz", "integrity": "sha512-72fSenhMw2HZMTVHeCA9KCmpEIbzWiQsjN+BHcBbS9vr1mtt+vJjPdksIBNUmKAW8TFUDPJK5SUU3QhE9NEXDw==" }, + "escape-string-regexp": { + "version": "1.0.5", + "resolved": "https://registry.npmjs.org/escape-string-regexp/-/escape-string-regexp-1.0.5.tgz", + "integrity": "sha512-vbRorB5FUQWvla16U8R/qgaFIya2qGzwDrNmCZuYKrbdSUMG6I1ZCGQRefkRVhuOkIGVne7BQ35DSfo1qvJqFg==" + }, "has-flag": { "version": "3.0.0", "resolved": "https://registry.npmjs.org/has-flag/-/has-flag-3.0.0.tgz", @@ -29771,6 +34397,11 @@ "resolved": "https://registry.npmjs.org/diff-sequences/-/diff-sequences-24.9.0.tgz", "integrity": "sha512-Dj6Wk3tWyTE+Fo1rW8v0Xhwk80um6yFYKbuAxc9c3EZxIHFDYwbi34Uk42u1CdnIiVorvt4RmlSDjIPyzGC2ew==" }, + "escape-string-regexp": { + "version": "1.0.5", + "resolved": "https://registry.npmjs.org/escape-string-regexp/-/escape-string-regexp-1.0.5.tgz", + "integrity": "sha512-vbRorB5FUQWvla16U8R/qgaFIya2qGzwDrNmCZuYKrbdSUMG6I1ZCGQRefkRVhuOkIGVne7BQ35DSfo1qvJqFg==" + }, "has-flag": { "version": "3.0.0", "resolved": 
"https://registry.npmjs.org/has-flag/-/has-flag-3.0.0.tgz", @@ -29836,6 +34467,33 @@ "color-convert": "^1.9.0" } }, + "braces": { + "version": "2.3.2", + "resolved": "https://registry.npmjs.org/braces/-/braces-2.3.2.tgz", + "integrity": "sha512-aNdbnj9P8PjdXU4ybaWLK2IF3jc/EoDYbC7AazW6to3TRsfXxscC9UXOB5iDiEQrkyIbWp2SLQda4+QAa7nc3w==", + "requires": { + "arr-flatten": "^1.1.0", + "array-unique": "^0.3.2", + "extend-shallow": "^2.0.1", + "fill-range": "^4.0.0", + "isobject": "^3.0.1", + "repeat-element": "^1.1.2", + "snapdragon": "^0.8.1", + "snapdragon-node": "^2.0.1", + "split-string": "^3.0.2", + "to-regex": "^3.0.1" + }, + "dependencies": { + "extend-shallow": { + "version": "2.0.1", + "resolved": "https://registry.npmjs.org/extend-shallow/-/extend-shallow-2.0.1.tgz", + "integrity": "sha512-zCnTtlxNoAiDc3gqY2aYAWFx7XWWiasuF2K8Me5WbN8otHKTUKBwjPtNpRs/rbUZm7KxWAaNj7P1a/p52GbVug==", + "requires": { + "is-extendable": "^0.1.0" + } + } + } + }, "chalk": { "version": "2.4.2", "resolved": "https://registry.npmjs.org/chalk/-/chalk-2.4.2.tgz", @@ -29859,11 +34517,85 @@ "resolved": "https://registry.npmjs.org/color-name/-/color-name-1.1.3.tgz", "integrity": "sha512-72fSenhMw2HZMTVHeCA9KCmpEIbzWiQsjN+BHcBbS9vr1mtt+vJjPdksIBNUmKAW8TFUDPJK5SUU3QhE9NEXDw==" }, + "escape-string-regexp": { + "version": "1.0.5", + "resolved": "https://registry.npmjs.org/escape-string-regexp/-/escape-string-regexp-1.0.5.tgz", + "integrity": "sha512-vbRorB5FUQWvla16U8R/qgaFIya2qGzwDrNmCZuYKrbdSUMG6I1ZCGQRefkRVhuOkIGVne7BQ35DSfo1qvJqFg==" + }, + "fill-range": { + "version": "4.0.0", + "resolved": "https://registry.npmjs.org/fill-range/-/fill-range-4.0.0.tgz", + "integrity": "sha512-VcpLTWqWDiTerugjj8e3+esbg+skS3M9e54UuR3iCeIDMXCLTsAH8hTSzDQU/X6/6t3eYkOKoZSef2PlU6U1XQ==", + "requires": { + "extend-shallow": "^2.0.1", + "is-number": "^3.0.0", + "repeat-string": "^1.6.1", + "to-regex-range": "^2.1.0" + }, + "dependencies": { + "extend-shallow": { + "version": "2.0.1", + "resolved": 
"https://registry.npmjs.org/extend-shallow/-/extend-shallow-2.0.1.tgz", + "integrity": "sha512-zCnTtlxNoAiDc3gqY2aYAWFx7XWWiasuF2K8Me5WbN8otHKTUKBwjPtNpRs/rbUZm7KxWAaNj7P1a/p52GbVug==", + "requires": { + "is-extendable": "^0.1.0" + } + } + } + }, "has-flag": { "version": "3.0.0", "resolved": "https://registry.npmjs.org/has-flag/-/has-flag-3.0.0.tgz", "integrity": "sha512-sKJf1+ceQBr4SMkvQnBDNDtf4TXpVhVGateu0t918bl30FnbE2m4vNLX+VWe/dpjlb+HugGYzW7uQXH98HPEYw==" }, + "is-number": { + "version": "3.0.0", + "resolved": "https://registry.npmjs.org/is-number/-/is-number-3.0.0.tgz", + "integrity": "sha512-4cboCqIpliH+mAvFNegjZQ4kgKc3ZUhQVr3HvWbSh5q3WH2v82ct+T2Y1hdU5Gdtorx/cLifQjqCbL7bpznLTg==", + "requires": { + "kind-of": "^3.0.2" + }, + "dependencies": { + "kind-of": { + "version": "3.2.2", + "resolved": "https://registry.npmjs.org/kind-of/-/kind-of-3.2.2.tgz", + "integrity": "sha512-NOW9QQXMoZGg/oqnVNoNTTIFEIid1627WCffUBJEdMxYApq7mNE7CpzucIPc+ZQg25Phej7IJSmX3hO+oblOtQ==", + "requires": { + "is-buffer": "^1.1.5" + } + } + } + }, + "kind-of": { + "version": "6.0.3", + "resolved": "https://registry.npmjs.org/kind-of/-/kind-of-6.0.3.tgz", + "integrity": "sha512-dcS1ul+9tmeD95T+x28/ehLgd9mENa3LsvDTtzm3vyBEO7RPptvAD+t44WVXaUjTBRcrpFeFlC8WCruUR456hw==" + }, + "micromatch": { + "version": "3.1.10", + "resolved": "https://registry.npmjs.org/micromatch/-/micromatch-3.1.10.tgz", + "integrity": "sha512-MWikgl9n9M3w+bpsY3He8L+w9eF9338xRl8IAO5viDizwSzziFEyUzo2xrrloB64ADbTf8uA8vRqqttDTOmccg==", + "requires": { + "arr-diff": "^4.0.0", + "array-unique": "^0.3.2", + "braces": "^2.3.1", + "define-property": "^2.0.2", + "extend-shallow": "^3.0.2", + "extglob": "^2.0.4", + "fragment-cache": "^0.2.1", + "kind-of": "^6.0.2", + "nanomatch": "^1.2.9", + "object.pick": "^1.3.0", + "regex-not": "^1.0.0", + "snapdragon": "^0.8.1", + "to-regex": "^3.0.2" + } + }, + "slash": { + "version": "2.0.0", + "resolved": "https://registry.npmjs.org/slash/-/slash-2.0.0.tgz", + "integrity": 
"sha512-ZYKh3Wh2z1PpEXWr0MpSBZ0V6mZHAQfYevttO11c51CaWjGTaadiKZ+wVt1PbMlDV5qhMFslpZCemhwOK7C89A==" + }, "supports-color": { "version": "5.5.0", "resolved": "https://registry.npmjs.org/supports-color/-/supports-color-5.5.0.tgz", @@ -29871,6 +34603,15 @@ "requires": { "has-flag": "^3.0.0" } + }, + "to-regex-range": { + "version": "2.1.1", + "resolved": "https://registry.npmjs.org/to-regex-range/-/to-regex-range-2.1.1.tgz", + "integrity": "sha512-ZZWNfCjUokXXDGXFpZehJIkZqq91BcULFq/Pi7M5i4JnxXdhMKAK682z8bCW3o8Hj1wuuzoKcW3DfVzaP6VuNg==", + "requires": { + "is-number": "^3.0.0", + "repeat-string": "^1.6.1" + } } } }, @@ -29936,6 +34677,11 @@ "resolved": "https://registry.npmjs.org/color-name/-/color-name-1.1.3.tgz", "integrity": "sha512-72fSenhMw2HZMTVHeCA9KCmpEIbzWiQsjN+BHcBbS9vr1mtt+vJjPdksIBNUmKAW8TFUDPJK5SUU3QhE9NEXDw==" }, + "escape-string-regexp": { + "version": "1.0.5", + "resolved": "https://registry.npmjs.org/escape-string-regexp/-/escape-string-regexp-1.0.5.tgz", + "integrity": "sha512-vbRorB5FUQWvla16U8R/qgaFIya2qGzwDrNmCZuYKrbdSUMG6I1ZCGQRefkRVhuOkIGVne7BQ35DSfo1qvJqFg==" + }, "has-flag": { "version": "3.0.0", "resolved": "https://registry.npmjs.org/has-flag/-/has-flag-3.0.0.tgz", @@ -30018,6 +34764,11 @@ "resolved": "https://registry.npmjs.org/color-name/-/color-name-1.1.3.tgz", "integrity": "sha512-72fSenhMw2HZMTVHeCA9KCmpEIbzWiQsjN+BHcBbS9vr1mtt+vJjPdksIBNUmKAW8TFUDPJK5SUU3QhE9NEXDw==" }, + "escape-string-regexp": { + "version": "1.0.5", + "resolved": "https://registry.npmjs.org/escape-string-regexp/-/escape-string-regexp-1.0.5.tgz", + "integrity": "sha512-vbRorB5FUQWvla16U8R/qgaFIya2qGzwDrNmCZuYKrbdSUMG6I1ZCGQRefkRVhuOkIGVne7BQ35DSfo1qvJqFg==" + }, "has-flag": { "version": "3.0.0", "resolved": "https://registry.npmjs.org/has-flag/-/has-flag-3.0.0.tgz", @@ -30094,11 +34845,21 @@ "resolved": "https://registry.npmjs.org/color-name/-/color-name-1.1.3.tgz", "integrity": 
"sha512-72fSenhMw2HZMTVHeCA9KCmpEIbzWiQsjN+BHcBbS9vr1mtt+vJjPdksIBNUmKAW8TFUDPJK5SUU3QhE9NEXDw==" }, + "escape-string-regexp": { + "version": "1.0.5", + "resolved": "https://registry.npmjs.org/escape-string-regexp/-/escape-string-regexp-1.0.5.tgz", + "integrity": "sha512-vbRorB5FUQWvla16U8R/qgaFIya2qGzwDrNmCZuYKrbdSUMG6I1ZCGQRefkRVhuOkIGVne7BQ35DSfo1qvJqFg==" + }, "has-flag": { "version": "3.0.0", "resolved": "https://registry.npmjs.org/has-flag/-/has-flag-3.0.0.tgz", "integrity": "sha512-sKJf1+ceQBr4SMkvQnBDNDtf4TXpVhVGateu0t918bl30FnbE2m4vNLX+VWe/dpjlb+HugGYzW7uQXH98HPEYw==" }, + "slash": { + "version": "2.0.0", + "resolved": "https://registry.npmjs.org/slash/-/slash-2.0.0.tgz", + "integrity": "sha512-ZYKh3Wh2z1PpEXWr0MpSBZ0V6mZHAQfYevttO11c51CaWjGTaadiKZ+wVt1PbMlDV5qhMFslpZCemhwOK7C89A==" + }, "supports-color": { "version": "5.5.0", "resolved": "https://registry.npmjs.org/supports-color/-/supports-color-5.5.0.tgz", @@ -30175,6 +34936,11 @@ "resolved": "https://registry.npmjs.org/diff-sequences/-/diff-sequences-24.9.0.tgz", "integrity": "sha512-Dj6Wk3tWyTE+Fo1rW8v0Xhwk80um6yFYKbuAxc9c3EZxIHFDYwbi34Uk42u1CdnIiVorvt4RmlSDjIPyzGC2ew==" }, + "escape-string-regexp": { + "version": "1.0.5", + "resolved": "https://registry.npmjs.org/escape-string-regexp/-/escape-string-regexp-1.0.5.tgz", + "integrity": "sha512-vbRorB5FUQWvla16U8R/qgaFIya2qGzwDrNmCZuYKrbdSUMG6I1ZCGQRefkRVhuOkIGVne7BQ35DSfo1qvJqFg==" + }, "has-flag": { "version": "3.0.0", "resolved": "https://registry.npmjs.org/has-flag/-/has-flag-3.0.0.tgz", @@ -30207,6 +34973,11 @@ "react-is": "^16.8.4" } }, + "semver": { + "version": "6.3.0", + "resolved": "https://registry.npmjs.org/semver/-/semver-6.3.0.tgz", + "integrity": "sha512-b39TBaTSfV6yBrapU89p5fKekE2m/NwnDocOVruQFS1/veMgdzuPcnOM34M6CwxW8jH/lxEa5rBoDeUwu5HHTw==" + }, "supports-color": { "version": "5.5.0", "resolved": "https://registry.npmjs.org/supports-color/-/supports-color-5.5.0.tgz", @@ -30244,11 +35015,6 @@ "color-convert": "^1.9.0" } }, - "callsites": 
{ - "version": "3.1.0", - "resolved": "https://registry.npmjs.org/callsites/-/callsites-3.1.0.tgz", - "integrity": "sha512-P8BjAsXvZS+VIDUI11hHCQEv74YT67YUi5JJFNWIqL235sBmjX4+qx9Muvls5ivyNENctx46xQLQ3aTuE7ssaQ==" - }, "chalk": { "version": "2.4.2", "resolved": "https://registry.npmjs.org/chalk/-/chalk-2.4.2.tgz", @@ -30272,11 +35038,21 @@ "resolved": "https://registry.npmjs.org/color-name/-/color-name-1.1.3.tgz", "integrity": "sha512-72fSenhMw2HZMTVHeCA9KCmpEIbzWiQsjN+BHcBbS9vr1mtt+vJjPdksIBNUmKAW8TFUDPJK5SUU3QhE9NEXDw==" }, + "escape-string-regexp": { + "version": "1.0.5", + "resolved": "https://registry.npmjs.org/escape-string-regexp/-/escape-string-regexp-1.0.5.tgz", + "integrity": "sha512-vbRorB5FUQWvla16U8R/qgaFIya2qGzwDrNmCZuYKrbdSUMG6I1ZCGQRefkRVhuOkIGVne7BQ35DSfo1qvJqFg==" + }, "has-flag": { "version": "3.0.0", "resolved": "https://registry.npmjs.org/has-flag/-/has-flag-3.0.0.tgz", "integrity": "sha512-sKJf1+ceQBr4SMkvQnBDNDtf4TXpVhVGateu0t918bl30FnbE2m4vNLX+VWe/dpjlb+HugGYzW7uQXH98HPEYw==" }, + "slash": { + "version": "2.0.0", + "resolved": "https://registry.npmjs.org/slash/-/slash-2.0.0.tgz", + "integrity": "sha512-ZYKh3Wh2z1PpEXWr0MpSBZ0V6mZHAQfYevttO11c51CaWjGTaadiKZ+wVt1PbMlDV5qhMFslpZCemhwOK7C89A==" + }, "supports-color": { "version": "5.5.0", "resolved": "https://registry.npmjs.org/supports-color/-/supports-color-5.5.0.tgz", @@ -30336,6 +35112,11 @@ "resolved": "https://registry.npmjs.org/color-name/-/color-name-1.1.3.tgz", "integrity": "sha512-72fSenhMw2HZMTVHeCA9KCmpEIbzWiQsjN+BHcBbS9vr1mtt+vJjPdksIBNUmKAW8TFUDPJK5SUU3QhE9NEXDw==" }, + "escape-string-regexp": { + "version": "1.0.5", + "resolved": "https://registry.npmjs.org/escape-string-regexp/-/escape-string-regexp-1.0.5.tgz", + "integrity": "sha512-vbRorB5FUQWvla16U8R/qgaFIya2qGzwDrNmCZuYKrbdSUMG6I1ZCGQRefkRVhuOkIGVne7BQ35DSfo1qvJqFg==" + }, "has-flag": { "version": "3.0.0", "resolved": "https://registry.npmjs.org/has-flag/-/has-flag-3.0.0.tgz", @@ -30381,6 +35162,19 @@ "strip-ansi": "^5.0.0" }, 
"dependencies": { + "ansi-escapes": { + "version": "4.3.2", + "resolved": "https://registry.npmjs.org/ansi-escapes/-/ansi-escapes-4.3.2.tgz", + "integrity": "sha512-gKXj5ALrKWQLsYG9jlTRmR/xKluxHV+Z9QEwNIgCfM1/uwPMCuzVVnh5mwTd+OuBZcwSIMbqssNWRm1lE51QaQ==", + "requires": { + "type-fest": "^0.21.3" + } + }, + "ansi-regex": { + "version": "4.1.1", + "resolved": "https://registry.npmjs.org/ansi-regex/-/ansi-regex-4.1.1.tgz", + "integrity": "sha512-ILlv4k/3f6vfQ4OoP2AGvirOktlQ98ZEL1k9FaQjxa3L1abBgbuTDAdPOpvbGncC0BTVQrl+OM8xZGK6tWXt7g==" + }, "ansi-styles": { "version": "3.2.1", "resolved": "https://registry.npmjs.org/ansi-styles/-/ansi-styles-3.2.1.tgz", @@ -30412,16 +35206,16 @@ "resolved": "https://registry.npmjs.org/color-name/-/color-name-1.1.3.tgz", "integrity": "sha512-72fSenhMw2HZMTVHeCA9KCmpEIbzWiQsjN+BHcBbS9vr1mtt+vJjPdksIBNUmKAW8TFUDPJK5SUU3QhE9NEXDw==" }, + "escape-string-regexp": { + "version": "1.0.5", + "resolved": "https://registry.npmjs.org/escape-string-regexp/-/escape-string-regexp-1.0.5.tgz", + "integrity": "sha512-vbRorB5FUQWvla16U8R/qgaFIya2qGzwDrNmCZuYKrbdSUMG6I1ZCGQRefkRVhuOkIGVne7BQ35DSfo1qvJqFg==" + }, "has-flag": { "version": "3.0.0", "resolved": "https://registry.npmjs.org/has-flag/-/has-flag-3.0.0.tgz", "integrity": "sha512-sKJf1+ceQBr4SMkvQnBDNDtf4TXpVhVGateu0t918bl30FnbE2m4vNLX+VWe/dpjlb+HugGYzW7uQXH98HPEYw==" }, - "slash": { - "version": "3.0.0", - "resolved": "https://registry.npmjs.org/slash/-/slash-3.0.0.tgz", - "integrity": "sha512-g9Q1haeby36OSStwb4ntCGGGaKsaVSjQ68fBxoQcutl5fS1vuY18H3wSt3jFyFtrkx+Kz0V1G85A4MyAdDMi2Q==" - }, "string-length": { "version": "3.1.0", "resolved": "https://registry.npmjs.org/string-length/-/string-length-3.1.0.tgz", @@ -30431,6 +35225,14 @@ "strip-ansi": "^5.2.0" } }, + "strip-ansi": { + "version": "5.2.0", + "resolved": "https://registry.npmjs.org/strip-ansi/-/strip-ansi-5.2.0.tgz", + "integrity": "sha512-DuRs1gKbBqsMKIZlrffwlug8MHkcnpjs5VPmL1PAh+mA30U0DTotfDZ0d2UUsXpPmPmMMJ6W773MaA3J+lbiWA==", + "requires": 
{ + "ansi-regex": "^4.1.0" + } + }, "supports-color": { "version": "5.5.0", "resolved": "https://registry.npmjs.org/supports-color/-/supports-color-5.5.0.tgz", @@ -30438,6 +35240,11 @@ "requires": { "has-flag": "^3.0.0" } + }, + "type-fest": { + "version": "0.21.3", + "resolved": "https://registry.npmjs.org/type-fest/-/type-fest-0.21.3.tgz", + "integrity": "sha512-t0rzBq87m3fVcduHDUFhKmyyX+9eo6WQjZvf51Ea/M0Q7+T374Jp1aUiyUl0GKxp8M/OETVHSDvmkyPgvX+X2w==" } } }, @@ -30455,11 +35262,6 @@ "string-length": "^2.0.0" }, "dependencies": { - "ansi-escapes": { - "version": "3.2.0", - "resolved": "https://registry.npmjs.org/ansi-escapes/-/ansi-escapes-3.2.0.tgz", - "integrity": "sha512-cBhpre4ma+U0T1oM5fXg7Dy1Jw7zzwv7lt/GoCpr+hDQJoYnKVPLL4dCvSEFMmQurOQvSrwT7SL/DAlhBI97RQ==" - }, "ansi-styles": { "version": "3.2.1", "resolved": "https://registry.npmjs.org/ansi-styles/-/ansi-styles-3.2.1.tgz", @@ -30491,6 +35293,11 @@ "resolved": "https://registry.npmjs.org/color-name/-/color-name-1.1.3.tgz", "integrity": "sha512-72fSenhMw2HZMTVHeCA9KCmpEIbzWiQsjN+BHcBbS9vr1mtt+vJjPdksIBNUmKAW8TFUDPJK5SUU3QhE9NEXDw==" }, + "escape-string-regexp": { + "version": "1.0.5", + "resolved": "https://registry.npmjs.org/escape-string-regexp/-/escape-string-regexp-1.0.5.tgz", + "integrity": "sha512-vbRorB5FUQWvla16U8R/qgaFIya2qGzwDrNmCZuYKrbdSUMG6I1ZCGQRefkRVhuOkIGVne7BQ35DSfo1qvJqFg==" + }, "has-flag": { "version": "3.0.0", "resolved": "https://registry.npmjs.org/has-flag/-/has-flag-3.0.0.tgz", @@ -30744,12 +35551,12 @@ } }, "jsx-ast-utils": { - "version": "2.4.1", - "resolved": "https://registry.npmjs.org/jsx-ast-utils/-/jsx-ast-utils-2.4.1.tgz", - "integrity": "sha512-z1xSldJ6imESSzOjd3NNkieVJKRlKYSOtMG8SFyCj2FIrvSaSuli/WjpBkEzCBoR9bYYYFgqJw61Xhu7Lcgk+w==", + "version": "3.3.0", + "resolved": "https://registry.npmjs.org/jsx-ast-utils/-/jsx-ast-utils-3.3.0.tgz", + "integrity": "sha512-XzO9luP6L0xkxwhIJMTJQpZo/eeN60K08jHdexfD569AGxeNug6UketeHXEhROoM8aR7EcUoOQmIhcJQjcuq8Q==", "requires": { - 
"array-includes": "^3.1.1", - "object.assign": "^4.1.0" + "array-includes": "^3.1.4", + "object.assign": "^4.1.2" } }, "killable": { @@ -30770,6 +35577,19 @@ "resolved": "https://registry.npmjs.org/kleur/-/kleur-3.0.3.tgz", "integrity": "sha512-eTIzlVOSUR+JxdDFepEYcBMtZ9Qqdef+rnzWdRZuMbOywu5tO2w2N7rqjoANZ5k9vywhL6Br1VRjUIgTQx4E8w==" }, + "language-subtag-registry": { + "version": "0.3.21", + "resolved": "https://registry.npmjs.org/language-subtag-registry/-/language-subtag-registry-0.3.21.tgz", + "integrity": "sha512-L0IqwlIXjilBVVYKFT37X9Ih11Um5NEl9cbJIuU/SwP/zEEAbBPOnEeeuxVMf45ydWQRDQN3Nqc96OgbH1K+Pg==" + }, + "language-tags": { + "version": "1.0.5", + "resolved": "https://registry.npmjs.org/language-tags/-/language-tags-1.0.5.tgz", + "integrity": "sha512-qJhlO9cGXi6hBGKoxEG/sKZDAHD5Hnu9Hs4WbOY3pCWXDhw0N8x1NenNzm2EnNLkLkk7J2SdxAkDSbb6ftT+UQ==", + "requires": { + "language-subtag-registry": "~0.3.2" + } + }, "last-call-webpack-plugin": { "version": "3.0.0", "resolved": "https://registry.npmjs.org/last-call-webpack-plugin/-/last-call-webpack-plugin-3.0.0.tgz", @@ -30803,12 +35623,12 @@ } }, "levn": { - "version": "0.3.0", - "resolved": "https://registry.npmjs.org/levn/-/levn-0.3.0.tgz", - "integrity": "sha512-0OO4y2iOHix2W6ujICbKIaEQXvFQHue65vUG3pb5EUomzPI90z9hsA1VsO/dbIIpC53J8gxM9Q4Oho0jrCM/yA==", + "version": "0.4.1", + "resolved": "https://registry.npmjs.org/levn/-/levn-0.4.1.tgz", + "integrity": "sha512-+bT2uH4E5LGE7h/n3evcS/sQlJXCpIp6ym8OWJ5eV6+67Dsql/LaaT7qJBAt2rzfoa/5QBGBhxDix1dMt2kQKQ==", "requires": { - "prelude-ls": "~1.1.2", - "type-check": "~0.3.2" + "prelude-ls": "^1.2.1", + "type-check": "~0.4.0" } }, "lines-and-columns": { @@ -30817,28 +35637,20 @@ "integrity": "sha512-7ylylesZQ/PV29jhEDl3Ufjo6ZX7gCqJr5F7PKrqc93v7fzSymt1BpwEU8nAUXs8qzzvqhbjhK5QZg6Mt/HkBg==" }, "load-json-file": { - "version": "2.0.0", - "resolved": "https://registry.npmjs.org/load-json-file/-/load-json-file-2.0.0.tgz", - "integrity": 
"sha512-3p6ZOGNbiX4CdvEd1VcE6yi78UrGNpjHO33noGwHCnT/o2fyllJDepsm8+mFFv/DvtwFHht5HIHSyOy5a+ChVQ==", + "version": "4.0.0", + "resolved": "https://registry.npmjs.org/load-json-file/-/load-json-file-4.0.0.tgz", + "integrity": "sha512-Kx8hMakjX03tiGTLAIdJ+lL0htKnXjEZN6hk/tozf/WOuYGdZBJrZ+rCJRbVCugsjB3jMLn9746NsQIf5VjBMw==", "requires": { "graceful-fs": "^4.1.2", - "parse-json": "^2.2.0", - "pify": "^2.0.0", + "parse-json": "^4.0.0", + "pify": "^3.0.0", "strip-bom": "^3.0.0" }, "dependencies": { - "parse-json": { - "version": "2.2.0", - "resolved": "https://registry.npmjs.org/parse-json/-/parse-json-2.2.0.tgz", - "integrity": "sha512-QR/GGaKCkhwk1ePQNYDRKYZ3mwU9ypsKhB0XyFnLQdomyEqk3e8wpW3V5Jp88zbxK4n5ST1nqo+g9juTpownhQ==", - "requires": { - "error-ex": "^1.2.0" - } - }, "pify": { - "version": "2.3.0", - "resolved": "https://registry.npmjs.org/pify/-/pify-2.3.0.tgz", - "integrity": "sha512-udgsAY+fTnvv7kI7aaxbqwWNb0AHiB0qBO89PZKPkoTmGOgdbrHDKD+0B2X4uTfJ/FT1R09r9gTsjUjNJotuog==" + "version": "3.0.0", + "resolved": "https://registry.npmjs.org/pify/-/pify-3.0.0.tgz", + "integrity": "sha512-C3FsVNH1udSEX48gGX1xfvwTWfsYWj5U+8/uK15BGzIGrKoUpghX8hWZwa/OFnakBiiVNmBvemTJR5mcy7iPcg==" } } }, @@ -30914,11 +35726,11 @@ } }, "locate-path": { - "version": "3.0.0", - "resolved": "https://registry.npmjs.org/locate-path/-/locate-path-3.0.0.tgz", - "integrity": "sha512-7AO748wWnIhNqAuaty2ZWHkQHRSNfPVIsPIfwEOWO22AmaoVrWavlOcMR5nzTLNYvp36X220/maaRsrec1G65A==", + "version": "2.0.0", + "resolved": "https://registry.npmjs.org/locate-path/-/locate-path-2.0.0.tgz", + "integrity": "sha512-NCI2kiDkyR7VeEKm27Kda/iQHyKJe1Bu0FlTbYp3CqJu+9IFe9bLyAjMxf5ZDDbEg+iMPzB5zYyUTSm8wVTKmA==", "requires": { - "p-locate": "^3.0.0", + "p-locate": "^2.0.0", "path-exists": "^3.0.0" } }, @@ -30942,6 +35754,11 @@ "resolved": "https://registry.npmjs.org/lodash.memoize/-/lodash.memoize-4.1.2.tgz", "integrity": "sha512-t7j+NzmgnQzTAYXcsHYLgimltOV1MXHtlOWf6GjL9Kj8GK5FInw5JotxvbOs+IvV1/Dzo04/fCGfLVs7aXb4Ag==" }, + 
"lodash.merge": { + "version": "4.6.2", + "resolved": "https://registry.npmjs.org/lodash.merge/-/lodash.merge-4.6.2.tgz", + "integrity": "sha512-0KpjqXRVvrYyCsX1swR/XTK0va6VQkQM6MNo7PqW77ByjAhoARA8EfrP1N4+KlKj8YS0ZUCtRT/YUuhyYDujIQ==" + }, "lodash.sortby": { "version": "4.7.0", "resolved": "https://registry.npmjs.org/lodash.sortby/-/lodash.sortby-4.7.0.tgz", @@ -30996,11 +35813,11 @@ } }, "lru-cache": { - "version": "5.1.1", - "resolved": "https://registry.npmjs.org/lru-cache/-/lru-cache-5.1.1.tgz", - "integrity": "sha512-KpNARQA3Iwv+jTA0utUVVbrh+Jlrr1Fv0e56GGzAFOXN7dk/FviaDW8LHmK52DlcH4WP2n6gI8vN1aesBFgo9w==", + "version": "6.0.0", + "resolved": "https://registry.npmjs.org/lru-cache/-/lru-cache-6.0.0.tgz", + "integrity": "sha512-Jo6dJ04CmSjuznwJSS3pUeWmd/H0ffTlkXXgwZi+eq1UCmqQwCh+eLsYOYCwY991i2Fah4h1BEMCx4qThGbsiA==", "requires": { - "yallist": "^3.0.2" + "yallist": "^4.0.0" } }, "make-dir": { @@ -31598,30 +36415,13 @@ "integrity": "sha512-DCfg/T8fcrhrRKTPjRrw/5LLvdGV7BHySf/1LOZx7TzWZdYRjogNtyNq885z3nNallwr3QUKARjqvHqX1/7t+w==" }, "micromatch": { - "version": "3.1.10", - "resolved": "https://registry.npmjs.org/micromatch/-/micromatch-3.1.10.tgz", - "integrity": "sha512-MWikgl9n9M3w+bpsY3He8L+w9eF9338xRl8IAO5viDizwSzziFEyUzo2xrrloB64ADbTf8uA8vRqqttDTOmccg==", + "version": "4.0.5", + "resolved": "https://registry.npmjs.org/micromatch/-/micromatch-4.0.5.tgz", + "integrity": "sha512-DMy+ERcEW2q8Z2Po+WNXuw3c5YaUSFjAO5GsJqfEl7UjvtIuFKO6ZrKvcItdy98dwFI2N1tg3zNIdKaQT+aNdA==", + "dev": true, "requires": { - "arr-diff": "^4.0.0", - "array-unique": "^0.3.2", - "braces": "^2.3.1", - "define-property": "^2.0.2", - "extend-shallow": "^3.0.2", - "extglob": "^2.0.4", - "fragment-cache": "^0.2.1", - "kind-of": "^6.0.2", - "nanomatch": "^1.2.9", - "object.pick": "^1.3.0", - "regex-not": "^1.0.0", - "snapdragon": "^0.8.1", - "to-regex": "^3.0.2" - }, - "dependencies": { - "kind-of": { - "version": "6.0.3", - "resolved": "https://registry.npmjs.org/kind-of/-/kind-of-6.0.3.tgz", - 
"integrity": "sha512-dcS1ul+9tmeD95T+x28/ehLgd9mENa3LsvDTtzm3vyBEO7RPptvAD+t44WVXaUjTBRcrpFeFlC8WCruUR456hw==" - } + "braces": "^3.0.2", + "picomatch": "^2.3.1" } }, "miller-rabin": { @@ -31715,13 +36515,6 @@ "integrity": "sha512-rty5kpw9/z8SX9dmxblFA6edItUmwJgMeYDZRrwlIVN27i8gysGbznJwUggw2V/FVqFSDdWy040ZPS811DYAqQ==", "requires": { "yallist": "^4.0.0" - }, - "dependencies": { - "yallist": { - "version": "4.0.0", - "resolved": "https://registry.npmjs.org/yallist/-/yallist-4.0.0.tgz", - "integrity": "sha512-3wdGidZyq5PB084XLES5TpOSRA3wjXAlIWMhum2kRcv/41Sn2emQ0dycQW4uZXLejwKvg6EsvbdlVL+FYEct7A==" - } } }, "minipass-collect": { @@ -31772,6 +36565,16 @@ "requires": { "for-in": "^1.0.2", "is-extendable": "^1.0.1" + }, + "dependencies": { + "is-extendable": { + "version": "1.0.1", + "resolved": "https://registry.npmjs.org/is-extendable/-/is-extendable-1.0.1.tgz", + "integrity": "sha512-arnXMxT1hhoKo9k1LZdmlNyJdDDfy2v0fXjFlmok4+i8ul/6WlbVge9bhM74OpNPQPMGUToDtz+KXa1PneJxOA==", + "requires": { + "is-plain-object": "^2.0.4" + } + } } }, "mixin-object": { @@ -31787,11 +36590,6 @@ "version": "0.1.8", "resolved": "https://registry.npmjs.org/for-in/-/for-in-0.1.8.tgz", "integrity": "sha512-F0to7vbBSHP8E3l6dCjxNOLuSFAACIxFy3UehTUlG7svlXi37HHsDkyVcHo0Pq8QwrE+pXvWSVX3ZT1T9wAZ9g==" - }, - "is-extendable": { - "version": "0.1.1", - "resolved": "https://registry.npmjs.org/is-extendable/-/is-extendable-0.1.1.tgz", - "integrity": "sha512-5BMULNob1vgFX6EjQw5izWDxrecWK9AM72rugNr0TFldMOi0fj6Jk+zeKIt0xGj4cEfQIJth4w3OKWOJ4f+AFw==" } } }, @@ -31814,6 +36612,16 @@ "mkdirp": "^0.5.1", "rimraf": "^2.5.4", "run-queue": "^1.0.3" + }, + "dependencies": { + "rimraf": { + "version": "2.7.1", + "resolved": "https://registry.npmjs.org/rimraf/-/rimraf-2.7.1.tgz", + "integrity": "sha512-uWjbaKIK3T1OSVptzX7Nl6PvQ3qAGtKEtVRjRuazjfL3Bx5eI409VZSqgND+4UNnmzLVdPj9FqFJNPqBZFve4w==", + "requires": { + "glob": "^7.1.3" + } + } } }, "mri": { @@ -31982,6 +36790,14 @@ "version": "5.7.1", "resolved": 
"https://registry.npmjs.org/semver/-/semver-5.7.1.tgz", "integrity": "sha512-sauaDf/PZdVgrLTNYHRtpXa1iRiKcaebiKQ1BJdpQlWH2lCvexQdX55snPFyK7QzpudqbCI0qXFfOasHdyNDGQ==" + }, + "which": { + "version": "1.3.1", + "resolved": "https://registry.npmjs.org/which/-/which-1.3.1.tgz", + "integrity": "sha512-HxJdYWq1MTIQbJ3nw0cqssHoTNU267KlrDuGZ1WYlxDStUtKUhOaJmh112/TZmHxxUfuJqPXSOm7tDyas0OSIQ==", + "requires": { + "isexe": "^2.0.0" + } } } }, @@ -32040,6 +36856,13 @@ "integrity": "sha512-lJxZYlT4DW/bRUtFh1MQIWqmLwQfAxnqWG4HhEdjMlkrJYnJn0Jrr2u3mgxqaWsdiBc76TYkTG/mhrnYTuzfHw==", "requires": { "path-key": "^2.0.0" + }, + "dependencies": { + "path-key": { + "version": "2.0.1", + "resolved": "https://registry.npmjs.org/path-key/-/path-key-2.0.1.tgz", + "integrity": "sha512-fEHGKCSmUSDPv4uoj8AlD+joPlq3peND+HRYyxFz4KPw4z926S/b8rIuFs2FYJg3BwsxJf6A9/3eIdLaYC+9Dw==" + } } }, "nth-check": { @@ -32197,6 +37020,15 @@ "es-abstract": "^1.20.1" } }, + "object.hasown": { + "version": "1.1.1", + "resolved": "https://registry.npmjs.org/object.hasown/-/object.hasown-1.1.1.tgz", + "integrity": "sha512-LYLe4tivNQzq4JdaWW6WO3HMZZJWzkkH8fnI6EebWl0VZth2wL2Lovm74ep2/gZzlaTdV62JZHEqHQ2yVn8Q/A==", + "requires": { + "define-properties": "^1.1.4", + "es-abstract": "^1.19.5" + } + }, "object.pick": { "version": "1.3.0", "resolved": "https://registry.npmjs.org/object.pick/-/object.pick-1.3.0.tgz", @@ -32286,16 +37118,16 @@ } }, "optionator": { - "version": "0.8.3", - "resolved": "https://registry.npmjs.org/optionator/-/optionator-0.8.3.tgz", - "integrity": "sha512-+IW9pACdk3XWmmTXG8m3upGUJst5XRGzxMRjXzAuJ1XnIFNvfhjjIuYkDvysnPQ7qzqVzLt78BCruntqRhWQbA==", + "version": "0.9.1", + "resolved": "https://registry.npmjs.org/optionator/-/optionator-0.9.1.tgz", + "integrity": "sha512-74RlY5FCnhq4jRxVUPKDaRwrVNXMqsGsiW6AJw4XK8hmtm10wC0ypZBLw5IIp85NZMr91+qd1RvvENwg7jjRFw==", "requires": { - "deep-is": "~0.1.3", - "fast-levenshtein": "~2.0.6", - "levn": "~0.3.0", - "prelude-ls": "~1.1.2", - "type-check": "~0.3.2", - 
"word-wrap": "~1.2.3" + "deep-is": "^0.1.3", + "fast-levenshtein": "^2.0.6", + "levn": "^0.4.1", + "prelude-ls": "^1.2.1", + "type-check": "^0.4.0", + "word-wrap": "^1.2.3" } }, "os-browserify": { @@ -32322,19 +37154,19 @@ "integrity": "sha512-LICb2p9CB7FS+0eR1oqWnHhp0FljGLZCWBE9aix0Uye9W8LTQPwMTYVGWQWIw9RdQiDg4+epXQODwIYJtSJaow==" }, "p-limit": { - "version": "2.3.0", - "resolved": "https://registry.npmjs.org/p-limit/-/p-limit-2.3.0.tgz", - "integrity": "sha512-//88mFWSJx8lxCzwdAABTJL2MyWB12+eIY7MDL2SqLmAkeKU9qxRvWuSyTjm3FUmpBEMuFfckAIqEaVGUDxb6w==", + "version": "1.3.0", + "resolved": "https://registry.npmjs.org/p-limit/-/p-limit-1.3.0.tgz", + "integrity": "sha512-vvcXsLAJ9Dr5rQOPk7toZQZJApBl2K4J6dANSsEuh6QI41JYcsS/qhTGa9ErIUUgK3WNQoJYvylxvjqmiqEA9Q==", "requires": { - "p-try": "^2.0.0" + "p-try": "^1.0.0" } }, "p-locate": { - "version": "3.0.0", - "resolved": "https://registry.npmjs.org/p-locate/-/p-locate-3.0.0.tgz", - "integrity": "sha512-x+12w/To+4GFfgJhBEpiDcLozRJGegY+Ei7/z0tSLkMmxGZNybVMSfWj9aJn8Z5Fc7dBUNJOOVgPv2H7IwulSQ==", + "version": "2.0.0", + "resolved": "https://registry.npmjs.org/p-locate/-/p-locate-2.0.0.tgz", + "integrity": "sha512-nQja7m7gSKuewoVRen45CtVfODR3crN3goVQ0DDZ9N3yHxgpkuBhZqsaiotSQRrADUrne346peY7kT3TSACykg==", "requires": { - "p-limit": "^2.0.0" + "p-limit": "^1.1.0" } }, "p-map": { @@ -32359,9 +37191,9 @@ } }, "p-try": { - "version": "2.2.0", - "resolved": "https://registry.npmjs.org/p-try/-/p-try-2.2.0.tgz", - "integrity": "sha512-R4nPAVTAU0B9D35/Gk3uJf/7XYbQcyohSKdvAxIRSNghFl4e71hVoGnBNQz9cWaXxO2I10KTC+3jMdvvoKw6dQ==" + "version": "1.0.0", + "resolved": "https://registry.npmjs.org/p-try/-/p-try-1.0.0.tgz", + "integrity": "sha512-U1etNYuMJoIz3ZXSrrySFjsXQTWOx2/jdi86L+2pRvph/qMKL6sbcCYdH23fqsbm8TH2Gn0OybpT4eSFlCVHww==" }, "pako": { "version": "1.0.11", @@ -32393,13 +37225,6 @@ "integrity": "sha512-GQ2EWRpQV8/o+Aw8YqtfZZPfNRWZYkbidE9k5rpl/hC3vtHHBfGm2Ifi6qWV+coDGkrUKZAxE3Lot5kcsRlh+g==", "requires": { "callsites": "^3.0.0" - }, - 
"dependencies": { - "callsites": { - "version": "3.1.0", - "resolved": "https://registry.npmjs.org/callsites/-/callsites-3.1.0.tgz", - "integrity": "sha512-P8BjAsXvZS+VIDUI11hHCQEv74YT67YUi5JJFNWIqL235sBmjX4+qx9Muvls5ivyNENctx46xQLQ3aTuE7ssaQ==" - } } }, "parse-asn1": { @@ -32473,9 +37298,9 @@ "integrity": "sha512-DUWJr3+ULp4zXmol/SZkFf3JGsS9/SIv+Y3Rt93/UjPpDpklB5f1er4O3POIbUuUJ3FXgqte2Q7SrU6zAqwk8w==" }, "path-key": { - "version": "2.0.1", - "resolved": "https://registry.npmjs.org/path-key/-/path-key-2.0.1.tgz", - "integrity": "sha512-fEHGKCSmUSDPv4uoj8AlD+joPlq3peND+HRYyxFz4KPw4z926S/b8rIuFs2FYJg3BwsxJf6A9/3eIdLaYC+9Dw==" + "version": "3.1.1", + "resolved": "https://registry.npmjs.org/path-key/-/path-key-3.1.1.tgz", + "integrity": "sha512-ojmeN0qd+y0jszEtoY48r0Peq5dwMEkIlCOu6Q5f41lfkswXuKtYrhgoTpLnyIcHm24Uhqx+5Tqm2InSwLhE6Q==" }, "path-parse": { "version": "1.0.7", @@ -32491,19 +37316,9 @@ } }, "path-type": { - "version": "3.0.0", - "resolved": "https://registry.npmjs.org/path-type/-/path-type-3.0.0.tgz", - "integrity": "sha512-T2ZUsdZFHgA3u4e5PfPbjd7HDDpxPnQb5jN0SrDsjNSuVXHJqtwTnWqG0B1jZrgmJ/7lj1EmVIByWt1gxGkWvg==", - "requires": { - "pify": "^3.0.0" - }, - "dependencies": { - "pify": { - "version": "3.0.0", - "resolved": "https://registry.npmjs.org/pify/-/pify-3.0.0.tgz", - "integrity": "sha512-C3FsVNH1udSEX48gGX1xfvwTWfsYWj5U+8/uK15BGzIGrKoUpghX8hWZwa/OFnakBiiVNmBvemTJR5mcy7iPcg==" - } - } + "version": "4.0.0", + "resolved": "https://registry.npmjs.org/path-type/-/path-type-4.0.0.tgz", + "integrity": "sha512-gDKb8aZMDeD/tZWs9P6+q0J9Mwkdl6xMV8TjnGP3qJVJ06bdMgkbBlLU8IdfOsIsFz2BW1rNVT3XuNEl8zPAvw==" }, "pbkdf2": { "version": "3.1.2", @@ -32561,6 +37376,46 @@ "integrity": "sha512-/E57AYkoeQ25qkxMj5PBOVgF8Kiu/h7cYS30Z5+R7WaiCCBfLq58ZI/dSeaEKb9WVJV5n/03QwrN3IeWIFllvw==", "requires": { "find-up": "^3.0.0" + }, + "dependencies": { + "find-up": { + "version": "3.0.0", + "resolved": "https://registry.npmjs.org/find-up/-/find-up-3.0.0.tgz", + "integrity": 
"sha512-1yD6RmLI1XBfxugvORwlck6f75tYL+iR0jqwsOrOxMZyGYqUuDhJ0l4AXdO1iX/FTs9cBAMEk1gWSEx1kSbylg==", + "requires": { + "locate-path": "^3.0.0" + } + }, + "locate-path": { + "version": "3.0.0", + "resolved": "https://registry.npmjs.org/locate-path/-/locate-path-3.0.0.tgz", + "integrity": "sha512-7AO748wWnIhNqAuaty2ZWHkQHRSNfPVIsPIfwEOWO22AmaoVrWavlOcMR5nzTLNYvp36X220/maaRsrec1G65A==", + "requires": { + "p-locate": "^3.0.0", + "path-exists": "^3.0.0" + } + }, + "p-limit": { + "version": "2.3.0", + "resolved": "https://registry.npmjs.org/p-limit/-/p-limit-2.3.0.tgz", + "integrity": "sha512-//88mFWSJx8lxCzwdAABTJL2MyWB12+eIY7MDL2SqLmAkeKU9qxRvWuSyTjm3FUmpBEMuFfckAIqEaVGUDxb6w==", + "requires": { + "p-try": "^2.0.0" + } + }, + "p-locate": { + "version": "3.0.0", + "resolved": "https://registry.npmjs.org/p-locate/-/p-locate-3.0.0.tgz", + "integrity": "sha512-x+12w/To+4GFfgJhBEpiDcLozRJGegY+Ei7/z0tSLkMmxGZNybVMSfWj9aJn8Z5Fc7dBUNJOOVgPv2H7IwulSQ==", + "requires": { + "p-limit": "^2.0.0" + } + }, + "p-try": { + "version": "2.2.0", + "resolved": "https://registry.npmjs.org/p-try/-/p-try-2.2.0.tgz", + "integrity": "sha512-R4nPAVTAU0B9D35/Gk3uJf/7XYbQcyohSKdvAxIRSNghFl4e71hVoGnBNQz9cWaXxO2I10KTC+3jMdvvoKw6dQ==" + } } }, "pkg-up": { @@ -32569,6 +37424,46 @@ "integrity": "sha512-nDywThFk1i4BQK4twPQ6TA4RT8bDY96yeuCVBWL3ePARCiEKDRSrNGbFIgUJpLp+XeIR65v8ra7WuJOFUBtkMA==", "requires": { "find-up": "^3.0.0" + }, + "dependencies": { + "find-up": { + "version": "3.0.0", + "resolved": "https://registry.npmjs.org/find-up/-/find-up-3.0.0.tgz", + "integrity": "sha512-1yD6RmLI1XBfxugvORwlck6f75tYL+iR0jqwsOrOxMZyGYqUuDhJ0l4AXdO1iX/FTs9cBAMEk1gWSEx1kSbylg==", + "requires": { + "locate-path": "^3.0.0" + } + }, + "locate-path": { + "version": "3.0.0", + "resolved": "https://registry.npmjs.org/locate-path/-/locate-path-3.0.0.tgz", + "integrity": "sha512-7AO748wWnIhNqAuaty2ZWHkQHRSNfPVIsPIfwEOWO22AmaoVrWavlOcMR5nzTLNYvp36X220/maaRsrec1G65A==", + "requires": { + "p-locate": "^3.0.0", + "path-exists": 
"^3.0.0" + } + }, + "p-limit": { + "version": "2.3.0", + "resolved": "https://registry.npmjs.org/p-limit/-/p-limit-2.3.0.tgz", + "integrity": "sha512-//88mFWSJx8lxCzwdAABTJL2MyWB12+eIY7MDL2SqLmAkeKU9qxRvWuSyTjm3FUmpBEMuFfckAIqEaVGUDxb6w==", + "requires": { + "p-try": "^2.0.0" + } + }, + "p-locate": { + "version": "3.0.0", + "resolved": "https://registry.npmjs.org/p-locate/-/p-locate-3.0.0.tgz", + "integrity": "sha512-x+12w/To+4GFfgJhBEpiDcLozRJGegY+Ei7/z0tSLkMmxGZNybVMSfWj9aJn8Z5Fc7dBUNJOOVgPv2H7IwulSQ==", + "requires": { + "p-limit": "^2.0.0" + } + }, + "p-try": { + "version": "2.2.0", + "resolved": "https://registry.npmjs.org/p-try/-/p-try-2.2.0.tgz", + "integrity": "sha512-R4nPAVTAU0B9D35/Gk3uJf/7XYbQcyohSKdvAxIRSNghFl4e71hVoGnBNQz9cWaXxO2I10KTC+3jMdvvoKw6dQ==" + } } }, "pn": { @@ -33528,9 +38423,9 @@ } }, "prelude-ls": { - "version": "1.1.2", - "resolved": "https://registry.npmjs.org/prelude-ls/-/prelude-ls-1.1.2.tgz", - "integrity": "sha512-ESF23V4SKG6lVSGZgYNpbsiaAkdab6ZgOxe52p7+Kid3W3u3bxR4Vfd/o21dmN7jSt0IwgZ4v5MUd26FEtXE9w==" + "version": "1.2.1", + "resolved": "https://registry.npmjs.org/prelude-ls/-/prelude-ls-1.2.1.tgz", + "integrity": "sha512-vkcDPrRZo1QZLbn5RLGPpg/WmIQ65qoWWhcGKf/b5eplkkarX0m9z8ppCat4mlOqUsWpyNuYgO3VRyrYHSzX5g==" }, "prepend-http": { "version": "1.0.4", @@ -33568,9 +38463,9 @@ "integrity": "sha512-Cxwpt2SfTzTtXcfOlzGEee8O+c+MmUgGrNiBcXnuWxuFJHe6a5Hz7qwhwe5OgaSYI0IJvkLqWX1ASG+cJOkEiA==" }, "react-is": { - "version": "18.1.0", - "resolved": "https://registry.npmjs.org/react-is/-/react-is-18.1.0.tgz", - "integrity": "sha512-Fl7FuabXsJnV5Q1qIOQwx/sagGF18kogb4gpfcG4gjLBWO0WDiiz1ko/ExayuxE7InyQkBLkxRFG5oxY6Uu3Kg==" + "version": "18.2.0", + "resolved": "https://registry.npmjs.org/react-is/-/react-is-18.2.0.tgz", + "integrity": "sha512-xWGDIW6x921xtzPkhiULtthJHoJvBbF3q26fzloPCK0hsvxtPVelvftw3zjbHWSkR2km9Z+4uxbDDK/6Zw9B8w==" } } }, @@ -33734,6 +38629,12 @@ "resolved": "https://registry.npmjs.org/querystringify/-/querystringify-2.2.0.tgz", 
"integrity": "sha512-FIqgj2EUvTa7R50u0rGsyTftzjYmv/a3hO345bZNrqabNqjtgiDMgmo4mkUjd+nzU5oF3dClKqFIPUKybUyqoQ==" }, + "queue-microtask": { + "version": "1.2.3", + "resolved": "https://registry.npmjs.org/queue-microtask/-/queue-microtask-1.2.3.tgz", + "integrity": "sha512-NuaNSa6flKT5JaSYQzJok04JzTL1CA6aGhv5rfLW3PgqA+M2ChpZQnAC8h8i4ZFkBS8X5RqkDBHA7r4hej3K9A==", + "dev": true + }, "raf": { "version": "3.4.1", "resolved": "https://registry.npmjs.org/raf/-/raf-3.4.1.tgz", @@ -33850,6 +38751,11 @@ "@babel/highlight": "^7.8.3" } }, + "@nodelib/fs.stat": { + "version": "1.1.3", + "resolved": "https://registry.npmjs.org/@nodelib/fs.stat/-/fs.stat-1.1.3.tgz", + "integrity": "sha512-shAmDyaQC4H92APFoIaVDHCx5bStIocgvbwQyxPRrbUY20V1EYTbSDchWbuwlMG3V17cprZhA6+78JfB+3DTPw==" + }, "ansi-styles": { "version": "3.2.1", "resolved": "https://registry.npmjs.org/ansi-styles/-/ansi-styles-3.2.1.tgz", @@ -33858,6 +38764,41 @@ "color-convert": "^1.9.0" } }, + "array-union": { + "version": "1.0.2", + "resolved": "https://registry.npmjs.org/array-union/-/array-union-1.0.2.tgz", + "integrity": "sha512-Dxr6QJj/RdU/hCaBjOfxW+q6lyuVE6JFWIrAUpuOOhoJJoQ99cUn3igRaHVB5P9WrgFVN0FfArM3x0cueOU8ng==", + "requires": { + "array-uniq": "^1.0.1" + } + }, + "braces": { + "version": "2.3.2", + "resolved": "https://registry.npmjs.org/braces/-/braces-2.3.2.tgz", + "integrity": "sha512-aNdbnj9P8PjdXU4ybaWLK2IF3jc/EoDYbC7AazW6to3TRsfXxscC9UXOB5iDiEQrkyIbWp2SLQda4+QAa7nc3w==", + "requires": { + "arr-flatten": "^1.1.0", + "array-unique": "^0.3.2", + "extend-shallow": "^2.0.1", + "fill-range": "^4.0.0", + "isobject": "^3.0.1", + "repeat-element": "^1.1.2", + "snapdragon": "^0.8.1", + "snapdragon-node": "^2.0.1", + "split-string": "^3.0.2", + "to-regex": "^3.0.1" + }, + "dependencies": { + "extend-shallow": { + "version": "2.0.1", + "resolved": "https://registry.npmjs.org/extend-shallow/-/extend-shallow-2.0.1.tgz", + "integrity": 
"sha512-zCnTtlxNoAiDc3gqY2aYAWFx7XWWiasuF2K8Me5WbN8otHKTUKBwjPtNpRs/rbUZm7KxWAaNj7P1a/p52GbVug==", + "requires": { + "is-extendable": "^0.1.0" + } + } + } + }, "browserslist": { "version": "4.10.0", "resolved": "https://registry.npmjs.org/browserslist/-/browserslist-4.10.0.tgz", @@ -33886,11 +38827,6 @@ } } }, - "cli-width": { - "version": "2.2.1", - "resolved": "https://registry.npmjs.org/cli-width/-/cli-width-2.2.1.tgz", - "integrity": "sha512-GRMWDxpOB6Dgk2E5Uo+3eEBvtOOlimMmpbFiKuLFnQzYDavtLFY3K5ona41jgN/WdRZtG7utuVSVTL4HbZHGkw==" - }, "color-convert": { "version": "1.9.3", "resolved": "https://registry.npmjs.org/color-convert/-/color-convert-1.9.3.tgz", @@ -33914,6 +38850,15 @@ "which": "^2.0.1" } }, + "dir-glob": { + "version": "2.0.0", + "resolved": "https://registry.npmjs.org/dir-glob/-/dir-glob-2.0.0.tgz", + "integrity": "sha512-37qirFDz8cA5fimp9feo43fSuRo2gHwaIn6dXL8Ber1dGwUosDrGZeCCXq57WnIqE4aQ+u3eQZzsk1yOzhdwag==", + "requires": { + "arrify": "^1.0.1", + "path-type": "^3.0.0" + } + }, "emojis-list": { "version": "2.1.0", "resolved": "https://registry.npmjs.org/emojis-list/-/emojis-list-2.1.0.tgz", @@ -33924,6 +38869,40 @@ "resolved": "https://registry.npmjs.org/escape-string-regexp/-/escape-string-regexp-2.0.0.tgz", "integrity": "sha512-UpzcLCXolUWcNu5HtVMHYdXJjArjsF9C0aNnquZYY4uW/Vu0miy5YoWvbV345HauVvcAUnpRuhMMcqTcGOY2+w==" }, + "fast-glob": { + "version": "2.2.7", + "resolved": "https://registry.npmjs.org/fast-glob/-/fast-glob-2.2.7.tgz", + "integrity": "sha512-g1KuQwHOZAmOZMuBtHdxDtju+T2RT8jgCC9aANsbpdiDDTSnjgfuVsIBNKbUeJI3oKMRExcfNDtJl4OhbffMsw==", + "requires": { + "@mrmlnc/readdir-enhanced": "^2.2.1", + "@nodelib/fs.stat": "^1.1.2", + "glob-parent": "^3.1.0", + "is-glob": "^4.0.0", + "merge2": "^1.2.3", + "micromatch": "^3.1.10" + } + }, + "fill-range": { + "version": "4.0.0", + "resolved": "https://registry.npmjs.org/fill-range/-/fill-range-4.0.0.tgz", + "integrity": 
"sha512-VcpLTWqWDiTerugjj8e3+esbg+skS3M9e54UuR3iCeIDMXCLTsAH8hTSzDQU/X6/6t3eYkOKoZSef2PlU6U1XQ==", + "requires": { + "extend-shallow": "^2.0.1", + "is-number": "^3.0.0", + "repeat-string": "^1.6.1", + "to-regex-range": "^2.1.0" + }, + "dependencies": { + "extend-shallow": { + "version": "2.0.1", + "resolved": "https://registry.npmjs.org/extend-shallow/-/extend-shallow-2.0.1.tgz", + "integrity": "sha512-zCnTtlxNoAiDc3gqY2aYAWFx7XWWiasuF2K8Me5WbN8otHKTUKBwjPtNpRs/rbUZm7KxWAaNj7P1a/p52GbVug==", + "requires": { + "is-extendable": "^0.1.0" + } + } + } + }, "find-up": { "version": "4.1.0", "resolved": "https://registry.npmjs.org/find-up/-/find-up-4.1.0.tgz", @@ -33933,42 +38912,63 @@ "path-exists": "^4.0.0" } }, + "glob-parent": { + "version": "3.1.0", + "resolved": "https://registry.npmjs.org/glob-parent/-/glob-parent-3.1.0.tgz", + "integrity": "sha512-E8Ak/2+dZY6fnzlR7+ueWvhsH1SjHr4jjss4YS/h4py44jY9MhK/VFdaZJAWDz6BbL21KeteKxFSFpq8OS5gVA==", + "requires": { + "is-glob": "^3.1.0", + "path-dirname": "^1.0.0" + }, + "dependencies": { + "is-glob": { + "version": "3.1.0", + "resolved": "https://registry.npmjs.org/is-glob/-/is-glob-3.1.0.tgz", + "integrity": "sha512-UFpDDrPgM6qpnFNI+rh/p3bUaq9hKLZN8bMUWzxmcnZVS3omf4IPK+BrewlnWjO1WmUsMYuSjKh4UJuV4+Lqmw==", + "requires": { + "is-extglob": "^2.1.0" + } + } + } + }, + "globby": { + "version": "8.0.2", + "resolved": "https://registry.npmjs.org/globby/-/globby-8.0.2.tgz", + "integrity": "sha512-yTzMmKygLp8RUpG1Ymu2VXPSJQZjNAZPD4ywgYEaG7e4tBJeUQBO8OpXrf1RCNcEs5alsoJYPAMiIHP0cmeC7w==", + "requires": { + "array-union": "^1.0.1", + "dir-glob": "2.0.0", + "fast-glob": "^2.0.2", + "glob": "^7.1.2", + "ignore": "^3.3.5", + "pify": "^3.0.0", + "slash": "^1.0.0" + } + }, "has-flag": { "version": "3.0.0", "resolved": "https://registry.npmjs.org/has-flag/-/has-flag-3.0.0.tgz", "integrity": "sha512-sKJf1+ceQBr4SMkvQnBDNDtf4TXpVhVGateu0t918bl30FnbE2m4vNLX+VWe/dpjlb+HugGYzW7uQXH98HPEYw==" }, - "inquirer": { - "version": "7.0.4", - "resolved": 
"https://registry.npmjs.org/inquirer/-/inquirer-7.0.4.tgz", - "integrity": "sha512-Bu5Td5+j11sCkqfqmUTiwv+tWisMtP0L7Q8WrqA2C/BbBhy1YTdFrvjjlrKq8oagA/tLQBski2Gcx/Sqyi2qSQ==", + "ignore": { + "version": "3.3.10", + "resolved": "https://registry.npmjs.org/ignore/-/ignore-3.3.10.tgz", + "integrity": "sha512-Pgs951kaMm5GXP7MOvxERINe3gsaVjUWFm+UZPSq9xYriQAksyhg0csnS0KXSNRD5NmNdapXEpjxG49+AKh/ug==" + }, + "is-number": { + "version": "3.0.0", + "resolved": "https://registry.npmjs.org/is-number/-/is-number-3.0.0.tgz", + "integrity": "sha512-4cboCqIpliH+mAvFNegjZQ4kgKc3ZUhQVr3HvWbSh5q3WH2v82ct+T2Y1hdU5Gdtorx/cLifQjqCbL7bpznLTg==", "requires": { - "ansi-escapes": "^4.2.1", - "chalk": "^2.4.2", - "cli-cursor": "^3.1.0", - "cli-width": "^2.0.0", - "external-editor": "^3.0.3", - "figures": "^3.0.0", - "lodash": "^4.17.15", - "mute-stream": "0.0.8", - "run-async": "^2.2.0", - "rxjs": "^6.5.3", - "string-width": "^4.1.0", - "strip-ansi": "^5.1.0", - "through": "^2.3.6" + "kind-of": "^3.0.2" }, "dependencies": { - "ansi-regex": { - "version": "4.1.1", - "resolved": "https://registry.npmjs.org/ansi-regex/-/ansi-regex-4.1.1.tgz", - "integrity": "sha512-ILlv4k/3f6vfQ4OoP2AGvirOktlQ98ZEL1k9FaQjxa3L1abBgbuTDAdPOpvbGncC0BTVQrl+OM8xZGK6tWXt7g==" - }, - "strip-ansi": { - "version": "5.2.0", - "resolved": "https://registry.npmjs.org/strip-ansi/-/strip-ansi-5.2.0.tgz", - "integrity": "sha512-DuRs1gKbBqsMKIZlrffwlug8MHkcnpjs5VPmL1PAh+mA30U0DTotfDZ0d2UUsXpPmPmMMJ6W773MaA3J+lbiWA==", + "kind-of": { + "version": "3.2.2", + "resolved": "https://registry.npmjs.org/kind-of/-/kind-of-3.2.2.tgz", + "integrity": "sha512-NOW9QQXMoZGg/oqnVNoNTTIFEIid1627WCffUBJEdMxYApq7mNE7CpzucIPc+ZQg25Phej7IJSmX3hO+oblOtQ==", "requires": { - "ansi-regex": "^4.1.0" + "is-buffer": "^1.1.5" } } } @@ -33981,6 +38981,11 @@ "minimist": "^1.2.0" } }, + "kind-of": { + "version": "6.0.3", + "resolved": "https://registry.npmjs.org/kind-of/-/kind-of-6.0.3.tgz", + "integrity": 
"sha512-dcS1ul+9tmeD95T+x28/ehLgd9mENa3LsvDTtzm3vyBEO7RPptvAD+t44WVXaUjTBRcrpFeFlC8WCruUR456hw==" + }, "loader-utils": { "version": "1.2.3", "resolved": "https://registry.npmjs.org/loader-utils/-/loader-utils-1.2.3.tgz", @@ -33999,11 +39004,39 @@ "p-locate": "^4.1.0" } }, + "micromatch": { + "version": "3.1.10", + "resolved": "https://registry.npmjs.org/micromatch/-/micromatch-3.1.10.tgz", + "integrity": "sha512-MWikgl9n9M3w+bpsY3He8L+w9eF9338xRl8IAO5viDizwSzziFEyUzo2xrrloB64ADbTf8uA8vRqqttDTOmccg==", + "requires": { + "arr-diff": "^4.0.0", + "array-unique": "^0.3.2", + "braces": "^2.3.1", + "define-property": "^2.0.2", + "extend-shallow": "^3.0.2", + "extglob": "^2.0.4", + "fragment-cache": "^0.2.1", + "kind-of": "^6.0.2", + "nanomatch": "^1.2.9", + "object.pick": "^1.3.0", + "regex-not": "^1.0.0", + "snapdragon": "^0.8.1", + "to-regex": "^3.0.2" + } + }, "node-releases": { "version": "1.1.77", "resolved": "https://registry.npmjs.org/node-releases/-/node-releases-1.1.77.tgz", "integrity": "sha512-rB1DUFUNAN4Gn9keO2K1efO35IDK7yKHCdCaIMvFO7yUYmmZYeDjnGKle26G4rwj+LKRQpjyUUvMkPglwGCYNQ==" }, + "p-limit": { + "version": "2.3.0", + "resolved": "https://registry.npmjs.org/p-limit/-/p-limit-2.3.0.tgz", + "integrity": "sha512-//88mFWSJx8lxCzwdAABTJL2MyWB12+eIY7MDL2SqLmAkeKU9qxRvWuSyTjm3FUmpBEMuFfckAIqEaVGUDxb6w==", + "requires": { + "p-try": "^2.0.0" + } + }, "p-locate": { "version": "4.1.0", "resolved": "https://registry.npmjs.org/p-locate/-/p-locate-4.1.0.tgz", @@ -34012,28 +39045,33 @@ "p-limit": "^2.2.0" } }, + "p-try": { + "version": "2.2.0", + "resolved": "https://registry.npmjs.org/p-try/-/p-try-2.2.0.tgz", + "integrity": "sha512-R4nPAVTAU0B9D35/Gk3uJf/7XYbQcyohSKdvAxIRSNghFl4e71hVoGnBNQz9cWaXxO2I10KTC+3jMdvvoKw6dQ==" + }, "path-exists": { "version": "4.0.0", "resolved": "https://registry.npmjs.org/path-exists/-/path-exists-4.0.0.tgz", "integrity": "sha512-ak9Qy5Q7jYb2Wwcey5Fpvg2KoAc/ZIhLSLOSBmRmygPsGwkVVt0fZa0qrtMz+m6tJTAHfZQ8FnmB4MG4LWy7/w==" }, - "path-key": { - 
"version": "3.1.1", - "resolved": "https://registry.npmjs.org/path-key/-/path-key-3.1.1.tgz", - "integrity": "sha512-ojmeN0qd+y0jszEtoY48r0Peq5dwMEkIlCOu6Q5f41lfkswXuKtYrhgoTpLnyIcHm24Uhqx+5Tqm2InSwLhE6Q==" - }, - "shebang-command": { - "version": "2.0.0", - "resolved": "https://registry.npmjs.org/shebang-command/-/shebang-command-2.0.0.tgz", - "integrity": "sha512-kHxr2zZpYtdmrN1qDjrrX/Z1rR1kG8Dx+gkpK1G4eXmvXswmcE1hTWBWYUzlraYw1/yZp6YuDY77YtvbN0dmDA==", + "path-type": { + "version": "3.0.0", + "resolved": "https://registry.npmjs.org/path-type/-/path-type-3.0.0.tgz", + "integrity": "sha512-T2ZUsdZFHgA3u4e5PfPbjd7HDDpxPnQb5jN0SrDsjNSuVXHJqtwTnWqG0B1jZrgmJ/7lj1EmVIByWt1gxGkWvg==", "requires": { - "shebang-regex": "^3.0.0" + "pify": "^3.0.0" } }, - "shebang-regex": { + "pify": { "version": "3.0.0", - "resolved": "https://registry.npmjs.org/shebang-regex/-/shebang-regex-3.0.0.tgz", - "integrity": "sha512-7++dFhtcx3353uBaq8DDR4NuxBetBzC7ZQOhmTQInHEd6bSrXdiEyzCvG07Z44UYdLShWUyXt5M/yhz8ekcb1A==" + "resolved": "https://registry.npmjs.org/pify/-/pify-3.0.0.tgz", + "integrity": "sha512-C3FsVNH1udSEX48gGX1xfvwTWfsYWj5U+8/uK15BGzIGrKoUpghX8hWZwa/OFnakBiiVNmBvemTJR5mcy7iPcg==" + }, + "slash": { + "version": "1.0.0", + "resolved": "https://registry.npmjs.org/slash/-/slash-1.0.0.tgz", + "integrity": "sha512-3TYDR7xWt4dIqV2JauJr+EJeW356RXijHeUlO+8djJ+uBXPn8/2dpzBc8yQhh583sVvc9CvFAeQVgijsH+PNNg==" }, "strip-ansi": { "version": "6.0.0", @@ -34051,12 +39089,13 @@ "has-flag": "^3.0.0" } }, - "which": { - "version": "2.0.2", - "resolved": "https://registry.npmjs.org/which/-/which-2.0.2.tgz", - "integrity": "sha512-BLI3Tl1TW3Pvl70l3yq3Y64i+awpwXqsGBYWkkqMtnbXgrMD+yj7rhW0kuEDxzJaYXGjEW5ogapKNMEKNMjibA==", + "to-regex-range": { + "version": "2.1.1", + "resolved": "https://registry.npmjs.org/to-regex-range/-/to-regex-range-2.1.1.tgz", + "integrity": "sha512-ZZWNfCjUokXXDGXFpZehJIkZqq91BcULFq/Pi7M5i4JnxXdhMKAK682z8bCW3o8Hj1wuuzoKcW3DfVzaP6VuNg==", "requires": { - "isexe": "^2.0.0" + 
"is-number": "^3.0.0", + "repeat-string": "^1.6.1" } } } @@ -34125,9 +39164,9 @@ }, "dependencies": { "react-is": { - "version": "18.1.0", - "resolved": "https://registry.npmjs.org/react-is/-/react-is-18.1.0.tgz", - "integrity": "sha512-Fl7FuabXsJnV5Q1qIOQwx/sagGF18kogb4gpfcG4gjLBWO0WDiiz1ko/ExayuxE7InyQkBLkxRFG5oxY6Uu3Kg==" + "version": "18.2.0", + "resolved": "https://registry.npmjs.org/react-is/-/react-is-18.2.0.tgz", + "integrity": "sha512-xWGDIW6x921xtzPkhiULtthJHoJvBbF3q26fzloPCK0hsvxtPVelvftw3zjbHWSkR2km9Z+4uxbDDK/6Zw9B8w==" } } }, @@ -34146,37 +39185,11 @@ } }, "react-router": { - "version": "4.3.1", - "resolved": "https://registry.npmjs.org/react-router/-/react-router-4.3.1.tgz", - "integrity": "sha512-yrvL8AogDh2X42Dt9iknk4wF4V8bWREPirFfS9gLU1huk6qK41sg7Z/1S81jjTrGHxa3B8R3J6xIkDAA6CVarg==", + "version": "6.3.0", + "resolved": "https://registry.npmjs.org/react-router/-/react-router-6.3.0.tgz", + "integrity": "sha512-7Wh1DzVQ+tlFjkeo+ujvjSqSJmkt1+8JO+T5xklPlgrh70y7ogx75ODRW0ThWhY7S+6yEDks8TYrtQe/aoboBQ==", "requires": { - "history": "^4.7.2", - "hoist-non-react-statics": "^2.5.0", - "invariant": "^2.2.4", - "loose-envify": "^1.3.1", - "path-to-regexp": "^1.7.0", - "prop-types": "^15.6.1", - "warning": "^4.0.1" - }, - "dependencies": { - "history": { - "version": "4.10.1", - "resolved": "https://registry.npmjs.org/history/-/history-4.10.1.tgz", - "integrity": "sha512-36nwAD620w12kuzPAsyINPWJqlNbij+hpK1k9XRloDtym8mxzGYl2c17LnV6IAGB2Dmg4tEa7G7DlawS0+qjew==", - "requires": { - "@babel/runtime": "^7.1.2", - "loose-envify": "^1.2.0", - "resolve-pathname": "^3.0.0", - "tiny-invariant": "^1.0.2", - "tiny-warning": "^1.0.0", - "value-equal": "^1.0.1" - } - }, - "hoist-non-react-statics": { - "version": "2.5.5", - "resolved": "https://registry.npmjs.org/hoist-non-react-statics/-/hoist-non-react-statics-2.5.5.tgz", - "integrity": "sha512-rqcy4pJo55FTTLWt+bU8ukscqHeE/e9KWvsOW2b/a3afxQZhwkQdT1rPPCJ0rYXdj4vNcasY8zHTH+jF/qStxw==" - } + "history": "^5.2.0" } }, 
"react-router-dom": { @@ -34204,6 +39217,25 @@ "tiny-warning": "^1.0.0", "value-equal": "^1.0.1" } + }, + "hoist-non-react-statics": { + "version": "2.5.5", + "resolved": "https://registry.npmjs.org/hoist-non-react-statics/-/hoist-non-react-statics-2.5.5.tgz", + "integrity": "sha512-rqcy4pJo55FTTLWt+bU8ukscqHeE/e9KWvsOW2b/a3afxQZhwkQdT1rPPCJ0rYXdj4vNcasY8zHTH+jF/qStxw==" + }, + "react-router": { + "version": "4.3.1", + "resolved": "https://registry.npmjs.org/react-router/-/react-router-4.3.1.tgz", + "integrity": "sha512-yrvL8AogDh2X42Dt9iknk4wF4V8bWREPirFfS9gLU1huk6qK41sg7Z/1S81jjTrGHxa3B8R3J6xIkDAA6CVarg==", + "requires": { + "history": "^4.7.2", + "hoist-non-react-statics": "^2.5.0", + "invariant": "^2.2.4", + "loose-envify": "^1.3.1", + "path-to-regexp": "^1.7.0", + "prop-types": "^15.6.1", + "warning": "^4.0.1" + } } } }, @@ -34297,10 +39329,555 @@ } } }, + "@typescript-eslint/eslint-plugin": { + "version": "2.34.0", + "resolved": "https://registry.npmjs.org/@typescript-eslint/eslint-plugin/-/eslint-plugin-2.34.0.tgz", + "integrity": "sha512-4zY3Z88rEE99+CNvTbXSyovv2z9PNOVffTWD2W8QF5s2prBQtwN2zadqERcrHpcR7O/+KMI3fcTAmUUhK/iQcQ==", + "requires": { + "@typescript-eslint/experimental-utils": "2.34.0", + "functional-red-black-tree": "^1.0.1", + "regexpp": "^3.0.0", + "tsutils": "^3.17.1" + } + }, + "@typescript-eslint/parser": { + "version": "2.34.0", + "resolved": "https://registry.npmjs.org/@typescript-eslint/parser/-/parser-2.34.0.tgz", + "integrity": "sha512-03ilO0ucSD0EPTw2X4PntSIRFtDPWjrVq7C3/Z3VQHRC7+13YB55rcJI3Jt+YgeHbjUdJPcPa7b23rXCBokuyA==", + "requires": { + "@types/eslint-visitor-keys": "^1.0.0", + "@typescript-eslint/experimental-utils": "2.34.0", + "@typescript-eslint/typescript-estree": "2.34.0", + "eslint-visitor-keys": "^1.1.0" + } + }, + "@typescript-eslint/typescript-estree": { + "version": "2.34.0", + "resolved": "https://registry.npmjs.org/@typescript-eslint/typescript-estree/-/typescript-estree-2.34.0.tgz", + "integrity": 
"sha512-OMAr+nJWKdlVM9LOqCqh3pQQPwxHAN7Du8DR6dmwCrAmxtiXQnhHJ6tBNtf+cggqfo51SG/FCwnKhXCIM7hnVg==", + "requires": { + "debug": "^4.1.1", + "eslint-visitor-keys": "^1.1.0", + "glob": "^7.1.6", + "is-glob": "^4.0.1", + "lodash": "^4.17.15", + "semver": "^7.3.2", + "tsutils": "^3.17.1" + }, + "dependencies": { + "semver": { + "version": "7.3.7", + "resolved": "https://registry.npmjs.org/semver/-/semver-7.3.7.tgz", + "integrity": "sha512-QlYTucUYOews+WeEujDoEGziz4K6c47V/Bd+LjSSYcA94p+DmINdf7ncaUinThfvZyu13lN9OY1XDxt8C0Tw0g==", + "requires": { + "lru-cache": "^6.0.0" + } + } + } + }, + "acorn": { + "version": "7.4.1", + "resolved": "https://registry.npmjs.org/acorn/-/acorn-7.4.1.tgz", + "integrity": "sha512-nQyp0o1/mNdbTO1PO6kHkwSrmgZ0MT/jCCpNiwbUjGoRN4dlBhqJtoQuCnEOKzgTVwg0ZWiCoQy6SxMebQVh8A==" + }, + "ansi-regex": { + "version": "4.1.1", + "resolved": "https://registry.npmjs.org/ansi-regex/-/ansi-regex-4.1.1.tgz", + "integrity": "sha512-ILlv4k/3f6vfQ4OoP2AGvirOktlQ98ZEL1k9FaQjxa3L1abBgbuTDAdPOpvbGncC0BTVQrl+OM8xZGK6tWXt7g==" + }, + "ansi-styles": { + "version": "3.2.1", + "resolved": "https://registry.npmjs.org/ansi-styles/-/ansi-styles-3.2.1.tgz", + "integrity": "sha512-VT0ZI6kZRdTh8YyJw3SMbYm/u+NqfsAxEpWO0Pf9sq8/e94WxxOpPKx9FR1FlyCtOVDNOQ+8ntlqFxiRc+r5qA==", + "requires": { + "color-convert": "^1.9.0" + } + }, + "aria-query": { + "version": "3.0.0", + "resolved": "https://registry.npmjs.org/aria-query/-/aria-query-3.0.0.tgz", + "integrity": "sha512-majUxHgLehQTeSA+hClx+DY09OVUqG3GtezWkF1krgLGNdlDu9l9V8DaqNMWbq4Eddc8wsyDA0hpDUtnYxQEXw==", + "requires": { + "ast-types-flow": "0.0.7", + "commander": "^2.11.0" + } + }, + "chalk": { + "version": "2.4.2", + "resolved": "https://registry.npmjs.org/chalk/-/chalk-2.4.2.tgz", + "integrity": "sha512-Mti+f9lpJNcwF4tWV8/OrTTtF1gZi+f8FqlyAdouralcFWFQWF2+NgCHShjkCb+IFBLq9buZwE1xckQU4peSuQ==", + "requires": { + "ansi-styles": "^3.2.1", + "escape-string-regexp": "^1.0.5", + "supports-color": "^5.3.0" + } + }, + "color-convert": { + 
"version": "1.9.3", + "resolved": "https://registry.npmjs.org/color-convert/-/color-convert-1.9.3.tgz", + "integrity": "sha512-QfAUtd+vFdAtFQcC8CCyYt1fYWxSqAiK2cSD6zDB8N3cpsEBAvRxp9zOGg6G/SHHJYAT88/az/IuDGALsNVbGg==", + "requires": { + "color-name": "1.1.3" + } + }, + "color-name": { + "version": "1.1.3", + "resolved": "https://registry.npmjs.org/color-name/-/color-name-1.1.3.tgz", + "integrity": "sha512-72fSenhMw2HZMTVHeCA9KCmpEIbzWiQsjN+BHcBbS9vr1mtt+vJjPdksIBNUmKAW8TFUDPJK5SUU3QhE9NEXDw==" + }, + "commander": { + "version": "2.20.3", + "resolved": "https://registry.npmjs.org/commander/-/commander-2.20.3.tgz", + "integrity": "sha512-GpVkmM8vF2vQUkj2LvZmD35JxeJOLCwJ9cUkugyk2nuhbv3+mJvpLYYt+0+USMxE+oj+ey/lJEnhZw75x/OMcQ==" + }, + "cross-spawn": { + "version": "6.0.5", + "resolved": "https://registry.npmjs.org/cross-spawn/-/cross-spawn-6.0.5.tgz", + "integrity": "sha512-eTVLrBSt7fjbDygz805pMnstIs2VTBNkRm0qxZd+M7A5XDdxVRWO5MxGBXZhjY4cqLYLdtrGqRf8mBPmzwSpWQ==", + "requires": { + "nice-try": "^1.0.4", + "path-key": "^2.0.1", + "semver": "^5.5.0", + "shebang-command": "^1.2.0", + "which": "^1.2.9" + }, + "dependencies": { + "semver": { + "version": "5.7.1", + "resolved": "https://registry.npmjs.org/semver/-/semver-5.7.1.tgz", + "integrity": "sha512-sauaDf/PZdVgrLTNYHRtpXa1iRiKcaebiKQ1BJdpQlWH2lCvexQdX55snPFyK7QzpudqbCI0qXFfOasHdyNDGQ==" + } + } + }, + "emoji-regex": { + "version": "7.0.3", + "resolved": "https://registry.npmjs.org/emoji-regex/-/emoji-regex-7.0.3.tgz", + "integrity": "sha512-CwBLREIQ7LvYFB0WyRvwhq5N5qPhc6PMjD6bYggFlI5YyDgl+0vxq5VHbMOFqLg7hfWzmu8T5Z1QofhmTIhItA==" + }, + "escape-string-regexp": { + "version": "1.0.5", + "resolved": "https://registry.npmjs.org/escape-string-regexp/-/escape-string-regexp-1.0.5.tgz", + "integrity": "sha512-vbRorB5FUQWvla16U8R/qgaFIya2qGzwDrNmCZuYKrbdSUMG6I1ZCGQRefkRVhuOkIGVne7BQ35DSfo1qvJqFg==" + }, + "eslint": { + "version": "6.8.0", + "resolved": "https://registry.npmjs.org/eslint/-/eslint-6.8.0.tgz", + "integrity": 
"sha512-K+Iayyo2LtyYhDSYwz5D5QdWw0hCacNzyq1Y821Xna2xSJj7cijoLLYmLxTQgcgZ9mC61nryMy9S7GRbYpI5Ig==", + "requires": { + "@babel/code-frame": "^7.0.0", + "ajv": "^6.10.0", + "chalk": "^2.1.0", + "cross-spawn": "^6.0.5", + "debug": "^4.0.1", + "doctrine": "^3.0.0", + "eslint-scope": "^5.0.0", + "eslint-utils": "^1.4.3", + "eslint-visitor-keys": "^1.1.0", + "espree": "^6.1.2", + "esquery": "^1.0.1", + "esutils": "^2.0.2", + "file-entry-cache": "^5.0.1", + "functional-red-black-tree": "^1.0.1", + "glob-parent": "^5.0.0", + "globals": "^12.1.0", + "ignore": "^4.0.6", + "import-fresh": "^3.0.0", + "imurmurhash": "^0.1.4", + "inquirer": "^7.0.0", + "is-glob": "^4.0.0", + "js-yaml": "^3.13.1", + "json-stable-stringify-without-jsonify": "^1.0.1", + "levn": "^0.3.0", + "lodash": "^4.17.14", + "minimatch": "^3.0.4", + "mkdirp": "^0.5.1", + "natural-compare": "^1.4.0", + "optionator": "^0.8.3", + "progress": "^2.0.0", + "regexpp": "^2.0.1", + "semver": "^6.1.2", + "strip-ansi": "^5.2.0", + "strip-json-comments": "^3.0.1", + "table": "^5.2.3", + "text-table": "^0.2.0", + "v8-compile-cache": "^2.0.3" + }, + "dependencies": { + "regexpp": { + "version": "2.0.1", + "resolved": "https://registry.npmjs.org/regexpp/-/regexpp-2.0.1.tgz", + "integrity": "sha512-lv0M6+TkDVniA3aD1Eg0DVpfU/booSu7Eev3TDO/mZKHBfVjgCGTV4t4buppESEYDtkArYFOxTJWv6S5C+iaNw==" + } + } + }, + "eslint-config-react-app": { + "version": "5.2.1", + "resolved": "https://registry.npmjs.org/eslint-config-react-app/-/eslint-config-react-app-5.2.1.tgz", + "integrity": "sha512-pGIZ8t0mFLcV+6ZirRgYK6RVqUIKRIi9MmgzUEmrIknsn3AdO0I32asO86dJgloHq+9ZPl8UIg8mYrvgP5u2wQ==", + "requires": { + "confusing-browser-globals": "^1.0.9" + } + }, + "eslint-loader": { + "version": "3.0.3", + "resolved": "https://registry.npmjs.org/eslint-loader/-/eslint-loader-3.0.3.tgz", + "integrity": "sha512-+YRqB95PnNvxNp1HEjQmvf9KNvCin5HXYYseOXVC2U0KEcw4IkQ2IQEBG46j7+gW39bMzeu0GsUhVbBY3Votpw==", + "requires": { + "fs-extra": "^8.1.0", + "loader-fs-cache": 
"^1.0.2", + "loader-utils": "^1.2.3", + "object-hash": "^2.0.1", + "schema-utils": "^2.6.1" + } + }, + "eslint-plugin-import": { + "version": "2.20.1", + "resolved": "https://registry.npmjs.org/eslint-plugin-import/-/eslint-plugin-import-2.20.1.tgz", + "integrity": "sha512-qQHgFOTjguR+LnYRoToeZWT62XM55MBVXObHM6SKFd1VzDcX/vqT1kAz8ssqigh5eMj8qXcRoXXGZpPP6RfdCw==", + "requires": { + "array-includes": "^3.0.3", + "array.prototype.flat": "^1.2.1", + "contains-path": "^0.1.0", + "debug": "^2.6.9", + "doctrine": "1.5.0", + "eslint-import-resolver-node": "^0.3.2", + "eslint-module-utils": "^2.4.1", + "has": "^1.0.3", + "minimatch": "^3.0.4", + "object.values": "^1.1.0", + "read-pkg-up": "^2.0.0", + "resolve": "^1.12.0" + }, + "dependencies": { + "debug": { + "version": "2.6.9", + "resolved": "https://registry.npmjs.org/debug/-/debug-2.6.9.tgz", + "integrity": "sha512-bC7ElrdJaJnPbAP+1EotYvqZsb3ecl5wi6Bfi6BJTUcNowp6cvspg0jXznRTKDjm/E7AdgFBVeAPVMNcKGsHMA==", + "requires": { + "ms": "2.0.0" + } + }, + "doctrine": { + "version": "1.5.0", + "resolved": "https://registry.npmjs.org/doctrine/-/doctrine-1.5.0.tgz", + "integrity": "sha512-lsGyRuYr4/PIB0txi+Fy2xOMI2dGaTguCaotzFGkVZuKR5usKfcRWIFKNM3QNrU7hh/+w2bwTW+ZeXPK5l8uVg==", + "requires": { + "esutils": "^2.0.2", + "isarray": "^1.0.0" + } + } + } + }, + "eslint-plugin-jsx-a11y": { + "version": "6.2.3", + "resolved": "https://registry.npmjs.org/eslint-plugin-jsx-a11y/-/eslint-plugin-jsx-a11y-6.2.3.tgz", + "integrity": "sha512-CawzfGt9w83tyuVekn0GDPU9ytYtxyxyFZ3aSWROmnRRFQFT2BiPJd7jvRdzNDi6oLWaS2asMeYSNMjWTV4eNg==", + "requires": { + "@babel/runtime": "^7.4.5", + "aria-query": "^3.0.0", + "array-includes": "^3.0.3", + "ast-types-flow": "^0.0.7", + "axobject-query": "^2.0.2", + "damerau-levenshtein": "^1.0.4", + "emoji-regex": "^7.0.2", + "has": "^1.0.3", + "jsx-ast-utils": "^2.2.1" + } + }, + "eslint-plugin-react": { + "version": "7.19.0", + "resolved": 
"https://registry.npmjs.org/eslint-plugin-react/-/eslint-plugin-react-7.19.0.tgz", + "integrity": "sha512-SPT8j72CGuAP+JFbT0sJHOB80TX/pu44gQ4vXH/cq+hQTiY2PuZ6IHkqXJV6x1b28GDdo1lbInjKUrrdUf0LOQ==", + "requires": { + "array-includes": "^3.1.1", + "doctrine": "^2.1.0", + "has": "^1.0.3", + "jsx-ast-utils": "^2.2.3", + "object.entries": "^1.1.1", + "object.fromentries": "^2.0.2", + "object.values": "^1.1.1", + "prop-types": "^15.7.2", + "resolve": "^1.15.1", + "semver": "^6.3.0", + "string.prototype.matchall": "^4.0.2", + "xregexp": "^4.3.0" + }, + "dependencies": { + "doctrine": { + "version": "2.1.0", + "resolved": "https://registry.npmjs.org/doctrine/-/doctrine-2.1.0.tgz", + "integrity": "sha512-35mSku4ZXK0vfCuHEDAwt55dg2jNajHZ1odvF+8SSr82EsZY4QmXfuWso8oEd8zRhVObSN18aM0CjSdoBX7zIw==", + "requires": { + "esutils": "^2.0.2" + } + }, + "resolve": { + "version": "1.22.0", + "resolved": "https://registry.npmjs.org/resolve/-/resolve-1.22.0.tgz", + "integrity": "sha512-Hhtrw0nLeSrFQ7phPp4OOcVjLPIeMnRlr5mcnVuMe7M/7eBn98A3hmFRLoFo3DLZkivSYwhRUJTyPyWAk56WLw==", + "requires": { + "is-core-module": "^2.8.1", + "path-parse": "^1.0.7", + "supports-preserve-symlinks-flag": "^1.0.0" + } + } + } + }, + "eslint-plugin-react-hooks": { + "version": "1.7.0", + "resolved": "https://registry.npmjs.org/eslint-plugin-react-hooks/-/eslint-plugin-react-hooks-1.7.0.tgz", + "integrity": "sha512-iXTCFcOmlWvw4+TOE8CLWj6yX1GwzT0Y6cUfHHZqWnSk144VmVIRcVGtUAzrLES7C798lmvnt02C7rxaOX1HNA==", + "requires": {} + }, + "eslint-utils": { + "version": "1.4.3", + "resolved": "https://registry.npmjs.org/eslint-utils/-/eslint-utils-1.4.3.tgz", + "integrity": "sha512-fbBN5W2xdY45KulGXmLHZ3c3FHfVYmKg0IrAKGOkT/464PQsx2UeIzfz1RmEci+KLm1bBaAzZAh8+/E+XAeZ8Q==", + "requires": { + "eslint-visitor-keys": "^1.1.0" + } + }, + "eslint-visitor-keys": { + "version": "1.3.0", + "resolved": "https://registry.npmjs.org/eslint-visitor-keys/-/eslint-visitor-keys-1.3.0.tgz", + "integrity": 
"sha512-6J72N8UNa462wa/KFODt/PJ3IU60SDpC3QXC1Hjc1BXXpfL2C9R5+AU7jhe0F6GREqVMh4Juu+NY7xn+6dipUQ==" + }, + "espree": { + "version": "6.2.1", + "resolved": "https://registry.npmjs.org/espree/-/espree-6.2.1.tgz", + "integrity": "sha512-ysCxRQY3WaXJz9tdbWOwuWr5Y/XrPTGX9Kiz3yoUXwW0VZ4w30HTkQLaGx/+ttFjF8i+ACbArnB4ce68a9m5hw==", + "requires": { + "acorn": "^7.1.1", + "acorn-jsx": "^5.2.0", + "eslint-visitor-keys": "^1.1.0" + } + }, + "file-entry-cache": { + "version": "5.0.1", + "resolved": "https://registry.npmjs.org/file-entry-cache/-/file-entry-cache-5.0.1.tgz", + "integrity": "sha512-bCg29ictuBaKUwwArK4ouCaqDgLZcysCFLmM/Yn/FDoqndh/9vNuQfXRDvTuXKLxfD/JtZQGKFT8MGcJBK644g==", + "requires": { + "flat-cache": "^2.0.1" + } + }, + "flat-cache": { + "version": "2.0.1", + "resolved": "https://registry.npmjs.org/flat-cache/-/flat-cache-2.0.1.tgz", + "integrity": "sha512-LoQe6yDuUMDzQAEH8sgmh4Md6oZnc/7PjtwjNFSzveXqSHt6ka9fPBuso7IGf9Rz4uqnSnWiFH2B/zj24a5ReA==", + "requires": { + "flatted": "^2.0.0", + "rimraf": "2.6.3", + "write": "1.0.3" + } + }, + "flatted": { + "version": "2.0.2", + "resolved": "https://registry.npmjs.org/flatted/-/flatted-2.0.2.tgz", + "integrity": "sha512-r5wGx7YeOwNWNlCA0wQ86zKyDLMQr+/RB8xy74M4hTphfmjlijTSSXGuH8rnvKZnfT9i+75zmd8jcKdMR4O6jA==" + }, + "glob-parent": { + "version": "5.1.2", + "resolved": "https://registry.npmjs.org/glob-parent/-/glob-parent-5.1.2.tgz", + "integrity": "sha512-AOIgSQCepiJYwP3ARnGx+5VnTu2HBYdzbGP45eLw1vr3zB3vZLeyed1sC9hnbcOc9/SrMyM5RPQrkGz4aS9Zow==", + "requires": { + "is-glob": "^4.0.1" + } + }, + "globals": { + "version": "12.4.0", + "resolved": "https://registry.npmjs.org/globals/-/globals-12.4.0.tgz", + "integrity": "sha512-BWICuzzDvDoH54NHKCseDanAhE3CeDorgDL5MT6LMXXj2WCnd9UC2szdk4AWLfjdgNBCXLUanXYcpBBKOSWGwg==", + "requires": { + "type-fest": "^0.8.1" + } + }, + "has-flag": { + "version": "3.0.0", + "resolved": "https://registry.npmjs.org/has-flag/-/has-flag-3.0.0.tgz", + "integrity": 
"sha512-sKJf1+ceQBr4SMkvQnBDNDtf4TXpVhVGateu0t918bl30FnbE2m4vNLX+VWe/dpjlb+HugGYzW7uQXH98HPEYw==" + }, + "ignore": { + "version": "4.0.6", + "resolved": "https://registry.npmjs.org/ignore/-/ignore-4.0.6.tgz", + "integrity": "sha512-cyFDKrqc/YdcWFniJhzI42+AzS+gNwmUzOSFcRCQYwySuBBBy/KjuxWLZ/FHEH6Moq1NizMOBWyTcv8O4OZIMg==" + }, + "isarray": { + "version": "1.0.0", + "resolved": "https://registry.npmjs.org/isarray/-/isarray-1.0.0.tgz", + "integrity": "sha512-VLghIWNM6ELQzo7zwmcg0NmTVyWKYjvIeM83yjp0wRDTmUnrM678fQbcKBo6n2CJEF0szoG//ytg+TKla89ALQ==" + }, + "jsx-ast-utils": { + "version": "2.4.1", + "resolved": "https://registry.npmjs.org/jsx-ast-utils/-/jsx-ast-utils-2.4.1.tgz", + "integrity": "sha512-z1xSldJ6imESSzOjd3NNkieVJKRlKYSOtMG8SFyCj2FIrvSaSuli/WjpBkEzCBoR9bYYYFgqJw61Xhu7Lcgk+w==", + "requires": { + "array-includes": "^3.1.1", + "object.assign": "^4.1.0" + } + }, + "levn": { + "version": "0.3.0", + "resolved": "https://registry.npmjs.org/levn/-/levn-0.3.0.tgz", + "integrity": "sha512-0OO4y2iOHix2W6ujICbKIaEQXvFQHue65vUG3pb5EUomzPI90z9hsA1VsO/dbIIpC53J8gxM9Q4Oho0jrCM/yA==", + "requires": { + "prelude-ls": "~1.1.2", + "type-check": "~0.3.2" + } + }, + "load-json-file": { + "version": "2.0.0", + "resolved": "https://registry.npmjs.org/load-json-file/-/load-json-file-2.0.0.tgz", + "integrity": "sha512-3p6ZOGNbiX4CdvEd1VcE6yi78UrGNpjHO33noGwHCnT/o2fyllJDepsm8+mFFv/DvtwFHht5HIHSyOy5a+ChVQ==", + "requires": { + "graceful-fs": "^4.1.2", + "parse-json": "^2.2.0", + "pify": "^2.0.0", + "strip-bom": "^3.0.0" + } + }, + "ms": { + "version": "2.0.0", + "resolved": "https://registry.npmjs.org/ms/-/ms-2.0.0.tgz", + "integrity": "sha512-Tpp60P6IUJDTuOq/5Z8cdskzJujfwqfOTkrwIwj7IRISpnkJnT6SyJ4PCPnGMoFjC9ddhal5KVIYtAt97ix05A==" + }, + "optionator": { + "version": "0.8.3", + "resolved": "https://registry.npmjs.org/optionator/-/optionator-0.8.3.tgz", + "integrity": "sha512-+IW9pACdk3XWmmTXG8m3upGUJst5XRGzxMRjXzAuJ1XnIFNvfhjjIuYkDvysnPQ7qzqVzLt78BCruntqRhWQbA==", + "requires": { + 
"deep-is": "~0.1.3", + "fast-levenshtein": "~2.0.6", + "levn": "~0.3.0", + "prelude-ls": "~1.1.2", + "type-check": "~0.3.2", + "word-wrap": "~1.2.3" + } + }, + "parse-json": { + "version": "2.2.0", + "resolved": "https://registry.npmjs.org/parse-json/-/parse-json-2.2.0.tgz", + "integrity": "sha512-QR/GGaKCkhwk1ePQNYDRKYZ3mwU9ypsKhB0XyFnLQdomyEqk3e8wpW3V5Jp88zbxK4n5ST1nqo+g9juTpownhQ==", + "requires": { + "error-ex": "^1.2.0" + } + }, + "path-key": { + "version": "2.0.1", + "resolved": "https://registry.npmjs.org/path-key/-/path-key-2.0.1.tgz", + "integrity": "sha512-fEHGKCSmUSDPv4uoj8AlD+joPlq3peND+HRYyxFz4KPw4z926S/b8rIuFs2FYJg3BwsxJf6A9/3eIdLaYC+9Dw==" + }, + "path-type": { + "version": "2.0.0", + "resolved": "https://registry.npmjs.org/path-type/-/path-type-2.0.0.tgz", + "integrity": "sha512-dUnb5dXUf+kzhC/W/F4e5/SkluXIFf5VUHolW1Eg1irn1hGWjPGdsRcvYJ1nD6lhk8Ir7VM0bHJKsYTx8Jx9OQ==", + "requires": { + "pify": "^2.0.0" + } + }, + "pify": { + "version": "2.3.0", + "resolved": "https://registry.npmjs.org/pify/-/pify-2.3.0.tgz", + "integrity": "sha512-udgsAY+fTnvv7kI7aaxbqwWNb0AHiB0qBO89PZKPkoTmGOgdbrHDKD+0B2X4uTfJ/FT1R09r9gTsjUjNJotuog==" + }, + "prelude-ls": { + "version": "1.1.2", + "resolved": "https://registry.npmjs.org/prelude-ls/-/prelude-ls-1.1.2.tgz", + "integrity": "sha512-ESF23V4SKG6lVSGZgYNpbsiaAkdab6ZgOxe52p7+Kid3W3u3bxR4Vfd/o21dmN7jSt0IwgZ4v5MUd26FEtXE9w==" + }, + "read-pkg": { + "version": "2.0.0", + "resolved": "https://registry.npmjs.org/read-pkg/-/read-pkg-2.0.0.tgz", + "integrity": "sha512-eFIBOPW7FGjzBuk3hdXEuNSiTZS/xEMlH49HxMyzb0hyPfu4EhVjT2DH32K1hSSmVq4sebAWnZuuY5auISUTGA==", + "requires": { + "load-json-file": "^2.0.0", + "normalize-package-data": "^2.3.2", + "path-type": "^2.0.0" + } + }, + "read-pkg-up": { + "version": "2.0.0", + "resolved": "https://registry.npmjs.org/read-pkg-up/-/read-pkg-up-2.0.0.tgz", + "integrity": "sha512-1orxQfbWGUiTn9XsPlChs6rLie/AV9jwZTGmu2NZw/CUDJQchXJFYE0Fq5j7+n558T1JhDWLdhyd1Zj+wLY//w==", + "requires": { + 
"find-up": "^2.0.0", + "read-pkg": "^2.0.0" + } + }, + "resolve": { + "version": "1.15.0", + "resolved": "https://registry.npmjs.org/resolve/-/resolve-1.15.0.tgz", + "integrity": "sha512-+hTmAldEGE80U2wJJDC1lebb5jWqvTYAfm3YZ1ckk1gBr0MnCqUKlwK1e+anaFljIl+F5tR5IoZcm4ZDA1zMQw==", + "requires": { + "path-parse": "^1.0.6" + } + }, + "rimraf": { + "version": "2.6.3", + "resolved": "https://registry.npmjs.org/rimraf/-/rimraf-2.6.3.tgz", + "integrity": "sha512-mwqeW5XsA2qAejG46gYdENaxXjx9onRNCfn7L0duuP4hCuTIi/QO7PDK07KJfp1d+izWPrzEJDcSqBa0OZQriA==", + "requires": { + "glob": "^7.1.3" + } + }, + "semver": { + "version": "6.3.0", + "resolved": "https://registry.npmjs.org/semver/-/semver-6.3.0.tgz", + "integrity": "sha512-b39TBaTSfV6yBrapU89p5fKekE2m/NwnDocOVruQFS1/veMgdzuPcnOM34M6CwxW8jH/lxEa5rBoDeUwu5HHTw==" + }, + "shebang-command": { + "version": "1.2.0", + "resolved": "https://registry.npmjs.org/shebang-command/-/shebang-command-1.2.0.tgz", + "integrity": "sha512-EV3L1+UQWGor21OmnvojK36mhg+TyIKDh3iFBKBohr5xeXIhNBcx8oWdgkTEEQ+BEFFYdLRuqMfd5L84N1V5Vg==", + "requires": { + "shebang-regex": "^1.0.0" + } + }, + "shebang-regex": { + "version": "1.0.0", + "resolved": "https://registry.npmjs.org/shebang-regex/-/shebang-regex-1.0.0.tgz", + "integrity": "sha512-wpoSFAxys6b2a2wHZ1XpDSgD7N9iVjg29Ph9uV/uaP9Ex/KXlkTZTeddxDPSYQpgvzKLGJke2UU0AzoGCjNIvQ==" + }, "source-map": { "version": "0.5.7", "resolved": "https://registry.npmjs.org/source-map/-/source-map-0.5.7.tgz", "integrity": "sha512-LbrmJOMUSdEVxIKvdcJzQC+nQhe8FUZQTXQy6+I75skNgn3OoQ0DZA8YnFa7gp8tqtL3KPf1kmo0R5DoApeSGQ==" + }, + "strip-ansi": { + "version": "5.2.0", + "resolved": "https://registry.npmjs.org/strip-ansi/-/strip-ansi-5.2.0.tgz", + "integrity": "sha512-DuRs1gKbBqsMKIZlrffwlug8MHkcnpjs5VPmL1PAh+mA30U0DTotfDZ0d2UUsXpPmPmMMJ6W773MaA3J+lbiWA==", + "requires": { + "ansi-regex": "^4.1.0" + } + }, + "supports-color": { + "version": "5.5.0", + "resolved": 
"https://registry.npmjs.org/supports-color/-/supports-color-5.5.0.tgz", + "integrity": "sha512-QjVjwdXIt408MIiAqCX4oUKsgU2EqAGzs2Ppkm4aQYbjm+ZEWEcW4SfFNTr4uMNZma0ey4f5lgLrkB0aX0QMow==", + "requires": { + "has-flag": "^3.0.0" + } + }, + "type-check": { + "version": "0.3.2", + "resolved": "https://registry.npmjs.org/type-check/-/type-check-0.3.2.tgz", + "integrity": "sha512-ZCmOJdvOWDBYJlzAoFkC+Q0+bUyEOS1ltgp1MGU03fqHG+dbi9tBFU2Rd9QKiDZFAYrhPh2JUf7rZRIuHRKtOg==", + "requires": { + "prelude-ls": "~1.1.2" + } + }, + "type-fest": { + "version": "0.8.1", + "resolved": "https://registry.npmjs.org/type-fest/-/type-fest-0.8.1.tgz", + "integrity": "sha512-4dbzIzqvjtgiM5rw1k5rEHtBANKmdudhGyBEajN01fEyhaAIhsoKNy6y7+IN93IfpFtwY9iqi7kD+xwKhQsNJA==" + }, + "which": { + "version": "1.3.1", + "resolved": "https://registry.npmjs.org/which/-/which-1.3.1.tgz", + "integrity": "sha512-HxJdYWq1MTIQbJ3nw0cqssHoTNU267KlrDuGZ1WYlxDStUtKUhOaJmh112/TZmHxxUfuJqPXSOm7tDyas0OSIQ==", + "requires": { + "isexe": "^2.0.0" + } } } }, @@ -34365,76 +39942,76 @@ } }, "read-pkg": { - "version": "2.0.0", - "resolved": "https://registry.npmjs.org/read-pkg/-/read-pkg-2.0.0.tgz", - "integrity": "sha512-eFIBOPW7FGjzBuk3hdXEuNSiTZS/xEMlH49HxMyzb0hyPfu4EhVjT2DH32K1hSSmVq4sebAWnZuuY5auISUTGA==", + "version": "3.0.0", + "resolved": "https://registry.npmjs.org/read-pkg/-/read-pkg-3.0.0.tgz", + "integrity": "sha512-BLq/cCO9two+lBgiTYNqD6GdtK8s4NpaWrl6/rCO9w0TUS8oJl7cmToOZfRYllKTISY6nt1U7jQ53brmKqY6BA==", "requires": { - "load-json-file": "^2.0.0", + "load-json-file": "^4.0.0", "normalize-package-data": "^2.3.2", - "path-type": "^2.0.0" + "path-type": "^3.0.0" }, "dependencies": { "path-type": { - "version": "2.0.0", - "resolved": "https://registry.npmjs.org/path-type/-/path-type-2.0.0.tgz", - "integrity": "sha512-dUnb5dXUf+kzhC/W/F4e5/SkluXIFf5VUHolW1Eg1irn1hGWjPGdsRcvYJ1nD6lhk8Ir7VM0bHJKsYTx8Jx9OQ==", + "version": "3.0.0", + "resolved": "https://registry.npmjs.org/path-type/-/path-type-3.0.0.tgz", + "integrity": 
"sha512-T2ZUsdZFHgA3u4e5PfPbjd7HDDpxPnQb5jN0SrDsjNSuVXHJqtwTnWqG0B1jZrgmJ/7lj1EmVIByWt1gxGkWvg==", "requires": { - "pify": "^2.0.0" + "pify": "^3.0.0" } }, "pify": { - "version": "2.3.0", - "resolved": "https://registry.npmjs.org/pify/-/pify-2.3.0.tgz", - "integrity": "sha512-udgsAY+fTnvv7kI7aaxbqwWNb0AHiB0qBO89PZKPkoTmGOgdbrHDKD+0B2X4uTfJ/FT1R09r9gTsjUjNJotuog==" + "version": "3.0.0", + "resolved": "https://registry.npmjs.org/pify/-/pify-3.0.0.tgz", + "integrity": "sha512-C3FsVNH1udSEX48gGX1xfvwTWfsYWj5U+8/uK15BGzIGrKoUpghX8hWZwa/OFnakBiiVNmBvemTJR5mcy7iPcg==" } } }, "read-pkg-up": { - "version": "2.0.0", - "resolved": "https://registry.npmjs.org/read-pkg-up/-/read-pkg-up-2.0.0.tgz", - "integrity": "sha512-1orxQfbWGUiTn9XsPlChs6rLie/AV9jwZTGmu2NZw/CUDJQchXJFYE0Fq5j7+n558T1JhDWLdhyd1Zj+wLY//w==", + "version": "4.0.0", + "resolved": "https://registry.npmjs.org/read-pkg-up/-/read-pkg-up-4.0.0.tgz", + "integrity": "sha512-6etQSH7nJGsK0RbG/2TeDzZFa8shjQ1um+SwQQ5cwKy0dhSXdOncEhb1CPpvQG4h7FyOV6EB6YlV0yJvZQNAkA==", "requires": { - "find-up": "^2.0.0", - "read-pkg": "^2.0.0" + "find-up": "^3.0.0", + "read-pkg": "^3.0.0" }, "dependencies": { "find-up": { - "version": "2.1.0", - "resolved": "https://registry.npmjs.org/find-up/-/find-up-2.1.0.tgz", - "integrity": "sha512-NWzkk0jSJtTt08+FBFMvXoeZnOJD+jTtsRmBYbAIzJdX6l7dLgR7CTubCM5/eDdPUBvLCeVasP1brfVR/9/EZQ==", + "version": "3.0.0", + "resolved": "https://registry.npmjs.org/find-up/-/find-up-3.0.0.tgz", + "integrity": "sha512-1yD6RmLI1XBfxugvORwlck6f75tYL+iR0jqwsOrOxMZyGYqUuDhJ0l4AXdO1iX/FTs9cBAMEk1gWSEx1kSbylg==", "requires": { - "locate-path": "^2.0.0" + "locate-path": "^3.0.0" } }, "locate-path": { - "version": "2.0.0", - "resolved": "https://registry.npmjs.org/locate-path/-/locate-path-2.0.0.tgz", - "integrity": "sha512-NCI2kiDkyR7VeEKm27Kda/iQHyKJe1Bu0FlTbYp3CqJu+9IFe9bLyAjMxf5ZDDbEg+iMPzB5zYyUTSm8wVTKmA==", + "version": "3.0.0", + "resolved": "https://registry.npmjs.org/locate-path/-/locate-path-3.0.0.tgz", + 
"integrity": "sha512-7AO748wWnIhNqAuaty2ZWHkQHRSNfPVIsPIfwEOWO22AmaoVrWavlOcMR5nzTLNYvp36X220/maaRsrec1G65A==", "requires": { - "p-locate": "^2.0.0", + "p-locate": "^3.0.0", "path-exists": "^3.0.0" } }, "p-limit": { - "version": "1.3.0", - "resolved": "https://registry.npmjs.org/p-limit/-/p-limit-1.3.0.tgz", - "integrity": "sha512-vvcXsLAJ9Dr5rQOPk7toZQZJApBl2K4J6dANSsEuh6QI41JYcsS/qhTGa9ErIUUgK3WNQoJYvylxvjqmiqEA9Q==", + "version": "2.3.0", + "resolved": "https://registry.npmjs.org/p-limit/-/p-limit-2.3.0.tgz", + "integrity": "sha512-//88mFWSJx8lxCzwdAABTJL2MyWB12+eIY7MDL2SqLmAkeKU9qxRvWuSyTjm3FUmpBEMuFfckAIqEaVGUDxb6w==", "requires": { - "p-try": "^1.0.0" + "p-try": "^2.0.0" } }, "p-locate": { - "version": "2.0.0", - "resolved": "https://registry.npmjs.org/p-locate/-/p-locate-2.0.0.tgz", - "integrity": "sha512-nQja7m7gSKuewoVRen45CtVfODR3crN3goVQ0DDZ9N3yHxgpkuBhZqsaiotSQRrADUrne346peY7kT3TSACykg==", + "version": "3.0.0", + "resolved": "https://registry.npmjs.org/p-locate/-/p-locate-3.0.0.tgz", + "integrity": "sha512-x+12w/To+4GFfgJhBEpiDcLozRJGegY+Ei7/z0tSLkMmxGZNybVMSfWj9aJn8Z5Fc7dBUNJOOVgPv2H7IwulSQ==", "requires": { - "p-limit": "^1.1.0" + "p-limit": "^2.0.0" } }, "p-try": { - "version": "1.0.0", - "resolved": "https://registry.npmjs.org/p-try/-/p-try-1.0.0.tgz", - "integrity": "sha512-U1etNYuMJoIz3ZXSrrySFjsXQTWOx2/jdi86L+2pRvph/qMKL6sbcCYdH23fqsbm8TH2Gn0OybpT4eSFlCVHww==" + "version": "2.2.0", + "resolved": "https://registry.npmjs.org/p-try/-/p-try-2.2.0.tgz", + "integrity": "sha512-R4nPAVTAU0B9D35/Gk3uJf/7XYbQcyohSKdvAxIRSNghFl4e71hVoGnBNQz9cWaXxO2I10KTC+3jMdvvoKw6dQ==" } } }, @@ -34762,11 +40339,13 @@ "integrity": "sha512-KigOCHcocU3XODJxsu8i/j8T9tzT4adHiecwORRQ0ZZFcp7ahwXuRU1m+yuO90C5ZUyGeGfocHDI14M3L3yDAQ==" }, "resolve": { - "version": "1.15.0", - "resolved": "https://registry.npmjs.org/resolve/-/resolve-1.15.0.tgz", - "integrity": "sha512-+hTmAldEGE80U2wJJDC1lebb5jWqvTYAfm3YZ1ckk1gBr0MnCqUKlwK1e+anaFljIl+F5tR5IoZcm4ZDA1zMQw==", + "version": "1.22.0", + 
"resolved": "https://registry.npmjs.org/resolve/-/resolve-1.22.0.tgz", + "integrity": "sha512-Hhtrw0nLeSrFQ7phPp4OOcVjLPIeMnRlr5mcnVuMe7M/7eBn98A3hmFRLoFo3DLZkivSYwhRUJTyPyWAk56WLw==", "requires": { - "path-parse": "^1.0.6" + "is-core-module": "^2.8.1", + "path-parse": "^1.0.7", + "supports-preserve-symlinks-flag": "^1.0.0" } }, "resolve-cwd": { @@ -34775,12 +40354,19 @@ "integrity": "sha512-ccu8zQTrzVr954472aUVPLEcB3YpKSYR3cg/3lo1okzobPBM+1INXBbBZlDbnI/hbEocnf8j0QVo43hQKrbchg==", "requires": { "resolve-from": "^3.0.0" + }, + "dependencies": { + "resolve-from": { + "version": "3.0.0", + "resolved": "https://registry.npmjs.org/resolve-from/-/resolve-from-3.0.0.tgz", + "integrity": "sha512-GnlH6vxLymXJNMBo7XP1fJIzBFbdYt49CuTwmB/6N53t+kMPRMFKz783LlQ4tv28XoQfMWinAJX6WCGf2IlaIw==" + } } }, "resolve-from": { - "version": "3.0.0", - "resolved": "https://registry.npmjs.org/resolve-from/-/resolve-from-3.0.0.tgz", - "integrity": "sha512-GnlH6vxLymXJNMBo7XP1fJIzBFbdYt49CuTwmB/6N53t+kMPRMFKz783LlQ4tv28XoQfMWinAJX6WCGf2IlaIw==" + "version": "4.0.0", + "resolved": "https://registry.npmjs.org/resolve-from/-/resolve-from-4.0.0.tgz", + "integrity": "sha512-pb/MYmXstAkysRFx8piNI1tGFNQIFA3vkE3Gq4EuA1dF6gHp/+vgZqsCGJapvy8N3Q+4o7FwvquPJcnZ7RYy4g==" }, "resolve-pathname": { "version": "3.0.0", @@ -34863,6 +40449,11 @@ "resolved": "https://registry.npmjs.org/emojis-list/-/emojis-list-2.1.0.tgz", "integrity": "sha512-knHEZMgs8BB+MInokmNTg/OyPlAddghe1YBgNwJBc5zsJi/uyIcXoSDsL/W9ymOsBoBGdPIHXYJ9+qKFwRwDng==" }, + "escape-string-regexp": { + "version": "1.0.5", + "resolved": "https://registry.npmjs.org/escape-string-regexp/-/escape-string-regexp-1.0.5.tgz", + "integrity": "sha512-vbRorB5FUQWvla16U8R/qgaFIya2qGzwDrNmCZuYKrbdSUMG6I1ZCGQRefkRVhuOkIGVne7BQ35DSfo1qvJqFg==" + }, "has-flag": { "version": "3.0.0", "resolved": "https://registry.npmjs.org/has-flag/-/has-flag-3.0.0.tgz", @@ -34925,6 +40516,12 @@ "resolved": "https://registry.npmjs.org/retry/-/retry-0.12.0.tgz", "integrity": 
"sha512-9LkiTwjUh6rT555DtE9rTX+BKByPfrMzEAtnlEtdEwr3Nkffwiihqe2bWADg+OQRjt9gl6ICdmB/ZFDCGAtSow==" }, + "reusify": { + "version": "1.0.4", + "resolved": "https://registry.npmjs.org/reusify/-/reusify-1.0.4.tgz", + "integrity": "sha512-U9nH88a3fc/ekCF1l0/UP1IosiuIjyTh7hBvXVMHYgVcfGvt897Xguj2UOLDeI5BG2m7/uwyaLVT6fbtCwTyzw==", + "dev": true + }, "rework": { "version": "1.0.1", "resolved": "https://registry.npmjs.org/rework/-/rework-1.0.1.tgz", @@ -34957,9 +40554,9 @@ "integrity": "sha512-zgn5OjNQXLUTdq8m17KdaicF6w89TZs8ZU8y0AYENIU6wG8GG6LLm0yLSiPY8DmaYmHdgRW8rnApjoT0fQRfMg==" }, "rimraf": { - "version": "2.6.3", - "resolved": "https://registry.npmjs.org/rimraf/-/rimraf-2.6.3.tgz", - "integrity": "sha512-mwqeW5XsA2qAejG46gYdENaxXjx9onRNCfn7L0duuP4hCuTIi/QO7PDK07KJfp1d+izWPrzEJDcSqBa0OZQriA==", + "version": "3.0.2", + "resolved": "https://registry.npmjs.org/rimraf/-/rimraf-3.0.2.tgz", + "integrity": "sha512-JZkJMZkAGFFPP2YqXZXPbMlMBgsxzE8ILs4lMIX/2o0L9UBw9O/Y3o6wFw/i9YLapcUJWwqbi3kdxIPdC62TIA==", "requires": { "glob": "^7.1.3" } @@ -34983,6 +40580,15 @@ "resolved": "https://registry.npmjs.org/run-async/-/run-async-2.4.1.tgz", "integrity": "sha512-tvVnVv01b8c1RrA6Ep7JkStj85Guv/YrMcwqYQnwjsAS2cTmmPGBBjAjpCW7RrSodNSoE2/qg9O4bceNvUuDgQ==" }, + "run-parallel": { + "version": "1.2.0", + "resolved": "https://registry.npmjs.org/run-parallel/-/run-parallel-1.2.0.tgz", + "integrity": "sha512-5l4VyZR86LZ/lDxZTR6jqL8AFE2S0IFLMP26AbjsLVADxHdhB/c0GUsH+y39UfCi3dzz8OlQuPmnaJOMoDHQBA==", + "dev": true, + "requires": { + "queue-microtask": "^1.2.2" + } + }, "run-queue": { "version": "1.0.3", "resolved": "https://registry.npmjs.org/run-queue/-/run-queue-1.0.3.tgz", @@ -35046,6 +40652,108 @@ "micromatch": "^3.1.4", "minimist": "^1.1.1", "walker": "~1.0.5" + }, + "dependencies": { + "braces": { + "version": "2.3.2", + "resolved": "https://registry.npmjs.org/braces/-/braces-2.3.2.tgz", + "integrity": 
"sha512-aNdbnj9P8PjdXU4ybaWLK2IF3jc/EoDYbC7AazW6to3TRsfXxscC9UXOB5iDiEQrkyIbWp2SLQda4+QAa7nc3w==", + "requires": { + "arr-flatten": "^1.1.0", + "array-unique": "^0.3.2", + "extend-shallow": "^2.0.1", + "fill-range": "^4.0.0", + "isobject": "^3.0.1", + "repeat-element": "^1.1.2", + "snapdragon": "^0.8.1", + "snapdragon-node": "^2.0.1", + "split-string": "^3.0.2", + "to-regex": "^3.0.1" + }, + "dependencies": { + "extend-shallow": { + "version": "2.0.1", + "resolved": "https://registry.npmjs.org/extend-shallow/-/extend-shallow-2.0.1.tgz", + "integrity": "sha512-zCnTtlxNoAiDc3gqY2aYAWFx7XWWiasuF2K8Me5WbN8otHKTUKBwjPtNpRs/rbUZm7KxWAaNj7P1a/p52GbVug==", + "requires": { + "is-extendable": "^0.1.0" + } + } + } + }, + "fill-range": { + "version": "4.0.0", + "resolved": "https://registry.npmjs.org/fill-range/-/fill-range-4.0.0.tgz", + "integrity": "sha512-VcpLTWqWDiTerugjj8e3+esbg+skS3M9e54UuR3iCeIDMXCLTsAH8hTSzDQU/X6/6t3eYkOKoZSef2PlU6U1XQ==", + "requires": { + "extend-shallow": "^2.0.1", + "is-number": "^3.0.0", + "repeat-string": "^1.6.1", + "to-regex-range": "^2.1.0" + }, + "dependencies": { + "extend-shallow": { + "version": "2.0.1", + "resolved": "https://registry.npmjs.org/extend-shallow/-/extend-shallow-2.0.1.tgz", + "integrity": "sha512-zCnTtlxNoAiDc3gqY2aYAWFx7XWWiasuF2K8Me5WbN8otHKTUKBwjPtNpRs/rbUZm7KxWAaNj7P1a/p52GbVug==", + "requires": { + "is-extendable": "^0.1.0" + } + } + } + }, + "is-number": { + "version": "3.0.0", + "resolved": "https://registry.npmjs.org/is-number/-/is-number-3.0.0.tgz", + "integrity": "sha512-4cboCqIpliH+mAvFNegjZQ4kgKc3ZUhQVr3HvWbSh5q3WH2v82ct+T2Y1hdU5Gdtorx/cLifQjqCbL7bpznLTg==", + "requires": { + "kind-of": "^3.0.2" + }, + "dependencies": { + "kind-of": { + "version": "3.2.2", + "resolved": "https://registry.npmjs.org/kind-of/-/kind-of-3.2.2.tgz", + "integrity": "sha512-NOW9QQXMoZGg/oqnVNoNTTIFEIid1627WCffUBJEdMxYApq7mNE7CpzucIPc+ZQg25Phej7IJSmX3hO+oblOtQ==", + "requires": { + "is-buffer": "^1.1.5" + } + } + } + }, + "kind-of": { + 
"version": "6.0.3", + "resolved": "https://registry.npmjs.org/kind-of/-/kind-of-6.0.3.tgz", + "integrity": "sha512-dcS1ul+9tmeD95T+x28/ehLgd9mENa3LsvDTtzm3vyBEO7RPptvAD+t44WVXaUjTBRcrpFeFlC8WCruUR456hw==" + }, + "micromatch": { + "version": "3.1.10", + "resolved": "https://registry.npmjs.org/micromatch/-/micromatch-3.1.10.tgz", + "integrity": "sha512-MWikgl9n9M3w+bpsY3He8L+w9eF9338xRl8IAO5viDizwSzziFEyUzo2xrrloB64ADbTf8uA8vRqqttDTOmccg==", + "requires": { + "arr-diff": "^4.0.0", + "array-unique": "^0.3.2", + "braces": "^2.3.1", + "define-property": "^2.0.2", + "extend-shallow": "^3.0.2", + "extglob": "^2.0.4", + "fragment-cache": "^0.2.1", + "kind-of": "^6.0.2", + "nanomatch": "^1.2.9", + "object.pick": "^1.3.0", + "regex-not": "^1.0.0", + "snapdragon": "^0.8.1", + "to-regex": "^3.0.2" + } + }, + "to-regex-range": { + "version": "2.1.1", + "resolved": "https://registry.npmjs.org/to-regex-range/-/to-regex-range-2.1.1.tgz", + "integrity": "sha512-ZZWNfCjUokXXDGXFpZehJIkZqq91BcULFq/Pi7M5i4JnxXdhMKAK682z8bCW3o8Hj1wuuzoKcW3DfVzaP6VuNg==", + "requires": { + "is-number": "^3.0.0", + "repeat-string": "^1.6.1" + } + } } }, "sanitize.css": { @@ -35080,6 +40788,11 @@ "resolved": "https://registry.npmjs.org/kind-of/-/kind-of-6.0.3.tgz", "integrity": "sha512-dcS1ul+9tmeD95T+x28/ehLgd9mENa3LsvDTtzm3vyBEO7RPptvAD+t44WVXaUjTBRcrpFeFlC8WCruUR456hw==" }, + "semver": { + "version": "6.3.0", + "resolved": "https://registry.npmjs.org/semver/-/semver-6.3.0.tgz", + "integrity": "sha512-b39TBaTSfV6yBrapU89p5fKekE2m/NwnDocOVruQFS1/veMgdzuPcnOM34M6CwxW8jH/lxEa5rBoDeUwu5HHTw==" + }, "shallow-clone": { "version": "3.0.1", "resolved": "https://registry.npmjs.org/shallow-clone/-/shallow-clone-3.0.1.tgz", @@ -35136,9 +40849,12 @@ } }, "semver": { - "version": "6.3.0", - "resolved": "https://registry.npmjs.org/semver/-/semver-6.3.0.tgz", - "integrity": "sha512-b39TBaTSfV6yBrapU89p5fKekE2m/NwnDocOVruQFS1/veMgdzuPcnOM34M6CwxW8jH/lxEa5rBoDeUwu5HHTw==" + "version": "7.3.7", + "resolved": 
"https://registry.npmjs.org/semver/-/semver-7.3.7.tgz", + "integrity": "sha512-QlYTucUYOews+WeEujDoEGziz4K6c47V/Bd+LjSSYcA94p+DmINdf7ncaUinThfvZyu13lN9OY1XDxt8C0Tw0g==", + "requires": { + "lru-cache": "^6.0.0" + } }, "send": { "version": "0.18.0", @@ -35289,11 +41005,6 @@ "requires": { "is-extendable": "^0.1.0" } - }, - "is-extendable": { - "version": "0.1.1", - "resolved": "https://registry.npmjs.org/is-extendable/-/is-extendable-0.1.1.tgz", - "integrity": "sha512-5BMULNob1vgFX6EjQw5izWDxrecWK9AM72rugNr0TFldMOi0fj6Jk+zeKIt0xGj4cEfQIJth4w3OKWOJ4f+AFw==" } } }, @@ -35327,11 +41038,6 @@ "mixin-object": "^2.0.1" }, "dependencies": { - "is-extendable": { - "version": "0.1.1", - "resolved": "https://registry.npmjs.org/is-extendable/-/is-extendable-0.1.1.tgz", - "integrity": "sha512-5BMULNob1vgFX6EjQw5izWDxrecWK9AM72rugNr0TFldMOi0fj6Jk+zeKIt0xGj4cEfQIJth4w3OKWOJ4f+AFw==" - }, "kind-of": { "version": "2.0.1", "resolved": "https://registry.npmjs.org/kind-of/-/kind-of-2.0.1.tgz", @@ -35348,17 +41054,17 @@ } }, "shebang-command": { - "version": "1.2.0", - "resolved": "https://registry.npmjs.org/shebang-command/-/shebang-command-1.2.0.tgz", - "integrity": "sha512-EV3L1+UQWGor21OmnvojK36mhg+TyIKDh3iFBKBohr5xeXIhNBcx8oWdgkTEEQ+BEFFYdLRuqMfd5L84N1V5Vg==", + "version": "2.0.0", + "resolved": "https://registry.npmjs.org/shebang-command/-/shebang-command-2.0.0.tgz", + "integrity": "sha512-kHxr2zZpYtdmrN1qDjrrX/Z1rR1kG8Dx+gkpK1G4eXmvXswmcE1hTWBWYUzlraYw1/yZp6YuDY77YtvbN0dmDA==", "requires": { - "shebang-regex": "^1.0.0" + "shebang-regex": "^3.0.0" } }, "shebang-regex": { - "version": "1.0.0", - "resolved": "https://registry.npmjs.org/shebang-regex/-/shebang-regex-1.0.0.tgz", - "integrity": "sha512-wpoSFAxys6b2a2wHZ1XpDSgD7N9iVjg29Ph9uV/uaP9Ex/KXlkTZTeddxDPSYQpgvzKLGJke2UU0AzoGCjNIvQ==" + "version": "3.0.0", + "resolved": "https://registry.npmjs.org/shebang-regex/-/shebang-regex-3.0.0.tgz", + "integrity": 
"sha512-7++dFhtcx3353uBaq8DDR4NuxBetBzC7ZQOhmTQInHEd6bSrXdiEyzCvG07Z44UYdLShWUyXt5M/yhz8ekcb1A==" }, "shell-quote": { "version": "1.7.2", @@ -35406,9 +41112,9 @@ "integrity": "sha512-bLGGlR1QxBcynn2d5YmDX4MGjlZvy2MRBDRNHLJ8VI6l6+9FUiyTFNJ0IveOSP0bcXgVDPRcfGqA0pjaqUpfVg==" }, "slash": { - "version": "2.0.0", - "resolved": "https://registry.npmjs.org/slash/-/slash-2.0.0.tgz", - "integrity": "sha512-ZYKh3Wh2z1PpEXWr0MpSBZ0V6mZHAQfYevttO11c51CaWjGTaadiKZ+wVt1PbMlDV5qhMFslpZCemhwOK7C89A==" + "version": "3.0.0", + "resolved": "https://registry.npmjs.org/slash/-/slash-3.0.0.tgz", + "integrity": "sha512-g9Q1haeby36OSStwb4ntCGGGaKsaVSjQ68fBxoQcutl5fS1vuY18H3wSt3jFyFtrkx+Kz0V1G85A4MyAdDMi2Q==" }, "slice-ansi": { "version": "2.1.0", @@ -35520,11 +41226,6 @@ } } }, - "is-extendable": { - "version": "0.1.1", - "resolved": "https://registry.npmjs.org/is-extendable/-/is-extendable-0.1.1.tgz", - "integrity": "sha512-5BMULNob1vgFX6EjQw5izWDxrecWK9AM72rugNr0TFldMOi0fj6Jk+zeKIt0xGj4cEfQIJth4w3OKWOJ4f+AFw==" - }, "ms": { "version": "2.0.0", "resolved": "https://registry.npmjs.org/ms/-/ms-2.0.0.tgz", @@ -35933,14 +41634,6 @@ "version": "8.0.0", "resolved": "https://registry.npmjs.org/emoji-regex/-/emoji-regex-8.0.0.tgz", "integrity": "sha512-MSjYzcWNOA0ewAHpz0MxpYFvwg6yjy1NG3xteoqz644VCo/RPgnr1/GGt+ic3iJTzQ8Eu3TdM14SawnVUmGE6A==" - }, - "strip-ansi": { - "version": "6.0.1", - "resolved": "https://registry.npmjs.org/strip-ansi/-/strip-ansi-6.0.1.tgz", - "integrity": "sha512-Y38VPSHcqkFrCpFnQ9vuSXmquuv5oXOKpGeT6aGrr3o3Gc9AlVa6JBfUSOCnbxGGZF+/0ooI7KrPuUSztUdU5A==", - "requires": { - "ansi-regex": "^5.0.1" - } } } }, @@ -35990,18 +41683,11 @@ } }, "strip-ansi": { - "version": "5.2.0", - "resolved": "https://registry.npmjs.org/strip-ansi/-/strip-ansi-5.2.0.tgz", - "integrity": "sha512-DuRs1gKbBqsMKIZlrffwlug8MHkcnpjs5VPmL1PAh+mA30U0DTotfDZ0d2UUsXpPmPmMMJ6W773MaA3J+lbiWA==", + "version": "6.0.1", + "resolved": "https://registry.npmjs.org/strip-ansi/-/strip-ansi-6.0.1.tgz", + "integrity": 
"sha512-Y38VPSHcqkFrCpFnQ9vuSXmquuv5oXOKpGeT6aGrr3o3Gc9AlVa6JBfUSOCnbxGGZF+/0ooI7KrPuUSztUdU5A==", "requires": { - "ansi-regex": "^4.1.0" - }, - "dependencies": { - "ansi-regex": { - "version": "4.1.1", - "resolved": "https://registry.npmjs.org/ansi-regex/-/ansi-regex-4.1.1.tgz", - "integrity": "sha512-ILlv4k/3f6vfQ4OoP2AGvirOktlQ98ZEL1k9FaQjxa3L1abBgbuTDAdPOpvbGncC0BTVQrl+OM8xZGK6tWXt7g==" - } + "ansi-regex": "^5.0.1" } }, "strip-bom": { @@ -36235,6 +41921,11 @@ } } }, + "escape-string-regexp": { + "version": "1.0.5", + "resolved": "https://registry.npmjs.org/escape-string-regexp/-/escape-string-regexp-1.0.5.tgz", + "integrity": "sha512-vbRorB5FUQWvla16U8R/qgaFIya2qGzwDrNmCZuYKrbdSUMG6I1ZCGQRefkRVhuOkIGVne7BQ35DSfo1qvJqFg==" + }, "has-flag": { "version": "3.0.0", "resolved": "https://registry.npmjs.org/has-flag/-/has-flag-3.0.0.tgz", @@ -36279,6 +41970,16 @@ "string-width": "^3.0.0" }, "dependencies": { + "ansi-regex": { + "version": "4.1.1", + "resolved": "https://registry.npmjs.org/ansi-regex/-/ansi-regex-4.1.1.tgz", + "integrity": "sha512-ILlv4k/3f6vfQ4OoP2AGvirOktlQ98ZEL1k9FaQjxa3L1abBgbuTDAdPOpvbGncC0BTVQrl+OM8xZGK6tWXt7g==" + }, + "emoji-regex": { + "version": "7.0.3", + "resolved": "https://registry.npmjs.org/emoji-regex/-/emoji-regex-7.0.3.tgz", + "integrity": "sha512-CwBLREIQ7LvYFB0WyRvwhq5N5qPhc6PMjD6bYggFlI5YyDgl+0vxq5VHbMOFqLg7hfWzmu8T5Z1QofhmTIhItA==" + }, "is-fullwidth-code-point": { "version": "2.0.0", "resolved": "https://registry.npmjs.org/is-fullwidth-code-point/-/is-fullwidth-code-point-2.0.0.tgz", @@ -36293,6 +41994,14 @@ "is-fullwidth-code-point": "^2.0.0", "strip-ansi": "^5.1.0" } + }, + "strip-ansi": { + "version": "5.2.0", + "resolved": "https://registry.npmjs.org/strip-ansi/-/strip-ansi-5.2.0.tgz", + "integrity": "sha512-DuRs1gKbBqsMKIZlrffwlug8MHkcnpjs5VPmL1PAh+mA30U0DTotfDZ0d2UUsXpPmPmMMJ6W773MaA3J+lbiWA==", + "requires": { + "ansi-regex": "^4.1.0" + } } } }, @@ -36309,6 +42018,13 @@ "commander": "^2.20.0", "source-map": "~0.6.1", 
"source-map-support": "~0.5.12" + }, + "dependencies": { + "commander": { + "version": "2.20.3", + "resolved": "https://registry.npmjs.org/commander/-/commander-2.20.3.tgz", + "integrity": "sha512-GpVkmM8vF2vQUkj2LvZmD35JxeJOLCwJ9cUkugyk2nuhbv3+mJvpLYYt+0+USMxE+oj+ey/lJEnhZw75x/OMcQ==" + } } }, "terser-webpack-plugin": { @@ -36371,6 +42087,14 @@ "semver": "^6.0.0" } }, + "p-limit": { + "version": "2.3.0", + "resolved": "https://registry.npmjs.org/p-limit/-/p-limit-2.3.0.tgz", + "integrity": "sha512-//88mFWSJx8lxCzwdAABTJL2MyWB12+eIY7MDL2SqLmAkeKU9qxRvWuSyTjm3FUmpBEMuFfckAIqEaVGUDxb6w==", + "requires": { + "p-try": "^2.0.0" + } + }, "p-locate": { "version": "4.1.0", "resolved": "https://registry.npmjs.org/p-locate/-/p-locate-4.1.0.tgz", @@ -36379,6 +42103,11 @@ "p-limit": "^2.2.0" } }, + "p-try": { + "version": "2.2.0", + "resolved": "https://registry.npmjs.org/p-try/-/p-try-2.2.0.tgz", + "integrity": "sha512-R4nPAVTAU0B9D35/Gk3uJf/7XYbQcyohSKdvAxIRSNghFl4e71hVoGnBNQz9cWaXxO2I10KTC+3jMdvvoKw6dQ==" + }, "path-exists": { "version": "4.0.0", "resolved": "https://registry.npmjs.org/path-exists/-/path-exists-4.0.0.tgz", @@ -36391,6 +42120,11 @@ "requires": { "find-up": "^4.0.0" } + }, + "semver": { + "version": "6.3.0", + "resolved": "https://registry.npmjs.org/semver/-/semver-6.3.0.tgz", + "integrity": "sha512-b39TBaTSfV6yBrapU89p5fKekE2m/NwnDocOVruQFS1/veMgdzuPcnOM34M6CwxW8jH/lxEa5rBoDeUwu5HHTw==" } } }, @@ -36403,43 +42137,6 @@ "minimatch": "^3.0.4", "read-pkg-up": "^4.0.0", "require-main-filename": "^2.0.0" - }, - "dependencies": { - "load-json-file": { - "version": "4.0.0", - "resolved": "https://registry.npmjs.org/load-json-file/-/load-json-file-4.0.0.tgz", - "integrity": "sha512-Kx8hMakjX03tiGTLAIdJ+lL0htKnXjEZN6hk/tozf/WOuYGdZBJrZ+rCJRbVCugsjB3jMLn9746NsQIf5VjBMw==", - "requires": { - "graceful-fs": "^4.1.2", - "parse-json": "^4.0.0", - "pify": "^3.0.0", - "strip-bom": "^3.0.0" - } - }, - "pify": { - "version": "3.0.0", - "resolved": 
"https://registry.npmjs.org/pify/-/pify-3.0.0.tgz", - "integrity": "sha512-C3FsVNH1udSEX48gGX1xfvwTWfsYWj5U+8/uK15BGzIGrKoUpghX8hWZwa/OFnakBiiVNmBvemTJR5mcy7iPcg==" - }, - "read-pkg": { - "version": "3.0.0", - "resolved": "https://registry.npmjs.org/read-pkg/-/read-pkg-3.0.0.tgz", - "integrity": "sha512-BLq/cCO9two+lBgiTYNqD6GdtK8s4NpaWrl6/rCO9w0TUS8oJl7cmToOZfRYllKTISY6nt1U7jQ53brmKqY6BA==", - "requires": { - "load-json-file": "^4.0.0", - "normalize-package-data": "^2.3.2", - "path-type": "^3.0.0" - } - }, - "read-pkg-up": { - "version": "4.0.0", - "resolved": "https://registry.npmjs.org/read-pkg-up/-/read-pkg-up-4.0.0.tgz", - "integrity": "sha512-6etQSH7nJGsK0RbG/2TeDzZFa8shjQ1um+SwQQ5cwKy0dhSXdOncEhb1CPpvQG4h7FyOV6EB6YlV0yJvZQNAkA==", - "requires": { - "find-up": "^3.0.0", - "read-pkg": "^3.0.0" - } - } } }, "text-table": { @@ -36545,12 +42242,11 @@ } }, "to-regex-range": { - "version": "2.1.1", - "resolved": "https://registry.npmjs.org/to-regex-range/-/to-regex-range-2.1.1.tgz", - "integrity": "sha512-ZZWNfCjUokXXDGXFpZehJIkZqq91BcULFq/Pi7M5i4JnxXdhMKAK682z8bCW3o8Hj1wuuzoKcW3DfVzaP6VuNg==", + "version": "5.0.1", + "resolved": "https://registry.npmjs.org/to-regex-range/-/to-regex-range-5.0.1.tgz", + "integrity": "sha512-65P7iz6X5yEr1cwcgvQxbbIw7Uk3gOy5dIdtZ4rDveLqhrdJP+Li/Hx6tyK0NEb+2GCyneCMJiGqrADCSNk8sQ==", "requires": { - "is-number": "^3.0.0", - "repeat-string": "^1.6.1" + "is-number": "^7.0.0" } }, "toidentifier": { @@ -36585,6 +42281,27 @@ "resolved": "https://registry.npmjs.org/ts-pnp/-/ts-pnp-1.1.6.tgz", "integrity": "sha512-CrG5GqAAzMT7144Cl+UIFP7mz/iIhiy+xQ6GGcnjTezhALT02uPMRw7tgDSESgB5MsfKt55+GPWw4ir1kVtMIQ==" }, + "tsconfig-paths": { + "version": "3.14.1", + "resolved": "https://registry.npmjs.org/tsconfig-paths/-/tsconfig-paths-3.14.1.tgz", + "integrity": "sha512-fxDhWnFSLt3VuTwtvJt5fpwxBHg5AdKWMsgcPOOIilyjymcYVZoCQF8fvFRezCNfblEXmi+PcM1eYHeOAgXCOQ==", + "requires": { + "@types/json5": "^0.0.29", + "json5": "^1.0.1", + "minimist": "^1.2.6", + 
"strip-bom": "^3.0.0" + }, + "dependencies": { + "json5": { + "version": "1.0.1", + "resolved": "https://registry.npmjs.org/json5/-/json5-1.0.1.tgz", + "integrity": "sha512-aKS4WQjPenRxiQsC93MNfjx+nbF4PAdYzmd/1JIj8HYzqfbu86beTuNgXDzPknWk0n0uARlyewZo4s++ES36Ow==", + "requires": { + "minimist": "^1.2.0" + } + } + } + }, "tslib": { "version": "2.4.0", "resolved": "https://registry.npmjs.org/tslib/-/tslib-2.4.0.tgz", @@ -36629,17 +42346,17 @@ "integrity": "sha512-+5nt5AAniqsCnu2cEQQdpzCAh33kVx8n0VoFidKpB1dVVLAN/F+bgVOqOJqOnEnrhp222clB5p3vUlD+1QAnfg==" }, "type-check": { - "version": "0.3.2", - "resolved": "https://registry.npmjs.org/type-check/-/type-check-0.3.2.tgz", - "integrity": "sha512-ZCmOJdvOWDBYJlzAoFkC+Q0+bUyEOS1ltgp1MGU03fqHG+dbi9tBFU2Rd9QKiDZFAYrhPh2JUf7rZRIuHRKtOg==", + "version": "0.4.0", + "resolved": "https://registry.npmjs.org/type-check/-/type-check-0.4.0.tgz", + "integrity": "sha512-XleUoc9uwGXqjWwXaUTZAmzMcFZ5858QA2vvx1Ur5xIcixXIP+8LnFDgRplU30us6teqdlskFfu+ae4K79Ooew==", "requires": { - "prelude-ls": "~1.1.2" + "prelude-ls": "^1.2.1" } }, "type-fest": { - "version": "0.21.3", - "resolved": "https://registry.npmjs.org/type-fest/-/type-fest-0.21.3.tgz", - "integrity": "sha512-t0rzBq87m3fVcduHDUFhKmyyX+9eo6WQjZvf51Ea/M0Q7+T374Jp1aUiyUl0GKxp8M/OETVHSDvmkyPgvX+X2w==" + "version": "0.20.2", + "resolved": "https://registry.npmjs.org/type-fest/-/type-fest-0.20.2.tgz", + "integrity": "sha512-Ne+eE4r0/iWnpAxD852z3A+N0Bt5RN//NjJwRd2VFHEmrywxf5vsZlh4R6lixl6B+wz/8d+maTSAkN1FIkI3LQ==" }, "type-is": { "version": "1.6.18", @@ -36741,9 +42458,9 @@ "integrity": "sha512-i2R6zNFDwgEHJyQUtJEk0XFi1i0dPFn/oqjK3/vPCcDeJvW5NQ83V8QbicfF1SupOaB0h8ntgBC2YiE7dfyctQ==" }, "is-plain-obj": { - "version": "4.0.0", - "resolved": "https://registry.npmjs.org/is-plain-obj/-/is-plain-obj-4.0.0.tgz", - "integrity": "sha512-NXRbBtUdBioI73y/HmOhogw/U5msYPC9DAtGkJXeFcFWSFZw0mCUsPxk/snTuJHzNKA8kLBK4rH97RMB1BfCXw==" + "version": "4.1.0", + "resolved": 
"https://registry.npmjs.org/is-plain-obj/-/is-plain-obj-4.1.0.tgz", + "integrity": "sha512-+Pgi+vMuUNkJyExiMBt5IlFoMyKnr5zhJ4Uspz58WOhBF5QoIZkFyNHIbBAtHwzVAgk5RtndVNsDRN61/mmDqg==" } } }, @@ -36756,13 +42473,6 @@ "get-value": "^2.0.6", "is-extendable": "^0.1.1", "set-value": "^2.0.1" - }, - "dependencies": { - "is-extendable": { - "version": "0.1.1", - "resolved": "https://registry.npmjs.org/is-extendable/-/is-extendable-0.1.1.tgz", - "integrity": "sha512-5BMULNob1vgFX6EjQw5izWDxrecWK9AM72rugNr0TFldMOi0fj6Jk+zeKIt0xGj4cEfQIJth4w3OKWOJ4f+AFw==" - } } }, "uniq": { @@ -37158,6 +42868,24 @@ "integrity": "sha512-Un7MIEDdUC5gNpcGDV97op1Ywk748MpHcFTHoYs6qnj1Z3j7I53VG3nwZhKzoBZmbdRNnb6WRdFlwl7tSDuZGw==", "optional": true }, + "braces": { + "version": "2.3.2", + "resolved": "https://registry.npmjs.org/braces/-/braces-2.3.2.tgz", + "integrity": "sha512-aNdbnj9P8PjdXU4ybaWLK2IF3jc/EoDYbC7AazW6to3TRsfXxscC9UXOB5iDiEQrkyIbWp2SLQda4+QAa7nc3w==", + "optional": true, + "requires": { + "arr-flatten": "^1.1.0", + "array-unique": "^0.3.2", + "extend-shallow": "^2.0.1", + "fill-range": "^4.0.0", + "isobject": "^3.0.1", + "repeat-element": "^1.1.2", + "snapdragon": "^0.8.1", + "snapdragon-node": "^2.0.1", + "split-string": "^3.0.2", + "to-regex": "^3.0.1" + } + }, "chokidar": { "version": "2.1.8", "resolved": "https://registry.npmjs.org/chokidar/-/chokidar-2.1.8.tgz", @@ -37178,6 +42906,27 @@ "upath": "^1.1.1" } }, + "extend-shallow": { + "version": "2.0.1", + "resolved": "https://registry.npmjs.org/extend-shallow/-/extend-shallow-2.0.1.tgz", + "integrity": "sha512-zCnTtlxNoAiDc3gqY2aYAWFx7XWWiasuF2K8Me5WbN8otHKTUKBwjPtNpRs/rbUZm7KxWAaNj7P1a/p52GbVug==", + "optional": true, + "requires": { + "is-extendable": "^0.1.0" + } + }, + "fill-range": { + "version": "4.0.0", + "resolved": "https://registry.npmjs.org/fill-range/-/fill-range-4.0.0.tgz", + "integrity": "sha512-VcpLTWqWDiTerugjj8e3+esbg+skS3M9e54UuR3iCeIDMXCLTsAH8hTSzDQU/X6/6t3eYkOKoZSef2PlU6U1XQ==", + "optional": true, + 
"requires": { + "extend-shallow": "^2.0.1", + "is-number": "^3.0.0", + "repeat-string": "^1.6.1", + "to-regex-range": "^2.1.0" + } + }, "fsevents": { "version": "1.2.13", "resolved": "https://registry.npmjs.org/fsevents/-/fsevents-1.2.13.tgz", @@ -37218,6 +42967,63 @@ "binary-extensions": "^1.0.0" } }, + "is-number": { + "version": "3.0.0", + "resolved": "https://registry.npmjs.org/is-number/-/is-number-3.0.0.tgz", + "integrity": "sha512-4cboCqIpliH+mAvFNegjZQ4kgKc3ZUhQVr3HvWbSh5q3WH2v82ct+T2Y1hdU5Gdtorx/cLifQjqCbL7bpznLTg==", + "optional": true, + "requires": { + "kind-of": "^3.0.2" + } + }, + "micromatch": { + "version": "3.1.10", + "resolved": "https://registry.npmjs.org/micromatch/-/micromatch-3.1.10.tgz", + "integrity": "sha512-MWikgl9n9M3w+bpsY3He8L+w9eF9338xRl8IAO5viDizwSzziFEyUzo2xrrloB64ADbTf8uA8vRqqttDTOmccg==", + "optional": true, + "requires": { + "arr-diff": "^4.0.0", + "array-unique": "^0.3.2", + "braces": "^2.3.1", + "define-property": "^2.0.2", + "extend-shallow": "^3.0.2", + "extglob": "^2.0.4", + "fragment-cache": "^0.2.1", + "kind-of": "^6.0.2", + "nanomatch": "^1.2.9", + "object.pick": "^1.3.0", + "regex-not": "^1.0.0", + "snapdragon": "^0.8.1", + "to-regex": "^3.0.2" + }, + "dependencies": { + "extend-shallow": { + "version": "3.0.2", + "resolved": "https://registry.npmjs.org/extend-shallow/-/extend-shallow-3.0.2.tgz", + "integrity": "sha512-BwY5b5Ql4+qZoefgMj2NUmx+tehVTH/Kf4k1ZEtOHNFcm2wSxMRo992l6X3TIgni2eZVTZ85xMOjF31fwZAj6Q==", + "optional": true, + "requires": { + "assign-symbols": "^1.0.0", + "is-extendable": "^1.0.1" + } + }, + "is-extendable": { + "version": "1.0.1", + "resolved": "https://registry.npmjs.org/is-extendable/-/is-extendable-1.0.1.tgz", + "integrity": "sha512-arnXMxT1hhoKo9k1LZdmlNyJdDDfy2v0fXjFlmok4+i8ul/6WlbVge9bhM74OpNPQPMGUToDtz+KXa1PneJxOA==", + "optional": true, + "requires": { + "is-plain-object": "^2.0.4" + } + }, + "kind-of": { + "version": "6.0.3", + "resolved": 
"https://registry.npmjs.org/kind-of/-/kind-of-6.0.3.tgz", + "integrity": "sha512-dcS1ul+9tmeD95T+x28/ehLgd9mENa3LsvDTtzm3vyBEO7RPptvAD+t44WVXaUjTBRcrpFeFlC8WCruUR456hw==", + "optional": true + } + } + }, "readdirp": { "version": "2.2.1", "resolved": "https://registry.npmjs.org/readdirp/-/readdirp-2.2.1.tgz", @@ -37228,6 +43034,16 @@ "micromatch": "^3.1.10", "readable-stream": "^2.0.2" } + }, + "to-regex-range": { + "version": "2.1.1", + "resolved": "https://registry.npmjs.org/to-regex-range/-/to-regex-range-2.1.1.tgz", + "integrity": "sha512-ZZWNfCjUokXXDGXFpZehJIkZqq91BcULFq/Pi7M5i4JnxXdhMKAK682z8bCW3o8Hj1wuuzoKcW3DfVzaP6VuNg==", + "optional": true, + "requires": { + "is-number": "^3.0.0", + "repeat-string": "^1.6.1" + } } } }, @@ -37279,6 +43095,33 @@ "resolved": "https://registry.npmjs.org/acorn/-/acorn-6.4.2.tgz", "integrity": "sha512-XtGIhXwF8YM8bJhGxG5kXgjkEuNGLTkoYqVE+KMR+aspr4KGYmKYg7yUe3KghyQ9yheNwLnjmzh/7+gfDBmHCQ==" }, + "braces": { + "version": "2.3.2", + "resolved": "https://registry.npmjs.org/braces/-/braces-2.3.2.tgz", + "integrity": "sha512-aNdbnj9P8PjdXU4ybaWLK2IF3jc/EoDYbC7AazW6to3TRsfXxscC9UXOB5iDiEQrkyIbWp2SLQda4+QAa7nc3w==", + "requires": { + "arr-flatten": "^1.1.0", + "array-unique": "^0.3.2", + "extend-shallow": "^2.0.1", + "fill-range": "^4.0.0", + "isobject": "^3.0.1", + "repeat-element": "^1.1.2", + "snapdragon": "^0.8.1", + "snapdragon-node": "^2.0.1", + "split-string": "^3.0.2", + "to-regex": "^3.0.1" + }, + "dependencies": { + "extend-shallow": { + "version": "2.0.1", + "resolved": "https://registry.npmjs.org/extend-shallow/-/extend-shallow-2.0.1.tgz", + "integrity": "sha512-zCnTtlxNoAiDc3gqY2aYAWFx7XWWiasuF2K8Me5WbN8otHKTUKBwjPtNpRs/rbUZm7KxWAaNj7P1a/p52GbVug==", + "requires": { + "is-extendable": "^0.1.0" + } + } + } + }, "cacache": { "version": "12.0.4", "resolved": "https://registry.npmjs.org/cacache/-/cacache-12.0.4.tgz", @@ -37310,6 +43153,91 @@ "estraverse": "^4.1.1" } }, + "estraverse": { + "version": "4.3.0", + "resolved": 
"https://registry.npmjs.org/estraverse/-/estraverse-4.3.0.tgz", + "integrity": "sha512-39nnKffWz8xN1BU/2c79n9nB9HDzo0niYUqx6xyqUnyoAnQyyWpOTdZEeiCch8BBu515t4wp9ZmgVfVhn9EBpw==" + }, + "fill-range": { + "version": "4.0.0", + "resolved": "https://registry.npmjs.org/fill-range/-/fill-range-4.0.0.tgz", + "integrity": "sha512-VcpLTWqWDiTerugjj8e3+esbg+skS3M9e54UuR3iCeIDMXCLTsAH8hTSzDQU/X6/6t3eYkOKoZSef2PlU6U1XQ==", + "requires": { + "extend-shallow": "^2.0.1", + "is-number": "^3.0.0", + "repeat-string": "^1.6.1", + "to-regex-range": "^2.1.0" + }, + "dependencies": { + "extend-shallow": { + "version": "2.0.1", + "resolved": "https://registry.npmjs.org/extend-shallow/-/extend-shallow-2.0.1.tgz", + "integrity": "sha512-zCnTtlxNoAiDc3gqY2aYAWFx7XWWiasuF2K8Me5WbN8otHKTUKBwjPtNpRs/rbUZm7KxWAaNj7P1a/p52GbVug==", + "requires": { + "is-extendable": "^0.1.0" + } + } + } + }, + "is-number": { + "version": "3.0.0", + "resolved": "https://registry.npmjs.org/is-number/-/is-number-3.0.0.tgz", + "integrity": "sha512-4cboCqIpliH+mAvFNegjZQ4kgKc3ZUhQVr3HvWbSh5q3WH2v82ct+T2Y1hdU5Gdtorx/cLifQjqCbL7bpznLTg==", + "requires": { + "kind-of": "^3.0.2" + }, + "dependencies": { + "kind-of": { + "version": "3.2.2", + "resolved": "https://registry.npmjs.org/kind-of/-/kind-of-3.2.2.tgz", + "integrity": "sha512-NOW9QQXMoZGg/oqnVNoNTTIFEIid1627WCffUBJEdMxYApq7mNE7CpzucIPc+ZQg25Phej7IJSmX3hO+oblOtQ==", + "requires": { + "is-buffer": "^1.1.5" + } + } + } + }, + "kind-of": { + "version": "6.0.3", + "resolved": "https://registry.npmjs.org/kind-of/-/kind-of-6.0.3.tgz", + "integrity": "sha512-dcS1ul+9tmeD95T+x28/ehLgd9mENa3LsvDTtzm3vyBEO7RPptvAD+t44WVXaUjTBRcrpFeFlC8WCruUR456hw==" + }, + "lru-cache": { + "version": "5.1.1", + "resolved": "https://registry.npmjs.org/lru-cache/-/lru-cache-5.1.1.tgz", + "integrity": "sha512-KpNARQA3Iwv+jTA0utUVVbrh+Jlrr1Fv0e56GGzAFOXN7dk/FviaDW8LHmK52DlcH4WP2n6gI8vN1aesBFgo9w==", + "requires": { + "yallist": "^3.0.2" + } + }, + "micromatch": { + "version": "3.1.10", + 
"resolved": "https://registry.npmjs.org/micromatch/-/micromatch-3.1.10.tgz", + "integrity": "sha512-MWikgl9n9M3w+bpsY3He8L+w9eF9338xRl8IAO5viDizwSzziFEyUzo2xrrloB64ADbTf8uA8vRqqttDTOmccg==", + "requires": { + "arr-diff": "^4.0.0", + "array-unique": "^0.3.2", + "braces": "^2.3.1", + "define-property": "^2.0.2", + "extend-shallow": "^3.0.2", + "extglob": "^2.0.4", + "fragment-cache": "^0.2.1", + "kind-of": "^6.0.2", + "nanomatch": "^1.2.9", + "object.pick": "^1.3.0", + "regex-not": "^1.0.0", + "snapdragon": "^0.8.1", + "to-regex": "^3.0.2" + } + }, + "rimraf": { + "version": "2.7.1", + "resolved": "https://registry.npmjs.org/rimraf/-/rimraf-2.7.1.tgz", + "integrity": "sha512-uWjbaKIK3T1OSVptzX7Nl6PvQ3qAGtKEtVRjRuazjfL3Bx5eI409VZSqgND+4UNnmzLVdPj9FqFJNPqBZFve4w==", + "requires": { + "glob": "^7.1.3" + } + }, "schema-utils": { "version": "1.0.0", "resolved": "https://registry.npmjs.org/schema-utils/-/schema-utils-1.0.0.tgz", @@ -37343,6 +43271,20 @@ "webpack-sources": "^1.4.0", "worker-farm": "^1.7.0" } + }, + "to-regex-range": { + "version": "2.1.1", + "resolved": "https://registry.npmjs.org/to-regex-range/-/to-regex-range-2.1.1.tgz", + "integrity": "sha512-ZZWNfCjUokXXDGXFpZehJIkZqq91BcULFq/Pi7M5i4JnxXdhMKAK682z8bCW3o8Hj1wuuzoKcW3DfVzaP6VuNg==", + "requires": { + "is-number": "^3.0.0", + "repeat-string": "^1.6.1" + } + }, + "yallist": { + "version": "3.1.1", + "resolved": "https://registry.npmjs.org/yallist/-/yallist-3.1.1.tgz", + "integrity": "sha512-a4UGQaWPH59mOXUYnAG2ewncQS4i4F43Tv3JoAM+s2VDAmS9NsK8GpDMLrCHPksFT7h3K6TOoUNn2pb7RoXx4g==" } } }, @@ -37408,6 +43350,23 @@ "resolved": "https://registry.npmjs.org/binary-extensions/-/binary-extensions-1.13.1.tgz", "integrity": "sha512-Un7MIEDdUC5gNpcGDV97op1Ywk748MpHcFTHoYs6qnj1Z3j7I53VG3nwZhKzoBZmbdRNnb6WRdFlwl7tSDuZGw==" }, + "braces": { + "version": "2.3.2", + "resolved": "https://registry.npmjs.org/braces/-/braces-2.3.2.tgz", + "integrity": 
"sha512-aNdbnj9P8PjdXU4ybaWLK2IF3jc/EoDYbC7AazW6to3TRsfXxscC9UXOB5iDiEQrkyIbWp2SLQda4+QAa7nc3w==", + "requires": { + "arr-flatten": "^1.1.0", + "array-unique": "^0.3.2", + "extend-shallow": "^2.0.1", + "fill-range": "^4.0.0", + "isobject": "^3.0.1", + "repeat-element": "^1.1.2", + "snapdragon": "^0.8.1", + "snapdragon-node": "^2.0.1", + "split-string": "^3.0.2", + "to-regex": "^3.0.1" + } + }, "chokidar": { "version": "2.1.8", "resolved": "https://registry.npmjs.org/chokidar/-/chokidar-2.1.8.tgz", @@ -37427,6 +43386,25 @@ "upath": "^1.1.1" } }, + "extend-shallow": { + "version": "2.0.1", + "resolved": "https://registry.npmjs.org/extend-shallow/-/extend-shallow-2.0.1.tgz", + "integrity": "sha512-zCnTtlxNoAiDc3gqY2aYAWFx7XWWiasuF2K8Me5WbN8otHKTUKBwjPtNpRs/rbUZm7KxWAaNj7P1a/p52GbVug==", + "requires": { + "is-extendable": "^0.1.0" + } + }, + "fill-range": { + "version": "4.0.0", + "resolved": "https://registry.npmjs.org/fill-range/-/fill-range-4.0.0.tgz", + "integrity": "sha512-VcpLTWqWDiTerugjj8e3+esbg+skS3M9e54UuR3iCeIDMXCLTsAH8hTSzDQU/X6/6t3eYkOKoZSef2PlU6U1XQ==", + "requires": { + "extend-shallow": "^2.0.1", + "is-number": "^3.0.0", + "repeat-string": "^1.6.1", + "to-regex-range": "^2.1.0" + } + }, "fsevents": { "version": "1.2.13", "resolved": "https://registry.npmjs.org/fsevents/-/fsevents-1.2.13.tgz", @@ -37474,6 +43452,58 @@ "binary-extensions": "^1.0.0" } }, + "is-number": { + "version": "3.0.0", + "resolved": "https://registry.npmjs.org/is-number/-/is-number-3.0.0.tgz", + "integrity": "sha512-4cboCqIpliH+mAvFNegjZQ4kgKc3ZUhQVr3HvWbSh5q3WH2v82ct+T2Y1hdU5Gdtorx/cLifQjqCbL7bpznLTg==", + "requires": { + "kind-of": "^3.0.2" + } + }, + "micromatch": { + "version": "3.1.10", + "resolved": "https://registry.npmjs.org/micromatch/-/micromatch-3.1.10.tgz", + "integrity": "sha512-MWikgl9n9M3w+bpsY3He8L+w9eF9338xRl8IAO5viDizwSzziFEyUzo2xrrloB64ADbTf8uA8vRqqttDTOmccg==", + "requires": { + "arr-diff": "^4.0.0", + "array-unique": "^0.3.2", + "braces": "^2.3.1", + 
"define-property": "^2.0.2", + "extend-shallow": "^3.0.2", + "extglob": "^2.0.4", + "fragment-cache": "^0.2.1", + "kind-of": "^6.0.2", + "nanomatch": "^1.2.9", + "object.pick": "^1.3.0", + "regex-not": "^1.0.0", + "snapdragon": "^0.8.1", + "to-regex": "^3.0.2" + }, + "dependencies": { + "extend-shallow": { + "version": "3.0.2", + "resolved": "https://registry.npmjs.org/extend-shallow/-/extend-shallow-3.0.2.tgz", + "integrity": "sha512-BwY5b5Ql4+qZoefgMj2NUmx+tehVTH/Kf4k1ZEtOHNFcm2wSxMRo992l6X3TIgni2eZVTZ85xMOjF31fwZAj6Q==", + "requires": { + "assign-symbols": "^1.0.0", + "is-extendable": "^1.0.1" + } + }, + "is-extendable": { + "version": "1.0.1", + "resolved": "https://registry.npmjs.org/is-extendable/-/is-extendable-1.0.1.tgz", + "integrity": "sha512-arnXMxT1hhoKo9k1LZdmlNyJdDDfy2v0fXjFlmok4+i8ul/6WlbVge9bhM74OpNPQPMGUToDtz+KXa1PneJxOA==", + "requires": { + "is-plain-object": "^2.0.4" + } + }, + "kind-of": { + "version": "6.0.3", + "resolved": "https://registry.npmjs.org/kind-of/-/kind-of-6.0.3.tgz", + "integrity": "sha512-dcS1ul+9tmeD95T+x28/ehLgd9mENa3LsvDTtzm3vyBEO7RPptvAD+t44WVXaUjTBRcrpFeFlC8WCruUR456hw==" + } + } + }, "readdirp": { "version": "2.2.1", "resolved": "https://registry.npmjs.org/readdirp/-/readdirp-2.2.1.tgz", @@ -37494,6 +43524,11 @@ "ajv-keywords": "^3.1.0" } }, + "semver": { + "version": "6.3.0", + "resolved": "https://registry.npmjs.org/semver/-/semver-6.3.0.tgz", + "integrity": "sha512-b39TBaTSfV6yBrapU89p5fKekE2m/NwnDocOVruQFS1/veMgdzuPcnOM34M6CwxW8jH/lxEa5rBoDeUwu5HHTw==" + }, "strip-ansi": { "version": "3.0.1", "resolved": "https://registry.npmjs.org/strip-ansi/-/strip-ansi-3.0.1.tgz", @@ -37510,6 +43545,15 @@ "has-flag": "^3.0.0" } }, + "to-regex-range": { + "version": "2.1.1", + "resolved": "https://registry.npmjs.org/to-regex-range/-/to-regex-range-2.1.1.tgz", + "integrity": "sha512-ZZWNfCjUokXXDGXFpZehJIkZqq91BcULFq/Pi7M5i4JnxXdhMKAK682z8bCW3o8Hj1wuuzoKcW3DfVzaP6VuNg==", + "requires": { + "is-number": "^3.0.0", + "repeat-string": 
"^1.6.1" + } + }, "ws": { "version": "6.2.2", "resolved": "https://registry.npmjs.org/ws/-/ws-6.2.2.tgz", @@ -37610,9 +43654,9 @@ } }, "which": { - "version": "1.3.1", - "resolved": "https://registry.npmjs.org/which/-/which-1.3.1.tgz", - "integrity": "sha512-HxJdYWq1MTIQbJ3nw0cqssHoTNU267KlrDuGZ1WYlxDStUtKUhOaJmh112/TZmHxxUfuJqPXSOm7tDyas0OSIQ==", + "version": "2.0.2", + "resolved": "https://registry.npmjs.org/which/-/which-2.0.2.tgz", + "integrity": "sha512-BLI3Tl1TW3Pvl70l3yq3Y64i+awpwXqsGBYWkkqMtnbXgrMD+yj7rhW0kuEDxzJaYXGjEW5ogapKNMEKNMjibA==", "requires": { "isexe": "^2.0.0" } @@ -37826,6 +43870,11 @@ "strip-ansi": "^5.0.0" }, "dependencies": { + "ansi-regex": { + "version": "4.1.1", + "resolved": "https://registry.npmjs.org/ansi-regex/-/ansi-regex-4.1.1.tgz", + "integrity": "sha512-ILlv4k/3f6vfQ4OoP2AGvirOktlQ98ZEL1k9FaQjxa3L1abBgbuTDAdPOpvbGncC0BTVQrl+OM8xZGK6tWXt7g==" + }, "ansi-styles": { "version": "3.2.1", "resolved": "https://registry.npmjs.org/ansi-styles/-/ansi-styles-3.2.1.tgz", @@ -37847,6 +43896,11 @@ "resolved": "https://registry.npmjs.org/color-name/-/color-name-1.1.3.tgz", "integrity": "sha512-72fSenhMw2HZMTVHeCA9KCmpEIbzWiQsjN+BHcBbS9vr1mtt+vJjPdksIBNUmKAW8TFUDPJK5SUU3QhE9NEXDw==" }, + "emoji-regex": { + "version": "7.0.3", + "resolved": "https://registry.npmjs.org/emoji-regex/-/emoji-regex-7.0.3.tgz", + "integrity": "sha512-CwBLREIQ7LvYFB0WyRvwhq5N5qPhc6PMjD6bYggFlI5YyDgl+0vxq5VHbMOFqLg7hfWzmu8T5Z1QofhmTIhItA==" + }, "is-fullwidth-code-point": { "version": "2.0.0", "resolved": "https://registry.npmjs.org/is-fullwidth-code-point/-/is-fullwidth-code-point-2.0.0.tgz", @@ -37861,6 +43915,14 @@ "is-fullwidth-code-point": "^2.0.0", "strip-ansi": "^5.1.0" } + }, + "strip-ansi": { + "version": "5.2.0", + "resolved": "https://registry.npmjs.org/strip-ansi/-/strip-ansi-5.2.0.tgz", + "integrity": "sha512-DuRs1gKbBqsMKIZlrffwlug8MHkcnpjs5VPmL1PAh+mA30U0DTotfDZ0d2UUsXpPmPmMMJ6W773MaA3J+lbiWA==", + "requires": { + "ansi-regex": "^4.1.0" + } } } }, @@ 
-37924,9 +43986,9 @@ "integrity": "sha512-JKhqTOwSrqNA1NY5lSztJ1GrBiUodLMmIZuLiDaMRJ+itFd+ABVE8XBjOvIWL+rSqNDC74LCSFmlb/U4UZ4hJQ==" }, "yallist": { - "version": "3.1.1", - "resolved": "https://registry.npmjs.org/yallist/-/yallist-3.1.1.tgz", - "integrity": "sha512-a4UGQaWPH59mOXUYnAG2ewncQS4i4F43Tv3JoAM+s2VDAmS9NsK8GpDMLrCHPksFT7h3K6TOoUNn2pb7RoXx4g==" + "version": "4.0.0", + "resolved": "https://registry.npmjs.org/yallist/-/yallist-4.0.0.tgz", + "integrity": "sha512-3wdGidZyq5PB084XLES5TpOSRA3wjXAlIWMhum2kRcv/41Sn2emQ0dycQW4uZXLejwKvg6EsvbdlVL+FYEct7A==" }, "yaml": { "version": "1.10.2", @@ -37959,11 +44021,59 @@ "yargs-parser": "^13.1.2" }, "dependencies": { + "ansi-regex": { + "version": "4.1.1", + "resolved": "https://registry.npmjs.org/ansi-regex/-/ansi-regex-4.1.1.tgz", + "integrity": "sha512-ILlv4k/3f6vfQ4OoP2AGvirOktlQ98ZEL1k9FaQjxa3L1abBgbuTDAdPOpvbGncC0BTVQrl+OM8xZGK6tWXt7g==" + }, + "emoji-regex": { + "version": "7.0.3", + "resolved": "https://registry.npmjs.org/emoji-regex/-/emoji-regex-7.0.3.tgz", + "integrity": "sha512-CwBLREIQ7LvYFB0WyRvwhq5N5qPhc6PMjD6bYggFlI5YyDgl+0vxq5VHbMOFqLg7hfWzmu8T5Z1QofhmTIhItA==" + }, + "find-up": { + "version": "3.0.0", + "resolved": "https://registry.npmjs.org/find-up/-/find-up-3.0.0.tgz", + "integrity": "sha512-1yD6RmLI1XBfxugvORwlck6f75tYL+iR0jqwsOrOxMZyGYqUuDhJ0l4AXdO1iX/FTs9cBAMEk1gWSEx1kSbylg==", + "requires": { + "locate-path": "^3.0.0" + } + }, "is-fullwidth-code-point": { "version": "2.0.0", "resolved": "https://registry.npmjs.org/is-fullwidth-code-point/-/is-fullwidth-code-point-2.0.0.tgz", "integrity": "sha512-VHskAKYM8RfSFXwee5t5cbN5PZeq1Wrh6qd5bkyiXIf6UQcN6w/A0eXM9r6t8d+GYOh+o6ZhiEnb88LN/Y8m2w==" }, + "locate-path": { + "version": "3.0.0", + "resolved": "https://registry.npmjs.org/locate-path/-/locate-path-3.0.0.tgz", + "integrity": "sha512-7AO748wWnIhNqAuaty2ZWHkQHRSNfPVIsPIfwEOWO22AmaoVrWavlOcMR5nzTLNYvp36X220/maaRsrec1G65A==", + "requires": { + "p-locate": "^3.0.0", + "path-exists": "^3.0.0" + } + }, + 
"p-limit": { + "version": "2.3.0", + "resolved": "https://registry.npmjs.org/p-limit/-/p-limit-2.3.0.tgz", + "integrity": "sha512-//88mFWSJx8lxCzwdAABTJL2MyWB12+eIY7MDL2SqLmAkeKU9qxRvWuSyTjm3FUmpBEMuFfckAIqEaVGUDxb6w==", + "requires": { + "p-try": "^2.0.0" + } + }, + "p-locate": { + "version": "3.0.0", + "resolved": "https://registry.npmjs.org/p-locate/-/p-locate-3.0.0.tgz", + "integrity": "sha512-x+12w/To+4GFfgJhBEpiDcLozRJGegY+Ei7/z0tSLkMmxGZNybVMSfWj9aJn8Z5Fc7dBUNJOOVgPv2H7IwulSQ==", + "requires": { + "p-limit": "^2.0.0" + } + }, + "p-try": { + "version": "2.2.0", + "resolved": "https://registry.npmjs.org/p-try/-/p-try-2.2.0.tgz", + "integrity": "sha512-R4nPAVTAU0B9D35/Gk3uJf/7XYbQcyohSKdvAxIRSNghFl4e71hVoGnBNQz9cWaXxO2I10KTC+3jMdvvoKw6dQ==" + }, "string-width": { "version": "3.1.0", "resolved": "https://registry.npmjs.org/string-width/-/string-width-3.1.0.tgz", @@ -37973,6 +44083,14 @@ "is-fullwidth-code-point": "^2.0.0", "strip-ansi": "^5.1.0" } + }, + "strip-ansi": { + "version": "5.2.0", + "resolved": "https://registry.npmjs.org/strip-ansi/-/strip-ansi-5.2.0.tgz", + "integrity": "sha512-DuRs1gKbBqsMKIZlrffwlug8MHkcnpjs5VPmL1PAh+mA30U0DTotfDZ0d2UUsXpPmPmMMJ6W773MaA3J+lbiWA==", + "requires": { + "ansi-regex": "^4.1.0" + } } } }, diff --git a/dashboard/origin-mlx/package.json b/dashboard/origin-mlx/package.json index edfbf658..4387e65b 100644 --- a/dashboard/origin-mlx/package.json +++ b/dashboard/origin-mlx/package.json @@ -21,6 +21,7 @@ "codemirror": "^5.45.0", "dagre": "https://registry.npmjs.org/dagre/-/dagre-0.8.4.tgz", "deepmerge": "^4.0.0", + "eslint-config-airbnb": "^19.0.4", "fuzzysort": "^1.1.4", "js-cookie": "^2.2.1", "js-yaml": "^3.12.2", @@ -29,6 +30,7 @@ "react-dom": "^16.13.1", "react-gtm-module": "^2.0.11", "react-markdown": "^8.0.0", + "react-router": "^6.3.0", "react-router-dom": "^4.3.1", "react-scripts": "^3.0.1", "react-sidebar": "^3.0.2", @@ -48,7 +50,9 @@ "start-dev": "HTTPS=false REACT_APP_BRAND= REACT_APP_API= REACT_APP_KFP= 
REACT_APP_NBVIEWER_API= REACT_APP_DISABLE_LOGIN= react-scripts start", "build": "react-scripts build", "test": "react-scripts test", - "eject": "react-scripts eject" + "eject": "react-scripts eject", + "lint": "eslint \"src/**/*.{js,jsx,tsx}\"", + "lint:fix": "eslint \"src/**/*.{js,jsx,tsx}\" --fix" }, "eslintConfig": { "extends": "react-app" }, @@ -58,5 +62,15 @@ "not dead", "not ie <= 11", "not op_mini all" - ] + ], + "devDependencies": { + "@typescript-eslint/eslint-plugin": "^5.28.0", + "@typescript-eslint/parser": "^5.28.0", + "eslint": "^8.17.0", + "eslint-config-airbnb-typescript": "^17.0.0", + "eslint-plugin-import": "^2.26.0", + "eslint-plugin-jsx-a11y": "^6.5.1", + "eslint-plugin-react": "^7.30.0", + "eslint-plugin-react-hooks": "^4.6.0" + } } From f3cde155f4e214a02e7cf425131abeb22a3d959e Mon Sep 17 00:00:00 2001 From: BluThaitanium Date: Mon, 27 Jun 2022 12:56:39 -0400 Subject: [PATCH 03/11] linted python files Signed-off-by: BluThaitanium --- api/client/README.md | 8 +- api/client/docs/ApplicationSettingsApi.md | 18 +- api/client/docs/CatalogServiceApi.md | 18 +- api/client/docs/ComponentServiceApi.md | 78 +- api/client/docs/CredentialServiceApi.md | 24 +- api/client/docs/DatasetServiceApi.md | 74 +- api/client/docs/HealthCheckApi.md | 6 +- api/client/docs/InferenceServiceApi.md | 24 +- api/client/docs/ModelServiceApi.md | 76 +- api/client/docs/NotebookServiceApi.md | 78 +- api/client/docs/PipelineServiceApi.md | 70 +- api/client/setup.py | 11 +- api/client/swagger_client/__init__.py | 27 +- .../api/application_settings_api.py | 179 ++-- .../swagger_client/api/catalog_service_api.py | 178 ++-- .../api/component_service_api.py | 771 +++++++++-------- .../api/credential_service_api.py | 208 ++--- .../swagger_client/api/dataset_service_api.py | 752 +++++++++-------- .../swagger_client/api/health_check_api.py | 50 +- .../api/inference_service_api.py | 247 +++--- .../swagger_client/api/model_service_api.py | 780 ++++++++++-------- .../api/notebook_service_api.py |
764 +++++++++-------- .../api/pipeline_service_api.py | 707 +++++++++------- api/client/swagger_client/api_client.py | 408 +++++---- api/client/swagger_client/configuration.py | 34 +- api/client/swagger_client/models/__init__.py | 26 +- api/client/swagger_client/models/any_value.py | 30 +- .../swagger_client/models/api_access_token.py | 42 +- api/client/swagger_client/models/api_asset.py | 76 +- .../models/api_catalog_upload.py | 58 +- .../models/api_catalog_upload_error.py | 48 +- .../models/api_catalog_upload_item.py | 38 +- .../models/api_catalog_upload_response.py | 78 +- .../swagger_client/models/api_component.py | 86 +- .../swagger_client/models/api_credential.py | 66 +- .../swagger_client/models/api_dataset.py | 106 ++- .../models/api_generate_code_response.py | 32 +- .../api_generate_model_code_response.py | 32 +- .../models/api_get_template_response.py | 38 +- .../models/api_inferenceservice.py | 52 +- .../models/api_list_catalog_items_response.py | 63 +- .../models/api_list_catalog_upload_errors.py | 34 +- .../models/api_list_components_response.py | 40 +- .../models/api_list_credentials_response.py | 40 +- .../models/api_list_datasets_response.py | 40 +- .../api_list_inferenceservices_response.py | 44 +- .../models/api_list_models_response.py | 40 +- .../models/api_list_notebooks_response.py | 40 +- .../models/api_list_pipelines_response.py | 44 +- .../swagger_client/models/api_metadata.py | 36 +- api/client/swagger_client/models/api_model.py | 139 ++-- .../models/api_model_framework.py | 44 +- .../models/api_model_framework_runtimes.py | 34 +- .../swagger_client/models/api_model_script.py | 52 +- .../swagger_client/models/api_notebook.py | 95 ++- .../swagger_client/models/api_parameter.py | 48 +- .../swagger_client/models/api_pipeline.py | 72 +- .../models/api_pipeline_custom.py | 56 +- .../models/api_pipeline_custom_run_payload.py | 36 +- .../swagger_client/models/api_pipeline_dag.py | 32 +- .../models/api_pipeline_extended.py | 68 +- 
.../models/api_pipeline_extension.py | 44 +- .../models/api_pipeline_inputs.py | 32 +- .../models/api_pipeline_task.py | 65 +- .../models/api_pipeline_task_arguments.py | 32 +- .../models/api_run_code_response.py | 38 +- .../swagger_client/models/api_settings.py | 40 +- .../models/api_settings_section.py | 40 +- .../swagger_client/models/api_status.py | 38 +- api/client/swagger_client/models/api_url.py | 32 +- .../swagger_client/models/dictionary.py | 30 +- .../swagger_client/models/protobuf_any.py | 43 +- api/client/swagger_client/rest.py | 302 ++++--- api/client/test/test_any_value.py | 14 +- api/client/test/test_api_access_token.py | 10 +- api/client/test/test_api_asset.py | 10 +- api/client/test/test_api_catalog_upload.py | 10 +- .../test/test_api_catalog_upload_error.py | 12 +- .../test/test_api_catalog_upload_item.py | 12 +- .../test/test_api_catalog_upload_response.py | 12 +- api/client/test/test_api_component.py | 14 +- api/client/test/test_api_credential.py | 14 +- api/client/test/test_api_dataset.py | 10 +- .../test/test_api_generate_code_response.py | 16 +- .../test_api_generate_model_code_response.py | 16 +- .../test/test_api_get_template_response.py | 16 +- api/client/test/test_api_inferenceservice.py | 14 +- .../test_api_list_catalog_items_response.py | 12 +- .../test_api_list_catalog_upload_errors.py | 12 +- .../test/test_api_list_components_response.py | 16 +- .../test_api_list_credentials_response.py | 16 +- .../test/test_api_list_datasets_response.py | 12 +- ...est_api_list_inferenceservices_response.py | 16 +- .../test/test_api_list_models_response.py | 18 +- .../test/test_api_list_notebooks_response.py | 16 +- .../test/test_api_list_pipelines_response.py | 16 +- api/client/test/test_api_metadata.py | 14 +- api/client/test/test_api_model.py | 16 +- api/client/test/test_api_model_framework.py | 14 +- .../test/test_api_model_framework_runtimes.py | 16 +- api/client/test/test_api_model_script.py | 14 +- api/client/test/test_api_notebook.py | 14 +- 
api/client/test/test_api_parameter.py | 14 +- api/client/test/test_api_pipeline.py | 14 +- api/client/test/test_api_pipeline_custom.py | 14 +- .../test_api_pipeline_custom_run_payload.py | 16 +- api/client/test/test_api_pipeline_dag.py | 14 +- api/client/test/test_api_pipeline_extended.py | 16 +- .../test/test_api_pipeline_extension.py | 16 +- api/client/test/test_api_pipeline_inputs.py | 14 +- api/client/test/test_api_pipeline_task.py | 14 +- .../test/test_api_pipeline_task_arguments.py | 16 +- api/client/test/test_api_run_code_response.py | 16 +- api/client/test/test_api_settings.py | 14 +- api/client/test/test_api_settings_section.py | 14 +- api/client/test/test_api_status.py | 14 +- api/client/test/test_api_url.py | 14 +- .../test/test_application_settings_api.py | 30 +- api/client/test/test_catalog_service_api.py | 20 +- api/client/test/test_component_service_api.py | 60 +- .../test/test_credential_service_api.py | 28 +- api/client/test/test_dataset_service_api.py | 52 +- api/client/test/test_dictionary.py | 14 +- api/client/test/test_health_check_api.py | 14 +- api/client/test/test_inference_service_api.py | 20 +- api/client/test/test_model_service_api.py | 58 +- api/client/test/test_notebook_service_api.py | 60 +- api/client/test/test_pipeline_service_api.py | 48 +- api/client/test/test_protobuf_any.py | 14 +- api/examples/catalog_api.py | 101 ++- api/examples/components_api.py | 197 +++-- api/examples/credentials_api.py | 75 +- api/examples/dataset_api.py | 169 ++-- api/examples/models_api.py | 192 +++-- api/examples/notebooks_api.py | 216 +++-- api/examples/pipelines_api.py | 282 +++++-- api/examples/runs_api.py | 93 ++- api/examples/settings_api.py | 53 +- api/server/setup.py | 8 +- api/server/swagger_server/__main__.py | 40 +- .../code_templates/run_component.TEMPLATE.py | 4 +- .../code_templates/run_dataset.TEMPLATE.py | 4 +- .../code_templates/run_notebook.TEMPLATE.py | 2 +- .../run_notebook_with_dataset.TEMPLATE.py | 2 +- 
.../code_templates/run_pipeline.TEMPLATE.py | 4 +- .../serve_kfserving.TEMPLATE.py | 2 +- .../code_templates/serve_knative.TEMPLATE.py | 2 +- .../serve_kubernetes.TEMPLATE.py | 2 +- .../code_templates/train_watsonml.TEMPLATE.py | 2 +- .../train_watsonml_w_credentials.TEMPLATE.py | 2 +- .../application_settings_controller.py | 14 +- .../controllers/catalog_service_controller.py | 26 +- .../component_service_controller.py | 56 +- .../credential_service_controller.py | 28 +- .../controllers/dataset_service_controller.py | 56 +- .../controllers/health_check_controller.py | 8 +- .../inference_service_controller.py | 38 +- .../controllers/model_service_controller.py | 56 +- .../notebook_service_controller.py | 52 +- .../pipeline_service_controller.py | 60 +- .../controllers_impl/__init__.py | 38 +- .../application_settings_controller_impl.py | 24 +- .../catalog_service_controller_impl.py | 113 ++- .../component_service_controller_impl.py | 192 +++-- .../credential_service_controller_impl.py | 76 +- .../dataset_service_controller_impl.py | 164 ++-- .../health_check_controller_impl.py | 4 +- .../inference_service_controller_impl.py | 67 +- .../model_service_controller_impl.py | 245 ++++-- .../notebook_service_controller_impl.py | 289 ++++--- .../pipeline_service_controller_impl.py | 264 ++++-- .../data_access/minio_client.py | 92 ++- .../data_access/mysql_client.py | 257 ++++-- api/server/swagger_server/encoder.py | 2 +- .../gateways/kfserving_client.py | 32 +- .../gateways/kubeflow_pipeline_service.py | 581 +++++++++---- .../gateways/kubernetes_service.py | 52 +- api/server/swagger_server/models/__init__.py | 26 +- api/server/swagger_server/models/any_value.py | 14 +- .../swagger_server/models/api_access_token.py | 24 +- api/server/swagger_server/models/api_asset.py | 56 +- .../models/api_catalog_upload.py | 38 +- .../models/api_catalog_upload_error.py | 32 +- .../models/api_catalog_upload_item.py | 20 +- .../models/api_catalog_upload_response.py | 67 +- 
.../swagger_server/models/api_component.py | 66 +- .../swagger_server/models/api_credential.py | 46 +- .../swagger_server/models/api_dataset.py | 86 +- .../models/api_generate_code_response.py | 14 +- .../api_generate_model_code_response.py | 14 +- .../models/api_get_template_response.py | 20 +- .../models/api_inferenceservice.py | 36 +- .../models/api_list_catalog_items_response.py | 43 +- .../models/api_list_catalog_upload_errors.py | 17 +- .../models/api_list_components_response.py | 23 +- .../models/api_list_credentials_response.py | 23 +- .../models/api_list_datasets_response.py | 23 +- .../api_list_inferenceservices_response.py | 27 +- .../models/api_list_models_response.py | 23 +- .../models/api_list_notebooks_response.py | 23 +- .../models/api_list_pipelines_response.py | 27 +- .../swagger_server/models/api_metadata.py | 23 +- api/server/swagger_server/models/api_model.py | 119 ++- .../models/api_model_framework.py | 31 +- .../models/api_model_framework_runtimes.py | 16 +- .../swagger_server/models/api_model_script.py | 35 +- .../swagger_server/models/api_notebook.py | 75 +- .../swagger_server/models/api_parameter.py | 32 +- .../swagger_server/models/api_pipeline.py | 54 +- .../models/api_pipeline_custom.py | 40 +- .../models/api_pipeline_custom_run_payload.py | 22 +- .../swagger_server/models/api_pipeline_dag.py | 14 +- .../models/api_pipeline_extended.py | 75 +- .../models/api_pipeline_extension.py | 28 +- .../models/api_pipeline_inputs.py | 14 +- .../models/api_pipeline_task.py | 45 +- .../models/api_pipeline_task_arguments.py | 14 +- .../models/api_run_code_response.py | 21 +- .../swagger_server/models/api_settings.py | 22 +- .../models/api_settings_section.py | 27 +- .../swagger_server/models/api_status.py | 22 +- api/server/swagger_server/models/api_url.py | 14 +- .../swagger_server/models/base_model_.py | 32 +- .../swagger_server/models/dictionary.py | 14 +- .../swagger_server/models/protobuf_any.py | 30 +- api/server/swagger_server/test/__init__.py 
| 9 +- .../test_application_settings_controller.py | 55 +- .../test/test_catalog_service_controller.py | 55 +- .../test/test_component_service_controller.py | 212 +++-- .../test_credential_service_controller.py | 54 +- .../test/test_dataset_service_controller.py | 185 ++--- .../test/test_health_check_controller.py | 24 +- .../test/test_inference_service_controller.py | 46 +- .../test/test_model_service_controller.py | 213 +++-- .../test/test_notebook_service_controller.py | 208 ++--- .../test/test_pipeline_service_controller.py | 163 ++-- api/server/swagger_server/util.py | 64 +- bootstrapper/start.py | 196 +++-- .../src/components/Detail/DatasetDetail.tsx | 2 +- .../src/components/Detail/KFServingDetail.tsx | 2 +- .../src/components/Detail/PipelineDetail.tsx | 2 +- .../src/components/UploadButton.tsx | 2 +- dashboard/origin-mlx/src/lib/api/artifacts.ts | 2 +- .../src/pages/KFServingDetailPage.tsx | 2 +- .../src/pages/KFServingUploadPage.tsx | 2 +- .../origin-mlx/src/pages/MetaDeletePage.tsx | 2 +- temp_run.sh | 13 + .../python/regenerate_catalog_upload_json.py | 31 +- tools/python/update_doc_table.py | 28 +- tools/python/verify_doc_links.py | 121 ++- tools/python/verify_npm_packages.py | 21 +- 251 files changed, 9795 insertions(+), 7135 deletions(-) create mode 100644 temp_run.sh diff --git a/api/client/README.md b/api/client/README.md index e9829a6e..e906d13c 100644 --- a/api/client/README.md +++ b/api/client/README.md @@ -32,7 +32,7 @@ pip install "git+https://github.com/machine-learning-exchange/mlx.git@main#egg=m Then import the package: ```python -import swagger_client +import swagger_client # noqa: F401 ``` @@ -43,9 +43,9 @@ Please follow the [installation procedure](#installation--usage) and then run th ```python from __future__ import print_function import time -import swagger_client -from swagger_client.rest import ApiException -from pprint import pprint +import swagger_client # noqa: F401 +from swagger_client.rest import ApiException # noqa: F401 +from 
pprint import pprint # noqa: F401 # create an instance of the API class api_instance = swagger_client.ComponentServiceApi() diff --git a/api/client/docs/ApplicationSettingsApi.md b/api/client/docs/ApplicationSettingsApi.md index 12773b06..4f92d3ad 100644 --- a/api/client/docs/ApplicationSettingsApi.md +++ b/api/client/docs/ApplicationSettingsApi.md @@ -20,9 +20,9 @@ Returns the application settings. ```python from __future__ import print_function import time -import swagger_client -from swagger_client.rest import ApiException -from pprint import pprint +import swagger_client # noqa: F401 +from swagger_client.rest import ApiException # noqa: F401 +from pprint import pprint # noqa: F401 # create an instance of the API class api_instance = swagger_client.ApplicationSettingsApi() @@ -63,9 +63,9 @@ Modify one or more of the application settings. ```python from __future__ import print_function import time -import swagger_client -from swagger_client.rest import ApiException -from pprint import pprint +import swagger_client # noqa: F401 +from swagger_client.rest import ApiException # noqa: F401 +from pprint import pprint # noqa: F401 # create an instance of the API class api_instance = swagger_client.ApplicationSettingsApi() @@ -110,9 +110,9 @@ Set and store the application settings. 
```python from __future__ import print_function import time -import swagger_client -from swagger_client.rest import ApiException -from pprint import pprint +import swagger_client # noqa: F401 +from swagger_client.rest import ApiException # noqa: F401 +from pprint import pprint # noqa: F401 # create an instance of the API class api_instance = swagger_client.ApplicationSettingsApi() diff --git a/api/client/docs/CatalogServiceApi.md b/api/client/docs/CatalogServiceApi.md index 24aa7733..0c118cc6 100644 --- a/api/client/docs/CatalogServiceApi.md +++ b/api/client/docs/CatalogServiceApi.md @@ -18,9 +18,9 @@ Method | HTTP request | Description ```python from __future__ import print_function import time -import swagger_client -from swagger_client.rest import ApiException -from pprint import pprint +import swagger_client # noqa: F401 +from swagger_client.rest import ApiException # noqa: F401 +from pprint import pprint # noqa: F401 # create an instance of the API class api_instance = swagger_client.CatalogServiceApi() @@ -69,9 +69,9 @@ No authorization required ```python from __future__ import print_function import time -import swagger_client -from swagger_client.rest import ApiException -from pprint import pprint +import swagger_client # noqa: F401 +from swagger_client.rest import ApiException # noqa: F401 +from pprint import pprint # noqa: F401 # create an instance of the API class api_instance = swagger_client.CatalogServiceApi() @@ -116,9 +116,9 @@ No authorization required ```python from __future__ import print_function import time -import swagger_client -from swagger_client.rest import ApiException -from pprint import pprint +import swagger_client # noqa: F401 +from swagger_client.rest import ApiException # noqa: F401 +from pprint import pprint # noqa: F401 # create an instance of the API class api_instance = swagger_client.CatalogServiceApi() diff --git a/api/client/docs/ComponentServiceApi.md b/api/client/docs/ComponentServiceApi.md index 758aec83..d935cade 100644 
--- a/api/client/docs/ComponentServiceApi.md +++ b/api/client/docs/ComponentServiceApi.md @@ -28,9 +28,9 @@ Method | HTTP request | Description ```python from __future__ import print_function import time -import swagger_client -from swagger_client.rest import ApiException -from pprint import pprint +import swagger_client # noqa: F401 +from swagger_client.rest import ApiException # noqa: F401 +from pprint import pprint # noqa: F401 # create an instance of the API class api_instance = swagger_client.ComponentServiceApi() @@ -72,9 +72,9 @@ No authorization required ```python from __future__ import print_function import time -import swagger_client -from swagger_client.rest import ApiException -from pprint import pprint +import swagger_client # noqa: F401 +from swagger_client.rest import ApiException # noqa: F401 +from pprint import pprint # noqa: F401 # create an instance of the API class api_instance = swagger_client.ComponentServiceApi() @@ -116,9 +116,9 @@ No authorization required ### Example ```python from __future__ import print_function -import swagger_client -from swagger_client.rest import ApiException -from pprint import pprint +import swagger_client # noqa: F401 +from swagger_client.rest import ApiException # noqa: F401 +from pprint import pprint # noqa: F401 # create an instance of the API class api_instance = swagger_client.ComponentServiceApi() @@ -159,9 +159,9 @@ Returns the component artifacts compressed into a .tgz (.tar.gz) file. 
### Example ```python from __future__ import print_function -import swagger_client -from swagger_client.rest import ApiException -from urllib3.response import HTTPResponse +import swagger_client # noqa: F401 +from swagger_client.rest import ApiException # noqa: F401 +from urllib3.response import HTTPResponse # noqa: F401 # create an instance of the API class @@ -214,9 +214,9 @@ Generate sample code to use component in a pipeline ### Example ```python from __future__ import print_function -import swagger_client -from swagger_client.rest import ApiException -from pprint import pprint +import swagger_client # noqa: F401 +from swagger_client.rest import ApiException # noqa: F401 +from pprint import pprint # noqa: F401 # create an instance of the API class api_instance = swagger_client.ComponentServiceApi() @@ -258,9 +258,9 @@ No authorization required ### Example ```python from __future__ import print_function -import swagger_client -from swagger_client.rest import ApiException -from pprint import pprint +import swagger_client # noqa: F401 +from swagger_client.rest import ApiException # noqa: F401 +from pprint import pprint # noqa: F401 # create an instance of the API class api_instance = swagger_client.ComponentServiceApi() @@ -302,9 +302,9 @@ No authorization required ### Example ```python from __future__ import print_function -import swagger_client -from swagger_client.rest import ApiException -from pprint import pprint +import swagger_client # noqa: F401 +from swagger_client.rest import ApiException # noqa: F401 +from pprint import pprint # noqa: F401 # create an instance of the API class api_instance = swagger_client.ComponentServiceApi() @@ -346,9 +346,9 @@ No authorization required ### Example ```python from __future__ import print_function -import swagger_client -from swagger_client.rest import ApiException -from pprint import pprint +import swagger_client # noqa: F401 +from swagger_client.rest import ApiException # noqa: F401 +from pprint import pprint # noqa: 
F401 # create an instance of the API class api_instance = swagger_client.ComponentServiceApi() @@ -396,9 +396,9 @@ No authorization required ### Example ```python from __future__ import print_function -import swagger_client -from swagger_client.rest import ApiException -from pprint import pprint +import swagger_client # noqa: F401 +from swagger_client.rest import ApiException # noqa: F401 +from pprint import pprint # noqa: F401 # create an instance of the API class api_instance = swagger_client.ComponentServiceApi() @@ -444,9 +444,9 @@ No authorization required ### Example ```python from __future__ import print_function -import swagger_client -from swagger_client.rest import ApiException -from pprint import pprint +import swagger_client # noqa: F401 +from swagger_client.rest import ApiException # noqa: F401 +from pprint import pprint # noqa: F401 # create an instance of the API class api_instance = swagger_client.ComponentServiceApi() @@ -487,9 +487,9 @@ No authorization required ### Example ```python from __future__ import print_function -import swagger_client -from swagger_client.rest import ApiException -from pprint import pprint +import swagger_client # noqa: F401 +from swagger_client.rest import ApiException # noqa: F401 +from pprint import pprint # noqa: F401 # create an instance of the API class api_instance = swagger_client.ComponentServiceApi() @@ -533,9 +533,9 @@ No authorization required ### Example ```python from __future__ import print_function -import swagger_client -from swagger_client.rest import ApiException -from pprint import pprint +import swagger_client # noqa: F401 +from swagger_client.rest import ApiException # noqa: F401 +from pprint import pprint # noqa: F401 # create an instance of the API class api_instance = swagger_client.ComponentServiceApi() @@ -579,9 +579,9 @@ No authorization required ### Example ```python from __future__ import print_function -import swagger_client -from swagger_client.rest import ApiException -from pprint import 
pprint +import swagger_client # noqa: F401 +from swagger_client.rest import ApiException # noqa: F401 +from pprint import pprint # noqa: F401 # create an instance of the API class api_instance = swagger_client.ComponentServiceApi() diff --git a/api/client/docs/CredentialServiceApi.md b/api/client/docs/CredentialServiceApi.md index 1548db36..a63c8146 100644 --- a/api/client/docs/CredentialServiceApi.md +++ b/api/client/docs/CredentialServiceApi.md @@ -21,9 +21,9 @@ Creates a credential associated with a pipeline. ```python from __future__ import print_function import time -import swagger_client -from swagger_client.rest import ApiException -from pprint import pprint +import swagger_client # noqa: F401 +from swagger_client.rest import ApiException # noqa: F401 +from pprint import pprint # noqa: F401 # create an instance of the API class api_instance = swagger_client.CredentialServiceApi() @@ -66,9 +66,9 @@ No authorization required ```python from __future__ import print_function import time -import swagger_client -from swagger_client.rest import ApiException -from pprint import pprint +import swagger_client # noqa: F401 +from swagger_client.rest import ApiException # noqa: F401 +from pprint import pprint # noqa: F401 # create an instance of the API class api_instance = swagger_client.CredentialServiceApi() @@ -110,9 +110,9 @@ No authorization required ```python from __future__ import print_function import time -import swagger_client -from swagger_client.rest import ApiException -from pprint import pprint +import swagger_client # noqa: F401 +from swagger_client.rest import ApiException # noqa: F401 +from pprint import pprint # noqa: F401 # create an instance of the API class api_instance = swagger_client.CredentialServiceApi() @@ -155,9 +155,9 @@ No authorization required ```python from __future__ import print_function import time -import swagger_client -from swagger_client.rest import ApiException -from pprint import pprint +import swagger_client # noqa: F401 +from 
swagger_client.rest import ApiException # noqa: F401 +from pprint import pprint # noqa: F401 # create an instance of the API class api_instance = swagger_client.CredentialServiceApi() diff --git a/api/client/docs/DatasetServiceApi.md b/api/client/docs/DatasetServiceApi.md index 70b7f6c7..ebdcf128 100644 --- a/api/client/docs/DatasetServiceApi.md +++ b/api/client/docs/DatasetServiceApi.md @@ -28,8 +28,8 @@ Method | HTTP request | Description ```python from __future__ import print_function import time -import swagger_client -from swagger_client.rest import ApiException +import swagger_client # noqa: F401 +from swagger_client.rest import ApiException # noqa: F401 # create an instance of the API class api_instance = swagger_client.DatasetServiceApi() @@ -71,9 +71,9 @@ No authorization required ```python from __future__ import print_function import time -import swagger_client -from swagger_client.rest import ApiException -from pprint import pprint +import swagger_client # noqa: F401 +from swagger_client.rest import ApiException # noqa: F401 +from pprint import pprint # noqa: F401 # create an instance of the API class api_instance = swagger_client.DatasetServiceApi() @@ -116,8 +116,8 @@ No authorization required ```python from __future__ import print_function import time -import swagger_client -from swagger_client.rest import ApiException +import swagger_client # noqa: F401 +from swagger_client.rest import ApiException # noqa: F401 # create an instance of the API class api_instance = swagger_client.DatasetServiceApi() @@ -157,9 +157,9 @@ Returns the dataset artifacts compressed into a .tgz (.tar.gz) file. 
### Example ```python -import swagger_client -from swagger_client.rest import ApiException -from urllib3.response import HTTPResponse +import swagger_client # noqa: F401 +from swagger_client.rest import ApiException # noqa: F401 +from urllib3.response import HTTPResponse # noqa: F401 # create an instance of the API class api_instance = swagger_client.DatasetServiceApi() @@ -212,9 +212,9 @@ Generate sample code to use dataset in a pipeline ```python from __future__ import print_function import time -import swagger_client -from swagger_client.rest import ApiException -from pprint import pprint +import swagger_client # noqa: F401 +from swagger_client.rest import ApiException # noqa: F401 +from pprint import pprint # noqa: F401 # create an instance of the API class api_instance = swagger_client.DatasetServiceApi() @@ -257,9 +257,9 @@ No authorization required ```python from __future__ import print_function import time -import swagger_client -from swagger_client.rest import ApiException -from pprint import pprint +import swagger_client # noqa: F401 +from swagger_client.rest import ApiException # noqa: F401 +from pprint import pprint # noqa: F401 # create an instance of the API class api_instance = swagger_client.DatasetServiceApi() @@ -302,9 +302,9 @@ No authorization required ```python from __future__ import print_function import time -import swagger_client -from swagger_client.rest import ApiException -from pprint import pprint +import swagger_client # noqa: F401 +from swagger_client.rest import ApiException # noqa: F401 +from pprint import pprint # noqa: F401 # create an instance of the API class api_instance = swagger_client.DatasetServiceApi() @@ -347,9 +347,9 @@ No authorization required ```python from __future__ import print_function import time -import swagger_client -from swagger_client.rest import ApiException -from pprint import pprint +import swagger_client # noqa: F401 +from swagger_client.rest import ApiException # noqa: F401 +from pprint import pprint # 
noqa: F401 # create an instance of the API class api_instance = swagger_client.DatasetServiceApi() @@ -398,9 +398,9 @@ No authorization required ```python from __future__ import print_function import time -import swagger_client -from swagger_client.rest import ApiException -from pprint import pprint +import swagger_client # noqa: F401 +from swagger_client.rest import ApiException # noqa: F401 +from pprint import pprint # noqa: F401 # create an instance of the API class api_instance = swagger_client.DatasetServiceApi() @@ -447,9 +447,9 @@ No authorization required ```python from __future__ import print_function import time -import swagger_client -from swagger_client.rest import ApiException -from pprint import pprint +import swagger_client # noqa: F401 +from swagger_client.rest import ApiException # noqa: F401 +from pprint import pprint # noqa: F401 # create an instance of the API class api_instance = swagger_client.DatasetServiceApi() @@ -491,9 +491,9 @@ No authorization required ```python from __future__ import print_function import time -import swagger_client -from swagger_client.rest import ApiException -from pprint import pprint +import swagger_client # noqa: F401 +from swagger_client.rest import ApiException # noqa: F401 +from pprint import pprint # noqa: F401 # create an instance of the API class api_instance = swagger_client.DatasetServiceApi() @@ -537,9 +537,9 @@ No authorization required ### Example ```python from __future__ import print_function -import swagger_client -from swagger_client.rest import ApiException -from pprint import pprint +import swagger_client # noqa: F401 +from swagger_client.rest import ApiException # noqa: F401 +from pprint import pprint # noqa: F401 # create an instance of the API class api_instance = swagger_client.DatasetServiceApi() @@ -584,9 +584,9 @@ No authorization required ```python from __future__ import print_function import time -import swagger_client -from swagger_client.rest import ApiException -from pprint import 
pprint +import swagger_client # noqa: F401 +from swagger_client.rest import ApiException # noqa: F401 +from pprint import pprint # noqa: F401 # create an instance of the API class api_instance = swagger_client.DatasetServiceApi() diff --git a/api/client/docs/HealthCheckApi.md b/api/client/docs/HealthCheckApi.md index 717c6004..9a44080b 100644 --- a/api/client/docs/HealthCheckApi.md +++ b/api/client/docs/HealthCheckApi.md @@ -16,9 +16,9 @@ Checks if the server is running ```python from __future__ import print_function import time -import swagger_client -from swagger_client.rest import ApiException -from pprint import pprint +import swagger_client # noqa: F401 +from swagger_client.rest import ApiException # noqa: F401 +from pprint import pprint # noqa: F401 # create an instance of the API class api_instance = swagger_client.HealthCheckApi() diff --git a/api/client/docs/InferenceServiceApi.md b/api/client/docs/InferenceServiceApi.md index 877add02..c0cacd8e 100644 --- a/api/client/docs/InferenceServiceApi.md +++ b/api/client/docs/InferenceServiceApi.md @@ -19,9 +19,9 @@ Method | HTTP request | Description ```python from __future__ import print_function import time -import swagger_client -from swagger_client.rest import ApiException -from pprint import pprint +import swagger_client # noqa: F401 +from swagger_client.rest import ApiException # noqa: F401 +from pprint import pprint # noqa: F401 # create an instance of the API class api_instance = swagger_client.InferenceServiceApi() @@ -66,9 +66,9 @@ No authorization required ```python from __future__ import print_function import time -import swagger_client -from swagger_client.rest import ApiException -from pprint import pprint +import swagger_client # noqa: F401 +from swagger_client.rest import ApiException # noqa: F401 +from pprint import pprint # noqa: F401 # create an instance of the API class api_instance = swagger_client.InferenceServiceApi() @@ -113,9 +113,9 @@ No authorization required ```python from __future__ 
import print_function import time -import swagger_client -from swagger_client.rest import ApiException -from pprint import pprint +import swagger_client # noqa: F401 +from swagger_client.rest import ApiException # noqa: F401 +from pprint import pprint # noqa: F401 # create an instance of the API class api_instance = swagger_client.InferenceServiceApi() @@ -166,9 +166,9 @@ No authorization required ```python from __future__ import print_function import time -import swagger_client -from swagger_client.rest import ApiException -from pprint import pprint +import swagger_client # noqa: F401 +from swagger_client.rest import ApiException # noqa: F401 +from pprint import pprint # noqa: F401 # create an instance of the API class api_instance = swagger_client.InferenceServiceApi() diff --git a/api/client/docs/ModelServiceApi.md b/api/client/docs/ModelServiceApi.md index 6f8a4c32..917eb3a3 100644 --- a/api/client/docs/ModelServiceApi.md +++ b/api/client/docs/ModelServiceApi.md @@ -28,9 +28,9 @@ Method | HTTP request | Description ```python from __future__ import print_function import time -import swagger_client -from swagger_client.rest import ApiException -from pprint import pprint +import swagger_client # noqa: F401 +from swagger_client.rest import ApiException # noqa: F401 +from pprint import pprint # noqa: F401 # create an instance of the API class api_instance = swagger_client.ModelServiceApi() @@ -72,9 +72,9 @@ No authorization required ```python from __future__ import print_function import time -import swagger_client -from swagger_client.rest import ApiException -from pprint import pprint +import swagger_client # noqa: F401 +from swagger_client.rest import ApiException # noqa: F401 +from pprint import pprint # noqa: F401 # create an instance of the API class api_instance = swagger_client.ModelServiceApi() @@ -117,9 +117,9 @@ No authorization required ```python from __future__ import print_function import time -import swagger_client -from swagger_client.rest import 
ApiException -from pprint import pprint +import swagger_client # noqa: F401 +from swagger_client.rest import ApiException # noqa: F401 +from pprint import pprint # noqa: F401 # create an instance of the API class api_instance = swagger_client.ModelServiceApi() @@ -160,9 +160,9 @@ Returns the model artifacts compressed into a .tgz (.tar.gz) file. ### Example ```python from __future__ import print_function -import swagger_client -from swagger_client.rest import ApiException -from urllib3.response import HTTPResponse +import swagger_client # noqa: F401 +from swagger_client.rest import ApiException # noqa: F401 +from urllib3.response import HTTPResponse # noqa: F401 # create an instance of the API class @@ -213,9 +213,9 @@ No authorization required ### Example ```python from __future__ import print_function -import swagger_client -from swagger_client.rest import ApiException -from pprint import pprint +import swagger_client # noqa: F401 +from swagger_client.rest import ApiException # noqa: F401 +from pprint import pprint # noqa: F401 # create an instance of the API class api_instance = swagger_client.ModelServiceApi() @@ -257,9 +257,9 @@ No authorization required ### Example ```python from __future__ import print_function -import swagger_client -from swagger_client.rest import ApiException -from pprint import pprint +import swagger_client # noqa: F401 +from swagger_client.rest import ApiException # noqa: F401 +from pprint import pprint # noqa: F401 # create an instance of the API class api_instance = swagger_client.ModelServiceApi() @@ -301,9 +301,9 @@ No authorization required ### Example ```python from __future__ import print_function -import swagger_client -from swagger_client.rest import ApiException -from pprint import pprint +import swagger_client # noqa: F401 +from swagger_client.rest import ApiException # noqa: F401 +from pprint import pprint # noqa: F401 # create an instance of the API class api_instance = swagger_client.ModelServiceApi() @@ -345,9 +345,9 @@ 
No authorization required ### Example ```python from __future__ import print_function -import swagger_client -from swagger_client.rest import ApiException -from pprint import pprint +import swagger_client # noqa: F401 +from swagger_client.rest import ApiException # noqa: F401 +from pprint import pprint # noqa: F401 # create an instance of the API class api_instance = swagger_client.ModelServiceApi() @@ -395,9 +395,9 @@ No authorization required ### Example ```python from __future__ import print_function -import swagger_client -from swagger_client.rest import ApiException -from pprint import pprint +import swagger_client # noqa: F401 +from swagger_client.rest import ApiException # noqa: F401 +from pprint import pprint # noqa: F401 # create an instance of the API class api_instance = swagger_client.ModelServiceApi() @@ -447,8 +447,8 @@ No authorization required ### Example ```python from __future__ import print_function -import swagger_client -from swagger_client.rest import ApiException +import swagger_client # noqa: F401 +from swagger_client.rest import ApiException # noqa: F401 # create an instance of the API class api_instance = swagger_client.ModelServiceApi() @@ -489,9 +489,9 @@ No authorization required ### Example ```python from __future__ import print_function -import swagger_client -from swagger_client.rest import ApiException -from pprint import pprint +import swagger_client # noqa: F401 +from swagger_client.rest import ApiException # noqa: F401 +from pprint import pprint # noqa: F401 # create an instance of the API class api_instance = swagger_client.ModelServiceApi() @@ -536,9 +536,9 @@ No authorization required ```python from __future__ import print_function import time -import swagger_client -from swagger_client.rest import ApiException -from pprint import pprint +import swagger_client # noqa: F401 +from swagger_client.rest import ApiException # noqa: F401 +from pprint import pprint # noqa: F401 # create an instance of the API class api_instance = 
swagger_client.ModelServiceApi() @@ -582,9 +582,9 @@ No authorization required ### Example ```python from __future__ import print_function -import swagger_client -from swagger_client.rest import ApiException -from pprint import pprint +import swagger_client # noqa: F401 +from swagger_client.rest import ApiException # noqa: F401 +from pprint import pprint # noqa: F401 # create an instance of the API class api_instance = swagger_client.ModelServiceApi() diff --git a/api/client/docs/NotebookServiceApi.md b/api/client/docs/NotebookServiceApi.md index d2abaa9c..59f964c0 100644 --- a/api/client/docs/NotebookServiceApi.md +++ b/api/client/docs/NotebookServiceApi.md @@ -28,9 +28,9 @@ Method | HTTP request | Description ```python from __future__ import print_function import time -import swagger_client -from swagger_client.rest import ApiException -from pprint import pprint +import swagger_client # noqa: F401 +from swagger_client.rest import ApiException # noqa: F401 +from pprint import pprint # noqa: F401 # create an instance of the API class api_instance = swagger_client.NotebookServiceApi() @@ -72,9 +72,9 @@ No authorization required ```python from __future__ import print_function import time -import swagger_client -from swagger_client.rest import ApiException -from pprint import pprint +import swagger_client # noqa: F401 +from swagger_client.rest import ApiException # noqa: F401 +from pprint import pprint # noqa: F401 # create an instance of the API class api_instance = swagger_client.NotebookServiceApi() @@ -117,9 +117,9 @@ No authorization required ```python from __future__ import print_function import time -import swagger_client -from swagger_client.rest import ApiException -from pprint import pprint +import swagger_client # noqa: F401 +from swagger_client.rest import ApiException # noqa: F401 +from pprint import pprint # noqa: F401 # create an instance of the API class api_instance = swagger_client.NotebookServiceApi() @@ -160,9 +160,9 @@ Returns the notebook 
artifacts compressed into a .tgz (.tar.gz) file. ### Example ```python from __future__ import print_function -import swagger_client -from swagger_client.rest import ApiException -from urllib3.response import HTTPResponse +import swagger_client # noqa: F401 +from swagger_client.rest import ApiException # noqa: F401 +from urllib3.response import HTTPResponse # noqa: F401 # create an instance of the API class @@ -215,9 +215,9 @@ Generate sample code to use notebook in a pipeline ### Example ```python from __future__ import print_function -import swagger_client -from swagger_client.rest import ApiException -from pprint import pprint +import swagger_client # noqa: F401 +from swagger_client.rest import ApiException # noqa: F401 +from pprint import pprint # noqa: F401 # create an instance of the API class api_instance = swagger_client.NotebookServiceApi() @@ -259,9 +259,9 @@ No authorization required ### Example ```python from __future__ import print_function -import swagger_client -from swagger_client.rest import ApiException -from pprint import pprint +import swagger_client # noqa: F401 +from swagger_client.rest import ApiException # noqa: F401 +from pprint import pprint # noqa: F401 # create an instance of the API class api_instance = swagger_client.NotebookServiceApi() @@ -303,9 +303,9 @@ No authorization required ### Example ```python from __future__ import print_function -import swagger_client -from swagger_client.rest import ApiException -from pprint import pprint +import swagger_client # noqa: F401 +from swagger_client.rest import ApiException # noqa: F401 +from pprint import pprint # noqa: F401 # create an instance of the API class api_instance = swagger_client.NotebookServiceApi() @@ -347,9 +347,9 @@ No authorization required ### Example ```python from __future__ import print_function -import swagger_client -from swagger_client.rest import ApiException -from pprint import pprint +import swagger_client # noqa: F401 +from swagger_client.rest import ApiException # 
noqa: F401 +from pprint import pprint # noqa: F401 # create an instance of the API class api_instance = swagger_client.NotebookServiceApi() @@ -397,9 +397,9 @@ No authorization required ### Example ```python from __future__ import print_function -import swagger_client -from swagger_client.rest import ApiException -from pprint import pprint +import swagger_client # noqa: F401 +from swagger_client.rest import ApiException # noqa: F401 +from pprint import pprint # noqa: F401 # create an instance of the API class api_instance = swagger_client.NotebookServiceApi() @@ -445,9 +445,9 @@ No authorization required ### Example ```python from __future__ import print_function -import swagger_client -from swagger_client.rest import ApiException -from pprint import pprint +import swagger_client # noqa: F401 +from swagger_client.rest import ApiException # noqa: F401 +from pprint import pprint # noqa: F401 # create an instance of the API class api_instance = swagger_client.NotebookServiceApi() @@ -488,9 +488,9 @@ No authorization required ### Example ```python from __future__ import print_function -import swagger_client -from swagger_client.rest import ApiException -from pprint import pprint +import swagger_client # noqa: F401 +from swagger_client.rest import ApiException # noqa: F401 +from pprint import pprint # noqa: F401 # create an instance of the API class api_instance = swagger_client.NotebookServiceApi() @@ -537,9 +537,9 @@ No authorization required ```python from __future__ import print_function import time -import swagger_client -from swagger_client.rest import ApiException -from pprint import pprint +import swagger_client # noqa: F401 +from swagger_client.rest import ApiException # noqa: F401 +from pprint import pprint # noqa: F401 # create an instance of the API class api_instance = swagger_client.NotebookServiceApi() @@ -583,9 +583,9 @@ No authorization required ### Example ```python from __future__ import print_function -import swagger_client -from swagger_client.rest 
import ApiException -from pprint import pprint +import swagger_client # noqa: F401 +from swagger_client.rest import ApiException # noqa: F401 +from pprint import pprint # noqa: F401 # create an instance of the API class api_instance = swagger_client.NotebookServiceApi() diff --git a/api/client/docs/PipelineServiceApi.md b/api/client/docs/PipelineServiceApi.md index 2999cdb0..9c973a0a 100644 --- a/api/client/docs/PipelineServiceApi.md +++ b/api/client/docs/PipelineServiceApi.md @@ -27,9 +27,9 @@ Method | HTTP request | Description ```python from __future__ import print_function import time -import swagger_client -from swagger_client.rest import ApiException -from pprint import pprint +import swagger_client # noqa: F401 +from swagger_client.rest import ApiException # noqa: F401 +from pprint import pprint # noqa: F401 # create an instance of the API class api_instance = swagger_client.PipelineServiceApi() @@ -71,9 +71,9 @@ No authorization required ```python from __future__ import print_function import time -import swagger_client -from swagger_client.rest import ApiException -from pprint import pprint +import swagger_client # noqa: F401 +from swagger_client.rest import ApiException # noqa: F401 +from pprint import pprint # noqa: F401 # create an instance of the API class api_instance = swagger_client.PipelineServiceApi() @@ -116,8 +116,8 @@ No authorization required ```python from __future__ import print_function import time -import swagger_client -from swagger_client.rest import ApiException +import swagger_client # noqa: F401 +from swagger_client.rest import ApiException # noqa: F401 # create an instance of the API class api_instance = swagger_client.PipelineServiceApi() @@ -158,9 +158,9 @@ Returns the pipeline YAML compressed into a .tgz (.tar.gz) file. 
### Example ```python from __future__ import print_function -import swagger_client -from swagger_client.rest import ApiException -from urllib3.response import HTTPResponse +import swagger_client # noqa: F401 +from swagger_client.rest import ApiException # noqa: F401 +from urllib3.response import HTTPResponse # noqa: F401 # create an instance of the API class @@ -208,9 +208,9 @@ No authorization required ```python from __future__ import print_function import time -import swagger_client -from swagger_client.rest import ApiException -from pprint import pprint +import swagger_client # noqa: F401 +from swagger_client.rest import ApiException # noqa: F401 +from pprint import pprint # noqa: F401 # create an instance of the API class api_instance = swagger_client.PipelineServiceApi() @@ -253,9 +253,9 @@ No authorization required ```python from __future__ import print_function import time -import swagger_client -from swagger_client.rest import ApiException -from pprint import pprint +import swagger_client # noqa: F401 +from swagger_client.rest import ApiException # noqa: F401 +from pprint import pprint # noqa: F401 # create an instance of the API class api_instance = swagger_client.PipelineServiceApi() @@ -298,9 +298,9 @@ No authorization required ```python from __future__ import print_function import time -import swagger_client -from swagger_client.rest import ApiException -from pprint import pprint +import swagger_client # noqa: F401 +from swagger_client.rest import ApiException # noqa: F401 +from pprint import pprint # noqa: F401 # create an instance of the API class api_instance = swagger_client.PipelineServiceApi() @@ -351,9 +351,9 @@ Run a complex pipeline defined by a directed acyclic graph (DAG) ```python from __future__ import print_function import time -import swagger_client -from swagger_client.rest import ApiException -from pprint import pprint +import swagger_client # noqa: F401 +from swagger_client.rest import ApiException # noqa: F401 +from pprint import 
pprint # noqa: F401 # create an instance of the API class api_instance = swagger_client.PipelineServiceApi() @@ -398,9 +398,9 @@ No authorization required ```python from __future__ import print_function import time -import swagger_client -from swagger_client.rest import ApiException -from pprint import pprint +import swagger_client # noqa: F401 +from swagger_client.rest import ApiException # noqa: F401 +from pprint import pprint # noqa: F401 # create an instance of the API class api_instance = swagger_client.PipelineServiceApi() @@ -447,9 +447,9 @@ No authorization required ```python from __future__ import print_function import time -import swagger_client -from swagger_client.rest import ApiException -from pprint import pprint +import swagger_client # noqa: F401 +from swagger_client.rest import ApiException # noqa: F401 +from pprint import pprint # noqa: F401 # create an instance of the API class api_instance = swagger_client.PipelineServiceApi() @@ -491,9 +491,9 @@ No authorization required ```python from __future__ import print_function import time -import swagger_client -from swagger_client.rest import ApiException -from pprint import pprint +import swagger_client # noqa: F401 +from swagger_client.rest import ApiException # noqa: F401 +from pprint import pprint # noqa: F401 # create an instance of the API class api_instance = swagger_client.PipelineServiceApi() @@ -541,9 +541,9 @@ No authorization required ### Example ```python from __future__ import print_function -import swagger_client -from swagger_client.rest import ApiException -from pprint import pprint +import swagger_client # noqa: F401 +from swagger_client.rest import ApiException # noqa: F401 +from pprint import pprint # noqa: F401 # create an instance of the API class api_instance = swagger_client.PipelineServiceApi() diff --git a/api/client/setup.py b/api/client/setup.py index 78a12b14..8fba4f4b 100644 --- a/api/client/setup.py +++ b/api/client/setup.py @@ -26,13 +26,8 @@ # prerequisite: setuptools # 
http://pypi.python.org/pypi/setuptools -REQUIRES = [ - "certifi>=2017.4.17", - "python-dateutil>=2.1", - "six>=1.10", - "urllib3>=1.23" -] - +REQUIRES = ["certifi>=2017.4.17", "python-dateutil>=2.1", "six>=1.10", "urllib3>=1.23"] + setup( name=NAME, @@ -47,5 +42,5 @@ include_package_data=True, long_description="""\ Machine Learning Exchange API Client - """ + """, ) diff --git a/api/client/swagger_client/__init__.py b/api/client/swagger_client/__init__.py index 223c01d6..688eecbb 100644 --- a/api/client/swagger_client/__init__.py +++ b/api/client/swagger_client/__init__.py @@ -33,6 +33,7 @@ # import ApiClient from swagger_client.api_client import ApiClient from swagger_client.configuration import Configuration + # import models into sdk package from swagger_client.models.any_value import AnyValue from swagger_client.models.api_access_token import ApiAccessToken @@ -41,15 +42,25 @@ from swagger_client.models.api_catalog_upload_item import ApiCatalogUploadItem from swagger_client.models.api_credential import ApiCredential from swagger_client.models.api_generate_code_response import ApiGenerateCodeResponse -from swagger_client.models.api_generate_model_code_response import ApiGenerateModelCodeResponse +from swagger_client.models.api_generate_model_code_response import ( # noqa: F401 + ApiGenerateModelCodeResponse, +) from swagger_client.models.api_get_template_response import ApiGetTemplateResponse from swagger_client.models.api_inferenceservice import ApiInferenceservice -from swagger_client.models.api_list_catalog_items_response import ApiListCatalogItemsResponse -from swagger_client.models.api_list_catalog_upload_errors import ApiListCatalogUploadErrors +from swagger_client.models.api_list_catalog_items_response import ( # noqa: F401 + ApiListCatalogItemsResponse, +) +from swagger_client.models.api_list_catalog_upload_errors import ( # noqa: F401 + ApiListCatalogUploadErrors, +) from swagger_client.models.api_list_components_response import 
ApiListComponentsResponse -from swagger_client.models.api_list_credentials_response import ApiListCredentialsResponse +from swagger_client.models.api_list_credentials_response import ( # noqa: F401 + ApiListCredentialsResponse, +) from swagger_client.models.api_list_datasets_response import ApiListDatasetsResponse -from swagger_client.models.api_list_inferenceservices_response import ApiListInferenceservicesResponse +from swagger_client.models.api_list_inferenceservices_response import ( # noqa: F401 + ApiListInferenceservicesResponse, +) from swagger_client.models.api_list_models_response import ApiListModelsResponse from swagger_client.models.api_list_notebooks_response import ApiListNotebooksResponse from swagger_client.models.api_list_pipelines_response import ApiListPipelinesResponse @@ -60,7 +71,9 @@ from swagger_client.models.api_parameter import ApiParameter from swagger_client.models.api_pipeline import ApiPipeline from swagger_client.models.api_pipeline_custom import ApiPipelineCustom -from swagger_client.models.api_pipeline_custom_run_payload import ApiPipelineCustomRunPayload +from swagger_client.models.api_pipeline_custom_run_payload import ( # noqa: F401 + ApiPipelineCustomRunPayload, +) from swagger_client.models.api_pipeline_dag import ApiPipelineDAG from swagger_client.models.api_pipeline_extension import ApiPipelineExtension from swagger_client.models.api_pipeline_inputs import ApiPipelineInputs @@ -68,7 +81,7 @@ from swagger_client.models.api_pipeline_task_arguments import ApiPipelineTaskArguments from swagger_client.models.api_run_code_response import ApiRunCodeResponse from swagger_client.models.api_settings import ApiSettings -from swagger_client.models.api_settings_section import ApiSettingsSection +from swagger_client.models.api_settings_section import ApiSettingsSection # noqa: F401 from swagger_client.models.api_status import ApiStatus from swagger_client.models.api_url import ApiUrl from swagger_client.models.dictionary import Dictionary 
diff --git a/api/client/swagger_client/api/application_settings_api.py b/api/client/swagger_client/api/application_settings_api.py index 4e312b6e..0271a59d 100644 --- a/api/client/swagger_client/api/application_settings_api.py +++ b/api/client/swagger_client/api/application_settings_api.py @@ -19,7 +19,7 @@ import re # noqa: F401 # python 2 and python 3 compatibility library -import six +import six # noqa: F401 from swagger_client.api_client import ApiClient @@ -50,11 +50,13 @@ def get_application_settings(self, **kwargs): # noqa: E501 If the method is called asynchronously, returns the request thread. """ - kwargs['_return_http_data_only'] = True - if kwargs.get('async_req'): - return self.get_application_settings_with_http_info(**kwargs) # noqa: E501 + kwargs["_return_http_data_only"] = True + if kwargs.get("async_req"): + return self.get_application_settings_with_http_info(**kwargs) else: - (data) = self.get_application_settings_with_http_info(**kwargs) # noqa: E501 + (data) = self.get_application_settings_with_http_info( + **kwargs + ) return data def get_application_settings_with_http_info(self, **kwargs): # noqa: E501 @@ -73,20 +75,20 @@ def get_application_settings_with_http_info(self, **kwargs): # noqa: E501 """ all_params = [] # noqa: E501 - all_params.append('async_req') - all_params.append('_return_http_data_only') - all_params.append('_preload_content') - all_params.append('_request_timeout') + all_params.append("async_req") + all_params.append("_return_http_data_only") + all_params.append("_preload_content") + all_params.append("_request_timeout") params = locals() - for key, val in six.iteritems(params['kwargs']): + for key, val in six.iteritems(params["kwargs"]): if key not in all_params: raise TypeError( "Got an unexpected keyword argument '%s'" " to method get_application_settings" % key ) params[key] = val - del params['kwargs'] + del params["kwargs"] collection_formats = {} @@ -101,27 +103,30 @@ def get_application_settings_with_http_info(self, 
**kwargs): # noqa: E501 body_params = None # HTTP header `Accept` - header_params['Accept'] = self.api_client.select_header_accept( - ['application/json']) # noqa: E501 + header_params["Accept"] = self.api_client.select_header_accept( + ["application/json"] + ) # Authentication setting auth_settings = [] # noqa: E501 return self.api_client.call_api( - '/settings', 'GET', + "/settings", + "GET", path_params, query_params, header_params, body=body_params, post_params=form_params, files=local_var_files, - response_type='ApiSettings', # noqa: E501 + response_type="ApiSettings", # noqa: E501 auth_settings=auth_settings, - async_req=params.get('async_req'), - _return_http_data_only=params.get('_return_http_data_only'), - _preload_content=params.get('_preload_content', True), - _request_timeout=params.get('_request_timeout'), - collection_formats=collection_formats) + async_req=params.get("async_req"), + _return_http_data_only=params.get("_return_http_data_only"), + _preload_content=params.get("_preload_content", True), + _request_timeout=params.get("_request_timeout"), + collection_formats=collection_formats, + ) def modify_application_settings(self, dictionary, **kwargs): # noqa: E501 """modify_application_settings # noqa: E501 @@ -138,14 +143,20 @@ def modify_application_settings(self, dictionary, **kwargs): # noqa: E501 If the method is called asynchronously, returns the request thread. 
""" - kwargs['_return_http_data_only'] = True - if kwargs.get('async_req'): - return self.modify_application_settings_with_http_info(dictionary, **kwargs) # noqa: E501 + kwargs["_return_http_data_only"] = True + if kwargs.get("async_req"): + return self.modify_application_settings_with_http_info( + dictionary, **kwargs + ) else: - (data) = self.modify_application_settings_with_http_info(dictionary, **kwargs) # noqa: E501 + (data) = self.modify_application_settings_with_http_info( + dictionary, **kwargs + ) return data - def modify_application_settings_with_http_info(self, dictionary, **kwargs): # noqa: E501 + def modify_application_settings_with_http_info( + self, dictionary, **kwargs + ): # noqa: E501 """modify_application_settings # noqa: E501 Modify one or more of the application settings. # noqa: E501 @@ -161,25 +172,26 @@ def modify_application_settings_with_http_info(self, dictionary, **kwargs): # n returns the request thread. """ - all_params = ['dictionary'] # noqa: E501 - all_params.append('async_req') - all_params.append('_return_http_data_only') - all_params.append('_preload_content') - all_params.append('_request_timeout') + all_params = ["dictionary"] # noqa: E501 + all_params.append("async_req") + all_params.append("_return_http_data_only") + all_params.append("_preload_content") + all_params.append("_request_timeout") params = locals() - for key, val in six.iteritems(params['kwargs']): + for key, val in six.iteritems(params["kwargs"]): if key not in all_params: raise TypeError( "Got an unexpected keyword argument '%s'" " to method modify_application_settings" % key ) params[key] = val - del params['kwargs'] + del params["kwargs"] # verify the required parameter 'dictionary' is set - if ('dictionary' not in params or - params['dictionary'] is None): - raise ValueError("Missing the required parameter `dictionary` when calling `modify_application_settings`") # noqa: E501 + if "dictionary" not in params or params["dictionary"] is None: + raise 
ValueError( + "Missing the required parameter `dictionary` when calling `modify_application_settings`" + ) collection_formats = {} @@ -193,34 +205,40 @@ def modify_application_settings_with_http_info(self, dictionary, **kwargs): # n local_var_files = {} body_params = None - if 'dictionary' in params: - body_params = params['dictionary'] + if "dictionary" in params: + body_params = params["dictionary"] # HTTP header `Accept` - header_params['Accept'] = self.api_client.select_header_accept( - ['application/json']) # noqa: E501 + header_params["Accept"] = self.api_client.select_header_accept( + ["application/json"] + ) # HTTP header `Content-Type` - header_params['Content-Type'] = self.api_client.select_header_content_type( # noqa: E501 - ['application/json']) # noqa: E501 + header_params[ + "Content-Type" + ] = self.api_client.select_header_content_type( # noqa: E501 + ["application/json"] + ) # Authentication setting auth_settings = [] # noqa: E501 return self.api_client.call_api( - '/settings', 'PUT', + "/settings", + "PUT", path_params, query_params, header_params, body=body_params, post_params=form_params, files=local_var_files, - response_type='ApiSettings', # noqa: E501 + response_type="ApiSettings", # noqa: E501 auth_settings=auth_settings, - async_req=params.get('async_req'), - _return_http_data_only=params.get('_return_http_data_only'), - _preload_content=params.get('_preload_content', True), - _request_timeout=params.get('_request_timeout'), - collection_formats=collection_formats) + async_req=params.get("async_req"), + _return_http_data_only=params.get("_return_http_data_only"), + _preload_content=params.get("_preload_content", True), + _request_timeout=params.get("_request_timeout"), + collection_formats=collection_formats, + ) def set_application_settings(self, settings, **kwargs): # noqa: E501 """set_application_settings # noqa: E501 @@ -237,11 +255,15 @@ def set_application_settings(self, settings, **kwargs): # noqa: E501 If the method is called 
asynchronously, returns the request thread. """ - kwargs['_return_http_data_only'] = True - if kwargs.get('async_req'): - return self.set_application_settings_with_http_info(settings, **kwargs) # noqa: E501 + kwargs["_return_http_data_only"] = True + if kwargs.get("async_req"): + return self.set_application_settings_with_http_info( + settings, **kwargs + ) else: - (data) = self.set_application_settings_with_http_info(settings, **kwargs) # noqa: E501 + (data) = self.set_application_settings_with_http_info( + settings, **kwargs + ) return data def set_application_settings_with_http_info(self, settings, **kwargs): # noqa: E501 @@ -260,25 +282,26 @@ def set_application_settings_with_http_info(self, settings, **kwargs): # noqa: returns the request thread. """ - all_params = ['settings'] # noqa: E501 - all_params.append('async_req') - all_params.append('_return_http_data_only') - all_params.append('_preload_content') - all_params.append('_request_timeout') + all_params = ["settings"] # noqa: E501 + all_params.append("async_req") + all_params.append("_return_http_data_only") + all_params.append("_preload_content") + all_params.append("_request_timeout") params = locals() - for key, val in six.iteritems(params['kwargs']): + for key, val in six.iteritems(params["kwargs"]): if key not in all_params: raise TypeError( "Got an unexpected keyword argument '%s'" " to method set_application_settings" % key ) params[key] = val - del params['kwargs'] + del params["kwargs"] # verify the required parameter 'settings' is set - if ('settings' not in params or - params['settings'] is None): - raise ValueError("Missing the required parameter `settings` when calling `set_application_settings`") # noqa: E501 + if "settings" not in params or params["settings"] is None: + raise ValueError( + "Missing the required parameter `settings` when calling `set_application_settings`" + ) collection_formats = {} @@ -292,31 +315,37 @@ def set_application_settings_with_http_info(self, settings, **kwargs): 
# noqa: local_var_files = {} body_params = None - if 'settings' in params: - body_params = params['settings'] + if "settings" in params: + body_params = params["settings"] # HTTP header `Accept` - header_params['Accept'] = self.api_client.select_header_accept( - ['application/json']) # noqa: E501 + header_params["Accept"] = self.api_client.select_header_accept( + ["application/json"] + ) # HTTP header `Content-Type` - header_params['Content-Type'] = self.api_client.select_header_content_type( # noqa: E501 - ['application/json']) # noqa: E501 + header_params[ + "Content-Type" + ] = self.api_client.select_header_content_type( # noqa: E501 + ["application/json"] + ) # Authentication setting auth_settings = [] # noqa: E501 return self.api_client.call_api( - '/settings', 'POST', + "/settings", + "POST", path_params, query_params, header_params, body=body_params, post_params=form_params, files=local_var_files, - response_type='ApiSettings', # noqa: E501 + response_type="ApiSettings", # noqa: E501 auth_settings=auth_settings, - async_req=params.get('async_req'), - _return_http_data_only=params.get('_return_http_data_only'), - _preload_content=params.get('_preload_content', True), - _request_timeout=params.get('_request_timeout'), - collection_formats=collection_formats) + async_req=params.get("async_req"), + _return_http_data_only=params.get("_return_http_data_only"), + _preload_content=params.get("_preload_content", True), + _request_timeout=params.get("_request_timeout"), + collection_formats=collection_formats, + ) diff --git a/api/client/swagger_client/api/catalog_service_api.py b/api/client/swagger_client/api/catalog_service_api.py index 5a5590f0..35bdaf28 100644 --- a/api/client/swagger_client/api/catalog_service_api.py +++ b/api/client/swagger_client/api/catalog_service_api.py @@ -19,7 +19,7 @@ import re # noqa: F401 # python 2 and python 3 compatibility library -import six +import six # noqa: F401 from swagger_client.api_client import ApiClient @@ -53,11 +53,11 @@ 
def list_all_assets(self, **kwargs): # noqa: E501 If the method is called asynchronously, returns the request thread. """ - kwargs['_return_http_data_only'] = True - if kwargs.get('async_req'): - return self.list_all_assets_with_http_info(**kwargs) # noqa: E501 + kwargs["_return_http_data_only"] = True + if kwargs.get("async_req"): + return self.list_all_assets_with_http_info(**kwargs) else: - (data) = self.list_all_assets_with_http_info(**kwargs) # noqa: E501 + (data) = self.list_all_assets_with_http_info(**kwargs) return data def list_all_assets_with_http_info(self, **kwargs): # noqa: E501 @@ -78,35 +78,35 @@ def list_all_assets_with_http_info(self, **kwargs): # noqa: E501 returns the request thread. """ - all_params = ['page_token', 'page_size', 'sort_by', 'filter'] # noqa: E501 - all_params.append('async_req') - all_params.append('_return_http_data_only') - all_params.append('_preload_content') - all_params.append('_request_timeout') + all_params = ["page_token", "page_size", "sort_by", "filter"] # noqa: E501 + all_params.append("async_req") + all_params.append("_return_http_data_only") + all_params.append("_preload_content") + all_params.append("_request_timeout") params = locals() - for key, val in six.iteritems(params['kwargs']): + for key, val in six.iteritems(params["kwargs"]): if key not in all_params: raise TypeError( "Got an unexpected keyword argument '%s'" " to method list_all_assets" % key ) params[key] = val - del params['kwargs'] + del params["kwargs"] collection_formats = {} path_params = {} query_params = [] - if 'page_token' in params: - query_params.append(('page_token', params['page_token'])) # noqa: E501 - if 'page_size' in params: - query_params.append(('page_size', params['page_size'])) # noqa: E501 - if 'sort_by' in params: - query_params.append(('sort_by', params['sort_by'])) # noqa: E501 - if 'filter' in params: - query_params.append(('filter', params['filter'])) # noqa: E501 + if "page_token" in params: + 
query_params.append(("page_token", params["page_token"])) + if "page_size" in params: + query_params.append(("page_size", params["page_size"])) + if "sort_by" in params: + query_params.append(("sort_by", params["sort_by"])) + if "filter" in params: + query_params.append(("filter", params["filter"])) header_params = {} @@ -118,20 +118,22 @@ def list_all_assets_with_http_info(self, **kwargs): # noqa: E501 auth_settings = [] # noqa: E501 return self.api_client.call_api( - '/catalog', 'GET', + "/catalog", + "GET", path_params, query_params, header_params, body=body_params, post_params=form_params, files=local_var_files, - response_type='ApiListCatalogItemsResponse', # noqa: E501 + response_type="ApiListCatalogItemsResponse", # noqa: E501 auth_settings=auth_settings, - async_req=params.get('async_req'), - _return_http_data_only=params.get('_return_http_data_only'), - _preload_content=params.get('_preload_content', True), - _request_timeout=params.get('_request_timeout'), - collection_formats=collection_formats) + async_req=params.get("async_req"), + _return_http_data_only=params.get("_return_http_data_only"), + _preload_content=params.get("_preload_content", True), + _request_timeout=params.get("_request_timeout"), + collection_formats=collection_formats, + ) def upload_catalog_from_url(self, url, **kwargs): # noqa: E501 """upload_catalog_from_url # noqa: E501 @@ -148,11 +150,15 @@ def upload_catalog_from_url(self, url, **kwargs): # noqa: E501 If the method is called asynchronously, returns the request thread. 
""" - kwargs['_return_http_data_only'] = True - if kwargs.get('async_req'): - return self.upload_catalog_from_url_with_http_info(url, **kwargs) # noqa: E501 + kwargs["_return_http_data_only"] = True + if kwargs.get("async_req"): + return self.upload_catalog_from_url_with_http_info( + url, **kwargs + ) else: - (data) = self.upload_catalog_from_url_with_http_info(url, **kwargs) # noqa: E501 + (data) = self.upload_catalog_from_url_with_http_info( + url, **kwargs + ) return data def upload_catalog_from_url_with_http_info(self, url, **kwargs): # noqa: E501 @@ -171,25 +177,26 @@ def upload_catalog_from_url_with_http_info(self, url, **kwargs): # noqa: E501 returns the request thread. """ - all_params = ['url', 'access_token'] # noqa: E501 - all_params.append('async_req') - all_params.append('_return_http_data_only') - all_params.append('_preload_content') - all_params.append('_request_timeout') + all_params = ["url", "access_token"] # noqa: E501 + all_params.append("async_req") + all_params.append("_return_http_data_only") + all_params.append("_preload_content") + all_params.append("_request_timeout") params = locals() - for key, val in six.iteritems(params['kwargs']): + for key, val in six.iteritems(params["kwargs"]): if key not in all_params: raise TypeError( "Got an unexpected keyword argument '%s'" " to method upload_catalog_from_url" % key ) params[key] = val - del params['kwargs'] + del params["kwargs"] # verify the required parameter 'url' is set - if ('url' not in params or - params['url'] is None): - raise ValueError("Missing the required parameter `url` when calling `upload_catalog_from_url`") # noqa: E501 + if "url" not in params or params["url"] is None: + raise ValueError( + "Missing the required parameter `url` when calling `upload_catalog_from_url`" + ) collection_formats = {} @@ -201,38 +208,44 @@ def upload_catalog_from_url_with_http_info(self, url, **kwargs): # noqa: E501 form_params = [] local_var_files = {} - if 'url' in params: - 
form_params.append(('url', params['url'])) # noqa: E501 - if 'access_token' in params: - form_params.append(('access_token', params['access_token'])) # noqa: E501 + if "url" in params: + form_params.append(("url", params["url"])) + if "access_token" in params: + form_params.append(("access_token", params["access_token"])) body_params = None # HTTP header `Accept` - header_params['Accept'] = self.api_client.select_header_accept( - ['application/json']) # noqa: E501 + header_params["Accept"] = self.api_client.select_header_accept( + ["application/json"] + ) # HTTP header `Content-Type` - header_params['Content-Type'] = self.api_client.select_header_content_type( # noqa: E501 - ['multipart/form-data']) # noqa: E501 + header_params[ + "Content-Type" + ] = self.api_client.select_header_content_type( # noqa: E501 + ["multipart/form-data"] + ) # Authentication setting auth_settings = [] # noqa: E501 return self.api_client.call_api( - '/catalog/upload_from_url', 'POST', + "/catalog/upload_from_url", + "POST", path_params, query_params, header_params, body=body_params, post_params=form_params, files=local_var_files, - response_type='ApiCatalogUploadResponse', # noqa: E501 + response_type="ApiCatalogUploadResponse", # noqa: E501 auth_settings=auth_settings, - async_req=params.get('async_req'), - _return_http_data_only=params.get('_return_http_data_only'), - _preload_content=params.get('_preload_content', True), - _request_timeout=params.get('_request_timeout'), - collection_formats=collection_formats) + async_req=params.get("async_req"), + _return_http_data_only=params.get("_return_http_data_only"), + _preload_content=params.get("_preload_content", True), + _request_timeout=params.get("_request_timeout"), + collection_formats=collection_formats, + ) def upload_multiple_assets(self, body, **kwargs): # noqa: E501 """upload_multiple_assets # noqa: E501 @@ -248,11 +261,15 @@ def upload_multiple_assets(self, body, **kwargs): # noqa: E501 If the method is called asynchronously, 
returns the request thread. """ - kwargs['_return_http_data_only'] = True - if kwargs.get('async_req'): - return self.upload_multiple_assets_with_http_info(body, **kwargs) # noqa: E501 + kwargs["_return_http_data_only"] = True + if kwargs.get("async_req"): + return self.upload_multiple_assets_with_http_info( + body, **kwargs + ) else: - (data) = self.upload_multiple_assets_with_http_info(body, **kwargs) # noqa: E501 + (data) = self.upload_multiple_assets_with_http_info( + body, **kwargs + ) return data def upload_multiple_assets_with_http_info(self, body, **kwargs): # noqa: E501 @@ -270,25 +287,26 @@ def upload_multiple_assets_with_http_info(self, body, **kwargs): # noqa: E501 returns the request thread. """ - all_params = ['body'] # noqa: E501 - all_params.append('async_req') - all_params.append('_return_http_data_only') - all_params.append('_preload_content') - all_params.append('_request_timeout') + all_params = ["body"] # noqa: E501 + all_params.append("async_req") + all_params.append("_return_http_data_only") + all_params.append("_preload_content") + all_params.append("_request_timeout") params = locals() - for key, val in six.iteritems(params['kwargs']): + for key, val in six.iteritems(params["kwargs"]): if key not in all_params: raise TypeError( "Got an unexpected keyword argument '%s'" " to method upload_multiple_assets" % key ) params[key] = val - del params['kwargs'] + del params["kwargs"] # verify the required parameter 'body' is set - if ('body' not in params or - params['body'] is None): - raise ValueError("Missing the required parameter `body` when calling `upload_multiple_assets`") # noqa: E501 + if "body" not in params or params["body"] is None: + raise ValueError( + "Missing the required parameter `body` when calling `upload_multiple_assets`" + ) collection_formats = {} @@ -302,23 +320,25 @@ def upload_multiple_assets_with_http_info(self, body, **kwargs): # noqa: E501 local_var_files = {} body_params = None - if 'body' in params: - body_params = 
params['body'] + if "body" in params: + body_params = params["body"] # Authentication setting auth_settings = [] # noqa: E501 return self.api_client.call_api( - '/catalog', 'POST', + "/catalog", + "POST", path_params, query_params, header_params, body=body_params, post_params=form_params, files=local_var_files, - response_type='ApiCatalogUploadResponse', # noqa: E501 + response_type="ApiCatalogUploadResponse", # noqa: E501 auth_settings=auth_settings, - async_req=params.get('async_req'), - _return_http_data_only=params.get('_return_http_data_only'), - _preload_content=params.get('_preload_content', True), - _request_timeout=params.get('_request_timeout'), - collection_formats=collection_formats) + async_req=params.get("async_req"), + _return_http_data_only=params.get("_return_http_data_only"), + _preload_content=params.get("_preload_content", True), + _request_timeout=params.get("_request_timeout"), + collection_formats=collection_formats, + ) diff --git a/api/client/swagger_client/api/component_service_api.py b/api/client/swagger_client/api/component_service_api.py index 94b05bb6..86be65a0 100644 --- a/api/client/swagger_client/api/component_service_api.py +++ b/api/client/swagger_client/api/component_service_api.py @@ -19,7 +19,7 @@ import re # noqa: F401 # python 2 and python 3 compatibility library -import six +import six # noqa: F401 from swagger_client.api_client import ApiClient @@ -50,14 +50,20 @@ def approve_components_for_publishing(self, component_ids, **kwargs): # noqa: E If the method is called asynchronously, returns the request thread. 
""" - kwargs['_return_http_data_only'] = True - if kwargs.get('async_req'): - return self.approve_components_for_publishing_with_http_info(component_ids, **kwargs) # noqa: E501 + kwargs["_return_http_data_only"] = True + if kwargs.get("async_req"): + return self.approve_components_for_publishing_with_http_info( + component_ids, **kwargs + ) else: - (data) = self.approve_components_for_publishing_with_http_info(component_ids, **kwargs) # noqa: E501 + (data) = self.approve_components_for_publishing_with_http_info( + component_ids, **kwargs + ) return data - def approve_components_for_publishing_with_http_info(self, component_ids, **kwargs): # noqa: E501 + def approve_components_for_publishing_with_http_info( + self, component_ids, **kwargs + ): # noqa: E501 """approve_components_for_publishing # noqa: E501 This method makes a synchronous HTTP request by default. To make an @@ -72,25 +78,26 @@ def approve_components_for_publishing_with_http_info(self, component_ids, **kwar returns the request thread. 
""" - all_params = ['component_ids'] # noqa: E501 - all_params.append('async_req') - all_params.append('_return_http_data_only') - all_params.append('_preload_content') - all_params.append('_request_timeout') + all_params = ["component_ids"] # noqa: E501 + all_params.append("async_req") + all_params.append("_return_http_data_only") + all_params.append("_preload_content") + all_params.append("_request_timeout") params = locals() - for key, val in six.iteritems(params['kwargs']): + for key, val in six.iteritems(params["kwargs"]): if key not in all_params: raise TypeError( "Got an unexpected keyword argument '%s'" " to method approve_components_for_publishing" % key ) params[key] = val - del params['kwargs'] + del params["kwargs"] # verify the required parameter 'component_ids' is set - if ('component_ids' not in params or - params['component_ids'] is None): - raise ValueError("Missing the required parameter `component_ids` when calling `approve_components_for_publishing`") # noqa: E501 + if "component_ids" not in params or params["component_ids"] is None: + raise ValueError( + "Missing the required parameter `component_ids` when calling `approve_components_for_publishing`" + ) collection_formats = {} @@ -104,13 +111,14 @@ def approve_components_for_publishing_with_http_info(self, component_ids, **kwar local_var_files = {} body_params = None - if 'component_ids' in params: - body_params = params['component_ids'] + if "component_ids" in params: + body_params = params["component_ids"] # Authentication setting auth_settings = [] # noqa: E501 return self.api_client.call_api( - '/components/publish_approved', 'POST', + "/components/publish_approved", + "POST", path_params, query_params, header_params, @@ -119,11 +127,12 @@ def approve_components_for_publishing_with_http_info(self, component_ids, **kwar files=local_var_files, response_type=None, # noqa: E501 auth_settings=auth_settings, - async_req=params.get('async_req'), - 
_return_http_data_only=params.get('_return_http_data_only'), - _preload_content=params.get('_preload_content', True), - _request_timeout=params.get('_request_timeout'), - collection_formats=collection_formats) + async_req=params.get("async_req"), + _return_http_data_only=params.get("_return_http_data_only"), + _preload_content=params.get("_preload_content", True), + _request_timeout=params.get("_request_timeout"), + collection_formats=collection_formats, + ) def create_component(self, body, **kwargs): # noqa: E501 """create_component # noqa: E501 @@ -139,11 +148,11 @@ def create_component(self, body, **kwargs): # noqa: E501 If the method is called asynchronously, returns the request thread. """ - kwargs['_return_http_data_only'] = True - if kwargs.get('async_req'): - return self.create_component_with_http_info(body, **kwargs) # noqa: E501 + kwargs["_return_http_data_only"] = True + if kwargs.get("async_req"): + return self.create_component_with_http_info(body, **kwargs) else: - (data) = self.create_component_with_http_info(body, **kwargs) # noqa: E501 + (data) = self.create_component_with_http_info(body, **kwargs) return data def create_component_with_http_info(self, body, **kwargs): # noqa: E501 @@ -161,25 +170,26 @@ def create_component_with_http_info(self, body, **kwargs): # noqa: E501 returns the request thread. 
""" - all_params = ['body'] # noqa: E501 - all_params.append('async_req') - all_params.append('_return_http_data_only') - all_params.append('_preload_content') - all_params.append('_request_timeout') + all_params = ["body"] # noqa: E501 + all_params.append("async_req") + all_params.append("_return_http_data_only") + all_params.append("_preload_content") + all_params.append("_request_timeout") params = locals() - for key, val in six.iteritems(params['kwargs']): + for key, val in six.iteritems(params["kwargs"]): if key not in all_params: raise TypeError( "Got an unexpected keyword argument '%s'" " to method create_component" % key ) params[key] = val - del params['kwargs'] + del params["kwargs"] # verify the required parameter 'body' is set - if ('body' not in params or - params['body'] is None): - raise ValueError("Missing the required parameter `body` when calling `create_component`") # noqa: E501 + if "body" not in params or params["body"] is None: + raise ValueError( + "Missing the required parameter `body` when calling `create_component`" + ) collection_formats = {} @@ -193,26 +203,28 @@ def create_component_with_http_info(self, body, **kwargs): # noqa: E501 local_var_files = {} body_params = None - if 'body' in params: - body_params = params['body'] + if "body" in params: + body_params = params["body"] # Authentication setting auth_settings = [] # noqa: E501 return self.api_client.call_api( - '/components', 'POST', + "/components", + "POST", path_params, query_params, header_params, body=body_params, post_params=form_params, files=local_var_files, - response_type='ApiComponent', # noqa: E501 + response_type="ApiComponent", # noqa: E501 auth_settings=auth_settings, - async_req=params.get('async_req'), - _return_http_data_only=params.get('_return_http_data_only'), - _preload_content=params.get('_preload_content', True), - _request_timeout=params.get('_request_timeout'), - collection_formats=collection_formats) + async_req=params.get("async_req"), + 
_return_http_data_only=params.get("_return_http_data_only"), + _preload_content=params.get("_preload_content", True), + _request_timeout=params.get("_request_timeout"), + collection_formats=collection_formats, + ) def delete_component(self, id, **kwargs): # noqa: E501 """delete_component # noqa: E501 @@ -228,11 +240,11 @@ def delete_component(self, id, **kwargs): # noqa: E501 If the method is called asynchronously, returns the request thread. """ - kwargs['_return_http_data_only'] = True - if kwargs.get('async_req'): - return self.delete_component_with_http_info(id, **kwargs) # noqa: E501 + kwargs["_return_http_data_only"] = True + if kwargs.get("async_req"): + return self.delete_component_with_http_info(id, **kwargs) else: - (data) = self.delete_component_with_http_info(id, **kwargs) # noqa: E501 + (data) = self.delete_component_with_http_info(id, **kwargs) return data def delete_component_with_http_info(self, id, **kwargs): # noqa: E501 @@ -250,31 +262,32 @@ def delete_component_with_http_info(self, id, **kwargs): # noqa: E501 returns the request thread. 
""" - all_params = ['id'] # noqa: E501 - all_params.append('async_req') - all_params.append('_return_http_data_only') - all_params.append('_preload_content') - all_params.append('_request_timeout') + all_params = ["id"] # noqa: E501 + all_params.append("async_req") + all_params.append("_return_http_data_only") + all_params.append("_preload_content") + all_params.append("_request_timeout") params = locals() - for key, val in six.iteritems(params['kwargs']): + for key, val in six.iteritems(params["kwargs"]): if key not in all_params: raise TypeError( "Got an unexpected keyword argument '%s'" " to method delete_component" % key ) params[key] = val - del params['kwargs'] + del params["kwargs"] # verify the required parameter 'id' is set - if ('id' not in params or - params['id'] is None): - raise ValueError("Missing the required parameter `id` when calling `delete_component`") # noqa: E501 + if "id" not in params or params["id"] is None: + raise ValueError( + "Missing the required parameter `id` when calling `delete_component`" + ) collection_formats = {} path_params = {} - if 'id' in params: - path_params['id'] = params['id'] # noqa: E501 + if "id" in params: + path_params["id"] = params["id"] # noqa: E501 query_params = [] @@ -288,7 +301,8 @@ def delete_component_with_http_info(self, id, **kwargs): # noqa: E501 auth_settings = [] # noqa: E501 return self.api_client.call_api( - '/components/{id}', 'DELETE', + "/components/{id}", + "DELETE", path_params, query_params, header_params, @@ -297,11 +311,12 @@ def delete_component_with_http_info(self, id, **kwargs): # noqa: E501 files=local_var_files, response_type=None, # noqa: E501 auth_settings=auth_settings, - async_req=params.get('async_req'), - _return_http_data_only=params.get('_return_http_data_only'), - _preload_content=params.get('_preload_content', True), - _request_timeout=params.get('_request_timeout'), - collection_formats=collection_formats) + async_req=params.get("async_req"), + 
_return_http_data_only=params.get("_return_http_data_only"), + _preload_content=params.get("_preload_content", True), + _request_timeout=params.get("_request_timeout"), + collection_formats=collection_formats, + ) def download_component_files(self, id, **kwargs): # noqa: E501 """Returns the component artifacts compressed into a .tgz (.tar.gz) file. # noqa: E501 @@ -318,11 +333,15 @@ def download_component_files(self, id, **kwargs): # noqa: E501 If the method is called asynchronously, returns the request thread. """ - kwargs['_return_http_data_only'] = True - if kwargs.get('async_req'): - return self.download_component_files_with_http_info(id, **kwargs) # noqa: E501 + kwargs["_return_http_data_only"] = True + if kwargs.get("async_req"): + return self.download_component_files_with_http_info( + id, **kwargs + ) else: - (data) = self.download_component_files_with_http_info(id, **kwargs) # noqa: E501 + (data) = self.download_component_files_with_http_info( + id, **kwargs + ) return data def download_component_files_with_http_info(self, id, **kwargs): # noqa: E501 @@ -341,35 +360,38 @@ def download_component_files_with_http_info(self, id, **kwargs): # noqa: E501 returns the request thread. 
""" - all_params = ['id', 'include_generated_code'] # noqa: E501 - all_params.append('async_req') - all_params.append('_return_http_data_only') - all_params.append('_preload_content') - all_params.append('_request_timeout') + all_params = ["id", "include_generated_code"] # noqa: E501 + all_params.append("async_req") + all_params.append("_return_http_data_only") + all_params.append("_preload_content") + all_params.append("_request_timeout") params = locals() - for key, val in six.iteritems(params['kwargs']): + for key, val in six.iteritems(params["kwargs"]): if key not in all_params: raise TypeError( "Got an unexpected keyword argument '%s'" " to method download_component_files" % key ) params[key] = val - del params['kwargs'] + del params["kwargs"] # verify the required parameter 'id' is set - if ('id' not in params or - params['id'] is None): - raise ValueError("Missing the required parameter `id` when calling `download_component_files`") # noqa: E501 + if "id" not in params or params["id"] is None: + raise ValueError( + "Missing the required parameter `id` when calling `download_component_files`" + ) collection_formats = {} path_params = {} - if 'id' in params: - path_params['id'] = params['id'] # noqa: E501 + if "id" in params: + path_params["id"] = params["id"] # noqa: E501 query_params = [] - if 'include_generated_code' in params: - query_params.append(('include_generated_code', params['include_generated_code'])) # noqa: E501 + if "include_generated_code" in params: + query_params.append( + ("include_generated_code", params["include_generated_code"]) + ) header_params = {} @@ -378,27 +400,30 @@ def download_component_files_with_http_info(self, id, **kwargs): # noqa: E501 body_params = None # HTTP header `Accept` - header_params['Accept'] = self.api_client.select_header_accept( - ['application/gzip']) # noqa: E501 + header_params["Accept"] = self.api_client.select_header_accept( + ["application/gzip"] + ) # Authentication setting auth_settings = [] # noqa: E501 
return self.api_client.call_api( - '/components/{id}/download', 'GET', + "/components/{id}/download", + "GET", path_params, query_params, header_params, body=body_params, post_params=form_params, files=local_var_files, - response_type='file', # noqa: E501 + response_type="file", # noqa: E501 auth_settings=auth_settings, - async_req=params.get('async_req'), - _return_http_data_only=params.get('_return_http_data_only'), - _preload_content=params.get('_preload_content', False), - _request_timeout=params.get('_request_timeout'), - collection_formats=collection_formats) + async_req=params.get("async_req"), + _return_http_data_only=params.get("_return_http_data_only"), + _preload_content=params.get("_preload_content", False), + _request_timeout=params.get("_request_timeout"), + collection_formats=collection_formats, + ) def generate_component_code(self, id, **kwargs): # noqa: E501 """generate_component_code # noqa: E501 @@ -415,11 +440,15 @@ def generate_component_code(self, id, **kwargs): # noqa: E501 If the method is called asynchronously, returns the request thread. """ - kwargs['_return_http_data_only'] = True - if kwargs.get('async_req'): - return self.generate_component_code_with_http_info(id, **kwargs) # noqa: E501 + kwargs["_return_http_data_only"] = True + if kwargs.get("async_req"): + return self.generate_component_code_with_http_info( + id, **kwargs + ) else: - (data) = self.generate_component_code_with_http_info(id, **kwargs) # noqa: E501 + (data) = self.generate_component_code_with_http_info( + id, **kwargs + ) return data def generate_component_code_with_http_info(self, id, **kwargs): # noqa: E501 @@ -438,31 +467,32 @@ def generate_component_code_with_http_info(self, id, **kwargs): # noqa: E501 returns the request thread. 
""" - all_params = ['id'] # noqa: E501 - all_params.append('async_req') - all_params.append('_return_http_data_only') - all_params.append('_preload_content') - all_params.append('_request_timeout') + all_params = ["id"] # noqa: E501 + all_params.append("async_req") + all_params.append("_return_http_data_only") + all_params.append("_preload_content") + all_params.append("_request_timeout") params = locals() - for key, val in six.iteritems(params['kwargs']): + for key, val in six.iteritems(params["kwargs"]): if key not in all_params: raise TypeError( "Got an unexpected keyword argument '%s'" " to method generate_component_code" % key ) params[key] = val - del params['kwargs'] + del params["kwargs"] # verify the required parameter 'id' is set - if ('id' not in params or - params['id'] is None): - raise ValueError("Missing the required parameter `id` when calling `generate_component_code`") # noqa: E501 + if "id" not in params or params["id"] is None: + raise ValueError( + "Missing the required parameter `id` when calling `generate_component_code`" + ) collection_formats = {} path_params = {} - if 'id' in params: - path_params['id'] = params['id'] # noqa: E501 + if "id" in params: + path_params["id"] = params["id"] # noqa: E501 query_params = [] @@ -476,20 +506,22 @@ def generate_component_code_with_http_info(self, id, **kwargs): # noqa: E501 auth_settings = [] # noqa: E501 return self.api_client.call_api( - '/components/{id}/generate_code', 'GET', + "/components/{id}/generate_code", + "GET", path_params, query_params, header_params, body=body_params, post_params=form_params, files=local_var_files, - response_type='ApiGenerateCodeResponse', # noqa: E501 + response_type="ApiGenerateCodeResponse", # noqa: E501 auth_settings=auth_settings, - async_req=params.get('async_req'), - _return_http_data_only=params.get('_return_http_data_only'), - _preload_content=params.get('_preload_content', True), - _request_timeout=params.get('_request_timeout'), - 
collection_formats=collection_formats) + async_req=params.get("async_req"), + _return_http_data_only=params.get("_return_http_data_only"), + _preload_content=params.get("_preload_content", True), + _request_timeout=params.get("_request_timeout"), + collection_formats=collection_formats, + ) def get_component(self, id, **kwargs): # noqa: E501 """get_component # noqa: E501 @@ -505,11 +537,11 @@ def get_component(self, id, **kwargs): # noqa: E501 If the method is called asynchronously, returns the request thread. """ - kwargs['_return_http_data_only'] = True - if kwargs.get('async_req'): - return self.get_component_with_http_info(id, **kwargs) # noqa: E501 + kwargs["_return_http_data_only"] = True + if kwargs.get("async_req"): + return self.get_component_with_http_info(id, **kwargs) else: - (data) = self.get_component_with_http_info(id, **kwargs) # noqa: E501 + (data) = self.get_component_with_http_info(id, **kwargs) return data def get_component_with_http_info(self, id, **kwargs): # noqa: E501 @@ -527,31 +559,32 @@ def get_component_with_http_info(self, id, **kwargs): # noqa: E501 returns the request thread. 
""" - all_params = ['id'] # noqa: E501 - all_params.append('async_req') - all_params.append('_return_http_data_only') - all_params.append('_preload_content') - all_params.append('_request_timeout') + all_params = ["id"] # noqa: E501 + all_params.append("async_req") + all_params.append("_return_http_data_only") + all_params.append("_preload_content") + all_params.append("_request_timeout") params = locals() - for key, val in six.iteritems(params['kwargs']): + for key, val in six.iteritems(params["kwargs"]): if key not in all_params: raise TypeError( "Got an unexpected keyword argument '%s'" " to method get_component" % key ) params[key] = val - del params['kwargs'] + del params["kwargs"] # verify the required parameter 'id' is set - if ('id' not in params or - params['id'] is None): - raise ValueError("Missing the required parameter `id` when calling `get_component`") # noqa: E501 + if "id" not in params or params["id"] is None: + raise ValueError( + "Missing the required parameter `id` when calling `get_component`" + ) collection_formats = {} path_params = {} - if 'id' in params: - path_params['id'] = params['id'] # noqa: E501 + if "id" in params: + path_params["id"] = params["id"] # noqa: E501 query_params = [] @@ -565,20 +598,22 @@ def get_component_with_http_info(self, id, **kwargs): # noqa: E501 auth_settings = [] # noqa: E501 return self.api_client.call_api( - '/components/{id}', 'GET', + "/components/{id}", + "GET", path_params, query_params, header_params, body=body_params, post_params=form_params, files=local_var_files, - response_type='ApiComponent', # noqa: E501 + response_type="ApiComponent", # noqa: E501 auth_settings=auth_settings, - async_req=params.get('async_req'), - _return_http_data_only=params.get('_return_http_data_only'), - _preload_content=params.get('_preload_content', True), - _request_timeout=params.get('_request_timeout'), - collection_formats=collection_formats) + async_req=params.get("async_req"), + 
_return_http_data_only=params.get("_return_http_data_only"), + _preload_content=params.get("_preload_content", True), + _request_timeout=params.get("_request_timeout"), + collection_formats=collection_formats, + ) def get_component_template(self, id, **kwargs): # noqa: E501 """get_component_template # noqa: E501 @@ -594,11 +629,15 @@ def get_component_template(self, id, **kwargs): # noqa: E501 If the method is called asynchronously, returns the request thread. """ - kwargs['_return_http_data_only'] = True - if kwargs.get('async_req'): - return self.get_component_template_with_http_info(id, **kwargs) # noqa: E501 + kwargs["_return_http_data_only"] = True + if kwargs.get("async_req"): + return self.get_component_template_with_http_info( + id, **kwargs + ) else: - (data) = self.get_component_template_with_http_info(id, **kwargs) # noqa: E501 + (data) = self.get_component_template_with_http_info( + id, **kwargs + ) return data def get_component_template_with_http_info(self, id, **kwargs): # noqa: E501 @@ -616,31 +655,32 @@ def get_component_template_with_http_info(self, id, **kwargs): # noqa: E501 returns the request thread. 
""" - all_params = ['id'] # noqa: E501 - all_params.append('async_req') - all_params.append('_return_http_data_only') - all_params.append('_preload_content') - all_params.append('_request_timeout') + all_params = ["id"] # noqa: E501 + all_params.append("async_req") + all_params.append("_return_http_data_only") + all_params.append("_preload_content") + all_params.append("_request_timeout") params = locals() - for key, val in six.iteritems(params['kwargs']): + for key, val in six.iteritems(params["kwargs"]): if key not in all_params: raise TypeError( "Got an unexpected keyword argument '%s'" " to method get_component_template" % key ) params[key] = val - del params['kwargs'] + del params["kwargs"] # verify the required parameter 'id' is set - if ('id' not in params or - params['id'] is None): - raise ValueError("Missing the required parameter `id` when calling `get_component_template`") # noqa: E501 + if "id" not in params or params["id"] is None: + raise ValueError( + "Missing the required parameter `id` when calling `get_component_template`" + ) collection_formats = {} path_params = {} - if 'id' in params: - path_params['id'] = params['id'] # noqa: E501 + if "id" in params: + path_params["id"] = params["id"] # noqa: E501 query_params = [] @@ -654,20 +694,22 @@ def get_component_template_with_http_info(self, id, **kwargs): # noqa: E501 auth_settings = [] # noqa: E501 return self.api_client.call_api( - '/components/{id}/templates', 'GET', + "/components/{id}/templates", + "GET", path_params, query_params, header_params, body=body_params, post_params=form_params, files=local_var_files, - response_type='ApiGetTemplateResponse', # noqa: E501 + response_type="ApiGetTemplateResponse", # noqa: E501 auth_settings=auth_settings, - async_req=params.get('async_req'), - _return_http_data_only=params.get('_return_http_data_only'), - _preload_content=params.get('_preload_content', True), - _request_timeout=params.get('_request_timeout'), - collection_formats=collection_formats) + 
async_req=params.get("async_req"), + _return_http_data_only=params.get("_return_http_data_only"), + _preload_content=params.get("_preload_content", True), + _request_timeout=params.get("_request_timeout"), + collection_formats=collection_formats, + ) def list_components(self, **kwargs): # noqa: E501 """list_components # noqa: E501 @@ -686,11 +728,11 @@ def list_components(self, **kwargs): # noqa: E501 If the method is called asynchronously, returns the request thread. """ - kwargs['_return_http_data_only'] = True - if kwargs.get('async_req'): - return self.list_components_with_http_info(**kwargs) # noqa: E501 + kwargs["_return_http_data_only"] = True + if kwargs.get("async_req"): + return self.list_components_with_http_info(**kwargs) else: - (data) = self.list_components_with_http_info(**kwargs) # noqa: E501 + (data) = self.list_components_with_http_info(**kwargs) return data def list_components_with_http_info(self, **kwargs): # noqa: E501 @@ -711,35 +753,35 @@ def list_components_with_http_info(self, **kwargs): # noqa: E501 returns the request thread. 
""" - all_params = ['page_token', 'page_size', 'sort_by', 'filter'] # noqa: E501 - all_params.append('async_req') - all_params.append('_return_http_data_only') - all_params.append('_preload_content') - all_params.append('_request_timeout') + all_params = ["page_token", "page_size", "sort_by", "filter"] # noqa: E501 + all_params.append("async_req") + all_params.append("_return_http_data_only") + all_params.append("_preload_content") + all_params.append("_request_timeout") params = locals() - for key, val in six.iteritems(params['kwargs']): + for key, val in six.iteritems(params["kwargs"]): if key not in all_params: raise TypeError( "Got an unexpected keyword argument '%s'" " to method list_components" % key ) params[key] = val - del params['kwargs'] + del params["kwargs"] collection_formats = {} path_params = {} query_params = [] - if 'page_token' in params: - query_params.append(('page_token', params['page_token'])) # noqa: E501 - if 'page_size' in params: - query_params.append(('page_size', params['page_size'])) # noqa: E501 - if 'sort_by' in params: - query_params.append(('sort_by', params['sort_by'])) # noqa: E501 - if 'filter' in params: - query_params.append(('filter', params['filter'])) # noqa: E501 + if "page_token" in params: + query_params.append(("page_token", params["page_token"])) + if "page_size" in params: + query_params.append(("page_size", params["page_size"])) + if "sort_by" in params: + query_params.append(("sort_by", params["sort_by"])) + if "filter" in params: + query_params.append(("filter", params["filter"])) header_params = {} @@ -751,20 +793,22 @@ def list_components_with_http_info(self, **kwargs): # noqa: E501 auth_settings = [] # noqa: E501 return self.api_client.call_api( - '/components', 'GET', + "/components", + "GET", path_params, query_params, header_params, body=body_params, post_params=form_params, files=local_var_files, - response_type='ApiListComponentsResponse', # noqa: E501 + response_type="ApiListComponentsResponse", # noqa: 
E501 auth_settings=auth_settings, - async_req=params.get('async_req'), - _return_http_data_only=params.get('_return_http_data_only'), - _preload_content=params.get('_preload_content', True), - _request_timeout=params.get('_request_timeout'), - collection_formats=collection_formats) + async_req=params.get("async_req"), + _return_http_data_only=params.get("_return_http_data_only"), + _preload_content=params.get("_preload_content", True), + _request_timeout=params.get("_request_timeout"), + collection_formats=collection_formats, + ) def run_component(self, id, parameters, **kwargs): # noqa: E501 """run_component # noqa: E501 @@ -782,11 +826,15 @@ def run_component(self, id, parameters, **kwargs): # noqa: E501 If the method is called asynchronously, returns the request thread. """ - kwargs['_return_http_data_only'] = True - if kwargs.get('async_req'): - return self.run_component_with_http_info(id, parameters, **kwargs) # noqa: E501 + kwargs["_return_http_data_only"] = True + if kwargs.get("async_req"): + return self.run_component_with_http_info( + id, parameters, **kwargs + ) else: - (data) = self.run_component_with_http_info(id, parameters, **kwargs) # noqa: E501 + (data) = self.run_component_with_http_info( + id, parameters, **kwargs + ) return data def run_component_with_http_info(self, id, parameters, **kwargs): # noqa: E501 @@ -806,39 +854,41 @@ def run_component_with_http_info(self, id, parameters, **kwargs): # noqa: E501 returns the request thread. 
""" - all_params = ['id', 'parameters', 'run_name'] # noqa: E501 - all_params.append('async_req') - all_params.append('_return_http_data_only') - all_params.append('_preload_content') - all_params.append('_request_timeout') + all_params = ["id", "parameters", "run_name"] # noqa: E501 + all_params.append("async_req") + all_params.append("_return_http_data_only") + all_params.append("_preload_content") + all_params.append("_request_timeout") params = locals() - for key, val in six.iteritems(params['kwargs']): + for key, val in six.iteritems(params["kwargs"]): if key not in all_params: raise TypeError( "Got an unexpected keyword argument '%s'" " to method run_component" % key ) params[key] = val - del params['kwargs'] + del params["kwargs"] # verify the required parameter 'id' is set - if ('id' not in params or - params['id'] is None): - raise ValueError("Missing the required parameter `id` when calling `run_component`") # noqa: E501 + if "id" not in params or params["id"] is None: + raise ValueError( + "Missing the required parameter `id` when calling `run_component`" + ) # verify the required parameter 'parameters' is set - if ('parameters' not in params or - params['parameters'] is None): - raise ValueError("Missing the required parameter `parameters` when calling `run_component`") # noqa: E501 + if "parameters" not in params or params["parameters"] is None: + raise ValueError( + "Missing the required parameter `parameters` when calling `run_component`" + ) collection_formats = {} path_params = {} - if 'id' in params: - path_params['id'] = params['id'] # noqa: E501 + if "id" in params: + path_params["id"] = params["id"] # noqa: E501 query_params = [] - if 'run_name' in params: - query_params.append(('run_name', params['run_name'])) # noqa: E501 + if "run_name" in params: + query_params.append(("run_name", params["run_name"])) header_params = {} @@ -846,26 +896,28 @@ def run_component_with_http_info(self, id, parameters, **kwargs): # noqa: E501 local_var_files = {} 
body_params = None - if 'parameters' in params: - body_params = params['parameters'] + if "parameters" in params: + body_params = params["parameters"] # Authentication setting auth_settings = [] # noqa: E501 return self.api_client.call_api( - '/components/{id}/run', 'POST', + "/components/{id}/run", + "POST", path_params, query_params, header_params, body=body_params, post_params=form_params, files=local_var_files, - response_type='ApiRunCodeResponse', # noqa: E501 + response_type="ApiRunCodeResponse", # noqa: E501 auth_settings=auth_settings, - async_req=params.get('async_req'), - _return_http_data_only=params.get('_return_http_data_only'), - _preload_content=params.get('_preload_content', True), - _request_timeout=params.get('_request_timeout'), - collection_formats=collection_formats) + async_req=params.get("async_req"), + _return_http_data_only=params.get("_return_http_data_only"), + _preload_content=params.get("_preload_content", True), + _request_timeout=params.get("_request_timeout"), + collection_formats=collection_formats, + ) def set_featured_components(self, component_ids, **kwargs): # noqa: E501 """set_featured_components # noqa: E501 @@ -881,14 +933,20 @@ def set_featured_components(self, component_ids, **kwargs): # noqa: E501 If the method is called asynchronously, returns the request thread. 
""" - kwargs['_return_http_data_only'] = True - if kwargs.get('async_req'): - return self.set_featured_components_with_http_info(component_ids, **kwargs) # noqa: E501 + kwargs["_return_http_data_only"] = True + if kwargs.get("async_req"): + return self.set_featured_components_with_http_info( + component_ids, **kwargs + ) else: - (data) = self.set_featured_components_with_http_info(component_ids, **kwargs) # noqa: E501 + (data) = self.set_featured_components_with_http_info( + component_ids, **kwargs + ) return data - def set_featured_components_with_http_info(self, component_ids, **kwargs): # noqa: E501 + def set_featured_components_with_http_info( + self, component_ids, **kwargs + ): # noqa: E501 """set_featured_components # noqa: E501 This method makes a synchronous HTTP request by default. To make an @@ -903,25 +961,26 @@ def set_featured_components_with_http_info(self, component_ids, **kwargs): # no returns the request thread. """ - all_params = ['component_ids'] # noqa: E501 - all_params.append('async_req') - all_params.append('_return_http_data_only') - all_params.append('_preload_content') - all_params.append('_request_timeout') + all_params = ["component_ids"] # noqa: E501 + all_params.append("async_req") + all_params.append("_return_http_data_only") + all_params.append("_preload_content") + all_params.append("_request_timeout") params = locals() - for key, val in six.iteritems(params['kwargs']): + for key, val in six.iteritems(params["kwargs"]): if key not in all_params: raise TypeError( "Got an unexpected keyword argument '%s'" " to method set_featured_components" % key ) params[key] = val - del params['kwargs'] + del params["kwargs"] # verify the required parameter 'component_ids' is set - if ('component_ids' not in params or - params['component_ids'] is None): - raise ValueError("Missing the required parameter `component_ids` when calling `set_featured_components`") # noqa: E501 + if "component_ids" not in params or params["component_ids"] is None: + 
raise ValueError( + "Missing the required parameter `component_ids` when calling `set_featured_components`" + ) collection_formats = {} @@ -935,13 +994,14 @@ def set_featured_components_with_http_info(self, component_ids, **kwargs): # no local_var_files = {} body_params = None - if 'component_ids' in params: - body_params = params['component_ids'] + if "component_ids" in params: + body_params = params["component_ids"] # Authentication setting auth_settings = [] # noqa: E501 return self.api_client.call_api( - '/components/featured', 'POST', + "/components/featured", + "POST", path_params, query_params, header_params, @@ -950,11 +1010,12 @@ def set_featured_components_with_http_info(self, component_ids, **kwargs): # no files=local_var_files, response_type=None, # noqa: E501 auth_settings=auth_settings, - async_req=params.get('async_req'), - _return_http_data_only=params.get('_return_http_data_only'), - _preload_content=params.get('_preload_content', True), - _request_timeout=params.get('_request_timeout'), - collection_formats=collection_formats) + async_req=params.get("async_req"), + _return_http_data_only=params.get("_return_http_data_only"), + _preload_content=params.get("_preload_content", True), + _request_timeout=params.get("_request_timeout"), + collection_formats=collection_formats, + ) def upload_component(self, uploadfile, **kwargs): # noqa: E501 """upload_component # noqa: E501 @@ -971,11 +1032,15 @@ def upload_component(self, uploadfile, **kwargs): # noqa: E501 If the method is called asynchronously, returns the request thread. 
""" - kwargs['_return_http_data_only'] = True - if kwargs.get('async_req'): - return self.upload_component_with_http_info(uploadfile, **kwargs) # noqa: E501 + kwargs["_return_http_data_only"] = True + if kwargs.get("async_req"): + return self.upload_component_with_http_info( + uploadfile, **kwargs + ) else: - (data) = self.upload_component_with_http_info(uploadfile, **kwargs) # noqa: E501 + (data) = self.upload_component_with_http_info( + uploadfile, **kwargs + ) return data def upload_component_with_http_info(self, uploadfile, **kwargs): # noqa: E501 @@ -994,68 +1059,75 @@ def upload_component_with_http_info(self, uploadfile, **kwargs): # noqa: E501 returns the request thread. """ - all_params = ['uploadfile', 'name'] # noqa: E501 - all_params.append('async_req') - all_params.append('_return_http_data_only') - all_params.append('_preload_content') - all_params.append('_request_timeout') + all_params = ["uploadfile", "name"] # noqa: E501 + all_params.append("async_req") + all_params.append("_return_http_data_only") + all_params.append("_preload_content") + all_params.append("_request_timeout") params = locals() - for key, val in six.iteritems(params['kwargs']): + for key, val in six.iteritems(params["kwargs"]): if key not in all_params: raise TypeError( "Got an unexpected keyword argument '%s'" " to method upload_component" % key ) params[key] = val - del params['kwargs'] + del params["kwargs"] # verify the required parameter 'uploadfile' is set - if ('uploadfile' not in params or - params['uploadfile'] is None): - raise ValueError("Missing the required parameter `uploadfile` when calling `upload_component`") # noqa: E501 + if "uploadfile" not in params or params["uploadfile"] is None: + raise ValueError( + "Missing the required parameter `uploadfile` when calling `upload_component`" + ) collection_formats = {} path_params = {} query_params = [] - if 'name' in params: - query_params.append(('name', params['name'])) # noqa: E501 + if "name" in params: + 
query_params.append(("name", params["name"])) header_params = {} form_params = [] local_var_files = {} - if 'uploadfile' in params: - local_var_files['uploadfile'] = params['uploadfile'] # noqa: E501 + if "uploadfile" in params: + local_var_files["uploadfile"] = params["uploadfile"] # noqa: E501 body_params = None # HTTP header `Accept` - header_params['Accept'] = self.api_client.select_header_accept( - ['application/json']) # noqa: E501 + header_params["Accept"] = self.api_client.select_header_accept( + ["application/json"] + ) # HTTP header `Content-Type` - header_params['Content-Type'] = self.api_client.select_header_content_type( # noqa: E501 - ['multipart/form-data']) # noqa: E501 + header_params[ + "Content-Type" + ] = self.api_client.select_header_content_type( # noqa: E501 + ["multipart/form-data"] + ) # Authentication setting auth_settings = [] # noqa: E501 return self.api_client.call_api( - '/components/upload', 'POST', + "/components/upload", + "POST", path_params, query_params, header_params, body=body_params, post_params=form_params, files=local_var_files, - response_type='ApiComponent', # noqa: E501 + response_type="ApiComponent", # noqa: E501 auth_settings=auth_settings, - async_req=params.get('async_req'), - _return_http_data_only=params.get('_return_http_data_only'), - _preload_content=params.get('_preload_content', True), - _request_timeout=params.get('_request_timeout'), - collection_formats=collection_formats) + async_req=params.get("async_req"), + _return_http_data_only=params.get("_return_http_data_only"), + _preload_content=params.get("_preload_content", True), + _request_timeout=params.get("_request_timeout"), + collection_formats=collection_formats, + ) def upload_component_file(self, id, uploadfile, **kwargs): # noqa: E501 """upload_component_file # noqa: E501 @@ -1072,14 +1144,20 @@ def upload_component_file(self, id, uploadfile, **kwargs): # noqa: E501 If the method is called asynchronously, returns the request thread. 
""" - kwargs['_return_http_data_only'] = True - if kwargs.get('async_req'): - return self.upload_component_file_with_http_info(id, uploadfile, **kwargs) # noqa: E501 + kwargs["_return_http_data_only"] = True + if kwargs.get("async_req"): + return self.upload_component_file_with_http_info( + id, uploadfile, **kwargs + ) else: - (data) = self.upload_component_file_with_http_info(id, uploadfile, **kwargs) # noqa: E501 + (data) = self.upload_component_file_with_http_info( + id, uploadfile, **kwargs + ) return data - def upload_component_file_with_http_info(self, id, uploadfile, **kwargs): # noqa: E501 + def upload_component_file_with_http_info( + self, id, uploadfile, **kwargs + ): # noqa: E501 """upload_component_file # noqa: E501 This method makes a synchronous HTTP request by default. To make an @@ -1095,35 +1173,37 @@ def upload_component_file_with_http_info(self, id, uploadfile, **kwargs): # noq returns the request thread. """ - all_params = ['id', 'uploadfile'] # noqa: E501 - all_params.append('async_req') - all_params.append('_return_http_data_only') - all_params.append('_preload_content') - all_params.append('_request_timeout') + all_params = ["id", "uploadfile"] # noqa: E501 + all_params.append("async_req") + all_params.append("_return_http_data_only") + all_params.append("_preload_content") + all_params.append("_request_timeout") params = locals() - for key, val in six.iteritems(params['kwargs']): + for key, val in six.iteritems(params["kwargs"]): if key not in all_params: raise TypeError( "Got an unexpected keyword argument '%s'" " to method upload_component_file" % key ) params[key] = val - del params['kwargs'] + del params["kwargs"] # verify the required parameter 'id' is set - if ('id' not in params or - params['id'] is None): - raise ValueError("Missing the required parameter `id` when calling `upload_component_file`") # noqa: E501 + if "id" not in params or params["id"] is None: + raise ValueError( + "Missing the required parameter `id` when calling 
`upload_component_file`" + ) # verify the required parameter 'uploadfile' is set - if ('uploadfile' not in params or - params['uploadfile'] is None): - raise ValueError("Missing the required parameter `uploadfile` when calling `upload_component_file`") # noqa: E501 + if "uploadfile" not in params or params["uploadfile"] is None: + raise ValueError( + "Missing the required parameter `uploadfile` when calling `upload_component_file`" + ) collection_formats = {} path_params = {} - if 'id' in params: - path_params['id'] = params['id'] # noqa: E501 + if "id" in params: + path_params["id"] = params["id"] # noqa: E501 query_params = [] @@ -1131,36 +1211,42 @@ def upload_component_file_with_http_info(self, id, uploadfile, **kwargs): # noq form_params = [] local_var_files = {} - if 'uploadfile' in params: - local_var_files['uploadfile'] = params['uploadfile'] # noqa: E501 + if "uploadfile" in params: + local_var_files["uploadfile"] = params["uploadfile"] # noqa: E501 body_params = None # HTTP header `Accept` - header_params['Accept'] = self.api_client.select_header_accept( - ['application/json']) # noqa: E501 + header_params["Accept"] = self.api_client.select_header_accept( + ["application/json"] + ) # HTTP header `Content-Type` - header_params['Content-Type'] = self.api_client.select_header_content_type( # noqa: E501 - ['multipart/form-data']) # noqa: E501 + header_params[ + "Content-Type" + ] = self.api_client.select_header_content_type( # noqa: E501 + ["multipart/form-data"] + ) # Authentication setting auth_settings = [] # noqa: E501 return self.api_client.call_api( - '/components/{id}/upload', 'POST', + "/components/{id}/upload", + "POST", path_params, query_params, header_params, body=body_params, post_params=form_params, files=local_var_files, - response_type='ApiComponent', # noqa: E501 + response_type="ApiComponent", # noqa: E501 auth_settings=auth_settings, - async_req=params.get('async_req'), - _return_http_data_only=params.get('_return_http_data_only'), - 
_preload_content=params.get('_preload_content', True), - _request_timeout=params.get('_request_timeout'), - collection_formats=collection_formats) + async_req=params.get("async_req"), + _return_http_data_only=params.get("_return_http_data_only"), + _preload_content=params.get("_preload_content", True), + _request_timeout=params.get("_request_timeout"), + collection_formats=collection_formats, + ) def upload_component_from_url(self, url, **kwargs): # noqa: E501 """upload_component_from_url # noqa: E501 @@ -1178,11 +1264,15 @@ def upload_component_from_url(self, url, **kwargs): # noqa: E501 If the method is called asynchronously, returns the request thread. """ - kwargs['_return_http_data_only'] = True - if kwargs.get('async_req'): - return self.upload_component_from_url_with_http_info(url, **kwargs) # noqa: E501 + kwargs["_return_http_data_only"] = True + if kwargs.get("async_req"): + return self.upload_component_from_url_with_http_info( + url, **kwargs + ) else: - (data) = self.upload_component_from_url_with_http_info(url, **kwargs) # noqa: E501 + (data) = self.upload_component_from_url_with_http_info( + url, **kwargs + ) return data def upload_component_from_url_with_http_info(self, url, **kwargs): # noqa: E501 @@ -1202,67 +1292,74 @@ def upload_component_from_url_with_http_info(self, url, **kwargs): # noqa: E501 returns the request thread. 
""" - all_params = ['url', 'name', 'access_token'] # noqa: E501 - all_params.append('async_req') - all_params.append('_return_http_data_only') - all_params.append('_preload_content') - all_params.append('_request_timeout') + all_params = ["url", "name", "access_token"] # noqa: E501 + all_params.append("async_req") + all_params.append("_return_http_data_only") + all_params.append("_preload_content") + all_params.append("_request_timeout") params = locals() - for key, val in six.iteritems(params['kwargs']): + for key, val in six.iteritems(params["kwargs"]): if key not in all_params: raise TypeError( "Got an unexpected keyword argument '%s'" " to method upload_component_from_url" % key ) params[key] = val - del params['kwargs'] + del params["kwargs"] # verify the required parameter 'url' is set - if ('url' not in params or - params['url'] is None): - raise ValueError("Missing the required parameter `url` when calling `upload_component_from_url`") # noqa: E501 + if "url" not in params or params["url"] is None: + raise ValueError( + "Missing the required parameter `url` when calling `upload_component_from_url`" + ) collection_formats = {} path_params = {} query_params = [] - if 'name' in params: - query_params.append(('name', params['name'])) # noqa: E501 + if "name" in params: + query_params.append(("name", params["name"])) header_params = {} form_params = [] local_var_files = {} - if 'url' in params: - form_params.append(('url', params['url'])) # noqa: E501 - if 'access_token' in params: - form_params.append(('access_token', params['access_token'])) # noqa: E501 + if "url" in params: + form_params.append(("url", params["url"])) + if "access_token" in params: + form_params.append(("access_token", params["access_token"])) body_params = None # HTTP header `Accept` - header_params['Accept'] = self.api_client.select_header_accept( - ['application/json']) # noqa: E501 + header_params["Accept"] = self.api_client.select_header_accept( + ["application/json"] + ) # HTTP header 
`Content-Type` - header_params['Content-Type'] = self.api_client.select_header_content_type( # noqa: E501 - ['multipart/form-data']) # noqa: E501 + header_params[ + "Content-Type" + ] = self.api_client.select_header_content_type( # noqa: E501 + ["multipart/form-data"] + ) # Authentication setting auth_settings = [] # noqa: E501 return self.api_client.call_api( - '/components/upload_from_url', 'POST', + "/components/upload_from_url", + "POST", path_params, query_params, header_params, body=body_params, post_params=form_params, files=local_var_files, - response_type='ApiComponent', # noqa: E501 + response_type="ApiComponent", # noqa: E501 auth_settings=auth_settings, - async_req=params.get('async_req'), - _return_http_data_only=params.get('_return_http_data_only'), - _preload_content=params.get('_preload_content', True), - _request_timeout=params.get('_request_timeout'), - collection_formats=collection_formats) + async_req=params.get("async_req"), + _return_http_data_only=params.get("_return_http_data_only"), + _preload_content=params.get("_preload_content", True), + _request_timeout=params.get("_request_timeout"), + collection_formats=collection_formats, + ) diff --git a/api/client/swagger_client/api/credential_service_api.py b/api/client/swagger_client/api/credential_service_api.py index 8f9d60d8..a289c52b 100644 --- a/api/client/swagger_client/api/credential_service_api.py +++ b/api/client/swagger_client/api/credential_service_api.py @@ -19,7 +19,7 @@ import re # noqa: F401 # python 2 and python 3 compatibility library -import six +import six # noqa: F401 from swagger_client.api_client import ApiClient @@ -51,11 +51,11 @@ def create_credential(self, body, **kwargs): # noqa: E501 If the method is called asynchronously, returns the request thread. 
""" - kwargs['_return_http_data_only'] = True - if kwargs.get('async_req'): - return self.create_credential_with_http_info(body, **kwargs) # noqa: E501 + kwargs["_return_http_data_only"] = True + if kwargs.get("async_req"): + return self.create_credential_with_http_info(body, **kwargs) else: - (data) = self.create_credential_with_http_info(body, **kwargs) # noqa: E501 + (data) = self.create_credential_with_http_info(body, **kwargs) return data def create_credential_with_http_info(self, body, **kwargs): # noqa: E501 @@ -74,25 +74,26 @@ def create_credential_with_http_info(self, body, **kwargs): # noqa: E501 returns the request thread. """ - all_params = ['body'] # noqa: E501 - all_params.append('async_req') - all_params.append('_return_http_data_only') - all_params.append('_preload_content') - all_params.append('_request_timeout') + all_params = ["body"] # noqa: E501 + all_params.append("async_req") + all_params.append("_return_http_data_only") + all_params.append("_preload_content") + all_params.append("_request_timeout") params = locals() - for key, val in six.iteritems(params['kwargs']): + for key, val in six.iteritems(params["kwargs"]): if key not in all_params: raise TypeError( "Got an unexpected keyword argument '%s'" " to method create_credential" % key ) params[key] = val - del params['kwargs'] + del params["kwargs"] # verify the required parameter 'body' is set - if ('body' not in params or - params['body'] is None): - raise ValueError("Missing the required parameter `body` when calling `create_credential`") # noqa: E501 + if "body" not in params or params["body"] is None: + raise ValueError( + "Missing the required parameter `body` when calling `create_credential`" + ) collection_formats = {} @@ -106,30 +107,35 @@ def create_credential_with_http_info(self, body, **kwargs): # noqa: E501 local_var_files = {} body_params = None - if 'body' in params: - body_params = params['body'] + if "body" in params: + body_params = params["body"] # HTTP header 
`Content-Type` - header_params['Content-Type'] = self.api_client.select_header_content_type( # noqa: E501 - ['application/json']) # noqa: E501 + header_params[ + "Content-Type" + ] = self.api_client.select_header_content_type( # noqa: E501 + ["application/json"] + ) # Authentication setting auth_settings = [] # noqa: E501 return self.api_client.call_api( - '/credentials', 'POST', + "/credentials", + "POST", path_params, query_params, header_params, body=body_params, post_params=form_params, files=local_var_files, - response_type='ApiCredential', # noqa: E501 + response_type="ApiCredential", # noqa: E501 auth_settings=auth_settings, - async_req=params.get('async_req'), - _return_http_data_only=params.get('_return_http_data_only'), - _preload_content=params.get('_preload_content', True), - _request_timeout=params.get('_request_timeout'), - collection_formats=collection_formats) + async_req=params.get("async_req"), + _return_http_data_only=params.get("_return_http_data_only"), + _preload_content=params.get("_preload_content", True), + _request_timeout=params.get("_request_timeout"), + collection_formats=collection_formats, + ) def delete_credential(self, id, **kwargs): # noqa: E501 """delete_credential # noqa: E501 @@ -145,11 +151,11 @@ def delete_credential(self, id, **kwargs): # noqa: E501 If the method is called asynchronously, returns the request thread. 
""" - kwargs['_return_http_data_only'] = True - if kwargs.get('async_req'): - return self.delete_credential_with_http_info(id, **kwargs) # noqa: E501 + kwargs["_return_http_data_only"] = True + if kwargs.get("async_req"): + return self.delete_credential_with_http_info(id, **kwargs) else: - (data) = self.delete_credential_with_http_info(id, **kwargs) # noqa: E501 + (data) = self.delete_credential_with_http_info(id, **kwargs) return data def delete_credential_with_http_info(self, id, **kwargs): # noqa: E501 @@ -167,31 +173,32 @@ def delete_credential_with_http_info(self, id, **kwargs): # noqa: E501 returns the request thread. """ - all_params = ['id'] # noqa: E501 - all_params.append('async_req') - all_params.append('_return_http_data_only') - all_params.append('_preload_content') - all_params.append('_request_timeout') + all_params = ["id"] # noqa: E501 + all_params.append("async_req") + all_params.append("_return_http_data_only") + all_params.append("_preload_content") + all_params.append("_request_timeout") params = locals() - for key, val in six.iteritems(params['kwargs']): + for key, val in six.iteritems(params["kwargs"]): if key not in all_params: raise TypeError( "Got an unexpected keyword argument '%s'" " to method delete_credential" % key ) params[key] = val - del params['kwargs'] + del params["kwargs"] # verify the required parameter 'id' is set - if ('id' not in params or - params['id'] is None): - raise ValueError("Missing the required parameter `id` when calling `delete_credential`") # noqa: E501 + if "id" not in params or params["id"] is None: + raise ValueError( + "Missing the required parameter `id` when calling `delete_credential`" + ) collection_formats = {} path_params = {} - if 'id' in params: - path_params['id'] = params['id'] # noqa: E501 + if "id" in params: + path_params["id"] = params["id"] # noqa: E501 query_params = [] @@ -205,7 +212,8 @@ def delete_credential_with_http_info(self, id, **kwargs): # noqa: E501 auth_settings = [] # noqa: E501 
return self.api_client.call_api( - '/credentials/{id}', 'DELETE', + "/credentials/{id}", + "DELETE", path_params, query_params, header_params, @@ -214,11 +222,12 @@ def delete_credential_with_http_info(self, id, **kwargs): # noqa: E501 files=local_var_files, response_type=None, # noqa: E501 auth_settings=auth_settings, - async_req=params.get('async_req'), - _return_http_data_only=params.get('_return_http_data_only'), - _preload_content=params.get('_preload_content', True), - _request_timeout=params.get('_request_timeout'), - collection_formats=collection_formats) + async_req=params.get("async_req"), + _return_http_data_only=params.get("_return_http_data_only"), + _preload_content=params.get("_preload_content", True), + _request_timeout=params.get("_request_timeout"), + collection_formats=collection_formats, + ) def get_credential(self, id, **kwargs): # noqa: E501 """get_credential # noqa: E501 @@ -234,11 +243,11 @@ def get_credential(self, id, **kwargs): # noqa: E501 If the method is called asynchronously, returns the request thread. """ - kwargs['_return_http_data_only'] = True - if kwargs.get('async_req'): - return self.get_credential_with_http_info(id, **kwargs) # noqa: E501 + kwargs["_return_http_data_only"] = True + if kwargs.get("async_req"): + return self.get_credential_with_http_info(id, **kwargs) else: - (data) = self.get_credential_with_http_info(id, **kwargs) # noqa: E501 + (data) = self.get_credential_with_http_info(id, **kwargs) return data def get_credential_with_http_info(self, id, **kwargs): # noqa: E501 @@ -256,31 +265,32 @@ def get_credential_with_http_info(self, id, **kwargs): # noqa: E501 returns the request thread. 
""" - all_params = ['id'] # noqa: E501 - all_params.append('async_req') - all_params.append('_return_http_data_only') - all_params.append('_preload_content') - all_params.append('_request_timeout') + all_params = ["id"] # noqa: E501 + all_params.append("async_req") + all_params.append("_return_http_data_only") + all_params.append("_preload_content") + all_params.append("_request_timeout") params = locals() - for key, val in six.iteritems(params['kwargs']): + for key, val in six.iteritems(params["kwargs"]): if key not in all_params: raise TypeError( "Got an unexpected keyword argument '%s'" " to method get_credential" % key ) params[key] = val - del params['kwargs'] + del params["kwargs"] # verify the required parameter 'id' is set - if ('id' not in params or - params['id'] is None): - raise ValueError("Missing the required parameter `id` when calling `get_credential`") # noqa: E501 + if "id" not in params or params["id"] is None: + raise ValueError( + "Missing the required parameter `id` when calling `get_credential`" + ) collection_formats = {} path_params = {} - if 'id' in params: - path_params['id'] = params['id'] # noqa: E501 + if "id" in params: + path_params["id"] = params["id"] # noqa: E501 query_params = [] @@ -294,20 +304,22 @@ def get_credential_with_http_info(self, id, **kwargs): # noqa: E501 auth_settings = [] # noqa: E501 return self.api_client.call_api( - '/credentials/{id}', 'GET', + "/credentials/{id}", + "GET", path_params, query_params, header_params, body=body_params, post_params=form_params, files=local_var_files, - response_type='ApiCredential', # noqa: E501 + response_type="ApiCredential", # noqa: E501 auth_settings=auth_settings, - async_req=params.get('async_req'), - _return_http_data_only=params.get('_return_http_data_only'), - _preload_content=params.get('_preload_content', True), - _request_timeout=params.get('_request_timeout'), - collection_formats=collection_formats) + async_req=params.get("async_req"), + 
_return_http_data_only=params.get("_return_http_data_only"), + _preload_content=params.get("_preload_content", True), + _request_timeout=params.get("_request_timeout"), + collection_formats=collection_formats, + ) def list_credentials(self, **kwargs): # noqa: E501 """list_credentials # noqa: E501 @@ -326,11 +338,11 @@ def list_credentials(self, **kwargs): # noqa: E501 If the method is called asynchronously, returns the request thread. """ - kwargs['_return_http_data_only'] = True - if kwargs.get('async_req'): - return self.list_credentials_with_http_info(**kwargs) # noqa: E501 + kwargs["_return_http_data_only"] = True + if kwargs.get("async_req"): + return self.list_credentials_with_http_info(**kwargs) else: - (data) = self.list_credentials_with_http_info(**kwargs) # noqa: E501 + (data) = self.list_credentials_with_http_info(**kwargs) return data def list_credentials_with_http_info(self, **kwargs): # noqa: E501 @@ -351,35 +363,35 @@ def list_credentials_with_http_info(self, **kwargs): # noqa: E501 returns the request thread. 
""" - all_params = ['page_token', 'page_size', 'sort_by', 'filter'] # noqa: E501 - all_params.append('async_req') - all_params.append('_return_http_data_only') - all_params.append('_preload_content') - all_params.append('_request_timeout') + all_params = ["page_token", "page_size", "sort_by", "filter"] # noqa: E501 + all_params.append("async_req") + all_params.append("_return_http_data_only") + all_params.append("_preload_content") + all_params.append("_request_timeout") params = locals() - for key, val in six.iteritems(params['kwargs']): + for key, val in six.iteritems(params["kwargs"]): if key not in all_params: raise TypeError( "Got an unexpected keyword argument '%s'" " to method list_credentials" % key ) params[key] = val - del params['kwargs'] + del params["kwargs"] collection_formats = {} path_params = {} query_params = [] - if 'page_token' in params: - query_params.append(('page_token', params['page_token'])) # noqa: E501 - if 'page_size' in params: - query_params.append(('page_size', params['page_size'])) # noqa: E501 - if 'sort_by' in params: - query_params.append(('sort_by', params['sort_by'])) # noqa: E501 - if 'filter' in params: - query_params.append(('filter', params['filter'])) # noqa: E501 + if "page_token" in params: + query_params.append(("page_token", params["page_token"])) + if "page_size" in params: + query_params.append(("page_size", params["page_size"])) + if "sort_by" in params: + query_params.append(("sort_by", params["sort_by"])) + if "filter" in params: + query_params.append(("filter", params["filter"])) header_params = {} @@ -391,17 +403,19 @@ def list_credentials_with_http_info(self, **kwargs): # noqa: E501 auth_settings = [] # noqa: E501 return self.api_client.call_api( - '/credentials', 'GET', + "/credentials", + "GET", path_params, query_params, header_params, body=body_params, post_params=form_params, files=local_var_files, - response_type='ApiListCredentialsResponse', # noqa: E501 + response_type="ApiListCredentialsResponse", # 
noqa: E501 auth_settings=auth_settings, - async_req=params.get('async_req'), - _return_http_data_only=params.get('_return_http_data_only'), - _preload_content=params.get('_preload_content', True), - _request_timeout=params.get('_request_timeout'), - collection_formats=collection_formats) + async_req=params.get("async_req"), + _return_http_data_only=params.get("_return_http_data_only"), + _preload_content=params.get("_preload_content", True), + _request_timeout=params.get("_request_timeout"), + collection_formats=collection_formats, + ) diff --git a/api/client/swagger_client/api/dataset_service_api.py b/api/client/swagger_client/api/dataset_service_api.py index b23151e1..9d5d944a 100644 --- a/api/client/swagger_client/api/dataset_service_api.py +++ b/api/client/swagger_client/api/dataset_service_api.py @@ -19,7 +19,7 @@ import re # noqa: F401 # python 2 and python 3 compatibility library -import six +import six # noqa: F401 from swagger_client.api_client import ApiClient @@ -50,14 +50,20 @@ def approve_datasets_for_publishing(self, dataset_ids, **kwargs): # noqa: E501 If the method is called asynchronously, returns the request thread. """ - kwargs['_return_http_data_only'] = True - if kwargs.get('async_req'): - return self.approve_datasets_for_publishing_with_http_info(dataset_ids, **kwargs) # noqa: E501 + kwargs["_return_http_data_only"] = True + if kwargs.get("async_req"): + return self.approve_datasets_for_publishing_with_http_info( + dataset_ids, **kwargs + ) else: - (data) = self.approve_datasets_for_publishing_with_http_info(dataset_ids, **kwargs) # noqa: E501 + (data) = self.approve_datasets_for_publishing_with_http_info( + dataset_ids, **kwargs + ) return data - def approve_datasets_for_publishing_with_http_info(self, dataset_ids, **kwargs): # noqa: E501 + def approve_datasets_for_publishing_with_http_info( + self, dataset_ids, **kwargs + ): # noqa: E501 """approve_datasets_for_publishing # noqa: E501 This method makes a synchronous HTTP request by default. 
To make an @@ -72,25 +78,26 @@ def approve_datasets_for_publishing_with_http_info(self, dataset_ids, **kwargs): returns the request thread. """ - all_params = ['dataset_ids'] # noqa: E501 - all_params.append('async_req') - all_params.append('_return_http_data_only') - all_params.append('_preload_content') - all_params.append('_request_timeout') + all_params = ["dataset_ids"] # noqa: E501 + all_params.append("async_req") + all_params.append("_return_http_data_only") + all_params.append("_preload_content") + all_params.append("_request_timeout") params = locals() - for key, val in six.iteritems(params['kwargs']): + for key, val in six.iteritems(params["kwargs"]): if key not in all_params: raise TypeError( "Got an unexpected keyword argument '%s'" " to method approve_datasets_for_publishing" % key ) params[key] = val - del params['kwargs'] + del params["kwargs"] # verify the required parameter 'dataset_ids' is set - if ('dataset_ids' not in params or - params['dataset_ids'] is None): - raise ValueError("Missing the required parameter `dataset_ids` when calling `approve_datasets_for_publishing`") # noqa: E501 + if "dataset_ids" not in params or params["dataset_ids"] is None: + raise ValueError( + "Missing the required parameter `dataset_ids` when calling `approve_datasets_for_publishing`" + ) collection_formats = {} @@ -104,13 +111,14 @@ def approve_datasets_for_publishing_with_http_info(self, dataset_ids, **kwargs): local_var_files = {} body_params = None - if 'dataset_ids' in params: - body_params = params['dataset_ids'] + if "dataset_ids" in params: + body_params = params["dataset_ids"] # Authentication setting auth_settings = [] # noqa: E501 return self.api_client.call_api( - '/datasets/publish_approved', 'POST', + "/datasets/publish_approved", + "POST", path_params, query_params, header_params, @@ -119,11 +127,12 @@ def approve_datasets_for_publishing_with_http_info(self, dataset_ids, **kwargs): files=local_var_files, response_type=None, # noqa: E501 
auth_settings=auth_settings, - async_req=params.get('async_req'), - _return_http_data_only=params.get('_return_http_data_only'), - _preload_content=params.get('_preload_content', True), - _request_timeout=params.get('_request_timeout'), - collection_formats=collection_formats) + async_req=params.get("async_req"), + _return_http_data_only=params.get("_return_http_data_only"), + _preload_content=params.get("_preload_content", True), + _request_timeout=params.get("_request_timeout"), + collection_formats=collection_formats, + ) def create_dataset(self, body, **kwargs): # noqa: E501 """create_dataset # noqa: E501 @@ -139,11 +148,11 @@ def create_dataset(self, body, **kwargs): # noqa: E501 If the method is called asynchronously, returns the request thread. """ - kwargs['_return_http_data_only'] = True - if kwargs.get('async_req'): - return self.create_dataset_with_http_info(body, **kwargs) # noqa: E501 + kwargs["_return_http_data_only"] = True + if kwargs.get("async_req"): + return self.create_dataset_with_http_info(body, **kwargs) else: - (data) = self.create_dataset_with_http_info(body, **kwargs) # noqa: E501 + (data) = self.create_dataset_with_http_info(body, **kwargs) return data def create_dataset_with_http_info(self, body, **kwargs): # noqa: E501 @@ -161,25 +170,26 @@ def create_dataset_with_http_info(self, body, **kwargs): # noqa: E501 returns the request thread. 
""" - all_params = ['body'] # noqa: E501 - all_params.append('async_req') - all_params.append('_return_http_data_only') - all_params.append('_preload_content') - all_params.append('_request_timeout') + all_params = ["body"] # noqa: E501 + all_params.append("async_req") + all_params.append("_return_http_data_only") + all_params.append("_preload_content") + all_params.append("_request_timeout") params = locals() - for key, val in six.iteritems(params['kwargs']): + for key, val in six.iteritems(params["kwargs"]): if key not in all_params: raise TypeError( "Got an unexpected keyword argument '%s'" " to method create_dataset" % key ) params[key] = val - del params['kwargs'] + del params["kwargs"] # verify the required parameter 'body' is set - if ('body' not in params or - params['body'] is None): - raise ValueError("Missing the required parameter `body` when calling `create_dataset`") # noqa: E501 + if "body" not in params or params["body"] is None: + raise ValueError( + "Missing the required parameter `body` when calling `create_dataset`" + ) collection_formats = {} @@ -193,26 +203,28 @@ def create_dataset_with_http_info(self, body, **kwargs): # noqa: E501 local_var_files = {} body_params = None - if 'body' in params: - body_params = params['body'] + if "body" in params: + body_params = params["body"] # Authentication setting auth_settings = [] # noqa: E501 return self.api_client.call_api( - '/datasets', 'POST', + "/datasets", + "POST", path_params, query_params, header_params, body=body_params, post_params=form_params, files=local_var_files, - response_type='ApiDataset', # noqa: E501 + response_type="ApiDataset", # noqa: E501 auth_settings=auth_settings, - async_req=params.get('async_req'), - _return_http_data_only=params.get('_return_http_data_only'), - _preload_content=params.get('_preload_content', True), - _request_timeout=params.get('_request_timeout'), - collection_formats=collection_formats) + async_req=params.get("async_req"), + 
_return_http_data_only=params.get("_return_http_data_only"), + _preload_content=params.get("_preload_content", True), + _request_timeout=params.get("_request_timeout"), + collection_formats=collection_formats, + ) def delete_dataset(self, id, **kwargs): # noqa: E501 """delete_dataset # noqa: E501 @@ -228,11 +240,11 @@ def delete_dataset(self, id, **kwargs): # noqa: E501 If the method is called asynchronously, returns the request thread. """ - kwargs['_return_http_data_only'] = True - if kwargs.get('async_req'): - return self.delete_dataset_with_http_info(id, **kwargs) # noqa: E501 + kwargs["_return_http_data_only"] = True + if kwargs.get("async_req"): + return self.delete_dataset_with_http_info(id, **kwargs) else: - (data) = self.delete_dataset_with_http_info(id, **kwargs) # noqa: E501 + (data) = self.delete_dataset_with_http_info(id, **kwargs) return data def delete_dataset_with_http_info(self, id, **kwargs): # noqa: E501 @@ -250,31 +262,32 @@ def delete_dataset_with_http_info(self, id, **kwargs): # noqa: E501 returns the request thread. 
""" - all_params = ['id'] # noqa: E501 - all_params.append('async_req') - all_params.append('_return_http_data_only') - all_params.append('_preload_content') - all_params.append('_request_timeout') + all_params = ["id"] # noqa: E501 + all_params.append("async_req") + all_params.append("_return_http_data_only") + all_params.append("_preload_content") + all_params.append("_request_timeout") params = locals() - for key, val in six.iteritems(params['kwargs']): + for key, val in six.iteritems(params["kwargs"]): if key not in all_params: raise TypeError( "Got an unexpected keyword argument '%s'" " to method delete_dataset" % key ) params[key] = val - del params['kwargs'] + del params["kwargs"] # verify the required parameter 'id' is set - if ('id' not in params or - params['id'] is None): - raise ValueError("Missing the required parameter `id` when calling `delete_dataset`") # noqa: E501 + if "id" not in params or params["id"] is None: + raise ValueError( + "Missing the required parameter `id` when calling `delete_dataset`" + ) collection_formats = {} path_params = {} - if 'id' in params: - path_params['id'] = params['id'] # noqa: E501 + if "id" in params: + path_params["id"] = params["id"] # noqa: E501 query_params = [] @@ -288,7 +301,8 @@ def delete_dataset_with_http_info(self, id, **kwargs): # noqa: E501 auth_settings = [] # noqa: E501 return self.api_client.call_api( - '/datasets/{id}', 'DELETE', + "/datasets/{id}", + "DELETE", path_params, query_params, header_params, @@ -297,11 +311,12 @@ def delete_dataset_with_http_info(self, id, **kwargs): # noqa: E501 files=local_var_files, response_type=None, # noqa: E501 auth_settings=auth_settings, - async_req=params.get('async_req'), - _return_http_data_only=params.get('_return_http_data_only'), - _preload_content=params.get('_preload_content', True), - _request_timeout=params.get('_request_timeout'), - collection_formats=collection_formats) + async_req=params.get("async_req"), + 
_return_http_data_only=params.get("_return_http_data_only"), + _preload_content=params.get("_preload_content", True), + _request_timeout=params.get("_request_timeout"), + collection_formats=collection_formats, + ) def download_dataset_files(self, id, **kwargs): # noqa: E501 """Returns the dataset artifacts compressed into a .tgz (.tar.gz) file. # noqa: E501 @@ -318,11 +333,15 @@ def download_dataset_files(self, id, **kwargs): # noqa: E501 If the method is called asynchronously, returns the request thread. """ - kwargs['_return_http_data_only'] = True - if kwargs.get('async_req'): - return self.download_dataset_files_with_http_info(id, **kwargs) # noqa: E501 + kwargs["_return_http_data_only"] = True + if kwargs.get("async_req"): + return self.download_dataset_files_with_http_info( + id, **kwargs + ) else: - (data) = self.download_dataset_files_with_http_info(id, **kwargs) # noqa: E501 + (data) = self.download_dataset_files_with_http_info( + id, **kwargs + ) return data def download_dataset_files_with_http_info(self, id, **kwargs): # noqa: E501 @@ -341,35 +360,38 @@ def download_dataset_files_with_http_info(self, id, **kwargs): # noqa: E501 returns the request thread. 
""" - all_params = ['id', 'include_generated_code'] # noqa: E501 - all_params.append('async_req') - all_params.append('_return_http_data_only') - all_params.append('_preload_content') - all_params.append('_request_timeout') + all_params = ["id", "include_generated_code"] # noqa: E501 + all_params.append("async_req") + all_params.append("_return_http_data_only") + all_params.append("_preload_content") + all_params.append("_request_timeout") params = locals() - for key, val in six.iteritems(params['kwargs']): + for key, val in six.iteritems(params["kwargs"]): if key not in all_params: raise TypeError( "Got an unexpected keyword argument '%s'" " to method download_dataset_files" % key ) params[key] = val - del params['kwargs'] + del params["kwargs"] # verify the required parameter 'id' is set - if ('id' not in params or - params['id'] is None): - raise ValueError("Missing the required parameter `id` when calling `download_dataset_files`") # noqa: E501 + if "id" not in params or params["id"] is None: + raise ValueError( + "Missing the required parameter `id` when calling `download_dataset_files`" + ) collection_formats = {} path_params = {} - if 'id' in params: - path_params['id'] = params['id'] # noqa: E501 + if "id" in params: + path_params["id"] = params["id"] # noqa: E501 query_params = [] - if 'include_generated_code' in params: - query_params.append(('include_generated_code', params['include_generated_code'])) # noqa: E501 + if "include_generated_code" in params: + query_params.append( + ("include_generated_code", params["include_generated_code"]) + ) header_params = {} @@ -378,27 +400,30 @@ def download_dataset_files_with_http_info(self, id, **kwargs): # noqa: E501 body_params = None # HTTP header `Accept` - header_params['Accept'] = self.api_client.select_header_accept( - ['application/gzip']) # noqa: E501 + header_params["Accept"] = self.api_client.select_header_accept( + ["application/gzip"] + ) # Authentication setting auth_settings = [] # noqa: E501 return 
self.api_client.call_api( - '/datasets/{id}/download', 'GET', + "/datasets/{id}/download", + "GET", path_params, query_params, header_params, body=body_params, post_params=form_params, files=local_var_files, - response_type='file', # noqa: E501 + response_type="file", # noqa: E501 auth_settings=auth_settings, - async_req=params.get('async_req'), - _return_http_data_only=params.get('_return_http_data_only'), - _preload_content=params.get('_preload_content', False), - _request_timeout=params.get('_request_timeout'), - collection_formats=collection_formats) + async_req=params.get("async_req"), + _return_http_data_only=params.get("_return_http_data_only"), + _preload_content=params.get("_preload_content", False), + _request_timeout=params.get("_request_timeout"), + collection_formats=collection_formats, + ) def generate_dataset_code(self, id, **kwargs): # noqa: E501 """generate_dataset_code # noqa: E501 @@ -415,11 +440,13 @@ def generate_dataset_code(self, id, **kwargs): # noqa: E501 If the method is called asynchronously, returns the request thread. """ - kwargs['_return_http_data_only'] = True - if kwargs.get('async_req'): - return self.generate_dataset_code_with_http_info(id, **kwargs) # noqa: E501 + kwargs["_return_http_data_only"] = True + if kwargs.get("async_req"): + return self.generate_dataset_code_with_http_info(id, **kwargs) else: - (data) = self.generate_dataset_code_with_http_info(id, **kwargs) # noqa: E501 + (data) = self.generate_dataset_code_with_http_info( + id, **kwargs + ) return data def generate_dataset_code_with_http_info(self, id, **kwargs): # noqa: E501 @@ -438,31 +465,32 @@ def generate_dataset_code_with_http_info(self, id, **kwargs): # noqa: E501 returns the request thread. 
""" - all_params = ['id'] # noqa: E501 - all_params.append('async_req') - all_params.append('_return_http_data_only') - all_params.append('_preload_content') - all_params.append('_request_timeout') + all_params = ["id"] # noqa: E501 + all_params.append("async_req") + all_params.append("_return_http_data_only") + all_params.append("_preload_content") + all_params.append("_request_timeout") params = locals() - for key, val in six.iteritems(params['kwargs']): + for key, val in six.iteritems(params["kwargs"]): if key not in all_params: raise TypeError( "Got an unexpected keyword argument '%s'" " to method generate_dataset_code" % key ) params[key] = val - del params['kwargs'] + del params["kwargs"] # verify the required parameter 'id' is set - if ('id' not in params or - params['id'] is None): - raise ValueError("Missing the required parameter `id` when calling `generate_dataset_code`") # noqa: E501 + if "id" not in params or params["id"] is None: + raise ValueError( + "Missing the required parameter `id` when calling `generate_dataset_code`" + ) collection_formats = {} path_params = {} - if 'id' in params: - path_params['id'] = params['id'] # noqa: E501 + if "id" in params: + path_params["id"] = params["id"] # noqa: E501 query_params = [] @@ -476,20 +504,22 @@ def generate_dataset_code_with_http_info(self, id, **kwargs): # noqa: E501 auth_settings = [] # noqa: E501 return self.api_client.call_api( - '/datasets/{id}/generate_code', 'GET', + "/datasets/{id}/generate_code", + "GET", path_params, query_params, header_params, body=body_params, post_params=form_params, files=local_var_files, - response_type='ApiGenerateCodeResponse', # noqa: E501 + response_type="ApiGenerateCodeResponse", # noqa: E501 auth_settings=auth_settings, - async_req=params.get('async_req'), - _return_http_data_only=params.get('_return_http_data_only'), - _preload_content=params.get('_preload_content', True), - _request_timeout=params.get('_request_timeout'), - collection_formats=collection_formats) 
+ async_req=params.get("async_req"), + _return_http_data_only=params.get("_return_http_data_only"), + _preload_content=params.get("_preload_content", True), + _request_timeout=params.get("_request_timeout"), + collection_formats=collection_formats, + ) def get_dataset(self, id, **kwargs): # noqa: E501 """get_dataset # noqa: E501 @@ -505,11 +535,11 @@ def get_dataset(self, id, **kwargs): # noqa: E501 If the method is called asynchronously, returns the request thread. """ - kwargs['_return_http_data_only'] = True - if kwargs.get('async_req'): - return self.get_dataset_with_http_info(id, **kwargs) # noqa: E501 + kwargs["_return_http_data_only"] = True + if kwargs.get("async_req"): + return self.get_dataset_with_http_info(id, **kwargs) else: - (data) = self.get_dataset_with_http_info(id, **kwargs) # noqa: E501 + (data) = self.get_dataset_with_http_info(id, **kwargs) return data def get_dataset_with_http_info(self, id, **kwargs): # noqa: E501 @@ -527,31 +557,32 @@ def get_dataset_with_http_info(self, id, **kwargs): # noqa: E501 returns the request thread. 
""" - all_params = ['id'] # noqa: E501 - all_params.append('async_req') - all_params.append('_return_http_data_only') - all_params.append('_preload_content') - all_params.append('_request_timeout') + all_params = ["id"] # noqa: E501 + all_params.append("async_req") + all_params.append("_return_http_data_only") + all_params.append("_preload_content") + all_params.append("_request_timeout") params = locals() - for key, val in six.iteritems(params['kwargs']): + for key, val in six.iteritems(params["kwargs"]): if key not in all_params: raise TypeError( "Got an unexpected keyword argument '%s'" " to method get_dataset" % key ) params[key] = val - del params['kwargs'] + del params["kwargs"] # verify the required parameter 'id' is set - if ('id' not in params or - params['id'] is None): - raise ValueError("Missing the required parameter `id` when calling `get_dataset`") # noqa: E501 + if "id" not in params or params["id"] is None: + raise ValueError( + "Missing the required parameter `id` when calling `get_dataset`" + ) collection_formats = {} path_params = {} - if 'id' in params: - path_params['id'] = params['id'] # noqa: E501 + if "id" in params: + path_params["id"] = params["id"] # noqa: E501 query_params = [] @@ -565,20 +596,22 @@ def get_dataset_with_http_info(self, id, **kwargs): # noqa: E501 auth_settings = [] # noqa: E501 return self.api_client.call_api( - '/datasets/{id}', 'GET', + "/datasets/{id}", + "GET", path_params, query_params, header_params, body=body_params, post_params=form_params, files=local_var_files, - response_type='ApiDataset', # noqa: E501 + response_type="ApiDataset", # noqa: E501 auth_settings=auth_settings, - async_req=params.get('async_req'), - _return_http_data_only=params.get('_return_http_data_only'), - _preload_content=params.get('_preload_content', True), - _request_timeout=params.get('_request_timeout'), - collection_formats=collection_formats) + async_req=params.get("async_req"), + 
_return_http_data_only=params.get("_return_http_data_only"), + _preload_content=params.get("_preload_content", True), + _request_timeout=params.get("_request_timeout"), + collection_formats=collection_formats, + ) def get_dataset_template(self, id, **kwargs): # noqa: E501 """get_dataset_template # noqa: E501 @@ -594,11 +627,13 @@ def get_dataset_template(self, id, **kwargs): # noqa: E501 If the method is called asynchronously, returns the request thread. """ - kwargs['_return_http_data_only'] = True - if kwargs.get('async_req'): - return self.get_dataset_template_with_http_info(id, **kwargs) # noqa: E501 + kwargs["_return_http_data_only"] = True + if kwargs.get("async_req"): + return self.get_dataset_template_with_http_info(id, **kwargs) else: - (data) = self.get_dataset_template_with_http_info(id, **kwargs) # noqa: E501 + (data) = self.get_dataset_template_with_http_info( + id, **kwargs + ) return data def get_dataset_template_with_http_info(self, id, **kwargs): # noqa: E501 @@ -616,31 +651,32 @@ def get_dataset_template_with_http_info(self, id, **kwargs): # noqa: E501 returns the request thread. 
""" - all_params = ['id'] # noqa: E501 - all_params.append('async_req') - all_params.append('_return_http_data_only') - all_params.append('_preload_content') - all_params.append('_request_timeout') + all_params = ["id"] # noqa: E501 + all_params.append("async_req") + all_params.append("_return_http_data_only") + all_params.append("_preload_content") + all_params.append("_request_timeout") params = locals() - for key, val in six.iteritems(params['kwargs']): + for key, val in six.iteritems(params["kwargs"]): if key not in all_params: raise TypeError( "Got an unexpected keyword argument '%s'" " to method get_dataset_template" % key ) params[key] = val - del params['kwargs'] + del params["kwargs"] # verify the required parameter 'id' is set - if ('id' not in params or - params['id'] is None): - raise ValueError("Missing the required parameter `id` when calling `get_dataset_template`") # noqa: E501 + if "id" not in params or params["id"] is None: + raise ValueError( + "Missing the required parameter `id` when calling `get_dataset_template`" + ) collection_formats = {} path_params = {} - if 'id' in params: - path_params['id'] = params['id'] # noqa: E501 + if "id" in params: + path_params["id"] = params["id"] # noqa: E501 query_params = [] @@ -654,20 +690,22 @@ def get_dataset_template_with_http_info(self, id, **kwargs): # noqa: E501 auth_settings = [] # noqa: E501 return self.api_client.call_api( - '/datasets/{id}/templates', 'GET', + "/datasets/{id}/templates", + "GET", path_params, query_params, header_params, body=body_params, post_params=form_params, files=local_var_files, - response_type='ApiGetTemplateResponse', # noqa: E501 + response_type="ApiGetTemplateResponse", # noqa: E501 auth_settings=auth_settings, - async_req=params.get('async_req'), - _return_http_data_only=params.get('_return_http_data_only'), - _preload_content=params.get('_preload_content', True), - _request_timeout=params.get('_request_timeout'), - collection_formats=collection_formats) + 
async_req=params.get("async_req"), + _return_http_data_only=params.get("_return_http_data_only"), + _preload_content=params.get("_preload_content", True), + _request_timeout=params.get("_request_timeout"), + collection_formats=collection_formats, + ) def list_datasets(self, **kwargs): # noqa: E501 """list_datasets # noqa: E501 @@ -686,11 +724,11 @@ def list_datasets(self, **kwargs): # noqa: E501 If the method is called asynchronously, returns the request thread. """ - kwargs['_return_http_data_only'] = True - if kwargs.get('async_req'): - return self.list_datasets_with_http_info(**kwargs) # noqa: E501 + kwargs["_return_http_data_only"] = True + if kwargs.get("async_req"): + return self.list_datasets_with_http_info(**kwargs) else: - (data) = self.list_datasets_with_http_info(**kwargs) # noqa: E501 + (data) = self.list_datasets_with_http_info(**kwargs) return data def list_datasets_with_http_info(self, **kwargs): # noqa: E501 @@ -711,35 +749,35 @@ def list_datasets_with_http_info(self, **kwargs): # noqa: E501 returns the request thread. 
""" - all_params = ['page_token', 'page_size', 'sort_by', 'filter'] # noqa: E501 - all_params.append('async_req') - all_params.append('_return_http_data_only') - all_params.append('_preload_content') - all_params.append('_request_timeout') + all_params = ["page_token", "page_size", "sort_by", "filter"] # noqa: E501 + all_params.append("async_req") + all_params.append("_return_http_data_only") + all_params.append("_preload_content") + all_params.append("_request_timeout") params = locals() - for key, val in six.iteritems(params['kwargs']): + for key, val in six.iteritems(params["kwargs"]): if key not in all_params: raise TypeError( "Got an unexpected keyword argument '%s'" " to method list_datasets" % key ) params[key] = val - del params['kwargs'] + del params["kwargs"] collection_formats = {} path_params = {} query_params = [] - if 'page_token' in params: - query_params.append(('page_token', params['page_token'])) # noqa: E501 - if 'page_size' in params: - query_params.append(('page_size', params['page_size'])) # noqa: E501 - if 'sort_by' in params: - query_params.append(('sort_by', params['sort_by'])) # noqa: E501 - if 'filter' in params: - query_params.append(('filter', params['filter'])) # noqa: E501 + if "page_token" in params: + query_params.append(("page_token", params["page_token"])) + if "page_size" in params: + query_params.append(("page_size", params["page_size"])) + if "sort_by" in params: + query_params.append(("sort_by", params["sort_by"])) + if "filter" in params: + query_params.append(("filter", params["filter"])) header_params = {} @@ -751,20 +789,22 @@ def list_datasets_with_http_info(self, **kwargs): # noqa: E501 auth_settings = [] # noqa: E501 return self.api_client.call_api( - '/datasets', 'GET', + "/datasets", + "GET", path_params, query_params, header_params, body=body_params, post_params=form_params, files=local_var_files, - response_type='ApiListDatasetsResponse', # noqa: E501 + response_type="ApiListDatasetsResponse", # noqa: E501 
auth_settings=auth_settings, - async_req=params.get('async_req'), - _return_http_data_only=params.get('_return_http_data_only'), - _preload_content=params.get('_preload_content', True), - _request_timeout=params.get('_request_timeout'), - collection_formats=collection_formats) + async_req=params.get("async_req"), + _return_http_data_only=params.get("_return_http_data_only"), + _preload_content=params.get("_preload_content", True), + _request_timeout=params.get("_request_timeout"), + collection_formats=collection_formats, + ) def run_dataset(self, id, **kwargs): # noqa: E501 """run_dataset # noqa: E501 @@ -782,11 +822,11 @@ def run_dataset(self, id, **kwargs): # noqa: E501 If the method is called asynchronously, returns the request thread. """ - kwargs['_return_http_data_only'] = True - if kwargs.get('async_req'): - return self.run_dataset_with_http_info(id, **kwargs) # noqa: E501 + kwargs["_return_http_data_only"] = True + if kwargs.get("async_req"): + return self.run_dataset_with_http_info(id, **kwargs) else: - (data) = self.run_dataset_with_http_info(id, **kwargs) # noqa: E501 + (data) = self.run_dataset_with_http_info(id, **kwargs) return data def run_dataset_with_http_info(self, id, **kwargs): # noqa: E501 @@ -806,35 +846,36 @@ def run_dataset_with_http_info(self, id, **kwargs): # noqa: E501 returns the request thread. 
""" - all_params = ['id', 'parameters', 'run_name'] # noqa: E501 - all_params.append('async_req') - all_params.append('_return_http_data_only') - all_params.append('_preload_content') - all_params.append('_request_timeout') + all_params = ["id", "parameters", "run_name"] # noqa: E501 + all_params.append("async_req") + all_params.append("_return_http_data_only") + all_params.append("_preload_content") + all_params.append("_request_timeout") params = locals() - for key, val in six.iteritems(params['kwargs']): + for key, val in six.iteritems(params["kwargs"]): if key not in all_params: raise TypeError( "Got an unexpected keyword argument '%s'" " to method run_dataset" % key ) params[key] = val - del params['kwargs'] + del params["kwargs"] # verify the required parameter 'id' is set - if ('id' not in params or - params['id'] is None): - raise ValueError("Missing the required parameter `id` when calling `run_dataset`") # noqa: E501 + if "id" not in params or params["id"] is None: + raise ValueError( + "Missing the required parameter `id` when calling `run_dataset`" + ) collection_formats = {} path_params = {} - if 'id' in params: - path_params['id'] = params['id'] # noqa: E501 + if "id" in params: + path_params["id"] = params["id"] # noqa: E501 query_params = [] - if 'run_name' in params: - query_params.append(('run_name', params['run_name'])) # noqa: E501 + if "run_name" in params: + query_params.append(("run_name", params["run_name"])) header_params = {} @@ -842,26 +883,28 @@ def run_dataset_with_http_info(self, id, **kwargs): # noqa: E501 local_var_files = {} body_params = None - if 'parameters' in params: - body_params = params['parameters'] + if "parameters" in params: + body_params = params["parameters"] # Authentication setting auth_settings = [] # noqa: E501 return self.api_client.call_api( - '/datasets/{id}/run', 'POST', + "/datasets/{id}/run", + "POST", path_params, query_params, header_params, body=body_params, post_params=form_params, files=local_var_files, 
- response_type='ApiRunCodeResponse', # noqa: E501 + response_type="ApiRunCodeResponse", # noqa: E501 auth_settings=auth_settings, - async_req=params.get('async_req'), - _return_http_data_only=params.get('_return_http_data_only'), - _preload_content=params.get('_preload_content', True), - _request_timeout=params.get('_request_timeout'), - collection_formats=collection_formats) + async_req=params.get("async_req"), + _return_http_data_only=params.get("_return_http_data_only"), + _preload_content=params.get("_preload_content", True), + _request_timeout=params.get("_request_timeout"), + collection_formats=collection_formats, + ) def set_featured_datasets(self, dataset_ids, **kwargs): # noqa: E501 """set_featured_datasets # noqa: E501 @@ -877,11 +920,15 @@ def set_featured_datasets(self, dataset_ids, **kwargs): # noqa: E501 If the method is called asynchronously, returns the request thread. """ - kwargs['_return_http_data_only'] = True - if kwargs.get('async_req'): - return self.set_featured_datasets_with_http_info(dataset_ids, **kwargs) # noqa: E501 + kwargs["_return_http_data_only"] = True + if kwargs.get("async_req"): + return self.set_featured_datasets_with_http_info( + dataset_ids, **kwargs + ) else: - (data) = self.set_featured_datasets_with_http_info(dataset_ids, **kwargs) # noqa: E501 + (data) = self.set_featured_datasets_with_http_info( + dataset_ids, **kwargs + ) return data def set_featured_datasets_with_http_info(self, dataset_ids, **kwargs): # noqa: E501 @@ -899,25 +946,26 @@ def set_featured_datasets_with_http_info(self, dataset_ids, **kwargs): # noqa: returns the request thread. 
""" - all_params = ['dataset_ids'] # noqa: E501 - all_params.append('async_req') - all_params.append('_return_http_data_only') - all_params.append('_preload_content') - all_params.append('_request_timeout') + all_params = ["dataset_ids"] # noqa: E501 + all_params.append("async_req") + all_params.append("_return_http_data_only") + all_params.append("_preload_content") + all_params.append("_request_timeout") params = locals() - for key, val in six.iteritems(params['kwargs']): + for key, val in six.iteritems(params["kwargs"]): if key not in all_params: raise TypeError( "Got an unexpected keyword argument '%s'" " to method set_featured_datasets" % key ) params[key] = val - del params['kwargs'] + del params["kwargs"] # verify the required parameter 'dataset_ids' is set - if ('dataset_ids' not in params or - params['dataset_ids'] is None): - raise ValueError("Missing the required parameter `dataset_ids` when calling `set_featured_datasets`") # noqa: E501 + if "dataset_ids" not in params or params["dataset_ids"] is None: + raise ValueError( + "Missing the required parameter `dataset_ids` when calling `set_featured_datasets`" + ) collection_formats = {} @@ -931,13 +979,14 @@ def set_featured_datasets_with_http_info(self, dataset_ids, **kwargs): # noqa: local_var_files = {} body_params = None - if 'dataset_ids' in params: - body_params = params['dataset_ids'] + if "dataset_ids" in params: + body_params = params["dataset_ids"] # Authentication setting auth_settings = [] # noqa: E501 return self.api_client.call_api( - '/datasets/featured', 'POST', + "/datasets/featured", + "POST", path_params, query_params, header_params, @@ -946,11 +995,12 @@ def set_featured_datasets_with_http_info(self, dataset_ids, **kwargs): # noqa: files=local_var_files, response_type=None, # noqa: E501 auth_settings=auth_settings, - async_req=params.get('async_req'), - _return_http_data_only=params.get('_return_http_data_only'), - _preload_content=params.get('_preload_content', True), - 
_request_timeout=params.get('_request_timeout'), - collection_formats=collection_formats) + async_req=params.get("async_req"), + _return_http_data_only=params.get("_return_http_data_only"), + _preload_content=params.get("_preload_content", True), + _request_timeout=params.get("_request_timeout"), + collection_formats=collection_formats, + ) def upload_dataset(self, uploadfile, **kwargs): # noqa: E501 """upload_dataset # noqa: E501 @@ -967,11 +1017,15 @@ def upload_dataset(self, uploadfile, **kwargs): # noqa: E501 If the method is called asynchronously, returns the request thread. """ - kwargs['_return_http_data_only'] = True - if kwargs.get('async_req'): - return self.upload_dataset_with_http_info(uploadfile, **kwargs) # noqa: E501 + kwargs["_return_http_data_only"] = True + if kwargs.get("async_req"): + return self.upload_dataset_with_http_info( + uploadfile, **kwargs + ) else: - (data) = self.upload_dataset_with_http_info(uploadfile, **kwargs) # noqa: E501 + (data) = self.upload_dataset_with_http_info( + uploadfile, **kwargs + ) return data def upload_dataset_with_http_info(self, uploadfile, **kwargs): # noqa: E501 @@ -990,68 +1044,75 @@ def upload_dataset_with_http_info(self, uploadfile, **kwargs): # noqa: E501 returns the request thread. 
""" - all_params = ['uploadfile', 'name'] # noqa: E501 - all_params.append('async_req') - all_params.append('_return_http_data_only') - all_params.append('_preload_content') - all_params.append('_request_timeout') + all_params = ["uploadfile", "name"] # noqa: E501 + all_params.append("async_req") + all_params.append("_return_http_data_only") + all_params.append("_preload_content") + all_params.append("_request_timeout") params = locals() - for key, val in six.iteritems(params['kwargs']): + for key, val in six.iteritems(params["kwargs"]): if key not in all_params: raise TypeError( "Got an unexpected keyword argument '%s'" " to method upload_dataset" % key ) params[key] = val - del params['kwargs'] + del params["kwargs"] # verify the required parameter 'uploadfile' is set - if ('uploadfile' not in params or - params['uploadfile'] is None): - raise ValueError("Missing the required parameter `uploadfile` when calling `upload_dataset`") # noqa: E501 + if "uploadfile" not in params or params["uploadfile"] is None: + raise ValueError( + "Missing the required parameter `uploadfile` when calling `upload_dataset`" + ) collection_formats = {} path_params = {} query_params = [] - if 'name' in params: - query_params.append(('name', params['name'])) # noqa: E501 + if "name" in params: + query_params.append(("name", params["name"])) header_params = {} form_params = [] local_var_files = {} - if 'uploadfile' in params: - local_var_files['uploadfile'] = params['uploadfile'] # noqa: E501 + if "uploadfile" in params: + local_var_files["uploadfile"] = params["uploadfile"] # noqa: E501 body_params = None # HTTP header `Accept` - header_params['Accept'] = self.api_client.select_header_accept( - ['application/json']) # noqa: E501 + header_params["Accept"] = self.api_client.select_header_accept( + ["application/json"] + ) # HTTP header `Content-Type` - header_params['Content-Type'] = self.api_client.select_header_content_type( # noqa: E501 - ['multipart/form-data']) # noqa: E501 + 
header_params[ + "Content-Type" + ] = self.api_client.select_header_content_type( # noqa: E501 + ["multipart/form-data"] + ) # Authentication setting auth_settings = [] # noqa: E501 return self.api_client.call_api( - '/datasets/upload', 'POST', + "/datasets/upload", + "POST", path_params, query_params, header_params, body=body_params, post_params=form_params, files=local_var_files, - response_type='ApiDataset', # noqa: E501 + response_type="ApiDataset", # noqa: E501 auth_settings=auth_settings, - async_req=params.get('async_req'), - _return_http_data_only=params.get('_return_http_data_only'), - _preload_content=params.get('_preload_content', True), - _request_timeout=params.get('_request_timeout'), - collection_formats=collection_formats) + async_req=params.get("async_req"), + _return_http_data_only=params.get("_return_http_data_only"), + _preload_content=params.get("_preload_content", True), + _request_timeout=params.get("_request_timeout"), + collection_formats=collection_formats, + ) def upload_dataset_file(self, id, uploadfile, **kwargs): # noqa: E501 """upload_dataset_file # noqa: E501 @@ -1068,14 +1129,20 @@ def upload_dataset_file(self, id, uploadfile, **kwargs): # noqa: E501 If the method is called asynchronously, returns the request thread. 
""" - kwargs['_return_http_data_only'] = True - if kwargs.get('async_req'): - return self.upload_dataset_file_with_http_info(id, uploadfile, **kwargs) # noqa: E501 + kwargs["_return_http_data_only"] = True + if kwargs.get("async_req"): + return self.upload_dataset_file_with_http_info( + id, uploadfile, **kwargs + ) else: - (data) = self.upload_dataset_file_with_http_info(id, uploadfile, **kwargs) # noqa: E501 + (data) = self.upload_dataset_file_with_http_info( + id, uploadfile, **kwargs + ) return data - def upload_dataset_file_with_http_info(self, id, uploadfile, **kwargs): # noqa: E501 + def upload_dataset_file_with_http_info( + self, id, uploadfile, **kwargs + ): # noqa: E501 """upload_dataset_file # noqa: E501 This method makes a synchronous HTTP request by default. To make an @@ -1091,35 +1158,37 @@ def upload_dataset_file_with_http_info(self, id, uploadfile, **kwargs): # noqa: returns the request thread. """ - all_params = ['id', 'uploadfile'] # noqa: E501 - all_params.append('async_req') - all_params.append('_return_http_data_only') - all_params.append('_preload_content') - all_params.append('_request_timeout') + all_params = ["id", "uploadfile"] # noqa: E501 + all_params.append("async_req") + all_params.append("_return_http_data_only") + all_params.append("_preload_content") + all_params.append("_request_timeout") params = locals() - for key, val in six.iteritems(params['kwargs']): + for key, val in six.iteritems(params["kwargs"]): if key not in all_params: raise TypeError( "Got an unexpected keyword argument '%s'" " to method upload_dataset_file" % key ) params[key] = val - del params['kwargs'] + del params["kwargs"] # verify the required parameter 'id' is set - if ('id' not in params or - params['id'] is None): - raise ValueError("Missing the required parameter `id` when calling `upload_dataset_file`") # noqa: E501 + if "id" not in params or params["id"] is None: + raise ValueError( + "Missing the required parameter `id` when calling 
`upload_dataset_file`" + ) # verify the required parameter 'uploadfile' is set - if ('uploadfile' not in params or - params['uploadfile'] is None): - raise ValueError("Missing the required parameter `uploadfile` when calling `upload_dataset_file`") # noqa: E501 + if "uploadfile" not in params or params["uploadfile"] is None: + raise ValueError( + "Missing the required parameter `uploadfile` when calling `upload_dataset_file`" + ) collection_formats = {} path_params = {} - if 'id' in params: - path_params['id'] = params['id'] # noqa: E501 + if "id" in params: + path_params["id"] = params["id"] # noqa: E501 query_params = [] @@ -1127,36 +1196,42 @@ def upload_dataset_file_with_http_info(self, id, uploadfile, **kwargs): # noqa: form_params = [] local_var_files = {} - if 'uploadfile' in params: - local_var_files['uploadfile'] = params['uploadfile'] # noqa: E501 + if "uploadfile" in params: + local_var_files["uploadfile"] = params["uploadfile"] # noqa: E501 body_params = None # HTTP header `Accept` - header_params['Accept'] = self.api_client.select_header_accept( - ['application/json']) # noqa: E501 + header_params["Accept"] = self.api_client.select_header_accept( + ["application/json"] + ) # HTTP header `Content-Type` - header_params['Content-Type'] = self.api_client.select_header_content_type( # noqa: E501 - ['multipart/form-data']) # noqa: E501 + header_params[ + "Content-Type" + ] = self.api_client.select_header_content_type( # noqa: E501 + ["multipart/form-data"] + ) # Authentication setting auth_settings = [] # noqa: E501 return self.api_client.call_api( - '/datasets/{id}/upload', 'POST', + "/datasets/{id}/upload", + "POST", path_params, query_params, header_params, body=body_params, post_params=form_params, files=local_var_files, - response_type='ApiDataset', # noqa: E501 + response_type="ApiDataset", # noqa: E501 auth_settings=auth_settings, - async_req=params.get('async_req'), - _return_http_data_only=params.get('_return_http_data_only'), - 
_preload_content=params.get('_preload_content', True), - _request_timeout=params.get('_request_timeout'), - collection_formats=collection_formats) + async_req=params.get("async_req"), + _return_http_data_only=params.get("_return_http_data_only"), + _preload_content=params.get("_preload_content", True), + _request_timeout=params.get("_request_timeout"), + collection_formats=collection_formats, + ) def upload_dataset_from_url(self, url, **kwargs): # noqa: E501 """upload_dataset_from_url # noqa: E501 @@ -1174,11 +1249,15 @@ def upload_dataset_from_url(self, url, **kwargs): # noqa: E501 If the method is called asynchronously, returns the request thread. """ - kwargs['_return_http_data_only'] = True - if kwargs.get('async_req'): - return self.upload_dataset_from_url_with_http_info(url, **kwargs) # noqa: E501 + kwargs["_return_http_data_only"] = True + if kwargs.get("async_req"): + return self.upload_dataset_from_url_with_http_info( + url, **kwargs + ) else: - (data) = self.upload_dataset_from_url_with_http_info(url, **kwargs) # noqa: E501 + (data) = self.upload_dataset_from_url_with_http_info( + url, **kwargs + ) return data def upload_dataset_from_url_with_http_info(self, url, **kwargs): # noqa: E501 @@ -1198,67 +1277,74 @@ def upload_dataset_from_url_with_http_info(self, url, **kwargs): # noqa: E501 returns the request thread. 
""" - all_params = ['url', 'name', 'access_token'] # noqa: E501 - all_params.append('async_req') - all_params.append('_return_http_data_only') - all_params.append('_preload_content') - all_params.append('_request_timeout') + all_params = ["url", "name", "access_token"] # noqa: E501 + all_params.append("async_req") + all_params.append("_return_http_data_only") + all_params.append("_preload_content") + all_params.append("_request_timeout") params = locals() - for key, val in six.iteritems(params['kwargs']): + for key, val in six.iteritems(params["kwargs"]): if key not in all_params: raise TypeError( "Got an unexpected keyword argument '%s'" " to method upload_dataset_from_url" % key ) params[key] = val - del params['kwargs'] + del params["kwargs"] # verify the required parameter 'url' is set - if ('url' not in params or - params['url'] is None): - raise ValueError("Missing the required parameter `url` when calling `upload_dataset_from_url`") # noqa: E501 + if "url" not in params or params["url"] is None: + raise ValueError( + "Missing the required parameter `url` when calling `upload_dataset_from_url`" + ) collection_formats = {} path_params = {} query_params = [] - if 'name' in params: - query_params.append(('name', params['name'])) # noqa: E501 + if "name" in params: + query_params.append(("name", params["name"])) header_params = {} form_params = [] local_var_files = {} - if 'url' in params: - form_params.append(('url', params['url'])) # noqa: E501 - if 'access_token' in params: - form_params.append(('access_token', params['access_token'])) # noqa: E501 + if "url" in params: + form_params.append(("url", params["url"])) + if "access_token" in params: + form_params.append(("access_token", params["access_token"])) body_params = None # HTTP header `Accept` - header_params['Accept'] = self.api_client.select_header_accept( - ['application/json']) # noqa: E501 + header_params["Accept"] = self.api_client.select_header_accept( + ["application/json"] + ) # HTTP header 
`Content-Type` - header_params['Content-Type'] = self.api_client.select_header_content_type( # noqa: E501 - ['multipart/form-data']) # noqa: E501 + header_params[ + "Content-Type" + ] = self.api_client.select_header_content_type( # noqa: E501 + ["multipart/form-data"] + ) # Authentication setting auth_settings = [] # noqa: E501 return self.api_client.call_api( - '/datasets/upload_from_url', 'POST', + "/datasets/upload_from_url", + "POST", path_params, query_params, header_params, body=body_params, post_params=form_params, files=local_var_files, - response_type='ApiDataset', # noqa: E501 + response_type="ApiDataset", # noqa: E501 auth_settings=auth_settings, - async_req=params.get('async_req'), - _return_http_data_only=params.get('_return_http_data_only'), - _preload_content=params.get('_preload_content', True), - _request_timeout=params.get('_request_timeout'), - collection_formats=collection_formats) + async_req=params.get("async_req"), + _return_http_data_only=params.get("_return_http_data_only"), + _preload_content=params.get("_preload_content", True), + _request_timeout=params.get("_request_timeout"), + collection_formats=collection_formats, + ) diff --git a/api/client/swagger_client/api/health_check_api.py b/api/client/swagger_client/api/health_check_api.py index a717819e..9e731452 100644 --- a/api/client/swagger_client/api/health_check_api.py +++ b/api/client/swagger_client/api/health_check_api.py @@ -19,7 +19,7 @@ import re # noqa: F401 # python 2 and python 3 compatibility library -import six +import six # noqa: F401 from swagger_client.api_client import ApiClient @@ -51,11 +51,11 @@ def health_check(self, **kwargs): # noqa: E501 If the method is called asynchronously, returns the request thread. 
""" - kwargs['_return_http_data_only'] = True - if kwargs.get('async_req'): - return self.health_check_with_http_info(**kwargs) # noqa: E501 + kwargs["_return_http_data_only"] = True + if kwargs.get("async_req"): + return self.health_check_with_http_info(**kwargs) else: - (data) = self.health_check_with_http_info(**kwargs) # noqa: E501 + (data) = self.health_check_with_http_info(**kwargs) return data def health_check_with_http_info(self, **kwargs): # noqa: E501 @@ -74,31 +74,35 @@ def health_check_with_http_info(self, **kwargs): # noqa: E501 returns the request thread. """ - all_params = ['check_database', 'check_object_store'] # noqa: E501 - all_params.append('async_req') - all_params.append('_return_http_data_only') - all_params.append('_preload_content') - all_params.append('_request_timeout') + all_params = ["check_database", "check_object_store"] # noqa: E501 + all_params.append("async_req") + all_params.append("_return_http_data_only") + all_params.append("_preload_content") + all_params.append("_request_timeout") params = locals() - for key, val in six.iteritems(params['kwargs']): + for key, val in six.iteritems(params["kwargs"]): if key not in all_params: raise TypeError( "Got an unexpected keyword argument '%s'" " to method health_check" % key ) params[key] = val - del params['kwargs'] + del params["kwargs"] collection_formats = {} path_params = {} query_params = [] - if 'check_database' in params: - query_params.append(('check_database', params['check_database'])) # noqa: E501 - if 'check_object_store' in params: - query_params.append(('check_object_store', params['check_object_store'])) # noqa: E501 + if "check_database" in params: + query_params.append( + ("check_database", params["check_database"]) + ) + if "check_object_store" in params: + query_params.append( + ("check_object_store", params["check_object_store"]) + ) header_params = {} @@ -110,7 +114,8 @@ def health_check_with_http_info(self, **kwargs): # noqa: E501 auth_settings = [] # noqa: E501 
return self.api_client.call_api( - '/health_check', 'GET', + "/health_check", + "GET", path_params, query_params, header_params, @@ -119,8 +124,9 @@ def health_check_with_http_info(self, **kwargs): # noqa: E501 files=local_var_files, response_type=None, # noqa: E501 auth_settings=auth_settings, - async_req=params.get('async_req'), - _return_http_data_only=params.get('_return_http_data_only'), - _preload_content=params.get('_preload_content', True), - _request_timeout=params.get('_request_timeout'), - collection_formats=collection_formats) + async_req=params.get("async_req"), + _return_http_data_only=params.get("_return_http_data_only"), + _preload_content=params.get("_preload_content", True), + _request_timeout=params.get("_request_timeout"), + collection_formats=collection_formats, + ) diff --git a/api/client/swagger_client/api/inference_service_api.py b/api/client/swagger_client/api/inference_service_api.py index 16821c5a..5361d83c 100644 --- a/api/client/swagger_client/api/inference_service_api.py +++ b/api/client/swagger_client/api/inference_service_api.py @@ -19,7 +19,7 @@ import re # noqa: F401 # python 2 and python 3 compatibility library -import six +import six # noqa: F401 from swagger_client.api_client import ApiClient @@ -51,11 +51,11 @@ def create_service(self, body, **kwargs): # noqa: E501 If the method is called asynchronously, returns the request thread. 
""" - kwargs['_return_http_data_only'] = True - if kwargs.get('async_req'): - return self.create_service_with_http_info(body, **kwargs) # noqa: E501 + kwargs["_return_http_data_only"] = True + if kwargs.get("async_req"): + return self.create_service_with_http_info(body, **kwargs) else: - (data) = self.create_service_with_http_info(body, **kwargs) # noqa: E501 + (data) = self.create_service_with_http_info(body, **kwargs) return data def create_service_with_http_info(self, body, **kwargs): # noqa: E501 @@ -74,33 +74,34 @@ def create_service_with_http_info(self, body, **kwargs): # noqa: E501 returns the request thread. """ - all_params = ['body', 'namespace'] # noqa: E501 - all_params.append('async_req') - all_params.append('_return_http_data_only') - all_params.append('_preload_content') - all_params.append('_request_timeout') + all_params = ["body", "namespace"] # noqa: E501 + all_params.append("async_req") + all_params.append("_return_http_data_only") + all_params.append("_preload_content") + all_params.append("_request_timeout") params = locals() - for key, val in six.iteritems(params['kwargs']): + for key, val in six.iteritems(params["kwargs"]): if key not in all_params: raise TypeError( "Got an unexpected keyword argument '%s'" " to method create_service" % key ) params[key] = val - del params['kwargs'] + del params["kwargs"] # verify the required parameter 'body' is set - if ('body' not in params or - params['body'] is None): - raise ValueError("Missing the required parameter `body` when calling `create_service`") # noqa: E501 + if "body" not in params or params["body"] is None: + raise ValueError( + "Missing the required parameter `body` when calling `create_service`" + ) collection_formats = {} path_params = {} query_params = [] - if 'namespace' in params: - query_params.append(('namespace', params['namespace'])) # noqa: E501 + if "namespace" in params: + query_params.append(("namespace", params["namespace"])) header_params = {} @@ -108,26 +109,28 @@ def 
create_service_with_http_info(self, body, **kwargs): # noqa: E501 local_var_files = {} body_params = None - if 'body' in params: - body_params = params['body'] + if "body" in params: + body_params = params["body"] # Authentication setting auth_settings = [] # noqa: E501 return self.api_client.call_api( - '/inferenceservices', 'POST', + "/inferenceservices", + "POST", path_params, query_params, header_params, body=body_params, post_params=form_params, files=local_var_files, - response_type='ApiInferenceservice', # noqa: E501 + response_type="ApiInferenceservice", # noqa: E501 auth_settings=auth_settings, - async_req=params.get('async_req'), - _return_http_data_only=params.get('_return_http_data_only'), - _preload_content=params.get('_preload_content', True), - _request_timeout=params.get('_request_timeout'), - collection_formats=collection_formats) + async_req=params.get("async_req"), + _return_http_data_only=params.get("_return_http_data_only"), + _preload_content=params.get("_preload_content", True), + _request_timeout=params.get("_request_timeout"), + collection_formats=collection_formats, + ) def get_inferenceservices(self, id, **kwargs): # noqa: E501 """get_inferenceservices # noqa: E501 @@ -144,11 +147,13 @@ def get_inferenceservices(self, id, **kwargs): # noqa: E501 If the method is called asynchronously, returns the request thread. 
""" - kwargs['_return_http_data_only'] = True - if kwargs.get('async_req'): - return self.get_inferenceservices_with_http_info(id, **kwargs) # noqa: E501 + kwargs["_return_http_data_only"] = True + if kwargs.get("async_req"): + return self.get_inferenceservices_with_http_info(id, **kwargs) else: - (data) = self.get_inferenceservices_with_http_info(id, **kwargs) # noqa: E501 + (data) = self.get_inferenceservices_with_http_info( + id, **kwargs + ) return data def get_inferenceservices_with_http_info(self, id, **kwargs): # noqa: E501 @@ -167,35 +172,36 @@ def get_inferenceservices_with_http_info(self, id, **kwargs): # noqa: E501 returns the request thread. """ - all_params = ['id', 'namespace'] # noqa: E501 - all_params.append('async_req') - all_params.append('_return_http_data_only') - all_params.append('_preload_content') - all_params.append('_request_timeout') + all_params = ["id", "namespace"] # noqa: E501 + all_params.append("async_req") + all_params.append("_return_http_data_only") + all_params.append("_preload_content") + all_params.append("_request_timeout") params = locals() - for key, val in six.iteritems(params['kwargs']): + for key, val in six.iteritems(params["kwargs"]): if key not in all_params: raise TypeError( "Got an unexpected keyword argument '%s'" " to method get_inferenceservices" % key ) params[key] = val - del params['kwargs'] + del params["kwargs"] # verify the required parameter 'id' is set - if ('id' not in params or - params['id'] is None): - raise ValueError("Missing the required parameter `id` when calling `get_inferenceservices`") # noqa: E501 + if "id" not in params or params["id"] is None: + raise ValueError( + "Missing the required parameter `id` when calling `get_inferenceservices`" + ) collection_formats = {} path_params = {} - if 'id' in params: - path_params['id'] = params['id'] # noqa: E501 + if "id" in params: + path_params["id"] = params["id"] # noqa: E501 query_params = [] - if 'namespace' in params: - 
query_params.append(('namespace', params['namespace'])) # noqa: E501 + if "namespace" in params: + query_params.append(("namespace", params["namespace"])) header_params = {} @@ -207,20 +213,22 @@ def get_inferenceservices_with_http_info(self, id, **kwargs): # noqa: E501 auth_settings = [] # noqa: E501 return self.api_client.call_api( - '/inferenceservices/{id}', 'GET', + "/inferenceservices/{id}", + "GET", path_params, query_params, header_params, body=body_params, post_params=form_params, files=local_var_files, - response_type='ApiInferenceservice', # noqa: E501 + response_type="ApiInferenceservice", # noqa: E501 auth_settings=auth_settings, - async_req=params.get('async_req'), - _return_http_data_only=params.get('_return_http_data_only'), - _preload_content=params.get('_preload_content', True), - _request_timeout=params.get('_request_timeout'), - collection_formats=collection_formats) + async_req=params.get("async_req"), + _return_http_data_only=params.get("_return_http_data_only"), + _preload_content=params.get("_preload_content", True), + _request_timeout=params.get("_request_timeout"), + collection_formats=collection_formats, + ) def list_inferenceservices(self, **kwargs): # noqa: E501 """list_inferenceservices # noqa: E501 @@ -240,11 +248,11 @@ def list_inferenceservices(self, **kwargs): # noqa: E501 If the method is called asynchronously, returns the request thread. 
""" - kwargs['_return_http_data_only'] = True - if kwargs.get('async_req'): - return self.list_inferenceservices_with_http_info(**kwargs) # noqa: E501 + kwargs["_return_http_data_only"] = True + if kwargs.get("async_req"): + return self.list_inferenceservices_with_http_info(**kwargs) else: - (data) = self.list_inferenceservices_with_http_info(**kwargs) # noqa: E501 + (data) = self.list_inferenceservices_with_http_info(**kwargs) return data def list_inferenceservices_with_http_info(self, **kwargs): # noqa: E501 @@ -266,37 +274,43 @@ def list_inferenceservices_with_http_info(self, **kwargs): # noqa: E501 returns the request thread. """ - all_params = ['page_token', 'page_size', 'sort_by', 'filter', 'namespace'] # noqa: E501 - all_params.append('async_req') - all_params.append('_return_http_data_only') - all_params.append('_preload_content') - all_params.append('_request_timeout') + all_params = [ + "page_token", + "page_size", + "sort_by", + "filter", + "namespace", + ] # noqa: E501 + all_params.append("async_req") + all_params.append("_return_http_data_only") + all_params.append("_preload_content") + all_params.append("_request_timeout") params = locals() - for key, val in six.iteritems(params['kwargs']): + for key, val in six.iteritems(params["kwargs"]): if key not in all_params: raise TypeError( "Got an unexpected keyword argument '%s'" " to method list_inferenceservices" % key ) params[key] = val - del params['kwargs'] + del params["kwargs"] collection_formats = {} path_params = {} query_params = [] - if 'page_token' in params: - query_params.append(('page_token', params['page_token'])) # noqa: E501 - if 'page_size' in params: - query_params.append(('page_size', params['page_size'])) # noqa: E501 - if 'sort_by' in params: - query_params.append(('sort_by', params['sort_by'])) # noqa: E501 - if 'filter' in params: - query_params.append(('filter', params['filter'])) # noqa: E501 - if 'namespace' in params: - query_params.append(('namespace', params['namespace'])) # 
noqa: E501 + if "page_token" in params: + query_params.append(("page_token", params["page_token"])) + if "page_size" in params: + query_params.append(("page_size", params["page_size"])) + if "sort_by" in params: + query_params.append(("sort_by", params["sort_by"])) + if "filter" in params: + query_params.append(("filter", params["filter"])) + if "namespace" in params: + query_params.append(("namespace", params["namespace"])) header_params = {} @@ -308,20 +322,22 @@ def list_inferenceservices_with_http_info(self, **kwargs): # noqa: E501 auth_settings = [] # noqa: E501 return self.api_client.call_api( - '/inferenceservices', 'GET', + "/inferenceservices", + "GET", path_params, query_params, header_params, body=body_params, post_params=form_params, files=local_var_files, - response_type='ApiListInferenceservicesResponse', # noqa: E501 + response_type="ApiListInferenceservicesResponse", # noqa: E501 auth_settings=auth_settings, - async_req=params.get('async_req'), - _return_http_data_only=params.get('_return_http_data_only'), - _preload_content=params.get('_preload_content', True), - _request_timeout=params.get('_request_timeout'), - collection_formats=collection_formats) + async_req=params.get("async_req"), + _return_http_data_only=params.get("_return_http_data_only"), + _preload_content=params.get("_preload_content", True), + _request_timeout=params.get("_request_timeout"), + collection_formats=collection_formats, + ) def upload_service(self, uploadfile, **kwargs): # noqa: E501 """upload_service # noqa: E501 @@ -339,11 +355,15 @@ def upload_service(self, uploadfile, **kwargs): # noqa: E501 If the method is called asynchronously, returns the request thread. 
""" - kwargs['_return_http_data_only'] = True - if kwargs.get('async_req'): - return self.upload_service_with_http_info(uploadfile, **kwargs) # noqa: E501 + kwargs["_return_http_data_only"] = True + if kwargs.get("async_req"): + return self.upload_service_with_http_info( + uploadfile, **kwargs + ) else: - (data) = self.upload_service_with_http_info(uploadfile, **kwargs) # noqa: E501 + (data) = self.upload_service_with_http_info( + uploadfile, **kwargs + ) return data def upload_service_with_http_info(self, uploadfile, **kwargs): # noqa: E501 @@ -363,67 +383,74 @@ def upload_service_with_http_info(self, uploadfile, **kwargs): # noqa: E501 returns the request thread. """ - all_params = ['uploadfile', 'name', 'namespace'] # noqa: E501 - all_params.append('async_req') - all_params.append('_return_http_data_only') - all_params.append('_preload_content') - all_params.append('_request_timeout') + all_params = ["uploadfile", "name", "namespace"] # noqa: E501 + all_params.append("async_req") + all_params.append("_return_http_data_only") + all_params.append("_preload_content") + all_params.append("_request_timeout") params = locals() - for key, val in six.iteritems(params['kwargs']): + for key, val in six.iteritems(params["kwargs"]): if key not in all_params: raise TypeError( "Got an unexpected keyword argument '%s'" " to method upload_service" % key ) params[key] = val - del params['kwargs'] + del params["kwargs"] # verify the required parameter 'uploadfile' is set - if ('uploadfile' not in params or - params['uploadfile'] is None): - raise ValueError("Missing the required parameter `uploadfile` when calling `upload_service`") # noqa: E501 + if "uploadfile" not in params or params["uploadfile"] is None: + raise ValueError( + "Missing the required parameter `uploadfile` when calling `upload_service`" + ) collection_formats = {} path_params = {} query_params = [] - if 'name' in params: - query_params.append(('name', params['name'])) # noqa: E501 - if 'namespace' in params: - 
query_params.append(('namespace', params['namespace'])) # noqa: E501 + if "name" in params: + query_params.append(("name", params["name"])) + if "namespace" in params: + query_params.append(("namespace", params["namespace"])) header_params = {} form_params = [] local_var_files = {} - if 'uploadfile' in params: - local_var_files['uploadfile'] = params['uploadfile'] # noqa: E501 + if "uploadfile" in params: + local_var_files["uploadfile"] = params["uploadfile"] # noqa: E501 body_params = None # HTTP header `Accept` - header_params['Accept'] = self.api_client.select_header_accept( - ['application/json']) # noqa: E501 + header_params["Accept"] = self.api_client.select_header_accept( + ["application/json"] + ) # HTTP header `Content-Type` - header_params['Content-Type'] = self.api_client.select_header_content_type( # noqa: E501 - ['multipart/form-data']) # noqa: E501 + header_params[ + "Content-Type" + ] = self.api_client.select_header_content_type( # noqa: E501 + ["multipart/form-data"] + ) # Authentication setting auth_settings = [] # noqa: E501 return self.api_client.call_api( - '/inferenceservices/upload', 'POST', + "/inferenceservices/upload", + "POST", path_params, query_params, header_params, body=body_params, post_params=form_params, files=local_var_files, - response_type='ApiInferenceservice', # noqa: E501 + response_type="ApiInferenceservice", # noqa: E501 auth_settings=auth_settings, - async_req=params.get('async_req'), - _return_http_data_only=params.get('_return_http_data_only'), - _preload_content=params.get('_preload_content', True), - _request_timeout=params.get('_request_timeout'), - collection_formats=collection_formats) + async_req=params.get("async_req"), + _return_http_data_only=params.get("_return_http_data_only"), + _preload_content=params.get("_preload_content", True), + _request_timeout=params.get("_request_timeout"), + collection_formats=collection_formats, + ) diff --git a/api/client/swagger_client/api/model_service_api.py 
b/api/client/swagger_client/api/model_service_api.py index bc6d95c0..cd1e64de 100644 --- a/api/client/swagger_client/api/model_service_api.py +++ b/api/client/swagger_client/api/model_service_api.py @@ -19,7 +19,7 @@ import re # noqa: F401 # python 2 and python 3 compatibility library -import six +import six # noqa: F401 from swagger_client.api_client import ApiClient @@ -50,14 +50,20 @@ def approve_models_for_publishing(self, model_ids, **kwargs): # noqa: E501 If the method is called asynchronously, returns the request thread. """ - kwargs['_return_http_data_only'] = True - if kwargs.get('async_req'): - return self.approve_models_for_publishing_with_http_info(model_ids, **kwargs) # noqa: E501 + kwargs["_return_http_data_only"] = True + if kwargs.get("async_req"): + return self.approve_models_for_publishing_with_http_info( + model_ids, **kwargs + ) else: - (data) = self.approve_models_for_publishing_with_http_info(model_ids, **kwargs) # noqa: E501 + (data) = self.approve_models_for_publishing_with_http_info( + model_ids, **kwargs + ) return data - def approve_models_for_publishing_with_http_info(self, model_ids, **kwargs): # noqa: E501 + def approve_models_for_publishing_with_http_info( + self, model_ids, **kwargs + ): # noqa: E501 """approve_models_for_publishing # noqa: E501 This method makes a synchronous HTTP request by default. To make an @@ -72,25 +78,26 @@ def approve_models_for_publishing_with_http_info(self, model_ids, **kwargs): # returns the request thread. 
""" - all_params = ['model_ids'] # noqa: E501 - all_params.append('async_req') - all_params.append('_return_http_data_only') - all_params.append('_preload_content') - all_params.append('_request_timeout') + all_params = ["model_ids"] # noqa: E501 + all_params.append("async_req") + all_params.append("_return_http_data_only") + all_params.append("_preload_content") + all_params.append("_request_timeout") params = locals() - for key, val in six.iteritems(params['kwargs']): + for key, val in six.iteritems(params["kwargs"]): if key not in all_params: raise TypeError( "Got an unexpected keyword argument '%s'" " to method approve_models_for_publishing" % key ) params[key] = val - del params['kwargs'] + del params["kwargs"] # verify the required parameter 'model_ids' is set - if ('model_ids' not in params or - params['model_ids'] is None): - raise ValueError("Missing the required parameter `model_ids` when calling `approve_models_for_publishing`") # noqa: E501 + if "model_ids" not in params or params["model_ids"] is None: + raise ValueError( + "Missing the required parameter `model_ids` when calling `approve_models_for_publishing`" + ) collection_formats = {} @@ -104,13 +111,14 @@ def approve_models_for_publishing_with_http_info(self, model_ids, **kwargs): # local_var_files = {} body_params = None - if 'model_ids' in params: - body_params = params['model_ids'] + if "model_ids" in params: + body_params = params["model_ids"] # Authentication setting auth_settings = [] # noqa: E501 return self.api_client.call_api( - '/models/publish_approved', 'POST', + "/models/publish_approved", + "POST", path_params, query_params, header_params, @@ -119,11 +127,12 @@ def approve_models_for_publishing_with_http_info(self, model_ids, **kwargs): # files=local_var_files, response_type=None, # noqa: E501 auth_settings=auth_settings, - async_req=params.get('async_req'), - _return_http_data_only=params.get('_return_http_data_only'), - _preload_content=params.get('_preload_content', True), - 
_request_timeout=params.get('_request_timeout'), - collection_formats=collection_formats) + async_req=params.get("async_req"), + _return_http_data_only=params.get("_return_http_data_only"), + _preload_content=params.get("_preload_content", True), + _request_timeout=params.get("_request_timeout"), + collection_formats=collection_formats, + ) def create_model(self, body, **kwargs): # noqa: E501 """create_model # noqa: E501 @@ -139,11 +148,11 @@ def create_model(self, body, **kwargs): # noqa: E501 If the method is called asynchronously, returns the request thread. """ - kwargs['_return_http_data_only'] = True - if kwargs.get('async_req'): - return self.create_model_with_http_info(body, **kwargs) # noqa: E501 + kwargs["_return_http_data_only"] = True + if kwargs.get("async_req"): + return self.create_model_with_http_info(body, **kwargs) else: - (data) = self.create_model_with_http_info(body, **kwargs) # noqa: E501 + (data) = self.create_model_with_http_info(body, **kwargs) return data def create_model_with_http_info(self, body, **kwargs): # noqa: E501 @@ -161,25 +170,26 @@ def create_model_with_http_info(self, body, **kwargs): # noqa: E501 returns the request thread. 
""" - all_params = ['body'] # noqa: E501 - all_params.append('async_req') - all_params.append('_return_http_data_only') - all_params.append('_preload_content') - all_params.append('_request_timeout') + all_params = ["body"] # noqa: E501 + all_params.append("async_req") + all_params.append("_return_http_data_only") + all_params.append("_preload_content") + all_params.append("_request_timeout") params = locals() - for key, val in six.iteritems(params['kwargs']): + for key, val in six.iteritems(params["kwargs"]): if key not in all_params: raise TypeError( "Got an unexpected keyword argument '%s'" " to method create_model" % key ) params[key] = val - del params['kwargs'] + del params["kwargs"] # verify the required parameter 'body' is set - if ('body' not in params or - params['body'] is None): - raise ValueError("Missing the required parameter `body` when calling `create_model`") # noqa: E501 + if "body" not in params or params["body"] is None: + raise ValueError( + "Missing the required parameter `body` when calling `create_model`" + ) collection_formats = {} @@ -193,26 +203,28 @@ def create_model_with_http_info(self, body, **kwargs): # noqa: E501 local_var_files = {} body_params = None - if 'body' in params: - body_params = params['body'] + if "body" in params: + body_params = params["body"] # Authentication setting auth_settings = [] # noqa: E501 return self.api_client.call_api( - '/models', 'POST', + "/models", + "POST", path_params, query_params, header_params, body=body_params, post_params=form_params, files=local_var_files, - response_type='ApiModel', # noqa: E501 + response_type="ApiModel", # noqa: E501 auth_settings=auth_settings, - async_req=params.get('async_req'), - _return_http_data_only=params.get('_return_http_data_only'), - _preload_content=params.get('_preload_content', True), - _request_timeout=params.get('_request_timeout'), - collection_formats=collection_formats) + async_req=params.get("async_req"), + 
_return_http_data_only=params.get("_return_http_data_only"), + _preload_content=params.get("_preload_content", True), + _request_timeout=params.get("_request_timeout"), + collection_formats=collection_formats, + ) def delete_model(self, id, **kwargs): # noqa: E501 """delete_model # noqa: E501 @@ -228,11 +240,11 @@ def delete_model(self, id, **kwargs): # noqa: E501 If the method is called asynchronously, returns the request thread. """ - kwargs['_return_http_data_only'] = True - if kwargs.get('async_req'): - return self.delete_model_with_http_info(id, **kwargs) # noqa: E501 + kwargs["_return_http_data_only"] = True + if kwargs.get("async_req"): + return self.delete_model_with_http_info(id, **kwargs) else: - (data) = self.delete_model_with_http_info(id, **kwargs) # noqa: E501 + (data) = self.delete_model_with_http_info(id, **kwargs) return data def delete_model_with_http_info(self, id, **kwargs): # noqa: E501 @@ -250,31 +262,32 @@ def delete_model_with_http_info(self, id, **kwargs): # noqa: E501 returns the request thread. 
""" - all_params = ['id'] # noqa: E501 - all_params.append('async_req') - all_params.append('_return_http_data_only') - all_params.append('_preload_content') - all_params.append('_request_timeout') + all_params = ["id"] # noqa: E501 + all_params.append("async_req") + all_params.append("_return_http_data_only") + all_params.append("_preload_content") + all_params.append("_request_timeout") params = locals() - for key, val in six.iteritems(params['kwargs']): + for key, val in six.iteritems(params["kwargs"]): if key not in all_params: raise TypeError( "Got an unexpected keyword argument '%s'" " to method delete_model" % key ) params[key] = val - del params['kwargs'] + del params["kwargs"] # verify the required parameter 'id' is set - if ('id' not in params or - params['id'] is None): - raise ValueError("Missing the required parameter `id` when calling `delete_model`") # noqa: E501 + if "id" not in params or params["id"] is None: + raise ValueError( + "Missing the required parameter `id` when calling `delete_model`" + ) collection_formats = {} path_params = {} - if 'id' in params: - path_params['id'] = params['id'] # noqa: E501 + if "id" in params: + path_params["id"] = params["id"] # noqa: E501 query_params = [] @@ -288,7 +301,8 @@ def delete_model_with_http_info(self, id, **kwargs): # noqa: E501 auth_settings = [] # noqa: E501 return self.api_client.call_api( - '/models/{id}', 'DELETE', + "/models/{id}", + "DELETE", path_params, query_params, header_params, @@ -297,11 +311,12 @@ def delete_model_with_http_info(self, id, **kwargs): # noqa: E501 files=local_var_files, response_type=None, # noqa: E501 auth_settings=auth_settings, - async_req=params.get('async_req'), - _return_http_data_only=params.get('_return_http_data_only'), - _preload_content=params.get('_preload_content', True), - _request_timeout=params.get('_request_timeout'), - collection_formats=collection_formats) + async_req=params.get("async_req"), + 
_return_http_data_only=params.get("_return_http_data_only"), + _preload_content=params.get("_preload_content", True), + _request_timeout=params.get("_request_timeout"), + collection_formats=collection_formats, + ) def download_model_files(self, id, **kwargs): # noqa: E501 """Returns the model artifacts compressed into a .tgz (.tar.gz) file. # noqa: E501 @@ -318,11 +333,13 @@ def download_model_files(self, id, **kwargs): # noqa: E501 If the method is called asynchronously, returns the request thread. """ - kwargs['_return_http_data_only'] = True - if kwargs.get('async_req'): - return self.download_model_files_with_http_info(id, **kwargs) # noqa: E501 + kwargs["_return_http_data_only"] = True + if kwargs.get("async_req"): + return self.download_model_files_with_http_info(id, **kwargs) else: - (data) = self.download_model_files_with_http_info(id, **kwargs) # noqa: E501 + (data) = self.download_model_files_with_http_info( + id, **kwargs + ) return data def download_model_files_with_http_info(self, id, **kwargs): # noqa: E501 @@ -341,35 +358,38 @@ def download_model_files_with_http_info(self, id, **kwargs): # noqa: E501 returns the request thread. 
""" - all_params = ['id', 'include_generated_code'] # noqa: E501 - all_params.append('async_req') - all_params.append('_return_http_data_only') - all_params.append('_preload_content') - all_params.append('_request_timeout') + all_params = ["id", "include_generated_code"] # noqa: E501 + all_params.append("async_req") + all_params.append("_return_http_data_only") + all_params.append("_preload_content") + all_params.append("_request_timeout") params = locals() - for key, val in six.iteritems(params['kwargs']): + for key, val in six.iteritems(params["kwargs"]): if key not in all_params: raise TypeError( "Got an unexpected keyword argument '%s'" " to method download_model_files" % key ) params[key] = val - del params['kwargs'] + del params["kwargs"] # verify the required parameter 'id' is set - if ('id' not in params or - params['id'] is None): - raise ValueError("Missing the required parameter `id` when calling `download_model_files`") # noqa: E501 + if "id" not in params or params["id"] is None: + raise ValueError( + "Missing the required parameter `id` when calling `download_model_files`" + ) collection_formats = {} path_params = {} - if 'id' in params: - path_params['id'] = params['id'] # noqa: E501 + if "id" in params: + path_params["id"] = params["id"] # noqa: E501 query_params = [] - if 'include_generated_code' in params: - query_params.append(('include_generated_code', params['include_generated_code'])) # noqa: E501 + if "include_generated_code" in params: + query_params.append( + ("include_generated_code", params["include_generated_code"]) + ) header_params = {} @@ -378,27 +398,30 @@ def download_model_files_with_http_info(self, id, **kwargs): # noqa: E501 body_params = None # HTTP header `Accept` - header_params['Accept'] = self.api_client.select_header_accept( - ['application/gzip']) # noqa: E501 + header_params["Accept"] = self.api_client.select_header_accept( + ["application/gzip"] + ) # Authentication setting auth_settings = [] # noqa: E501 return 
self.api_client.call_api( - '/models/{id}/download', 'GET', + "/models/{id}/download", + "GET", path_params, query_params, header_params, body=body_params, post_params=form_params, files=local_var_files, - response_type='file', # noqa: E501 + response_type="file", # noqa: E501 auth_settings=auth_settings, - async_req=params.get('async_req'), - _return_http_data_only=params.get('_return_http_data_only'), - _preload_content=params.get('_preload_content', False), - _request_timeout=params.get('_request_timeout'), - collection_formats=collection_formats) + async_req=params.get("async_req"), + _return_http_data_only=params.get("_return_http_data_only"), + _preload_content=params.get("_preload_content", False), + _request_timeout=params.get("_request_timeout"), + collection_formats=collection_formats, + ) def generate_model_code(self, id, **kwargs): # noqa: E501 """generate_model_code # noqa: E501 @@ -414,11 +437,11 @@ def generate_model_code(self, id, **kwargs): # noqa: E501 If the method is called asynchronously, returns the request thread. """ - kwargs['_return_http_data_only'] = True - if kwargs.get('async_req'): - return self.generate_model_code_with_http_info(id, **kwargs) # noqa: E501 + kwargs["_return_http_data_only"] = True + if kwargs.get("async_req"): + return self.generate_model_code_with_http_info(id, **kwargs) else: - (data) = self.generate_model_code_with_http_info(id, **kwargs) # noqa: E501 + (data) = self.generate_model_code_with_http_info(id, **kwargs) return data def generate_model_code_with_http_info(self, id, **kwargs): # noqa: E501 @@ -436,31 +459,32 @@ def generate_model_code_with_http_info(self, id, **kwargs): # noqa: E501 returns the request thread. 
""" - all_params = ['id'] # noqa: E501 - all_params.append('async_req') - all_params.append('_return_http_data_only') - all_params.append('_preload_content') - all_params.append('_request_timeout') + all_params = ["id"] # noqa: E501 + all_params.append("async_req") + all_params.append("_return_http_data_only") + all_params.append("_preload_content") + all_params.append("_request_timeout") params = locals() - for key, val in six.iteritems(params['kwargs']): + for key, val in six.iteritems(params["kwargs"]): if key not in all_params: raise TypeError( "Got an unexpected keyword argument '%s'" " to method generate_model_code" % key ) params[key] = val - del params['kwargs'] + del params["kwargs"] # verify the required parameter 'id' is set - if ('id' not in params or - params['id'] is None): - raise ValueError("Missing the required parameter `id` when calling `generate_model_code`") # noqa: E501 + if "id" not in params or params["id"] is None: + raise ValueError( + "Missing the required parameter `id` when calling `generate_model_code`" + ) collection_formats = {} path_params = {} - if 'id' in params: - path_params['id'] = params['id'] # noqa: E501 + if "id" in params: + path_params["id"] = params["id"] # noqa: E501 query_params = [] @@ -474,20 +498,22 @@ def generate_model_code_with_http_info(self, id, **kwargs): # noqa: E501 auth_settings = [] # noqa: E501 return self.api_client.call_api( - '/models/{id}/generate_code', 'GET', + "/models/{id}/generate_code", + "GET", path_params, query_params, header_params, body=body_params, post_params=form_params, files=local_var_files, - response_type='ApiGenerateModelCodeResponse', # noqa: E501 + response_type="ApiGenerateModelCodeResponse", # noqa: E501 auth_settings=auth_settings, - async_req=params.get('async_req'), - _return_http_data_only=params.get('_return_http_data_only'), - _preload_content=params.get('_preload_content', True), - _request_timeout=params.get('_request_timeout'), - collection_formats=collection_formats) + 
async_req=params.get("async_req"), + _return_http_data_only=params.get("_return_http_data_only"), + _preload_content=params.get("_preload_content", True), + _request_timeout=params.get("_request_timeout"), + collection_formats=collection_formats, + ) def get_model(self, id, **kwargs): # noqa: E501 """get_model # noqa: E501 @@ -503,11 +529,11 @@ def get_model(self, id, **kwargs): # noqa: E501 If the method is called asynchronously, returns the request thread. """ - kwargs['_return_http_data_only'] = True - if kwargs.get('async_req'): - return self.get_model_with_http_info(id, **kwargs) # noqa: E501 + kwargs["_return_http_data_only"] = True + if kwargs.get("async_req"): + return self.get_model_with_http_info(id, **kwargs) else: - (data) = self.get_model_with_http_info(id, **kwargs) # noqa: E501 + (data) = self.get_model_with_http_info(id, **kwargs) return data def get_model_with_http_info(self, id, **kwargs): # noqa: E501 @@ -525,31 +551,32 @@ def get_model_with_http_info(self, id, **kwargs): # noqa: E501 returns the request thread. 
""" - all_params = ['id'] # noqa: E501 - all_params.append('async_req') - all_params.append('_return_http_data_only') - all_params.append('_preload_content') - all_params.append('_request_timeout') + all_params = ["id"] # noqa: E501 + all_params.append("async_req") + all_params.append("_return_http_data_only") + all_params.append("_preload_content") + all_params.append("_request_timeout") params = locals() - for key, val in six.iteritems(params['kwargs']): + for key, val in six.iteritems(params["kwargs"]): if key not in all_params: raise TypeError( "Got an unexpected keyword argument '%s'" " to method get_model" % key ) params[key] = val - del params['kwargs'] + del params["kwargs"] # verify the required parameter 'id' is set - if ('id' not in params or - params['id'] is None): - raise ValueError("Missing the required parameter `id` when calling `get_model`") # noqa: E501 + if "id" not in params or params["id"] is None: + raise ValueError( + "Missing the required parameter `id` when calling `get_model`" + ) collection_formats = {} path_params = {} - if 'id' in params: - path_params['id'] = params['id'] # noqa: E501 + if "id" in params: + path_params["id"] = params["id"] # noqa: E501 query_params = [] @@ -563,20 +590,22 @@ def get_model_with_http_info(self, id, **kwargs): # noqa: E501 auth_settings = [] # noqa: E501 return self.api_client.call_api( - '/models/{id}', 'GET', + "/models/{id}", + "GET", path_params, query_params, header_params, body=body_params, post_params=form_params, files=local_var_files, - response_type='ApiModel', # noqa: E501 + response_type="ApiModel", # noqa: E501 auth_settings=auth_settings, - async_req=params.get('async_req'), - _return_http_data_only=params.get('_return_http_data_only'), - _preload_content=params.get('_preload_content', True), - _request_timeout=params.get('_request_timeout'), - collection_formats=collection_formats) + async_req=params.get("async_req"), + _return_http_data_only=params.get("_return_http_data_only"), + 
_preload_content=params.get("_preload_content", True), + _request_timeout=params.get("_request_timeout"), + collection_formats=collection_formats, + ) def get_model_template(self, id, **kwargs): # noqa: E501 """get_model_template # noqa: E501 @@ -592,11 +621,11 @@ def get_model_template(self, id, **kwargs): # noqa: E501 If the method is called asynchronously, returns the request thread. """ - kwargs['_return_http_data_only'] = True - if kwargs.get('async_req'): - return self.get_model_template_with_http_info(id, **kwargs) # noqa: E501 + kwargs["_return_http_data_only"] = True + if kwargs.get("async_req"): + return self.get_model_template_with_http_info(id, **kwargs) else: - (data) = self.get_model_template_with_http_info(id, **kwargs) # noqa: E501 + (data) = self.get_model_template_with_http_info(id, **kwargs) return data def get_model_template_with_http_info(self, id, **kwargs): # noqa: E501 @@ -614,31 +643,32 @@ def get_model_template_with_http_info(self, id, **kwargs): # noqa: E501 returns the request thread. 
""" - all_params = ['id'] # noqa: E501 - all_params.append('async_req') - all_params.append('_return_http_data_only') - all_params.append('_preload_content') - all_params.append('_request_timeout') + all_params = ["id"] # noqa: E501 + all_params.append("async_req") + all_params.append("_return_http_data_only") + all_params.append("_preload_content") + all_params.append("_request_timeout") params = locals() - for key, val in six.iteritems(params['kwargs']): + for key, val in six.iteritems(params["kwargs"]): if key not in all_params: raise TypeError( "Got an unexpected keyword argument '%s'" " to method get_model_template" % key ) params[key] = val - del params['kwargs'] + del params["kwargs"] # verify the required parameter 'id' is set - if ('id' not in params or - params['id'] is None): - raise ValueError("Missing the required parameter `id` when calling `get_model_template`") # noqa: E501 + if "id" not in params or params["id"] is None: + raise ValueError( + "Missing the required parameter `id` when calling `get_model_template`" + ) collection_formats = {} path_params = {} - if 'id' in params: - path_params['id'] = params['id'] # noqa: E501 + if "id" in params: + path_params["id"] = params["id"] # noqa: E501 query_params = [] @@ -652,20 +682,22 @@ def get_model_template_with_http_info(self, id, **kwargs): # noqa: E501 auth_settings = [] # noqa: E501 return self.api_client.call_api( - '/models/{id}/templates', 'GET', + "/models/{id}/templates", + "GET", path_params, query_params, header_params, body=body_params, post_params=form_params, files=local_var_files, - response_type='ApiGetTemplateResponse', # noqa: E501 + response_type="ApiGetTemplateResponse", # noqa: E501 auth_settings=auth_settings, - async_req=params.get('async_req'), - _return_http_data_only=params.get('_return_http_data_only'), - _preload_content=params.get('_preload_content', True), - _request_timeout=params.get('_request_timeout'), - collection_formats=collection_formats) + 
async_req=params.get("async_req"), + _return_http_data_only=params.get("_return_http_data_only"), + _preload_content=params.get("_preload_content", True), + _request_timeout=params.get("_request_timeout"), + collection_formats=collection_formats, + ) def list_models(self, **kwargs): # noqa: E501 """list_models # noqa: E501 @@ -684,11 +716,11 @@ def list_models(self, **kwargs): # noqa: E501 If the method is called asynchronously, returns the request thread. """ - kwargs['_return_http_data_only'] = True - if kwargs.get('async_req'): - return self.list_models_with_http_info(**kwargs) # noqa: E501 + kwargs["_return_http_data_only"] = True + if kwargs.get("async_req"): + return self.list_models_with_http_info(**kwargs) else: - (data) = self.list_models_with_http_info(**kwargs) # noqa: E501 + (data) = self.list_models_with_http_info(**kwargs) return data def list_models_with_http_info(self, **kwargs): # noqa: E501 @@ -709,35 +741,35 @@ def list_models_with_http_info(self, **kwargs): # noqa: E501 returns the request thread. 
""" - all_params = ['page_token', 'page_size', 'sort_by', 'filter'] # noqa: E501 - all_params.append('async_req') - all_params.append('_return_http_data_only') - all_params.append('_preload_content') - all_params.append('_request_timeout') + all_params = ["page_token", "page_size", "sort_by", "filter"] # noqa: E501 + all_params.append("async_req") + all_params.append("_return_http_data_only") + all_params.append("_preload_content") + all_params.append("_request_timeout") params = locals() - for key, val in six.iteritems(params['kwargs']): + for key, val in six.iteritems(params["kwargs"]): if key not in all_params: raise TypeError( "Got an unexpected keyword argument '%s'" " to method list_models" % key ) params[key] = val - del params['kwargs'] + del params["kwargs"] collection_formats = {} path_params = {} query_params = [] - if 'page_token' in params: - query_params.append(('page_token', params['page_token'])) # noqa: E501 - if 'page_size' in params: - query_params.append(('page_size', params['page_size'])) # noqa: E501 - if 'sort_by' in params: - query_params.append(('sort_by', params['sort_by'])) # noqa: E501 - if 'filter' in params: - query_params.append(('filter', params['filter'])) # noqa: E501 + if "page_token" in params: + query_params.append(("page_token", params["page_token"])) + if "page_size" in params: + query_params.append(("page_size", params["page_size"])) + if "sort_by" in params: + query_params.append(("sort_by", params["sort_by"])) + if "filter" in params: + query_params.append(("filter", params["filter"])) header_params = {} @@ -749,20 +781,22 @@ def list_models_with_http_info(self, **kwargs): # noqa: E501 auth_settings = [] # noqa: E501 return self.api_client.call_api( - '/models', 'GET', + "/models", + "GET", path_params, query_params, header_params, body=body_params, post_params=form_params, files=local_var_files, - response_type='ApiListModelsResponse', # noqa: E501 + response_type="ApiListModelsResponse", # noqa: E501 
auth_settings=auth_settings, - async_req=params.get('async_req'), - _return_http_data_only=params.get('_return_http_data_only'), - _preload_content=params.get('_preload_content', True), - _request_timeout=params.get('_request_timeout'), - collection_formats=collection_formats) + async_req=params.get("async_req"), + _return_http_data_only=params.get("_return_http_data_only"), + _preload_content=params.get("_preload_content", True), + _request_timeout=params.get("_request_timeout"), + collection_formats=collection_formats, + ) def run_model(self, id, pipeline_stage, execution_platform, **kwargs): # noqa: E501 """run_model # noqa: E501 @@ -782,14 +816,20 @@ def run_model(self, id, pipeline_stage, execution_platform, **kwargs): # noqa: If the method is called asynchronously, returns the request thread. """ - kwargs['_return_http_data_only'] = True - if kwargs.get('async_req'): - return self.run_model_with_http_info(id, pipeline_stage, execution_platform, **kwargs) # noqa: E501 + kwargs["_return_http_data_only"] = True + if kwargs.get("async_req"): + return self.run_model_with_http_info( + id, pipeline_stage, execution_platform, **kwargs + ) else: - (data) = self.run_model_with_http_info(id, pipeline_stage, execution_platform, **kwargs) # noqa: E501 + (data) = self.run_model_with_http_info( + id, pipeline_stage, execution_platform, **kwargs + ) return data - def run_model_with_http_info(self, id, pipeline_stage, execution_platform, **kwargs): # noqa: E501 + def run_model_with_http_info( + self, id, pipeline_stage, execution_platform, **kwargs + ): # noqa: E501 """run_model # noqa: E501 This method makes a synchronous HTTP request by default. To make an @@ -808,47 +848,60 @@ def run_model_with_http_info(self, id, pipeline_stage, execution_platform, **kwa returns the request thread. 
""" - all_params = ['id', 'pipeline_stage', 'execution_platform', 'run_name', 'parameters'] # noqa: E501 - all_params.append('async_req') - all_params.append('_return_http_data_only') - all_params.append('_preload_content') - all_params.append('_request_timeout') + all_params = [ + "id", + "pipeline_stage", + "execution_platform", + "run_name", + "parameters", + ] # noqa: E501 + all_params.append("async_req") + all_params.append("_return_http_data_only") + all_params.append("_preload_content") + all_params.append("_request_timeout") params = locals() - for key, val in six.iteritems(params['kwargs']): + for key, val in six.iteritems(params["kwargs"]): if key not in all_params: raise TypeError( "Got an unexpected keyword argument '%s'" " to method run_model" % key ) params[key] = val - del params['kwargs'] + del params["kwargs"] # verify the required parameter 'id' is set - if ('id' not in params or - params['id'] is None): - raise ValueError("Missing the required parameter `id` when calling `run_model`") # noqa: E501 + if "id" not in params or params["id"] is None: + raise ValueError( + "Missing the required parameter `id` when calling `run_model`" + ) # verify the required parameter 'pipeline_stage' is set - if ('pipeline_stage' not in params or - params['pipeline_stage'] is None): - raise ValueError("Missing the required parameter `pipeline_stage` when calling `run_model`") # noqa: E501 + if "pipeline_stage" not in params or params["pipeline_stage"] is None: + raise ValueError( + "Missing the required parameter `pipeline_stage` when calling `run_model`" + ) # verify the required parameter 'execution_platform' is set - if ('execution_platform' not in params or - params['execution_platform'] is None): - raise ValueError("Missing the required parameter `execution_platform` when calling `run_model`") # noqa: E501 + if "execution_platform" not in params or params["execution_platform"] is None: + raise ValueError( + "Missing the required parameter `execution_platform` 
when calling `run_model`" + ) collection_formats = {} path_params = {} - if 'id' in params: - path_params['id'] = params['id'] # noqa: E501 + if "id" in params: + path_params["id"] = params["id"] # noqa: E501 query_params = [] - if 'pipeline_stage' in params: - query_params.append(('pipeline_stage', params['pipeline_stage'])) # noqa: E501 - if 'execution_platform' in params: - query_params.append(('execution_platform', params['execution_platform'])) # noqa: E501 - if 'run_name' in params: - query_params.append(('run_name', params['run_name'])) # noqa: E501 + if "pipeline_stage" in params: + query_params.append( + ("pipeline_stage", params["pipeline_stage"]) + ) + if "execution_platform" in params: + query_params.append( + ("execution_platform", params["execution_platform"]) + ) + if "run_name" in params: + query_params.append(("run_name", params["run_name"])) header_params = {} @@ -856,26 +909,28 @@ def run_model_with_http_info(self, id, pipeline_stage, execution_platform, **kwa local_var_files = {} body_params = None - if 'parameters' in params: - body_params = params['parameters'] + if "parameters" in params: + body_params = params["parameters"] # Authentication setting auth_settings = [] # noqa: E501 return self.api_client.call_api( - '/models/{id}/run', 'POST', + "/models/{id}/run", + "POST", path_params, query_params, header_params, body=body_params, post_params=form_params, files=local_var_files, - response_type='ApiRunCodeResponse', # noqa: E501 + response_type="ApiRunCodeResponse", # noqa: E501 auth_settings=auth_settings, - async_req=params.get('async_req'), - _return_http_data_only=params.get('_return_http_data_only'), - _preload_content=params.get('_preload_content', True), - _request_timeout=params.get('_request_timeout'), - collection_formats=collection_formats) + async_req=params.get("async_req"), + _return_http_data_only=params.get("_return_http_data_only"), + _preload_content=params.get("_preload_content", True), + 
_request_timeout=params.get("_request_timeout"), + collection_formats=collection_formats, + ) def set_featured_models(self, model_ids, **kwargs): # noqa: E501 """set_featured_models # noqa: E501 @@ -891,11 +946,15 @@ def set_featured_models(self, model_ids, **kwargs): # noqa: E501 If the method is called asynchronously, returns the request thread. """ - kwargs['_return_http_data_only'] = True - if kwargs.get('async_req'): - return self.set_featured_models_with_http_info(model_ids, **kwargs) # noqa: E501 + kwargs["_return_http_data_only"] = True + if kwargs.get("async_req"): + return self.set_featured_models_with_http_info( + model_ids, **kwargs + ) else: - (data) = self.set_featured_models_with_http_info(model_ids, **kwargs) # noqa: E501 + (data) = self.set_featured_models_with_http_info( + model_ids, **kwargs + ) return data def set_featured_models_with_http_info(self, model_ids, **kwargs): # noqa: E501 @@ -913,25 +972,26 @@ def set_featured_models_with_http_info(self, model_ids, **kwargs): # noqa: E501 returns the request thread. 
""" - all_params = ['model_ids'] # noqa: E501 - all_params.append('async_req') - all_params.append('_return_http_data_only') - all_params.append('_preload_content') - all_params.append('_request_timeout') + all_params = ["model_ids"] # noqa: E501 + all_params.append("async_req") + all_params.append("_return_http_data_only") + all_params.append("_preload_content") + all_params.append("_request_timeout") params = locals() - for key, val in six.iteritems(params['kwargs']): + for key, val in six.iteritems(params["kwargs"]): if key not in all_params: raise TypeError( "Got an unexpected keyword argument '%s'" " to method set_featured_models" % key ) params[key] = val - del params['kwargs'] + del params["kwargs"] # verify the required parameter 'model_ids' is set - if ('model_ids' not in params or - params['model_ids'] is None): - raise ValueError("Missing the required parameter `model_ids` when calling `set_featured_models`") # noqa: E501 + if "model_ids" not in params or params["model_ids"] is None: + raise ValueError( + "Missing the required parameter `model_ids` when calling `set_featured_models`" + ) collection_formats = {} @@ -945,13 +1005,14 @@ def set_featured_models_with_http_info(self, model_ids, **kwargs): # noqa: E501 local_var_files = {} body_params = None - if 'model_ids' in params: - body_params = params['model_ids'] + if "model_ids" in params: + body_params = params["model_ids"] # Authentication setting auth_settings = [] # noqa: E501 return self.api_client.call_api( - '/models/featured', 'POST', + "/models/featured", + "POST", path_params, query_params, header_params, @@ -960,11 +1021,12 @@ def set_featured_models_with_http_info(self, model_ids, **kwargs): # noqa: E501 files=local_var_files, response_type=None, # noqa: E501 auth_settings=auth_settings, - async_req=params.get('async_req'), - _return_http_data_only=params.get('_return_http_data_only'), - _preload_content=params.get('_preload_content', True), - 
_request_timeout=params.get('_request_timeout'), - collection_formats=collection_formats) + async_req=params.get("async_req"), + _return_http_data_only=params.get("_return_http_data_only"), + _preload_content=params.get("_preload_content", True), + _request_timeout=params.get("_request_timeout"), + collection_formats=collection_formats, + ) def upload_model(self, uploadfile, **kwargs): # noqa: E501 """upload_model # noqa: E501 @@ -981,11 +1043,13 @@ def upload_model(self, uploadfile, **kwargs): # noqa: E501 If the method is called asynchronously, returns the request thread. """ - kwargs['_return_http_data_only'] = True - if kwargs.get('async_req'): - return self.upload_model_with_http_info(uploadfile, **kwargs) # noqa: E501 + kwargs["_return_http_data_only"] = True + if kwargs.get("async_req"): + return self.upload_model_with_http_info(uploadfile, **kwargs) else: - (data) = self.upload_model_with_http_info(uploadfile, **kwargs) # noqa: E501 + (data) = self.upload_model_with_http_info( + uploadfile, **kwargs + ) return data def upload_model_with_http_info(self, uploadfile, **kwargs): # noqa: E501 @@ -1004,68 +1068,75 @@ def upload_model_with_http_info(self, uploadfile, **kwargs): # noqa: E501 returns the request thread. 
""" - all_params = ['uploadfile', 'name'] # noqa: E501 - all_params.append('async_req') - all_params.append('_return_http_data_only') - all_params.append('_preload_content') - all_params.append('_request_timeout') + all_params = ["uploadfile", "name"] # noqa: E501 + all_params.append("async_req") + all_params.append("_return_http_data_only") + all_params.append("_preload_content") + all_params.append("_request_timeout") params = locals() - for key, val in six.iteritems(params['kwargs']): + for key, val in six.iteritems(params["kwargs"]): if key not in all_params: raise TypeError( "Got an unexpected keyword argument '%s'" " to method upload_model" % key ) params[key] = val - del params['kwargs'] + del params["kwargs"] # verify the required parameter 'uploadfile' is set - if ('uploadfile' not in params or - params['uploadfile'] is None): - raise ValueError("Missing the required parameter `uploadfile` when calling `upload_model`") # noqa: E501 + if "uploadfile" not in params or params["uploadfile"] is None: + raise ValueError( + "Missing the required parameter `uploadfile` when calling `upload_model`" + ) collection_formats = {} path_params = {} query_params = [] - if 'name' in params: - query_params.append(('name', params['name'])) # noqa: E501 + if "name" in params: + query_params.append(("name", params["name"])) header_params = {} form_params = [] local_var_files = {} - if 'uploadfile' in params: - local_var_files['uploadfile'] = params['uploadfile'] # noqa: E501 + if "uploadfile" in params: + local_var_files["uploadfile"] = params["uploadfile"] # noqa: E501 body_params = None # HTTP header `Accept` - header_params['Accept'] = self.api_client.select_header_accept( - ['application/json']) # noqa: E501 + header_params["Accept"] = self.api_client.select_header_accept( + ["application/json"] + ) # HTTP header `Content-Type` - header_params['Content-Type'] = self.api_client.select_header_content_type( # noqa: E501 - ['multipart/form-data']) # noqa: E501 + header_params[ 
+ "Content-Type" + ] = self.api_client.select_header_content_type( # noqa: E501 + ["multipart/form-data"] + ) # Authentication setting auth_settings = [] # noqa: E501 return self.api_client.call_api( - '/models/upload', 'POST', + "/models/upload", + "POST", path_params, query_params, header_params, body=body_params, post_params=form_params, files=local_var_files, - response_type='ApiModel', # noqa: E501 + response_type="ApiModel", # noqa: E501 auth_settings=auth_settings, - async_req=params.get('async_req'), - _return_http_data_only=params.get('_return_http_data_only'), - _preload_content=params.get('_preload_content', True), - _request_timeout=params.get('_request_timeout'), - collection_formats=collection_formats) + async_req=params.get("async_req"), + _return_http_data_only=params.get("_return_http_data_only"), + _preload_content=params.get("_preload_content", True), + _request_timeout=params.get("_request_timeout"), + collection_formats=collection_formats, + ) def upload_model_file(self, id, uploadfile, **kwargs): # noqa: E501 """upload_model_file # noqa: E501 @@ -1082,11 +1153,15 @@ def upload_model_file(self, id, uploadfile, **kwargs): # noqa: E501 If the method is called asynchronously, returns the request thread. """ - kwargs['_return_http_data_only'] = True - if kwargs.get('async_req'): - return self.upload_model_file_with_http_info(id, uploadfile, **kwargs) # noqa: E501 + kwargs["_return_http_data_only"] = True + if kwargs.get("async_req"): + return self.upload_model_file_with_http_info( + id, uploadfile, **kwargs + ) else: - (data) = self.upload_model_file_with_http_info(id, uploadfile, **kwargs) # noqa: E501 + (data) = self.upload_model_file_with_http_info( + id, uploadfile, **kwargs + ) return data def upload_model_file_with_http_info(self, id, uploadfile, **kwargs): # noqa: E501 @@ -1105,35 +1180,37 @@ def upload_model_file_with_http_info(self, id, uploadfile, **kwargs): # noqa: E returns the request thread. 
""" - all_params = ['id', 'uploadfile'] # noqa: E501 - all_params.append('async_req') - all_params.append('_return_http_data_only') - all_params.append('_preload_content') - all_params.append('_request_timeout') + all_params = ["id", "uploadfile"] # noqa: E501 + all_params.append("async_req") + all_params.append("_return_http_data_only") + all_params.append("_preload_content") + all_params.append("_request_timeout") params = locals() - for key, val in six.iteritems(params['kwargs']): + for key, val in six.iteritems(params["kwargs"]): if key not in all_params: raise TypeError( "Got an unexpected keyword argument '%s'" " to method upload_model_file" % key ) params[key] = val - del params['kwargs'] + del params["kwargs"] # verify the required parameter 'id' is set - if ('id' not in params or - params['id'] is None): - raise ValueError("Missing the required parameter `id` when calling `upload_model_file`") # noqa: E501 + if "id" not in params or params["id"] is None: + raise ValueError( + "Missing the required parameter `id` when calling `upload_model_file`" + ) # verify the required parameter 'uploadfile' is set - if ('uploadfile' not in params or - params['uploadfile'] is None): - raise ValueError("Missing the required parameter `uploadfile` when calling `upload_model_file`") # noqa: E501 + if "uploadfile" not in params or params["uploadfile"] is None: + raise ValueError( + "Missing the required parameter `uploadfile` when calling `upload_model_file`" + ) collection_formats = {} path_params = {} - if 'id' in params: - path_params['id'] = params['id'] # noqa: E501 + if "id" in params: + path_params["id"] = params["id"] # noqa: E501 query_params = [] @@ -1141,36 +1218,42 @@ def upload_model_file_with_http_info(self, id, uploadfile, **kwargs): # noqa: E form_params = [] local_var_files = {} - if 'uploadfile' in params: - local_var_files['uploadfile'] = params['uploadfile'] # noqa: E501 + if "uploadfile" in params: + local_var_files["uploadfile"] = params["uploadfile"] # 
noqa: E501 body_params = None # HTTP header `Accept` - header_params['Accept'] = self.api_client.select_header_accept( - ['application/json']) # noqa: E501 + header_params["Accept"] = self.api_client.select_header_accept( + ["application/json"] + ) # HTTP header `Content-Type` - header_params['Content-Type'] = self.api_client.select_header_content_type( # noqa: E501 - ['multipart/form-data']) # noqa: E501 + header_params[ + "Content-Type" + ] = self.api_client.select_header_content_type( # noqa: E501 + ["multipart/form-data"] + ) # Authentication setting auth_settings = [] # noqa: E501 return self.api_client.call_api( - '/models/{id}/upload', 'POST', + "/models/{id}/upload", + "POST", path_params, query_params, header_params, body=body_params, post_params=form_params, files=local_var_files, - response_type='ApiModel', # noqa: E501 + response_type="ApiModel", # noqa: E501 auth_settings=auth_settings, - async_req=params.get('async_req'), - _return_http_data_only=params.get('_return_http_data_only'), - _preload_content=params.get('_preload_content', True), - _request_timeout=params.get('_request_timeout'), - collection_formats=collection_formats) + async_req=params.get("async_req"), + _return_http_data_only=params.get("_return_http_data_only"), + _preload_content=params.get("_preload_content", True), + _request_timeout=params.get("_request_timeout"), + collection_formats=collection_formats, + ) def upload_model_from_url(self, url, **kwargs): # noqa: E501 """upload_model_from_url # noqa: E501 @@ -1188,11 +1271,15 @@ def upload_model_from_url(self, url, **kwargs): # noqa: E501 If the method is called asynchronously, returns the request thread. 
""" - kwargs['_return_http_data_only'] = True - if kwargs.get('async_req'): - return self.upload_model_from_url_with_http_info(url, **kwargs) # noqa: E501 + kwargs["_return_http_data_only"] = True + if kwargs.get("async_req"): + return self.upload_model_from_url_with_http_info( + url, **kwargs + ) else: - (data) = self.upload_model_from_url_with_http_info(url, **kwargs) # noqa: E501 + (data) = self.upload_model_from_url_with_http_info( + url, **kwargs + ) return data def upload_model_from_url_with_http_info(self, url, **kwargs): # noqa: E501 @@ -1212,67 +1299,74 @@ def upload_model_from_url_with_http_info(self, url, **kwargs): # noqa: E501 returns the request thread. """ - all_params = ['url', 'name', 'access_token'] # noqa: E501 - all_params.append('async_req') - all_params.append('_return_http_data_only') - all_params.append('_preload_content') - all_params.append('_request_timeout') + all_params = ["url", "name", "access_token"] # noqa: E501 + all_params.append("async_req") + all_params.append("_return_http_data_only") + all_params.append("_preload_content") + all_params.append("_request_timeout") params = locals() - for key, val in six.iteritems(params['kwargs']): + for key, val in six.iteritems(params["kwargs"]): if key not in all_params: raise TypeError( "Got an unexpected keyword argument '%s'" " to method upload_model_from_url" % key ) params[key] = val - del params['kwargs'] + del params["kwargs"] # verify the required parameter 'url' is set - if ('url' not in params or - params['url'] is None): - raise ValueError("Missing the required parameter `url` when calling `upload_model_from_url`") # noqa: E501 + if "url" not in params or params["url"] is None: + raise ValueError( + "Missing the required parameter `url` when calling `upload_model_from_url`" + ) collection_formats = {} path_params = {} query_params = [] - if 'name' in params: - query_params.append(('name', params['name'])) # noqa: E501 + if "name" in params: + query_params.append(("name", 
params["name"])) header_params = {} form_params = [] local_var_files = {} - if 'url' in params: - form_params.append(('url', params['url'])) # noqa: E501 - if 'access_token' in params: - form_params.append(('access_token', params['access_token'])) # noqa: E501 + if "url" in params: + form_params.append(("url", params["url"])) + if "access_token" in params: + form_params.append(("access_token", params["access_token"])) body_params = None # HTTP header `Accept` - header_params['Accept'] = self.api_client.select_header_accept( - ['application/json']) # noqa: E501 + header_params["Accept"] = self.api_client.select_header_accept( + ["application/json"] + ) # HTTP header `Content-Type` - header_params['Content-Type'] = self.api_client.select_header_content_type( # noqa: E501 - ['multipart/form-data']) # noqa: E501 + header_params[ + "Content-Type" + ] = self.api_client.select_header_content_type( # noqa: E501 + ["multipart/form-data"] + ) # Authentication setting auth_settings = [] # noqa: E501 return self.api_client.call_api( - '/models/upload_from_url', 'POST', + "/models/upload_from_url", + "POST", path_params, query_params, header_params, body=body_params, post_params=form_params, files=local_var_files, - response_type='ApiModel', # noqa: E501 + response_type="ApiModel", # noqa: E501 auth_settings=auth_settings, - async_req=params.get('async_req'), - _return_http_data_only=params.get('_return_http_data_only'), - _preload_content=params.get('_preload_content', True), - _request_timeout=params.get('_request_timeout'), - collection_formats=collection_formats) + async_req=params.get("async_req"), + _return_http_data_only=params.get("_return_http_data_only"), + _preload_content=params.get("_preload_content", True), + _request_timeout=params.get("_request_timeout"), + collection_formats=collection_formats, + ) diff --git a/api/client/swagger_client/api/notebook_service_api.py b/api/client/swagger_client/api/notebook_service_api.py index 05722971..f900a992 100644 --- 
a/api/client/swagger_client/api/notebook_service_api.py +++ b/api/client/swagger_client/api/notebook_service_api.py @@ -19,7 +19,7 @@ import re # noqa: F401 # python 2 and python 3 compatibility library -import six +import six # noqa: F401 from swagger_client.api_client import ApiClient @@ -50,14 +50,20 @@ def approve_notebooks_for_publishing(self, notebook_ids, **kwargs): # noqa: E50 If the method is called asynchronously, returns the request thread. """ - kwargs['_return_http_data_only'] = True - if kwargs.get('async_req'): - return self.approve_notebooks_for_publishing_with_http_info(notebook_ids, **kwargs) # noqa: E501 + kwargs["_return_http_data_only"] = True + if kwargs.get("async_req"): + return self.approve_notebooks_for_publishing_with_http_info( + notebook_ids, **kwargs + ) else: - (data) = self.approve_notebooks_for_publishing_with_http_info(notebook_ids, **kwargs) # noqa: E501 + (data) = self.approve_notebooks_for_publishing_with_http_info( + notebook_ids, **kwargs + ) return data - def approve_notebooks_for_publishing_with_http_info(self, notebook_ids, **kwargs): # noqa: E501 + def approve_notebooks_for_publishing_with_http_info( + self, notebook_ids, **kwargs + ): # noqa: E501 """approve_notebooks_for_publishing # noqa: E501 This method makes a synchronous HTTP request by default. To make an @@ -72,25 +78,26 @@ def approve_notebooks_for_publishing_with_http_info(self, notebook_ids, **kwargs returns the request thread. 
""" - all_params = ['notebook_ids'] # noqa: E501 - all_params.append('async_req') - all_params.append('_return_http_data_only') - all_params.append('_preload_content') - all_params.append('_request_timeout') + all_params = ["notebook_ids"] # noqa: E501 + all_params.append("async_req") + all_params.append("_return_http_data_only") + all_params.append("_preload_content") + all_params.append("_request_timeout") params = locals() - for key, val in six.iteritems(params['kwargs']): + for key, val in six.iteritems(params["kwargs"]): if key not in all_params: raise TypeError( "Got an unexpected keyword argument '%s'" " to method approve_notebooks_for_publishing" % key ) params[key] = val - del params['kwargs'] + del params["kwargs"] # verify the required parameter 'notebook_ids' is set - if ('notebook_ids' not in params or - params['notebook_ids'] is None): - raise ValueError("Missing the required parameter `notebook_ids` when calling `approve_notebooks_for_publishing`") # noqa: E501 + if "notebook_ids" not in params or params["notebook_ids"] is None: + raise ValueError( + "Missing the required parameter `notebook_ids` when calling `approve_notebooks_for_publishing`" + ) collection_formats = {} @@ -104,13 +111,14 @@ def approve_notebooks_for_publishing_with_http_info(self, notebook_ids, **kwargs local_var_files = {} body_params = None - if 'notebook_ids' in params: - body_params = params['notebook_ids'] + if "notebook_ids" in params: + body_params = params["notebook_ids"] # Authentication setting auth_settings = [] # noqa: E501 return self.api_client.call_api( - '/notebooks/publish_approved', 'POST', + "/notebooks/publish_approved", + "POST", path_params, query_params, header_params, @@ -119,11 +127,12 @@ def approve_notebooks_for_publishing_with_http_info(self, notebook_ids, **kwargs files=local_var_files, response_type=None, # noqa: E501 auth_settings=auth_settings, - async_req=params.get('async_req'), - _return_http_data_only=params.get('_return_http_data_only'), - 
_preload_content=params.get('_preload_content', True), - _request_timeout=params.get('_request_timeout'), - collection_formats=collection_formats) + async_req=params.get("async_req"), + _return_http_data_only=params.get("_return_http_data_only"), + _preload_content=params.get("_preload_content", True), + _request_timeout=params.get("_request_timeout"), + collection_formats=collection_formats, + ) def create_notebook(self, body, **kwargs): # noqa: E501 """create_notebook # noqa: E501 @@ -139,11 +148,11 @@ def create_notebook(self, body, **kwargs): # noqa: E501 If the method is called asynchronously, returns the request thread. """ - kwargs['_return_http_data_only'] = True - if kwargs.get('async_req'): - return self.create_notebook_with_http_info(body, **kwargs) # noqa: E501 + kwargs["_return_http_data_only"] = True + if kwargs.get("async_req"): + return self.create_notebook_with_http_info(body, **kwargs) else: - (data) = self.create_notebook_with_http_info(body, **kwargs) # noqa: E501 + (data) = self.create_notebook_with_http_info(body, **kwargs) return data def create_notebook_with_http_info(self, body, **kwargs): # noqa: E501 @@ -161,25 +170,26 @@ def create_notebook_with_http_info(self, body, **kwargs): # noqa: E501 returns the request thread. 
""" - all_params = ['body'] # noqa: E501 - all_params.append('async_req') - all_params.append('_return_http_data_only') - all_params.append('_preload_content') - all_params.append('_request_timeout') + all_params = ["body"] # noqa: E501 + all_params.append("async_req") + all_params.append("_return_http_data_only") + all_params.append("_preload_content") + all_params.append("_request_timeout") params = locals() - for key, val in six.iteritems(params['kwargs']): + for key, val in six.iteritems(params["kwargs"]): if key not in all_params: raise TypeError( "Got an unexpected keyword argument '%s'" " to method create_notebook" % key ) params[key] = val - del params['kwargs'] + del params["kwargs"] # verify the required parameter 'body' is set - if ('body' not in params or - params['body'] is None): - raise ValueError("Missing the required parameter `body` when calling `create_notebook`") # noqa: E501 + if "body" not in params or params["body"] is None: + raise ValueError( + "Missing the required parameter `body` when calling `create_notebook`" + ) collection_formats = {} @@ -193,26 +203,28 @@ def create_notebook_with_http_info(self, body, **kwargs): # noqa: E501 local_var_files = {} body_params = None - if 'body' in params: - body_params = params['body'] + if "body" in params: + body_params = params["body"] # Authentication setting auth_settings = [] # noqa: E501 return self.api_client.call_api( - '/notebooks', 'POST', + "/notebooks", + "POST", path_params, query_params, header_params, body=body_params, post_params=form_params, files=local_var_files, - response_type='ApiNotebook', # noqa: E501 + response_type="ApiNotebook", # noqa: E501 auth_settings=auth_settings, - async_req=params.get('async_req'), - _return_http_data_only=params.get('_return_http_data_only'), - _preload_content=params.get('_preload_content', True), - _request_timeout=params.get('_request_timeout'), - collection_formats=collection_formats) + async_req=params.get("async_req"), + 
_return_http_data_only=params.get("_return_http_data_only"), + _preload_content=params.get("_preload_content", True), + _request_timeout=params.get("_request_timeout"), + collection_formats=collection_formats, + ) def delete_notebook(self, id, **kwargs): # noqa: E501 """delete_notebook # noqa: E501 @@ -228,11 +240,11 @@ def delete_notebook(self, id, **kwargs): # noqa: E501 If the method is called asynchronously, returns the request thread. """ - kwargs['_return_http_data_only'] = True - if kwargs.get('async_req'): - return self.delete_notebook_with_http_info(id, **kwargs) # noqa: E501 + kwargs["_return_http_data_only"] = True + if kwargs.get("async_req"): + return self.delete_notebook_with_http_info(id, **kwargs) else: - (data) = self.delete_notebook_with_http_info(id, **kwargs) # noqa: E501 + (data) = self.delete_notebook_with_http_info(id, **kwargs) return data def delete_notebook_with_http_info(self, id, **kwargs): # noqa: E501 @@ -250,31 +262,32 @@ def delete_notebook_with_http_info(self, id, **kwargs): # noqa: E501 returns the request thread. 
""" - all_params = ['id'] # noqa: E501 - all_params.append('async_req') - all_params.append('_return_http_data_only') - all_params.append('_preload_content') - all_params.append('_request_timeout') + all_params = ["id"] # noqa: E501 + all_params.append("async_req") + all_params.append("_return_http_data_only") + all_params.append("_preload_content") + all_params.append("_request_timeout") params = locals() - for key, val in six.iteritems(params['kwargs']): + for key, val in six.iteritems(params["kwargs"]): if key not in all_params: raise TypeError( "Got an unexpected keyword argument '%s'" " to method delete_notebook" % key ) params[key] = val - del params['kwargs'] + del params["kwargs"] # verify the required parameter 'id' is set - if ('id' not in params or - params['id'] is None): - raise ValueError("Missing the required parameter `id` when calling `delete_notebook`") # noqa: E501 + if "id" not in params or params["id"] is None: + raise ValueError( + "Missing the required parameter `id` when calling `delete_notebook`" + ) collection_formats = {} path_params = {} - if 'id' in params: - path_params['id'] = params['id'] # noqa: E501 + if "id" in params: + path_params["id"] = params["id"] # noqa: E501 query_params = [] @@ -288,7 +301,8 @@ def delete_notebook_with_http_info(self, id, **kwargs): # noqa: E501 auth_settings = [] # noqa: E501 return self.api_client.call_api( - '/notebooks/{id}', 'DELETE', + "/notebooks/{id}", + "DELETE", path_params, query_params, header_params, @@ -297,11 +311,12 @@ def delete_notebook_with_http_info(self, id, **kwargs): # noqa: E501 files=local_var_files, response_type=None, # noqa: E501 auth_settings=auth_settings, - async_req=params.get('async_req'), - _return_http_data_only=params.get('_return_http_data_only'), - _preload_content=params.get('_preload_content', True), - _request_timeout=params.get('_request_timeout'), - collection_formats=collection_formats) + async_req=params.get("async_req"), + 
_return_http_data_only=params.get("_return_http_data_only"), + _preload_content=params.get("_preload_content", True), + _request_timeout=params.get("_request_timeout"), + collection_formats=collection_formats, + ) def download_notebook_files(self, id, **kwargs): # noqa: E501 """Returns the notebook artifacts compressed into a .tgz (.tar.gz) file. # noqa: E501 @@ -318,11 +333,15 @@ def download_notebook_files(self, id, **kwargs): # noqa: E501 If the method is called asynchronously, returns the request thread. """ - kwargs['_return_http_data_only'] = True - if kwargs.get('async_req'): - return self.download_notebook_files_with_http_info(id, **kwargs) # noqa: E501 + kwargs["_return_http_data_only"] = True + if kwargs.get("async_req"): + return self.download_notebook_files_with_http_info( + id, **kwargs + ) else: - (data) = self.download_notebook_files_with_http_info(id, **kwargs) # noqa: E501 + (data) = self.download_notebook_files_with_http_info( + id, **kwargs + ) return data def download_notebook_files_with_http_info(self, id, **kwargs): # noqa: E501 @@ -341,35 +360,38 @@ def download_notebook_files_with_http_info(self, id, **kwargs): # noqa: E501 returns the request thread. 
""" - all_params = ['id', 'include_generated_code'] # noqa: E501 - all_params.append('async_req') - all_params.append('_return_http_data_only') - all_params.append('_preload_content') - all_params.append('_request_timeout') + all_params = ["id", "include_generated_code"] # noqa: E501 + all_params.append("async_req") + all_params.append("_return_http_data_only") + all_params.append("_preload_content") + all_params.append("_request_timeout") params = locals() - for key, val in six.iteritems(params['kwargs']): + for key, val in six.iteritems(params["kwargs"]): if key not in all_params: raise TypeError( "Got an unexpected keyword argument '%s'" " to method download_notebook_files" % key ) params[key] = val - del params['kwargs'] + del params["kwargs"] # verify the required parameter 'id' is set - if ('id' not in params or - params['id'] is None): - raise ValueError("Missing the required parameter `id` when calling `download_notebook_files`") # noqa: E501 + if "id" not in params or params["id"] is None: + raise ValueError( + "Missing the required parameter `id` when calling `download_notebook_files`" + ) collection_formats = {} path_params = {} - if 'id' in params: - path_params['id'] = params['id'] # noqa: E501 + if "id" in params: + path_params["id"] = params["id"] # noqa: E501 query_params = [] - if 'include_generated_code' in params: - query_params.append(('include_generated_code', params['include_generated_code'])) # noqa: E501 + if "include_generated_code" in params: + query_params.append( + ("include_generated_code", params["include_generated_code"]) + ) header_params = {} @@ -378,27 +400,30 @@ def download_notebook_files_with_http_info(self, id, **kwargs): # noqa: E501 body_params = None # HTTP header `Accept` - header_params['Accept'] = self.api_client.select_header_accept( - ['application/gzip']) # noqa: E501 + header_params["Accept"] = self.api_client.select_header_accept( + ["application/gzip"] + ) # Authentication setting auth_settings = [] # noqa: E501 
return self.api_client.call_api( - '/notebooks/{id}/download', 'GET', + "/notebooks/{id}/download", + "GET", path_params, query_params, header_params, body=body_params, post_params=form_params, files=local_var_files, - response_type='file', # noqa: E501 + response_type="file", # noqa: E501 auth_settings=auth_settings, - async_req=params.get('async_req'), - _return_http_data_only=params.get('_return_http_data_only'), - _preload_content=params.get('_preload_content', False), - _request_timeout=params.get('_request_timeout'), - collection_formats=collection_formats) + async_req=params.get("async_req"), + _return_http_data_only=params.get("_return_http_data_only"), + _preload_content=params.get("_preload_content", False), + _request_timeout=params.get("_request_timeout"), + collection_formats=collection_formats, + ) def generate_notebook_code(self, id, **kwargs): # noqa: E501 """generate_notebook_code # noqa: E501 @@ -415,11 +440,15 @@ def generate_notebook_code(self, id, **kwargs): # noqa: E501 If the method is called asynchronously, returns the request thread. """ - kwargs['_return_http_data_only'] = True - if kwargs.get('async_req'): - return self.generate_notebook_code_with_http_info(id, **kwargs) # noqa: E501 + kwargs["_return_http_data_only"] = True + if kwargs.get("async_req"): + return self.generate_notebook_code_with_http_info( + id, **kwargs + ) else: - (data) = self.generate_notebook_code_with_http_info(id, **kwargs) # noqa: E501 + (data) = self.generate_notebook_code_with_http_info( + id, **kwargs + ) return data def generate_notebook_code_with_http_info(self, id, **kwargs): # noqa: E501 @@ -438,31 +467,32 @@ def generate_notebook_code_with_http_info(self, id, **kwargs): # noqa: E501 returns the request thread. 
""" - all_params = ['id'] # noqa: E501 - all_params.append('async_req') - all_params.append('_return_http_data_only') - all_params.append('_preload_content') - all_params.append('_request_timeout') + all_params = ["id"] # noqa: E501 + all_params.append("async_req") + all_params.append("_return_http_data_only") + all_params.append("_preload_content") + all_params.append("_request_timeout") params = locals() - for key, val in six.iteritems(params['kwargs']): + for key, val in six.iteritems(params["kwargs"]): if key not in all_params: raise TypeError( "Got an unexpected keyword argument '%s'" " to method generate_notebook_code" % key ) params[key] = val - del params['kwargs'] + del params["kwargs"] # verify the required parameter 'id' is set - if ('id' not in params or - params['id'] is None): - raise ValueError("Missing the required parameter `id` when calling `generate_notebook_code`") # noqa: E501 + if "id" not in params or params["id"] is None: + raise ValueError( + "Missing the required parameter `id` when calling `generate_notebook_code`" + ) collection_formats = {} path_params = {} - if 'id' in params: - path_params['id'] = params['id'] # noqa: E501 + if "id" in params: + path_params["id"] = params["id"] # noqa: E501 query_params = [] @@ -476,20 +506,22 @@ def generate_notebook_code_with_http_info(self, id, **kwargs): # noqa: E501 auth_settings = [] # noqa: E501 return self.api_client.call_api( - '/notebooks/{id}/generate_code', 'GET', + "/notebooks/{id}/generate_code", + "GET", path_params, query_params, header_params, body=body_params, post_params=form_params, files=local_var_files, - response_type='ApiGenerateCodeResponse', # noqa: E501 + response_type="ApiGenerateCodeResponse", # noqa: E501 auth_settings=auth_settings, - async_req=params.get('async_req'), - _return_http_data_only=params.get('_return_http_data_only'), - _preload_content=params.get('_preload_content', True), - _request_timeout=params.get('_request_timeout'), - 
collection_formats=collection_formats) + async_req=params.get("async_req"), + _return_http_data_only=params.get("_return_http_data_only"), + _preload_content=params.get("_preload_content", True), + _request_timeout=params.get("_request_timeout"), + collection_formats=collection_formats, + ) def get_notebook(self, id, **kwargs): # noqa: E501 """get_notebook # noqa: E501 @@ -505,11 +537,11 @@ def get_notebook(self, id, **kwargs): # noqa: E501 If the method is called asynchronously, returns the request thread. """ - kwargs['_return_http_data_only'] = True - if kwargs.get('async_req'): - return self.get_notebook_with_http_info(id, **kwargs) # noqa: E501 + kwargs["_return_http_data_only"] = True + if kwargs.get("async_req"): + return self.get_notebook_with_http_info(id, **kwargs) else: - (data) = self.get_notebook_with_http_info(id, **kwargs) # noqa: E501 + (data) = self.get_notebook_with_http_info(id, **kwargs) return data def get_notebook_with_http_info(self, id, **kwargs): # noqa: E501 @@ -527,31 +559,32 @@ def get_notebook_with_http_info(self, id, **kwargs): # noqa: E501 returns the request thread. 
""" - all_params = ['id'] # noqa: E501 - all_params.append('async_req') - all_params.append('_return_http_data_only') - all_params.append('_preload_content') - all_params.append('_request_timeout') + all_params = ["id"] # noqa: E501 + all_params.append("async_req") + all_params.append("_return_http_data_only") + all_params.append("_preload_content") + all_params.append("_request_timeout") params = locals() - for key, val in six.iteritems(params['kwargs']): + for key, val in six.iteritems(params["kwargs"]): if key not in all_params: raise TypeError( "Got an unexpected keyword argument '%s'" " to method get_notebook" % key ) params[key] = val - del params['kwargs'] + del params["kwargs"] # verify the required parameter 'id' is set - if ('id' not in params or - params['id'] is None): - raise ValueError("Missing the required parameter `id` when calling `get_notebook`") # noqa: E501 + if "id" not in params or params["id"] is None: + raise ValueError( + "Missing the required parameter `id` when calling `get_notebook`" + ) collection_formats = {} path_params = {} - if 'id' in params: - path_params['id'] = params['id'] # noqa: E501 + if "id" in params: + path_params["id"] = params["id"] # noqa: E501 query_params = [] @@ -565,20 +598,22 @@ def get_notebook_with_http_info(self, id, **kwargs): # noqa: E501 auth_settings = [] # noqa: E501 return self.api_client.call_api( - '/notebooks/{id}', 'GET', + "/notebooks/{id}", + "GET", path_params, query_params, header_params, body=body_params, post_params=form_params, files=local_var_files, - response_type='ApiNotebook', # noqa: E501 + response_type="ApiNotebook", # noqa: E501 auth_settings=auth_settings, - async_req=params.get('async_req'), - _return_http_data_only=params.get('_return_http_data_only'), - _preload_content=params.get('_preload_content', True), - _request_timeout=params.get('_request_timeout'), - collection_formats=collection_formats) + async_req=params.get("async_req"), + 
_return_http_data_only=params.get("_return_http_data_only"), + _preload_content=params.get("_preload_content", True), + _request_timeout=params.get("_request_timeout"), + collection_formats=collection_formats, + ) def get_notebook_template(self, id, **kwargs): # noqa: E501 """get_notebook_template # noqa: E501 @@ -594,11 +629,13 @@ def get_notebook_template(self, id, **kwargs): # noqa: E501 If the method is called asynchronously, returns the request thread. """ - kwargs['_return_http_data_only'] = True - if kwargs.get('async_req'): - return self.get_notebook_template_with_http_info(id, **kwargs) # noqa: E501 + kwargs["_return_http_data_only"] = True + if kwargs.get("async_req"): + return self.get_notebook_template_with_http_info(id, **kwargs) else: - (data) = self.get_notebook_template_with_http_info(id, **kwargs) # noqa: E501 + (data) = self.get_notebook_template_with_http_info( + id, **kwargs + ) return data def get_notebook_template_with_http_info(self, id, **kwargs): # noqa: E501 @@ -616,31 +653,32 @@ def get_notebook_template_with_http_info(self, id, **kwargs): # noqa: E501 returns the request thread. 
""" - all_params = ['id'] # noqa: E501 - all_params.append('async_req') - all_params.append('_return_http_data_only') - all_params.append('_preload_content') - all_params.append('_request_timeout') + all_params = ["id"] # noqa: E501 + all_params.append("async_req") + all_params.append("_return_http_data_only") + all_params.append("_preload_content") + all_params.append("_request_timeout") params = locals() - for key, val in six.iteritems(params['kwargs']): + for key, val in six.iteritems(params["kwargs"]): if key not in all_params: raise TypeError( "Got an unexpected keyword argument '%s'" " to method get_notebook_template" % key ) params[key] = val - del params['kwargs'] + del params["kwargs"] # verify the required parameter 'id' is set - if ('id' not in params or - params['id'] is None): - raise ValueError("Missing the required parameter `id` when calling `get_notebook_template`") # noqa: E501 + if "id" not in params or params["id"] is None: + raise ValueError( + "Missing the required parameter `id` when calling `get_notebook_template`" + ) collection_formats = {} path_params = {} - if 'id' in params: - path_params['id'] = params['id'] # noqa: E501 + if "id" in params: + path_params["id"] = params["id"] # noqa: E501 query_params = [] @@ -654,20 +692,22 @@ def get_notebook_template_with_http_info(self, id, **kwargs): # noqa: E501 auth_settings = [] # noqa: E501 return self.api_client.call_api( - '/notebooks/{id}/templates', 'GET', + "/notebooks/{id}/templates", + "GET", path_params, query_params, header_params, body=body_params, post_params=form_params, files=local_var_files, - response_type='ApiGetTemplateResponse', # noqa: E501 + response_type="ApiGetTemplateResponse", # noqa: E501 auth_settings=auth_settings, - async_req=params.get('async_req'), - _return_http_data_only=params.get('_return_http_data_only'), - _preload_content=params.get('_preload_content', True), - _request_timeout=params.get('_request_timeout'), - collection_formats=collection_formats) + 
async_req=params.get("async_req"), + _return_http_data_only=params.get("_return_http_data_only"), + _preload_content=params.get("_preload_content", True), + _request_timeout=params.get("_request_timeout"), + collection_formats=collection_formats, + ) def list_notebooks(self, **kwargs): # noqa: E501 """list_notebooks # noqa: E501 @@ -686,11 +726,11 @@ def list_notebooks(self, **kwargs): # noqa: E501 If the method is called asynchronously, returns the request thread. """ - kwargs['_return_http_data_only'] = True - if kwargs.get('async_req'): - return self.list_notebooks_with_http_info(**kwargs) # noqa: E501 + kwargs["_return_http_data_only"] = True + if kwargs.get("async_req"): + return self.list_notebooks_with_http_info(**kwargs) else: - (data) = self.list_notebooks_with_http_info(**kwargs) # noqa: E501 + (data) = self.list_notebooks_with_http_info(**kwargs) return data def list_notebooks_with_http_info(self, **kwargs): # noqa: E501 @@ -711,35 +751,35 @@ def list_notebooks_with_http_info(self, **kwargs): # noqa: E501 returns the request thread. 
""" - all_params = ['page_token', 'page_size', 'sort_by', 'filter'] # noqa: E501 - all_params.append('async_req') - all_params.append('_return_http_data_only') - all_params.append('_preload_content') - all_params.append('_request_timeout') + all_params = ["page_token", "page_size", "sort_by", "filter"] # noqa: E501 + all_params.append("async_req") + all_params.append("_return_http_data_only") + all_params.append("_preload_content") + all_params.append("_request_timeout") params = locals() - for key, val in six.iteritems(params['kwargs']): + for key, val in six.iteritems(params["kwargs"]): if key not in all_params: raise TypeError( "Got an unexpected keyword argument '%s'" " to method list_notebooks" % key ) params[key] = val - del params['kwargs'] + del params["kwargs"] collection_formats = {} path_params = {} query_params = [] - if 'page_token' in params: - query_params.append(('page_token', params['page_token'])) # noqa: E501 - if 'page_size' in params: - query_params.append(('page_size', params['page_size'])) # noqa: E501 - if 'sort_by' in params: - query_params.append(('sort_by', params['sort_by'])) # noqa: E501 - if 'filter' in params: - query_params.append(('filter', params['filter'])) # noqa: E501 + if "page_token" in params: + query_params.append(("page_token", params["page_token"])) + if "page_size" in params: + query_params.append(("page_size", params["page_size"])) + if "sort_by" in params: + query_params.append(("sort_by", params["sort_by"])) + if "filter" in params: + query_params.append(("filter", params["filter"])) header_params = {} @@ -751,20 +791,22 @@ def list_notebooks_with_http_info(self, **kwargs): # noqa: E501 auth_settings = [] # noqa: E501 return self.api_client.call_api( - '/notebooks', 'GET', + "/notebooks", + "GET", path_params, query_params, header_params, body=body_params, post_params=form_params, files=local_var_files, - response_type='ApiListNotebooksResponse', # noqa: E501 + response_type="ApiListNotebooksResponse", # noqa: E501 
auth_settings=auth_settings, - async_req=params.get('async_req'), - _return_http_data_only=params.get('_return_http_data_only'), - _preload_content=params.get('_preload_content', True), - _request_timeout=params.get('_request_timeout'), - collection_formats=collection_formats) + async_req=params.get("async_req"), + _return_http_data_only=params.get("_return_http_data_only"), + _preload_content=params.get("_preload_content", True), + _request_timeout=params.get("_request_timeout"), + collection_formats=collection_formats, + ) def run_notebook(self, id, **kwargs): # noqa: E501 """run_notebook # noqa: E501 @@ -782,11 +824,11 @@ def run_notebook(self, id, **kwargs): # noqa: E501 If the method is called asynchronously, returns the request thread. """ - kwargs['_return_http_data_only'] = True - if kwargs.get('async_req'): - return self.run_notebook_with_http_info(id, **kwargs) # noqa: E501 + kwargs["_return_http_data_only"] = True + if kwargs.get("async_req"): + return self.run_notebook_with_http_info(id, **kwargs) else: - (data) = self.run_notebook_with_http_info(id, **kwargs) # noqa: E501 + (data) = self.run_notebook_with_http_info(id, **kwargs) return data def run_notebook_with_http_info(self, id, **kwargs): # noqa: E501 @@ -806,35 +848,36 @@ def run_notebook_with_http_info(self, id, **kwargs): # noqa: E501 returns the request thread. 
""" - all_params = ['id', 'run_name', 'parameters'] # noqa: E501 - all_params.append('async_req') - all_params.append('_return_http_data_only') - all_params.append('_preload_content') - all_params.append('_request_timeout') + all_params = ["id", "run_name", "parameters"] # noqa: E501 + all_params.append("async_req") + all_params.append("_return_http_data_only") + all_params.append("_preload_content") + all_params.append("_request_timeout") params = locals() - for key, val in six.iteritems(params['kwargs']): + for key, val in six.iteritems(params["kwargs"]): if key not in all_params: raise TypeError( "Got an unexpected keyword argument '%s'" " to method run_notebook" % key ) params[key] = val - del params['kwargs'] + del params["kwargs"] # verify the required parameter 'id' is set - if ('id' not in params or - params['id'] is None): - raise ValueError("Missing the required parameter `id` when calling `run_notebook`") # noqa: E501 + if "id" not in params or params["id"] is None: + raise ValueError( + "Missing the required parameter `id` when calling `run_notebook`" + ) collection_formats = {} path_params = {} - if 'id' in params: - path_params['id'] = params['id'] # noqa: E501 + if "id" in params: + path_params["id"] = params["id"] # noqa: E501 query_params = [] - if 'run_name' in params: - query_params.append(('run_name', params['run_name'])) # noqa: E501 + if "run_name" in params: + query_params.append(("run_name", params["run_name"])) header_params = {} @@ -842,26 +885,28 @@ def run_notebook_with_http_info(self, id, **kwargs): # noqa: E501 local_var_files = {} body_params = None - if 'parameters' in params: - body_params = params['parameters'] + if "parameters" in params: + body_params = params["parameters"] # Authentication setting auth_settings = [] # noqa: E501 return self.api_client.call_api( - '/notebooks/{id}/run', 'POST', + "/notebooks/{id}/run", + "POST", path_params, query_params, header_params, body=body_params, post_params=form_params, 
files=local_var_files, - response_type='ApiRunCodeResponse', # noqa: E501 + response_type="ApiRunCodeResponse", # noqa: E501 auth_settings=auth_settings, - async_req=params.get('async_req'), - _return_http_data_only=params.get('_return_http_data_only'), - _preload_content=params.get('_preload_content', True), - _request_timeout=params.get('_request_timeout'), - collection_formats=collection_formats) + async_req=params.get("async_req"), + _return_http_data_only=params.get("_return_http_data_only"), + _preload_content=params.get("_preload_content", True), + _request_timeout=params.get("_request_timeout"), + collection_formats=collection_formats, + ) def set_featured_notebooks(self, notebook_ids, **kwargs): # noqa: E501 """set_featured_notebooks # noqa: E501 @@ -877,14 +922,20 @@ def set_featured_notebooks(self, notebook_ids, **kwargs): # noqa: E501 If the method is called asynchronously, returns the request thread. """ - kwargs['_return_http_data_only'] = True - if kwargs.get('async_req'): - return self.set_featured_notebooks_with_http_info(notebook_ids, **kwargs) # noqa: E501 + kwargs["_return_http_data_only"] = True + if kwargs.get("async_req"): + return self.set_featured_notebooks_with_http_info( + notebook_ids, **kwargs + ) else: - (data) = self.set_featured_notebooks_with_http_info(notebook_ids, **kwargs) # noqa: E501 + (data) = self.set_featured_notebooks_with_http_info( + notebook_ids, **kwargs + ) return data - def set_featured_notebooks_with_http_info(self, notebook_ids, **kwargs): # noqa: E501 + def set_featured_notebooks_with_http_info( + self, notebook_ids, **kwargs + ): # noqa: E501 """set_featured_notebooks # noqa: E501 This method makes a synchronous HTTP request by default. To make an @@ -899,25 +950,26 @@ def set_featured_notebooks_with_http_info(self, notebook_ids, **kwargs): # noqa returns the request thread. 
""" - all_params = ['notebook_ids'] # noqa: E501 - all_params.append('async_req') - all_params.append('_return_http_data_only') - all_params.append('_preload_content') - all_params.append('_request_timeout') + all_params = ["notebook_ids"] # noqa: E501 + all_params.append("async_req") + all_params.append("_return_http_data_only") + all_params.append("_preload_content") + all_params.append("_request_timeout") params = locals() - for key, val in six.iteritems(params['kwargs']): + for key, val in six.iteritems(params["kwargs"]): if key not in all_params: raise TypeError( "Got an unexpected keyword argument '%s'" " to method set_featured_notebooks" % key ) params[key] = val - del params['kwargs'] + del params["kwargs"] # verify the required parameter 'notebook_ids' is set - if ('notebook_ids' not in params or - params['notebook_ids'] is None): - raise ValueError("Missing the required parameter `notebook_ids` when calling `set_featured_notebooks`") # noqa: E501 + if "notebook_ids" not in params or params["notebook_ids"] is None: + raise ValueError( + "Missing the required parameter `notebook_ids` when calling `set_featured_notebooks`" + ) collection_formats = {} @@ -931,13 +983,14 @@ def set_featured_notebooks_with_http_info(self, notebook_ids, **kwargs): # noqa local_var_files = {} body_params = None - if 'notebook_ids' in params: - body_params = params['notebook_ids'] + if "notebook_ids" in params: + body_params = params["notebook_ids"] # Authentication setting auth_settings = [] # noqa: E501 return self.api_client.call_api( - '/notebooks/featured', 'POST', + "/notebooks/featured", + "POST", path_params, query_params, header_params, @@ -946,11 +999,12 @@ def set_featured_notebooks_with_http_info(self, notebook_ids, **kwargs): # noqa files=local_var_files, response_type=None, # noqa: E501 auth_settings=auth_settings, - async_req=params.get('async_req'), - _return_http_data_only=params.get('_return_http_data_only'), - _preload_content=params.get('_preload_content', 
True), - _request_timeout=params.get('_request_timeout'), - collection_formats=collection_formats) + async_req=params.get("async_req"), + _return_http_data_only=params.get("_return_http_data_only"), + _preload_content=params.get("_preload_content", True), + _request_timeout=params.get("_request_timeout"), + collection_formats=collection_formats, + ) def upload_notebook(self, uploadfile, **kwargs): # noqa: E501 """upload_notebook # noqa: E501 @@ -968,11 +1022,15 @@ def upload_notebook(self, uploadfile, **kwargs): # noqa: E501 If the method is called asynchronously, returns the request thread. """ - kwargs['_return_http_data_only'] = True - if kwargs.get('async_req'): - return self.upload_notebook_with_http_info(uploadfile, **kwargs) # noqa: E501 + kwargs["_return_http_data_only"] = True + if kwargs.get("async_req"): + return self.upload_notebook_with_http_info( + uploadfile, **kwargs + ) else: - (data) = self.upload_notebook_with_http_info(uploadfile, **kwargs) # noqa: E501 + (data) = self.upload_notebook_with_http_info( + uploadfile, **kwargs + ) return data def upload_notebook_with_http_info(self, uploadfile, **kwargs): # noqa: E501 @@ -992,70 +1050,79 @@ def upload_notebook_with_http_info(self, uploadfile, **kwargs): # noqa: E501 returns the request thread. 
""" - all_params = ['uploadfile', 'name', 'enterprise_github_token'] # noqa: E501 - all_params.append('async_req') - all_params.append('_return_http_data_only') - all_params.append('_preload_content') - all_params.append('_request_timeout') + all_params = ["uploadfile", "name", "enterprise_github_token"] # noqa: E501 + all_params.append("async_req") + all_params.append("_return_http_data_only") + all_params.append("_preload_content") + all_params.append("_request_timeout") params = locals() - for key, val in six.iteritems(params['kwargs']): + for key, val in six.iteritems(params["kwargs"]): if key not in all_params: raise TypeError( "Got an unexpected keyword argument '%s'" " to method upload_notebook" % key ) params[key] = val - del params['kwargs'] + del params["kwargs"] # verify the required parameter 'uploadfile' is set - if ('uploadfile' not in params or - params['uploadfile'] is None): - raise ValueError("Missing the required parameter `uploadfile` when calling `upload_notebook`") # noqa: E501 + if "uploadfile" not in params or params["uploadfile"] is None: + raise ValueError( + "Missing the required parameter `uploadfile` when calling `upload_notebook`" + ) collection_formats = {} path_params = {} query_params = [] - if 'name' in params: - query_params.append(('name', params['name'])) # noqa: E501 + if "name" in params: + query_params.append(("name", params["name"])) header_params = {} form_params = [] local_var_files = {} - if 'uploadfile' in params: - local_var_files['uploadfile'] = params['uploadfile'] # noqa: E501 - if 'enterprise_github_token' in params: - form_params.append(('enterprise_github_token', params['enterprise_github_token'])) # noqa: E501 + if "uploadfile" in params: + local_var_files["uploadfile"] = params["uploadfile"] # noqa: E501 + if "enterprise_github_token" in params: + form_params.append( + ("enterprise_github_token", params["enterprise_github_token"]) + ) body_params = None # HTTP header `Accept` - header_params['Accept'] = 
self.api_client.select_header_accept( - ['application/json']) # noqa: E501 + header_params["Accept"] = self.api_client.select_header_accept( + ["application/json"] + ) # HTTP header `Content-Type` - header_params['Content-Type'] = self.api_client.select_header_content_type( # noqa: E501 - ['multipart/form-data']) # noqa: E501 + header_params[ + "Content-Type" + ] = self.api_client.select_header_content_type( # noqa: E501 + ["multipart/form-data"] + ) # Authentication setting auth_settings = [] # noqa: E501 return self.api_client.call_api( - '/notebooks/upload', 'POST', + "/notebooks/upload", + "POST", path_params, query_params, header_params, body=body_params, post_params=form_params, files=local_var_files, - response_type='ApiNotebook', # noqa: E501 + response_type="ApiNotebook", # noqa: E501 auth_settings=auth_settings, - async_req=params.get('async_req'), - _return_http_data_only=params.get('_return_http_data_only'), - _preload_content=params.get('_preload_content', True), - _request_timeout=params.get('_request_timeout'), - collection_formats=collection_formats) + async_req=params.get("async_req"), + _return_http_data_only=params.get("_return_http_data_only"), + _preload_content=params.get("_preload_content", True), + _request_timeout=params.get("_request_timeout"), + collection_formats=collection_formats, + ) def upload_notebook_file(self, id, uploadfile, **kwargs): # noqa: E501 """upload_notebook_file # noqa: E501 @@ -1072,14 +1139,20 @@ def upload_notebook_file(self, id, uploadfile, **kwargs): # noqa: E501 If the method is called asynchronously, returns the request thread. 
""" - kwargs['_return_http_data_only'] = True - if kwargs.get('async_req'): - return self.upload_notebook_file_with_http_info(id, uploadfile, **kwargs) # noqa: E501 + kwargs["_return_http_data_only"] = True + if kwargs.get("async_req"): + return self.upload_notebook_file_with_http_info( + id, uploadfile, **kwargs + ) else: - (data) = self.upload_notebook_file_with_http_info(id, uploadfile, **kwargs) # noqa: E501 + (data) = self.upload_notebook_file_with_http_info( + id, uploadfile, **kwargs + ) return data - def upload_notebook_file_with_http_info(self, id, uploadfile, **kwargs): # noqa: E501 + def upload_notebook_file_with_http_info( + self, id, uploadfile, **kwargs + ): # noqa: E501 """upload_notebook_file # noqa: E501 This method makes a synchronous HTTP request by default. To make an @@ -1095,35 +1168,37 @@ def upload_notebook_file_with_http_info(self, id, uploadfile, **kwargs): # noqa returns the request thread. """ - all_params = ['id', 'uploadfile'] # noqa: E501 - all_params.append('async_req') - all_params.append('_return_http_data_only') - all_params.append('_preload_content') - all_params.append('_request_timeout') + all_params = ["id", "uploadfile"] # noqa: E501 + all_params.append("async_req") + all_params.append("_return_http_data_only") + all_params.append("_preload_content") + all_params.append("_request_timeout") params = locals() - for key, val in six.iteritems(params['kwargs']): + for key, val in six.iteritems(params["kwargs"]): if key not in all_params: raise TypeError( "Got an unexpected keyword argument '%s'" " to method upload_notebook_file" % key ) params[key] = val - del params['kwargs'] + del params["kwargs"] # verify the required parameter 'id' is set - if ('id' not in params or - params['id'] is None): - raise ValueError("Missing the required parameter `id` when calling `upload_notebook_file`") # noqa: E501 + if "id" not in params or params["id"] is None: + raise ValueError( + "Missing the required parameter `id` when calling 
`upload_notebook_file`" + ) # verify the required parameter 'uploadfile' is set - if ('uploadfile' not in params or - params['uploadfile'] is None): - raise ValueError("Missing the required parameter `uploadfile` when calling `upload_notebook_file`") # noqa: E501 + if "uploadfile" not in params or params["uploadfile"] is None: + raise ValueError( + "Missing the required parameter `uploadfile` when calling `upload_notebook_file`" + ) collection_formats = {} path_params = {} - if 'id' in params: - path_params['id'] = params['id'] # noqa: E501 + if "id" in params: + path_params["id"] = params["id"] # noqa: E501 query_params = [] @@ -1131,36 +1206,42 @@ def upload_notebook_file_with_http_info(self, id, uploadfile, **kwargs): # noqa form_params = [] local_var_files = {} - if 'uploadfile' in params: - local_var_files['uploadfile'] = params['uploadfile'] # noqa: E501 + if "uploadfile" in params: + local_var_files["uploadfile"] = params["uploadfile"] # noqa: E501 body_params = None # HTTP header `Accept` - header_params['Accept'] = self.api_client.select_header_accept( - ['application/json']) # noqa: E501 + header_params["Accept"] = self.api_client.select_header_accept( + ["application/json"] + ) # HTTP header `Content-Type` - header_params['Content-Type'] = self.api_client.select_header_content_type( # noqa: E501 - ['multipart/form-data']) # noqa: E501 + header_params[ + "Content-Type" + ] = self.api_client.select_header_content_type( # noqa: E501 + ["multipart/form-data"] + ) # Authentication setting auth_settings = [] # noqa: E501 return self.api_client.call_api( - '/notebooks/{id}/upload', 'POST', + "/notebooks/{id}/upload", + "POST", path_params, query_params, header_params, body=body_params, post_params=form_params, files=local_var_files, - response_type='ApiNotebook', # noqa: E501 + response_type="ApiNotebook", # noqa: E501 auth_settings=auth_settings, - async_req=params.get('async_req'), - _return_http_data_only=params.get('_return_http_data_only'), - 
_preload_content=params.get('_preload_content', True), - _request_timeout=params.get('_request_timeout'), - collection_formats=collection_formats) + async_req=params.get("async_req"), + _return_http_data_only=params.get("_return_http_data_only"), + _preload_content=params.get("_preload_content", True), + _request_timeout=params.get("_request_timeout"), + collection_formats=collection_formats, + ) def upload_notebook_from_url(self, url, **kwargs): # noqa: E501 """upload_notebook_from_url # noqa: E501 @@ -1178,11 +1259,15 @@ def upload_notebook_from_url(self, url, **kwargs): # noqa: E501 If the method is called asynchronously, returns the request thread. """ - kwargs['_return_http_data_only'] = True - if kwargs.get('async_req'): - return self.upload_notebook_from_url_with_http_info(url, **kwargs) # noqa: E501 + kwargs["_return_http_data_only"] = True + if kwargs.get("async_req"): + return self.upload_notebook_from_url_with_http_info( + url, **kwargs + ) else: - (data) = self.upload_notebook_from_url_with_http_info(url, **kwargs) # noqa: E501 + (data) = self.upload_notebook_from_url_with_http_info( + url, **kwargs + ) return data def upload_notebook_from_url_with_http_info(self, url, **kwargs): # noqa: E501 @@ -1202,67 +1287,74 @@ def upload_notebook_from_url_with_http_info(self, url, **kwargs): # noqa: E501 returns the request thread. 
""" - all_params = ['url', 'name', 'access_token'] # noqa: E501 - all_params.append('async_req') - all_params.append('_return_http_data_only') - all_params.append('_preload_content') - all_params.append('_request_timeout') + all_params = ["url", "name", "access_token"] # noqa: E501 + all_params.append("async_req") + all_params.append("_return_http_data_only") + all_params.append("_preload_content") + all_params.append("_request_timeout") params = locals() - for key, val in six.iteritems(params['kwargs']): + for key, val in six.iteritems(params["kwargs"]): if key not in all_params: raise TypeError( "Got an unexpected keyword argument '%s'" " to method upload_notebook_from_url" % key ) params[key] = val - del params['kwargs'] + del params["kwargs"] # verify the required parameter 'url' is set - if ('url' not in params or - params['url'] is None): - raise ValueError("Missing the required parameter `url` when calling `upload_notebook_from_url`") # noqa: E501 + if "url" not in params or params["url"] is None: + raise ValueError( + "Missing the required parameter `url` when calling `upload_notebook_from_url`" + ) collection_formats = {} path_params = {} query_params = [] - if 'name' in params: - query_params.append(('name', params['name'])) # noqa: E501 + if "name" in params: + query_params.append(("name", params["name"])) header_params = {} form_params = [] local_var_files = {} - if 'url' in params: - form_params.append(('url', params['url'])) # noqa: E501 - if 'access_token' in params: - form_params.append(('access_token', params['access_token'])) # noqa: E501 + if "url" in params: + form_params.append(("url", params["url"])) + if "access_token" in params: + form_params.append(("access_token", params["access_token"])) body_params = None # HTTP header `Accept` - header_params['Accept'] = self.api_client.select_header_accept( - ['application/json']) # noqa: E501 + header_params["Accept"] = self.api_client.select_header_accept( + ["application/json"] + ) # HTTP header 
`Content-Type` - header_params['Content-Type'] = self.api_client.select_header_content_type( # noqa: E501 - ['multipart/form-data']) # noqa: E501 + header_params[ + "Content-Type" + ] = self.api_client.select_header_content_type( # noqa: E501 + ["multipart/form-data"] + ) # Authentication setting auth_settings = [] # noqa: E501 return self.api_client.call_api( - '/notebooks/upload_from_url', 'POST', + "/notebooks/upload_from_url", + "POST", path_params, query_params, header_params, body=body_params, post_params=form_params, files=local_var_files, - response_type='ApiNotebook', # noqa: E501 + response_type="ApiNotebook", # noqa: E501 auth_settings=auth_settings, - async_req=params.get('async_req'), - _return_http_data_only=params.get('_return_http_data_only'), - _preload_content=params.get('_preload_content', True), - _request_timeout=params.get('_request_timeout'), - collection_formats=collection_formats) + async_req=params.get("async_req"), + _return_http_data_only=params.get("_return_http_data_only"), + _preload_content=params.get("_preload_content", True), + _request_timeout=params.get("_request_timeout"), + collection_formats=collection_formats, + ) diff --git a/api/client/swagger_client/api/pipeline_service_api.py b/api/client/swagger_client/api/pipeline_service_api.py index 06fe670f..5a0bbd7a 100644 --- a/api/client/swagger_client/api/pipeline_service_api.py +++ b/api/client/swagger_client/api/pipeline_service_api.py @@ -19,7 +19,7 @@ import re # noqa: F401 # python 2 and python 3 compatibility library -import six +import six # noqa: F401 from swagger_client.api_client import ApiClient @@ -50,14 +50,20 @@ def approve_pipelines_for_publishing(self, pipeline_ids, **kwargs): # noqa: E50 If the method is called asynchronously, returns the request thread. 
""" - kwargs['_return_http_data_only'] = True - if kwargs.get('async_req'): - return self.approve_pipelines_for_publishing_with_http_info(pipeline_ids, **kwargs) # noqa: E501 + kwargs["_return_http_data_only"] = True + if kwargs.get("async_req"): + return self.approve_pipelines_for_publishing_with_http_info( + pipeline_ids, **kwargs + ) else: - (data) = self.approve_pipelines_for_publishing_with_http_info(pipeline_ids, **kwargs) # noqa: E501 + (data) = self.approve_pipelines_for_publishing_with_http_info( + pipeline_ids, **kwargs + ) return data - def approve_pipelines_for_publishing_with_http_info(self, pipeline_ids, **kwargs): # noqa: E501 + def approve_pipelines_for_publishing_with_http_info( + self, pipeline_ids, **kwargs + ): # noqa: E501 """approve_pipelines_for_publishing # noqa: E501 This method makes a synchronous HTTP request by default. To make an @@ -72,25 +78,26 @@ def approve_pipelines_for_publishing_with_http_info(self, pipeline_ids, **kwargs returns the request thread. """ - all_params = ['pipeline_ids'] # noqa: E501 - all_params.append('async_req') - all_params.append('_return_http_data_only') - all_params.append('_preload_content') - all_params.append('_request_timeout') + all_params = ["pipeline_ids"] # noqa: E501 + all_params.append("async_req") + all_params.append("_return_http_data_only") + all_params.append("_preload_content") + all_params.append("_request_timeout") params = locals() - for key, val in six.iteritems(params['kwargs']): + for key, val in six.iteritems(params["kwargs"]): if key not in all_params: raise TypeError( "Got an unexpected keyword argument '%s'" " to method approve_pipelines_for_publishing" % key ) params[key] = val - del params['kwargs'] + del params["kwargs"] # verify the required parameter 'pipeline_ids' is set - if ('pipeline_ids' not in params or - params['pipeline_ids'] is None): - raise ValueError("Missing the required parameter `pipeline_ids` when calling `approve_pipelines_for_publishing`") # noqa: E501 + if 
"pipeline_ids" not in params or params["pipeline_ids"] is None: + raise ValueError( + "Missing the required parameter `pipeline_ids` when calling `approve_pipelines_for_publishing`" + ) collection_formats = {} @@ -104,13 +111,14 @@ def approve_pipelines_for_publishing_with_http_info(self, pipeline_ids, **kwargs local_var_files = {} body_params = None - if 'pipeline_ids' in params: - body_params = params['pipeline_ids'] + if "pipeline_ids" in params: + body_params = params["pipeline_ids"] # Authentication setting auth_settings = [] # noqa: E501 return self.api_client.call_api( - '/pipelines/publish_approved', 'POST', + "/pipelines/publish_approved", + "POST", path_params, query_params, header_params, @@ -119,11 +127,12 @@ def approve_pipelines_for_publishing_with_http_info(self, pipeline_ids, **kwargs files=local_var_files, response_type=None, # noqa: E501 auth_settings=auth_settings, - async_req=params.get('async_req'), - _return_http_data_only=params.get('_return_http_data_only'), - _preload_content=params.get('_preload_content', True), - _request_timeout=params.get('_request_timeout'), - collection_formats=collection_formats) + async_req=params.get("async_req"), + _return_http_data_only=params.get("_return_http_data_only"), + _preload_content=params.get("_preload_content", True), + _request_timeout=params.get("_request_timeout"), + collection_formats=collection_formats, + ) def create_pipeline(self, body, **kwargs): # noqa: E501 """create_pipeline # noqa: E501 @@ -139,11 +148,11 @@ def create_pipeline(self, body, **kwargs): # noqa: E501 If the method is called asynchronously, returns the request thread. 
""" - kwargs['_return_http_data_only'] = True - if kwargs.get('async_req'): - return self.create_pipeline_with_http_info(body, **kwargs) # noqa: E501 + kwargs["_return_http_data_only"] = True + if kwargs.get("async_req"): + return self.create_pipeline_with_http_info(body, **kwargs) else: - (data) = self.create_pipeline_with_http_info(body, **kwargs) # noqa: E501 + (data) = self.create_pipeline_with_http_info(body, **kwargs) return data def create_pipeline_with_http_info(self, body, **kwargs): # noqa: E501 @@ -161,25 +170,26 @@ def create_pipeline_with_http_info(self, body, **kwargs): # noqa: E501 returns the request thread. """ - all_params = ['body'] # noqa: E501 - all_params.append('async_req') - all_params.append('_return_http_data_only') - all_params.append('_preload_content') - all_params.append('_request_timeout') + all_params = ["body"] # noqa: E501 + all_params.append("async_req") + all_params.append("_return_http_data_only") + all_params.append("_preload_content") + all_params.append("_request_timeout") params = locals() - for key, val in six.iteritems(params['kwargs']): + for key, val in six.iteritems(params["kwargs"]): if key not in all_params: raise TypeError( "Got an unexpected keyword argument '%s'" " to method create_pipeline" % key ) params[key] = val - del params['kwargs'] + del params["kwargs"] # verify the required parameter 'body' is set - if ('body' not in params or - params['body'] is None): - raise ValueError("Missing the required parameter `body` when calling `create_pipeline`") # noqa: E501 + if "body" not in params or params["body"] is None: + raise ValueError( + "Missing the required parameter `body` when calling `create_pipeline`" + ) collection_formats = {} @@ -193,26 +203,28 @@ def create_pipeline_with_http_info(self, body, **kwargs): # noqa: E501 local_var_files = {} body_params = None - if 'body' in params: - body_params = params['body'] + if "body" in params: + body_params = params["body"] # Authentication setting auth_settings = [] 
# noqa: E501 return self.api_client.call_api( - '/pipelines', 'POST', + "/pipelines", + "POST", path_params, query_params, header_params, body=body_params, post_params=form_params, files=local_var_files, - response_type='ApiPipeline', # noqa: E501 + response_type="ApiPipeline", # noqa: E501 auth_settings=auth_settings, - async_req=params.get('async_req'), - _return_http_data_only=params.get('_return_http_data_only'), - _preload_content=params.get('_preload_content', True), - _request_timeout=params.get('_request_timeout'), - collection_formats=collection_formats) + async_req=params.get("async_req"), + _return_http_data_only=params.get("_return_http_data_only"), + _preload_content=params.get("_preload_content", True), + _request_timeout=params.get("_request_timeout"), + collection_formats=collection_formats, + ) def delete_pipeline(self, id, **kwargs): # noqa: E501 """delete_pipeline # noqa: E501 @@ -228,11 +240,11 @@ def delete_pipeline(self, id, **kwargs): # noqa: E501 If the method is called asynchronously, returns the request thread. """ - kwargs['_return_http_data_only'] = True - if kwargs.get('async_req'): - return self.delete_pipeline_with_http_info(id, **kwargs) # noqa: E501 + kwargs["_return_http_data_only"] = True + if kwargs.get("async_req"): + return self.delete_pipeline_with_http_info(id, **kwargs) else: - (data) = self.delete_pipeline_with_http_info(id, **kwargs) # noqa: E501 + (data) = self.delete_pipeline_with_http_info(id, **kwargs) return data def delete_pipeline_with_http_info(self, id, **kwargs): # noqa: E501 @@ -250,31 +262,32 @@ def delete_pipeline_with_http_info(self, id, **kwargs): # noqa: E501 returns the request thread. 
""" - all_params = ['id'] # noqa: E501 - all_params.append('async_req') - all_params.append('_return_http_data_only') - all_params.append('_preload_content') - all_params.append('_request_timeout') + all_params = ["id"] # noqa: E501 + all_params.append("async_req") + all_params.append("_return_http_data_only") + all_params.append("_preload_content") + all_params.append("_request_timeout") params = locals() - for key, val in six.iteritems(params['kwargs']): + for key, val in six.iteritems(params["kwargs"]): if key not in all_params: raise TypeError( "Got an unexpected keyword argument '%s'" " to method delete_pipeline" % key ) params[key] = val - del params['kwargs'] + del params["kwargs"] # verify the required parameter 'id' is set - if ('id' not in params or - params['id'] is None): - raise ValueError("Missing the required parameter `id` when calling `delete_pipeline`") # noqa: E501 + if "id" not in params or params["id"] is None: + raise ValueError( + "Missing the required parameter `id` when calling `delete_pipeline`" + ) collection_formats = {} path_params = {} - if 'id' in params: - path_params['id'] = params['id'] # noqa: E501 + if "id" in params: + path_params["id"] = params["id"] # noqa: E501 query_params = [] @@ -288,7 +301,8 @@ def delete_pipeline_with_http_info(self, id, **kwargs): # noqa: E501 auth_settings = [] # noqa: E501 return self.api_client.call_api( - '/pipelines/{id}', 'DELETE', + "/pipelines/{id}", + "DELETE", path_params, query_params, header_params, @@ -297,11 +311,12 @@ def delete_pipeline_with_http_info(self, id, **kwargs): # noqa: E501 files=local_var_files, response_type=None, # noqa: E501 auth_settings=auth_settings, - async_req=params.get('async_req'), - _return_http_data_only=params.get('_return_http_data_only'), - _preload_content=params.get('_preload_content', True), - _request_timeout=params.get('_request_timeout'), - collection_formats=collection_formats) + async_req=params.get("async_req"), + 
_return_http_data_only=params.get("_return_http_data_only"), + _preload_content=params.get("_preload_content", True), + _request_timeout=params.get("_request_timeout"), + collection_formats=collection_formats, + ) def download_pipeline_files(self, id, **kwargs): # noqa: E501 """Returns the pipeline YAML compressed into a .tgz (.tar.gz) file. # noqa: E501 @@ -317,11 +332,15 @@ def download_pipeline_files(self, id, **kwargs): # noqa: E501 If the method is called asynchronously, returns the request thread. """ - kwargs['_return_http_data_only'] = True - if kwargs.get('async_req'): - return self.download_pipeline_files_with_http_info(id, **kwargs) # noqa: E501 + kwargs["_return_http_data_only"] = True + if kwargs.get("async_req"): + return self.download_pipeline_files_with_http_info( + id, **kwargs + ) else: - (data) = self.download_pipeline_files_with_http_info(id, **kwargs) # noqa: E501 + (data) = self.download_pipeline_files_with_http_info( + id, **kwargs + ) return data def download_pipeline_files_with_http_info(self, id, **kwargs): # noqa: E501 @@ -339,31 +358,32 @@ def download_pipeline_files_with_http_info(self, id, **kwargs): # noqa: E501 returns the request thread. 
""" - all_params = ['id'] # noqa: E501 - all_params.append('async_req') - all_params.append('_return_http_data_only') - all_params.append('_preload_content') - all_params.append('_request_timeout') + all_params = ["id"] # noqa: E501 + all_params.append("async_req") + all_params.append("_return_http_data_only") + all_params.append("_preload_content") + all_params.append("_request_timeout") params = locals() - for key, val in six.iteritems(params['kwargs']): + for key, val in six.iteritems(params["kwargs"]): if key not in all_params: raise TypeError( "Got an unexpected keyword argument '%s'" " to method download_pipeline_files" % key ) params[key] = val - del params['kwargs'] + del params["kwargs"] # verify the required parameter 'id' is set - if ('id' not in params or - params['id'] is None): - raise ValueError("Missing the required parameter `id` when calling `download_pipeline_files`") # noqa: E501 + if "id" not in params or params["id"] is None: + raise ValueError( + "Missing the required parameter `id` when calling `download_pipeline_files`" + ) collection_formats = {} path_params = {} - if 'id' in params: - path_params['id'] = params['id'] # noqa: E501 + if "id" in params: + path_params["id"] = params["id"] # noqa: E501 query_params = [] @@ -374,27 +394,30 @@ def download_pipeline_files_with_http_info(self, id, **kwargs): # noqa: E501 body_params = None # HTTP header `Accept` - header_params['Accept'] = self.api_client.select_header_accept( - ['application/gzip']) # noqa: E501 + header_params["Accept"] = self.api_client.select_header_accept( + ["application/gzip"] + ) # Authentication setting auth_settings = [] # noqa: E501 return self.api_client.call_api( - '/pipelines/{id}/download', 'GET', + "/pipelines/{id}/download", + "GET", path_params, query_params, header_params, body=body_params, post_params=form_params, files=local_var_files, - response_type='file', # noqa: E501 + response_type="file", # noqa: E501 auth_settings=auth_settings, - 
async_req=params.get('async_req'), - _return_http_data_only=params.get('_return_http_data_only'), - _preload_content=params.get('_preload_content', False), - _request_timeout=params.get('_request_timeout'), - collection_formats=collection_formats) + async_req=params.get("async_req"), + _return_http_data_only=params.get("_return_http_data_only"), + _preload_content=params.get("_preload_content", False), + _request_timeout=params.get("_request_timeout"), + collection_formats=collection_formats, + ) def get_pipeline(self, id, **kwargs): # noqa: E501 """get_pipeline # noqa: E501 @@ -410,11 +433,11 @@ def get_pipeline(self, id, **kwargs): # noqa: E501 If the method is called asynchronously, returns the request thread. """ - kwargs['_return_http_data_only'] = True - if kwargs.get('async_req'): - return self.get_pipeline_with_http_info(id, **kwargs) # noqa: E501 + kwargs["_return_http_data_only"] = True + if kwargs.get("async_req"): + return self.get_pipeline_with_http_info(id, **kwargs) else: - (data) = self.get_pipeline_with_http_info(id, **kwargs) # noqa: E501 + (data) = self.get_pipeline_with_http_info(id, **kwargs) return data def get_pipeline_with_http_info(self, id, **kwargs): # noqa: E501 @@ -432,31 +455,32 @@ def get_pipeline_with_http_info(self, id, **kwargs): # noqa: E501 returns the request thread. 
""" - all_params = ['id'] # noqa: E501 - all_params.append('async_req') - all_params.append('_return_http_data_only') - all_params.append('_preload_content') - all_params.append('_request_timeout') + all_params = ["id"] # noqa: E501 + all_params.append("async_req") + all_params.append("_return_http_data_only") + all_params.append("_preload_content") + all_params.append("_request_timeout") params = locals() - for key, val in six.iteritems(params['kwargs']): + for key, val in six.iteritems(params["kwargs"]): if key not in all_params: raise TypeError( "Got an unexpected keyword argument '%s'" " to method get_pipeline" % key ) params[key] = val - del params['kwargs'] + del params["kwargs"] # verify the required parameter 'id' is set - if ('id' not in params or - params['id'] is None): - raise ValueError("Missing the required parameter `id` when calling `get_pipeline`") # noqa: E501 + if "id" not in params or params["id"] is None: + raise ValueError( + "Missing the required parameter `id` when calling `get_pipeline`" + ) collection_formats = {} path_params = {} - if 'id' in params: - path_params['id'] = params['id'] # noqa: E501 + if "id" in params: + path_params["id"] = params["id"] # noqa: E501 query_params = [] @@ -470,20 +494,22 @@ def get_pipeline_with_http_info(self, id, **kwargs): # noqa: E501 auth_settings = [] # noqa: E501 return self.api_client.call_api( - '/pipelines/{id}', 'GET', + "/pipelines/{id}", + "GET", path_params, query_params, header_params, body=body_params, post_params=form_params, files=local_var_files, - response_type='ApiPipelineExtended', # noqa: E501 + response_type="ApiPipelineExtended", # noqa: E501 auth_settings=auth_settings, - async_req=params.get('async_req'), - _return_http_data_only=params.get('_return_http_data_only'), - _preload_content=params.get('_preload_content', True), - _request_timeout=params.get('_request_timeout'), - collection_formats=collection_formats) + async_req=params.get("async_req"), + 
_return_http_data_only=params.get("_return_http_data_only"), + _preload_content=params.get("_preload_content", True), + _request_timeout=params.get("_request_timeout"), + collection_formats=collection_formats, + ) def get_template(self, id, **kwargs): # noqa: E501 """get_template # noqa: E501 @@ -499,11 +525,11 @@ def get_template(self, id, **kwargs): # noqa: E501 If the method is called asynchronously, returns the request thread. """ - kwargs['_return_http_data_only'] = True - if kwargs.get('async_req'): - return self.get_template_with_http_info(id, **kwargs) # noqa: E501 + kwargs["_return_http_data_only"] = True + if kwargs.get("async_req"): + return self.get_template_with_http_info(id, **kwargs) else: - (data) = self.get_template_with_http_info(id, **kwargs) # noqa: E501 + (data) = self.get_template_with_http_info(id, **kwargs) return data def get_template_with_http_info(self, id, **kwargs): # noqa: E501 @@ -521,31 +547,32 @@ def get_template_with_http_info(self, id, **kwargs): # noqa: E501 returns the request thread. 
""" - all_params = ['id'] # noqa: E501 - all_params.append('async_req') - all_params.append('_return_http_data_only') - all_params.append('_preload_content') - all_params.append('_request_timeout') + all_params = ["id"] # noqa: E501 + all_params.append("async_req") + all_params.append("_return_http_data_only") + all_params.append("_preload_content") + all_params.append("_request_timeout") params = locals() - for key, val in six.iteritems(params['kwargs']): + for key, val in six.iteritems(params["kwargs"]): if key not in all_params: raise TypeError( "Got an unexpected keyword argument '%s'" " to method get_template" % key ) params[key] = val - del params['kwargs'] + del params["kwargs"] # verify the required parameter 'id' is set - if ('id' not in params or - params['id'] is None): - raise ValueError("Missing the required parameter `id` when calling `get_template`") # noqa: E501 + if "id" not in params or params["id"] is None: + raise ValueError( + "Missing the required parameter `id` when calling `get_template`" + ) collection_formats = {} path_params = {} - if 'id' in params: - path_params['id'] = params['id'] # noqa: E501 + if "id" in params: + path_params["id"] = params["id"] # noqa: E501 query_params = [] @@ -559,20 +586,22 @@ def get_template_with_http_info(self, id, **kwargs): # noqa: E501 auth_settings = [] # noqa: E501 return self.api_client.call_api( - '/pipelines/{id}/templates', 'GET', + "/pipelines/{id}/templates", + "GET", path_params, query_params, header_params, body=body_params, post_params=form_params, files=local_var_files, - response_type='ApiGetTemplateResponse', # noqa: E501 + response_type="ApiGetTemplateResponse", # noqa: E501 auth_settings=auth_settings, - async_req=params.get('async_req'), - _return_http_data_only=params.get('_return_http_data_only'), - _preload_content=params.get('_preload_content', True), - _request_timeout=params.get('_request_timeout'), - collection_formats=collection_formats) + async_req=params.get("async_req"), + 
_return_http_data_only=params.get("_return_http_data_only"), + _preload_content=params.get("_preload_content", True), + _request_timeout=params.get("_request_timeout"), + collection_formats=collection_formats, + ) def list_pipelines(self, **kwargs): # noqa: E501 """list_pipelines # noqa: E501 @@ -591,11 +620,11 @@ def list_pipelines(self, **kwargs): # noqa: E501 If the method is called asynchronously, returns the request thread. """ - kwargs['_return_http_data_only'] = True - if kwargs.get('async_req'): - return self.list_pipelines_with_http_info(**kwargs) # noqa: E501 + kwargs["_return_http_data_only"] = True + if kwargs.get("async_req"): + return self.list_pipelines_with_http_info(**kwargs) else: - (data) = self.list_pipelines_with_http_info(**kwargs) # noqa: E501 + (data) = self.list_pipelines_with_http_info(**kwargs) return data def list_pipelines_with_http_info(self, **kwargs): # noqa: E501 @@ -616,35 +645,35 @@ def list_pipelines_with_http_info(self, **kwargs): # noqa: E501 returns the request thread. 
""" - all_params = ['page_token', 'page_size', 'sort_by', 'filter'] # noqa: E501 - all_params.append('async_req') - all_params.append('_return_http_data_only') - all_params.append('_preload_content') - all_params.append('_request_timeout') + all_params = ["page_token", "page_size", "sort_by", "filter"] # noqa: E501 + all_params.append("async_req") + all_params.append("_return_http_data_only") + all_params.append("_preload_content") + all_params.append("_request_timeout") params = locals() - for key, val in six.iteritems(params['kwargs']): + for key, val in six.iteritems(params["kwargs"]): if key not in all_params: raise TypeError( "Got an unexpected keyword argument '%s'" " to method list_pipelines" % key ) params[key] = val - del params['kwargs'] + del params["kwargs"] collection_formats = {} path_params = {} query_params = [] - if 'page_token' in params: - query_params.append(('page_token', params['page_token'])) # noqa: E501 - if 'page_size' in params: - query_params.append(('page_size', params['page_size'])) # noqa: E501 - if 'sort_by' in params: - query_params.append(('sort_by', params['sort_by'])) # noqa: E501 - if 'filter' in params: - query_params.append(('filter', params['filter'])) # noqa: E501 + if "page_token" in params: + query_params.append(("page_token", params["page_token"])) + if "page_size" in params: + query_params.append(("page_size", params["page_size"])) + if "sort_by" in params: + query_params.append(("sort_by", params["sort_by"])) + if "filter" in params: + query_params.append(("filter", params["filter"])) header_params = {} @@ -656,25 +685,27 @@ def list_pipelines_with_http_info(self, **kwargs): # noqa: E501 auth_settings = [] # noqa: E501 return self.api_client.call_api( - '/pipelines', 'GET', + "/pipelines", + "GET", path_params, query_params, header_params, body=body_params, post_params=form_params, files=local_var_files, - response_type='ApiListPipelinesResponse', # noqa: E501 + response_type="ApiListPipelinesResponse", # noqa: E501 
auth_settings=auth_settings, - async_req=params.get('async_req'), - _return_http_data_only=params.get('_return_http_data_only'), - _preload_content=params.get('_preload_content', True), - _request_timeout=params.get('_request_timeout'), - collection_formats=collection_formats) + async_req=params.get("async_req"), + _return_http_data_only=params.get("_return_http_data_only"), + _preload_content=params.get("_preload_content", True), + _request_timeout=params.get("_request_timeout"), + collection_formats=collection_formats, + ) def run_custom_pipeline(self, run_custom_pipeline_payload, **kwargs): # noqa: E501 """run_custom_pipeline # noqa: E501 - Run a complex pipeline defined by a directed acyclic graph (DAG) # noqa: E501 + Run a complex pipeline defined by a directed acyclic graph (DAG) This method makes a synchronous HTTP request by default. To make an asynchronous HTTP request, please pass async_req=True >>> thread = api.run_custom_pipeline(run_custom_pipeline_payload, async_req=True) @@ -687,17 +718,23 @@ def run_custom_pipeline(self, run_custom_pipeline_payload, **kwargs): # noqa: E If the method is called asynchronously, returns the request thread. 
""" - kwargs['_return_http_data_only'] = True - if kwargs.get('async_req'): - return self.run_custom_pipeline_with_http_info(run_custom_pipeline_payload, **kwargs) # noqa: E501 + kwargs["_return_http_data_only"] = True + if kwargs.get("async_req"): + return self.run_custom_pipeline_with_http_info( + run_custom_pipeline_payload, **kwargs + ) else: - (data) = self.run_custom_pipeline_with_http_info(run_custom_pipeline_payload, **kwargs) # noqa: E501 + (data) = self.run_custom_pipeline_with_http_info( + run_custom_pipeline_payload, **kwargs + ) return data - def run_custom_pipeline_with_http_info(self, run_custom_pipeline_payload, **kwargs): # noqa: E501 + def run_custom_pipeline_with_http_info( + self, run_custom_pipeline_payload, **kwargs + ): # noqa: E501 """run_custom_pipeline # noqa: E501 - Run a complex pipeline defined by a directed acyclic graph (DAG) # noqa: E501 + Run a complex pipeline defined by a directed acyclic graph (DAG) This method makes a synchronous HTTP request by default. To make an asynchronous HTTP request, please pass async_req=True >>> thread = api.run_custom_pipeline_with_http_info(run_custom_pipeline_payload, async_req=True) @@ -711,33 +748,37 @@ def run_custom_pipeline_with_http_info(self, run_custom_pipeline_payload, **kwar returns the request thread. 
""" - all_params = ['run_custom_pipeline_payload', 'run_name'] # noqa: E501 - all_params.append('async_req') - all_params.append('_return_http_data_only') - all_params.append('_preload_content') - all_params.append('_request_timeout') + all_params = ["run_custom_pipeline_payload", "run_name"] # noqa: E501 + all_params.append("async_req") + all_params.append("_return_http_data_only") + all_params.append("_preload_content") + all_params.append("_request_timeout") params = locals() - for key, val in six.iteritems(params['kwargs']): + for key, val in six.iteritems(params["kwargs"]): if key not in all_params: raise TypeError( "Got an unexpected keyword argument '%s'" " to method run_custom_pipeline" % key ) params[key] = val - del params['kwargs'] + del params["kwargs"] # verify the required parameter 'run_custom_pipeline_payload' is set - if ('run_custom_pipeline_payload' not in params or - params['run_custom_pipeline_payload'] is None): - raise ValueError("Missing the required parameter `run_custom_pipeline_payload` when calling `run_custom_pipeline`") # noqa: E501 + if ( + "run_custom_pipeline_payload" not in params + or params["run_custom_pipeline_payload"] is None + ): + raise ValueError( + "Missing the required parameter `run_custom_pipeline_payload` when calling `run_custom_pipeline`" + ) collection_formats = {} path_params = {} query_params = [] - if 'run_name' in params: - query_params.append(('run_name', params['run_name'])) # noqa: E501 + if "run_name" in params: + query_params.append(("run_name", params["run_name"])) header_params = {} @@ -745,30 +786,35 @@ def run_custom_pipeline_with_http_info(self, run_custom_pipeline_payload, **kwar local_var_files = {} body_params = None - if 'run_custom_pipeline_payload' in params: - body_params = params['run_custom_pipeline_payload'] + if "run_custom_pipeline_payload" in params: + body_params = params["run_custom_pipeline_payload"] # HTTP header `Content-Type` - header_params['Content-Type'] = 
self.api_client.select_header_content_type( # noqa: E501 - ['application/json']) # noqa: E501 + header_params[ + "Content-Type" + ] = self.api_client.select_header_content_type( # noqa: E501 + ["application/json"] + ) # Authentication setting auth_settings = [] # noqa: E501 return self.api_client.call_api( - '/pipelines/run_custom_pipeline', 'POST', + "/pipelines/run_custom_pipeline", + "POST", path_params, query_params, header_params, body=body_params, post_params=form_params, files=local_var_files, - response_type='ApiRunCodeResponse', # noqa: E501 + response_type="ApiRunCodeResponse", # noqa: E501 auth_settings=auth_settings, - async_req=params.get('async_req'), - _return_http_data_only=params.get('_return_http_data_only'), - _preload_content=params.get('_preload_content', True), - _request_timeout=params.get('_request_timeout'), - collection_formats=collection_formats) + async_req=params.get("async_req"), + _return_http_data_only=params.get("_return_http_data_only"), + _preload_content=params.get("_preload_content", True), + _request_timeout=params.get("_request_timeout"), + collection_formats=collection_formats, + ) def run_pipeline(self, id, **kwargs): # noqa: E501 """run_pipeline # noqa: E501 @@ -786,11 +832,11 @@ def run_pipeline(self, id, **kwargs): # noqa: E501 If the method is called asynchronously, returns the request thread. """ - kwargs['_return_http_data_only'] = True - if kwargs.get('async_req'): - return self.run_pipeline_with_http_info(id, **kwargs) # noqa: E501 + kwargs["_return_http_data_only"] = True + if kwargs.get("async_req"): + return self.run_pipeline_with_http_info(id, **kwargs) else: - (data) = self.run_pipeline_with_http_info(id, **kwargs) # noqa: E501 + (data) = self.run_pipeline_with_http_info(id, **kwargs) return data def run_pipeline_with_http_info(self, id, **kwargs): # noqa: E501 @@ -810,35 +856,36 @@ def run_pipeline_with_http_info(self, id, **kwargs): # noqa: E501 returns the request thread. 
""" - all_params = ['id', 'run_name', 'parameters'] # noqa: E501 - all_params.append('async_req') - all_params.append('_return_http_data_only') - all_params.append('_preload_content') - all_params.append('_request_timeout') + all_params = ["id", "run_name", "parameters"] # noqa: E501 + all_params.append("async_req") + all_params.append("_return_http_data_only") + all_params.append("_preload_content") + all_params.append("_request_timeout") params = locals() - for key, val in six.iteritems(params['kwargs']): + for key, val in six.iteritems(params["kwargs"]): if key not in all_params: raise TypeError( "Got an unexpected keyword argument '%s'" " to method run_pipeline" % key ) params[key] = val - del params['kwargs'] + del params["kwargs"] # verify the required parameter 'id' is set - if ('id' not in params or - params['id'] is None): - raise ValueError("Missing the required parameter `id` when calling `run_pipeline`") # noqa: E501 + if "id" not in params or params["id"] is None: + raise ValueError( + "Missing the required parameter `id` when calling `run_pipeline`" + ) collection_formats = {} path_params = {} - if 'id' in params: - path_params['id'] = params['id'] # noqa: E501 + if "id" in params: + path_params["id"] = params["id"] # noqa: E501 query_params = [] - if 'run_name' in params: - query_params.append(('run_name', params['run_name'])) # noqa: E501 + if "run_name" in params: + query_params.append(("run_name", params["run_name"])) header_params = {} @@ -846,30 +893,35 @@ def run_pipeline_with_http_info(self, id, **kwargs): # noqa: E501 local_var_files = {} body_params = None - if 'parameters' in params: - body_params = params['parameters'] + if "parameters" in params: + body_params = params["parameters"] # HTTP header `Content-Type` - header_params['Content-Type'] = self.api_client.select_header_content_type( # noqa: E501 - ['application/json']) # noqa: E501 + header_params[ + "Content-Type" + ] = self.api_client.select_header_content_type( # noqa: E501 + 
["application/json"] + ) # Authentication setting auth_settings = [] # noqa: E501 return self.api_client.call_api( - '/pipelines/{id}/run', 'POST', + "/pipelines/{id}/run", + "POST", path_params, query_params, header_params, body=body_params, post_params=form_params, files=local_var_files, - response_type='ApiRunCodeResponse', # noqa: E501 + response_type="ApiRunCodeResponse", # noqa: E501 auth_settings=auth_settings, - async_req=params.get('async_req'), - _return_http_data_only=params.get('_return_http_data_only'), - _preload_content=params.get('_preload_content', True), - _request_timeout=params.get('_request_timeout'), - collection_formats=collection_formats) + async_req=params.get("async_req"), + _return_http_data_only=params.get("_return_http_data_only"), + _preload_content=params.get("_preload_content", True), + _request_timeout=params.get("_request_timeout"), + collection_formats=collection_formats, + ) def set_featured_pipelines(self, pipeline_ids, **kwargs): # noqa: E501 """set_featured_pipelines # noqa: E501 @@ -885,14 +937,20 @@ def set_featured_pipelines(self, pipeline_ids, **kwargs): # noqa: E501 If the method is called asynchronously, returns the request thread. """ - kwargs['_return_http_data_only'] = True - if kwargs.get('async_req'): - return self.set_featured_pipelines_with_http_info(pipeline_ids, **kwargs) # noqa: E501 + kwargs["_return_http_data_only"] = True + if kwargs.get("async_req"): + return self.set_featured_pipelines_with_http_info( + pipeline_ids, **kwargs + ) else: - (data) = self.set_featured_pipelines_with_http_info(pipeline_ids, **kwargs) # noqa: E501 + (data) = self.set_featured_pipelines_with_http_info( + pipeline_ids, **kwargs + ) return data - def set_featured_pipelines_with_http_info(self, pipeline_ids, **kwargs): # noqa: E501 + def set_featured_pipelines_with_http_info( + self, pipeline_ids, **kwargs + ): # noqa: E501 """set_featured_pipelines # noqa: E501 This method makes a synchronous HTTP request by default. 
To make an @@ -907,25 +965,26 @@ def set_featured_pipelines_with_http_info(self, pipeline_ids, **kwargs): # noqa returns the request thread. """ - all_params = ['pipeline_ids'] # noqa: E501 - all_params.append('async_req') - all_params.append('_return_http_data_only') - all_params.append('_preload_content') - all_params.append('_request_timeout') + all_params = ["pipeline_ids"] # noqa: E501 + all_params.append("async_req") + all_params.append("_return_http_data_only") + all_params.append("_preload_content") + all_params.append("_request_timeout") params = locals() - for key, val in six.iteritems(params['kwargs']): + for key, val in six.iteritems(params["kwargs"]): if key not in all_params: raise TypeError( "Got an unexpected keyword argument '%s'" " to method set_featured_pipelines" % key ) params[key] = val - del params['kwargs'] + del params["kwargs"] # verify the required parameter 'pipeline_ids' is set - if ('pipeline_ids' not in params or - params['pipeline_ids'] is None): - raise ValueError("Missing the required parameter `pipeline_ids` when calling `set_featured_pipelines`") # noqa: E501 + if "pipeline_ids" not in params or params["pipeline_ids"] is None: + raise ValueError( + "Missing the required parameter `pipeline_ids` when calling `set_featured_pipelines`" + ) collection_formats = {} @@ -939,13 +998,14 @@ def set_featured_pipelines_with_http_info(self, pipeline_ids, **kwargs): # noqa local_var_files = {} body_params = None - if 'pipeline_ids' in params: - body_params = params['pipeline_ids'] + if "pipeline_ids" in params: + body_params = params["pipeline_ids"] # Authentication setting auth_settings = [] # noqa: E501 return self.api_client.call_api( - '/pipelines/featured', 'POST', + "/pipelines/featured", + "POST", path_params, query_params, header_params, @@ -954,11 +1014,12 @@ def set_featured_pipelines_with_http_info(self, pipeline_ids, **kwargs): # noqa files=local_var_files, response_type=None, # noqa: E501 auth_settings=auth_settings, - 
async_req=params.get('async_req'), - _return_http_data_only=params.get('_return_http_data_only'), - _preload_content=params.get('_preload_content', True), - _request_timeout=params.get('_request_timeout'), - collection_formats=collection_formats) + async_req=params.get("async_req"), + _return_http_data_only=params.get("_return_http_data_only"), + _preload_content=params.get("_preload_content", True), + _request_timeout=params.get("_request_timeout"), + collection_formats=collection_formats, + ) def upload_pipeline(self, uploadfile, **kwargs): # noqa: E501 """upload_pipeline # noqa: E501 @@ -977,11 +1038,15 @@ def upload_pipeline(self, uploadfile, **kwargs): # noqa: E501 If the method is called asynchronously, returns the request thread. """ - kwargs['_return_http_data_only'] = True - if kwargs.get('async_req'): - return self.upload_pipeline_with_http_info(uploadfile, **kwargs) # noqa: E501 + kwargs["_return_http_data_only"] = True + if kwargs.get("async_req"): + return self.upload_pipeline_with_http_info( + uploadfile, **kwargs + ) else: - (data) = self.upload_pipeline_with_http_info(uploadfile, **kwargs) # noqa: E501 + (data) = self.upload_pipeline_with_http_info( + uploadfile, **kwargs + ) return data def upload_pipeline_with_http_info(self, uploadfile, **kwargs): # noqa: E501 @@ -1002,72 +1067,79 @@ def upload_pipeline_with_http_info(self, uploadfile, **kwargs): # noqa: E501 returns the request thread. 
""" - all_params = ['uploadfile', 'name', 'description', 'annotations'] # noqa: E501 - all_params.append('async_req') - all_params.append('_return_http_data_only') - all_params.append('_preload_content') - all_params.append('_request_timeout') + all_params = ["uploadfile", "name", "description", "annotations"] # noqa: E501 + all_params.append("async_req") + all_params.append("_return_http_data_only") + all_params.append("_preload_content") + all_params.append("_request_timeout") params = locals() - for key, val in six.iteritems(params['kwargs']): + for key, val in six.iteritems(params["kwargs"]): if key not in all_params: raise TypeError( "Got an unexpected keyword argument '%s'" " to method upload_pipeline" % key ) params[key] = val - del params['kwargs'] + del params["kwargs"] # verify the required parameter 'uploadfile' is set - if ('uploadfile' not in params or - params['uploadfile'] is None): - raise ValueError("Missing the required parameter `uploadfile` when calling `upload_pipeline`") # noqa: E501 + if "uploadfile" not in params or params["uploadfile"] is None: + raise ValueError( + "Missing the required parameter `uploadfile` when calling `upload_pipeline`" + ) collection_formats = {} path_params = {} query_params = [] - if 'name' in params: - query_params.append(('name', params['name'])) # noqa: E501 - if 'description' in params: - query_params.append(('description', params['description'])) # noqa: E501 + if "name" in params: + query_params.append(("name", params["name"])) + if "description" in params: + query_params.append(("description", params["description"])) header_params = {} form_params = [] local_var_files = {} - if 'uploadfile' in params: - local_var_files['uploadfile'] = params['uploadfile'] # noqa: E501 - if 'annotations' in params: - form_params.append(('annotations', params['annotations'])) # noqa: E501 + if "uploadfile" in params: + local_var_files["uploadfile"] = params["uploadfile"] # noqa: E501 + if "annotations" in params: + 
form_params.append(("annotations", params["annotations"])) body_params = None # HTTP header `Accept` - header_params['Accept'] = self.api_client.select_header_accept( - ['application/json']) # noqa: E501 + header_params["Accept"] = self.api_client.select_header_accept( + ["application/json"] + ) # HTTP header `Content-Type` - header_params['Content-Type'] = self.api_client.select_header_content_type( # noqa: E501 - ['multipart/form-data']) # noqa: E501 + header_params[ + "Content-Type" + ] = self.api_client.select_header_content_type( # noqa: E501 + ["multipart/form-data"] + ) # Authentication setting auth_settings = [] # noqa: E501 return self.api_client.call_api( - '/pipelines/upload', 'POST', + "/pipelines/upload", + "POST", path_params, query_params, header_params, body=body_params, post_params=form_params, files=local_var_files, - response_type='ApiPipelineExtended', # noqa: E501 + response_type="ApiPipelineExtended", # noqa: E501 auth_settings=auth_settings, - async_req=params.get('async_req'), - _return_http_data_only=params.get('_return_http_data_only'), - _preload_content=params.get('_preload_content', True), - _request_timeout=params.get('_request_timeout'), - collection_formats=collection_formats) + async_req=params.get("async_req"), + _return_http_data_only=params.get("_return_http_data_only"), + _preload_content=params.get("_preload_content", True), + _request_timeout=params.get("_request_timeout"), + collection_formats=collection_formats, + ) def upload_pipeline_from_url(self, url, **kwargs): # noqa: E501 """upload_pipeline_from_url # noqa: E501 @@ -1085,11 +1157,15 @@ def upload_pipeline_from_url(self, url, **kwargs): # noqa: E501 If the method is called asynchronously, returns the request thread. 
""" - kwargs['_return_http_data_only'] = True - if kwargs.get('async_req'): - return self.upload_pipeline_from_url_with_http_info(url, **kwargs) # noqa: E501 + kwargs["_return_http_data_only"] = True + if kwargs.get("async_req"): + return self.upload_pipeline_from_url_with_http_info( + url, **kwargs + ) else: - (data) = self.upload_pipeline_from_url_with_http_info(url, **kwargs) # noqa: E501 + (data) = self.upload_pipeline_from_url_with_http_info( + url, **kwargs + ) return data def upload_pipeline_from_url_with_http_info(self, url, **kwargs): # noqa: E501 @@ -1109,67 +1185,74 @@ def upload_pipeline_from_url_with_http_info(self, url, **kwargs): # noqa: E501 returns the request thread. """ - all_params = ['url', 'name', 'access_token'] # noqa: E501 - all_params.append('async_req') - all_params.append('_return_http_data_only') - all_params.append('_preload_content') - all_params.append('_request_timeout') + all_params = ["url", "name", "access_token"] # noqa: E501 + all_params.append("async_req") + all_params.append("_return_http_data_only") + all_params.append("_preload_content") + all_params.append("_request_timeout") params = locals() - for key, val in six.iteritems(params['kwargs']): + for key, val in six.iteritems(params["kwargs"]): if key not in all_params: raise TypeError( "Got an unexpected keyword argument '%s'" " to method upload_pipeline_from_url" % key ) params[key] = val - del params['kwargs'] + del params["kwargs"] # verify the required parameter 'url' is set - if ('url' not in params or - params['url'] is None): - raise ValueError("Missing the required parameter `url` when calling `upload_pipeline_from_url`") # noqa: E501 + if "url" not in params or params["url"] is None: + raise ValueError( + "Missing the required parameter `url` when calling `upload_pipeline_from_url`" + ) collection_formats = {} path_params = {} query_params = [] - if 'name' in params: - query_params.append(('name', params['name'])) # noqa: E501 + if "name" in params: + 
query_params.append(("name", params["name"])) header_params = {} form_params = [] local_var_files = {} - if 'url' in params: - form_params.append(('url', params['url'])) # noqa: E501 - if 'access_token' in params: - form_params.append(('access_token', params['access_token'])) # noqa: E501 + if "url" in params: + form_params.append(("url", params["url"])) + if "access_token" in params: + form_params.append(("access_token", params["access_token"])) body_params = None # HTTP header `Accept` - header_params['Accept'] = self.api_client.select_header_accept( - ['application/json']) # noqa: E501 + header_params["Accept"] = self.api_client.select_header_accept( + ["application/json"] + ) # HTTP header `Content-Type` - header_params['Content-Type'] = self.api_client.select_header_content_type( # noqa: E501 - ['multipart/form-data']) # noqa: E501 + header_params[ + "Content-Type" + ] = self.api_client.select_header_content_type( # noqa: E501 + ["multipart/form-data"] + ) # Authentication setting auth_settings = [] # noqa: E501 return self.api_client.call_api( - '/pipelines/upload_from_url', 'POST', + "/pipelines/upload_from_url", + "POST", path_params, query_params, header_params, body=body_params, post_params=form_params, files=local_var_files, - response_type='ApiPipeline', # noqa: E501 + response_type="ApiPipeline", # noqa: E501 auth_settings=auth_settings, - async_req=params.get('async_req'), - _return_http_data_only=params.get('_return_http_data_only'), - _preload_content=params.get('_preload_content', True), - _request_timeout=params.get('_request_timeout'), - collection_formats=collection_formats) + async_req=params.get("async_req"), + _return_http_data_only=params.get("_return_http_data_only"), + _preload_content=params.get("_preload_content", True), + _request_timeout=params.get("_request_timeout"), + collection_formats=collection_formats, + ) diff --git a/api/client/swagger_client/api_client.py b/api/client/swagger_client/api_client.py index d1301ef2..5f8090bc 
100644 --- a/api/client/swagger_client/api_client.py +++ b/api/client/swagger_client/api_client.py @@ -23,11 +23,11 @@ import tempfile # python 2 and python 3 compatibility library -import six +import six # noqa: F401 from six.moves.urllib.parse import quote from swagger_client.configuration import Configuration -import swagger_client.models +import swagger_client.models # noqa: F401 from swagger_client import rest @@ -53,18 +53,19 @@ class ApiClient(object): PRIMITIVE_TYPES = (float, bool, bytes, six.text_type) + six.integer_types NATIVE_TYPES_MAPPING = { - 'int': int, - 'long': int if six.PY3 else long, # noqa: F821 - 'float': float, - 'str': str, - 'bool': bool, - 'date': datetime.date, - 'datetime': datetime.datetime, - 'object': object, + "int": int, + "long": int if six.PY3 else long, # noqa: F821 + "float": float, + "str": str, + "bool": bool, + "date": datetime.date, + "datetime": datetime.datetime, + "object": object, } - def __init__(self, configuration=None, header_name=None, header_value=None, - cookie=None): + def __init__( + self, configuration=None, header_name=None, header_value=None, cookie=None + ): if configuration is None: configuration = Configuration() self.configuration = configuration @@ -77,7 +78,7 @@ def __init__(self, configuration=None, header_name=None, header_value=None, self.default_headers[header_name] = header_value self.cookie = cookie # Set default User-Agent. 
- self.user_agent = 'Swagger-Codegen/1.0.0/python' + self.user_agent = "Swagger-Codegen/1.0.0/python" def __del__(self): if self._pool is not None: @@ -93,21 +94,32 @@ def pool(self): @property def user_agent(self): """User agent for this API client""" - return self.default_headers['User-Agent'] + return self.default_headers["User-Agent"] @user_agent.setter def user_agent(self, value): - self.default_headers['User-Agent'] = value + self.default_headers["User-Agent"] = value def set_default_header(self, header_name, header_value): self.default_headers[header_name] = header_value def __call_api( - self, resource_path, method, path_params=None, - query_params=None, header_params=None, body=None, post_params=None, - files=None, response_type=None, auth_settings=None, - _return_http_data_only=None, collection_formats=None, - _preload_content=True, _request_timeout=None): + self, + resource_path, + method, + path_params=None, + query_params=None, + header_params=None, + body=None, + post_params=None, + files=None, + response_type=None, + auth_settings=None, + _return_http_data_only=None, + collection_formats=None, + _preload_content=True, + _request_timeout=None, + ): config = self.configuration @@ -115,36 +127,33 @@ def __call_api( header_params = header_params or {} header_params.update(self.default_headers) if self.cookie: - header_params['Cookie'] = self.cookie + header_params["Cookie"] = self.cookie if header_params: header_params = self.sanitize_for_serialization(header_params) - header_params = dict(self.parameters_to_tuples(header_params, - collection_formats)) + header_params = dict( + self.parameters_to_tuples(header_params, collection_formats) + ) # path parameters if path_params: path_params = self.sanitize_for_serialization(path_params) - path_params = self.parameters_to_tuples(path_params, - collection_formats) + path_params = self.parameters_to_tuples(path_params, collection_formats) for k, v in path_params: # specified safe chars, encode everything 
resource_path = resource_path.replace( - '{%s}' % k, - quote(str(v), safe=config.safe_chars_for_path_param) + "{%s}" % k, quote(str(v), safe=config.safe_chars_for_path_param) ) # query parameters if query_params: query_params = self.sanitize_for_serialization(query_params) - query_params = self.parameters_to_tuples(query_params, - collection_formats) + query_params = self.parameters_to_tuples(query_params, collection_formats) # post parameters if post_params or files: post_params = self.prepare_post_parameters(post_params, files) post_params = self.sanitize_for_serialization(post_params) - post_params = self.parameters_to_tuples(post_params, - collection_formats) + post_params = self.parameters_to_tuples(post_params, collection_formats) # auth setting self.update_params_for_auth(header_params, query_params, auth_settings) @@ -158,10 +167,15 @@ def __call_api( # perform request and return response response_data = self.request( - method, url, query_params=query_params, headers=header_params, - post_params=post_params, body=body, + method, + url, + query_params=query_params, + headers=header_params, + post_params=post_params, + body=body, _preload_content=_preload_content, - _request_timeout=_request_timeout) + _request_timeout=_request_timeout, + ) self.last_response = response_data @@ -174,10 +188,9 @@ def __call_api( return_data = None if _return_http_data_only: - return (return_data) + return return_data else: - return (return_data, response_data.status, - response_data.getheaders()) + return (return_data, response_data.status, response_data.getheaders()) def sanitize_for_serialization(self, obj): """Builds a JSON POST object. 
@@ -198,11 +211,9 @@ def sanitize_for_serialization(self, obj): elif isinstance(obj, self.PRIMITIVE_TYPES): return obj elif isinstance(obj, list): - return [self.sanitize_for_serialization(sub_obj) - for sub_obj in obj] + return [self.sanitize_for_serialization(sub_obj) for sub_obj in obj] elif isinstance(obj, tuple): - return tuple(self.sanitize_for_serialization(sub_obj) - for sub_obj in obj) + return tuple(self.sanitize_for_serialization(sub_obj) for sub_obj in obj) elif isinstance(obj, (datetime.datetime, datetime.date)): return obj.isoformat() @@ -214,12 +225,16 @@ def sanitize_for_serialization(self, obj): # and attributes which value is not None. # Convert attribute name to json key in # model definition for request. - obj_dict = {obj.attribute_map[attr]: getattr(obj, attr) - for attr, _ in six.iteritems(obj.swagger_types) - if getattr(obj, attr) is not None} + obj_dict = { + obj.attribute_map[attr]: getattr(obj, attr) + for attr, _ in six.iteritems(obj.swagger_types) + if getattr(obj, attr) is not None + } - return {key: self.sanitize_for_serialization(val) - for key, val in six.iteritems(obj_dict)} + return { + key: self.sanitize_for_serialization(val) + for key, val in six.iteritems(obj_dict) + } def deserialize(self, response, response_type): """Deserializes response into an object. 
@@ -255,15 +270,15 @@ def __deserialize(self, data, klass): return None if type(klass) == str: - if klass.startswith('list['): - sub_kls = re.match(r'list\[(.*)\]', klass).group(1) - return [self.__deserialize(sub_data, sub_kls) - for sub_data in data] + if klass.startswith("list["): + sub_kls = re.match(r"list\[(.*)\]", klass).group(1) + return [self.__deserialize(sub_data, sub_kls) for sub_data in data] - if klass.startswith('dict('): - sub_kls = re.match(r'dict\(([^,]*), (.*)\)', klass).group(2) - return {k: self.__deserialize(v, sub_kls) - for k, v in six.iteritems(data)} + if klass.startswith("dict("): + sub_kls = re.match(r"dict\(([^,]*), (.*)\)", klass).group(2) + return { + k: self.__deserialize(v, sub_kls) for k, v in six.iteritems(data) + } # convert str to class if klass in self.NATIVE_TYPES_MAPPING: @@ -282,12 +297,24 @@ def __deserialize(self, data, klass): else: return self.__deserialize_model(data, klass) - def call_api(self, resource_path, method, - path_params=None, query_params=None, header_params=None, - body=None, post_params=None, files=None, - response_type=None, auth_settings=None, async_req=None, - _return_http_data_only=None, collection_formats=None, - _preload_content=True, _request_timeout=None): + def call_api( + self, + resource_path, + method, + path_params=None, + query_params=None, + header_params=None, + body=None, + post_params=None, + files=None, + response_type=None, + auth_settings=None, + async_req=None, + _return_http_data_only=None, + collection_formats=None, + _preload_content=True, + _request_timeout=None, + ): """Makes the HTTP request (synchronous) and returns deserialized data. To make an async request, set the async_req parameter. @@ -325,78 +352,121 @@ def call_api(self, resource_path, method, then the method will return the response directly. 
""" if not async_req: - return self.__call_api(resource_path, method, - path_params, query_params, header_params, - body, post_params, files, - response_type, auth_settings, - _return_http_data_only, collection_formats, - _preload_content, _request_timeout) + return self.__call_api( + resource_path, + method, + path_params, + query_params, + header_params, + body, + post_params, + files, + response_type, + auth_settings, + _return_http_data_only, + collection_formats, + _preload_content, + _request_timeout, + ) else: - thread = self.pool.apply_async(self.__call_api, (resource_path, - method, path_params, query_params, - header_params, body, - post_params, files, - response_type, auth_settings, - _return_http_data_only, - collection_formats, - _preload_content, _request_timeout)) + thread = self.pool.apply_async( + self.__call_api, + ( + resource_path, + method, + path_params, + query_params, + header_params, + body, + post_params, + files, + response_type, + auth_settings, + _return_http_data_only, + collection_formats, + _preload_content, + _request_timeout, + ), + ) return thread - def request(self, method, url, query_params=None, headers=None, - post_params=None, body=None, _preload_content=True, - _request_timeout=None): + def request( + self, + method, + url, + query_params=None, + headers=None, + post_params=None, + body=None, + _preload_content=True, + _request_timeout=None, + ): """Makes the HTTP request using RESTClient.""" if method == "GET": - return self.rest_client.GET(url, - query_params=query_params, - _preload_content=_preload_content, - _request_timeout=_request_timeout, - headers=headers) + return self.rest_client.GET( + url, + query_params=query_params, + _preload_content=_preload_content, + _request_timeout=_request_timeout, + headers=headers, + ) elif method == "HEAD": - return self.rest_client.HEAD(url, - query_params=query_params, - _preload_content=_preload_content, - _request_timeout=_request_timeout, - headers=headers) + return 
self.rest_client.HEAD( + url, + query_params=query_params, + _preload_content=_preload_content, + _request_timeout=_request_timeout, + headers=headers, + ) elif method == "OPTIONS": - return self.rest_client.OPTIONS(url, - query_params=query_params, - headers=headers, - post_params=post_params, - _preload_content=_preload_content, - _request_timeout=_request_timeout, - body=body) + return self.rest_client.OPTIONS( + url, + query_params=query_params, + headers=headers, + post_params=post_params, + _preload_content=_preload_content, + _request_timeout=_request_timeout, + body=body, + ) elif method == "POST": - return self.rest_client.POST(url, - query_params=query_params, - headers=headers, - post_params=post_params, - _preload_content=_preload_content, - _request_timeout=_request_timeout, - body=body) + return self.rest_client.POST( + url, + query_params=query_params, + headers=headers, + post_params=post_params, + _preload_content=_preload_content, + _request_timeout=_request_timeout, + body=body, + ) elif method == "PUT": - return self.rest_client.PUT(url, - query_params=query_params, - headers=headers, - post_params=post_params, - _preload_content=_preload_content, - _request_timeout=_request_timeout, - body=body) + return self.rest_client.PUT( + url, + query_params=query_params, + headers=headers, + post_params=post_params, + _preload_content=_preload_content, + _request_timeout=_request_timeout, + body=body, + ) elif method == "PATCH": - return self.rest_client.PATCH(url, - query_params=query_params, - headers=headers, - post_params=post_params, - _preload_content=_preload_content, - _request_timeout=_request_timeout, - body=body) + return self.rest_client.PATCH( + url, + query_params=query_params, + headers=headers, + post_params=post_params, + _preload_content=_preload_content, + _request_timeout=_request_timeout, + body=body, + ) elif method == "DELETE": - return self.rest_client.DELETE(url, - query_params=query_params, - headers=headers, - 
_preload_content=_preload_content, - _request_timeout=_request_timeout, - body=body) + return self.rest_client.DELETE( + url, + query_params=query_params, + headers=headers, + _preload_content=_preload_content, + _request_timeout=_request_timeout, + body=body, + ) else: raise ValueError( "http method must be `GET`, `HEAD`, `OPTIONS`," @@ -413,22 +483,23 @@ def parameters_to_tuples(self, params, collection_formats): new_params = [] if collection_formats is None: collection_formats = {} - for k, v in six.iteritems(params) if isinstance(params, dict) else params: # noqa: E501 + for k, v in ( + six.iteritems(params) if isinstance(params, dict) else params + ): # noqa: E501 if k in collection_formats: collection_format = collection_formats[k] - if collection_format == 'multi': + if collection_format == "multi": new_params.extend((k, value) for value in v) else: - if collection_format == 'ssv': - delimiter = ' ' - elif collection_format == 'tsv': - delimiter = '\t' - elif collection_format == 'pipes': - delimiter = '|' + if collection_format == "ssv": + delimiter = " " + elif collection_format == "tsv": + delimiter = "\t" + elif collection_format == "pipes": + delimiter = "|" else: # csv is the default - delimiter = ',' - new_params.append( - (k, delimiter.join(str(value) for value in v))) + delimiter = "," + new_params.append((k, delimiter.join(str(value) for value in v))) else: new_params.append((k, v)) return new_params @@ -451,13 +522,14 @@ def prepare_post_parameters(self, post_params=None, files=None): continue file_names = v if type(v) is list else [v] for n in file_names: - with open(n, 'rb') as f: + with open(n, "rb") as f: filename = os.path.basename(f.name) filedata = f.read() - mimetype = (mimetypes.guess_type(filename)[0] or - 'application/octet-stream') - params.append( - tuple([k, tuple([filename, filedata, mimetype])])) + mimetype = ( + mimetypes.guess_type(filename)[0] + or "application/octet-stream" + ) + params.append(tuple([k, tuple([filename, 
filedata, mimetype])])) return params @@ -472,10 +544,10 @@ def select_header_accept(self, accepts): accepts = [x.lower() for x in accepts] - if 'application/json' in accepts: - return 'application/json' + if "application/json" in accepts: + return "application/json" else: - return ', '.join(accepts) + return ", ".join(accepts) def select_header_content_type(self, content_types): """Returns `Content-Type` based on an array of content_types provided. @@ -484,12 +556,12 @@ def select_header_content_type(self, content_types): :return: Content-Type (e.g. application/json). """ if not content_types: - return 'application/json' + return "application/json" content_types = [x.lower() for x in content_types] - if 'application/json' in content_types or '*/*' in content_types: - return 'application/json' + if "application/json" in content_types or "*/*" in content_types: + return "application/json" else: return content_types[0] @@ -506,15 +578,15 @@ def update_params_for_auth(self, headers, querys, auth_settings): for auth in auth_settings: auth_setting = self.configuration.auth_settings().get(auth) if auth_setting: - if not auth_setting['value']: + if not auth_setting["value"]: continue - elif auth_setting['in'] == 'header': - headers[auth_setting['key']] = auth_setting['value'] - elif auth_setting['in'] == 'query': - querys.append((auth_setting['key'], auth_setting['value'])) + elif auth_setting["in"] == "header": + headers[auth_setting["key"]] = auth_setting["value"] + elif auth_setting["in"] == "query": + querys.append((auth_setting["key"], auth_setting["value"])) else: raise ValueError( - 'Authentication token must be in `query` or `header`' + "Authentication token must be in `query` or `header`" ) def __deserialize_file(self, response): @@ -532,8 +604,9 @@ def __deserialize_file(self, response): content_disposition = response.getheader("Content-Disposition") if content_disposition: - filename = re.search(r'filename=[\'"]?([^\'"\s]+)[\'"]?', - 
content_disposition).group(1) + filename = re.search( + r'filename=[\'"]?([^\'"\s]+)[\'"]?', content_disposition + ).group(1) path = os.path.join(os.path.dirname(path), filename) with open(path, "wb") as f: @@ -571,13 +644,13 @@ def __deserialize_date(self, string): """ try: from dateutil.parser import parse + return parse(string).date() except ImportError: return string except ValueError: raise rest.ApiException( - status=0, - reason="Failed to parse `{0}` as date object".format(string) + status=0, reason="Failed to parse `{0}` as date object".format(string) ) def __deserialize_datatime(self, string): @@ -590,16 +663,14 @@ def __deserialize_datatime(self, string): """ try: from dateutil.parser import parse + return parse(string) except ImportError: return string except ValueError: raise rest.ApiException( status=0, - reason=( - "Failed to parse `{0}` as datetime object" - .format(string) - ) + reason=("Failed to parse `{0}` as datetime object".format(string)), ) def __hasattr(self, object, name): @@ -613,28 +684,33 @@ def __deserialize_model(self, data, klass): :return: model object. 
""" - if (not klass.swagger_types and - not self.__hasattr(klass, 'get_real_child_model')): + if not klass.swagger_types and not self.__hasattr( + klass, "get_real_child_model" + ): return data kwargs = {} if klass.swagger_types is not None: for attr, attr_type in six.iteritems(klass.swagger_types): - if (data is not None and - klass.attribute_map[attr] in data and - isinstance(data, (list, dict))): + if ( + data is not None + and klass.attribute_map[attr] in data + and isinstance(data, (list, dict)) + ): value = data[klass.attribute_map[attr]] kwargs[attr] = self.__deserialize(value, attr_type) instance = klass(**kwargs) - if (isinstance(instance, dict) and - klass.swagger_types is not None and - isinstance(data, dict)): + if ( + isinstance(instance, dict) + and klass.swagger_types is not None + and isinstance(data, dict) + ): for key, value in data.items(): if key not in klass.swagger_types: instance[key] = value - if self.__hasattr(instance, 'get_real_child_model'): + if self.__hasattr(instance, "get_real_child_model"): klass_name = instance.get_real_child_model(data) if klass_name: instance = self.__deserialize(data, klass_name) diff --git a/api/client/swagger_client/configuration.py b/api/client/swagger_client/configuration.py index 972d8de9..3ff0d882 100644 --- a/api/client/swagger_client/configuration.py +++ b/api/client/swagger_client/configuration.py @@ -22,7 +22,7 @@ import sys import urllib3 -import six +import six # noqa: F401 from six.moves import http_client as httplib @@ -62,7 +62,7 @@ def __init__(self): self.logger["package_logger"] = logging.getLogger("swagger_client") self.logger["urllib3_logger"] = logging.getLogger("urllib3") # Log format - self.logger_format = '%(asctime)s %(levelname)s %(message)s' + self.logger_format = "%(asctime)s %(levelname)s %(message)s" # Log stream handler self.logger_stream_handler = None # Log file handler @@ -95,7 +95,7 @@ def __init__(self): # Proxy URL self.proxy = None # Safe chars for path_param - 
self.safe_chars_for_path_param = '' + self.safe_chars_for_path_param = "" @classmethod def set_default(cls, default): @@ -203,9 +203,10 @@ def get_api_key_with_prefix(self, identifier): :param identifier: The identifier of apiKey. :return: The token for api key authentication. """ - if (self.api_key.get(identifier) and - self.api_key_prefix.get(identifier)): - return self.api_key_prefix[identifier] + ' ' + self.api_key[identifier] # noqa: E501 + if self.api_key.get(identifier) and self.api_key_prefix.get(identifier): + return ( + self.api_key_prefix[identifier] + " " + self.api_key[identifier] + ) elif self.api_key.get(identifier): return self.api_key[identifier] @@ -215,26 +216,25 @@ def get_basic_auth_token(self): :return: The token for basic HTTP authentication. """ return urllib3.util.make_headers( - basic_auth=self.username + ':' + self.password - ).get('authorization') + basic_auth=self.username + ":" + self.password + ).get("authorization") def auth_settings(self): """Gets Auth Settings dict for api client. :return: The Auth Settings information dict. """ - return { - - } + return {} def to_debug_report(self): """Gets the essential information for debugging. :return: The report for debugging. 
""" - return "Python SDK Debug Report:\n"\ - "OS: {env}\n"\ - "Python Version: {pyversion}\n"\ - "Version of the API: 0.1.30-upload-catalog-from-url\n"\ - "SDK Package Version: 0.1.0".\ - format(env=sys.platform, pyversion=sys.version) + return ( + "Python SDK Debug Report:\n" + "OS: {env}\n" + "Python Version: {pyversion}\n" + "Version of the API: 0.1.30-upload-catalog-from-url\n" + "SDK Package Version: 0.1.0".format(env=sys.platform, pyversion=sys.version) + ) diff --git a/api/client/swagger_client/models/__init__.py b/api/client/swagger_client/models/__init__.py index a158fbb7..e0d9725e 100644 --- a/api/client/swagger_client/models/__init__.py +++ b/api/client/swagger_client/models/__init__.py @@ -25,15 +25,25 @@ from swagger_client.models.api_catalog_upload_item import ApiCatalogUploadItem from swagger_client.models.api_credential import ApiCredential from swagger_client.models.api_generate_code_response import ApiGenerateCodeResponse -from swagger_client.models.api_generate_model_code_response import ApiGenerateModelCodeResponse +from swagger_client.models.api_generate_model_code_response import ( # noqa: F401 + ApiGenerateModelCodeResponse, +) from swagger_client.models.api_get_template_response import ApiGetTemplateResponse from swagger_client.models.api_inferenceservice import ApiInferenceservice -from swagger_client.models.api_list_catalog_items_response import ApiListCatalogItemsResponse -from swagger_client.models.api_list_catalog_upload_errors import ApiListCatalogUploadErrors +from swagger_client.models.api_list_catalog_items_response import ( # noqa: F401 + ApiListCatalogItemsResponse, +) +from swagger_client.models.api_list_catalog_upload_errors import ( # noqa: F401 + ApiListCatalogUploadErrors, +) from swagger_client.models.api_list_components_response import ApiListComponentsResponse -from swagger_client.models.api_list_credentials_response import ApiListCredentialsResponse +from swagger_client.models.api_list_credentials_response import ( # 
noqa: F401 + ApiListCredentialsResponse, +) from swagger_client.models.api_list_datasets_response import ApiListDatasetsResponse -from swagger_client.models.api_list_inferenceservices_response import ApiListInferenceservicesResponse +from swagger_client.models.api_list_inferenceservices_response import ( # noqa: F401 + ApiListInferenceservicesResponse, +) from swagger_client.models.api_list_models_response import ApiListModelsResponse from swagger_client.models.api_list_notebooks_response import ApiListNotebooksResponse from swagger_client.models.api_list_pipelines_response import ApiListPipelinesResponse @@ -44,7 +54,9 @@ from swagger_client.models.api_parameter import ApiParameter from swagger_client.models.api_pipeline import ApiPipeline from swagger_client.models.api_pipeline_custom import ApiPipelineCustom -from swagger_client.models.api_pipeline_custom_run_payload import ApiPipelineCustomRunPayload +from swagger_client.models.api_pipeline_custom_run_payload import ( # noqa: F401 + ApiPipelineCustomRunPayload, +) from swagger_client.models.api_pipeline_dag import ApiPipelineDAG from swagger_client.models.api_pipeline_extension import ApiPipelineExtension from swagger_client.models.api_pipeline_inputs import ApiPipelineInputs @@ -52,7 +64,7 @@ from swagger_client.models.api_pipeline_task_arguments import ApiPipelineTaskArguments from swagger_client.models.api_run_code_response import ApiRunCodeResponse from swagger_client.models.api_settings import ApiSettings -from swagger_client.models.api_settings_section import ApiSettingsSection +from swagger_client.models.api_settings_section import ApiSettingsSection # noqa: F401 from swagger_client.models.api_status import ApiStatus from swagger_client.models.api_url import ApiUrl from swagger_client.models.dictionary import Dictionary diff --git a/api/client/swagger_client/models/any_value.py b/api/client/swagger_client/models/any_value.py index 6c3b1192..bcdf7f73 100644 --- 
a/api/client/swagger_client/models/any_value.py +++ b/api/client/swagger_client/models/any_value.py @@ -14,10 +14,10 @@ """ -import pprint +import pprint # noqa: F401 import re # noqa: F401 -import six +import six # noqa: F401 class AnyValue(object): @@ -33,11 +33,9 @@ class AnyValue(object): attribute_map (dict): The key is attribute name and the value is json key in definition. """ - swagger_types = { - } + swagger_types = {} - attribute_map = { - } + attribute_map = {} def __init__(self): # noqa: E501 """AnyValue - a model defined in Swagger""" # noqa: E501 @@ -50,18 +48,20 @@ def to_dict(self): for attr, _ in six.iteritems(self.swagger_types): value = getattr(self, attr) if isinstance(value, list): - result[attr] = list(map( - lambda x: x.to_dict() if hasattr(x, "to_dict") else x, - value - )) + result[attr] = list( + map(lambda x: x.to_dict() if hasattr(x, "to_dict") else x, value) + ) elif hasattr(value, "to_dict"): result[attr] = value.to_dict() elif isinstance(value, dict): - result[attr] = dict(map( - lambda item: (item[0], item[1].to_dict()) - if hasattr(item[1], "to_dict") else item, - value.items() - )) + result[attr] = dict( + map( + lambda item: (item[0], item[1].to_dict()) + if hasattr(item[1], "to_dict") + else item, + value.items(), + ) + ) else: result[attr] = value if issubclass(AnyValue, dict): diff --git a/api/client/swagger_client/models/api_access_token.py b/api/client/swagger_client/models/api_access_token.py index 3bea5f88..15528eb7 100644 --- a/api/client/swagger_client/models/api_access_token.py +++ b/api/client/swagger_client/models/api_access_token.py @@ -14,10 +14,10 @@ """ -import pprint +import pprint # noqa: F401 import re # noqa: F401 -import six +import six # noqa: F401 class ApiAccessToken(object): @@ -33,15 +33,9 @@ class ApiAccessToken(object): attribute_map (dict): The key is attribute name and the value is json key in definition. 
""" - swagger_types = { - 'api_token': 'str', - 'url_host': 'str' - } + swagger_types = {"api_token": "str", "url_host": "str"} - attribute_map = { - 'api_token': 'api_token', - 'url_host': 'url_host' - } + attribute_map = {"api_token": "api_token", "url_host": "url_host"} def __init__(self, api_token=None, url_host=None): # noqa: E501 """ApiAccessToken - a model defined in Swagger""" # noqa: E501 @@ -74,7 +68,9 @@ def api_token(self, api_token): :type: str """ if api_token is None: - raise ValueError("Invalid value for `api_token`, must not be `None`") # noqa: E501 + raise ValueError( + "Invalid value for `api_token`, must not be `None`" + ) self._api_token = api_token @@ -99,7 +95,9 @@ def url_host(self, url_host): :type: str """ if url_host is None: - raise ValueError("Invalid value for `url_host`, must not be `None`") # noqa: E501 + raise ValueError( + "Invalid value for `url_host`, must not be `None`" + ) self._url_host = url_host @@ -110,18 +108,20 @@ def to_dict(self): for attr, _ in six.iteritems(self.swagger_types): value = getattr(self, attr) if isinstance(value, list): - result[attr] = list(map( - lambda x: x.to_dict() if hasattr(x, "to_dict") else x, - value - )) + result[attr] = list( + map(lambda x: x.to_dict() if hasattr(x, "to_dict") else x, value) + ) elif hasattr(value, "to_dict"): result[attr] = value.to_dict() elif isinstance(value, dict): - result[attr] = dict(map( - lambda item: (item[0], item[1].to_dict()) - if hasattr(item[1], "to_dict") else item, - value.items() - )) + result[attr] = dict( + map( + lambda item: (item[0], item[1].to_dict()) + if hasattr(item[1], "to_dict") + else item, + value.items(), + ) + ) else: result[attr] = value if issubclass(ApiAccessToken, dict): diff --git a/api/client/swagger_client/models/api_asset.py b/api/client/swagger_client/models/api_asset.py index 2e2f45a6..bcbe4355 100644 --- a/api/client/swagger_client/models/api_asset.py +++ b/api/client/swagger_client/models/api_asset.py @@ -14,10 +14,10 @@ """ 
-import pprint +import pprint # noqa: F401 import re # noqa: F401 -import six +import six # noqa: F401 class ApiAsset(object): @@ -34,28 +34,38 @@ class ApiAsset(object): and the value is json key in definition. """ swagger_types = { - 'id': 'str', - 'created_at': 'datetime', - 'name': 'str', - 'description': 'str', - 'featured': 'bool', - 'publish_approved': 'bool', - 'related_assets': 'list[str]', - 'filter_categories': 'dict(str, str)' + "id": "str", + "created_at": "datetime", + "name": "str", + "description": "str", + "featured": "bool", + "publish_approved": "bool", + "related_assets": "list[str]", + "filter_categories": "dict(str, str)", } attribute_map = { - 'id': 'id', - 'created_at': 'created_at', - 'name': 'name', - 'description': 'description', - 'featured': 'featured', - 'publish_approved': 'publish_approved', - 'related_assets': 'related_assets', - 'filter_categories': 'filter_categories' + "id": "id", + "created_at": "created_at", + "name": "name", + "description": "description", + "featured": "featured", + "publish_approved": "publish_approved", + "related_assets": "related_assets", + "filter_categories": "filter_categories", } - def __init__(self, id=None, created_at=None, name=None, description=None, featured=None, publish_approved=None, related_assets=None, filter_categories=None): # noqa: E501 + def __init__( + self, + id=None, + created_at=None, + name=None, + description=None, + featured=None, + publish_approved=None, + related_assets=None, + filter_categories=None, + ): # noqa: E501 """ApiAsset - a model defined in Swagger""" # noqa: E501 self._id = None @@ -144,7 +154,9 @@ def name(self, name): :type: str """ if name is None: - raise ValueError("Invalid value for `name`, must not be `None`") # noqa: E501 + raise ValueError( + "Invalid value for `name`, must not be `None`" + ) self._name = name @@ -167,7 +179,9 @@ def description(self, description): :type: str """ if description is None: - raise ValueError("Invalid value for `description`, 
must not be `None`") # noqa: E501 + raise ValueError( + "Invalid value for `description`, must not be `None`" + ) self._description = description @@ -262,18 +276,20 @@ def to_dict(self): for attr, _ in six.iteritems(self.swagger_types): value = getattr(self, attr) if isinstance(value, list): - result[attr] = list(map( - lambda x: x.to_dict() if hasattr(x, "to_dict") else x, - value - )) + result[attr] = list( + map(lambda x: x.to_dict() if hasattr(x, "to_dict") else x, value) + ) elif hasattr(value, "to_dict"): result[attr] = value.to_dict() elif isinstance(value, dict): - result[attr] = dict(map( - lambda item: (item[0], item[1].to_dict()) - if hasattr(item[1], "to_dict") else item, - value.items() - )) + result[attr] = dict( + map( + lambda item: (item[0], item[1].to_dict()) + if hasattr(item[1], "to_dict") + else item, + value.items(), + ) + ) else: result[attr] = value if issubclass(ApiAsset, dict): diff --git a/api/client/swagger_client/models/api_catalog_upload.py b/api/client/swagger_client/models/api_catalog_upload.py index 8b6a22f5..690e633e 100644 --- a/api/client/swagger_client/models/api_catalog_upload.py +++ b/api/client/swagger_client/models/api_catalog_upload.py @@ -14,10 +14,10 @@ """ -import pprint +import pprint # noqa: F401 import re # noqa: F401 -import six +import six # noqa: F401 class ApiCatalogUpload(object): @@ -34,24 +34,32 @@ class ApiCatalogUpload(object): and the value is json key in definition. 
""" swagger_types = { - 'api_access_tokens': 'list[ApiAccessToken]', - 'components': 'list[ApiCatalogUploadItem]', - 'datasets': 'list[ApiCatalogUploadItem]', - 'models': 'list[ApiCatalogUploadItem]', - 'notebooks': 'list[ApiCatalogUploadItem]', - 'pipelines': 'list[ApiCatalogUploadItem]' + "api_access_tokens": "list[ApiAccessToken]", + "components": "list[ApiCatalogUploadItem]", + "datasets": "list[ApiCatalogUploadItem]", + "models": "list[ApiCatalogUploadItem]", + "notebooks": "list[ApiCatalogUploadItem]", + "pipelines": "list[ApiCatalogUploadItem]", } attribute_map = { - 'api_access_tokens': 'api_access_tokens', - 'components': 'components', - 'datasets': 'datasets', - 'models': 'models', - 'notebooks': 'notebooks', - 'pipelines': 'pipelines' + "api_access_tokens": "api_access_tokens", + "components": "components", + "datasets": "datasets", + "models": "models", + "notebooks": "notebooks", + "pipelines": "pipelines", } - def __init__(self, api_access_tokens=None, components=None, datasets=None, models=None, notebooks=None, pipelines=None): # noqa: E501 + def __init__( + self, + api_access_tokens=None, + components=None, + datasets=None, + models=None, + notebooks=None, + pipelines=None, + ): # noqa: E501 """ApiCatalogUpload - a model defined in Swagger""" # noqa: E501 self._api_access_tokens = None @@ -210,18 +218,20 @@ def to_dict(self): for attr, _ in six.iteritems(self.swagger_types): value = getattr(self, attr) if isinstance(value, list): - result[attr] = list(map( - lambda x: x.to_dict() if hasattr(x, "to_dict") else x, - value - )) + result[attr] = list( + map(lambda x: x.to_dict() if hasattr(x, "to_dict") else x, value) + ) elif hasattr(value, "to_dict"): result[attr] = value.to_dict() elif isinstance(value, dict): - result[attr] = dict(map( - lambda item: (item[0], item[1].to_dict()) - if hasattr(item[1], "to_dict") else item, - value.items() - )) + result[attr] = dict( + map( + lambda item: (item[0], item[1].to_dict()) + if hasattr(item[1], "to_dict") + 
else item, + value.items(), + ) + ) else: result[attr] = value if issubclass(ApiCatalogUpload, dict): diff --git a/api/client/swagger_client/models/api_catalog_upload_error.py b/api/client/swagger_client/models/api_catalog_upload_error.py index bd2cb71b..42f4a550 100644 --- a/api/client/swagger_client/models/api_catalog_upload_error.py +++ b/api/client/swagger_client/models/api_catalog_upload_error.py @@ -14,10 +14,10 @@ """ -import pprint +import pprint # noqa: F401 import re # noqa: F401 -import six +import six # noqa: F401 class ApiCatalogUploadError(object): @@ -34,20 +34,22 @@ class ApiCatalogUploadError(object): and the value is json key in definition. """ swagger_types = { - 'name': 'str', - 'url': 'str', - 'error_message': 'str', - 'status_code': 'int' + "name": "str", + "url": "str", + "error_message": "str", + "status_code": "int", } attribute_map = { - 'name': 'name', - 'url': 'url', - 'error_message': 'error_message', - 'status_code': 'status_code' + "name": "name", + "url": "url", + "error_message": "error_message", + "status_code": "status_code", } - def __init__(self, name=None, url=None, error_message=None, status_code=None): # noqa: E501 + def __init__( + self, name=None, url=None, error_message=None, status_code=None + ): # noqa: E501 """ApiCatalogUploadError - a model defined in Swagger""" # noqa: E501 self._name = None @@ -106,7 +108,9 @@ def url(self, url): :type: str """ if url is None: - raise ValueError("Invalid value for `url`, must not be `None`") # noqa: E501 + raise ValueError( + "Invalid value for `url`, must not be `None`" + ) self._url = url @@ -159,18 +163,20 @@ def to_dict(self): for attr, _ in six.iteritems(self.swagger_types): value = getattr(self, attr) if isinstance(value, list): - result[attr] = list(map( - lambda x: x.to_dict() if hasattr(x, "to_dict") else x, - value - )) + result[attr] = list( + map(lambda x: x.to_dict() if hasattr(x, "to_dict") else x, value) + ) elif hasattr(value, "to_dict"): result[attr] = 
value.to_dict() elif isinstance(value, dict): - result[attr] = dict(map( - lambda item: (item[0], item[1].to_dict()) - if hasattr(item[1], "to_dict") else item, - value.items() - )) + result[attr] = dict( + map( + lambda item: (item[0], item[1].to_dict()) + if hasattr(item[1], "to_dict") + else item, + value.items(), + ) + ) else: result[attr] = value if issubclass(ApiCatalogUploadError, dict): diff --git a/api/client/swagger_client/models/api_catalog_upload_item.py b/api/client/swagger_client/models/api_catalog_upload_item.py index 96aea05e..e260e055 100644 --- a/api/client/swagger_client/models/api_catalog_upload_item.py +++ b/api/client/swagger_client/models/api_catalog_upload_item.py @@ -14,10 +14,10 @@ """ -import pprint +import pprint # noqa: F401 import re # noqa: F401 -import six +import six # noqa: F401 class ApiCatalogUploadItem(object): @@ -33,15 +33,9 @@ class ApiCatalogUploadItem(object): attribute_map (dict): The key is attribute name and the value is json key in definition. 
""" - swagger_types = { - 'name': 'str', - 'url': 'str' - } + swagger_types = {"name": "str", "url": "str"} - attribute_map = { - 'name': 'name', - 'url': 'url' - } + attribute_map = {"name": "name", "url": "url"} def __init__(self, name=None, url=None): # noqa: E501 """ApiCatalogUploadItem - a model defined in Swagger""" # noqa: E501 @@ -96,7 +90,9 @@ def url(self, url): :type: str """ if url is None: - raise ValueError("Invalid value for `url`, must not be `None`") # noqa: E501 + raise ValueError( + "Invalid value for `url`, must not be `None`" + ) self._url = url @@ -107,18 +103,20 @@ def to_dict(self): for attr, _ in six.iteritems(self.swagger_types): value = getattr(self, attr) if isinstance(value, list): - result[attr] = list(map( - lambda x: x.to_dict() if hasattr(x, "to_dict") else x, - value - )) + result[attr] = list( + map(lambda x: x.to_dict() if hasattr(x, "to_dict") else x, value) + ) elif hasattr(value, "to_dict"): result[attr] = value.to_dict() elif isinstance(value, dict): - result[attr] = dict(map( - lambda item: (item[0], item[1].to_dict()) - if hasattr(item[1], "to_dict") else item, - value.items() - )) + result[attr] = dict( + map( + lambda item: (item[0], item[1].to_dict()) + if hasattr(item[1], "to_dict") + else item, + value.items(), + ) + ) else: result[attr] = value if issubclass(ApiCatalogUploadItem, dict): diff --git a/api/client/swagger_client/models/api_catalog_upload_response.py b/api/client/swagger_client/models/api_catalog_upload_response.py index eab6cb44..c5de243c 100644 --- a/api/client/swagger_client/models/api_catalog_upload_response.py +++ b/api/client/swagger_client/models/api_catalog_upload_response.py @@ -14,10 +14,10 @@ """ -import pprint +import pprint # noqa: F401 import re # noqa: F401 -import six +import six # noqa: F401 class ApiCatalogUploadResponse(object): @@ -34,32 +34,44 @@ class ApiCatalogUploadResponse(object): and the value is json key in definition. 
""" swagger_types = { - 'components': 'list[ApiComponent]', - 'datasets': 'list[ApiDataset]', - 'models': 'list[ApiModel]', - 'notebooks': 'list[ApiNotebook]', - 'pipelines': 'list[ApiPipeline]', - 'total_size': 'int', - 'next_page_token': 'str', - 'errors': 'list[ApiCatalogUploadError]', - 'total_errors': 'int', - 'total_created': 'int' + "components": "list[ApiComponent]", + "datasets": "list[ApiDataset]", + "models": "list[ApiModel]", + "notebooks": "list[ApiNotebook]", + "pipelines": "list[ApiPipeline]", + "total_size": "int", + "next_page_token": "str", + "errors": "list[ApiCatalogUploadError]", + "total_errors": "int", + "total_created": "int", } attribute_map = { - 'components': 'components', - 'datasets': 'datasets', - 'models': 'models', - 'notebooks': 'notebooks', - 'pipelines': 'pipelines', - 'total_size': 'total_size', - 'next_page_token': 'next_page_token', - 'errors': 'errors', - 'total_errors': 'total_errors', - 'total_created': 'total_created' + "components": "components", + "datasets": "datasets", + "models": "models", + "notebooks": "notebooks", + "pipelines": "pipelines", + "total_size": "total_size", + "next_page_token": "next_page_token", + "errors": "errors", + "total_errors": "total_errors", + "total_created": "total_created", } - def __init__(self, components=None, datasets=None, models=None, notebooks=None, pipelines=None, total_size=None, next_page_token=None, errors=None, total_errors=None, total_created=None): # noqa: E501 + def __init__( + self, + components=None, + datasets=None, + models=None, + notebooks=None, + pipelines=None, + total_size=None, + next_page_token=None, + errors=None, + total_errors=None, + total_created=None, + ): # noqa: E501 """ApiCatalogUploadResponse - a model defined in Swagger""" # noqa: E501 self._components = None @@ -312,18 +324,20 @@ def to_dict(self): for attr, _ in six.iteritems(self.swagger_types): value = getattr(self, attr) if isinstance(value, list): - result[attr] = list(map( - lambda x: x.to_dict() 
if hasattr(x, "to_dict") else x, - value - )) + result[attr] = list( + map(lambda x: x.to_dict() if hasattr(x, "to_dict") else x, value) + ) elif hasattr(value, "to_dict"): result[attr] = value.to_dict() elif isinstance(value, dict): - result[attr] = dict(map( - lambda item: (item[0], item[1].to_dict()) - if hasattr(item[1], "to_dict") else item, - value.items() - )) + result[attr] = dict( + map( + lambda item: (item[0], item[1].to_dict()) + if hasattr(item[1], "to_dict") + else item, + value.items(), + ) + ) else: result[attr] = value if issubclass(ApiCatalogUploadResponse, dict): diff --git a/api/client/swagger_client/models/api_component.py b/api/client/swagger_client/models/api_component.py index 57edd56b..99d2e4e8 100644 --- a/api/client/swagger_client/models/api_component.py +++ b/api/client/swagger_client/models/api_component.py @@ -14,10 +14,10 @@ """ -import pprint +import pprint # noqa: F401 import re # noqa: F401 -import six +import six # noqa: F401 from swagger_client.models.api_asset import ApiAsset from swagger_client.models.api_metadata import ApiMetadata # noqa: F401,E501 @@ -38,32 +38,44 @@ class ApiComponent(ApiAsset): and the value is json key in definition. 
""" swagger_types = { - 'id': 'str', - 'created_at': 'datetime', - 'name': 'str', - 'description': 'str', - 'featured': 'bool', - 'publish_approved': 'bool', - 'related_assets': 'list[str]', - 'filter_categories': 'dict(str, str)', - 'metadata': 'ApiMetadata', - 'parameters': 'list[ApiParameter]' + "id": "str", + "created_at": "datetime", + "name": "str", + "description": "str", + "featured": "bool", + "publish_approved": "bool", + "related_assets": "list[str]", + "filter_categories": "dict(str, str)", + "metadata": "ApiMetadata", + "parameters": "list[ApiParameter]", } attribute_map = { - 'id': 'id', - 'created_at': 'created_at', - 'name': 'name', - 'description': 'description', - 'featured': 'featured', - 'publish_approved': 'publish_approved', - 'related_assets': 'related_assets', - 'filter_categories': 'filter_categories', - 'metadata': 'metadata', - 'parameters': 'parameters' + "id": "id", + "created_at": "created_at", + "name": "name", + "description": "description", + "featured": "featured", + "publish_approved": "publish_approved", + "related_assets": "related_assets", + "filter_categories": "filter_categories", + "metadata": "metadata", + "parameters": "parameters", } - def __init__(self, id=None, created_at=None, name=None, description=None, featured=None, publish_approved=None, related_assets=None, filter_categories=None, metadata=None, parameters=None): # noqa: E501 + def __init__( + self, + id=None, + created_at=None, + name=None, + description=None, + featured=None, + publish_approved=None, + related_assets=None, + filter_categories=None, + metadata=None, + parameters=None, + ): # noqa: E501 """ApiComponent - a model defined in Swagger""" # noqa: E501 self._id = None @@ -158,7 +170,9 @@ def name(self, name): :type: str """ if name is None: - raise ValueError("Invalid value for `name`, must not be `None`") # noqa: E501 + raise ValueError( + "Invalid value for `name`, must not be `None`" + ) self._name = name @@ -181,7 +195,9 @@ def description(self, 
description): :type: str """ if description is None: - raise ValueError("Invalid value for `description`, must not be `None`") # noqa: E501 + raise ValueError( + "Invalid value for `description`, must not be `None`" + ) self._description = description @@ -318,18 +334,20 @@ def to_dict(self): for attr, _ in six.iteritems(self.swagger_types): value = getattr(self, attr) if isinstance(value, list): - result[attr] = list(map( - lambda x: x.to_dict() if hasattr(x, "to_dict") else x, - value - )) + result[attr] = list( + map(lambda x: x.to_dict() if hasattr(x, "to_dict") else x, value) + ) elif hasattr(value, "to_dict"): result[attr] = value.to_dict() elif isinstance(value, dict): - result[attr] = dict(map( - lambda item: (item[0], item[1].to_dict()) - if hasattr(item[1], "to_dict") else item, - value.items() - )) + result[attr] = dict( + map( + lambda item: (item[0], item[1].to_dict()) + if hasattr(item[1], "to_dict") + else item, + value.items(), + ) + ) else: result[attr] = value if issubclass(ApiComponent, dict): diff --git a/api/client/swagger_client/models/api_credential.py b/api/client/swagger_client/models/api_credential.py index 6586383f..c68752fd 100644 --- a/api/client/swagger_client/models/api_credential.py +++ b/api/client/swagger_client/models/api_credential.py @@ -14,10 +14,10 @@ """ -import pprint +import pprint # noqa: F401 import re # noqa: F401 -import six +import six # noqa: F401 class ApiCredential(object): @@ -34,24 +34,32 @@ class ApiCredential(object): and the value is json key in definition. 
""" swagger_types = { - 'id': 'str', - 'created_at': 'datetime', - 'pipeline_id': 'str', - 'project_id': 'str', - 'api_key': 'str', - 'data_assets': 'list[str]' + "id": "str", + "created_at": "datetime", + "pipeline_id": "str", + "project_id": "str", + "api_key": "str", + "data_assets": "list[str]", } attribute_map = { - 'id': 'id', - 'created_at': 'created_at', - 'pipeline_id': 'pipeline_id', - 'project_id': 'project_id', - 'api_key': 'api_key', - 'data_assets': 'data_assets' + "id": "id", + "created_at": "created_at", + "pipeline_id": "pipeline_id", + "project_id": "project_id", + "api_key": "api_key", + "data_assets": "data_assets", } - def __init__(self, id=None, created_at=None, pipeline_id=None, project_id=None, api_key=None, data_assets=None): # noqa: E501 + def __init__( + self, + id=None, + created_at=None, + pipeline_id=None, + project_id=None, + api_key=None, + data_assets=None, + ): # noqa: E501 """ApiCredential - a model defined in Swagger""" # noqa: E501 self._id = None @@ -134,7 +142,9 @@ def pipeline_id(self, pipeline_id): :type: str """ if pipeline_id is None: - raise ValueError("Invalid value for `pipeline_id`, must not be `None`") # noqa: E501 + raise ValueError( + "Invalid value for `pipeline_id`, must not be `None`" + ) self._pipeline_id = pipeline_id @@ -157,7 +167,9 @@ def project_id(self, project_id): :type: str """ if project_id is None: - raise ValueError("Invalid value for `project_id`, must not be `None`") # noqa: E501 + raise ValueError( + "Invalid value for `project_id`, must not be `None`" + ) self._project_id = project_id @@ -214,18 +226,20 @@ def to_dict(self): for attr, _ in six.iteritems(self.swagger_types): value = getattr(self, attr) if isinstance(value, list): - result[attr] = list(map( - lambda x: x.to_dict() if hasattr(x, "to_dict") else x, - value - )) + result[attr] = list( + map(lambda x: x.to_dict() if hasattr(x, "to_dict") else x, value) + ) elif hasattr(value, "to_dict"): result[attr] = value.to_dict() elif 
isinstance(value, dict): - result[attr] = dict(map( - lambda item: (item[0], item[1].to_dict()) - if hasattr(item[1], "to_dict") else item, - value.items() - )) + result[attr] = dict( + map( + lambda item: (item[0], item[1].to_dict()) + if hasattr(item[1], "to_dict") + else item, + value.items(), + ) + ) else: result[attr] = value if issubclass(ApiCredential, dict): diff --git a/api/client/swagger_client/models/api_dataset.py b/api/client/swagger_client/models/api_dataset.py index 71e73383..5861879a 100644 --- a/api/client/swagger_client/models/api_dataset.py +++ b/api/client/swagger_client/models/api_dataset.py @@ -14,9 +14,9 @@ """ -import pprint +import pprint # noqa: F401 import re # noqa: F401 -import six +import six # noqa: F401 from swagger_client.models.api_asset import ApiAsset @@ -35,40 +35,56 @@ class ApiDataset(ApiAsset): and the value is json key in definition. """ swagger_types = { - 'id': 'str', - 'created_at': 'datetime', - 'name': 'str', - 'description': 'str', - 'featured': 'bool', - 'publish_approved': 'bool', - 'related_assets': 'list[str]', - 'filter_categories': 'dict(str, str)', - 'domain': 'str', - 'format': 'str', - 'size': 'str', - 'number_of_records': 'int', - 'license': 'str', - 'metadata': 'ApiMetadata' + "id": "str", + "created_at": "datetime", + "name": "str", + "description": "str", + "featured": "bool", + "publish_approved": "bool", + "related_assets": "list[str]", + "filter_categories": "dict(str, str)", + "domain": "str", + "format": "str", + "size": "str", + "number_of_records": "int", + "license": "str", + "metadata": "ApiMetadata", } attribute_map = { - 'id': 'id', - 'created_at': 'created_at', - 'name': 'name', - 'description': 'description', - 'featured': 'featured', - 'publish_approved': 'publish_approved', - 'related_assets': 'related_assets', - 'filter_categories': 'filter_categories', - 'domain': 'domain', - 'format': 'format', - 'size': 'size', - 'number_of_records': 'number_of_records', - 'license': 'license', - 
'metadata': 'metadata' + "id": "id", + "created_at": "created_at", + "name": "name", + "description": "description", + "featured": "featured", + "publish_approved": "publish_approved", + "related_assets": "related_assets", + "filter_categories": "filter_categories", + "domain": "domain", + "format": "format", + "size": "size", + "number_of_records": "number_of_records", + "license": "license", + "metadata": "metadata", } - def __init__(self, id=None, created_at=None, name=None, description=None, featured=None, publish_approved=None, related_assets=None, filter_categories=None, domain=None, format=None, size=None, number_of_records=None, license=None, metadata=None): # noqa: E501 + def __init__( + self, + id=None, + created_at=None, + name=None, + description=None, + featured=None, + publish_approved=None, + related_assets=None, + filter_categories=None, + domain=None, + format=None, + size=None, + number_of_records=None, + license=None, + metadata=None, + ): # noqa: E501 """ApiDataset - a model defined in Swagger""" # noqa: E501 self._id = None @@ -175,7 +191,9 @@ def name(self, name): :type: str """ if name is None: - raise ValueError("Invalid value for `name`, must not be `None`") # noqa: E501 + raise ValueError( + "Invalid value for `name`, must not be `None`" + ) self._name = name @@ -198,7 +216,9 @@ def description(self, description): :type: str """ if description is None: - raise ValueError("Invalid value for `description`, must not be `None`") # noqa: E501 + raise ValueError( + "Invalid value for `description`, must not be `None`" + ) self._description = description @@ -419,18 +439,20 @@ def to_dict(self): for attr, _ in six.iteritems(self.swagger_types): value = getattr(self, attr) if isinstance(value, list): - result[attr] = list(map( - lambda x: x.to_dict() if hasattr(x, "to_dict") else x, - value - )) + result[attr] = list( + map(lambda x: x.to_dict() if hasattr(x, "to_dict") else x, value) + ) elif hasattr(value, "to_dict"): result[attr] = 
value.to_dict() elif isinstance(value, dict): - result[attr] = dict(map( - lambda item: (item[0], item[1].to_dict()) - if hasattr(item[1], "to_dict") else item, - value.items() - )) + result[attr] = dict( + map( + lambda item: (item[0], item[1].to_dict()) + if hasattr(item[1], "to_dict") + else item, + value.items(), + ) + ) else: result[attr] = value if issubclass(ApiDataset, dict): diff --git a/api/client/swagger_client/models/api_generate_code_response.py b/api/client/swagger_client/models/api_generate_code_response.py index a3728407..75aa82dd 100644 --- a/api/client/swagger_client/models/api_generate_code_response.py +++ b/api/client/swagger_client/models/api_generate_code_response.py @@ -14,10 +14,10 @@ """ -import pprint +import pprint # noqa: F401 import re # noqa: F401 -import six +import six # noqa: F401 class ApiGenerateCodeResponse(object): @@ -33,13 +33,9 @@ class ApiGenerateCodeResponse(object): attribute_map (dict): The key is attribute name and the value is json key in definition. 
""" - swagger_types = { - 'script': 'str' - } + swagger_types = {"script": "str"} - attribute_map = { - 'script': 'script' - } + attribute_map = {"script": "script"} def __init__(self, script=None): # noqa: E501 """ApiGenerateCodeResponse - a model defined in Swagger""" # noqa: E501 @@ -80,18 +76,20 @@ def to_dict(self): for attr, _ in six.iteritems(self.swagger_types): value = getattr(self, attr) if isinstance(value, list): - result[attr] = list(map( - lambda x: x.to_dict() if hasattr(x, "to_dict") else x, - value - )) + result[attr] = list( + map(lambda x: x.to_dict() if hasattr(x, "to_dict") else x, value) + ) elif hasattr(value, "to_dict"): result[attr] = value.to_dict() elif isinstance(value, dict): - result[attr] = dict(map( - lambda item: (item[0], item[1].to_dict()) - if hasattr(item[1], "to_dict") else item, - value.items() - )) + result[attr] = dict( + map( + lambda item: (item[0], item[1].to_dict()) + if hasattr(item[1], "to_dict") + else item, + value.items(), + ) + ) else: result[attr] = value if issubclass(ApiGenerateCodeResponse, dict): diff --git a/api/client/swagger_client/models/api_generate_model_code_response.py b/api/client/swagger_client/models/api_generate_model_code_response.py index 787571ab..7e6a76d7 100644 --- a/api/client/swagger_client/models/api_generate_model_code_response.py +++ b/api/client/swagger_client/models/api_generate_model_code_response.py @@ -14,10 +14,10 @@ """ -import pprint +import pprint # noqa: F401 import re # noqa: F401 -import six +import six # noqa: F401 from swagger_client.models.api_model_script import ApiModelScript # noqa: F401,E501 @@ -35,13 +35,9 @@ class ApiGenerateModelCodeResponse(object): attribute_map (dict): The key is attribute name and the value is json key in definition. 
""" - swagger_types = { - 'scripts': 'list[ApiModelScript]' - } + swagger_types = {"scripts": "list[ApiModelScript]"} - attribute_map = { - 'scripts': 'scripts' - } + attribute_map = {"scripts": "scripts"} def __init__(self, scripts=None): # noqa: E501 """ApiGenerateModelCodeResponse - a model defined in Swagger""" # noqa: E501 @@ -82,18 +78,20 @@ def to_dict(self): for attr, _ in six.iteritems(self.swagger_types): value = getattr(self, attr) if isinstance(value, list): - result[attr] = list(map( - lambda x: x.to_dict() if hasattr(x, "to_dict") else x, - value - )) + result[attr] = list( + map(lambda x: x.to_dict() if hasattr(x, "to_dict") else x, value) + ) elif hasattr(value, "to_dict"): result[attr] = value.to_dict() elif isinstance(value, dict): - result[attr] = dict(map( - lambda item: (item[0], item[1].to_dict()) - if hasattr(item[1], "to_dict") else item, - value.items() - )) + result[attr] = dict( + map( + lambda item: (item[0], item[1].to_dict()) + if hasattr(item[1], "to_dict") + else item, + value.items(), + ) + ) else: result[attr] = value if issubclass(ApiGenerateModelCodeResponse, dict): diff --git a/api/client/swagger_client/models/api_get_template_response.py b/api/client/swagger_client/models/api_get_template_response.py index b0418a9d..5885ba83 100644 --- a/api/client/swagger_client/models/api_get_template_response.py +++ b/api/client/swagger_client/models/api_get_template_response.py @@ -14,10 +14,10 @@ """ -import pprint +import pprint # noqa: F401 import re # noqa: F401 -import six +import six # noqa: F401 class ApiGetTemplateResponse(object): @@ -33,15 +33,9 @@ class ApiGetTemplateResponse(object): attribute_map (dict): The key is attribute name and the value is json key in definition. 
""" - swagger_types = { - 'template': 'str', - 'url': 'str' - } + swagger_types = {"template": "str", "url": "str"} - attribute_map = { - 'template': 'template', - 'url': 'url' - } + attribute_map = {"template": "template", "url": "url"} def __init__(self, template=None, url=None): # noqa: E501 """ApiGetTemplateResponse - a model defined in Swagger""" # noqa: E501 @@ -82,7 +76,7 @@ def template(self, template): def url(self): """Gets the url of this ApiGetTemplateResponse. # noqa: E501 - The URL to download the template text from S3 storage (Minio) # noqa: E501 + The URL to download the template text from S3 storage (Minio) :return: The url of this ApiGetTemplateResponse. # noqa: E501 :rtype: str @@ -93,7 +87,7 @@ def url(self): def url(self, url): """Sets the url of this ApiGetTemplateResponse. - The URL to download the template text from S3 storage (Minio) # noqa: E501 + The URL to download the template text from S3 storage (Minio) :param url: The url of this ApiGetTemplateResponse. # noqa: E501 :type: str @@ -108,18 +102,20 @@ def to_dict(self): for attr, _ in six.iteritems(self.swagger_types): value = getattr(self, attr) if isinstance(value, list): - result[attr] = list(map( - lambda x: x.to_dict() if hasattr(x, "to_dict") else x, - value - )) + result[attr] = list( + map(lambda x: x.to_dict() if hasattr(x, "to_dict") else x, value) + ) elif hasattr(value, "to_dict"): result[attr] = value.to_dict() elif isinstance(value, dict): - result[attr] = dict(map( - lambda item: (item[0], item[1].to_dict()) - if hasattr(item[1], "to_dict") else item, - value.items() - )) + result[attr] = dict( + map( + lambda item: (item[0], item[1].to_dict()) + if hasattr(item[1], "to_dict") + else item, + value.items(), + ) + ) else: result[attr] = value if issubclass(ApiGetTemplateResponse, dict): diff --git a/api/client/swagger_client/models/api_inferenceservice.py b/api/client/swagger_client/models/api_inferenceservice.py index f1c25172..17bf41ff 100644 --- 
a/api/client/swagger_client/models/api_inferenceservice.py +++ b/api/client/swagger_client/models/api_inferenceservice.py @@ -14,10 +14,10 @@ """ -import pprint +import pprint # noqa: F401 import re # noqa: F401 -import six +import six # noqa: F401 from swagger_client.models.any_value import AnyValue # noqa: F401,E501 @@ -36,20 +36,22 @@ class ApiInferenceservice(object): and the value is json key in definition. """ swagger_types = { - 'api_version': 'str', - 'kind': 'str', - 'metadata': 'AnyValue', - 'spec': 'AnyValue' + "api_version": "str", + "kind": "str", + "metadata": "AnyValue", + "spec": "AnyValue", } attribute_map = { - 'api_version': 'apiVersion', - 'kind': 'kind', - 'metadata': 'metadata', - 'spec': 'spec' + "api_version": "apiVersion", + "kind": "kind", + "metadata": "metadata", + "spec": "spec", } - def __init__(self, api_version=None, kind=None, metadata=None, spec=None): # noqa: E501 + def __init__( + self, api_version=None, kind=None, metadata=None, spec=None + ): # noqa: E501 """ApiInferenceservice - a model defined in Swagger""" # noqa: E501 self._api_version = None @@ -84,7 +86,9 @@ def api_version(self, api_version): :type: str """ if api_version is None: - raise ValueError("Invalid value for `api_version`, must not be `None`") # noqa: E501 + raise ValueError( + "Invalid value for `api_version`, must not be `None`" + ) self._api_version = api_version @@ -107,7 +111,9 @@ def kind(self, kind): :type: str """ if kind is None: - raise ValueError("Invalid value for `kind`, must not be `None`") # noqa: E501 + raise ValueError( + "Invalid value for `kind`, must not be `None`" + ) self._kind = kind @@ -160,18 +166,20 @@ def to_dict(self): for attr, _ in six.iteritems(self.swagger_types): value = getattr(self, attr) if isinstance(value, list): - result[attr] = list(map( - lambda x: x.to_dict() if hasattr(x, "to_dict") else x, - value - )) + result[attr] = list( + map(lambda x: x.to_dict() if hasattr(x, "to_dict") else x, value) + ) elif hasattr(value, 
"to_dict"): result[attr] = value.to_dict() elif isinstance(value, dict): - result[attr] = dict(map( - lambda item: (item[0], item[1].to_dict()) - if hasattr(item[1], "to_dict") else item, - value.items() - )) + result[attr] = dict( + map( + lambda item: (item[0], item[1].to_dict()) + if hasattr(item[1], "to_dict") + else item, + value.items(), + ) + ) else: result[attr] = value if issubclass(ApiInferenceservice, dict): diff --git a/api/client/swagger_client/models/api_list_catalog_items_response.py b/api/client/swagger_client/models/api_list_catalog_items_response.py index 8296f422..173993c5 100644 --- a/api/client/swagger_client/models/api_list_catalog_items_response.py +++ b/api/client/swagger_client/models/api_list_catalog_items_response.py @@ -14,10 +14,10 @@ """ -import pprint +import pprint # noqa: F401 import re # noqa: F401 -import six +import six # noqa: F401 class ApiListCatalogItemsResponse(object): @@ -34,26 +34,35 @@ class ApiListCatalogItemsResponse(object): and the value is json key in definition. 
""" swagger_types = { - 'components': 'list[ApiComponent]', - 'datasets': 'list[ApiDataset]', - 'models': 'list[ApiModel]', - 'notebooks': 'list[ApiNotebook]', - 'pipelines': 'list[ApiPipeline]', - 'total_size': 'int', - 'next_page_token': 'str' + "components": "list[ApiComponent]", + "datasets": "list[ApiDataset]", + "models": "list[ApiModel]", + "notebooks": "list[ApiNotebook]", + "pipelines": "list[ApiPipeline]", + "total_size": "int", + "next_page_token": "str", } attribute_map = { - 'components': 'components', - 'datasets': 'datasets', - 'models': 'models', - 'notebooks': 'notebooks', - 'pipelines': 'pipelines', - 'total_size': 'total_size', - 'next_page_token': 'next_page_token' + "components": "components", + "datasets": "datasets", + "models": "models", + "notebooks": "notebooks", + "pipelines": "pipelines", + "total_size": "total_size", + "next_page_token": "next_page_token", } - def __init__(self, components=None, datasets=None, models=None, notebooks=None, pipelines=None, total_size=None, next_page_token=None): # noqa: E501 + def __init__( + self, + components=None, + datasets=None, + models=None, + notebooks=None, + pipelines=None, + total_size=None, + next_page_token=None, + ): # noqa: E501 """ApiListCatalogItemsResponse - a model defined in Swagger""" # noqa: E501 self._components = None @@ -234,18 +243,20 @@ def to_dict(self): for attr, _ in six.iteritems(self.swagger_types): value = getattr(self, attr) if isinstance(value, list): - result[attr] = list(map( - lambda x: x.to_dict() if hasattr(x, "to_dict") else x, - value - )) + result[attr] = list( + map(lambda x: x.to_dict() if hasattr(x, "to_dict") else x, value) + ) elif hasattr(value, "to_dict"): result[attr] = value.to_dict() elif isinstance(value, dict): - result[attr] = dict(map( - lambda item: (item[0], item[1].to_dict()) - if hasattr(item[1], "to_dict") else item, - value.items() - )) + result[attr] = dict( + map( + lambda item: (item[0], item[1].to_dict()) + if hasattr(item[1], "to_dict") + 
else item, + value.items(), + ) + ) else: result[attr] = value if issubclass(ApiListCatalogItemsResponse, dict): diff --git a/api/client/swagger_client/models/api_list_catalog_upload_errors.py b/api/client/swagger_client/models/api_list_catalog_upload_errors.py index f434defe..c9d78838 100644 --- a/api/client/swagger_client/models/api_list_catalog_upload_errors.py +++ b/api/client/swagger_client/models/api_list_catalog_upload_errors.py @@ -14,10 +14,10 @@ """ -import pprint +import pprint # noqa: F401 import re # noqa: F401 -import six +import six # noqa: F401 class ApiListCatalogUploadErrors(object): @@ -33,15 +33,9 @@ class ApiListCatalogUploadErrors(object): attribute_map (dict): The key is attribute name and the value is json key in definition. """ - swagger_types = { - 'errors': 'list[ApiCatalogUploadError]', - 'total_errors': 'int' - } + swagger_types = {"errors": "list[ApiCatalogUploadError]", "total_errors": "int"} - attribute_map = { - 'errors': 'errors', - 'total_errors': 'total_errors' - } + attribute_map = {"errors": "errors", "total_errors": "total_errors"} def __init__(self, errors=None, total_errors=None): # noqa: E501 """ApiListCatalogUploadErrors - a model defined in Swagger""" # noqa: E501 @@ -104,18 +98,20 @@ def to_dict(self): for attr, _ in six.iteritems(self.swagger_types): value = getattr(self, attr) if isinstance(value, list): - result[attr] = list(map( - lambda x: x.to_dict() if hasattr(x, "to_dict") else x, - value - )) + result[attr] = list( + map(lambda x: x.to_dict() if hasattr(x, "to_dict") else x, value) + ) elif hasattr(value, "to_dict"): result[attr] = value.to_dict() elif isinstance(value, dict): - result[attr] = dict(map( - lambda item: (item[0], item[1].to_dict()) - if hasattr(item[1], "to_dict") else item, - value.items() - )) + result[attr] = dict( + map( + lambda item: (item[0], item[1].to_dict()) + if hasattr(item[1], "to_dict") + else item, + value.items(), + ) + ) else: result[attr] = value if 
issubclass(ApiListCatalogUploadErrors, dict): diff --git a/api/client/swagger_client/models/api_list_components_response.py b/api/client/swagger_client/models/api_list_components_response.py index c1d0085d..e044f259 100644 --- a/api/client/swagger_client/models/api_list_components_response.py +++ b/api/client/swagger_client/models/api_list_components_response.py @@ -14,10 +14,10 @@ """ -import pprint +import pprint # noqa: F401 import re # noqa: F401 -import six +import six # noqa: F401 from swagger_client.models.api_component import ApiComponent # noqa: F401,E501 @@ -36,18 +36,20 @@ class ApiListComponentsResponse(object): and the value is json key in definition. """ swagger_types = { - 'components': 'list[ApiComponent]', - 'total_size': 'int', - 'next_page_token': 'str' + "components": "list[ApiComponent]", + "total_size": "int", + "next_page_token": "str", } attribute_map = { - 'components': 'components', - 'total_size': 'total_size', - 'next_page_token': 'next_page_token' + "components": "components", + "total_size": "total_size", + "next_page_token": "next_page_token", } - def __init__(self, components=None, total_size=None, next_page_token=None): # noqa: E501 + def __init__( + self, components=None, total_size=None, next_page_token=None + ): # noqa: E501 """ApiListComponentsResponse - a model defined in Swagger""" # noqa: E501 self._components = None @@ -132,18 +134,20 @@ def to_dict(self): for attr, _ in six.iteritems(self.swagger_types): value = getattr(self, attr) if isinstance(value, list): - result[attr] = list(map( - lambda x: x.to_dict() if hasattr(x, "to_dict") else x, - value - )) + result[attr] = list( + map(lambda x: x.to_dict() if hasattr(x, "to_dict") else x, value) + ) elif hasattr(value, "to_dict"): result[attr] = value.to_dict() elif isinstance(value, dict): - result[attr] = dict(map( - lambda item: (item[0], item[1].to_dict()) - if hasattr(item[1], "to_dict") else item, - value.items() - )) + result[attr] = dict( + map( + lambda item: 
(item[0], item[1].to_dict()) + if hasattr(item[1], "to_dict") + else item, + value.items(), + ) + ) else: result[attr] = value if issubclass(ApiListComponentsResponse, dict): diff --git a/api/client/swagger_client/models/api_list_credentials_response.py b/api/client/swagger_client/models/api_list_credentials_response.py index edca354f..79da88aa 100644 --- a/api/client/swagger_client/models/api_list_credentials_response.py +++ b/api/client/swagger_client/models/api_list_credentials_response.py @@ -14,10 +14,10 @@ """ -import pprint +import pprint # noqa: F401 import re # noqa: F401 -import six +import six # noqa: F401 from swagger_client.models.api_credential import ApiCredential # noqa: F401,E501 @@ -36,18 +36,20 @@ class ApiListCredentialsResponse(object): and the value is json key in definition. """ swagger_types = { - 'credentials': 'list[ApiCredential]', - 'total_size': 'int', - 'next_page_token': 'str' + "credentials": "list[ApiCredential]", + "total_size": "int", + "next_page_token": "str", } attribute_map = { - 'credentials': 'credentials', - 'total_size': 'total_size', - 'next_page_token': 'next_page_token' + "credentials": "credentials", + "total_size": "total_size", + "next_page_token": "next_page_token", } - def __init__(self, credentials=None, total_size=None, next_page_token=None): # noqa: E501 + def __init__( + self, credentials=None, total_size=None, next_page_token=None + ): # noqa: E501 """ApiListCredentialsResponse - a model defined in Swagger""" # noqa: E501 self._credentials = None @@ -132,18 +134,20 @@ def to_dict(self): for attr, _ in six.iteritems(self.swagger_types): value = getattr(self, attr) if isinstance(value, list): - result[attr] = list(map( - lambda x: x.to_dict() if hasattr(x, "to_dict") else x, - value - )) + result[attr] = list( + map(lambda x: x.to_dict() if hasattr(x, "to_dict") else x, value) + ) elif hasattr(value, "to_dict"): result[attr] = value.to_dict() elif isinstance(value, dict): - result[attr] = dict(map( - lambda 
item: (item[0], item[1].to_dict()) - if hasattr(item[1], "to_dict") else item, - value.items() - )) + result[attr] = dict( + map( + lambda item: (item[0], item[1].to_dict()) + if hasattr(item[1], "to_dict") + else item, + value.items(), + ) + ) else: result[attr] = value if issubclass(ApiListCredentialsResponse, dict): diff --git a/api/client/swagger_client/models/api_list_datasets_response.py b/api/client/swagger_client/models/api_list_datasets_response.py index 7f94a35b..4d7596fe 100644 --- a/api/client/swagger_client/models/api_list_datasets_response.py +++ b/api/client/swagger_client/models/api_list_datasets_response.py @@ -14,10 +14,10 @@ """ -import pprint +import pprint # noqa: F401 import re # noqa: F401 -import six +import six # noqa: F401 class ApiListDatasetsResponse(object): @@ -34,18 +34,20 @@ class ApiListDatasetsResponse(object): and the value is json key in definition. """ swagger_types = { - 'datasets': 'list[ApiDataset]', - 'total_size': 'int', - 'next_page_token': 'str' + "datasets": "list[ApiDataset]", + "total_size": "int", + "next_page_token": "str", } attribute_map = { - 'datasets': 'datasets', - 'total_size': 'total_size', - 'next_page_token': 'next_page_token' + "datasets": "datasets", + "total_size": "total_size", + "next_page_token": "next_page_token", } - def __init__(self, datasets=None, total_size=None, next_page_token=None): # noqa: E501 + def __init__( + self, datasets=None, total_size=None, next_page_token=None + ): # noqa: E501 """ApiListDatasetsResponse - a model defined in Swagger""" # noqa: E501 self._datasets = None @@ -130,18 +132,20 @@ def to_dict(self): for attr, _ in six.iteritems(self.swagger_types): value = getattr(self, attr) if isinstance(value, list): - result[attr] = list(map( - lambda x: x.to_dict() if hasattr(x, "to_dict") else x, - value - )) + result[attr] = list( + map(lambda x: x.to_dict() if hasattr(x, "to_dict") else x, value) + ) elif hasattr(value, "to_dict"): result[attr] = value.to_dict() elif 
isinstance(value, dict): - result[attr] = dict(map( - lambda item: (item[0], item[1].to_dict()) - if hasattr(item[1], "to_dict") else item, - value.items() - )) + result[attr] = dict( + map( + lambda item: (item[0], item[1].to_dict()) + if hasattr(item[1], "to_dict") + else item, + value.items(), + ) + ) else: result[attr] = value if issubclass(ApiListDatasetsResponse, dict): diff --git a/api/client/swagger_client/models/api_list_inferenceservices_response.py b/api/client/swagger_client/models/api_list_inferenceservices_response.py index 9d70b760..aa5dcdd3 100644 --- a/api/client/swagger_client/models/api_list_inferenceservices_response.py +++ b/api/client/swagger_client/models/api_list_inferenceservices_response.py @@ -14,12 +14,14 @@ """ -import pprint +import pprint # noqa: F401 import re # noqa: F401 -import six +import six # noqa: F401 -from swagger_client.models.api_inferenceservice import ApiInferenceservice # noqa: F401,E501 +from swagger_client.models.api_inferenceservice import ( # noqa: F401 + ApiInferenceservice, +) class ApiListInferenceservicesResponse(object): @@ -36,18 +38,20 @@ class ApiListInferenceservicesResponse(object): and the value is json key in definition. 
""" swagger_types = { - 'inferenceservices': 'list[ApiInferenceservice]', - 'total_size': 'int', - 'next_page_token': 'str' + "inferenceservices": "list[ApiInferenceservice]", + "total_size": "int", + "next_page_token": "str", } attribute_map = { - 'inferenceservices': 'Inferenceservices', - 'total_size': 'total_size', - 'next_page_token': 'next_page_token' + "inferenceservices": "Inferenceservices", + "total_size": "total_size", + "next_page_token": "next_page_token", } - def __init__(self, inferenceservices=None, total_size=None, next_page_token=None): # noqa: E501 + def __init__( + self, inferenceservices=None, total_size=None, next_page_token=None + ): # noqa: E501 """ApiListInferenceservicesResponse - a model defined in Swagger""" # noqa: E501 self._inferenceservices = None @@ -132,18 +136,20 @@ def to_dict(self): for attr, _ in six.iteritems(self.swagger_types): value = getattr(self, attr) if isinstance(value, list): - result[attr] = list(map( - lambda x: x.to_dict() if hasattr(x, "to_dict") else x, - value - )) + result[attr] = list( + map(lambda x: x.to_dict() if hasattr(x, "to_dict") else x, value) + ) elif hasattr(value, "to_dict"): result[attr] = value.to_dict() elif isinstance(value, dict): - result[attr] = dict(map( - lambda item: (item[0], item[1].to_dict()) - if hasattr(item[1], "to_dict") else item, - value.items() - )) + result[attr] = dict( + map( + lambda item: (item[0], item[1].to_dict()) + if hasattr(item[1], "to_dict") + else item, + value.items(), + ) + ) else: result[attr] = value if issubclass(ApiListInferenceservicesResponse, dict): diff --git a/api/client/swagger_client/models/api_list_models_response.py b/api/client/swagger_client/models/api_list_models_response.py index c61cf0aa..373f805e 100644 --- a/api/client/swagger_client/models/api_list_models_response.py +++ b/api/client/swagger_client/models/api_list_models_response.py @@ -14,10 +14,10 @@ """ -import pprint +import pprint # noqa: F401 import re # noqa: F401 -import six +import 
six # noqa: F401 from swagger_client.models.api_model import ApiModel # noqa: F401,E501 @@ -36,18 +36,20 @@ class ApiListModelsResponse(object): and the value is json key in definition. """ swagger_types = { - 'models': 'list[ApiModel]', - 'total_size': 'int', - 'next_page_token': 'str' + "models": "list[ApiModel]", + "total_size": "int", + "next_page_token": "str", } attribute_map = { - 'models': 'models', - 'total_size': 'total_size', - 'next_page_token': 'next_page_token' + "models": "models", + "total_size": "total_size", + "next_page_token": "next_page_token", } - def __init__(self, models=None, total_size=None, next_page_token=None): # noqa: E501 + def __init__( + self, models=None, total_size=None, next_page_token=None + ): # noqa: E501 """ApiListModelsResponse - a model defined in Swagger""" # noqa: E501 self._models = None @@ -132,18 +134,20 @@ def to_dict(self): for attr, _ in six.iteritems(self.swagger_types): value = getattr(self, attr) if isinstance(value, list): - result[attr] = list(map( - lambda x: x.to_dict() if hasattr(x, "to_dict") else x, - value - )) + result[attr] = list( + map(lambda x: x.to_dict() if hasattr(x, "to_dict") else x, value) + ) elif hasattr(value, "to_dict"): result[attr] = value.to_dict() elif isinstance(value, dict): - result[attr] = dict(map( - lambda item: (item[0], item[1].to_dict()) - if hasattr(item[1], "to_dict") else item, - value.items() - )) + result[attr] = dict( + map( + lambda item: (item[0], item[1].to_dict()) + if hasattr(item[1], "to_dict") + else item, + value.items(), + ) + ) else: result[attr] = value if issubclass(ApiListModelsResponse, dict): diff --git a/api/client/swagger_client/models/api_list_notebooks_response.py b/api/client/swagger_client/models/api_list_notebooks_response.py index 3470ed7f..4e86569b 100644 --- a/api/client/swagger_client/models/api_list_notebooks_response.py +++ b/api/client/swagger_client/models/api_list_notebooks_response.py @@ -14,10 +14,10 @@ """ -import pprint +import pprint # 
noqa: F401 import re # noqa: F401 -import six +import six # noqa: F401 from swagger_client.models.api_notebook import ApiNotebook # noqa: F401,E501 @@ -36,18 +36,20 @@ class ApiListNotebooksResponse(object): and the value is json key in definition. """ swagger_types = { - 'notebooks': 'list[ApiNotebook]', - 'total_size': 'int', - 'next_page_token': 'str' + "notebooks": "list[ApiNotebook]", + "total_size": "int", + "next_page_token": "str", } attribute_map = { - 'notebooks': 'notebooks', - 'total_size': 'total_size', - 'next_page_token': 'next_page_token' + "notebooks": "notebooks", + "total_size": "total_size", + "next_page_token": "next_page_token", } - def __init__(self, notebooks=None, total_size=None, next_page_token=None): # noqa: E501 + def __init__( + self, notebooks=None, total_size=None, next_page_token=None + ): # noqa: E501 """ApiListNotebooksResponse - a model defined in Swagger""" # noqa: E501 self._notebooks = None @@ -132,18 +134,20 @@ def to_dict(self): for attr, _ in six.iteritems(self.swagger_types): value = getattr(self, attr) if isinstance(value, list): - result[attr] = list(map( - lambda x: x.to_dict() if hasattr(x, "to_dict") else x, - value - )) + result[attr] = list( + map(lambda x: x.to_dict() if hasattr(x, "to_dict") else x, value) + ) elif hasattr(value, "to_dict"): result[attr] = value.to_dict() elif isinstance(value, dict): - result[attr] = dict(map( - lambda item: (item[0], item[1].to_dict()) - if hasattr(item[1], "to_dict") else item, - value.items() - )) + result[attr] = dict( + map( + lambda item: (item[0], item[1].to_dict()) + if hasattr(item[1], "to_dict") + else item, + value.items(), + ) + ) else: result[attr] = value if issubclass(ApiListNotebooksResponse, dict): diff --git a/api/client/swagger_client/models/api_list_pipelines_response.py b/api/client/swagger_client/models/api_list_pipelines_response.py index eaa36945..40daa439 100644 --- a/api/client/swagger_client/models/api_list_pipelines_response.py +++ 
b/api/client/swagger_client/models/api_list_pipelines_response.py @@ -14,12 +14,14 @@ """ -import pprint +import pprint # noqa: F401 import re # noqa: F401 -import six +import six # noqa: F401 -from swagger_client.models.api_pipeline_extended import ApiPipelineExtended # noqa: F401,E501 +from swagger_client.models.api_pipeline_extended import ( # noqa: F401 + ApiPipelineExtended, +) class ApiListPipelinesResponse(object): @@ -36,18 +38,20 @@ class ApiListPipelinesResponse(object): and the value is json key in definition. """ swagger_types = { - 'pipelines': 'list[ApiPipelineExtended]', - 'total_size': 'int', - 'next_page_token': 'str' + "pipelines": "list[ApiPipelineExtended]", + "total_size": "int", + "next_page_token": "str", } attribute_map = { - 'pipelines': 'pipelines', - 'total_size': 'total_size', - 'next_page_token': 'next_page_token' + "pipelines": "pipelines", + "total_size": "total_size", + "next_page_token": "next_page_token", } - def __init__(self, pipelines=None, total_size=None, next_page_token=None): # noqa: E501 + def __init__( + self, pipelines=None, total_size=None, next_page_token=None + ): # noqa: E501 """ApiListPipelinesResponse - a model defined in Swagger""" # noqa: E501 self._pipelines = None @@ -132,18 +136,20 @@ def to_dict(self): for attr, _ in six.iteritems(self.swagger_types): value = getattr(self, attr) if isinstance(value, list): - result[attr] = list(map( - lambda x: x.to_dict() if hasattr(x, "to_dict") else x, - value - )) + result[attr] = list( + map(lambda x: x.to_dict() if hasattr(x, "to_dict") else x, value) + ) elif hasattr(value, "to_dict"): result[attr] = value.to_dict() elif isinstance(value, dict): - result[attr] = dict(map( - lambda item: (item[0], item[1].to_dict()) - if hasattr(item[1], "to_dict") else item, - value.items() - )) + result[attr] = dict( + map( + lambda item: (item[0], item[1].to_dict()) + if hasattr(item[1], "to_dict") + else item, + value.items(), + ) + ) else: result[attr] = value if 
issubclass(ApiListPipelinesResponse, dict): diff --git a/api/client/swagger_client/models/api_metadata.py b/api/client/swagger_client/models/api_metadata.py index e405fba4..02bec979 100644 --- a/api/client/swagger_client/models/api_metadata.py +++ b/api/client/swagger_client/models/api_metadata.py @@ -14,10 +14,10 @@ """ -import pprint +import pprint # noqa: F401 import re # noqa: F401 -import six +import six # noqa: F401 class ApiMetadata(object): @@ -34,16 +34,12 @@ class ApiMetadata(object): and the value is json key in definition. """ swagger_types = { - 'annotations': 'dict(str, str)', - 'labels': 'dict(str, str)', - 'tags': 'list[str]' + "annotations": "dict(str, str)", + "labels": "dict(str, str)", + "tags": "list[str]", } - attribute_map = { - 'annotations': 'annotations', - 'labels': 'labels', - 'tags': 'tags' - } + attribute_map = {"annotations": "annotations", "labels": "labels", "tags": "tags"} def __init__(self, annotations=None, labels=None, tags=None): # noqa: E501 """ApiMetadata - a model defined in Swagger""" # noqa: E501 @@ -130,18 +126,20 @@ def to_dict(self): for attr, _ in six.iteritems(self.swagger_types): value = getattr(self, attr) if isinstance(value, list): - result[attr] = list(map( - lambda x: x.to_dict() if hasattr(x, "to_dict") else x, - value - )) + result[attr] = list( + map(lambda x: x.to_dict() if hasattr(x, "to_dict") else x, value) + ) elif hasattr(value, "to_dict"): result[attr] = value.to_dict() elif isinstance(value, dict): - result[attr] = dict(map( - lambda item: (item[0], item[1].to_dict()) - if hasattr(item[1], "to_dict") else item, - value.items() - )) + result[attr] = dict( + map( + lambda item: (item[0], item[1].to_dict()) + if hasattr(item[1], "to_dict") + else item, + value.items(), + ) + ) else: result[attr] = value if issubclass(ApiMetadata, dict): diff --git a/api/client/swagger_client/models/api_model.py b/api/client/swagger_client/models/api_model.py index 2d53f266..f14bcc1a 100644 --- 
a/api/client/swagger_client/models/api_model.py +++ b/api/client/swagger_client/models/api_model.py @@ -14,12 +14,14 @@ """ -import pprint +import pprint # noqa: F401 import re # noqa: F401 -import six +import six # noqa: F401 from swagger_client.models.api_asset import ApiAsset -from swagger_client.models.api_model_framework import ApiModelFramework # noqa: F401,E501 +from swagger_client.models.api_model_framework import ( # noqa: F401 + ApiModelFramework, +) from swagger_client.models.api_parameter import ApiParameter # noqa: F401,E501 @@ -37,50 +39,71 @@ class ApiModel(ApiAsset): and the value is json key in definition. """ swagger_types = { - 'id': 'str', - 'created_at': 'datetime', - 'name': 'str', - 'description': 'str', - 'featured': 'bool', - 'publish_approved': 'bool', - 'related_assets': 'list[str]', - 'filter_categories': 'dict(str, str)', - 'domain': 'str', - 'labels': 'dict(str, str)', - 'framework': 'ApiModelFramework', - 'trainable': 'bool', - 'trainable_tested_platforms': 'list[str]', - 'trainable_credentials_required': 'bool', - 'trainable_parameters': 'list[ApiParameter]', - 'servable': 'bool', - 'servable_tested_platforms': 'list[str]', - 'servable_credentials_required': 'bool', - 'servable_parameters': 'list[ApiParameter]' + "id": "str", + "created_at": "datetime", + "name": "str", + "description": "str", + "featured": "bool", + "publish_approved": "bool", + "related_assets": "list[str]", + "filter_categories": "dict(str, str)", + "domain": "str", + "labels": "dict(str, str)", + "framework": "ApiModelFramework", + "trainable": "bool", + "trainable_tested_platforms": "list[str]", + "trainable_credentials_required": "bool", + "trainable_parameters": "list[ApiParameter]", + "servable": "bool", + "servable_tested_platforms": "list[str]", + "servable_credentials_required": "bool", + "servable_parameters": "list[ApiParameter]", } attribute_map = { - 'id': 'id', - 'created_at': 'created_at', - 'name': 'name', - 'description': 'description', - 
'featured': 'featured', - 'publish_approved': 'publish_approved', - 'related_assets': 'related_assets', - 'filter_categories': 'filter_categories', - 'domain': 'domain', - 'labels': 'labels', - 'framework': 'framework', - 'trainable': 'trainable', - 'trainable_tested_platforms': 'trainable_tested_platforms', - 'trainable_credentials_required': 'trainable_credentials_required', - 'trainable_parameters': 'trainable_parameters', - 'servable': 'servable', - 'servable_tested_platforms': 'servable_tested_platforms', - 'servable_credentials_required': 'servable_credentials_required', - 'servable_parameters': 'servable_parameters' + "id": "id", + "created_at": "created_at", + "name": "name", + "description": "description", + "featured": "featured", + "publish_approved": "publish_approved", + "related_assets": "related_assets", + "filter_categories": "filter_categories", + "domain": "domain", + "labels": "labels", + "framework": "framework", + "trainable": "trainable", + "trainable_tested_platforms": "trainable_tested_platforms", + "trainable_credentials_required": "trainable_credentials_required", + "trainable_parameters": "trainable_parameters", + "servable": "servable", + "servable_tested_platforms": "servable_tested_platforms", + "servable_credentials_required": "servable_credentials_required", + "servable_parameters": "servable_parameters", } - def __init__(self, id=None, created_at=None, name=None, description=None, featured=None, publish_approved=None, related_assets=None, filter_categories=None, domain=None, labels=None, framework=None, trainable=None, trainable_tested_platforms=None, trainable_credentials_required=None, trainable_parameters=None, servable=None, servable_tested_platforms=None, servable_credentials_required=None, servable_parameters=None): # noqa: E501 + def __init__( + self, + id=None, + created_at=None, + name=None, + description=None, + featured=None, + publish_approved=None, + related_assets=None, + filter_categories=None, + domain=None, + 
labels=None, + framework=None, + trainable=None, + trainable_tested_platforms=None, + trainable_credentials_required=None, + trainable_parameters=None, + servable=None, + servable_tested_platforms=None, + servable_credentials_required=None, + servable_parameters=None, + ): # noqa: E501 """ApiModel - a model defined in Swagger""" # noqa: E501 self._id = None @@ -201,7 +224,9 @@ def name(self, name): :type: str """ if name is None: - raise ValueError("Invalid value for `name`, must not be `None`") # noqa: E501 + raise ValueError( + "Invalid value for `name`, must not be `None`" + ) self._name = name @@ -224,7 +249,9 @@ def description(self, description): :type: str """ if description is None: - raise ValueError("Invalid value for `description`, must not be `None`") # noqa: E501 + raise ValueError( + "Invalid value for `description`, must not be `None`" + ) self._description = description @@ -373,7 +400,9 @@ def framework(self, framework): :type: ApiModelFramework """ if framework is None: - raise ValueError("Invalid value for `framework`, must not be `None`") # noqa: E501 + raise ValueError( + "Invalid value for `framework`, must not be `None`" + ) self._framework = framework @@ -552,18 +581,20 @@ def to_dict(self): for attr, _ in six.iteritems(self.swagger_types): value = getattr(self, attr) if isinstance(value, list): - result[attr] = list(map( - lambda x: x.to_dict() if hasattr(x, "to_dict") else x, - value - )) + result[attr] = list( + map(lambda x: x.to_dict() if hasattr(x, "to_dict") else x, value) + ) elif hasattr(value, "to_dict"): result[attr] = value.to_dict() elif isinstance(value, dict): - result[attr] = dict(map( - lambda item: (item[0], item[1].to_dict()) - if hasattr(item[1], "to_dict") else item, - value.items() - )) + result[attr] = dict( + map( + lambda item: (item[0], item[1].to_dict()) + if hasattr(item[1], "to_dict") + else item, + value.items(), + ) + ) else: result[attr] = value if issubclass(ApiModel, dict): diff --git 
a/api/client/swagger_client/models/api_model_framework.py b/api/client/swagger_client/models/api_model_framework.py index 87eb99a9..d418dcb6 100644 --- a/api/client/swagger_client/models/api_model_framework.py +++ b/api/client/swagger_client/models/api_model_framework.py @@ -14,12 +14,14 @@ """ -import pprint +import pprint # noqa: F401 import re # noqa: F401 -import six +import six # noqa: F401 -from swagger_client.models.api_model_framework_runtimes import ApiModelFrameworkRuntimes # noqa: F401,E501 +from swagger_client.models.api_model_framework_runtimes import ( # noqa: F401 + ApiModelFrameworkRuntimes, +) class ApiModelFramework(object): @@ -36,16 +38,12 @@ class ApiModelFramework(object): and the value is json key in definition. """ swagger_types = { - 'name': 'str', - 'version': 'str', - 'runtimes': 'ApiModelFrameworkRuntimes' + "name": "str", + "version": "str", + "runtimes": "ApiModelFrameworkRuntimes", } - attribute_map = { - 'name': 'name', - 'version': 'version', - 'runtimes': 'runtimes' - } + attribute_map = {"name": "name", "version": "version", "runtimes": "runtimes"} def __init__(self, name=None, version=None, runtimes=None): # noqa: E501 """ApiModelFramework - a model defined in Swagger""" # noqa: E501 @@ -80,7 +78,9 @@ def name(self, name): :type: str """ if name is None: - raise ValueError("Invalid value for `name`, must not be `None`") # noqa: E501 + raise ValueError( + "Invalid value for `name`, must not be `None`" + ) self._name = name @@ -133,18 +133,20 @@ def to_dict(self): for attr, _ in six.iteritems(self.swagger_types): value = getattr(self, attr) if isinstance(value, list): - result[attr] = list(map( - lambda x: x.to_dict() if hasattr(x, "to_dict") else x, - value - )) + result[attr] = list( + map(lambda x: x.to_dict() if hasattr(x, "to_dict") else x, value) + ) elif hasattr(value, "to_dict"): result[attr] = value.to_dict() elif isinstance(value, dict): - result[attr] = dict(map( - lambda item: (item[0], item[1].to_dict()) - if 
hasattr(item[1], "to_dict") else item, - value.items() - )) + result[attr] = dict( + map( + lambda item: (item[0], item[1].to_dict()) + if hasattr(item[1], "to_dict") + else item, + value.items(), + ) + ) else: result[attr] = value if issubclass(ApiModelFramework, dict): diff --git a/api/client/swagger_client/models/api_model_framework_runtimes.py b/api/client/swagger_client/models/api_model_framework_runtimes.py index db1fa331..4dde8579 100644 --- a/api/client/swagger_client/models/api_model_framework_runtimes.py +++ b/api/client/swagger_client/models/api_model_framework_runtimes.py @@ -14,10 +14,10 @@ """ -import pprint +import pprint # noqa: F401 import re # noqa: F401 -import six +import six # noqa: F401 class ApiModelFrameworkRuntimes(object): @@ -33,15 +33,9 @@ class ApiModelFrameworkRuntimes(object): attribute_map (dict): The key is attribute name and the value is json key in definition. """ - swagger_types = { - 'name': 'str', - 'version': 'str' - } + swagger_types = {"name": "str", "version": "str"} - attribute_map = { - 'name': 'name', - 'version': 'version' - } + attribute_map = {"name": "name", "version": "version"} def __init__(self, name=None, version=None): # noqa: E501 """ApiModelFrameworkRuntimes - a model defined in Swagger""" # noqa: E501 @@ -104,18 +98,20 @@ def to_dict(self): for attr, _ in six.iteritems(self.swagger_types): value = getattr(self, attr) if isinstance(value, list): - result[attr] = list(map( - lambda x: x.to_dict() if hasattr(x, "to_dict") else x, - value - )) + result[attr] = list( + map(lambda x: x.to_dict() if hasattr(x, "to_dict") else x, value) + ) elif hasattr(value, "to_dict"): result[attr] = value.to_dict() elif isinstance(value, dict): - result[attr] = dict(map( - lambda item: (item[0], item[1].to_dict()) - if hasattr(item[1], "to_dict") else item, - value.items() - )) + result[attr] = dict( + map( + lambda item: (item[0], item[1].to_dict()) + if hasattr(item[1], "to_dict") + else item, + value.items(), + ) + ) else: 
result[attr] = value if issubclass(ApiModelFrameworkRuntimes, dict): diff --git a/api/client/swagger_client/models/api_model_script.py b/api/client/swagger_client/models/api_model_script.py index d57941c9..d7ae8201 100644 --- a/api/client/swagger_client/models/api_model_script.py +++ b/api/client/swagger_client/models/api_model_script.py @@ -14,10 +14,10 @@ """ -import pprint +import pprint # noqa: F401 import re # noqa: F401 -import six +import six # noqa: F401 class ApiModelScript(object): @@ -34,18 +34,20 @@ class ApiModelScript(object): and the value is json key in definition. """ swagger_types = { - 'pipeline_stage': 'str', - 'execution_platform': 'str', - 'script_code': 'str' + "pipeline_stage": "str", + "execution_platform": "str", + "script_code": "str", } attribute_map = { - 'pipeline_stage': 'pipeline_stage', - 'execution_platform': 'execution_platform', - 'script_code': 'script_code' + "pipeline_stage": "pipeline_stage", + "execution_platform": "execution_platform", + "script_code": "script_code", } - def __init__(self, pipeline_stage=None, execution_platform=None, script_code=None): # noqa: E501 + def __init__( + self, pipeline_stage=None, execution_platform=None, script_code=None + ): # noqa: E501 """ApiModelScript - a model defined in Swagger""" # noqa: E501 self._pipeline_stage = None @@ -78,7 +80,9 @@ def pipeline_stage(self, pipeline_stage): :type: str """ if pipeline_stage is None: - raise ValueError("Invalid value for `pipeline_stage`, must not be `None`") # noqa: E501 + raise ValueError( + "Invalid value for `pipeline_stage`, must not be `None`" + ) self._pipeline_stage = pipeline_stage @@ -103,7 +107,9 @@ def execution_platform(self, execution_platform): :type: str """ if execution_platform is None: - raise ValueError("Invalid value for `execution_platform`, must not be `None`") # noqa: E501 + raise ValueError( + "Invalid value for `execution_platform`, must not be `None`" + ) self._execution_platform = execution_platform @@ -128,7 +134,9 @@ 
def script_code(self, script_code): :type: str """ if script_code is None: - raise ValueError("Invalid value for `script_code`, must not be `None`") # noqa: E501 + raise ValueError( + "Invalid value for `script_code`, must not be `None`" + ) self._script_code = script_code @@ -139,18 +147,20 @@ def to_dict(self): for attr, _ in six.iteritems(self.swagger_types): value = getattr(self, attr) if isinstance(value, list): - result[attr] = list(map( - lambda x: x.to_dict() if hasattr(x, "to_dict") else x, - value - )) + result[attr] = list( + map(lambda x: x.to_dict() if hasattr(x, "to_dict") else x, value) + ) elif hasattr(value, "to_dict"): result[attr] = value.to_dict() elif isinstance(value, dict): - result[attr] = dict(map( - lambda item: (item[0], item[1].to_dict()) - if hasattr(item[1], "to_dict") else item, - value.items() - )) + result[attr] = dict( + map( + lambda item: (item[0], item[1].to_dict()) + if hasattr(item[1], "to_dict") + else item, + value.items(), + ) + ) else: result[attr] = value if issubclass(ApiModelScript, dict): diff --git a/api/client/swagger_client/models/api_notebook.py b/api/client/swagger_client/models/api_notebook.py index 8c608ade..3b60e821 100644 --- a/api/client/swagger_client/models/api_notebook.py +++ b/api/client/swagger_client/models/api_notebook.py @@ -14,9 +14,9 @@ """ -import pprint +import pprint # noqa: F401 import re # noqa: F401 -import six +import six # noqa: F401 from swagger_client.models.api_asset import ApiAsset from swagger_client.models.api_metadata import ApiMetadata # noqa: F401,E501 @@ -37,34 +37,47 @@ class ApiNotebook(ApiAsset): and the value is json key in definition. 
""" swagger_types = { - 'id': 'str', - 'created_at': 'datetime', - 'name': 'str', - 'description': 'str', - 'featured': 'bool', - 'publish_approved': 'bool', - 'related_assets': 'list[str]', - 'filter_categories': 'dict(str, str)', - 'url': 'str', - 'metadata': 'ApiMetadata', - 'parameters': 'list[ApiParameter]' + "id": "str", + "created_at": "datetime", + "name": "str", + "description": "str", + "featured": "bool", + "publish_approved": "bool", + "related_assets": "list[str]", + "filter_categories": "dict(str, str)", + "url": "str", + "metadata": "ApiMetadata", + "parameters": "list[ApiParameter]", } attribute_map = { - 'id': 'id', - 'created_at': 'created_at', - 'name': 'name', - 'description': 'description', - 'featured': 'featured', - 'publish_approved': 'publish_approved', - 'related_assets': 'related_assets', - 'filter_categories': 'filter_categories', - 'url': 'url', - 'metadata': 'metadata', - 'parameters': 'parameters' + "id": "id", + "created_at": "created_at", + "name": "name", + "description": "description", + "featured": "featured", + "publish_approved": "publish_approved", + "related_assets": "related_assets", + "filter_categories": "filter_categories", + "url": "url", + "metadata": "metadata", + "parameters": "parameters", } - def __init__(self, id=None, created_at=None, name=None, description=None, featured=None, publish_approved=None, related_assets=None, filter_categories=None, url=None, metadata=None, parameters=None): # noqa: E501 + def __init__( + self, + id=None, + created_at=None, + name=None, + description=None, + featured=None, + publish_approved=None, + related_assets=None, + filter_categories=None, + url=None, + metadata=None, + parameters=None, + ): # noqa: E501 """ApiNotebook - a model defined in Swagger""" # noqa: E501 self._id = None @@ -161,7 +174,9 @@ def name(self, name): :type: str """ if name is None: - raise ValueError("Invalid value for `name`, must not be `None`") # noqa: E501 + raise ValueError( + "Invalid value for `name`, 
must not be `None`" + ) self._name = name @@ -184,7 +199,9 @@ def description(self, description): :type: str """ if description is None: - raise ValueError("Invalid value for `description`, must not be `None`") # noqa: E501 + raise ValueError( + "Invalid value for `description`, must not be `None`" + ) self._description = description @@ -293,7 +310,9 @@ def url(self, url): :type: str """ if url is None: - raise ValueError("Invalid value for `url`, must not be `None`") # noqa: E501 + raise ValueError( + "Invalid value for `url`, must not be `None`" + ) self._url = url @@ -346,18 +365,20 @@ def to_dict(self): for attr, _ in six.iteritems(self.swagger_types): value = getattr(self, attr) if isinstance(value, list): - result[attr] = list(map( - lambda x: x.to_dict() if hasattr(x, "to_dict") else x, - value - )) + result[attr] = list( + map(lambda x: x.to_dict() if hasattr(x, "to_dict") else x, value) + ) elif hasattr(value, "to_dict"): result[attr] = value.to_dict() elif isinstance(value, dict): - result[attr] = dict(map( - lambda item: (item[0], item[1].to_dict()) - if hasattr(item[1], "to_dict") else item, - value.items() - )) + result[attr] = dict( + map( + lambda item: (item[0], item[1].to_dict()) + if hasattr(item[1], "to_dict") + else item, + value.items(), + ) + ) else: result[attr] = value if issubclass(ApiNotebook, dict): diff --git a/api/client/swagger_client/models/api_parameter.py b/api/client/swagger_client/models/api_parameter.py index b1c2b206..8194043b 100644 --- a/api/client/swagger_client/models/api_parameter.py +++ b/api/client/swagger_client/models/api_parameter.py @@ -14,10 +14,10 @@ """ -import pprint +import pprint # noqa: F401 import re # noqa: F401 -import six +import six # noqa: F401 from swagger_client.models.any_value import AnyValue # noqa: F401,E501 @@ -36,20 +36,22 @@ class ApiParameter(object): and the value is json key in definition. 
""" swagger_types = { - 'name': 'str', - 'description': 'str', - 'default': 'AnyValue', - 'value': 'AnyValue' + "name": "str", + "description": "str", + "default": "AnyValue", + "value": "AnyValue", } attribute_map = { - 'name': 'name', - 'description': 'description', - 'default': 'default', - 'value': 'value' + "name": "name", + "description": "description", + "default": "default", + "value": "value", } - def __init__(self, name=None, description=None, default=None, value=None): # noqa: E501 + def __init__( + self, name=None, description=None, default=None, value=None + ): # noqa: E501 """ApiParameter - a model defined in Swagger""" # noqa: E501 self._name = None @@ -85,7 +87,9 @@ def name(self, name): :type: str """ if name is None: - raise ValueError("Invalid value for `name`, must not be `None`") # noqa: E501 + raise ValueError( + "Invalid value for `name`, must not be `None`" + ) self._name = name @@ -159,18 +163,20 @@ def to_dict(self): for attr, _ in six.iteritems(self.swagger_types): value = getattr(self, attr) if isinstance(value, list): - result[attr] = list(map( - lambda x: x.to_dict() if hasattr(x, "to_dict") else x, - value - )) + result[attr] = list( + map(lambda x: x.to_dict() if hasattr(x, "to_dict") else x, value) + ) elif hasattr(value, "to_dict"): result[attr] = value.to_dict() elif isinstance(value, dict): - result[attr] = dict(map( - lambda item: (item[0], item[1].to_dict()) - if hasattr(item[1], "to_dict") else item, - value.items() - )) + result[attr] = dict( + map( + lambda item: (item[0], item[1].to_dict()) + if hasattr(item[1], "to_dict") + else item, + value.items(), + ) + ) else: result[attr] = value if issubclass(ApiParameter, dict): diff --git a/api/client/swagger_client/models/api_pipeline.py b/api/client/swagger_client/models/api_pipeline.py index 8546a04b..947110b5 100644 --- a/api/client/swagger_client/models/api_pipeline.py +++ b/api/client/swagger_client/models/api_pipeline.py @@ -14,10 +14,10 @@ """ -import pprint +import pprint 
# noqa: F401 import re # noqa: F401 -import six +import six # noqa: F401 from swagger_client.models.api_parameter import ApiParameter # noqa: F401,E501 @@ -36,28 +36,38 @@ class ApiPipeline(object): and the value is json key in definition. """ swagger_types = { - 'id': 'str', - 'created_at': 'datetime', - 'name': 'str', - 'description': 'str', - 'parameters': 'list[ApiParameter]', - 'status': 'str', - 'default_version_id': 'str', - 'namespace': 'str' + "id": "str", + "created_at": "datetime", + "name": "str", + "description": "str", + "parameters": "list[ApiParameter]", + "status": "str", + "default_version_id": "str", + "namespace": "str", } attribute_map = { - 'id': 'id', - 'created_at': 'created_at', - 'name': 'name', - 'description': 'description', - 'parameters': 'parameters', - 'status': 'status', - 'default_version_id': 'default_version_id', - 'namespace': 'namespace' + "id": "id", + "created_at": "created_at", + "name": "name", + "description": "description", + "parameters": "parameters", + "status": "status", + "default_version_id": "default_version_id", + "namespace": "namespace", } - def __init__(self, id=None, created_at=None, name=None, description=None, parameters=None, status=None, default_version_id=None, namespace=None): # noqa: E501 + def __init__( + self, + id=None, + created_at=None, + name=None, + description=None, + parameters=None, + status=None, + default_version_id=None, + namespace=None, + ): # noqa: E501 """ApiPipeline - a model defined in Swagger""" # noqa: E501 self._id = None @@ -219,7 +229,7 @@ def status(self, status): def default_version_id(self): """Gets the default_version_id of this ApiPipeline. # noqa: E501 - The default version of the pipeline. As of now, the latest version is used as default. (In the future, if desired by customers, we can allow them to set default version.) # noqa: E501 + The default version of the pipeline. As of now, the latest version is used as default. 
(In the future, if desired by customers, we can allow them to set default version.) :return: The default_version_id of this ApiPipeline. # noqa: E501 :rtype: str @@ -230,7 +240,7 @@ def default_version_id(self): def default_version_id(self, default_version_id): """Sets the default_version_id of this ApiPipeline. - The default version of the pipeline. As of now, the latest version is used as default. (In the future, if desired by customers, we can allow them to set default version.) # noqa: E501 + The default version of the pipeline. As of now, the latest version is used as default. (In the future, if desired by customers, we can allow them to set default version.) :param default_version_id: The default_version_id of this ApiPipeline. # noqa: E501 :type: str @@ -266,18 +276,20 @@ def to_dict(self): for attr, _ in six.iteritems(self.swagger_types): value = getattr(self, attr) if isinstance(value, list): - result[attr] = list(map( - lambda x: x.to_dict() if hasattr(x, "to_dict") else x, - value - )) + result[attr] = list( + map(lambda x: x.to_dict() if hasattr(x, "to_dict") else x, value) + ) elif hasattr(value, "to_dict"): result[attr] = value.to_dict() elif isinstance(value, dict): - result[attr] = dict(map( - lambda item: (item[0], item[1].to_dict()) - if hasattr(item[1], "to_dict") else item, - value.items() - )) + result[attr] = dict( + map( + lambda item: (item[0], item[1].to_dict()) + if hasattr(item[1], "to_dict") + else item, + value.items(), + ) + ) else: result[attr] = value if issubclass(ApiPipeline, dict): diff --git a/api/client/swagger_client/models/api_pipeline_custom.py b/api/client/swagger_client/models/api_pipeline_custom.py index 2a28c336..6a2df615 100644 --- a/api/client/swagger_client/models/api_pipeline_custom.py +++ b/api/client/swagger_client/models/api_pipeline_custom.py @@ -14,13 +14,15 @@ """ -import pprint +import pprint # noqa: F401 import re # noqa: F401 -import six +import six # noqa: F401 from swagger_client.models.api_pipeline_dag 
import ApiPipelineDAG # noqa: F401,E501 -from swagger_client.models.api_pipeline_inputs import ApiPipelineInputs # noqa: F401,E501 +from swagger_client.models.api_pipeline_inputs import ( # noqa: F401 + ApiPipelineInputs, +) class ApiPipelineCustom(object): @@ -37,20 +39,22 @@ class ApiPipelineCustom(object): and the value is json key in definition. """ swagger_types = { - 'dag': 'ApiPipelineDAG', - 'inputs': 'ApiPipelineInputs', - 'name': 'str', - 'description': 'str' + "dag": "ApiPipelineDAG", + "inputs": "ApiPipelineInputs", + "name": "str", + "description": "str", } attribute_map = { - 'dag': 'dag', - 'inputs': 'inputs', - 'name': 'name', - 'description': 'description' + "dag": "dag", + "inputs": "inputs", + "name": "name", + "description": "description", } - def __init__(self, dag=None, inputs=None, name=None, description=None): # noqa: E501 + def __init__( + self, dag=None, inputs=None, name=None, description=None + ): # noqa: E501 """ApiPipelineCustom - a model defined in Swagger""" # noqa: E501 self._dag = None @@ -85,7 +89,9 @@ def dag(self, dag): :type: ApiPipelineDAG """ if dag is None: - raise ValueError("Invalid value for `dag`, must not be `None`") # noqa: E501 + raise ValueError( + "Invalid value for `dag`, must not be `None`" + ) self._dag = dag @@ -131,7 +137,9 @@ def name(self, name): :type: str """ if name is None: - raise ValueError("Invalid value for `name`, must not be `None`") # noqa: E501 + raise ValueError( + "Invalid value for `name`, must not be `None`" + ) self._name = name @@ -165,18 +173,20 @@ def to_dict(self): for attr, _ in six.iteritems(self.swagger_types): value = getattr(self, attr) if isinstance(value, list): - result[attr] = list(map( - lambda x: x.to_dict() if hasattr(x, "to_dict") else x, - value - )) + result[attr] = list( + map(lambda x: x.to_dict() if hasattr(x, "to_dict") else x, value) + ) elif hasattr(value, "to_dict"): result[attr] = value.to_dict() elif isinstance(value, dict): - result[attr] = dict(map( - lambda 
item: (item[0], item[1].to_dict()) - if hasattr(item[1], "to_dict") else item, - value.items() - )) + result[attr] = dict( + map( + lambda item: (item[0], item[1].to_dict()) + if hasattr(item[1], "to_dict") + else item, + value.items(), + ) + ) else: result[attr] = value if issubclass(ApiPipelineCustom, dict): diff --git a/api/client/swagger_client/models/api_pipeline_custom_run_payload.py b/api/client/swagger_client/models/api_pipeline_custom_run_payload.py index 14b49cb0..50422297 100644 --- a/api/client/swagger_client/models/api_pipeline_custom_run_payload.py +++ b/api/client/swagger_client/models/api_pipeline_custom_run_payload.py @@ -14,12 +14,14 @@ """ -import pprint +import pprint # noqa: F401 import re # noqa: F401 -import six +import six # noqa: F401 -from swagger_client.models.api_pipeline_custom import ApiPipelineCustom # noqa: F401,E501 +from swagger_client.models.api_pipeline_custom import ( # noqa: F401 + ApiPipelineCustom, +) from swagger_client.models.dictionary import Dictionary # noqa: F401,E501 @@ -37,13 +39,13 @@ class ApiPipelineCustomRunPayload(object): and the value is json key in definition. 
""" swagger_types = { - 'custom_pipeline': 'ApiPipelineCustom', - 'run_parameters': 'Dictionary' + "custom_pipeline": "ApiPipelineCustom", + "run_parameters": "Dictionary", } attribute_map = { - 'custom_pipeline': 'custom_pipeline', - 'run_parameters': 'run_parameters' + "custom_pipeline": "custom_pipeline", + "run_parameters": "run_parameters", } def __init__(self, custom_pipeline=None, run_parameters=None): # noqa: E501 @@ -107,18 +109,20 @@ def to_dict(self): for attr, _ in six.iteritems(self.swagger_types): value = getattr(self, attr) if isinstance(value, list): - result[attr] = list(map( - lambda x: x.to_dict() if hasattr(x, "to_dict") else x, - value - )) + result[attr] = list( + map(lambda x: x.to_dict() if hasattr(x, "to_dict") else x, value) + ) elif hasattr(value, "to_dict"): result[attr] = value.to_dict() elif isinstance(value, dict): - result[attr] = dict(map( - lambda item: (item[0], item[1].to_dict()) - if hasattr(item[1], "to_dict") else item, - value.items() - )) + result[attr] = dict( + map( + lambda item: (item[0], item[1].to_dict()) + if hasattr(item[1], "to_dict") + else item, + value.items(), + ) + ) else: result[attr] = value if issubclass(ApiPipelineCustomRunPayload, dict): diff --git a/api/client/swagger_client/models/api_pipeline_dag.py b/api/client/swagger_client/models/api_pipeline_dag.py index 44ecde1b..d1a39ead 100644 --- a/api/client/swagger_client/models/api_pipeline_dag.py +++ b/api/client/swagger_client/models/api_pipeline_dag.py @@ -14,10 +14,10 @@ """ -import pprint +import pprint # noqa: F401 import re # noqa: F401 -import six +import six # noqa: F401 from swagger_client.models.api_pipeline_task import ApiPipelineTask # noqa: F401,E501 @@ -35,13 +35,9 @@ class ApiPipelineDAG(object): attribute_map (dict): The key is attribute name and the value is json key in definition. 
""" - swagger_types = { - 'tasks': 'list[ApiPipelineTask]' - } + swagger_types = {"tasks": "list[ApiPipelineTask]"} - attribute_map = { - 'tasks': 'tasks' - } + attribute_map = {"tasks": "tasks"} def __init__(self, tasks=None): # noqa: E501 """ApiPipelineDAG - a model defined in Swagger""" # noqa: E501 @@ -82,18 +78,20 @@ def to_dict(self): for attr, _ in six.iteritems(self.swagger_types): value = getattr(self, attr) if isinstance(value, list): - result[attr] = list(map( - lambda x: x.to_dict() if hasattr(x, "to_dict") else x, - value - )) + result[attr] = list( + map(lambda x: x.to_dict() if hasattr(x, "to_dict") else x, value) + ) elif hasattr(value, "to_dict"): result[attr] = value.to_dict() elif isinstance(value, dict): - result[attr] = dict(map( - lambda item: (item[0], item[1].to_dict()) - if hasattr(item[1], "to_dict") else item, - value.items() - )) + result[attr] = dict( + map( + lambda item: (item[0], item[1].to_dict()) + if hasattr(item[1], "to_dict") + else item, + value.items(), + ) + ) else: result[attr] = value if issubclass(ApiPipelineDAG, dict): diff --git a/api/client/swagger_client/models/api_pipeline_extended.py b/api/client/swagger_client/models/api_pipeline_extended.py index b2d4f8dc..a67d452c 100644 --- a/api/client/swagger_client/models/api_pipeline_extended.py +++ b/api/client/swagger_client/models/api_pipeline_extended.py @@ -13,15 +13,16 @@ Generated by: https://github.com/swagger-api/swagger-codegen.git """ - -import pprint +import pprint # noqa: F401 # noqa: F401 import re # noqa: F401 -import six +import six # noqa: F401 from swagger_client.models.api_parameter import ApiParameter # noqa: F401,E501 from swagger_client.models.api_pipeline import ApiPipeline # noqa: F401,E501 -from swagger_client.models.api_pipeline_extension import ApiPipelineExtension # noqa: F401,E501 +from swagger_client.models.api_pipeline_extension import ( # noqa: F401 + ApiPipelineExtension, +) class ApiPipelineExtended(ApiPipeline, ApiPipelineExtension): @@ 
-38,34 +39,47 @@ class ApiPipelineExtended(ApiPipeline, ApiPipelineExtension): and the value is json key in definition. """ swagger_types = { - 'id': 'str', - 'created_at': 'datetime', - 'name': 'str', - 'description': 'str', - 'parameters': 'list[ApiParameter]', - 'status': 'str', - 'default_version_id': 'str', - 'namespace': 'str', - 'annotations': 'dict(str, str)', - 'featured': 'bool', - 'publish_approved': 'bool' + "id": "str", + "created_at": "datetime", + "name": "str", + "description": "str", + "parameters": "list[ApiParameter]", + "status": "str", + "default_version_id": "str", + "namespace": "str", + "annotations": "dict(str, str)", + "featured": "bool", + "publish_approved": "bool", } attribute_map = { - 'id': 'id', - 'created_at': 'created_at', - 'name': 'name', - 'description': 'description', - 'parameters': 'parameters', - 'status': 'status', - 'default_version_id': 'default_version_id', - 'namespace': 'namespace', - 'annotations': 'annotations', - 'featured': 'featured', - 'publish_approved': 'publish_approved' + "id": "id", + "created_at": "created_at", + "name": "name", + "description": "description", + "parameters": "parameters", + "status": "status", + "default_version_id": "default_version_id", + "namespace": "namespace", + "annotations": "annotations", + "featured": "featured", + "publish_approved": "publish_approved", } - def __init__(self, id=None, created_at=None, name=None, description=None, parameters=None, status=None, default_version_id=None, namespace=None, annotations=None, featured=None, publish_approved=None): # noqa: E501 + def __init__( + self, + id=None, + created_at=None, + name=None, + description=None, + parameters=None, + status=None, + default_version_id=None, + namespace=None, + annotations=None, + featured=None, + publish_approved=None, + ): # noqa: E501 """ApiPipelineExtended - a model defined in Swagger""" # noqa: E501 self._id = None diff --git a/api/client/swagger_client/models/api_pipeline_extension.py 
b/api/client/swagger_client/models/api_pipeline_extension.py index 5a178fa2..2c863507 100644 --- a/api/client/swagger_client/models/api_pipeline_extension.py +++ b/api/client/swagger_client/models/api_pipeline_extension.py @@ -14,10 +14,10 @@ """ -import pprint +import pprint # noqa: F401 import re # noqa: F401 -import six +import six # noqa: F401 class ApiPipelineExtension(object): @@ -34,20 +34,22 @@ class ApiPipelineExtension(object): and the value is json key in definition. """ swagger_types = { - 'id': 'str', - 'annotations': 'dict(str, str)', - 'featured': 'bool', - 'publish_approved': 'bool' + "id": "str", + "annotations": "dict(str, str)", + "featured": "bool", + "publish_approved": "bool", } attribute_map = { - 'id': 'id', - 'annotations': 'annotations', - 'featured': 'featured', - 'publish_approved': 'publish_approved' + "id": "id", + "annotations": "annotations", + "featured": "featured", + "publish_approved": "publish_approved", } - def __init__(self, id=None, annotations=None, featured=None, publish_approved=None): # noqa: E501 + def __init__( + self, id=None, annotations=None, featured=None, publish_approved=None + ): # noqa: E501 """ApiPipelineExtension - a model defined in Swagger""" # noqa: E501 self._id = None @@ -156,18 +158,20 @@ def to_dict(self): for attr, _ in six.iteritems(self.swagger_types): value = getattr(self, attr) if isinstance(value, list): - result[attr] = list(map( - lambda x: x.to_dict() if hasattr(x, "to_dict") else x, - value - )) + result[attr] = list( + map(lambda x: x.to_dict() if hasattr(x, "to_dict") else x, value) + ) elif hasattr(value, "to_dict"): result[attr] = value.to_dict() elif isinstance(value, dict): - result[attr] = dict(map( - lambda item: (item[0], item[1].to_dict()) - if hasattr(item[1], "to_dict") else item, - value.items() - )) + result[attr] = dict( + map( + lambda item: (item[0], item[1].to_dict()) + if hasattr(item[1], "to_dict") + else item, + value.items(), + ) + ) else: result[attr] = value if 
issubclass(ApiPipelineExtension, dict): diff --git a/api/client/swagger_client/models/api_pipeline_inputs.py b/api/client/swagger_client/models/api_pipeline_inputs.py index 84349a36..15275a24 100644 --- a/api/client/swagger_client/models/api_pipeline_inputs.py +++ b/api/client/swagger_client/models/api_pipeline_inputs.py @@ -14,10 +14,10 @@ """ -import pprint +import pprint # noqa: F401 import re # noqa: F401 -import six +import six # noqa: F401 from swagger_client.models.api_parameter import ApiParameter # noqa: F401,E501 @@ -35,13 +35,9 @@ class ApiPipelineInputs(object): attribute_map (dict): The key is attribute name and the value is json key in definition. """ - swagger_types = { - 'parameters': 'list[ApiParameter]' - } + swagger_types = {"parameters": "list[ApiParameter]"} - attribute_map = { - 'parameters': 'parameters' - } + attribute_map = {"parameters": "parameters"} def __init__(self, parameters=None): # noqa: E501 """ApiPipelineInputs - a model defined in Swagger""" # noqa: E501 @@ -82,18 +78,20 @@ def to_dict(self): for attr, _ in six.iteritems(self.swagger_types): value = getattr(self, attr) if isinstance(value, list): - result[attr] = list(map( - lambda x: x.to_dict() if hasattr(x, "to_dict") else x, - value - )) + result[attr] = list( + map(lambda x: x.to_dict() if hasattr(x, "to_dict") else x, value) + ) elif hasattr(value, "to_dict"): result[attr] = value.to_dict() elif isinstance(value, dict): - result[attr] = dict(map( - lambda item: (item[0], item[1].to_dict()) - if hasattr(item[1], "to_dict") else item, - value.items() - )) + result[attr] = dict( + map( + lambda item: (item[0], item[1].to_dict()) + if hasattr(item[1], "to_dict") + else item, + value.items(), + ) + ) else: result[attr] = value if issubclass(ApiPipelineInputs, dict): diff --git a/api/client/swagger_client/models/api_pipeline_task.py b/api/client/swagger_client/models/api_pipeline_task.py index 405973ce..f61c14ef 100644 --- a/api/client/swagger_client/models/api_pipeline_task.py 
+++ b/api/client/swagger_client/models/api_pipeline_task.py @@ -14,12 +14,14 @@ """ -import pprint +import pprint # noqa: F401 import re # noqa: F401 -import six +import six # noqa: F401 -from swagger_client.models.api_pipeline_task_arguments import ApiPipelineTaskArguments # noqa: F401,E501 +from swagger_client.models.api_pipeline_task_arguments import ( # noqa: F401 + ApiPipelineTaskArguments, +) class ApiPipelineTask(object): @@ -36,22 +38,29 @@ class ApiPipelineTask(object): and the value is json key in definition. """ swagger_types = { - 'name': 'str', - 'artifact_type': 'str', - 'artifact_id': 'str', - 'arguments': 'ApiPipelineTaskArguments', - 'dependencies': 'list[str]' + "name": "str", + "artifact_type": "str", + "artifact_id": "str", + "arguments": "ApiPipelineTaskArguments", + "dependencies": "list[str]", } attribute_map = { - 'name': 'name', - 'artifact_type': 'artifact_type', - 'artifact_id': 'artifact_id', - 'arguments': 'arguments', - 'dependencies': 'dependencies' + "name": "name", + "artifact_type": "artifact_type", + "artifact_id": "artifact_id", + "arguments": "arguments", + "dependencies": "dependencies", } - def __init__(self, name=None, artifact_type=None, artifact_id=None, arguments=None, dependencies=None): # noqa: E501 + def __init__( + self, + name=None, + artifact_type=None, + artifact_id=None, + arguments=None, + dependencies=None, + ): # noqa: E501 """ApiPipelineTask - a model defined in Swagger""" # noqa: E501 self._name = None @@ -112,7 +121,9 @@ def artifact_type(self, artifact_type): :type: str """ if artifact_type is None: - raise ValueError("Invalid value for `artifact_type`, must not be `None`") # noqa: E501 + raise ValueError( + "Invalid value for `artifact_type`, must not be `None`" + ) self._artifact_type = artifact_type @@ -137,7 +148,9 @@ def artifact_id(self, artifact_id): :type: str """ if artifact_id is None: - raise ValueError("Invalid value for `artifact_id`, must not be `None`") # noqa: E501 + raise ValueError( + 
"Invalid value for `artifact_id`, must not be `None`" + ) self._artifact_id = artifact_id @@ -192,18 +205,20 @@ def to_dict(self): for attr, _ in six.iteritems(self.swagger_types): value = getattr(self, attr) if isinstance(value, list): - result[attr] = list(map( - lambda x: x.to_dict() if hasattr(x, "to_dict") else x, - value - )) + result[attr] = list( + map(lambda x: x.to_dict() if hasattr(x, "to_dict") else x, value) + ) elif hasattr(value, "to_dict"): result[attr] = value.to_dict() elif isinstance(value, dict): - result[attr] = dict(map( - lambda item: (item[0], item[1].to_dict()) - if hasattr(item[1], "to_dict") else item, - value.items() - )) + result[attr] = dict( + map( + lambda item: (item[0], item[1].to_dict()) + if hasattr(item[1], "to_dict") + else item, + value.items(), + ) + ) else: result[attr] = value if issubclass(ApiPipelineTask, dict): diff --git a/api/client/swagger_client/models/api_pipeline_task_arguments.py b/api/client/swagger_client/models/api_pipeline_task_arguments.py index 13bd3be0..3455505c 100644 --- a/api/client/swagger_client/models/api_pipeline_task_arguments.py +++ b/api/client/swagger_client/models/api_pipeline_task_arguments.py @@ -14,10 +14,10 @@ """ -import pprint +import pprint # noqa: F401 import re # noqa: F401 -import six +import six # noqa: F401 from swagger_client.models.api_parameter import ApiParameter # noqa: F401,E501 @@ -35,13 +35,9 @@ class ApiPipelineTaskArguments(object): attribute_map (dict): The key is attribute name and the value is json key in definition. 
""" - swagger_types = { - 'parameters': 'list[ApiParameter]' - } + swagger_types = {"parameters": "list[ApiParameter]"} - attribute_map = { - 'parameters': 'parameters' - } + attribute_map = {"parameters": "parameters"} def __init__(self, parameters=None): # noqa: E501 """ApiPipelineTaskArguments - a model defined in Swagger""" # noqa: E501 @@ -80,18 +76,20 @@ def to_dict(self): for attr, _ in six.iteritems(self.swagger_types): value = getattr(self, attr) if isinstance(value, list): - result[attr] = list(map( - lambda x: x.to_dict() if hasattr(x, "to_dict") else x, - value - )) + result[attr] = list( + map(lambda x: x.to_dict() if hasattr(x, "to_dict") else x, value) + ) elif hasattr(value, "to_dict"): result[attr] = value.to_dict() elif isinstance(value, dict): - result[attr] = dict(map( - lambda item: (item[0], item[1].to_dict()) - if hasattr(item[1], "to_dict") else item, - value.items() - )) + result[attr] = dict( + map( + lambda item: (item[0], item[1].to_dict()) + if hasattr(item[1], "to_dict") + else item, + value.items(), + ) + ) else: result[attr] = value if issubclass(ApiPipelineTaskArguments, dict): diff --git a/api/client/swagger_client/models/api_run_code_response.py b/api/client/swagger_client/models/api_run_code_response.py index 23bf180c..309cbf29 100644 --- a/api/client/swagger_client/models/api_run_code_response.py +++ b/api/client/swagger_client/models/api_run_code_response.py @@ -14,10 +14,10 @@ """ -import pprint +import pprint # noqa: F401 import re # noqa: F401 -import six +import six # noqa: F401 class ApiRunCodeResponse(object): @@ -33,15 +33,9 @@ class ApiRunCodeResponse(object): attribute_map (dict): The key is attribute name and the value is json key in definition. 
""" - swagger_types = { - 'run_url': 'str', - 'run_output_location': 'str' - } + swagger_types = {"run_url": "str", "run_output_location": "str"} - attribute_map = { - 'run_url': 'run_url', - 'run_output_location': 'run_output_location' - } + attribute_map = {"run_url": "run_url", "run_output_location": "run_output_location"} def __init__(self, run_url=None, run_output_location=None): # noqa: E501 """ApiRunCodeResponse - a model defined in Swagger""" # noqa: E501 @@ -75,7 +69,9 @@ def run_url(self, run_url): :type: str """ if run_url is None: - raise ValueError("Invalid value for `run_url`, must not be `None`") # noqa: E501 + raise ValueError( + "Invalid value for `run_url`, must not be `None`" + ) self._run_url = run_url @@ -109,18 +105,20 @@ def to_dict(self): for attr, _ in six.iteritems(self.swagger_types): value = getattr(self, attr) if isinstance(value, list): - result[attr] = list(map( - lambda x: x.to_dict() if hasattr(x, "to_dict") else x, - value - )) + result[attr] = list( + map(lambda x: x.to_dict() if hasattr(x, "to_dict") else x, value) + ) elif hasattr(value, "to_dict"): result[attr] = value.to_dict() elif isinstance(value, dict): - result[attr] = dict(map( - lambda item: (item[0], item[1].to_dict()) - if hasattr(item[1], "to_dict") else item, - value.items() - )) + result[attr] = dict( + map( + lambda item: (item[0], item[1].to_dict()) + if hasattr(item[1], "to_dict") + else item, + value.items(), + ) + ) else: result[attr] = value if issubclass(ApiRunCodeResponse, dict): diff --git a/api/client/swagger_client/models/api_settings.py b/api/client/swagger_client/models/api_settings.py index 8b7656c5..30f39d8f 100644 --- a/api/client/swagger_client/models/api_settings.py +++ b/api/client/swagger_client/models/api_settings.py @@ -14,12 +14,14 @@ """ -import pprint +import pprint # noqa: F401 import re # noqa: F401 -import six +import six # noqa: F401 -from swagger_client.models.api_settings_section import ApiSettingsSection # noqa: F401,E501 +from 
swagger_client.models.api_settings_section import ( # noqa: F401 + ApiSettingsSection, +) class ApiSettings(object): @@ -35,13 +37,9 @@ class ApiSettings(object): attribute_map (dict): The key is attribute name and the value is json key in definition. """ - swagger_types = { - 'sections': 'list[ApiSettingsSection]' - } + swagger_types = {"sections": "list[ApiSettingsSection]"} - attribute_map = { - 'sections': 'sections' - } + attribute_map = {"sections": "sections"} def __init__(self, sections=None): # noqa: E501 """ApiSettings - a model defined in Swagger""" # noqa: E501 @@ -72,7 +70,9 @@ def sections(self, sections): :type: list[ApiSettingsSection] """ if sections is None: - raise ValueError("Invalid value for `sections`, must not be `None`") # noqa: E501 + raise ValueError( + "Invalid value for `sections`, must not be `None`" + ) self._sections = sections @@ -83,18 +83,20 @@ def to_dict(self): for attr, _ in six.iteritems(self.swagger_types): value = getattr(self, attr) if isinstance(value, list): - result[attr] = list(map( - lambda x: x.to_dict() if hasattr(x, "to_dict") else x, - value - )) + result[attr] = list( + map(lambda x: x.to_dict() if hasattr(x, "to_dict") else x, value) + ) elif hasattr(value, "to_dict"): result[attr] = value.to_dict() elif isinstance(value, dict): - result[attr] = dict(map( - lambda item: (item[0], item[1].to_dict()) - if hasattr(item[1], "to_dict") else item, - value.items() - )) + result[attr] = dict( + map( + lambda item: (item[0], item[1].to_dict()) + if hasattr(item[1], "to_dict") + else item, + value.items(), + ) + ) else: result[attr] = value if issubclass(ApiSettings, dict): diff --git a/api/client/swagger_client/models/api_settings_section.py b/api/client/swagger_client/models/api_settings_section.py index 8c52ff99..292141a5 100644 --- a/api/client/swagger_client/models/api_settings_section.py +++ b/api/client/swagger_client/models/api_settings_section.py @@ -14,10 +14,10 @@ """ -import pprint +import pprint # noqa: F401 
import re # noqa: F401 -import six +import six # noqa: F401 from swagger_client.models.api_parameter import ApiParameter # noqa: F401,E501 @@ -36,15 +36,15 @@ class ApiSettingsSection(object): and the value is json key in definition. """ swagger_types = { - 'name': 'str', - 'description': 'str', - 'settings': 'list[ApiParameter]' + "name": "str", + "description": "str", + "settings": "list[ApiParameter]", } attribute_map = { - 'name': 'name', - 'description': 'description', - 'settings': 'settings' + "name": "name", + "description": "description", + "settings": "settings", } def __init__(self, name=None, description=None, settings=None): # noqa: E501 @@ -82,7 +82,9 @@ def name(self, name): :type: str """ if name is None: - raise ValueError("Invalid value for `name`, must not be `None`") # noqa: E501 + raise ValueError( + "Invalid value for `name`, must not be `None`" + ) self._name = name @@ -139,18 +141,20 @@ def to_dict(self): for attr, _ in six.iteritems(self.swagger_types): value = getattr(self, attr) if isinstance(value, list): - result[attr] = list(map( - lambda x: x.to_dict() if hasattr(x, "to_dict") else x, - value - )) + result[attr] = list( + map(lambda x: x.to_dict() if hasattr(x, "to_dict") else x, value) + ) elif hasattr(value, "to_dict"): result[attr] = value.to_dict() elif isinstance(value, dict): - result[attr] = dict(map( - lambda item: (item[0], item[1].to_dict()) - if hasattr(item[1], "to_dict") else item, - value.items() - )) + result[attr] = dict( + map( + lambda item: (item[0], item[1].to_dict()) + if hasattr(item[1], "to_dict") + else item, + value.items(), + ) + ) else: result[attr] = value if issubclass(ApiSettingsSection, dict): diff --git a/api/client/swagger_client/models/api_status.py b/api/client/swagger_client/models/api_status.py index 0393c10e..b9a51d65 100644 --- a/api/client/swagger_client/models/api_status.py +++ b/api/client/swagger_client/models/api_status.py @@ -14,10 +14,10 @@ """ -import pprint +import pprint # noqa: F401 
import re # noqa: F401 -import six +import six # noqa: F401 from swagger_client.models.protobuf_any import ProtobufAny # noqa: F401,E501 @@ -35,17 +35,9 @@ class ApiStatus(object): attribute_map (dict): The key is attribute name and the value is json key in definition. """ - swagger_types = { - 'error': 'str', - 'code': 'int', - 'details': 'list[ProtobufAny]' - } - - attribute_map = { - 'error': 'error', - 'code': 'code', - 'details': 'details' - } + swagger_types = {"error": "str", "code": "int", "details": "list[ProtobufAny]"} + + attribute_map = {"error": "error", "code": "code", "details": "details"} def __init__(self, error=None, code=None, details=None): # noqa: E501 """ApiStatus - a model defined in Swagger""" # noqa: E501 @@ -132,18 +124,20 @@ def to_dict(self): for attr, _ in six.iteritems(self.swagger_types): value = getattr(self, attr) if isinstance(value, list): - result[attr] = list(map( - lambda x: x.to_dict() if hasattr(x, "to_dict") else x, - value - )) + result[attr] = list( + map(lambda x: x.to_dict() if hasattr(x, "to_dict") else x, value) + ) elif hasattr(value, "to_dict"): result[attr] = value.to_dict() elif isinstance(value, dict): - result[attr] = dict(map( - lambda item: (item[0], item[1].to_dict()) - if hasattr(item[1], "to_dict") else item, - value.items() - )) + result[attr] = dict( + map( + lambda item: (item[0], item[1].to_dict()) + if hasattr(item[1], "to_dict") + else item, + value.items(), + ) + ) else: result[attr] = value if issubclass(ApiStatus, dict): diff --git a/api/client/swagger_client/models/api_url.py b/api/client/swagger_client/models/api_url.py index 7bd94eae..2d34f301 100644 --- a/api/client/swagger_client/models/api_url.py +++ b/api/client/swagger_client/models/api_url.py @@ -14,10 +14,10 @@ """ -import pprint +import pprint # noqa: F401 import re # noqa: F401 -import six +import six # noqa: F401 class ApiUrl(object): @@ -33,13 +33,9 @@ class ApiUrl(object): attribute_map (dict): The key is attribute name and the value 
is json key in definition. """ - swagger_types = { - 'pipeline_url': 'str' - } + swagger_types = {"pipeline_url": "str"} - attribute_map = { - 'pipeline_url': 'pipeline_url' - } + attribute_map = {"pipeline_url": "pipeline_url"} def __init__(self, pipeline_url=None): # noqa: E501 """ApiUrl - a model defined in Swagger""" # noqa: E501 @@ -78,18 +74,20 @@ def to_dict(self): for attr, _ in six.iteritems(self.swagger_types): value = getattr(self, attr) if isinstance(value, list): - result[attr] = list(map( - lambda x: x.to_dict() if hasattr(x, "to_dict") else x, - value - )) + result[attr] = list( + map(lambda x: x.to_dict() if hasattr(x, "to_dict") else x, value) + ) elif hasattr(value, "to_dict"): result[attr] = value.to_dict() elif isinstance(value, dict): - result[attr] = dict(map( - lambda item: (item[0], item[1].to_dict()) - if hasattr(item[1], "to_dict") else item, - value.items() - )) + result[attr] = dict( + map( + lambda item: (item[0], item[1].to_dict()) + if hasattr(item[1], "to_dict") + else item, + value.items(), + ) + ) else: result[attr] = value if issubclass(ApiUrl, dict): diff --git a/api/client/swagger_client/models/dictionary.py b/api/client/swagger_client/models/dictionary.py index b10e9562..122a39f0 100644 --- a/api/client/swagger_client/models/dictionary.py +++ b/api/client/swagger_client/models/dictionary.py @@ -14,10 +14,10 @@ """ -import pprint +import pprint # noqa: F401 import re # noqa: F401 -import six +import six # noqa: F401 from swagger_client.models.any_value import AnyValue # noqa: F401,E501 @@ -35,11 +35,9 @@ class Dictionary(dict): attribute_map (dict): The key is attribute name and the value is json key in definition. 
""" - swagger_types = { - } + swagger_types = {} - attribute_map = { - } + attribute_map = {} def __init__(self): # noqa: E501 """Dictionary - a model defined in Swagger""" # noqa: E501 @@ -52,18 +50,20 @@ def to_dict(self): for attr, _ in six.iteritems(self.swagger_types): value = getattr(self, attr) if isinstance(value, list): - result[attr] = list(map( - lambda x: x.to_dict() if hasattr(x, "to_dict") else x, - value - )) + result[attr] = list( + map(lambda x: x.to_dict() if hasattr(x, "to_dict") else x, value) + ) elif hasattr(value, "to_dict"): result[attr] = value.to_dict() elif isinstance(value, dict): - result[attr] = dict(map( - lambda item: (item[0], item[1].to_dict()) - if hasattr(item[1], "to_dict") else item, - value.items() - )) + result[attr] = dict( + map( + lambda item: (item[0], item[1].to_dict()) + if hasattr(item[1], "to_dict") + else item, + value.items(), + ) + ) else: result[attr] = value if issubclass(Dictionary, dict): diff --git a/api/client/swagger_client/models/protobuf_any.py b/api/client/swagger_client/models/protobuf_any.py index 40e14b00..e23808ed 100644 --- a/api/client/swagger_client/models/protobuf_any.py +++ b/api/client/swagger_client/models/protobuf_any.py @@ -14,10 +14,10 @@ """ -import pprint +import pprint # noqa: F401 import re # noqa: F401 -import six +import six # noqa: F401 class ProtobufAny(object): @@ -33,15 +33,9 @@ class ProtobufAny(object): attribute_map (dict): The key is attribute name and the value is json key in definition. """ - swagger_types = { - 'type_url': 'str', - 'value': 'str' - } + swagger_types = {"type_url": "str", "value": "str"} - attribute_map = { - 'type_url': 'type_url', - 'value': 'value' - } + attribute_map = {"type_url": "type_url", "value": "value"} def __init__(self, type_url=None, value=None): # noqa: E501 """ProtobufAny - a model defined in Swagger""" # noqa: E501 @@ -98,8 +92,13 @@ def value(self, value): :param value: The value of this ProtobufAny. 
# noqa: E501 :type: str """ - if value is not None and not re.search(r'^(?:[A-Za-z0-9+\/]{4})*(?:[A-Za-z0-9+\/]{2}==|[A-Za-z0-9+\/]{3}=)?$', value): # noqa: E501 - raise ValueError(r"Invalid value for `value`, must be a follow pattern or equal to `/^(?:[A-Za-z0-9+\/]{4})*(?:[A-Za-z0-9+\/]{2}==|[A-Za-z0-9+\/]{3}=)?$/`") # noqa: E501 + if value is not None and not re.search( + r"^(?:[A-Za-z0-9+\/]{4})*(?:[A-Za-z0-9+\/]{2}==|[A-Za-z0-9+\/]{3}=)?$", + value, + ): # noqa: E501 + raise ValueError( + r"Invalid value for `value`, must be a follow pattern or equal to `/^(?:[A-Za-z0-9+\/]{4})*(?:[A-Za-z0-9+\/]{2}==|[A-Za-z0-9+\/]{3}=)?$/`" # noqa: E501 + ) self._value = value @@ -110,18 +109,20 @@ def to_dict(self): for attr, _ in six.iteritems(self.swagger_types): value = getattr(self, attr) if isinstance(value, list): - result[attr] = list(map( - lambda x: x.to_dict() if hasattr(x, "to_dict") else x, - value - )) + result[attr] = list( + map(lambda x: x.to_dict() if hasattr(x, "to_dict") else x, value) + ) elif hasattr(value, "to_dict"): result[attr] = value.to_dict() elif isinstance(value, dict): - result[attr] = dict(map( - lambda item: (item[0], item[1].to_dict()) - if hasattr(item[1], "to_dict") else item, - value.items() - )) + result[attr] = dict( + map( + lambda item: (item[0], item[1].to_dict()) + if hasattr(item[1], "to_dict") + else item, + value.items(), + ) + ) else: result[attr] = value if issubclass(ProtobufAny, dict): diff --git a/api/client/swagger_client/rest.py b/api/client/swagger_client/rest.py index dbff75e3..0a2081cb 100644 --- a/api/client/swagger_client/rest.py +++ b/api/client/swagger_client/rest.py @@ -23,21 +23,21 @@ import ssl import certifi + # python 2 and python 3 compatibility library -import six +import six # noqa: F401 from six.moves.urllib.parse import urlencode try: import urllib3 except ImportError: - raise ImportError('Swagger python client requires urllib3.') + raise ImportError("Swagger python client requires urllib3.") logger = 
logging.getLogger(__name__) class RESTResponse(io.IOBase): - def __init__(self, resp): self.urllib3_response = resp self.status = resp.status @@ -54,7 +54,6 @@ def getheader(self, name, default=None): class RESTClientObject(object): - def __init__(self, configuration, pools_size=4, maxsize=None): # urllib3.PoolManager will pass all kw parameters to connectionpool # https://github.com/shazow/urllib3/blob/f9409436f83aeb79fbaf090181cd81b784f1b8ce/urllib3/poolmanager.py#L75 # noqa: E501 @@ -77,7 +76,9 @@ def __init__(self, configuration, pools_size=4, maxsize=None): addition_pool_args = {} if configuration.assert_hostname is not None: - addition_pool_args['assert_hostname'] = configuration.assert_hostname # noqa: E501 + addition_pool_args[ + "assert_hostname" + ] = configuration.assert_hostname # noqa: E501 if maxsize is None: if configuration.connection_pool_maxsize is not None: @@ -108,9 +109,17 @@ def __init__(self, configuration, pools_size=4, maxsize=None): **addition_pool_args ) - def request(self, method, url, query_params=None, headers=None, - body=None, post_params=None, _preload_content=True, - _request_timeout=None): + def request( + self, + method, + url, + query_params=None, + headers=None, + body=None, + post_params=None, + _preload_content=True, + _request_timeout=None, + ): """Perform requests. :param method: http request method @@ -130,8 +139,7 @@ def request(self, method, url, query_params=None, headers=None, (connection, read) timeouts. 
""" method = method.upper() - assert method in ['GET', 'HEAD', 'DELETE', 'POST', 'PUT', - 'PATCH', 'OPTIONS'] + assert method in ["GET", "HEAD", "DELETE", "POST", "PUT", "PATCH", "OPTIONS"] if post_params and body: raise ValueError( @@ -143,62 +151,74 @@ def request(self, method, url, query_params=None, headers=None, timeout = None if _request_timeout: - if isinstance(_request_timeout, (int, ) if six.PY3 else (int, long)): # noqa: E501,F821 + if isinstance( + _request_timeout, (int,) if six.PY3 else (int, long) + ): # noqa: E501,F821 timeout = urllib3.Timeout(total=_request_timeout) - elif (isinstance(_request_timeout, tuple) and - len(_request_timeout) == 2): + elif isinstance(_request_timeout, tuple) and len(_request_timeout) == 2: timeout = urllib3.Timeout( - connect=_request_timeout[0], read=_request_timeout[1]) + connect=_request_timeout[0], read=_request_timeout[1] + ) - if 'Content-Type' not in headers: - headers['Content-Type'] = 'application/json' + if "Content-Type" not in headers: + headers["Content-Type"] = "application/json" try: # For `POST`, `PUT`, `PATCH`, `OPTIONS`, `DELETE` - if method in ['POST', 'PUT', 'PATCH', 'OPTIONS', 'DELETE']: + if method in ["POST", "PUT", "PATCH", "OPTIONS", "DELETE"]: if query_params: - url += '?' + urlencode(query_params) - if re.search('json', headers['Content-Type'], re.IGNORECASE): + url += "?" 
+ urlencode(query_params) + if re.search("json", headers["Content-Type"], re.IGNORECASE): request_body = None if body is not None: request_body = json.dumps(body) r = self.pool_manager.request( - method, url, + method, + url, body=request_body, preload_content=_preload_content, timeout=timeout, - headers=headers) - elif headers['Content-Type'] == 'application/x-www-form-urlencoded': # noqa: E501 + headers=headers, + ) + elif ( + headers["Content-Type"] == "application/x-www-form-urlencoded" + ): # noqa: E501 r = self.pool_manager.request( - method, url, + method, + url, fields=post_params, encode_multipart=False, preload_content=_preload_content, timeout=timeout, - headers=headers) - elif headers['Content-Type'] == 'multipart/form-data': + headers=headers, + ) + elif headers["Content-Type"] == "multipart/form-data": # must del headers['Content-Type'], or the correct # Content-Type which generated by urllib3 will be # overwritten. - del headers['Content-Type'] + del headers["Content-Type"] r = self.pool_manager.request( - method, url, + method, + url, fields=post_params, encode_multipart=True, preload_content=_preload_content, timeout=timeout, - headers=headers) + headers=headers, + ) # Pass a `string` parameter directly in the body to support # other content types than Json when `body` argument is # provided in serialized form elif isinstance(body, str): request_body = body r = self.pool_manager.request( - method, url, + method, + url, body=request_body, preload_content=_preload_content, timeout=timeout, - headers=headers) + headers=headers, + ) else: # Cannot generate the request from given parameters msg = """Cannot prepare a request message for provided @@ -207,11 +227,14 @@ def request(self, method, url, query_params=None, headers=None, raise ApiException(status=0, reason=msg) # For `GET`, `HEAD` else: - r = self.pool_manager.request(method, url, - fields=query_params, - preload_content=_preload_content, - timeout=timeout, - headers=headers) + r = 
self.pool_manager.request( + method, + url, + fields=query_params, + preload_content=_preload_content, + timeout=timeout, + headers=headers, + ) except urllib3.exceptions.SSLError as e: msg = "{0}\n{1}".format(type(e).__name__, str(e)) raise ApiException(status=0, reason=msg) @@ -222,7 +245,7 @@ def request(self, method, url, query_params=None, headers=None, # In the python 3, the response.data is bytes. # we need to decode it to string. if six.PY3: - r.data = r.data.decode('utf8') + r.data = r.data.decode("utf8") # log response body logger.debug("response body: %s", r.data) @@ -232,74 +255,145 @@ def request(self, method, url, query_params=None, headers=None, return r - def GET(self, url, headers=None, query_params=None, _preload_content=True, - _request_timeout=None): - return self.request("GET", url, - headers=headers, - _preload_content=_preload_content, - _request_timeout=_request_timeout, - query_params=query_params) - - def HEAD(self, url, headers=None, query_params=None, _preload_content=True, - _request_timeout=None): - return self.request("HEAD", url, - headers=headers, - _preload_content=_preload_content, - _request_timeout=_request_timeout, - query_params=query_params) - - def OPTIONS(self, url, headers=None, query_params=None, post_params=None, - body=None, _preload_content=True, _request_timeout=None): - return self.request("OPTIONS", url, - headers=headers, - query_params=query_params, - post_params=post_params, - _preload_content=_preload_content, - _request_timeout=_request_timeout, - body=body) - - def DELETE(self, url, headers=None, query_params=None, body=None, - _preload_content=True, _request_timeout=None): - return self.request("DELETE", url, - headers=headers, - query_params=query_params, - _preload_content=_preload_content, - _request_timeout=_request_timeout, - body=body) - - def POST(self, url, headers=None, query_params=None, post_params=None, - body=None, _preload_content=True, _request_timeout=None): - return self.request("POST", url, 
- headers=headers, - query_params=query_params, - post_params=post_params, - _preload_content=_preload_content, - _request_timeout=_request_timeout, - body=body) - - def PUT(self, url, headers=None, query_params=None, post_params=None, - body=None, _preload_content=True, _request_timeout=None): - return self.request("PUT", url, - headers=headers, - query_params=query_params, - post_params=post_params, - _preload_content=_preload_content, - _request_timeout=_request_timeout, - body=body) - - def PATCH(self, url, headers=None, query_params=None, post_params=None, - body=None, _preload_content=True, _request_timeout=None): - return self.request("PATCH", url, - headers=headers, - query_params=query_params, - post_params=post_params, - _preload_content=_preload_content, - _request_timeout=_request_timeout, - body=body) + def GET( + self, + url, + headers=None, + query_params=None, + _preload_content=True, + _request_timeout=None, + ): + return self.request( + "GET", + url, + headers=headers, + _preload_content=_preload_content, + _request_timeout=_request_timeout, + query_params=query_params, + ) + + def HEAD( + self, + url, + headers=None, + query_params=None, + _preload_content=True, + _request_timeout=None, + ): + return self.request( + "HEAD", + url, + headers=headers, + _preload_content=_preload_content, + _request_timeout=_request_timeout, + query_params=query_params, + ) + + def OPTIONS( + self, + url, + headers=None, + query_params=None, + post_params=None, + body=None, + _preload_content=True, + _request_timeout=None, + ): + return self.request( + "OPTIONS", + url, + headers=headers, + query_params=query_params, + post_params=post_params, + _preload_content=_preload_content, + _request_timeout=_request_timeout, + body=body, + ) + + def DELETE( + self, + url, + headers=None, + query_params=None, + body=None, + _preload_content=True, + _request_timeout=None, + ): + return self.request( + "DELETE", + url, + headers=headers, + query_params=query_params, + 
_preload_content=_preload_content, + _request_timeout=_request_timeout, + body=body, + ) + + def POST( + self, + url, + headers=None, + query_params=None, + post_params=None, + body=None, + _preload_content=True, + _request_timeout=None, + ): + return self.request( + "POST", + url, + headers=headers, + query_params=query_params, + post_params=post_params, + _preload_content=_preload_content, + _request_timeout=_request_timeout, + body=body, + ) + + def PUT( + self, + url, + headers=None, + query_params=None, + post_params=None, + body=None, + _preload_content=True, + _request_timeout=None, + ): + return self.request( + "PUT", + url, + headers=headers, + query_params=query_params, + post_params=post_params, + _preload_content=_preload_content, + _request_timeout=_request_timeout, + body=body, + ) + + def PATCH( + self, + url, + headers=None, + query_params=None, + post_params=None, + body=None, + _preload_content=True, + _request_timeout=None, + ): + return self.request( + "PATCH", + url, + headers=headers, + query_params=query_params, + post_params=post_params, + _preload_content=_preload_content, + _request_timeout=_request_timeout, + body=body, + ) class ApiException(Exception): - def __init__(self, status=None, reason=None, http_resp=None): if http_resp: self.status = http_resp.status @@ -314,11 +408,9 @@ def __init__(self, status=None, reason=None, http_resp=None): def __str__(self): """Custom error messages for exception""" - error_message = "({0})\n"\ - "Reason: {1}\n".format(self.status, self.reason) + error_message = "({0})\n" "Reason: {1}\n".format(self.status, self.reason) if self.headers: - error_message += "HTTP response headers: {0}\n".format( - self.headers) + error_message += "HTTP response headers: {0}\n".format(self.headers) if self.body: error_message += "HTTP response body: {0}\n".format(self.body) diff --git a/api/client/test/test_any_value.py b/api/client/test/test_any_value.py index a65f2417..c5af008a 100644 --- 
a/api/client/test/test_any_value.py +++ b/api/client/test/test_any_value.py @@ -1,6 +1,6 @@ # Copyright 2021 The MLX Contributors -# -# SPDX-License-Identifier: Apache-2.0 +# +# SPDX-License-Identifier: Apache-2.0 # coding: utf-8 """ @@ -18,9 +18,9 @@ import unittest -import swagger_client -from swagger_client.models.any_value import AnyValue # noqa: E501 -from swagger_client.rest import ApiException +import swagger_client # noqa: F401 +from swagger_client.models.any_value import AnyValue # noqa: F401, E501 +from swagger_client.rest import ApiException # noqa: F401 class TestAnyValue(unittest.TestCase): @@ -35,9 +35,9 @@ def tearDown(self): def testAnyValue(self): """Test AnyValue""" # FIXME: construct object with mandatory attributes with example values - # model = swagger_client.models.any_value.AnyValue() # noqa: E501 + # model = swagger_client.models.any_value.AnyValue() pass -if __name__ == '__main__': +if __name__ == "__main__": unittest.main() diff --git a/api/client/test/test_api_access_token.py b/api/client/test/test_api_access_token.py index 87267ce3..b30c254c 100644 --- a/api/client/test/test_api_access_token.py +++ b/api/client/test/test_api_access_token.py @@ -18,9 +18,9 @@ import unittest -import swagger_client -from swagger_client.models.api_access_token import ApiAccessToken # noqa: E501 -from swagger_client.rest import ApiException +import swagger_client # noqa: F401 +from swagger_client.models.api_access_token import ApiAccessToken # noqa: F401, E501 +from swagger_client.rest import ApiException # noqa: F401 class TestApiAccessToken(unittest.TestCase): @@ -35,9 +35,9 @@ def tearDown(self): def testApiAccessToken(self): """Test ApiAccessToken""" # FIXME: construct object with mandatory attributes with example values - # model = swagger_client.models.api_access_token.ApiAccessToken() # noqa: E501 + # model = swagger_client.models.api_access_token.ApiAccessToken() pass -if __name__ == '__main__': +if __name__ == "__main__": unittest.main() diff --git 
a/api/client/test/test_api_asset.py b/api/client/test/test_api_asset.py index 7544cf16..2f07093b 100644 --- a/api/client/test/test_api_asset.py +++ b/api/client/test/test_api_asset.py @@ -18,9 +18,9 @@ import unittest -import swagger_client -from swagger_client.models.api_asset import ApiAsset # noqa: E501 -from swagger_client.rest import ApiException +import swagger_client # noqa: F401 +from swagger_client.models.api_asset import ApiAsset # noqa: F401, E501 +from swagger_client.rest import ApiException # noqa: F401 class TestApiAsset(unittest.TestCase): @@ -35,9 +35,9 @@ def tearDown(self): def testApiAsset(self): """Test ApiAsset""" # FIXME: construct object with mandatory attributes with example values - # model = swagger_client.models.api_asset.ApiAsset() # noqa: E501 + # model = swagger_client.models.api_asset.ApiAsset() pass -if __name__ == '__main__': +if __name__ == "__main__": unittest.main() diff --git a/api/client/test/test_api_catalog_upload.py b/api/client/test/test_api_catalog_upload.py index 96251a64..a8d3b48d 100644 --- a/api/client/test/test_api_catalog_upload.py +++ b/api/client/test/test_api_catalog_upload.py @@ -18,9 +18,9 @@ import unittest -import swagger_client -from swagger_client.models.api_catalog_upload import ApiCatalogUpload # noqa: E501 -from swagger_client.rest import ApiException +import swagger_client # noqa: F401 +from swagger_client.models.api_catalog_upload import ApiCatalogUpload # noqa: F401, E501 +from swagger_client.rest import ApiException # noqa: F401 class TestApiCatalogUpload(unittest.TestCase): @@ -35,9 +35,9 @@ def tearDown(self): def testApiCatalogUpload(self): """Test ApiCatalogUpload""" # FIXME: construct object with mandatory attributes with example values - # model = swagger_client.models.api_catalog_upload.ApiCatalogUpload() # noqa: E501 + # model = swagger_client.models.api_catalog_upload.ApiCatalogUpload() pass -if __name__ == '__main__': +if __name__ == "__main__": unittest.main() diff --git 
a/api/client/test/test_api_catalog_upload_error.py b/api/client/test/test_api_catalog_upload_error.py index e1844423..8ce84e1b 100644 --- a/api/client/test/test_api_catalog_upload_error.py +++ b/api/client/test/test_api_catalog_upload_error.py @@ -18,9 +18,11 @@ import unittest -import swagger_client -from swagger_client.models.api_catalog_upload_error import ApiCatalogUploadError # noqa: E501 -from swagger_client.rest import ApiException +import swagger_client # noqa: F401 +from swagger_client.models.api_catalog_upload_error import ( # noqa: F401 + ApiCatalogUploadError, +) +from swagger_client.rest import ApiException # noqa: F401 class TestApiCatalogUploadError(unittest.TestCase): @@ -35,9 +37,9 @@ def tearDown(self): def testApiCatalogUploadError(self): """Test ApiCatalogUploadError""" # FIXME: construct object with mandatory attributes with example values - # model = swagger_client.models.api_catalog_upload_error.ApiCatalogUploadError() # noqa: E501 + # model = swagger_client.models.api_catalog_upload_error.ApiCatalogUploadError() pass -if __name__ == '__main__': +if __name__ == "__main__": unittest.main() diff --git a/api/client/test/test_api_catalog_upload_item.py b/api/client/test/test_api_catalog_upload_item.py index 37331562..ccfeb2f9 100644 --- a/api/client/test/test_api_catalog_upload_item.py +++ b/api/client/test/test_api_catalog_upload_item.py @@ -18,9 +18,11 @@ import unittest -import swagger_client -from swagger_client.models.api_catalog_upload_item import ApiCatalogUploadItem # noqa: E501 -from swagger_client.rest import ApiException +import swagger_client # noqa: F401 +from swagger_client.models.api_catalog_upload_item import ( # noqa: F401 + ApiCatalogUploadItem, +) +from swagger_client.rest import ApiException # noqa: F401 class TestApiCatalogUploadItem(unittest.TestCase): @@ -35,9 +37,9 @@ def tearDown(self): def testApiCatalogUploadItem(self): """Test ApiCatalogUploadItem""" # FIXME: construct object with mandatory attributes with example 
values - # model = swagger_client.models.api_catalog_upload_item.ApiCatalogUploadItem() # noqa: E501 + # model = swagger_client.models.api_catalog_upload_item.ApiCatalogUploadItem() pass -if __name__ == '__main__': +if __name__ == "__main__": unittest.main() diff --git a/api/client/test/test_api_catalog_upload_response.py b/api/client/test/test_api_catalog_upload_response.py index f31bfd19..c483c0df 100644 --- a/api/client/test/test_api_catalog_upload_response.py +++ b/api/client/test/test_api_catalog_upload_response.py @@ -18,9 +18,11 @@ import unittest -import swagger_client -from swagger_client.models.api_catalog_upload_response import ApiCatalogUploadResponse # noqa: E501 -from swagger_client.rest import ApiException +import swagger_client # noqa: F401 +from swagger_client.models.api_catalog_upload_response import ( # noqa: F401 + ApiCatalogUploadResponse, +) +from swagger_client.rest import ApiException # noqa: F401 class TestApiCatalogUploadResponse(unittest.TestCase): @@ -35,9 +37,9 @@ def tearDown(self): def testApiCatalogUploadResponse(self): """Test ApiCatalogUploadResponse""" # FIXME: construct object with mandatory attributes with example values - # model = swagger_client.models.api_catalog_upload_response.ApiCatalogUploadResponse() # noqa: E501 + # model = swagger_client.models.api_catalog_upload_response.ApiCatalogUploadResponse() pass -if __name__ == '__main__': +if __name__ == "__main__": unittest.main() diff --git a/api/client/test/test_api_component.py b/api/client/test/test_api_component.py index 1b137a8d..646284ef 100644 --- a/api/client/test/test_api_component.py +++ b/api/client/test/test_api_component.py @@ -1,6 +1,6 @@ # Copyright 2021 The MLX Contributors -# -# SPDX-License-Identifier: Apache-2.0 +# +# SPDX-License-Identifier: Apache-2.0 # coding: utf-8 """ @@ -18,9 +18,9 @@ import unittest -import swagger_client -from swagger_client.models.api_component import ApiComponent # noqa: E501 -from swagger_client.rest import ApiException +import 
swagger_client # noqa: F401 +from swagger_client.models.api_component import ApiComponent # noqa: F401, E501 +from swagger_client.rest import ApiException # noqa: F401 class TestApiComponent(unittest.TestCase): @@ -35,9 +35,9 @@ def tearDown(self): def testApiComponent(self): """Test ApiComponent""" # FIXME: construct object with mandatory attributes with example values - # model = swagger_client.models.api_component.ApiComponent() # noqa: E501 + # model = swagger_client.models.api_component.ApiComponent() pass -if __name__ == '__main__': +if __name__ == "__main__": unittest.main() diff --git a/api/client/test/test_api_credential.py b/api/client/test/test_api_credential.py index ca78f702..256bb01f 100644 --- a/api/client/test/test_api_credential.py +++ b/api/client/test/test_api_credential.py @@ -1,6 +1,6 @@ # Copyright 2021 The MLX Contributors -# -# SPDX-License-Identifier: Apache-2.0 +# +# SPDX-License-Identifier: Apache-2.0 # coding: utf-8 """ @@ -18,9 +18,9 @@ import unittest -import swagger_client -from swagger_client.models.api_credential import ApiCredential # noqa: E501 -from swagger_client.rest import ApiException +import swagger_client # noqa: F401 +from swagger_client.models.api_credential import ApiCredential # noqa: F401, E501 +from swagger_client.rest import ApiException # noqa: F401 class TestApiCredential(unittest.TestCase): @@ -35,9 +35,9 @@ def tearDown(self): def testApiCredential(self): """Test ApiCredential""" # FIXME: construct object with mandatory attributes with example values - # model = swagger_client.models.api_credential.ApiCredential() # noqa: E501 + # model = swagger_client.models.api_credential.ApiCredential() pass -if __name__ == '__main__': +if __name__ == "__main__": unittest.main() diff --git a/api/client/test/test_api_dataset.py b/api/client/test/test_api_dataset.py index 39db2085..8fad9a6f 100644 --- a/api/client/test/test_api_dataset.py +++ b/api/client/test/test_api_dataset.py @@ -18,9 +18,9 @@ import unittest -import 
swagger_client -from swagger_client.models.api_dataset import ApiDataset # noqa: E501 -from swagger_client.rest import ApiException +import swagger_client # noqa: F401 +from swagger_client.models.api_dataset import ApiDataset # noqa: F401, E501 +from swagger_client.rest import ApiException # noqa: F401 class TestApiDataset(unittest.TestCase): @@ -35,9 +35,9 @@ def tearDown(self): def testApiDataset(self): """Test ApiDataset""" # FIXME: construct object with mandatory attributes with example values - # model = swagger_client.models.api_dataset.ApiDataset() # noqa: E501 + # model = swagger_client.models.api_dataset.ApiDataset() pass -if __name__ == '__main__': +if __name__ == "__main__": unittest.main() diff --git a/api/client/test/test_api_generate_code_response.py b/api/client/test/test_api_generate_code_response.py index aa2b7c99..5c7b42d5 100644 --- a/api/client/test/test_api_generate_code_response.py +++ b/api/client/test/test_api_generate_code_response.py @@ -1,6 +1,6 @@ # Copyright 2021 The MLX Contributors -# -# SPDX-License-Identifier: Apache-2.0 +# +# SPDX-License-Identifier: Apache-2.0 # coding: utf-8 """ @@ -18,9 +18,11 @@ import unittest -import swagger_client -from swagger_client.models.api_generate_code_response import ApiGenerateCodeResponse # noqa: E501 -from swagger_client.rest import ApiException +import swagger_client # noqa: F401 +from swagger_client.models.api_generate_code_response import ( # noqa: F401 + ApiGenerateCodeResponse, +) +from swagger_client.rest import ApiException # noqa: F401 class TestApiGenerateCodeResponse(unittest.TestCase): @@ -35,9 +37,9 @@ def tearDown(self): def testApiGenerateCodeResponse(self): """Test ApiGenerateCodeResponse""" # FIXME: construct object with mandatory attributes with example values - # model = swagger_client.models.api_generate_code_response.ApiGenerateCodeResponse() # noqa: E501 + # model = swagger_client.models.api_generate_code_response.ApiGenerateCodeResponse() pass -if __name__ == '__main__': +if 
__name__ == "__main__": unittest.main() diff --git a/api/client/test/test_api_generate_model_code_response.py b/api/client/test/test_api_generate_model_code_response.py index 374295b8..7cbf6f63 100644 --- a/api/client/test/test_api_generate_model_code_response.py +++ b/api/client/test/test_api_generate_model_code_response.py @@ -1,6 +1,6 @@ # Copyright 2021 The MLX Contributors -# -# SPDX-License-Identifier: Apache-2.0 +# +# SPDX-License-Identifier: Apache-2.0 # coding: utf-8 """ @@ -18,9 +18,11 @@ import unittest -import swagger_client -from swagger_client.models.api_generate_model_code_response import ApiGenerateModelCodeResponse # noqa: E501 -from swagger_client.rest import ApiException +import swagger_client # noqa: F401 +from swagger_client.models.api_generate_model_code_response import ( # noqa: F401 + ApiGenerateModelCodeResponse, +) +from swagger_client.rest import ApiException # noqa: F401 class TestApiGenerateModelCodeResponse(unittest.TestCase): @@ -35,9 +37,9 @@ def tearDown(self): def testApiGenerateModelCodeResponse(self): """Test ApiGenerateModelCodeResponse""" # FIXME: construct object with mandatory attributes with example values - # model = swagger_client.models.api_generate_model_code_response.ApiGenerateModelCodeResponse() # noqa: E501 + # model = swagger_client.models.api_generate_model_code_response.ApiGenerateModelCodeResponse() pass -if __name__ == '__main__': +if __name__ == "__main__": unittest.main() diff --git a/api/client/test/test_api_get_template_response.py b/api/client/test/test_api_get_template_response.py index e5c0bce5..6bab1203 100644 --- a/api/client/test/test_api_get_template_response.py +++ b/api/client/test/test_api_get_template_response.py @@ -1,6 +1,6 @@ # Copyright 2021 The MLX Contributors -# -# SPDX-License-Identifier: Apache-2.0 +# +# SPDX-License-Identifier: Apache-2.0 # coding: utf-8 """ @@ -18,9 +18,11 @@ import unittest -import swagger_client -from swagger_client.models.api_get_template_response import 
ApiGetTemplateResponse # noqa: E501 -from swagger_client.rest import ApiException +import swagger_client # noqa: F401 +from swagger_client.models.api_get_template_response import ( # noqa: F401 + ApiGetTemplateResponse, +) +from swagger_client.rest import ApiException # noqa: F401 class TestApiGetTemplateResponse(unittest.TestCase): @@ -35,9 +37,9 @@ def tearDown(self): def testApiGetTemplateResponse(self): """Test ApiGetTemplateResponse""" # FIXME: construct object with mandatory attributes with example values - # model = swagger_client.models.api_get_template_response.ApiGetTemplateResponse() # noqa: E501 + # model = swagger_client.models.api_get_template_response.ApiGetTemplateResponse() pass -if __name__ == '__main__': +if __name__ == "__main__": unittest.main() diff --git a/api/client/test/test_api_inferenceservice.py b/api/client/test/test_api_inferenceservice.py index a4ccd922..b1c5d48a 100644 --- a/api/client/test/test_api_inferenceservice.py +++ b/api/client/test/test_api_inferenceservice.py @@ -1,6 +1,6 @@ # Copyright 2021 The MLX Contributors -# -# SPDX-License-Identifier: Apache-2.0 +# +# SPDX-License-Identifier: Apache-2.0 # coding: utf-8 """ @@ -18,9 +18,9 @@ import unittest -import swagger_client -from swagger_client.models.api_inferenceservice import ApiInferenceservice # noqa: E501 -from swagger_client.rest import ApiException +import swagger_client # noqa: F401 +from swagger_client.models.api_inferenceservice import ApiInferenceservice # noqa: F401, E501 +from swagger_client.rest import ApiException # noqa: F401 class TestApiInferenceservice(unittest.TestCase): @@ -35,9 +35,9 @@ def tearDown(self): def testApiInferenceservice(self): """Test ApiInferenceservice""" # FIXME: construct object with mandatory attributes with example values - # model = swagger_client.models.api_inferenceservice.ApiInferenceservice() # noqa: E501 + # model = swagger_client.models.api_inferenceservice.ApiInferenceservice() pass -if __name__ == '__main__': +if __name__ == 
"__main__": unittest.main() diff --git a/api/client/test/test_api_list_catalog_items_response.py b/api/client/test/test_api_list_catalog_items_response.py index 4e8d95f5..f89c1118 100644 --- a/api/client/test/test_api_list_catalog_items_response.py +++ b/api/client/test/test_api_list_catalog_items_response.py @@ -18,9 +18,11 @@ import unittest -import swagger_client -from swagger_client.models.api_list_catalog_items_response import ApiListCatalogItemsResponse # noqa: E501 -from swagger_client.rest import ApiException +import swagger_client # noqa: F401 +from swagger_client.models.api_list_catalog_items_response import ( # noqa: F401 + ApiListCatalogItemsResponse, +) +from swagger_client.rest import ApiException # noqa: F401 class TestApiListCatalogItemsResponse(unittest.TestCase): @@ -35,9 +37,9 @@ def tearDown(self): def testApiListCatalogItemsResponse(self): """Test ApiListCatalogItemsResponse""" # FIXME: construct object with mandatory attributes with example values - # model = swagger_client.models.api_list_catalog_items_response.ApiListCatalogItemsResponse() # noqa: E501 + # model = swagger_client.models.api_list_catalog_items_response.ApiListCatalogItemsResponse() pass -if __name__ == '__main__': +if __name__ == "__main__": unittest.main() diff --git a/api/client/test/test_api_list_catalog_upload_errors.py b/api/client/test/test_api_list_catalog_upload_errors.py index 37f4ed7b..02d4854c 100644 --- a/api/client/test/test_api_list_catalog_upload_errors.py +++ b/api/client/test/test_api_list_catalog_upload_errors.py @@ -18,9 +18,11 @@ import unittest -import swagger_client -from swagger_client.models.api_list_catalog_upload_errors import ApiListCatalogUploadErrors # noqa: E501 -from swagger_client.rest import ApiException +import swagger_client # noqa: F401 +from swagger_client.models.api_list_catalog_upload_errors import ( # noqa: F401 + ApiListCatalogUploadErrors, +) +from swagger_client.rest import ApiException # noqa: F401 class 
TestApiListCatalogUploadErrors(unittest.TestCase): @@ -35,9 +37,9 @@ def tearDown(self): def testApiListCatalogUploadErrors(self): """Test ApiListCatalogUploadErrors""" # FIXME: construct object with mandatory attributes with example values - # model = swagger_client.models.api_list_catalog_upload_errors.ApiListCatalogUploadErrors() # noqa: E501 + # model = swagger_client.models.api_list_catalog_upload_errors.ApiListCatalogUploadErrors() pass -if __name__ == '__main__': +if __name__ == "__main__": unittest.main() diff --git a/api/client/test/test_api_list_components_response.py b/api/client/test/test_api_list_components_response.py index d7984c85..61b30910 100644 --- a/api/client/test/test_api_list_components_response.py +++ b/api/client/test/test_api_list_components_response.py @@ -1,6 +1,6 @@ # Copyright 2021 The MLX Contributors -# -# SPDX-License-Identifier: Apache-2.0 +# +# SPDX-License-Identifier: Apache-2.0 # coding: utf-8 """ @@ -18,9 +18,11 @@ import unittest -import swagger_client -from swagger_client.models.api_list_components_response import ApiListComponentsResponse # noqa: E501 -from swagger_client.rest import ApiException +import swagger_client # noqa: F401 +from swagger_client.models.api_list_components_response import ( # noqa: F401 + ApiListComponentsResponse, +) +from swagger_client.rest import ApiException # noqa: F401 class TestApiListComponentsResponse(unittest.TestCase): @@ -35,9 +37,9 @@ def tearDown(self): def testApiListComponentsResponse(self): """Test ApiListComponentsResponse""" # FIXME: construct object with mandatory attributes with example values - # model = swagger_client.models.api_list_components_response.ApiListComponentsResponse() # noqa: E501 + # model = swagger_client.models.api_list_components_response.ApiListComponentsResponse() pass -if __name__ == '__main__': +if __name__ == "__main__": unittest.main() diff --git a/api/client/test/test_api_list_credentials_response.py b/api/client/test/test_api_list_credentials_response.py 
index 205c906d..7119794f 100644 --- a/api/client/test/test_api_list_credentials_response.py +++ b/api/client/test/test_api_list_credentials_response.py @@ -1,6 +1,6 @@ # Copyright 2021 The MLX Contributors -# -# SPDX-License-Identifier: Apache-2.0 +# +# SPDX-License-Identifier: Apache-2.0 # coding: utf-8 """ @@ -18,9 +18,11 @@ import unittest -import swagger_client -from swagger_client.models.api_list_credentials_response import ApiListCredentialsResponse # noqa: E501 -from swagger_client.rest import ApiException +import swagger_client # noqa: F401 +from swagger_client.models.api_list_credentials_response import ( # noqa: F401 + ApiListCredentialsResponse, +) +from swagger_client.rest import ApiException # noqa: F401 class TestApiListCredentialsResponse(unittest.TestCase): @@ -35,9 +37,9 @@ def tearDown(self): def testApiListCredentialsResponse(self): """Test ApiListCredentialsResponse""" # FIXME: construct object with mandatory attributes with example values - # model = swagger_client.models.api_list_credentials_response.ApiListCredentialsResponse() # noqa: E501 + # model = swagger_client.models.api_list_credentials_response.ApiListCredentialsResponse() pass -if __name__ == '__main__': +if __name__ == "__main__": unittest.main() diff --git a/api/client/test/test_api_list_datasets_response.py b/api/client/test/test_api_list_datasets_response.py index a15a8b08..329322e9 100644 --- a/api/client/test/test_api_list_datasets_response.py +++ b/api/client/test/test_api_list_datasets_response.py @@ -18,9 +18,11 @@ import unittest -import swagger_client -from swagger_client.models.api_list_datasets_response import ApiListDatasetsResponse # noqa: E501 -from swagger_client.rest import ApiException +import swagger_client # noqa: F401 +from swagger_client.models.api_list_datasets_response import ( # noqa: F401 + ApiListDatasetsResponse, +) +from swagger_client.rest import ApiException # noqa: F401 class TestApiListDatasetsResponse(unittest.TestCase): @@ -35,9 +37,9 @@ def 
tearDown(self): def testApiListDatasetsResponse(self): """Test ApiListDatasetsResponse""" # FIXME: construct object with mandatory attributes with example values - # model = swagger_client.models.api_list_datasets_response.ApiListDatasetsResponse() # noqa: E501 + # model = swagger_client.models.api_list_datasets_response.ApiListDatasetsResponse() pass -if __name__ == '__main__': +if __name__ == "__main__": unittest.main() diff --git a/api/client/test/test_api_list_inferenceservices_response.py b/api/client/test/test_api_list_inferenceservices_response.py index 50c368e1..cc3a1501 100644 --- a/api/client/test/test_api_list_inferenceservices_response.py +++ b/api/client/test/test_api_list_inferenceservices_response.py @@ -1,6 +1,6 @@ # Copyright 2021 The MLX Contributors -# -# SPDX-License-Identifier: Apache-2.0 +# +# SPDX-License-Identifier: Apache-2.0 # coding: utf-8 """ @@ -18,9 +18,11 @@ import unittest -import swagger_client -from swagger_client.models.api_list_inferenceservices_response import ApiListInferenceservicesResponse # noqa: E501 -from swagger_client.rest import ApiException +import swagger_client # noqa: F401 +from swagger_client.models.api_list_inferenceservices_response import ( # noqa: F401 + ApiListInferenceservicesResponse, +) +from swagger_client.rest import ApiException # noqa: F401 class TestApiListInferenceservicesResponse(unittest.TestCase): @@ -35,9 +37,9 @@ def tearDown(self): def testApiListInferenceservicesResponse(self): """Test ApiListInferenceservicesResponse""" # FIXME: construct object with mandatory attributes with example values - # model = swagger_client.models.api_list_inferenceservices_response.ApiListInferenceservicesResponse() # noqa: E501 + # model = swagger_client.models.api_list_inferenceservices_response.ApiListInferenceservicesResponse() pass -if __name__ == '__main__': +if __name__ == "__main__": unittest.main() diff --git a/api/client/test/test_api_list_models_response.py 
b/api/client/test/test_api_list_models_response.py index b862c1c9..0f23f154 100644 --- a/api/client/test/test_api_list_models_response.py +++ b/api/client/test/test_api_list_models_response.py @@ -1,6 +1,6 @@ -# Copyright 2021 The MLX Contributors -# -# SPDX-License-Identifier: Apache-2.0 +# Copyright 2021 The MLX Contributors +# +# SPDX-License-Identifier: Apache-2.0 # coding: utf-8 """ @@ -18,9 +18,11 @@ import unittest -import swagger_client -from swagger_client.models.api_list_models_response import ApiListModelsResponse # noqa: E501 -from swagger_client.rest import ApiException +import swagger_client # noqa: F401 +from swagger_client.models.api_list_models_response import ( # noqa: F401 + ApiListModelsResponse, +) +from swagger_client.rest import ApiException # noqa: F401 class TestApiListModelsResponse(unittest.TestCase): @@ -35,9 +37,9 @@ def tearDown(self): def testApiListModelsResponse(self): """Test ApiListModelsResponse""" # FIXME: construct object with mandatory attributes with example values - # model = swagger_client.models.api_list_models_response.ApiListModelsResponse() # noqa: E501 + # model = swagger_client.models.api_list_models_response.ApiListModelsResponse() pass -if __name__ == '__main__': +if __name__ == "__main__": unittest.main() diff --git a/api/client/test/test_api_list_notebooks_response.py b/api/client/test/test_api_list_notebooks_response.py index 536558ee..c22ea98c 100644 --- a/api/client/test/test_api_list_notebooks_response.py +++ b/api/client/test/test_api_list_notebooks_response.py @@ -1,6 +1,6 @@ # Copyright 2021 The MLX Contributors -# -# SPDX-License-Identifier: Apache-2.0 +# +# SPDX-License-Identifier: Apache-2.0 # coding: utf-8 """ @@ -18,9 +18,11 @@ import unittest -import swagger_client -from swagger_client.models.api_list_notebooks_response import ApiListNotebooksResponse # noqa: E501 -from swagger_client.rest import ApiException +import swagger_client # noqa: F401 +from swagger_client.models.api_list_notebooks_response 
import ( # noqa: F401 + ApiListNotebooksResponse, +) +from swagger_client.rest import ApiException # noqa: F401 class TestApiListNotebooksResponse(unittest.TestCase): @@ -35,9 +37,9 @@ def tearDown(self): def testApiListNotebooksResponse(self): """Test ApiListNotebooksResponse""" # FIXME: construct object with mandatory attributes with example values - # model = swagger_client.models.api_list_notebooks_response.ApiListNotebooksResponse() # noqa: E501 + # model = swagger_client.models.api_list_notebooks_response.ApiListNotebooksResponse() pass -if __name__ == '__main__': +if __name__ == "__main__": unittest.main() diff --git a/api/client/test/test_api_list_pipelines_response.py b/api/client/test/test_api_list_pipelines_response.py index f72082ad..8ac7447f 100644 --- a/api/client/test/test_api_list_pipelines_response.py +++ b/api/client/test/test_api_list_pipelines_response.py @@ -1,6 +1,6 @@ # Copyright 2021 The MLX Contributors -# -# SPDX-License-Identifier: Apache-2.0 +# +# SPDX-License-Identifier: Apache-2.0 # coding: utf-8 """ @@ -18,9 +18,11 @@ import unittest -import swagger_client -from swagger_client.models.api_list_pipelines_response import ApiListPipelinesResponse # noqa: E501 -from swagger_client.rest import ApiException +import swagger_client # noqa: F401 +from swagger_client.models.api_list_pipelines_response import ( # noqa: F401 + ApiListPipelinesResponse, +) +from swagger_client.rest import ApiException # noqa: F401 class TestApiListPipelinesResponse(unittest.TestCase): @@ -35,9 +37,9 @@ def tearDown(self): def testApiListPipelinesResponse(self): """Test ApiListPipelinesResponse""" # FIXME: construct object with mandatory attributes with example values - # model = swagger_client.models.api_list_pipelines_response.ApiListPipelinesResponse() # noqa: E501 + # model = swagger_client.models.api_list_pipelines_response.ApiListPipelinesResponse() pass -if __name__ == '__main__': +if __name__ == "__main__": unittest.main() diff --git 
a/api/client/test/test_api_metadata.py b/api/client/test/test_api_metadata.py index f4bdd18a..d7c9743e 100644 --- a/api/client/test/test_api_metadata.py +++ b/api/client/test/test_api_metadata.py @@ -1,6 +1,6 @@ # Copyright 2021 The MLX Contributors -# -# SPDX-License-Identifier: Apache-2.0 +# +# SPDX-License-Identifier: Apache-2.0 # coding: utf-8 """ @@ -18,9 +18,9 @@ import unittest -import swagger_client -from swagger_client.models.api_metadata import ApiMetadata # noqa: E501 -from swagger_client.rest import ApiException +import swagger_client # noqa: F401 +from swagger_client.models.api_metadata import ApiMetadata # noqa: F401, E501 +from swagger_client.rest import ApiException # noqa: F401 class TestApiMetadata(unittest.TestCase): @@ -35,9 +35,9 @@ def tearDown(self): def testApiMetadata(self): """Test ApiMetadata""" # FIXME: construct object with mandatory attributes with example values - # model = swagger_client.models.api_metadata.ApiMetadata() # noqa: E501 + # model = swagger_client.models.api_metadata.ApiMetadata() pass -if __name__ == '__main__': +if __name__ == "__main__": unittest.main() diff --git a/api/client/test/test_api_model.py b/api/client/test/test_api_model.py index 3e3a558f..1eaca289 100644 --- a/api/client/test/test_api_model.py +++ b/api/client/test/test_api_model.py @@ -1,6 +1,6 @@ -# Copyright 2021 The MLX Contributors -# -# SPDX-License-Identifier: Apache-2.0 +# Copyright 2021 The MLX Contributors +# +# SPDX-License-Identifier: Apache-2.0 # coding: utf-8 """ @@ -18,9 +18,9 @@ import unittest -import swagger_client -from swagger_client.models.api_model import ApiModel # noqa: E501 -from swagger_client.rest import ApiException +import swagger_client # noqa: F401 +from swagger_client.models.api_model import ApiModel # noqa: F401, E501 +from swagger_client.rest import ApiException # noqa: F401 class TestApiModel(unittest.TestCase): @@ -35,9 +35,9 @@ def tearDown(self): def testApiModel(self): """Test ApiModel""" # FIXME: construct object 
with mandatory attributes with example values - # model = swagger_client.models.api_model.ApiModel() # noqa: E501 + # model = swagger_client.models.api_model.ApiModel() pass -if __name__ == '__main__': +if __name__ == "__main__": unittest.main() diff --git a/api/client/test/test_api_model_framework.py b/api/client/test/test_api_model_framework.py index 6fb01989..a8e5afc1 100644 --- a/api/client/test/test_api_model_framework.py +++ b/api/client/test/test_api_model_framework.py @@ -1,6 +1,6 @@ # Copyright 2021 The MLX Contributors -# -# SPDX-License-Identifier: Apache-2.0 +# +# SPDX-License-Identifier: Apache-2.0 # coding: utf-8 """ @@ -18,9 +18,9 @@ import unittest -import swagger_client -from swagger_client.models.api_model_framework import ApiModelFramework # noqa: E501 -from swagger_client.rest import ApiException +import swagger_client # noqa: F401 +from swagger_client.models.api_model_framework import ApiModelFramework # noqa: F401, E501 +from swagger_client.rest import ApiException # noqa: F401 class TestApiModelFramework(unittest.TestCase): @@ -35,9 +35,9 @@ def tearDown(self): def testApiModelFramework(self): """Test ApiModelFramework""" # FIXME: construct object with mandatory attributes with example values - # model = swagger_client.models.api_model_framework.ApiModelFramework() # noqa: E501 + # model = swagger_client.models.api_model_framework.ApiModelFramework() pass -if __name__ == '__main__': +if __name__ == "__main__": unittest.main() diff --git a/api/client/test/test_api_model_framework_runtimes.py b/api/client/test/test_api_model_framework_runtimes.py index a19ea7aa..83e457fe 100644 --- a/api/client/test/test_api_model_framework_runtimes.py +++ b/api/client/test/test_api_model_framework_runtimes.py @@ -1,6 +1,6 @@ # Copyright 2021 The MLX Contributors -# -# SPDX-License-Identifier: Apache-2.0 +# +# SPDX-License-Identifier: Apache-2.0 # coding: utf-8 """ @@ -18,9 +18,11 @@ import unittest -import swagger_client -from 
swagger_client.models.api_model_framework_runtimes import ApiModelFrameworkRuntimes # noqa: E501 -from swagger_client.rest import ApiException +import swagger_client # noqa: F401 +from swagger_client.models.api_model_framework_runtimes import ( # noqa: F401 + ApiModelFrameworkRuntimes, +) +from swagger_client.rest import ApiException # noqa: F401 class TestApiModelFrameworkRuntimes(unittest.TestCase): @@ -35,9 +37,9 @@ def tearDown(self): def testApiModelFrameworkRuntimes(self): """Test ApiModelFrameworkRuntimes""" # FIXME: construct object with mandatory attributes with example values - # model = swagger_client.models.api_model_framework_runtimes.ApiModelFrameworkRuntimes() # noqa: E501 + # model = swagger_client.models.api_model_framework_runtimes.ApiModelFrameworkRuntimes() pass -if __name__ == '__main__': +if __name__ == "__main__": unittest.main() diff --git a/api/client/test/test_api_model_script.py b/api/client/test/test_api_model_script.py index 2f9ebfc6..c7300a1c 100644 --- a/api/client/test/test_api_model_script.py +++ b/api/client/test/test_api_model_script.py @@ -1,6 +1,6 @@ # Copyright 2021 The MLX Contributors -# -# SPDX-License-Identifier: Apache-2.0 +# +# SPDX-License-Identifier: Apache-2.0 # coding: utf-8 """ @@ -18,9 +18,9 @@ import unittest -import swagger_client -from swagger_client.models.api_model_script import ApiModelScript # noqa: E501 -from swagger_client.rest import ApiException +import swagger_client # noqa: F401 +from swagger_client.models.api_model_script import ApiModelScript # noqa: F401, E501 +from swagger_client.rest import ApiException # noqa: F401 class TestApiModelScript(unittest.TestCase): @@ -35,9 +35,9 @@ def tearDown(self): def testApiModelScript(self): """Test ApiModelScript""" # FIXME: construct object with mandatory attributes with example values - # model = swagger_client.models.api_model_script.ApiModelScript() # noqa: E501 + # model = swagger_client.models.api_model_script.ApiModelScript() pass -if __name__ == 
'__main__': +if __name__ == "__main__": unittest.main() diff --git a/api/client/test/test_api_notebook.py b/api/client/test/test_api_notebook.py index a18fe0e6..45337b00 100644 --- a/api/client/test/test_api_notebook.py +++ b/api/client/test/test_api_notebook.py @@ -1,6 +1,6 @@ # Copyright 2021 The MLX Contributors -# -# SPDX-License-Identifier: Apache-2.0 +# +# SPDX-License-Identifier: Apache-2.0 # coding: utf-8 """ @@ -18,9 +18,9 @@ import unittest -import swagger_client -from swagger_client.models.api_notebook import ApiNotebook # noqa: E501 -from swagger_client.rest import ApiException +import swagger_client # noqa: F401 +from swagger_client.models.api_notebook import ApiNotebook # noqa: F401, E501 +from swagger_client.rest import ApiException # noqa: F401 class TestApiNotebook(unittest.TestCase): @@ -35,9 +35,9 @@ def tearDown(self): def testApiNotebook(self): """Test ApiNotebook""" # FIXME: construct object with mandatory attributes with example values - # model = swagger_client.models.api_notebook.ApiNotebook() # noqa: E501 + # model = swagger_client.models.api_notebook.ApiNotebook() pass -if __name__ == '__main__': +if __name__ == "__main__": unittest.main() diff --git a/api/client/test/test_api_parameter.py b/api/client/test/test_api_parameter.py index d48c6e13..9388d0f3 100644 --- a/api/client/test/test_api_parameter.py +++ b/api/client/test/test_api_parameter.py @@ -1,6 +1,6 @@ # Copyright 2021 The MLX Contributors -# -# SPDX-License-Identifier: Apache-2.0 +# +# SPDX-License-Identifier: Apache-2.0 # coding: utf-8 """ @@ -18,9 +18,9 @@ import unittest -import swagger_client -from swagger_client.models.api_parameter import ApiParameter # noqa: E501 -from swagger_client.rest import ApiException +import swagger_client # noqa: F401 +from swagger_client.models.api_parameter import ApiParameter # noqa: F401, E501 +from swagger_client.rest import ApiException # noqa: F401 class TestApiParameter(unittest.TestCase): @@ -35,9 +35,9 @@ def tearDown(self): def 
testApiParameter(self): """Test ApiParameter""" # FIXME: construct object with mandatory attributes with example values - # model = swagger_client.models.api_parameter.ApiParameter() # noqa: E501 + # model = swagger_client.models.api_parameter.ApiParameter() pass -if __name__ == '__main__': +if __name__ == "__main__": unittest.main() diff --git a/api/client/test/test_api_pipeline.py b/api/client/test/test_api_pipeline.py index c182878e..2d2d9364 100644 --- a/api/client/test/test_api_pipeline.py +++ b/api/client/test/test_api_pipeline.py @@ -1,6 +1,6 @@ # Copyright 2021 The MLX Contributors -# -# SPDX-License-Identifier: Apache-2.0 +# +# SPDX-License-Identifier: Apache-2.0 # coding: utf-8 """ @@ -18,9 +18,9 @@ import unittest -import swagger_client -from swagger_client.models.api_pipeline import ApiPipeline # noqa: E501 -from swagger_client.rest import ApiException +import swagger_client # noqa: F401 +from swagger_client.models.api_pipeline import ApiPipeline # noqa: F401, E501 +from swagger_client.rest import ApiException # noqa: F401 class TestApiPipeline(unittest.TestCase): @@ -35,9 +35,9 @@ def tearDown(self): def testApiPipeline(self): """Test ApiPipeline""" # FIXME: construct object with mandatory attributes with example values - # model = swagger_client.models.api_pipeline.ApiPipeline() # noqa: E501 + # model = swagger_client.models.api_pipeline.ApiPipeline() pass -if __name__ == '__main__': +if __name__ == "__main__": unittest.main() diff --git a/api/client/test/test_api_pipeline_custom.py b/api/client/test/test_api_pipeline_custom.py index 93e95f4f..a896c5d3 100644 --- a/api/client/test/test_api_pipeline_custom.py +++ b/api/client/test/test_api_pipeline_custom.py @@ -1,6 +1,6 @@ # Copyright 2021 The MLX Contributors -# -# SPDX-License-Identifier: Apache-2.0 +# +# SPDX-License-Identifier: Apache-2.0 # coding: utf-8 """ @@ -18,9 +18,9 @@ import unittest -import swagger_client -from swagger_client.models.api_pipeline_custom import ApiPipelineCustom # noqa: 
E501 -from swagger_client.rest import ApiException +import swagger_client # noqa: F401 +from swagger_client.models.api_pipeline_custom import ApiPipelineCustom # noqa: F401, E501 +from swagger_client.rest import ApiException # noqa: F401 class TestApiPipelineCustom(unittest.TestCase): @@ -35,9 +35,9 @@ def tearDown(self): def testApiPipelineCustom(self): """Test ApiPipelineCustom""" # FIXME: construct object with mandatory attributes with example values - # model = swagger_client.models.api_pipeline_custom.ApiPipelineCustom() # noqa: E501 + # model = swagger_client.models.api_pipeline_custom.ApiPipelineCustom() pass -if __name__ == '__main__': +if __name__ == "__main__": unittest.main() diff --git a/api/client/test/test_api_pipeline_custom_run_payload.py b/api/client/test/test_api_pipeline_custom_run_payload.py index 055e1479..54c16b02 100644 --- a/api/client/test/test_api_pipeline_custom_run_payload.py +++ b/api/client/test/test_api_pipeline_custom_run_payload.py @@ -1,6 +1,6 @@ # Copyright 2021 The MLX Contributors -# -# SPDX-License-Identifier: Apache-2.0 +# +# SPDX-License-Identifier: Apache-2.0 # coding: utf-8 """ @@ -18,9 +18,11 @@ import unittest -import swagger_client -from swagger_client.models.api_pipeline_custom_run_payload import ApiPipelineCustomRunPayload # noqa: E501 -from swagger_client.rest import ApiException +import swagger_client # noqa: F401 +from swagger_client.models.api_pipeline_custom_run_payload import ( # noqa: F401 + ApiPipelineCustomRunPayload, +) +from swagger_client.rest import ApiException # noqa: F401 class TestApiPipelineCustomRunPayload(unittest.TestCase): @@ -35,9 +37,9 @@ def tearDown(self): def testApiPipelineCustomRunPayload(self): """Test ApiPipelineCustomRunPayload""" # FIXME: construct object with mandatory attributes with example values - # model = swagger_client.models.api_pipeline_custom_run_payload.ApiPipelineCustomRunPayload() # noqa: E501 + # model = 
swagger_client.models.api_pipeline_custom_run_payload.ApiPipelineCustomRunPayload() pass -if __name__ == '__main__': +if __name__ == "__main__": unittest.main() diff --git a/api/client/test/test_api_pipeline_dag.py b/api/client/test/test_api_pipeline_dag.py index 4e8d5f9f..2c1fa1b3 100644 --- a/api/client/test/test_api_pipeline_dag.py +++ b/api/client/test/test_api_pipeline_dag.py @@ -1,6 +1,6 @@ # Copyright 2021 The MLX Contributors -# -# SPDX-License-Identifier: Apache-2.0 +# +# SPDX-License-Identifier: Apache-2.0 # coding: utf-8 """ @@ -18,9 +18,9 @@ import unittest -import swagger_client -from swagger_client.models.api_pipeline_dag import ApiPipelineDAG # noqa: E501 -from swagger_client.rest import ApiException +import swagger_client # noqa: F401 +from swagger_client.models.api_pipeline_dag import ApiPipelineDAG # noqa: F401, E501 +from swagger_client.rest import ApiException # noqa: F401 class TestApiPipelineDAG(unittest.TestCase): @@ -35,9 +35,9 @@ def tearDown(self): def testApiPipelineDAG(self): """Test ApiPipelineDAG""" # FIXME: construct object with mandatory attributes with example values - # model = swagger_client.models.api_pipeline_dag.ApiPipelineDAG() # noqa: E501 + # model = swagger_client.models.api_pipeline_dag.ApiPipelineDAG() pass -if __name__ == '__main__': +if __name__ == "__main__": unittest.main() diff --git a/api/client/test/test_api_pipeline_extended.py b/api/client/test/test_api_pipeline_extended.py index be12d311..61e95520 100644 --- a/api/client/test/test_api_pipeline_extended.py +++ b/api/client/test/test_api_pipeline_extended.py @@ -1,6 +1,6 @@ # Copyright 2021 The MLX Contributors -# -# SPDX-License-Identifier: Apache-2.0 +# +# SPDX-License-Identifier: Apache-2.0 # coding: utf-8 """ @@ -18,9 +18,11 @@ import unittest -import swagger_client -from swagger_client.models.api_pipeline_extended import ApiPipelineExtended # noqa: E501 -from swagger_client.rest import ApiException +import swagger_client # noqa: F401 +from 
swagger_client.models.api_pipeline_extended import ( # noqa: F401 + ApiPipelineExtended, +) +from swagger_client.rest import ApiException # noqa: F401 class TestApiPipelineExtended(unittest.TestCase): @@ -35,9 +37,9 @@ def tearDown(self): def testApiPipelineExtended(self): """Test ApiPipelineExtended""" # FIXME: construct object with mandatory attributes with example values - # model = swagger_client.models.api_pipeline_extended.ApiPipelineExtended() # noqa: E501 + # model = swagger_client.models.api_pipeline_extended.ApiPipelineExtended() pass -if __name__ == '__main__': +if __name__ == "__main__": unittest.main() diff --git a/api/client/test/test_api_pipeline_extension.py b/api/client/test/test_api_pipeline_extension.py index 8635a61c..5c6ea743 100644 --- a/api/client/test/test_api_pipeline_extension.py +++ b/api/client/test/test_api_pipeline_extension.py @@ -1,6 +1,6 @@ # Copyright 2021 The MLX Contributors -# -# SPDX-License-Identifier: Apache-2.0 +# +# SPDX-License-Identifier: Apache-2.0 # coding: utf-8 """ @@ -18,9 +18,11 @@ import unittest -import swagger_client -from swagger_client.models.api_pipeline_extension import ApiPipelineExtension # noqa: E501 -from swagger_client.rest import ApiException +import swagger_client # noqa: F401 +from swagger_client.models.api_pipeline_extension import ( # noqa: F401 + ApiPipelineExtension, +) +from swagger_client.rest import ApiException # noqa: F401 class TestApiPipelineExtension(unittest.TestCase): @@ -35,9 +37,9 @@ def tearDown(self): def testApiPipelineExtension(self): """Test ApiPipelineExtension""" # FIXME: construct object with mandatory attributes with example values - # model = swagger_client.models.api_pipeline_extension.ApiPipelineExtension() # noqa: E501 + # model = swagger_client.models.api_pipeline_extension.ApiPipelineExtension() pass -if __name__ == '__main__': +if __name__ == "__main__": unittest.main() diff --git a/api/client/test/test_api_pipeline_inputs.py 
b/api/client/test/test_api_pipeline_inputs.py index 5fe4704d..b29cbb96 100644 --- a/api/client/test/test_api_pipeline_inputs.py +++ b/api/client/test/test_api_pipeline_inputs.py @@ -1,6 +1,6 @@ # Copyright 2021 The MLX Contributors -# -# SPDX-License-Identifier: Apache-2.0 +# +# SPDX-License-Identifier: Apache-2.0 # coding: utf-8 """ @@ -18,9 +18,9 @@ import unittest -import swagger_client -from swagger_client.models.api_pipeline_inputs import ApiPipelineInputs # noqa: E501 -from swagger_client.rest import ApiException +import swagger_client # noqa: F401 +from swagger_client.models.api_pipeline_inputs import ApiPipelineInputs # noqa: F401, E501 +from swagger_client.rest import ApiException # noqa: F401 class TestApiPipelineInputs(unittest.TestCase): @@ -35,9 +35,9 @@ def tearDown(self): def testApiPipelineInputs(self): """Test ApiPipelineInputs""" # FIXME: construct object with mandatory attributes with example values - # model = swagger_client.models.api_pipeline_inputs.ApiPipelineInputs() # noqa: E501 + # model = swagger_client.models.api_pipeline_inputs.ApiPipelineInputs() pass -if __name__ == '__main__': +if __name__ == "__main__": unittest.main() diff --git a/api/client/test/test_api_pipeline_task.py b/api/client/test/test_api_pipeline_task.py index 5b8733d9..4a8f8d03 100644 --- a/api/client/test/test_api_pipeline_task.py +++ b/api/client/test/test_api_pipeline_task.py @@ -1,6 +1,6 @@ # Copyright 2021 The MLX Contributors -# -# SPDX-License-Identifier: Apache-2.0 +# +# SPDX-License-Identifier: Apache-2.0 # coding: utf-8 """ @@ -18,9 +18,9 @@ import unittest -import swagger_client -from swagger_client.models.api_pipeline_task import ApiPipelineTask # noqa: E501 -from swagger_client.rest import ApiException +import swagger_client # noqa: F401 +from swagger_client.models.api_pipeline_task import ApiPipelineTask # noqa: F401, E501 +from swagger_client.rest import ApiException # noqa: F401 class TestApiPipelineTask(unittest.TestCase): @@ -35,9 +35,9 @@ def 
tearDown(self): def testApiPipelineTask(self): """Test ApiPipelineTask""" # FIXME: construct object with mandatory attributes with example values - # model = swagger_client.models.api_pipeline_task.ApiPipelineTask() # noqa: E501 + # model = swagger_client.models.api_pipeline_task.ApiPipelineTask() pass -if __name__ == '__main__': +if __name__ == "__main__": unittest.main() diff --git a/api/client/test/test_api_pipeline_task_arguments.py b/api/client/test/test_api_pipeline_task_arguments.py index 9895ca53..e6db7ecb 100644 --- a/api/client/test/test_api_pipeline_task_arguments.py +++ b/api/client/test/test_api_pipeline_task_arguments.py @@ -1,6 +1,6 @@ # Copyright 2021 The MLX Contributors -# -# SPDX-License-Identifier: Apache-2.0 +# +# SPDX-License-Identifier: Apache-2.0 # coding: utf-8 """ @@ -18,9 +18,11 @@ import unittest -import swagger_client -from swagger_client.models.api_pipeline_task_arguments import ApiPipelineTaskArguments # noqa: E501 -from swagger_client.rest import ApiException +import swagger_client # noqa: F401 +from swagger_client.models.api_pipeline_task_arguments import ( # noqa: F401 + ApiPipelineTaskArguments, +) +from swagger_client.rest import ApiException # noqa: F401 class TestApiPipelineTaskArguments(unittest.TestCase): @@ -35,9 +37,9 @@ def tearDown(self): def testApiPipelineTaskArguments(self): """Test ApiPipelineTaskArguments""" # FIXME: construct object with mandatory attributes with example values - # model = swagger_client.models.api_pipeline_task_arguments.ApiPipelineTaskArguments() # noqa: E501 + # model = swagger_client.models.api_pipeline_task_arguments.ApiPipelineTaskArguments() pass -if __name__ == '__main__': +if __name__ == "__main__": unittest.main() diff --git a/api/client/test/test_api_run_code_response.py b/api/client/test/test_api_run_code_response.py index 21816bd0..36be4cbb 100644 --- a/api/client/test/test_api_run_code_response.py +++ b/api/client/test/test_api_run_code_response.py @@ -1,6 +1,6 @@ -# Copyright 2021 The 
MLX Contributors -# -# SPDX-License-Identifier: Apache-2.0 +# Copyright 2021 The MLX Contributors +# +# SPDX-License-Identifier: Apache-2.0 # coding: utf-8 """ @@ -18,9 +18,9 @@ import unittest -import swagger_client -from swagger_client.models.api_run_code_response import ApiRunCodeResponse # noqa: E501 -from swagger_client.rest import ApiException +import swagger_client # noqa: F401 +from swagger_client.models.api_run_code_response import ApiRunCodeResponse # noqa: F401, E501 +from swagger_client.rest import ApiException # noqa: F401 class TestApiRunCodeResponse(unittest.TestCase): @@ -35,9 +35,9 @@ def tearDown(self): def testApiRunCodeResponse(self): """Test ApiRunCodeResponse""" # FIXME: construct object with mandatory attributes with example values - # model = swagger_client.models.api_run_code_response.ApiRunCodeResponse() # noqa: E501 + # model = swagger_client.models.api_run_code_response.ApiRunCodeResponse() pass -if __name__ == '__main__': +if __name__ == "__main__": unittest.main() diff --git a/api/client/test/test_api_settings.py b/api/client/test/test_api_settings.py index b2606acd..0ffb6be6 100644 --- a/api/client/test/test_api_settings.py +++ b/api/client/test/test_api_settings.py @@ -1,6 +1,6 @@ # Copyright 2021 The MLX Contributors -# -# SPDX-License-Identifier: Apache-2.0 +# +# SPDX-License-Identifier: Apache-2.0 # coding: utf-8 """ @@ -18,9 +18,9 @@ import unittest -import swagger_client -from swagger_client.models.api_settings import ApiSettings # noqa: E501 -from swagger_client.rest import ApiException +import swagger_client # noqa: F401 +from swagger_client.models.api_settings import ApiSettings # noqa: F401, E501 +from swagger_client.rest import ApiException # noqa: F401 class TestApiSettings(unittest.TestCase): @@ -35,9 +35,9 @@ def tearDown(self): def testApiSettings(self): """Test ApiSettings""" # FIXME: construct object with mandatory attributes with example values - # model = swagger_client.models.api_settings.ApiSettings() # noqa: E501 
+ # model = swagger_client.models.api_settings.ApiSettings() pass -if __name__ == '__main__': +if __name__ == "__main__": unittest.main() diff --git a/api/client/test/test_api_settings_section.py b/api/client/test/test_api_settings_section.py index 589e28e2..38f5cc20 100644 --- a/api/client/test/test_api_settings_section.py +++ b/api/client/test/test_api_settings_section.py @@ -1,6 +1,6 @@ # Copyright 2021 The MLX Contributors -# -# SPDX-License-Identifier: Apache-2.0 +# +# SPDX-License-Identifier: Apache-2.0 # coding: utf-8 """ @@ -18,9 +18,9 @@ import unittest -import swagger_client -from swagger_client.models.api_settings_section import ApiSettingsSection # noqa: E501 -from swagger_client.rest import ApiException +import swagger_client # noqa: F401 +from swagger_client.models.api_settings_section import ApiSettingsSection # noqa: F401, E501 +from swagger_client.rest import ApiException # noqa: F401 class TestApiSettingsSection(unittest.TestCase): @@ -35,9 +35,9 @@ def tearDown(self): def testApiSettingsSection(self): """Test ApiSettingsSection""" # FIXME: construct object with mandatory attributes with example values - # model = swagger_client.models.api_settings_section.ApiSettingsSection() # noqa: E501 + # model = swagger_client.models.api_settings_section.ApiSettingsSection() pass -if __name__ == '__main__': +if __name__ == "__main__": unittest.main() diff --git a/api/client/test/test_api_status.py b/api/client/test/test_api_status.py index 50111e77..de184aed 100644 --- a/api/client/test/test_api_status.py +++ b/api/client/test/test_api_status.py @@ -1,6 +1,6 @@ # Copyright 2021 The MLX Contributors -# -# SPDX-License-Identifier: Apache-2.0 +# +# SPDX-License-Identifier: Apache-2.0 # coding: utf-8 """ @@ -18,9 +18,9 @@ import unittest -import swagger_client -from swagger_client.models.api_status import ApiStatus # noqa: E501 -from swagger_client.rest import ApiException +import swagger_client # noqa: F401 +from swagger_client.models.api_status import 
ApiStatus # noqa: F401, E501 +from swagger_client.rest import ApiException # noqa: F401 class TestApiStatus(unittest.TestCase): @@ -35,9 +35,9 @@ def tearDown(self): def testApiStatus(self): """Test ApiStatus""" # FIXME: construct object with mandatory attributes with example values - # model = swagger_client.models.api_status.ApiStatus() # noqa: E501 + # model = swagger_client.models.api_status.ApiStatus() pass -if __name__ == '__main__': +if __name__ == "__main__": unittest.main() diff --git a/api/client/test/test_api_url.py b/api/client/test/test_api_url.py index 49840fc0..f1ebb2b2 100644 --- a/api/client/test/test_api_url.py +++ b/api/client/test/test_api_url.py @@ -1,6 +1,6 @@ # Copyright 2021 The MLX Contributors -# -# SPDX-License-Identifier: Apache-2.0 +# +# SPDX-License-Identifier: Apache-2.0 # coding: utf-8 """ @@ -18,9 +18,9 @@ import unittest -import swagger_client -from swagger_client.models.api_url import ApiUrl # noqa: E501 -from swagger_client.rest import ApiException +import swagger_client # noqa: F401 +from swagger_client.models.api_url import ApiUrl # noqa: F401, E501 +from swagger_client.rest import ApiException # noqa: F401 class TestApiUrl(unittest.TestCase): @@ -35,9 +35,9 @@ def tearDown(self): def testApiUrl(self): """Test ApiUrl""" # FIXME: construct object with mandatory attributes with example values - # model = swagger_client.models.api_url.ApiUrl() # noqa: E501 + # model = swagger_client.models.api_url.ApiUrl() pass -if __name__ == '__main__': +if __name__ == "__main__": unittest.main() diff --git a/api/client/test/test_application_settings_api.py b/api/client/test/test_application_settings_api.py index 4c156416..1f92eea1 100644 --- a/api/client/test/test_application_settings_api.py +++ b/api/client/test/test_application_settings_api.py @@ -1,6 +1,6 @@ # Copyright 2021 The MLX Contributors -# -# SPDX-License-Identifier: Apache-2.0 +# +# SPDX-License-Identifier: Apache-2.0 # coding: utf-8 """ @@ -18,38 +18,36 @@ import unittest -import 
swagger_client -from swagger_client.api.application_settings_api import ApplicationSettingsApi # noqa: E501 -from swagger_client.rest import ApiException +import swagger_client # noqa: F401 +from swagger_client.api.application_settings_api import ( # noqa: F401 + ApplicationSettingsApi, +) +from swagger_client.rest import ApiException # noqa: F401 class TestApplicationSettingsApi(unittest.TestCase): """ApplicationSettingsApi unit test stubs""" def setUp(self): - self.api = swagger_client.api.application_settings_api.ApplicationSettingsApi() # noqa: E501 + self.api = ( + swagger_client.api.application_settings_api.ApplicationSettingsApi() + ) def tearDown(self): pass def test_get_application_settings(self): - """Test case for get_application_settings - - """ + """Test case for get_application_settings""" pass def test_modify_application_settings(self): - """Test case for modify_application_settings - - """ + """Test case for modify_application_settings""" pass def test_set_application_settings(self): - """Test case for set_application_settings - - """ + """Test case for set_application_settings""" pass -if __name__ == '__main__': +if __name__ == "__main__": unittest.main() diff --git a/api/client/test/test_catalog_service_api.py b/api/client/test/test_catalog_service_api.py index 43c309bc..75cc66db 100644 --- a/api/client/test/test_catalog_service_api.py +++ b/api/client/test/test_catalog_service_api.py @@ -18,32 +18,30 @@ import unittest -import swagger_client -from swagger_client.api.catalog_service_api import CatalogServiceApi # noqa: E501 -from swagger_client.rest import ApiException +import swagger_client # noqa: F401 +from swagger_client.api.catalog_service_api import CatalogServiceApi # noqa: F401, E501 +from swagger_client.rest import ApiException # noqa: F401 class TestCatalogServiceApi(unittest.TestCase): """CatalogServiceApi unit test stubs""" def setUp(self): - self.api = swagger_client.api.catalog_service_api.CatalogServiceApi() # noqa: E501 + self.api 
= ( + swagger_client.api.catalog_service_api.CatalogServiceApi() + ) def tearDown(self): pass def test_list_all_assets(self): - """Test case for list_all_assets - - """ + """Test case for list_all_assets""" pass def test_upload_multiple_assets(self): - """Test case for upload_multiple_assets - - """ + """Test case for upload_multiple_assets""" pass -if __name__ == '__main__': +if __name__ == "__main__": unittest.main() diff --git a/api/client/test/test_component_service_api.py b/api/client/test/test_component_service_api.py index 64fd0db7..dc27274f 100644 --- a/api/client/test/test_component_service_api.py +++ b/api/client/test/test_component_service_api.py @@ -1,6 +1,6 @@ # Copyright 2021 The MLX Contributors -# -# SPDX-License-Identifier: Apache-2.0 +# +# SPDX-License-Identifier: Apache-2.0 # coding: utf-8 """ @@ -18,36 +18,32 @@ import unittest -import swagger_client -from swagger_client.api.component_service_api import ComponentServiceApi # noqa: E501 -from swagger_client.rest import ApiException +import swagger_client # noqa: F401 +from swagger_client.api.component_service_api import ComponentServiceApi # noqa: F401, E501 +from swagger_client.rest import ApiException # noqa: F401 class TestComponentServiceApi(unittest.TestCase): """ComponentServiceApi unit test stubs""" def setUp(self): - self.api = swagger_client.api.component_service_api.ComponentServiceApi() # noqa: E501 + self.api = ( + swagger_client.api.component_service_api.ComponentServiceApi() + ) def tearDown(self): pass def test_approve_components_for_publishing(self): - """Test case for approve_components_for_publishing - - """ + """Test case for approve_components_for_publishing""" pass def test_create_component(self): - """Test case for create_component - - """ + """Test case for create_component""" pass def test_delete_component(self): - """Test case for delete_component - - """ + """Test case for delete_component""" pass def test_download_component_files(self): @@ -58,53 +54,37 @@ def 
test_download_component_files(self): pass def test_generate_component_code(self): - """Test case for generate_component_code - - """ + """Test case for generate_component_code""" pass def test_get_component(self): - """Test case for get_component - - """ + """Test case for get_component""" pass def test_get_component_template(self): - """Test case for get_component_template - - """ + """Test case for get_component_template""" pass def test_list_components(self): - """Test case for list_components - - """ + """Test case for list_components""" pass def test_run_component(self): - """Test case for run_component - - """ + """Test case for run_component""" pass def test_set_featured_components(self): - """Test case for set_featured_components - - """ + """Test case for set_featured_components""" pass def test_upload_component(self): - """Test case for upload_component - - """ + """Test case for upload_component""" pass def test_upload_component_file(self): - """Test case for upload_component_file - - """ + """Test case for upload_component_file""" pass -if __name__ == '__main__': +if __name__ == "__main__": unittest.main() diff --git a/api/client/test/test_credential_service_api.py b/api/client/test/test_credential_service_api.py index affee9f4..131d3455 100644 --- a/api/client/test/test_credential_service_api.py +++ b/api/client/test/test_credential_service_api.py @@ -1,6 +1,6 @@ # Copyright 2021 The MLX Contributors -# -# SPDX-License-Identifier: Apache-2.0 +# +# SPDX-License-Identifier: Apache-2.0 # coding: utf-8 """ @@ -18,38 +18,34 @@ import unittest -import swagger_client -from swagger_client.api.credential_service_api import CredentialServiceApi # noqa: E501 -from swagger_client.rest import ApiException +import swagger_client # noqa: F401 +from swagger_client.api.credential_service_api import CredentialServiceApi # noqa: F401, E501 +from swagger_client.rest import ApiException # noqa: F401 class TestCredentialServiceApi(unittest.TestCase): """CredentialServiceApi 
unit test stubs""" def setUp(self): - self.api = swagger_client.api.credential_service_api.CredentialServiceApi() # noqa: E501 + self.api = ( + swagger_client.api.credential_service_api.CredentialServiceApi() + ) def tearDown(self): pass def test_create_credentials(self): - """Test case for create_credentials - - """ + """Test case for create_credentials""" pass def test_delete_credential(self): - """Test case for delete_credential - - """ + """Test case for delete_credential""" pass def test_get_credential(self): - """Test case for get_credential - - """ + """Test case for get_credential""" pass -if __name__ == '__main__': +if __name__ == "__main__": unittest.main() diff --git a/api/client/test/test_dataset_service_api.py b/api/client/test/test_dataset_service_api.py index 6c429030..72652ad3 100644 --- a/api/client/test/test_dataset_service_api.py +++ b/api/client/test/test_dataset_service_api.py @@ -18,36 +18,32 @@ import unittest -import swagger_client -from swagger_client.api.dataset_service_api import DatasetServiceApi # noqa: E501 -from swagger_client.rest import ApiException +import swagger_client # noqa: F401 +from swagger_client.api.dataset_service_api import DatasetServiceApi # noqa: F401, E501 +from swagger_client.rest import ApiException # noqa: F401 class TestDatasetServiceApi(unittest.TestCase): """DatasetServiceApi unit test stubs""" def setUp(self): - self.api = swagger_client.api.dataset_service_api.DatasetServiceApi() # noqa: E501 + self.api = ( + swagger_client.api.dataset_service_api.DatasetServiceApi() + ) def tearDown(self): pass def test_approve_datasets_for_publishing(self): - """Test case for approve_datasets_for_publishing - - """ + """Test case for approve_datasets_for_publishing""" pass def test_create_dataset(self): - """Test case for create_dataset - - """ + """Test case for create_dataset""" pass def test_delete_dataset(self): - """Test case for delete_dataset - - """ + """Test case for delete_dataset""" pass def 
test_download_dataset_files(self): @@ -58,47 +54,33 @@ def test_download_dataset_files(self): pass def test_generate_dataset_code(self): - """Test case for generate_dataset_code - - """ + """Test case for generate_dataset_code""" pass def test_get_dataset(self): - """Test case for get_dataset - - """ + """Test case for get_dataset""" pass def test_get_dataset_template(self): - """Test case for get_dataset_template - - """ + """Test case for get_dataset_template""" pass def test_list_datasets(self): - """Test case for list_datasets - - """ + """Test case for list_datasets""" pass def test_set_featured_datasets(self): - """Test case for set_featured_datasets - - """ + """Test case for set_featured_datasets""" pass def test_upload_dataset(self): - """Test case for upload_dataset - - """ + """Test case for upload_dataset""" pass def test_upload_dataset_file(self): - """Test case for upload_dataset_file - - """ + """Test case for upload_dataset_file""" pass -if __name__ == '__main__': +if __name__ == "__main__": unittest.main() diff --git a/api/client/test/test_dictionary.py b/api/client/test/test_dictionary.py index 5647c729..e0fcad1b 100644 --- a/api/client/test/test_dictionary.py +++ b/api/client/test/test_dictionary.py @@ -1,6 +1,6 @@ # Copyright 2021 The MLX Contributors -# -# SPDX-License-Identifier: Apache-2.0 +# +# SPDX-License-Identifier: Apache-2.0 # coding: utf-8 """ @@ -18,9 +18,9 @@ import unittest -import swagger_client -from swagger_client.models.dictionary import Dictionary # noqa: E501 -from swagger_client.rest import ApiException +import swagger_client # noqa: F401 +from swagger_client.models.dictionary import Dictionary # noqa: F401, E501 +from swagger_client.rest import ApiException # noqa: F401 class TestDictionary(unittest.TestCase): @@ -35,9 +35,9 @@ def tearDown(self): def testDictionary(self): """Test Dictionary""" # FIXME: construct object with mandatory attributes with example values - # model = swagger_client.models.dictionary.Dictionary() # 
noqa: E501 + # model = swagger_client.models.dictionary.Dictionary() pass -if __name__ == '__main__': +if __name__ == "__main__": unittest.main() diff --git a/api/client/test/test_health_check_api.py b/api/client/test/test_health_check_api.py index 38f62f63..ef9eb895 100644 --- a/api/client/test/test_health_check_api.py +++ b/api/client/test/test_health_check_api.py @@ -1,6 +1,6 @@ # Copyright 2021 The MLX Contributors -# -# SPDX-License-Identifier: Apache-2.0 +# +# SPDX-License-Identifier: Apache-2.0 # coding: utf-8 """ @@ -18,16 +18,16 @@ import unittest -import swagger_client -from swagger_client.api.health_check_api import HealthCheckApi # noqa: E501 -from swagger_client.rest import ApiException +import swagger_client # noqa: F401 +from swagger_client.api.health_check_api import HealthCheckApi # noqa: F401, E501 +from swagger_client.rest import ApiException # noqa: F401 class TestHealthCheckApi(unittest.TestCase): """HealthCheckApi unit test stubs""" def setUp(self): - self.api = swagger_client.api.health_check_api.HealthCheckApi() # noqa: E501 + self.api = swagger_client.api.health_check_api.HealthCheckApi() def tearDown(self): pass @@ -40,5 +40,5 @@ def test_health_check(self): pass -if __name__ == '__main__': +if __name__ == "__main__": unittest.main() diff --git a/api/client/test/test_inference_service_api.py b/api/client/test/test_inference_service_api.py index 9022aa29..27642eec 100644 --- a/api/client/test/test_inference_service_api.py +++ b/api/client/test/test_inference_service_api.py @@ -1,6 +1,6 @@ # Copyright 2021 The MLX Contributors -# -# SPDX-License-Identifier: Apache-2.0 +# +# SPDX-License-Identifier: Apache-2.0 # coding: utf-8 """ @@ -18,24 +18,24 @@ import unittest -import swagger_client -from swagger_client.api.inference_service_api import InferenceServiceApi # noqa: E501 -from swagger_client.rest import ApiException +import swagger_client # noqa: F401 +from swagger_client.api.inference_service_api import InferenceServiceApi # noqa: F401, 
E501 +from swagger_client.rest import ApiException # noqa: F401 class TestInferenceServiceApi(unittest.TestCase): """InferenceServiceApi unit test stubs""" def setUp(self): - self.api = swagger_client.api.inference_service_api.InferenceServiceApi() # noqa: E501 + self.api = ( + swagger_client.api.inference_service_api.InferenceServiceApi() + ) def tearDown(self): pass def test_get_service(self): - """Test case for get_service - - """ + """Test case for get_service""" pass def test_list_services(self): @@ -46,5 +46,5 @@ def test_list_services(self): pass -if __name__ == '__main__': +if __name__ == "__main__": unittest.main() diff --git a/api/client/test/test_model_service_api.py b/api/client/test/test_model_service_api.py index 1e8bd309..99c81fac 100644 --- a/api/client/test/test_model_service_api.py +++ b/api/client/test/test_model_service_api.py @@ -1,6 +1,6 @@ # Copyright 2021 The MLX Contributors -# -# SPDX-License-Identifier: Apache-2.0 +# +# SPDX-License-Identifier: Apache-2.0 # coding: utf-8 """ @@ -18,36 +18,30 @@ import unittest -import swagger_client -from swagger_client.api.model_service_api import ModelServiceApi # noqa: E501 -from swagger_client.rest import ApiException +import swagger_client # noqa: F401 +from swagger_client.api.model_service_api import ModelServiceApi # noqa: F401, E501 +from swagger_client.rest import ApiException # noqa: F401 class TestModelServiceApi(unittest.TestCase): """ModelServiceApi unit test stubs""" def setUp(self): - self.api = swagger_client.api.model_service_api.ModelServiceApi() # noqa: E501 + self.api = swagger_client.api.model_service_api.ModelServiceApi() def tearDown(self): pass def test_approve_models_for_publishing(self): - """Test case for approve_models_for_publishing - - """ + """Test case for approve_models_for_publishing""" pass def test_create_model(self): - """Test case for create_model - - """ + """Test case for create_model""" pass def test_delete_model(self): - """Test case for delete_model - - """ + 
"""Test case for delete_model""" pass def test_download_model_files(self): @@ -58,53 +52,37 @@ def test_download_model_files(self): pass def test_generate_model_code(self): - """Test case for generate_model_code - - """ + """Test case for generate_model_code""" pass def test_get_model(self): - """Test case for get_model - - """ + """Test case for get_model""" pass def test_get_model_template(self): - """Test case for get_model_template - - """ + """Test case for get_model_template""" pass def test_list_models(self): - """Test case for list_models - - """ + """Test case for list_models""" pass def test_run_model(self): - """Test case for run_model - - """ + """Test case for run_model""" pass def test_set_featured_models(self): - """Test case for set_featured_models - - """ + """Test case for set_featured_models""" pass def test_upload_model(self): - """Test case for upload_model - - """ + """Test case for upload_model""" pass def test_upload_model_file(self): - """Test case for upload_model_file - - """ + """Test case for upload_model_file""" pass -if __name__ == '__main__': +if __name__ == "__main__": unittest.main() diff --git a/api/client/test/test_notebook_service_api.py b/api/client/test/test_notebook_service_api.py index e4de5558..7e156ba9 100644 --- a/api/client/test/test_notebook_service_api.py +++ b/api/client/test/test_notebook_service_api.py @@ -1,6 +1,6 @@ # Copyright 2021 The MLX Contributors -# -# SPDX-License-Identifier: Apache-2.0 +# +# SPDX-License-Identifier: Apache-2.0 # coding: utf-8 """ @@ -18,36 +18,32 @@ import unittest -import swagger_client -from swagger_client.api.notebook_service_api import NotebookServiceApi # noqa: E501 -from swagger_client.rest import ApiException +import swagger_client # noqa: F401 +from swagger_client.api.notebook_service_api import NotebookServiceApi # noqa: F401, E501 +from swagger_client.rest import ApiException # noqa: F401 class TestNotebookServiceApi(unittest.TestCase): """NotebookServiceApi unit test stubs""" 
def setUp(self): - self.api = swagger_client.api.notebook_service_api.NotebookServiceApi() # noqa: E501 + self.api = ( + swagger_client.api.notebook_service_api.NotebookServiceApi() + ) def tearDown(self): pass def test_approve_notebooks_for_publishing(self): - """Test case for approve_notebooks_for_publishing - - """ + """Test case for approve_notebooks_for_publishing""" pass def test_create_notebook(self): - """Test case for create_notebook - - """ + """Test case for create_notebook""" pass def test_delete_notebook(self): - """Test case for delete_notebook - - """ + """Test case for delete_notebook""" pass def test_download_notebook_files(self): @@ -58,53 +54,37 @@ def test_download_notebook_files(self): pass def test_generate_notebook_code(self): - """Test case for generate_notebook_code - - """ + """Test case for generate_notebook_code""" pass def test_get_notebook(self): - """Test case for get_notebook - - """ + """Test case for get_notebook""" pass def test_get_notebook_template(self): - """Test case for get_notebook_template - - """ + """Test case for get_notebook_template""" pass def test_list_notebooks(self): - """Test case for list_notebooks - - """ + """Test case for list_notebooks""" pass def test_run_notebook(self): - """Test case for run_notebook - - """ + """Test case for run_notebook""" pass def test_set_featured_notebooks(self): - """Test case for set_featured_notebooks - - """ + """Test case for set_featured_notebooks""" pass def test_upload_notebook(self): - """Test case for upload_notebook - - """ + """Test case for upload_notebook""" pass def test_upload_notebook_file(self): - """Test case for upload_notebook_file - - """ + """Test case for upload_notebook_file""" pass -if __name__ == '__main__': +if __name__ == "__main__": unittest.main() diff --git a/api/client/test/test_pipeline_service_api.py b/api/client/test/test_pipeline_service_api.py index 0f10e752..8fbcf77d 100644 --- a/api/client/test/test_pipeline_service_api.py +++ 
b/api/client/test/test_pipeline_service_api.py @@ -1,6 +1,6 @@ # Copyright 2021 The MLX Contributors -# -# SPDX-License-Identifier: Apache-2.0 +# +# SPDX-License-Identifier: Apache-2.0 # coding: utf-8 """ @@ -18,36 +18,32 @@ import unittest -import swagger_client -from swagger_client.api.pipeline_service_api import PipelineServiceApi # noqa: E501 -from swagger_client.rest import ApiException +import swagger_client # noqa: F401 +from swagger_client.api.pipeline_service_api import PipelineServiceApi # noqa: F401, E501 +from swagger_client.rest import ApiException # noqa: F401 class TestPipelineServiceApi(unittest.TestCase): """PipelineServiceApi unit test stubs""" def setUp(self): - self.api = swagger_client.api.pipeline_service_api.PipelineServiceApi() # noqa: E501 + self.api = ( + swagger_client.api.pipeline_service_api.PipelineServiceApi() + ) def tearDown(self): pass def test_approve_pipelines_for_publishing(self): - """Test case for approve_pipelines_for_publishing - - """ + """Test case for approve_pipelines_for_publishing""" pass def test_create_pipeline(self): - """Test case for create_pipeline - - """ + """Test case for create_pipeline""" pass def test_delete_pipeline(self): - """Test case for delete_pipeline - - """ + """Test case for delete_pipeline""" pass def test_download_pipeline_files(self): @@ -58,35 +54,25 @@ def test_download_pipeline_files(self): pass def test_get_pipeline(self): - """Test case for get_pipeline - - """ + """Test case for get_pipeline""" pass def test_get_template(self): - """Test case for get_template - - """ + """Test case for get_template""" pass def test_list_pipelines(self): - """Test case for list_pipelines - - """ + """Test case for list_pipelines""" pass def test_set_featured_pipelines(self): - """Test case for set_featured_pipelines - - """ + """Test case for set_featured_pipelines""" pass def test_upload_pipeline(self): - """Test case for upload_pipeline - - """ + """Test case for upload_pipeline""" pass -if __name__ == 
'__main__': +if __name__ == "__main__": unittest.main() diff --git a/api/client/test/test_protobuf_any.py b/api/client/test/test_protobuf_any.py index a7fffee0..60562fa3 100644 --- a/api/client/test/test_protobuf_any.py +++ b/api/client/test/test_protobuf_any.py @@ -1,6 +1,6 @@ # Copyright 2021 The MLX Contributors -# -# SPDX-License-Identifier: Apache-2.0 +# +# SPDX-License-Identifier: Apache-2.0 # coding: utf-8 """ @@ -18,9 +18,9 @@ import unittest -import swagger_client -from swagger_client.models.protobuf_any import ProtobufAny # noqa: E501 -from swagger_client.rest import ApiException +import swagger_client # noqa: F401 +from swagger_client.models.protobuf_any import ProtobufAny # noqa: F401, E501 +from swagger_client.rest import ApiException # noqa: F401 class TestProtobufAny(unittest.TestCase): @@ -35,9 +35,9 @@ def tearDown(self): def testProtobufAny(self): """Test ProtobufAny""" # FIXME: construct object with mandatory attributes with example values - # model = swagger_client.models.protobuf_any.ProtobufAny() # noqa: E501 + # model = swagger_client.models.protobuf_any.ProtobufAny() pass -if __name__ == '__main__': +if __name__ == "__main__": unittest.main() diff --git a/api/examples/catalog_api.py b/api/examples/catalog_api.py index 85593982..e50e2033 100644 --- a/api/examples/catalog_api.py +++ b/api/examples/catalog_api.py @@ -1,27 +1,32 @@ # Copyright 2021 The MLX Contributors -# +# # SPDX-License-Identifier: Apache-2.0 from __future__ import print_function import json -import swagger_client +import swagger_client # noqa: F401 -from os import environ as env -from pprint import pprint +from os import environ as env # noqa: F401 +from pprint import pprint # noqa: F401 from swagger_client.api_client import ApiClient, Configuration -from swagger_client.models import ApiListCatalogItemsResponse, ApiCatalogUpload,\ - ApiCatalogUploadItem, ApiCatalogUploadResponse, ApiAccessToken -from swagger_client.rest import ApiException +from swagger_client.models import 
( # noqa: F401 + ApiListCatalogItemsResponse, + ApiCatalogUpload, + ApiCatalogUploadItem, + ApiCatalogUploadResponse, + ApiAccessToken, +) +from swagger_client.rest import ApiException # noqa: F401 from sys import stderr -host = '127.0.0.1' -port = '8080' +host = "127.0.0.1" +port = "8080" # host = env.get("MLX_API_SERVICE_HOST") # port = env.get("MLX_API_SERVICE_PORT") -api_base_path = 'apis/v1alpha1' +api_base_path = "apis/v1alpha1" catalog_upload_file = "./../../bootstrapper/catalog_upload.json" @@ -31,14 +36,13 @@ def get_swagger_client(): config = Configuration() - config.host = f'http://{host}:{port}/{api_base_path}' + config.host = f"http://{host}:{port}/{api_base_path}" api_client = ApiClient(configuration=config) return api_client def print_function_name_decorator(func): - def wrapper(*args, **kwargs): print() print(f"---[ {func.__name__} ]---") @@ -59,30 +63,38 @@ def upload_catalog_assets(upload_file=catalog_upload_file) -> ApiCatalogUploadRe upload_items = json.load(f) upload_body = ApiCatalogUpload( - api_access_tokens=[ApiAccessToken(api_token=IBM_GHE_API_TOKEN, url_host="github.ibm.com")], + api_access_tokens=[ + ApiAccessToken(api_token=IBM_GHE_API_TOKEN, url_host="github.ibm.com") + ], components=upload_items.get("components"), datasets=upload_items.get("datasets"), models=upload_items.get("models"), notebooks=upload_items.get("notebooks"), - pipelines=upload_items.get("pipelines")) + pipelines=upload_items.get("pipelines"), + ) - upload_response: ApiCatalogUploadResponse = api_instance.upload_multiple_assets(upload_body) + upload_response: ApiCatalogUploadResponse = api_instance.upload_multiple_assets( + upload_body + ) - print(f"Uploaded '{upload_response.total_created}' assets, {upload_response.total_errors} errors") + print( + f"Uploaded '{upload_response.total_created}' assets, {upload_response.total_errors} errors" + ) # print a short-ish table instead of the full JSON response - asset_types = [ - "components", - "datasets", - "models", - 
"notebooks", - "pipelines" - ] + asset_types = ["components", "datasets", "models", "notebooks", "pipelines"] for asset_type in asset_types: asset_list = upload_response.__getattribute__(asset_type) print(f"\n{asset_type.upper()}:\n") for asset in asset_list: - print("%s %s %s" % (asset.id, asset.created_at.strftime("%Y-%m-%d %H:%M:%S"), asset.name)) + print( + "%s %s %s" + % ( + asset.id, + asset.created_at.strftime("%Y-%m-%d %H:%M:%S"), + asset.name, + ) + ) if upload_response.total_errors > 0: print(f"\nERRORS:\n") @@ -92,7 +104,11 @@ def upload_catalog_assets(upload_file=catalog_upload_file) -> ApiCatalogUploadRe return upload_response except ApiException as e: - print("Exception when calling CatalogServiceApi -> upload_multiple_assets: %s\n" % e, file=stderr) + print( + "Exception when calling CatalogServiceApi -> upload_multiple_assets: %s\n" + % e, + file=stderr, + ) raise e return None @@ -108,7 +124,7 @@ def delete_assets(upload_assets_response: ApiCatalogUploadResponse = None): "datasets": swagger_client.DatasetServiceApi(api_client).delete_dataset, "models": swagger_client.ModelServiceApi(api_client).delete_model, "notebooks": swagger_client.NotebookServiceApi(api_client).delete_notebook, - "pipelines": swagger_client.PipelineServiceApi(api_client).delete_pipeline + "pipelines": swagger_client.PipelineServiceApi(api_client).delete_pipeline, } try: @@ -125,7 +141,9 @@ def delete_assets(upload_assets_response: ApiCatalogUploadResponse = None): @print_function_name_decorator -def list_assets(filter_dict: dict = {}, sort_by: str = None) -> ApiListCatalogItemsResponse: +def list_assets( + filter_dict: dict = {}, sort_by: str = None +) -> ApiListCatalogItemsResponse: api_client = get_swagger_client() api_instance = swagger_client.CatalogServiceApi(api_client=api_client) @@ -133,27 +151,32 @@ def list_assets(filter_dict: dict = {}, sort_by: str = None) -> ApiListCatalogIt try: filter_str = json.dumps(filter_dict) if filter_dict else None - api_response: 
ApiListCatalogItemsResponse = \ - api_instance.list_all_assets(filter=filter_str, sort_by=sort_by) + api_response: ApiListCatalogItemsResponse = api_instance.list_all_assets( + filter=filter_str, sort_by=sort_by + ) - asset_types = [ - "components", - "datasets", - "models", - "notebooks", - "pipelines" - ] + asset_types = ["components", "datasets", "models", "notebooks", "pipelines"] # print a short-ish table instead of the full JSON response for asset_type in asset_types: asset_list = api_response.__getattribute__(asset_type) for asset in asset_list: - print("%s %s %s" % (asset.id, asset.created_at.strftime("%Y-%m-%d %H:%M:%S"), asset.name)) + print( + "%s %s %s" + % ( + asset.id, + asset.created_at.strftime("%Y-%m-%d %H:%M:%S"), + asset.name, + ) + ) return api_response except ApiException as e: - print("Exception when calling CatalogServiceApi -> list_all_assets: %s\n" % e, file=stderr) + print( + "Exception when calling CatalogServiceApi -> list_all_assets: %s\n" % e, + file=stderr, + ) return [] @@ -173,5 +196,5 @@ def main(): list_assets() -if __name__ == '__main__': +if __name__ == "__main__": main() diff --git a/api/examples/components_api.py b/api/examples/components_api.py index 84c31f6b..47645068 100644 --- a/api/examples/components_api.py +++ b/api/examples/components_api.py @@ -1,5 +1,5 @@ # Copyright 2021 The MLX Contributors -# +# # SPDX-License-Identifier: Apache-2.0 from __future__ import print_function @@ -9,42 +9,48 @@ import os import random import re -import swagger_client +import swagger_client # noqa: F401 import tarfile import tempfile from io import BytesIO -from os import environ as env -from pprint import pprint +from os import environ as env # noqa: F401 +from pprint import pprint # noqa: F401 from swagger_client.api_client import ApiClient, Configuration -from swagger_client.models import ApiComponent, ApiGetTemplateResponse, ApiListComponentsResponse, \ - ApiGenerateCodeResponse, ApiRunCodeResponse -from swagger_client.rest import 
ApiException +from swagger_client.models import ( + ApiComponent, + ApiGetTemplateResponse, + ApiListComponentsResponse, + ApiGenerateCodeResponse, + ApiRunCodeResponse, +) +from swagger_client.rest import ApiException # noqa: F401 from sys import stderr -from urllib3.response import HTTPResponse +from urllib3.response import HTTPResponse # noqa: F401 -host = '127.0.0.1' -port = '8080' +host = "127.0.0.1" +port = "8080" # host = env.get("MLX_API_SERVICE_HOST") # port = env.get("MLX_API_SERVICE_PORT") -api_base_path = 'apis/v1alpha1' +api_base_path = "apis/v1alpha1" -yaml_files = glob.glob("./../../../katalog/component-samples/**/component.yaml", recursive=True) +yaml_files = glob.glob( + "./../../../katalog/component-samples/**/component.yaml", recursive=True +) def get_swagger_client(): config = Configuration() - config.host = f'http://{host}:{port}/{api_base_path}' + config.host = f"http://{host}:{port}/{api_base_path}" api_client = ApiClient(configuration=config) return api_client def print_function_name_decorator(func): - def wrapper(*args, **kwargs): print() print(f"---[ {func.__name__}{args}{kwargs} ]---") @@ -74,12 +80,17 @@ def upload_component_template(uploadfile_name, name=None) -> str: api_instance = swagger_client.ComponentServiceApi(api_client=api_client) try: - component: ApiComponent = api_instance.upload_component(uploadfile=uploadfile_name, name=name) + component: ApiComponent = api_instance.upload_component( + uploadfile=uploadfile_name, name=name + ) print(f"Uploaded '{component.name}': {component.id}") return component.id except ApiException as e: - print("Exception when calling ComponentServiceApi -> upload_component: %s\n" % e, file=stderr) + print( + "Exception when calling ComponentServiceApi -> upload_component: %s\n" % e, + file=stderr, + ) raise e return None @@ -106,11 +117,17 @@ def upload_component_file(component_id, file_path): api_instance = swagger_client.ComponentServiceApi(api_client=api_client) try: - response = 
api_instance.upload_component_file(id=component_id, uploadfile=file_path) + response = api_instance.upload_component_file( + id=component_id, uploadfile=file_path + ) print(f"Upload file '{file_path}' to component with ID '{component_id}'") except ApiException as e: - print("Exception when calling ComponentServiceApi -> upload_component_file: %s\n" % e, file=stderr) + print( + "Exception when calling ComponentServiceApi -> upload_component_file: %s\n" + % e, + file=stderr, + ) raise e @@ -121,13 +138,13 @@ def download_component_tgz(component_id) -> str: api_instance = swagger_client.ComponentServiceApi(api_client=api_client) try: - response: HTTPResponse = \ - api_instance.download_component_files(component_id, - include_generated_code=True, - _preload_content=False) + response: HTTPResponse = api_instance.download_component_files( + component_id, include_generated_code=True, _preload_content=False + ) - attachment_header = response.info().get("Content-Disposition", - f"attachment; filename={component_id}.tgz") + attachment_header = response.info().get( + "Content-Disposition", f"attachment; filename={component_id}.tgz" + ) download_filename = re.sub("attachment; filename=", "", attachment_header) @@ -135,7 +152,7 @@ def download_component_tgz(component_id) -> str: os.makedirs(download_dir, exist_ok=True) tarfile_path = os.path.join(download_dir, download_filename) - with open(tarfile_path, 'wb') as f: + with open(tarfile_path, "wb") as f: f.write(response.read()) print(tarfile_path) @@ -143,7 +160,11 @@ def download_component_tgz(component_id) -> str: return tarfile_path except ApiException as e: - print("Exception when calling ComponentServiceApi -> download_component_files: %s\n" % e, file=stderr) + print( + "Exception when calling ComponentServiceApi -> download_component_files: %s\n" + % e, + file=stderr, + ) return "Download failed?" 
@@ -155,35 +176,49 @@ def verify_component_download(component_id: str) -> bool: api_instance = swagger_client.ComponentServiceApi(api_client=api_client) try: - response: HTTPResponse = \ - api_instance.download_component_files(component_id, - include_generated_code=True, - _preload_content=False) + response: HTTPResponse = api_instance.download_component_files( + component_id, include_generated_code=True, _preload_content=False + ) tgz_file = BytesIO(response.read()) tar = tarfile.open(fileobj=tgz_file) - file_contents = {m.name.split(".")[-1]: tar.extractfile(m).read().decode("utf-8") - for m in tar.getmembers()} + file_contents = { + m.name.split(".")[-1]: tar.extractfile(m).read().decode("utf-8") + for m in tar.getmembers() + } - template_response: ApiGetTemplateResponse = api_instance.get_component_template(component_id) + template_response: ApiGetTemplateResponse = api_instance.get_component_template( + component_id + ) template_text_from_api = template_response.template - assert template_text_from_api == file_contents.get("yaml", file_contents.get("yml")) + assert template_text_from_api == file_contents.get( + "yaml", file_contents.get("yml") + ) - generate_code_response: ApiGenerateCodeResponse = api_instance.generate_component_code(component_id) + generate_code_response: ApiGenerateCodeResponse = ( + api_instance.generate_component_code(component_id) + ) run_script_from_api = generate_code_response.script - regex = re.compile(r"name='[^']*'") # controller adds random chars to name, replace those + regex = re.compile( + r"name='[^']*'" + ) # controller adds random chars to name, replace those - assert regex.sub("name='...'", run_script_from_api) == \ - regex.sub("name='...'", file_contents.get("py")) + assert regex.sub("name='...'", run_script_from_api) == regex.sub( + "name='...'", file_contents.get("py") + ) print("downloaded files match") return True except ApiException as e: - print("Exception when calling ComponentServiceApi -> download_component_files: 
%s\n" % e, file=stderr) + print( + "Exception when calling ComponentServiceApi -> download_component_files: %s\n" + % e, + file=stderr, + ) return False @@ -198,7 +233,11 @@ def approve_components_for_publishing(component_ids: [str]): api_response = api_instance.approve_components_for_publishing(component_ids) except ApiException as e: - print("Exception when calling ComponentServiceApi -> approve_components_for_publishing: %s\n" % e, file=stderr) + print( + "Exception when calling ComponentServiceApi -> approve_components_for_publishing: %s\n" + % e, + file=stderr, + ) return None @@ -213,7 +252,11 @@ def set_featured_components(component_ids: [str]): api_response = api_instance.set_featured_components(component_ids) except ApiException as e: - print("Exception when calling ComponentServiceApi -> set_featured_components: %s\n" % e, file=stderr) + print( + "Exception when calling ComponentServiceApi -> set_featured_components: %s\n" + % e, + file=stderr, + ) return None @@ -230,7 +273,10 @@ def get_component(component_id: str) -> ApiComponent: return component_meta except ApiException as e: - print("Exception when calling ComponentServiceApi -> get_component: %s\n" % e, file=stderr) + print( + "Exception when calling ComponentServiceApi -> get_component: %s\n" % e, + file=stderr, + ) return None @@ -244,7 +290,10 @@ def delete_component(component_id: str): try: api_instance.delete_component(component_id) except ApiException as e: - print("Exception when calling ComponentServiceApi -> delete_component: %s\n" % e, file=stderr) + print( + "Exception when calling ComponentServiceApi -> delete_component: %s\n" % e, + file=stderr, + ) @print_function_name_decorator @@ -254,7 +303,9 @@ def get_template(template_id: str) -> str: api_instance = swagger_client.ComponentServiceApi(api_client=api_client) try: - template_response: ApiGetTemplateResponse = api_instance.get_component_template(template_id) + template_response: ApiGetTemplateResponse = 
api_instance.get_component_template( + template_id + ) print(template_response.template) # yaml_dict = yaml.load(template_response.template, Loader=yaml.FullLoader) @@ -267,7 +318,11 @@ def get_template(template_id: str) -> str: return template_response.template except ApiException as e: - print("Exception when calling ComponentServiceApi -> get_component_template: %s\n" % e, file=stderr) + print( + "Exception when calling ComponentServiceApi -> get_component_template: %s\n" + % e, + file=stderr, + ) return None @@ -279,13 +334,18 @@ def generate_code(component_id: str) -> str: api_instance = swagger_client.ComponentServiceApi(api_client=api_client) try: - generate_code_response: ApiGenerateCodeResponse = api_instance.generate_component_code(component_id) + generate_code_response: ApiGenerateCodeResponse = ( + api_instance.generate_component_code(component_id) + ) print(generate_code_response.script) return generate_code_response.script except ApiException as e: - print("Exception while calling ComponentServiceApi -> generate_code: %s\n" % e, file=stderr) + print( + "Exception while calling ComponentServiceApi -> generate_code: %s\n" % e, + file=stderr, + ) return None @@ -297,14 +357,21 @@ def run_code(component_id: str, parameters: dict = {}, run_name: str = None) -> api_instance = swagger_client.ComponentServiceApi(api_client=api_client) try: - param_array = [{"name": key, "value": value} for key, value in parameters.items()] - run_code_response: ApiRunCodeResponse = api_instance.run_component(component_id, param_array, run_name=run_name) + param_array = [ + {"name": key, "value": value} for key, value in parameters.items() + ] + run_code_response: ApiRunCodeResponse = api_instance.run_component( + component_id, param_array, run_name=run_name + ) print(run_code_response.run_url) return run_code_response.run_url except ApiException as e: - print("Exception while calling ComponentServiceApi -> run_code: %s\n" % e, file=stderr) + print( + "Exception while calling 
ComponentServiceApi -> run_code: %s\n" % e, + file=stderr, + ) return None @@ -318,15 +385,23 @@ def list_components(filter_dict: dict = {}, sort_by: str = None) -> [ApiComponen try: filter_str = json.dumps(filter_dict) if filter_dict else None - api_response: ApiListComponentsResponse = api_instance.list_components(filter=filter_str, sort_by=sort_by) + api_response: ApiListComponentsResponse = api_instance.list_components( + filter=filter_str, sort_by=sort_by + ) for c in api_response.components: - print("%s %s %s" % (c.id, c.created_at.strftime("%Y-%m-%d %H:%M:%S"), c.name)) + print( + "%s %s %s" + % (c.id, c.created_at.strftime("%Y-%m-%d %H:%M:%S"), c.name) + ) return api_response.components except ApiException as e: - print("Exception when calling ComponentServiceApi -> list_components: %s\n" % e, file=stderr) + print( + "Exception when calling ComponentServiceApi -> list_components: %s\n" % e, + file=stderr, + ) return [] @@ -349,7 +424,7 @@ def main(): approve_components_for_publishing(component_ids) # randomly selected a component - i = random.randint(0, len(component_ids)-1) + i = random.randint(0, len(component_ids) - 1) component_id = component_ids[i] # show one randomly selected component @@ -361,22 +436,24 @@ def main(): verify_component_download(component_id) upload_component_file(component_id, tgz_file) - component = list_components(filter_dict={"name": 'Create Secret - Kubernetes Cluster'})[0] + component = list_components( + filter_dict={"name": "Create Secret - Kubernetes Cluster"} + )[0] generate_code(component.id) args = { - 'token': env.get("IBM_GHE_API_TOKEN"), - 'url': 'https://raw.github.ibm.com/user/repo/master/secret.yml', - 'name': 'my-test-credential' + "token": env.get("IBM_GHE_API_TOKEN"), + "url": "https://raw.github.ibm.com/user/repo/master/secret.yml", + "name": "my-test-credential", } run_code(component.id, args, f"Running component '{component.id}'") # # delete one component # delete_component(component_id) - + # # update a 
component # component = list_components(filter_dict={"name": 'Fabric for Deep Learning - Train Model'})[0] # update_component_template(component.id, "temp/files/ffdl_train.yaml") -if __name__ == '__main__': +if __name__ == "__main__": main() diff --git a/api/examples/credentials_api.py b/api/examples/credentials_api.py index 27cefaca..249417e8 100644 --- a/api/examples/credentials_api.py +++ b/api/examples/credentials_api.py @@ -1,41 +1,40 @@ # Copyright 2021 The MLX Contributors -# -# SPDX-License-Identifier: Apache-2.0 +# +# SPDX-License-Identifier: Apache-2.0 from __future__ import print_function import json import random -import swagger_client +import swagger_client # noqa: F401 -from os import environ as env -from pprint import pprint +from os import environ as env # noqa: F401 +from pprint import pprint # noqa: F401 from pipelines_api import list_pipelines from swagger_client.api_client import ApiClient, Configuration from swagger_client.models import ApiCredential, ApiListCredentialsResponse -from swagger_client.rest import ApiException +from swagger_client.rest import ApiException # noqa: F401 from sys import stderr -from urllib3.response import HTTPResponse +from urllib3.response import HTTPResponse # noqa: F401 -host = '127.0.0.1' -port = '8080' +host = "127.0.0.1" +port = "8080" # host = env.get("MLX_API_SERVICE_HOST") # port = env.get("MLX_API_SERVICE_PORT") -api_base_path = 'apis/v1alpha1' +api_base_path = "apis/v1alpha1" def get_swagger_client(): config = Configuration() - config.host = f'http://{host}:{port}/{api_base_path}' + config.host = f"http://{host}:{port}/{api_base_path}" api_client = ApiClient(configuration=config) return api_client def print_function_name_decorator(func): - def wrapper(*args, **kwargs): print() print(f"---[ {func.__name__}{args}{kwargs} ]---") @@ -46,20 +45,28 @@ def wrapper(*args, **kwargs): @print_function_name_decorator -def create_credential(pipeline_id: str, project_id: str, data_assets: [str] = []) -> ApiCredential: 
+def create_credential( + pipeline_id: str, project_id: str, data_assets: [str] = [] +) -> ApiCredential: api_client = get_swagger_client() api_instance = swagger_client.CredentialServiceApi(api_client=api_client) try: - api_credential = ApiCredential(pipeline_id=pipeline_id, project_id=project_id, data_assets=data_assets) + api_credential = ApiCredential( + pipeline_id=pipeline_id, project_id=project_id, data_assets=data_assets + ) api_response: ApiCredential = api_instance.create_credential(api_credential) return api_response except ApiException as e: - print("Exception when calling CredentialServiceApi -> create_credential: %s\n" % e, file=stderr) + print( + "Exception when calling CredentialServiceApi -> create_credential: %s\n" + % e, + file=stderr, + ) return [] @@ -76,7 +83,10 @@ def get_credential(credential_id: str) -> ApiCredential: return api_credential except ApiException as e: - print("Exception when calling CredentialServiceApi -> get_credential: %s\n" % e, file=stderr) + print( + "Exception when calling CredentialServiceApi -> get_credential: %s\n" % e, + file=stderr, + ) return None @@ -90,7 +100,11 @@ def delete_credential(credential_id: str): try: api_instance.delete_credential(credential_id) except ApiException as e: - print("Exception when calling CredentialServiceApi -> delete_credential: %s\n" % e, file=stderr) + print( + "Exception when calling CredentialServiceApi -> delete_credential: %s\n" + % e, + file=stderr, + ) @print_function_name_decorator @@ -102,15 +116,28 @@ def list_credentials(filter_dict: dict = {}, sort_by: str = None) -> [ApiCredent try: filter_str = json.dumps(filter_dict) if filter_dict else None - api_response: ApiListCredentialsResponse = api_instance.list_credentials(filter=filter_str, sort_by=sort_by) + api_response: ApiListCredentialsResponse = api_instance.list_credentials( + filter=filter_str, sort_by=sort_by + ) for c in api_response.credentials: - print("%s %s pl:%s pr:%s" % (c.id, c.created_at.strftime("%Y-%m-%d 
%H:%M:%S"), c.pipeline_id, c.project_id)) + print( + "%s %s pl:%s pr:%s" + % ( + c.id, + c.created_at.strftime("%Y-%m-%d %H:%M:%S"), + c.pipeline_id, + c.project_id, + ) + ) return api_response.credentials except ApiException as e: - print("Exception when calling CredentialServiceApi -> list_credentials: %s\n" % e, file=stderr) + print( + "Exception when calling CredentialServiceApi -> list_credentials: %s\n" % e, + file=stderr, + ) return [] @@ -118,10 +145,12 @@ def list_credentials(filter_dict: dict = {}, sort_by: str = None) -> [ApiCredent def main(): # select a random pipeline pipelines = list_pipelines() - i = random.randint(0, len(pipelines)-1) + i = random.randint(0, len(pipelines) - 1) # create a new credential - credential = create_credential(pipeline_id=pipelines[i].id, project_id="xyz", data_assets=["data1", "data2"]) + credential = create_credential( + pipeline_id=pipelines[i].id, project_id="xyz", data_assets=["data1", "data2"] + ) pprint(credential) # list credentials @@ -138,6 +167,6 @@ def main(): delete_credential(credential.id) -if __name__ == '__main__': +if __name__ == "__main__": # delete_credential("*") main() diff --git a/api/examples/dataset_api.py b/api/examples/dataset_api.py index ce6a8ea4..9467b495 100644 --- a/api/examples/dataset_api.py +++ b/api/examples/dataset_api.py @@ -8,35 +8,44 @@ import os import random import re -import swagger_client +import swagger_client # noqa: F401 import tarfile import tempfile from glob import glob from io import BytesIO -from os import environ as env -from pprint import pprint +from os import environ as env # noqa: F401 +from pprint import pprint # noqa: F401 from swagger_client.api_client import ApiClient, Configuration -from swagger_client.models import ApiDataset, ApiGetTemplateResponse, ApiListDatasetsResponse, \ - ApiGenerateCodeResponse, ApiRunCodeResponse -from swagger_client.rest import ApiException +from swagger_client.models import ( + ApiDataset, + ApiGetTemplateResponse, + 
ApiListDatasetsResponse, + ApiGenerateCodeResponse, + ApiRunCodeResponse, +) +from swagger_client.rest import ApiException # noqa: F401 from sys import stderr -from urllib3.response import HTTPResponse +from urllib3.response import HTTPResponse # noqa: F401 -host = '127.0.0.1' -port = '8080' +host = "127.0.0.1" +port = "8080" # host = env.get("MLX_API_SERVICE_HOST") # port = env.get("MLX_API_SERVICE_PORT") -api_base_path = 'apis/v1alpha1' +api_base_path = "apis/v1alpha1" -yaml_files = sorted(filter(lambda f: "template" not in f, - glob("./../../../katalog/dataset-samples/**/*.yaml", recursive=True))) +yaml_files = sorted( + filter( + lambda f: "template" not in f, + glob("./../../../katalog/dataset-samples/**/*.yaml", recursive=True), + ) +) def get_swagger_client(): config = Configuration() - config.host = f'http://{host}:{port}/{api_base_path}' + config.host = f"http://{host}:{port}/{api_base_path}" api_client = ApiClient(configuration=config) return api_client @@ -47,6 +56,7 @@ def wrapper(*args, **kwargs): print(f"---[ {func.__name__}{args}{kwargs} ]---") print() return func(*args, **kwargs) + return wrapper @@ -69,12 +79,17 @@ def upload_dataset_template(uploadfile_name, name=None) -> str: api_instance = swagger_client.DatasetServiceApi(api_client=api_client) try: - dataset: ApiDataset = api_instance.upload_dataset(uploadfile=uploadfile_name, name=name) + dataset: ApiDataset = api_instance.upload_dataset( + uploadfile=uploadfile_name, name=name + ) print(f"Uploaded '{dataset.name}': {dataset.id}") return dataset.id except ApiException as e: - print("Exception when calling DatasetServiceApi -> upload_dataset: %s\n" % e, file=stderr) + print( + "Exception when calling DatasetServiceApi -> upload_dataset: %s\n" % e, + file=stderr, + ) # raise e return None @@ -103,7 +118,10 @@ def upload_dataset_file(dataset_id, file_path): print(f"Upload file '{file_path}' to dataset with ID '{dataset_id}'") except ApiException as e: - print("Exception when calling 
DatasetServiceApi -> upload_dataset_file: %s\n" % e, file=stderr) + print( + "Exception when calling DatasetServiceApi -> upload_dataset_file: %s\n" % e, + file=stderr, + ) raise e @@ -113,13 +131,13 @@ def download_dataset_tgz(dataset_id) -> str: api_instance = swagger_client.DatasetServiceApi(api_client=api_client) try: - response: HTTPResponse = \ - api_instance.download_dataset_files(dataset_id, - include_generated_code=True, - _preload_content=False) + response: HTTPResponse = api_instance.download_dataset_files( + dataset_id, include_generated_code=True, _preload_content=False + ) - attachment_header = response.info().get("Content-Disposition", - f"attachment; filename={dataset_id}.tgz") + attachment_header = response.info().get( + "Content-Disposition", f"attachment; filename={dataset_id}.tgz" + ) download_filename = re.sub("attachment; filename=", "", attachment_header) @@ -127,7 +145,7 @@ def download_dataset_tgz(dataset_id) -> str: os.makedirs(download_dir, exist_ok=True) tarfile_path = os.path.join(download_dir, download_filename) - with open(tarfile_path, 'wb') as f: + with open(tarfile_path, "wb") as f: f.write(response.read()) print(tarfile_path) @@ -135,7 +153,11 @@ def download_dataset_tgz(dataset_id) -> str: return tarfile_path except ApiException as e: - print("Exception when calling DatasetServiceApi -> download_dataset_files: %s\n" % e, file=stderr) + print( + "Exception when calling DatasetServiceApi -> download_dataset_files: %s\n" + % e, + file=stderr, + ) return "Download failed?" 
@@ -146,20 +168,25 @@ def verify_dataset_download(dataset_id: str) -> bool: api_instance = swagger_client.DatasetServiceApi(api_client=api_client) try: - response: HTTPResponse = \ - api_instance.download_dataset_files(dataset_id, - include_generated_code=True, - _preload_content=False) + response: HTTPResponse = api_instance.download_dataset_files( + dataset_id, include_generated_code=True, _preload_content=False + ) tgz_file = BytesIO(response.read()) tar = tarfile.open(fileobj=tgz_file) - file_contents = {m.name.split(".")[-1]: tar.extractfile(m).read().decode("utf-8") - for m in tar.getmembers()} + file_contents = { + m.name.split(".")[-1]: tar.extractfile(m).read().decode("utf-8") + for m in tar.getmembers() + } - template_response: ApiGetTemplateResponse = api_instance.get_dataset_template(dataset_id) + template_response: ApiGetTemplateResponse = api_instance.get_dataset_template( + dataset_id + ) template_text_from_api = template_response.template - assert template_text_from_api == file_contents.get("yaml", file_contents.get("yml")) + assert template_text_from_api == file_contents.get( + "yaml", file_contents.get("yml") + ) # TODO: verify generated code # generate_code_response: ApiGenerateCodeResponse = api_instance.generate_dataset_code(dataset_id) @@ -175,7 +202,11 @@ def verify_dataset_download(dataset_id: str) -> bool: return True except ApiException as e: - print("Exception when calling DatasetServiceApi -> download_dataset_files: %s\n" % e, file=stderr) + print( + "Exception when calling DatasetServiceApi -> download_dataset_files: %s\n" + % e, + file=stderr, + ) return False @@ -189,7 +220,11 @@ def approve_datasets_for_publishing(dataset_ids: [str]): api_response = api_instance.approve_datasets_for_publishing(dataset_ids) except ApiException as e: - print("Exception when calling DatasetServiceApi -> approve_datasets_for_publishing: %s\n" % e, file=stderr) + print( + "Exception when calling DatasetServiceApi -> approve_datasets_for_publishing: %s\n" 
+ % e, + file=stderr, + ) return None @@ -203,7 +238,11 @@ def set_featured_datasets(dataset_ids: [str]): api_response = api_instance.set_featured_datasets(dataset_ids) except ApiException as e: - print("Exception when calling DatasetServiceApi -> set_featured_datasets: %s\n" % e, file=stderr) + print( + "Exception when calling DatasetServiceApi -> set_featured_datasets: %s\n" + % e, + file=stderr, + ) return None @@ -219,7 +258,10 @@ def get_dataset(dataset_id: str) -> ApiDataset: return dataset_meta except ApiException as e: - print("Exception when calling DatasetServiceApi -> get_dataset: %s\n" % e, file=stderr) + print( + "Exception when calling DatasetServiceApi -> get_dataset: %s\n" % e, + file=stderr, + ) return None @@ -232,7 +274,10 @@ def delete_dataset(dataset_id: str): try: api_instance.delete_dataset(dataset_id) except ApiException as e: - print("Exception when calling DatasetServiceApi -> delete_dataset: %s\n" % e, file=stderr) + print( + "Exception when calling DatasetServiceApi -> delete_dataset: %s\n" % e, + file=stderr, + ) @print_function_name_decorator @@ -241,7 +286,9 @@ def get_template(template_id: str) -> str: api_instance = swagger_client.DatasetServiceApi(api_client=api_client) try: - template_response: ApiGetTemplateResponse = api_instance.get_dataset_template(template_id) + template_response: ApiGetTemplateResponse = api_instance.get_dataset_template( + template_id + ) print(template_response.template) # yaml_dict = yaml.load(template_response.template, Loader=yaml.FullLoader) @@ -254,7 +301,11 @@ def get_template(template_id: str) -> str: return template_response.template except ApiException as e: - print("Exception when calling DatasetServiceApi -> get_dataset_template: %s\n" % e, file=stderr) + print( + "Exception when calling DatasetServiceApi -> get_dataset_template: %s\n" + % e, + file=stderr, + ) return None @@ -265,13 +316,18 @@ def generate_code(dataset_id: str) -> str: api_instance = 
swagger_client.DatasetServiceApi(api_client=api_client) try: - generate_code_response: ApiGenerateCodeResponse = api_instance.generate_dataset_code(dataset_id) + generate_code_response: ApiGenerateCodeResponse = ( + api_instance.generate_dataset_code(dataset_id) + ) print(generate_code_response.script) return generate_code_response.script except ApiException as e: - print("Exception while calling DatasetServiceApi -> generate_code: %s\n" % e, file=stderr) + print( + "Exception while calling DatasetServiceApi -> generate_code: %s\n" % e, + file=stderr, + ) return None @@ -282,16 +338,21 @@ def run_code(dataset_id: str, parameters: dict = {}, run_name: str = None) -> st api_instance = swagger_client.DatasetServiceApi(api_client=api_client) try: - param_array = [{"name": key, "value": value} for key, value in parameters.items()] - run_code_response: ApiRunCodeResponse = api_instance.run_dataset(dataset_id, - run_name=run_name, - parameters=param_array) + param_array = [ + {"name": key, "value": value} for key, value in parameters.items() + ] + run_code_response: ApiRunCodeResponse = api_instance.run_dataset( + dataset_id, run_name=run_name, parameters=param_array + ) print(run_code_response.run_url) return run_code_response.run_url except ApiException as e: - print("Exception while calling DatasetServiceApi -> run_code: %s\n" % e, file=stderr) + print( + "Exception while calling DatasetServiceApi -> run_code: %s\n" % e, + file=stderr, + ) return None @@ -304,15 +365,23 @@ def list_datasets(filter_dict: dict = {}, sort_by: str = None) -> [ApiDataset]: try: filter_str = json.dumps(filter_dict) if filter_dict else None - api_response: ApiListDatasetsResponse = api_instance.list_datasets(filter=filter_str, sort_by=sort_by) + api_response: ApiListDatasetsResponse = api_instance.list_datasets( + filter=filter_str, sort_by=sort_by + ) for c in api_response.datasets: - print("%s %s %s" % (c.id, c.created_at.strftime("%Y-%m-%d %H:%M:%S"), c.name)) + print( + "%s %s %s" + % 
(c.id, c.created_at.strftime("%Y-%m-%d %H:%M:%S"), c.name) + ) return api_response.datasets except ApiException as e: - print("Exception when calling DatasetServiceApi -> list_datasets: %s\n" % e, file=stderr) + print( + "Exception when calling DatasetServiceApi -> list_datasets: %s\n" % e, + file=stderr, + ) return [] @@ -363,6 +432,6 @@ def main(): # update_dataset_template(dataset.id, "temp/files/fashion-mnist.yaml") -if __name__ == '__main__': +if __name__ == "__main__": pprint(yaml_files) main() diff --git a/api/examples/models_api.py b/api/examples/models_api.py index a55ba53f..514c4c64 100644 --- a/api/examples/models_api.py +++ b/api/examples/models_api.py @@ -1,5 +1,5 @@ # Copyright 2021 The MLX Contributors -# +# # SPDX-License-Identifier: Apache-2.0 from __future__ import print_function @@ -8,36 +8,45 @@ import os import random import re -import swagger_client +import swagger_client # noqa: F401 import tarfile import tempfile from glob import glob from io import BytesIO -from os import environ as env -from pprint import pprint +from os import environ as env # noqa: F401 +from pprint import pprint # noqa: F401 from swagger_client.api_client import ApiClient, Configuration -from swagger_client.models import ApiModel, ApiGetTemplateResponse, ApiListModelsResponse, \ - ApiGenerateModelCodeResponse, ApiRunCodeResponse -from swagger_client.rest import ApiException +from swagger_client.models import ( + ApiModel, + ApiGetTemplateResponse, + ApiListModelsResponse, + ApiGenerateModelCodeResponse, + ApiRunCodeResponse, +) +from swagger_client.rest import ApiException # noqa: F401 from sys import stderr -from urllib3.response import HTTPResponse +from urllib3.response import HTTPResponse # noqa: F401 -host = '127.0.0.1' -port = '8080' +host = "127.0.0.1" +port = "8080" # host = env.get("MLX_API_SERVICE_HOST") # port = env.get("MLX_API_SERVICE_PORT") -api_base_path = 'apis/v1alpha1' +api_base_path = "apis/v1alpha1" -yaml_files = sorted(filter(lambda f: "template" 
not in f, - glob("./../../../katalog/model-samples/**/*.yaml", recursive=True))) +yaml_files = sorted( + filter( + lambda f: "template" not in f, + glob("./../../../katalog/model-samples/**/*.yaml", recursive=True), + ) +) def get_swagger_client(): config = Configuration() - config.host = f'http://{host}:{port}/{api_base_path}' + config.host = f"http://{host}:{port}/{api_base_path}" api_client = ApiClient(configuration=config) return api_client @@ -48,6 +57,7 @@ def wrapper(*args, **kwargs): print(f"---[ {func.__name__}{args}{kwargs} ]---") print() return func(*args, **kwargs) + return wrapper @@ -65,12 +75,17 @@ def upload_model_template(uploadfile_name, name=None) -> str: api_client = get_swagger_client() api_instance = swagger_client.ModelServiceApi(api_client=api_client) try: - print(f"Uploading '{uploadfile_name}' ... ", end='') - model: ApiModel = api_instance.upload_model(uploadfile=uploadfile_name, name=name) + print(f"Uploading '{uploadfile_name}' ... ", end="") + model: ApiModel = api_instance.upload_model( + uploadfile=uploadfile_name, name=name + ) print(f"{model.id} '{model.name}'") return model.id except ApiException as e: - print("Exception when calling ModelServiceApi -> upload_model: %s\n" % e, file=stderr) + print( + "Exception when calling ModelServiceApi -> upload_model: %s\n" % e, + file=stderr, + ) raise e return None @@ -83,7 +98,10 @@ def upload_model_file(model_id, file_path): response = api_instance.upload_model_file(id=model_id, uploadfile=file_path) print(f"Upload file '{file_path}' to model '{model_id}'") except ApiException as e: - print("Exception when calling ModelServiceApi -> upload_model_file: %s\n" % e, file=stderr) + print( + "Exception when calling ModelServiceApi -> upload_model_file: %s\n" % e, + file=stderr, + ) raise e @@ -105,21 +123,23 @@ def download_model_tgz(model_id, download_dir: str = None) -> str: api_instance = swagger_client.ModelServiceApi(api_client=api_client) try: - response: HTTPResponse = \ - 
api_instance.download_model_files(model_id, - include_generated_code=True, - _preload_content=False) + response: HTTPResponse = api_instance.download_model_files( + model_id, include_generated_code=True, _preload_content=False + ) - attachment_header = response.info().get("Content-Disposition", - f"attachment; filename={model_id}.tgz") + attachment_header = response.info().get( + "Content-Disposition", f"attachment; filename={model_id}.tgz" + ) download_filename = re.sub("attachment; filename=", "", attachment_header) - download_dir = download_dir or os.path.join(tempfile.gettempdir(), "download", "models") + download_dir = download_dir or os.path.join( + tempfile.gettempdir(), "download", "models" + ) os.makedirs(download_dir, exist_ok=True) tarfile_path = os.path.join(download_dir, download_filename) - with open(tarfile_path, 'wb') as f: + with open(tarfile_path, "wb") as f: f.write(response.read()) print(tarfile_path) @@ -127,7 +147,10 @@ def download_model_tgz(model_id, download_dir: str = None) -> str: return tarfile_path except ApiException as e: - print("Exception when calling ModelServiceApi -> download_model_files: %s\n" % e, file=stderr) + print( + "Exception when calling ModelServiceApi -> download_model_files: %s\n" % e, + file=stderr, + ) return "Download failed?" 
@@ -139,37 +162,46 @@ def verify_model_download(model_id: str) -> bool: api_instance = swagger_client.ModelServiceApi(api_client=api_client) try: - response: HTTPResponse = \ - api_instance.download_model_files(model_id, - include_generated_code=True, - _preload_content=False) + response: HTTPResponse = api_instance.download_model_files( + model_id, include_generated_code=True, _preload_content=False + ) tgz_file = BytesIO(response.read()) tar = tarfile.open(fileobj=tgz_file) - file_contents = {m.name: tar.extractfile(m).read().decode("utf-8") - for m in tar.getmembers()} + file_contents = { + m.name: tar.extractfile(m).read().decode("utf-8") for m in tar.getmembers() + } # verify template text matches - template_response: ApiGetTemplateResponse = api_instance.get_model_template(model_id) + template_response: ApiGetTemplateResponse = api_instance.get_model_template( + model_id + ) template_from_api = template_response.template - template_from_tgz = [content for filename, content in file_contents.items() - if filename.endswith(".yaml") or filename.endswith(".yml")][0] + template_from_tgz = [ + content + for filename, content in file_contents.items() + if filename.endswith(".yaml") or filename.endswith(".yml") + ][0] assert template_from_api == template_from_tgz # verify generated code matches - generate_code_response: ApiGenerateModelCodeResponse = \ + generate_code_response: ApiGenerateModelCodeResponse = ( api_instance.generate_model_code(model_id) + ) for model_script in generate_code_response.scripts: stage = model_script.pipeline_stage platform = model_script.execution_platform script_from_api = model_script.script_code - downloaded_script = [content for filename, content in file_contents.items() - if f"run_{stage}_{platform}" in filename][0] + downloaded_script = [ + content + for filename, content in file_contents.items() + if f"run_{stage}_{platform}" in filename + ][0] assert script_from_api == downloaded_script @@ -178,7 +210,10 @@ def 
verify_model_download(model_id: str) -> bool: return True except ApiException as e: - print("Exception when calling ModelServiceApi -> download_model_files: %s\n" % e, file=stderr) + print( + "Exception when calling ModelServiceApi -> download_model_files: %s\n" % e, + file=stderr, + ) return False @@ -193,7 +228,11 @@ def approve_models_for_publishing(model_ids: [str]): api_response = api_instance.approve_models_for_publishing(model_ids) except ApiException as e: - print("Exception when calling ModelServiceApi -> approve_models_for_publishing: %s\n" % e, file=stderr) + print( + "Exception when calling ModelServiceApi -> approve_models_for_publishing: %s\n" + % e, + file=stderr, + ) return None @@ -208,7 +247,10 @@ def set_featured_models(model_ids: [str]): api_response = api_instance.set_featured_models(model_ids) except ApiException as e: - print("Exception when calling ModelServiceApi -> set_featured_models: %s\n" % e, file=stderr) + print( + "Exception when calling ModelServiceApi -> set_featured_models: %s\n" % e, + file=stderr, + ) return None @@ -222,7 +264,9 @@ def get_model(model_id: str) -> ApiModel: pprint(model_meta, indent=2) return model_meta except ApiException as e: - print("Exception when calling ModelServiceApi -> get_model: %s\n" % e, file=stderr) + print( + "Exception when calling ModelServiceApi -> get_model: %s\n" % e, file=stderr + ) return None @@ -233,7 +277,10 @@ def delete_model(model_id: str): try: api_instance.delete_model(model_id) except ApiException as e: - print("Exception when calling ModelServiceApi -> delete_model: %s\n" % e, file=stderr) + print( + "Exception when calling ModelServiceApi -> delete_model: %s\n" % e, + file=stderr, + ) @print_function_name_decorator @@ -243,7 +290,9 @@ def get_template(template_id: str) -> str: api_instance = swagger_client.ModelServiceApi(api_client=api_client) try: - template_response: ApiGetTemplateResponse = api_instance.get_model_template(template_id) + template_response: 
ApiGetTemplateResponse = api_instance.get_model_template( + template_id + ) print(template_response.template) # yaml_dict = yaml.load(template_response.template, Loader=yaml.FullLoader) @@ -256,7 +305,10 @@ def get_template(template_id: str) -> str: return template_response.template except ApiException as e: - print("Exception when calling ModelServiceApi -> get_model_template: %s\n" % e, file=stderr) + print( + "Exception when calling ModelServiceApi -> get_model_template: %s\n" % e, + file=stderr, + ) return None @@ -268,7 +320,9 @@ def generate_code(model_id: str) -> str: api_instance = swagger_client.ModelServiceApi(api_client=api_client) try: - generate_code_response: ApiGenerateModelCodeResponse = api_instance.generate_model_code(model_id) + generate_code_response: ApiGenerateModelCodeResponse = ( + api_instance.generate_model_code(model_id) + ) for model_script in generate_code_response.scripts: print(f"#######################################################") @@ -282,26 +336,42 @@ def generate_code(model_id: str) -> str: return generate_code_response.scripts except ApiException as e: - print("Exception while calling ModelServiceApi -> generate_code: %s\n" % e, file=stderr) + print( + "Exception while calling ModelServiceApi -> generate_code: %s\n" % e, + file=stderr, + ) return None @print_function_name_decorator -def run_code(model_id: str, pipeline_stage: str, execution_platform: str, run_name: str = None, parameters=dict()) -> str: +def run_code( + model_id: str, + pipeline_stage: str, + execution_platform: str, + run_name: str = None, + parameters=dict(), +) -> str: api_client = get_swagger_client() api_instance = swagger_client.ModelServiceApi(api_client=api_client) try: - run_code_response: ApiRunCodeResponse = api_instance.run_model(model_id, pipeline_stage, execution_platform, - run_name=run_name, parameters=parameters) + run_code_response: ApiRunCodeResponse = api_instance.run_model( + model_id, + pipeline_stage, + execution_platform, + 
run_name=run_name, + parameters=parameters, + ) print(run_code_response.run_url) return run_code_response.run_url except ApiException as e: - print("Exception while calling ModelServiceApi -> run_code: %s\n" % e, file=stderr) + print( + "Exception while calling ModelServiceApi -> run_code: %s\n" % e, file=stderr + ) return None @@ -315,15 +385,23 @@ def list_models(filter_dict: dict = {}, sort_by: str = None) -> [ApiModel]: try: filter_str = json.dumps(filter_dict) if filter_dict else None - api_response: ApiListModelsResponse = api_instance.list_models(filter=filter_str, sort_by=sort_by) + api_response: ApiListModelsResponse = api_instance.list_models( + filter=filter_str, sort_by=sort_by + ) for c in api_response.models: - print("%s %s %s" % (c.id, c.created_at.strftime("%Y-%m-%d %H:%M:%S"), c.name)) + print( + "%s %s %s" + % (c.id, c.created_at.strftime("%Y-%m-%d %H:%M:%S"), c.name) + ) return api_response.models except ApiException as e: - print("Exception when calling ModelServiceApi -> list_models: %s\n" % e, file=stderr) + print( + "Exception when calling ModelServiceApi -> list_models: %s\n" % e, + file=stderr, + ) return [] @@ -346,7 +424,7 @@ def main(): approve_models_for_publishing(model_ids) # randomly selected a model - i = random.randint(0, len(model_ids)-1) + i = random.randint(0, len(model_ids) - 1) model_id = model_ids[i] # show one randomly selected model, gen code, download, update @@ -374,6 +452,6 @@ def main(): # upload_model_file(model.id, "temp/files/max-audio-classifier.yaml") -if __name__ == '__main__': +if __name__ == "__main__": pprint(yaml_files) main() diff --git a/api/examples/notebooks_api.py b/api/examples/notebooks_api.py index 83e99ccd..acb071ed 100644 --- a/api/examples/notebooks_api.py +++ b/api/examples/notebooks_api.py @@ -1,5 +1,5 @@ # Copyright 2021 The MLX Contributors -# +# # SPDX-License-Identifier: Apache-2.0 from __future__ import print_function @@ -9,31 +9,41 @@ import random import re import requests -import 
swagger_client +import swagger_client # noqa: F401 import tarfile import tempfile -import yaml +import yaml # noqa: F401 from glob import glob from io import BytesIO -from os import environ as env -from pprint import pprint +from os import environ as env # noqa: F401 +from pprint import pprint # noqa: F401 from swagger_client.api_client import ApiClient, Configuration -from swagger_client.models import ApiNotebook, ApiGetTemplateResponse, ApiListNotebooksResponse, \ - ApiGenerateCodeResponse, ApiRunCodeResponse -from swagger_client.rest import ApiException +from swagger_client.models import ( + ApiNotebook, + ApiGetTemplateResponse, + ApiListNotebooksResponse, + ApiGenerateCodeResponse, + ApiRunCodeResponse, +) +from swagger_client.rest import ApiException # noqa: F401 from sys import stderr -from urllib3.response import HTTPResponse +from urllib3.response import HTTPResponse # noqa: F401 -host = '127.0.0.1' -port = '8080' +host = "127.0.0.1" +port = "8080" # host = env.get("MLX_API_SERVICE_HOST") # port = env.get("MLX_API_SERVICE_PORT") -api_base_path = 'apis/v1alpha1' +api_base_path = "apis/v1alpha1" -yaml_files = sorted(filter(lambda f: "template" not in f, glob("./../../../katalog/notebook-samples/*.yaml", recursive=True))) +yaml_files = sorted( + filter( + lambda f: "template" not in f, + glob("./../../../katalog/notebook-samples/*.yaml", recursive=True), + ) +) IBM_GHE_API_TOKEN = env.get("IBM_GHE_API_TOKEN") @@ -41,14 +51,13 @@ def get_swagger_client(): config = Configuration() - config.host = f'http://{host}:{port}/{api_base_path}' + config.host = f"http://{host}:{port}/{api_base_path}" api_client = ApiClient(configuration=config) return api_client def print_function_name_decorator(func): - def wrapper(*args, **kwargs): print() print(f"---[ {func.__name__}{args}{kwargs} ]---") @@ -79,16 +88,22 @@ def upload_notebook_template(uploadfile_name, name=None, ghe_token=None) -> str: try: if ghe_token: - notebook: ApiNotebook = 
api_instance.upload_notebook(uploadfile=uploadfile_name, name=name, - enterprise_github_token=ghe_token) + notebook: ApiNotebook = api_instance.upload_notebook( + uploadfile=uploadfile_name, name=name, enterprise_github_token=ghe_token + ) else: - notebook: ApiNotebook = api_instance.upload_notebook(uploadfile=uploadfile_name, name=name) + notebook: ApiNotebook = api_instance.upload_notebook( + uploadfile=uploadfile_name, name=name + ) print(f"Uploaded '{notebook.name}': {notebook.id}") return notebook.id except ApiException as e: - print("Exception when calling NotebookServiceApi -> upload_notebook: %s\n" % e, file=stderr) + print( + "Exception when calling NotebookServiceApi -> upload_notebook: %s\n" % e, + file=stderr, + ) # raise e return None @@ -124,35 +139,43 @@ def upload_notebook_file(notebook_id, file_path): api_instance = swagger_client.NotebookServiceApi(api_client=api_client) try: - response = api_instance.upload_notebook_file(id=notebook_id, uploadfile=file_path) + response = api_instance.upload_notebook_file( + id=notebook_id, uploadfile=file_path + ) print(f"Upload file '{file_path}' to notebook with ID '{notebook_id}'") except ApiException as e: - print("Exception when calling NotebookServiceApi -> upload_notebook_file: %s\n" % e, file=stderr) + print( + "Exception when calling NotebookServiceApi -> upload_notebook_file: %s\n" + % e, + file=stderr, + ) raise e @print_function_name_decorator -def download_notebook_tgz(notebook_id, - download_dir=os.path.join(tempfile.gettempdir(), "download", "notebooks")) -> str: +def download_notebook_tgz( + notebook_id, + download_dir=os.path.join(tempfile.gettempdir(), "download", "notebooks"), +) -> str: api_client = get_swagger_client() api_instance = swagger_client.NotebookServiceApi(api_client=api_client) try: - response: HTTPResponse = \ - api_instance.download_notebook_files(notebook_id, - include_generated_code=True, - _preload_content=False) + response: HTTPResponse = 
api_instance.download_notebook_files( + notebook_id, include_generated_code=True, _preload_content=False + ) - attachment_header = response.info().get("Content-Disposition", - f"attachment; filename={notebook_id}.tgz") + attachment_header = response.info().get( + "Content-Disposition", f"attachment; filename={notebook_id}.tgz" + ) download_filename = re.sub("attachment; filename=", "", attachment_header) os.makedirs(download_dir, exist_ok=True) tarfile_path = os.path.join(download_dir, download_filename) - with open(tarfile_path, 'wb') as f: + with open(tarfile_path, "wb") as f: f.write(response.read()) print(tarfile_path) @@ -160,7 +183,11 @@ def download_notebook_tgz(notebook_id, return tarfile_path except ApiException as e: - print("Exception when calling NotebookServiceApi -> download_notebook_files: %s\n" % e, file=stderr) + print( + "Exception when calling NotebookServiceApi -> download_notebook_files: %s\n" + % e, + file=stderr, + ) return "Download failed?" @@ -172,35 +199,49 @@ def verify_notebook_download(notebook_id: str) -> bool: api_instance = swagger_client.NotebookServiceApi(api_client=api_client) try: - response: HTTPResponse = \ - api_instance.download_notebook_files(notebook_id, - include_generated_code=True, - _preload_content=False) + response: HTTPResponse = api_instance.download_notebook_files( + notebook_id, include_generated_code=True, _preload_content=False + ) tgz_file = BytesIO(response.read()) tar = tarfile.open(fileobj=tgz_file) - file_contents = {m.name.split(".")[-1]: tar.extractfile(m).read().decode("utf-8") - for m in tar.getmembers()} + file_contents = { + m.name.split(".")[-1]: tar.extractfile(m).read().decode("utf-8") + for m in tar.getmembers() + } - template_response: ApiGetTemplateResponse = api_instance.get_notebook_template(notebook_id) + template_response: ApiGetTemplateResponse = api_instance.get_notebook_template( + notebook_id + ) template_text_from_api = template_response.template - assert template_text_from_api == 
file_contents.get("yaml", file_contents.get("yml")) + assert template_text_from_api == file_contents.get( + "yaml", file_contents.get("yml") + ) - generate_code_response: ApiGenerateCodeResponse = api_instance.generate_notebook_code(notebook_id) + generate_code_response: ApiGenerateCodeResponse = ( + api_instance.generate_notebook_code(notebook_id) + ) run_script_from_api = generate_code_response.script - regex = re.compile(r"name='[^']*'") # controller adds random chars to name, replace those + regex = re.compile( + r"name='[^']*'" + ) # controller adds random chars to name, replace those - assert regex.sub("name='...'", run_script_from_api) == \ - regex.sub("name='...'", file_contents.get("py")) + assert regex.sub("name='...'", run_script_from_api) == regex.sub( + "name='...'", file_contents.get("py") + ) print("downloaded files match") return True except ApiException as e: - print("Exception when calling NotebookServiceApi -> download_notebook_files: %s\n" % e, file=stderr) + print( + "Exception when calling NotebookServiceApi -> download_notebook_files: %s\n" + % e, + file=stderr, + ) return False @@ -215,7 +256,11 @@ def approve_notebooks_for_publishing(notebook_ids: [str]): api_response = api_instance.approve_notebooks_for_publishing(notebook_ids) except ApiException as e: - print("Exception when calling NotebookServiceApi -> approve_notebooks_for_publishing: %s\n" % e, file=stderr) + print( + "Exception when calling NotebookServiceApi -> approve_notebooks_for_publishing: %s\n" + % e, + file=stderr, + ) return None @@ -230,7 +275,11 @@ def set_featured_notebooks(notebook_ids: [str]): api_response = api_instance.set_featured_notebooks(notebook_ids) except ApiException as e: - print("Exception when calling NotebookServiceApi -> set_featured_notebooks: %s\n" % e, file=stderr) + print( + "Exception when calling NotebookServiceApi -> set_featured_notebooks: %s\n" + % e, + file=stderr, + ) return None @@ -247,7 +296,10 @@ def get_notebook(notebook_id: str) -> 
ApiNotebook: return notebook_meta except ApiException as e: - print("Exception when calling NotebookServiceApi -> get_notebook: %s\n" % e, file=stderr) + print( + "Exception when calling NotebookServiceApi -> get_notebook: %s\n" % e, + file=stderr, + ) return None @@ -261,7 +313,10 @@ def delete_notebook(notebook_id: str): try: api_instance.delete_notebook(notebook_id) except ApiException as e: - print("Exception when calling NotebookServiceApi -> delete_notebook: %s\n" % e, file=stderr) + print( + "Exception when calling NotebookServiceApi -> delete_notebook: %s\n" % e, + file=stderr, + ) @print_function_name_decorator @@ -271,7 +326,9 @@ def get_template(template_id: str) -> str: api_instance = swagger_client.NotebookServiceApi(api_client=api_client) try: - template_response: ApiGetTemplateResponse = api_instance.get_notebook_template(template_id) + template_response: ApiGetTemplateResponse = api_instance.get_notebook_template( + template_id + ) print(template_response.template) # yaml_dict = yaml.load(template_response.template, Loader=yaml.FullLoader) @@ -284,7 +341,11 @@ def get_template(template_id: str) -> str: return template_response.template except ApiException as e: - print("Exception when calling NotebookServiceApi -> get_notebook_template: %s\n" % e, file=stderr) + print( + "Exception when calling NotebookServiceApi -> get_notebook_template: %s\n" + % e, + file=stderr, + ) return None @@ -296,13 +357,18 @@ def generate_code(notebook_id: str) -> str: api_instance = swagger_client.NotebookServiceApi(api_client=api_client) try: - generate_code_response: ApiGenerateCodeResponse = api_instance.generate_notebook_code(notebook_id) + generate_code_response: ApiGenerateCodeResponse = ( + api_instance.generate_notebook_code(notebook_id) + ) print(generate_code_response.script) return generate_code_response.script except ApiException as e: - print("Exception while calling NotebookServiceApi -> generate_code: %s\n" % e, file=stderr) + print( + "Exception while 
calling NotebookServiceApi -> generate_code: %s\n" % e, + file=stderr, + ) return None @@ -314,16 +380,19 @@ def run_notebook(notebook_id: str, parameters=dict(), run_name: str = None) -> s api_instance = swagger_client.NotebookServiceApi(api_client=api_client) try: - run_code_response: ApiRunCodeResponse = api_instance.run_notebook(notebook_id, - parameters=parameters, - run_name=run_name) + run_code_response: ApiRunCodeResponse = api_instance.run_notebook( + notebook_id, parameters=parameters, run_name=run_name + ) print(run_code_response.run_url) print(run_code_response.run_output_location) return run_code_response.run_url except ApiException as e: - print("Exception while calling NotebookServiceApi -> run_code: %s\n" % e, file=stderr) + print( + "Exception while calling NotebookServiceApi -> run_code: %s\n" % e, + file=stderr, + ) return None @@ -337,15 +406,23 @@ def list_notebooks(filter_dict: dict = {}, sort_by: str = None) -> [ApiNotebook] try: filter_str = json.dumps(filter_dict) if filter_dict else None - api_response: ApiListNotebooksResponse = api_instance.list_notebooks(filter=filter_str, sort_by=sort_by) + api_response: ApiListNotebooksResponse = api_instance.list_notebooks( + filter=filter_str, sort_by=sort_by + ) for c in api_response.notebooks: - print("%s %s %s" % (c.id, c.created_at.strftime("%Y-%m-%d %H:%M:%S"), c.name)) + print( + "%s %s %s" + % (c.id, c.created_at.strftime("%Y-%m-%d %H:%M:%S"), c.name) + ) return api_response.notebooks except ApiException as e: - print("Exception when calling NotebookServiceApi -> list_notebooks: %s\n" % e, file=stderr) + print( + "Exception when calling NotebookServiceApi -> list_notebooks: %s\n" % e, + file=stderr, + ) return [] @@ -363,23 +440,28 @@ def download_notebooks_from_github(): url = yaml_dict["implementation"]["github"]["source"] - download_url = url.replace("/blob", "")\ - .replace("github.com", "raw.githubusercontent.com")\ + download_url = ( + url.replace("/blob", "") + .replace("github.com", 
"raw.githubusercontent.com") .replace("github.ibm.com", "raw.github.ibm.com") + ) if "github.ibm.com" in url: - headers = {'Authorization': 'token %s' % env.get("IBM_GHE_API_TOKEN")} + headers = {"Authorization": "token %s" % env.get("IBM_GHE_API_TOKEN")} else: headers = {} response = requests.get(download_url, headers=headers, allow_redirects=True) if response.status_code == 200: - with open(os.path.join(download_dir, os.path.basename(url)), 'wb') as f: + with open(os.path.join(download_dir, os.path.basename(url)), "wb") as f: f.write(response.content) else: - print("{}: {:20s} --> {}".format( - response.status_code, os.path.basename(yaml_file), url)) + print( + "{}: {:20s} --> {}".format( + response.status_code, os.path.basename(yaml_file), url + ) + ) def main(): @@ -400,7 +482,7 @@ def main(): approve_notebooks_for_publishing(notebook_ids) # randomly selected a notebook - i = random.randint(0, len(notebook_ids)-1) + i = random.randint(0, len(notebook_ids) - 1) notebook_id = notebook_ids[i] # show one randomly selected notebook @@ -419,7 +501,7 @@ def main(): # delete_notebook(notebook_id) -if __name__ == '__main__': +if __name__ == "__main__": pprint(yaml_files) main() # download_notebooks_from_github() diff --git a/api/examples/pipelines_api.py b/api/examples/pipelines_api.py index 7ad00ebd..ab18584b 100644 --- a/api/examples/pipelines_api.py +++ b/api/examples/pipelines_api.py @@ -1,5 +1,5 @@ # Copyright 2021 The MLX Contributors -# +# # SPDX-License-Identifier: Apache-2.0 from __future__ import print_function @@ -9,50 +9,64 @@ import os import random import re -import swagger_client +import swagger_client # noqa: F401 import tarfile import tempfile import typing -import yaml +import yaml # noqa: F401 from io import BytesIO -from os import environ as env -from pprint import pprint +from os import environ as env # noqa: F401 +from pprint import pprint # noqa: F401 from swagger_client.api_client import ApiClient, Configuration -from swagger_client.models import 
ApiPipeline, ApiGetTemplateResponse, ApiListPipelinesResponse, \ - ApiGenerateCodeResponse, ApiRunCodeResponse, ApiPipelineExtended, ApiPipelineCustom, ApiPipelineCustomRunPayload, \ - ApiPipelineTask, ApiComponent, ApiNotebook, ApiPipelineTaskArguments, ApiPipelineDAG, ApiPipelineInputs, \ - ApiParameter -from swagger_client.rest import ApiException +from swagger_client.models import ( # noqa: F401 + ApiPipeline, + ApiGetTemplateResponse, + ApiListPipelinesResponse, + ApiGenerateCodeResponse, + ApiRunCodeResponse, + ApiPipelineExtended, + ApiPipelineCustom, + ApiPipelineCustomRunPayload, + ApiPipelineTask, + ApiComponent, + ApiNotebook, + ApiPipelineTaskArguments, + ApiPipelineDAG, + ApiPipelineInputs, + ApiParameter, +) +from swagger_client.rest import ApiException # noqa: F401 from sys import stderr from types import SimpleNamespace as custom_obj -from urllib3.response import HTTPResponse +from urllib3.response import HTTPResponse # noqa: F401 -host = '127.0.0.1' -port = '8080' +host = "127.0.0.1" +port = "8080" # host = env.get("MLX_API_SERVICE_HOST") # port = env.get("MLX_API_SERVICE_PORT") -api_base_path = 'apis/v1alpha1' +api_base_path = "apis/v1alpha1" # yaml_files = glob.glob("./../../pipelines/pipeline-samples/*/*.yaml") # yaml_files = glob.glob("./../../../kfp-tekton/samples/*/*.yaml") -yaml_files = glob.glob("./../../../kfp-tekton/sdk/python/tests/compiler/testdata/*.yaml")[:10] +yaml_files = glob.glob( + "./../../../kfp-tekton/sdk/python/tests/compiler/testdata/*.yaml" +)[:10] # yaml_files = sorted(glob.glob("./../../../katalog/pipeline-samples/*.yaml", recursive=True)) def get_swagger_client(): config = Configuration() - config.host = f'http://{host}:{port}/{api_base_path}' + config.host = f"http://{host}:{port}/{api_base_path}" api_client = ApiClient(configuration=config) return api_client def print_function_name_decorator(func): - def wrapper(*args, **kwargs): print() print(f"---[ {func.__name__}{str(args)[:50]}{str(kwargs)[0:50]} ]---") @@ -76,21 
+90,28 @@ def create_tar_file(yamlfile_name): return tarfile_path -def upload_pipeline_template(uploadfile_name, name: str = None, description: str = None, annotations: str = "") -> str: +def upload_pipeline_template( + uploadfile_name, name: str = None, description: str = None, annotations: str = "" +) -> str: api_client = get_swagger_client() api_instance = swagger_client.PipelineServiceApi(api_client=api_client) try: - pipeline: ApiPipeline = api_instance.upload_pipeline(uploadfile=uploadfile_name, - name=name, - description=description, - annotations=annotations) + pipeline: ApiPipeline = api_instance.upload_pipeline( + uploadfile=uploadfile_name, + name=name, + description=description, + annotations=annotations, + ) print(f"Uploaded '{pipeline.name}': {pipeline.id}") return pipeline.id except ApiException as e: - print("Exception when calling PipelineServiceApi -> upload_pipeline: %s\n" % e, file=stderr) + print( + "Exception when calling PipelineServiceApi -> upload_pipeline: %s\n" % e, + file=stderr, + ) raise e return None @@ -118,11 +139,17 @@ def upload_pipeline_file(pipeline_id, file_path): api_instance = swagger_client.PipelineServiceApi(api_client=api_client) try: - response = api_instance.upload_pipeline_file(id=pipeline_id, uploadfile=file_path) + response = api_instance.upload_pipeline_file( + id=pipeline_id, uploadfile=file_path + ) print(f"Upload file '{file_path}' to pipeline with ID '{pipeline_id}'") except ApiException as e: - print("Exception when calling PipelineServiceApi -> upload_pipeline_file: %s\n" % e, file=stderr) + print( + "Exception when calling PipelineServiceApi -> upload_pipeline_file: %s\n" + % e, + file=stderr, + ) raise e @@ -133,11 +160,13 @@ def download_pipeline_tgz(pipeline_id) -> str: api_instance = swagger_client.PipelineServiceApi(api_client=api_client) try: - response: HTTPResponse = \ - api_instance.download_pipeline_files(pipeline_id, _preload_content=False) + response: HTTPResponse = 
api_instance.download_pipeline_files( + pipeline_id, _preload_content=False + ) - attachment_header = response.info().get("Content-Disposition", - f"attachment; filename={pipeline_id}.tgz") + attachment_header = response.info().get( + "Content-Disposition", f"attachment; filename={pipeline_id}.tgz" + ) download_filename = re.sub("attachment; filename=", "", attachment_header) @@ -145,7 +174,7 @@ def download_pipeline_tgz(pipeline_id) -> str: os.makedirs(download_dir, exist_ok=True) tarfile_path = os.path.join(download_dir, download_filename) - with open(tarfile_path, 'wb') as f: + with open(tarfile_path, "wb") as f: f.write(response.read()) print(tarfile_path) @@ -153,7 +182,11 @@ def download_pipeline_tgz(pipeline_id) -> str: return tarfile_path except ApiException as e: - print("Exception when calling PipelineServiceApi -> download_pipeline_files: %s\n" % e, file=stderr) + print( + "Exception when calling PipelineServiceApi -> download_pipeline_files: %s\n" + % e, + file=stderr, + ) return "Download failed?" 
@@ -165,14 +198,20 @@ def verify_pipeline_download(pipeline_id: str) -> bool: api_instance = swagger_client.PipelineServiceApi(api_client=api_client) try: - response: HTTPResponse = api_instance.download_pipeline_files(pipeline_id, _preload_content=False) + response: HTTPResponse = api_instance.download_pipeline_files( + pipeline_id, _preload_content=False + ) tgz_file = BytesIO(response.read()) with tarfile.open(fileobj=tgz_file) as tar: - file_contents = {m.name: tar.extractfile(m).read().decode("utf-8") - for m in tar.getmembers()} - - template_response: ApiGetTemplateResponse = api_instance.get_template(pipeline_id) + file_contents = { + m.name: tar.extractfile(m).read().decode("utf-8") + for m in tar.getmembers() + } + + template_response: ApiGetTemplateResponse = api_instance.get_template( + pipeline_id + ) template_text_from_api = template_response.template assert template_text_from_api == file_contents.get("pipeline.yaml") @@ -182,7 +221,11 @@ def verify_pipeline_download(pipeline_id: str) -> bool: return True except ApiException as e: - print("Exception when calling PipelineServiceApi -> download_pipeline_files: %s\n" % e, file=stderr) + print( + "Exception when calling PipelineServiceApi -> download_pipeline_files: %s\n" + % e, + file=stderr, + ) return False @@ -197,7 +240,11 @@ def approve_pipelines_for_publishing(pipeline_ids: [str]): api_response = api_instance.approve_pipelines_for_publishing(pipeline_ids) except ApiException as e: - print("Exception when calling PipelineServiceApi -> approve_pipelines_for_publishing: %s\n" % e, file=stderr) + print( + "Exception when calling PipelineServiceApi -> approve_pipelines_for_publishing: %s\n" + % e, + file=stderr, + ) return None @@ -212,7 +259,11 @@ def set_featured_pipelines(pipeline_ids: [str]): api_response = api_instance.set_featured_pipelines(pipeline_ids) except ApiException as e: - print("Exception when calling PipelineServiceApi -> set_featured_pipelines: %s\n" % e, file=stderr) + print( + 
"Exception when calling PipelineServiceApi -> set_featured_pipelines: %s\n" + % e, + file=stderr, + ) return None @@ -229,7 +280,10 @@ def get_pipeline(pipeline_id: str) -> ApiPipelineExtended: return pipeline_meta except ApiException as e: - print("Exception when calling PipelineServiceApi -> get_pipeline: %s\n" % e, file=stderr) + print( + "Exception when calling PipelineServiceApi -> get_pipeline: %s\n" % e, + file=stderr, + ) return None @@ -242,9 +296,12 @@ def delete_pipeline(pipeline_id: str): try: api_instance.delete_pipeline(pipeline_id) - + except ApiException as e: - print("Exception when calling PipelineServiceApi -> delete_pipeline: %s\n" % e, file=stderr) + print( + "Exception when calling PipelineServiceApi -> delete_pipeline: %s\n" % e, + file=stderr, + ) @print_function_name_decorator @@ -254,7 +311,9 @@ def get_template(template_id: str) -> str: api_instance = swagger_client.PipelineServiceApi(api_client=api_client) try: - template_response: ApiGetTemplateResponse = api_instance.get_template(template_id) + template_response: ApiGetTemplateResponse = api_instance.get_template( + template_id + ) print(template_response.template) # yaml_dict = yaml.load(template_response.template, Loader=yaml.FullLoader) @@ -267,25 +326,35 @@ def get_template(template_id: str) -> str: return template_response.template except ApiException as e: - print("Exception when calling PipelineServiceApi -> get_pipeline_template: %s\n" % e, file=stderr) + print( + "Exception when calling PipelineServiceApi -> get_pipeline_template: %s\n" + % e, + file=stderr, + ) return None @print_function_name_decorator -def run_pipeline(pipeline_id: str, parameters: dict = None, run_name: str = None) -> str: +def run_pipeline( + pipeline_id: str, parameters: dict = None, run_name: str = None +) -> str: api_client = get_swagger_client() api_instance = swagger_client.PipelineServiceApi(api_client=api_client) try: - run_code_response: ApiRunCodeResponse = api_instance.run_pipeline(pipeline_id, 
parameters=parameters, - run_name=run_name) + run_code_response: ApiRunCodeResponse = api_instance.run_pipeline( + pipeline_id, parameters=parameters, run_name=run_name + ) print(run_code_response.run_url) return run_code_response.run_url except ApiException as e: - print("Exception while calling PipelineServiceApi -> run_pipeline: %s\n" % e, file=stderr) + print( + "Exception while calling PipelineServiceApi -> run_pipeline: %s\n" % e, + file=stderr, + ) return None @@ -299,35 +368,48 @@ def generate_custom_pipeline(sequential_only=False) -> ApiPipelineCustom: try: components: typing.List[ApiComponent] = random.sample( - list(filter(lambda n: "StudyJob" not in n.name, - components_api_instance.list_components().components)), 3) + list( + filter( + lambda n: "StudyJob" not in n.name, + components_api_instance.list_components().components, + ) + ), + 3, + ) # components = [] notebooks: typing.List[ApiNotebook] = random.sample( - notebooks_api_instance.list_notebooks().notebooks, 5) + notebooks_api_instance.list_notebooks().notebooks, 5 + ) tasks: typing.List[ApiPipelineTask] = [] for c in components: tasks.append( - ApiPipelineTask(name=f"component task {c.name}", - artifact_type="component", - artifact_id=c.id, - arguments=ApiPipelineTaskArguments(parameters=c.parameters), - dependencies=[])) + ApiPipelineTask( + name=f"component task {c.name}", + artifact_type="component", + artifact_id=c.id, + arguments=ApiPipelineTaskArguments(parameters=c.parameters), + dependencies=[], + ) + ) for n in notebooks: tasks.append( - ApiPipelineTask(name=f"notebook task {n.name}", - artifact_type="notebook", - artifact_id=n.id, - arguments=ApiPipelineTaskArguments(parameters=n.parameters), - dependencies=[])) + ApiPipelineTask( + name=f"notebook task {n.name}", + artifact_type="notebook", + artifact_id=n.id, + arguments=ApiPipelineTaskArguments(parameters=n.parameters), + dependencies=[], + ) + ) if sequential_only: - for i in range(len(tasks)-1): - tasks[i+1].dependencies = 
[tasks[i].name] + for i in range(len(tasks) - 1): + tasks[i + 1].dependencies = [tasks[i].name] else: for i in range(len(tasks)): num_deps = random.randint(0, i) @@ -344,22 +426,30 @@ def generate_custom_pipeline(sequential_only=False) -> ApiPipelineCustom: param_name = re.sub(r"\W+", "_", p.name, flags=re.ASCII).lower() pipeline_param_name = f"{task_name_prefix}_{param_name}" p.value = "{{inputs.parameters." + pipeline_param_name + "}}" - pipeline_params.append(ApiParameter(name=pipeline_param_name, value="some value")) + pipeline_params.append( + ApiParameter(name=pipeline_param_name, value="some value") + ) api_pipeline_custom = ApiPipelineCustom( name="My custom pipeline", description="A randomly generated pipeline from notebooks and components", dag=ApiPipelineDAG(tasks=tasks), - inputs=ApiPipelineInputs(parameters=pipeline_params)) + inputs=ApiPipelineInputs(parameters=pipeline_params), + ) return api_pipeline_custom except ApiException as e: - print(f"Exception while generating custom pipeline DAG with parameters: \n{str(e)}", file=stderr) + print( + f"Exception while generating custom pipeline DAG with parameters: \n{str(e)}", + file=stderr, + ) @print_function_name_decorator -def run_custom_pipeline(pipeline_template: dict, parameters: list = None, run_name: str = None) -> str: +def run_custom_pipeline( + pipeline_template: dict, parameters: list = None, run_name: str = None +) -> str: api_client = get_swagger_client() api_instance = swagger_client.PipelineServiceApi(api_client=api_client) @@ -368,23 +458,31 @@ def run_custom_pipeline(pipeline_template: dict, parameters: list = None, run_na # TODO: cleanup pipeline parameter, we should not support KFP Argo YAML if pipeline_template.get("spec", {}).get("templates"): # pipeline_dag = [t for t in pipeline_template["spec"]["templates"] if "dag" in t][0] - pipeline_dag = list(filter(lambda t: "dag" in t, pipeline_template["spec"]["templates"]))[0] + pipeline_dag = list( + filter(lambda t: "dag" in t, 
pipeline_template["spec"]["templates"]) + )[0] elif "dag" in pipeline_template: pipeline_dag = pipeline_template # custom_pipeline = ApiPipelineCustom.from_dict(pipeline_dag) mock_response = custom_obj(data=json.dumps(pipeline_dag)) - custom_pipeline = api_client.deserialize(response=mock_response, response_type=ApiPipelineCustom) - run_custom_pipeline_payload = ApiPipelineCustomRunPayload(custom_pipeline=custom_pipeline, - run_parameters=parameters) - run_code_response: ApiRunCodeResponse = \ - api_instance.run_custom_pipeline(run_custom_pipeline_payload=run_custom_pipeline_payload, - run_name=run_name) + custom_pipeline = api_client.deserialize( + response=mock_response, response_type=ApiPipelineCustom + ) + run_custom_pipeline_payload = ApiPipelineCustomRunPayload( + custom_pipeline=custom_pipeline, run_parameters=parameters + ) + run_code_response: ApiRunCodeResponse = api_instance.run_custom_pipeline( + run_custom_pipeline_payload=run_custom_pipeline_payload, run_name=run_name + ) print(run_code_response.run_url) return run_code_response.run_url except ApiException as e: - print("Exception while calling PipelineServiceApi -> run_pipeline: %s\n" % e, file=stderr) + print( + "Exception while calling PipelineServiceApi -> run_pipeline: %s\n" % e, + file=stderr, + ) return None @@ -398,15 +496,23 @@ def list_pipelines(filter_dict: dict = {}, sort_by: str = None) -> [ApiPipeline] try: filter_str = json.dumps(filter_dict) if filter_dict else None - api_response: ApiListPipelinesResponse = api_instance.list_pipelines(filter=filter_str, sort_by=sort_by) + api_response: ApiListPipelinesResponse = api_instance.list_pipelines( + filter=filter_str, sort_by=sort_by + ) for c in api_response.pipelines: - print("%s %s %s" % (c.id, c.created_at.strftime("%Y-%m-%d %H:%M:%S"), c.name)) + print( + "%s %s %s" + % (c.id, c.created_at.strftime("%Y-%m-%d %H:%M:%S"), c.name) + ) return api_response.pipelines except ApiException as e: - print("Exception when calling PipelineServiceApi 
-> list_pipelines: %s\n" % e, file=stderr) + print( + "Exception when calling PipelineServiceApi -> list_pipelines: %s\n" % e, + file=stderr, + ) return [] @@ -436,7 +542,9 @@ def main(): description = "Some description" annotations_dict = {"platform": "Kubeflow", "license": "opensource"} annotations_str = json.dumps(annotations_dict) - pipeline_id = upload_pipeline_template(tarfile_name, name, description, annotations_str) + pipeline_id = upload_pipeline_template( + tarfile_name, name, description, annotations_str + ) p = get_pipeline(pipeline_id) assert p.description == description and p.annotations == annotations_dict @@ -450,7 +558,7 @@ def main(): set_featured_pipelines(pipeline_ids[:4]) # randomly selected a pipeline - i = random.randint(0, len(pipeline_ids)-1) + i = random.randint(0, len(pipeline_ids) - 1) pipeline_id = pipeline_ids[i] # show one randomly selected pipeline @@ -459,14 +567,16 @@ def main(): verify_pipeline_download(pipeline_id) - pipelines = list_pipelines(filter_dict={"name": "[Sample] Basic - Parallel execution"}) or \ - list_pipelines(filter_dict={"name": "[Sample] Basic - Parallel Join"}) or \ - list_pipelines(filter_dict={"name": "[Tutorial] DSL - Control structures"}) or \ - list_pipelines(filter_dict={"name": "test_parallel_join"}) + pipelines = ( + list_pipelines(filter_dict={"name": "[Sample] Basic - Parallel execution"}) + or list_pipelines(filter_dict={"name": "[Sample] Basic - Parallel Join"}) + or list_pipelines(filter_dict={"name": "[Tutorial] DSL - Control structures"}) + or list_pipelines(filter_dict={"name": "test_parallel_join"}) + ) pipeline = pipelines[0] arguments = { "url1": "gs://ml-pipeline-playground/shakespeare1.txt", - "url2": "gs://ml-pipeline-playground/shakespeare2.txt" + "url2": "gs://ml-pipeline-playground/shakespeare2.txt", } run_pipeline(pipeline.id, arguments) @@ -481,6 +591,6 @@ def main(): delete_pipeline(pipeline.id) -if __name__ == '__main__': +if __name__ == "__main__": pprint(yaml_files) main() diff 
--git a/api/examples/runs_api.py b/api/examples/runs_api.py index 0b5ac815..a0f8c71a 100644 --- a/api/examples/runs_api.py +++ b/api/examples/runs_api.py @@ -1,6 +1,6 @@ # Copyright 2021 The MLX Contributors -# -# SPDX-License-Identifier: Apache-2.0 +# +# SPDX-License-Identifier: Apache-2.0 from __future__ import print_function import json @@ -9,8 +9,8 @@ from kfp_server_api import ApiListExperimentsResponse from kfp_server_api import ApiListRunsResponse, ApiResourceType, ApiRunDetail from kfp_server_api.rest import ApiException -from os import environ as env -from pprint import pprint +from os import environ as env # noqa: F401 +from pprint import pprint # noqa: F401 # export AMBASSADOR_SERVICE_HOST=$(kubectl get nodes -o jsonpath='{.items[0]..addresses[?(@.type=="ExternalIP")].address}') @@ -32,6 +32,7 @@ def wrapper(*args, **kwargs): print(f"---[ {func.__name__}{args}{kwargs} ]---") print() return func(*args, **kwargs) + return wrapper @@ -46,20 +47,27 @@ def list_runs(experiment_name: str = None): # https://github.com/kubeflow/pipelines/blob/3e7a89e044d0ce448ce0b7b2c894a483487694a1/backend/api/filter.proto#L24-L63 experiment_filter_dict = { - "predicates": [{ - "key": "name", - "op": "IS_SUBSTRING", # "EQUALS", - "string_value": experiment_name - }] + "predicates": [ + { + "key": "name", + "op": "IS_SUBSTRING", # "EQUALS", + "string_value": experiment_name, + } + ] } - experiment_response: ApiListExperimentsResponse = kfp_client._experiment_api.\ - list_experiment(page_size=100, - sort_by="created_at desc", - filter=json.dumps(experiment_filter_dict)) + experiment_response: ApiListExperimentsResponse = ( + kfp_client._experiment_api.list_experiment( + page_size=100, + sort_by="created_at desc", + filter=json.dumps(experiment_filter_dict), + ) + ) if experiment_response.experiments: - experiments = [e for e in experiment_response.experiments if experiment_name in e.name] + experiments = [ + e for e in experiment_response.experiments if experiment_name in e.name 
+ ] else: print(f"Experiment(s) with name '{experiment_name}' do(es) not exist.") @@ -67,24 +75,35 @@ def list_runs(experiment_name: str = None): if experiments: for experiment in experiments: - run_response: ApiListRunsResponse = \ - kfp_client._run_api.list_runs(page_size=100, - sort_by="created_at desc", - resource_reference_key_type=ApiResourceType.EXPERIMENT, - resource_reference_key_id=experiment.id) + run_response: ApiListRunsResponse = kfp_client._run_api.list_runs( + page_size=100, + sort_by="created_at desc", + resource_reference_key_type=ApiResourceType.EXPERIMENT, + resource_reference_key_id=experiment.id, + ) runs.extend(run_response.runs or []) else: - run_response: ApiListRunsResponse = \ - kfp_client._run_api.list_runs(page_size=100, sort_by="created_at desc") + run_response: ApiListRunsResponse = kfp_client._run_api.list_runs( + page_size=100, sort_by="created_at desc" + ) runs.extend(run_response.runs or []) runs = sorted(runs, key=lambda r: r.created_at, reverse=True) for i, r in enumerate(runs): # pprint(r) - print("%2i: %s %s %s (%s)" % (i+1, r.id, r.created_at.strftime("%Y-%m-%d %H:%M:%S"), r.name, r.status)) + print( + "%2i: %s %s %s (%s)" + % ( + i + 1, + r.id, + r.created_at.strftime("%Y-%m-%d %H:%M:%S"), + r.name, + r.status, + ) + ) return runs @@ -105,13 +124,21 @@ def stop_run(run_id): run_detail: ApiRunDetail = kfp_client._run_api.get_run(run_id=run_id) if run_detail: # and run_detail.run.status in ["Failed", "Error"]: - workflow_manifest = json.loads(run_detail.pipeline_runtime.workflow_manifest) + workflow_manifest = json.loads( + run_detail.pipeline_runtime.workflow_manifest + ) # pods = filter(lambda d: d["type"] == 'Pod', list(workflow_manifest["status"]["nodes"].values())) - pods = [node for node in list(workflow_manifest["status"]["nodes"].values()) if node["type"] == "Pod"] + pods = [ + node + for node in list(workflow_manifest["status"]["nodes"].values()) + if node["type"] == "Pod" + ] if pods: print(f"{e.status}, {e.reason}: 
{pods[0]['message']}") else: - print(f"Run with id '{run_id}' could not be terminated. No pods in 'Running' state?") + print( + f"Run with id '{run_id}' could not be terminated. No pods in 'Running' state?" + ) else: print(e) @@ -137,18 +164,26 @@ def delete_run(run_id): run_detail: ApiRunDetail = kfp_client._run_api.get_run(run_id=run_id) if run_detail: # and run_detail.run.status in ["Failed", "Error"]: - workflow_manifest = json.loads(run_detail.pipeline_runtime.workflow_manifest) + workflow_manifest = json.loads( + run_detail.pipeline_runtime.workflow_manifest + ) # pods = filter(lambda d: d["type"] == 'Pod', list(workflow_manifest["status"]["nodes"].values())) - pods = [node for node in list(workflow_manifest["status"]["nodes"].values()) if node["type"] == "Pod"] + pods = [ + node + for node in list(workflow_manifest["status"]["nodes"].values()) + if node["type"] == "Pod" + ] if pods: print(pods[0]["message"]) else: - print(f"Run with id '{run_id}' could not be deleted. No corresponding pods.") + print( + f"Run with id '{run_id}' could not be deleted. No corresponding pods." 
+ ) else: print(e) -if __name__ == '__main__': +if __name__ == "__main__": # runs = list_runs() # runs = list_runs(experiment_name="COMPONENT_RUNS") diff --git a/api/examples/settings_api.py b/api/examples/settings_api.py index 39cc446d..0015ba27 100644 --- a/api/examples/settings_api.py +++ b/api/examples/settings_api.py @@ -1,32 +1,30 @@ # Copyright 2021 The MLX Contributors -# -# SPDX-License-Identifier: Apache-2.0 +# +# SPDX-License-Identifier: Apache-2.0 from __future__ import print_function -import json -import os -import swagger_client +import swagger_client # noqa: F401 -from os import environ as env -from pprint import pprint +from os import environ as env # noqa: F401 +from pprint import pprint # noqa: F401 from swagger_client.api_client import ApiClient, Configuration from swagger_client.models import ApiSettings, ApiSettingsSection, ApiParameter -from swagger_client.rest import ApiException +from swagger_client.rest import ApiException # noqa: F401 from sys import stderr -from urllib3.response import HTTPResponse +from urllib3.response import HTTPResponse # noqa: F401 -host = '127.0.0.1' -port = '8080' +host = "127.0.0.1" +port = "8080" # host = env.get("MLX_API_SERVICE_HOST") # port = env.get("MLX_API_SERVICE_PORT") -api_base_path = 'apis/v1alpha1' +api_base_path = "apis/v1alpha1" def get_swagger_client(): config = Configuration() - config.host = f'http://{host}:{port}/{api_base_path}' + config.host = f"http://{host}:{port}/{api_base_path}" api_client = ApiClient(configuration=config) return api_client @@ -37,6 +35,7 @@ def wrapper(*args, **kwargs): print(f"---[ {func.__name__}{args}{kwargs} ]---") print() return func(*args, **kwargs) + return wrapper @@ -52,7 +51,10 @@ def get_app_settings(): return api_settings except ApiException as e: - print(f"Exception when calling {api_instance.__class__.__name__}: %s\n" % e, file=stderr) + print( + f"Exception when calling {api_instance.__class__.__name__}: %s\n" % e, + file=stderr, + ) raise e return None @@ 
-70,7 +72,10 @@ def modify_app_settings(dictionary: dict): return api_settings except ApiException as e: - print(f"Exception when calling {api_instance.__class__.__name__}: %s\n" % e, file=stderr) + print( + f"Exception when calling {api_instance.__class__.__name__}: %s\n" % e, + file=stderr, + ) raise e return None @@ -88,7 +93,10 @@ def set_app_settings(api_settings: ApiSettings): return api_settings except ApiException as e: - print(f"Exception when calling {api_instance.__class__.__name__}: %s\n" % e, file=stderr) + print( + f"Exception when calling {api_instance.__class__.__name__}: %s\n" % e, + file=stderr, + ) raise e return None @@ -97,23 +105,22 @@ def set_app_settings(api_settings: ApiSettings): def main(): settings = get_app_settings() - modify_app_settings({ - "Upload enabled": False, - "API endpoint": "localhost:8080" - }) + modify_app_settings({"Upload enabled": False, "API endpoint": "localhost:8080"}) settings.sections += [ ApiSettingsSection( name="General", description="General settings", settings=[ - ApiParameter("Color scheme", 'Color scheme [blue, red, yellow]', 'blue', 'red') - ] + ApiParameter( + "Color scheme", "Color scheme [blue, red, yellow]", "blue", "red" + ) + ], ) ] set_app_settings(settings) -if __name__ == '__main__': +if __name__ == "__main__": main() diff --git a/api/server/setup.py b/api/server/setup.py index dd85d1cc..5070c386 100644 --- a/api/server/setup.py +++ b/api/server/setup.py @@ -27,12 +27,10 @@ keywords=["Swagger", "MLX API"], install_requires=REQUIRES, packages=find_packages(), - package_data={'': ['swagger/swagger.yaml']}, + package_data={"": ["swagger/swagger.yaml"]}, include_package_data=True, - entry_points={ - 'console_scripts': ['swagger_server=swagger_server.__main__:main']}, + entry_points={"console_scripts": ["swagger_server=swagger_server.__main__:main"]}, long_description="""\ Machine Learning Exchange API - """ + """, ) - diff --git a/api/server/swagger_server/__main__.py 
b/api/server/swagger_server/__main__.py index 73a98b60..61010798 100644 --- a/api/server/swagger_server/__main__.py +++ b/api/server/swagger_server/__main__.py @@ -1,16 +1,16 @@ #!/usr/bin/env python3 # Copyright 2021 The MLX Contributors -# -# SPDX-License-Identifier: Apache-2.0 +# +# SPDX-License-Identifier: Apache-2.0 -import connexion +import connexion # noqa: F401 import logging from datetime import datetime from flask import redirect, request from flask_cors import CORS -from os import environ as env +from os import environ as env # noqa: F401 from swagger_server import encoder from swagger_server import VERSION from waitress import serve @@ -18,28 +18,34 @@ def get_request_log_msg(): - return '(%03d) %s %s %s ...' % (current_thread().ident % 1000, request.remote_addr, - request.method, request.full_path) + return "(%03d) %s %s %s ..." % ( + current_thread().ident % 1000, + request.remote_addr, + request.method, + request.full_path, + ) def main(): - logging.basicConfig(format="%(asctime)s.%(msecs)03d %(levelname)-7s [%(name)-.8s] %(message)s", - datefmt="%Y/%m/%d %H:%M:%S", - level=env.get("LOGLEVEL", logging.INFO)) + logging.basicConfig( + format="%(asctime)s.%(msecs)03d %(levelname)-7s [%(name)-.8s] %(message)s", + datefmt="%Y/%m/%d %H:%M:%S", + level=env.get("LOGLEVEL", logging.INFO), + ) log = logging.getLogger("flaskapp") log.info("MLX API version: %s" % VERSION) - cx_app = connexion.App(__name__, specification_dir='./swagger/') - cx_app.add_api('swagger.yaml', arguments={'title': 'MLX API'}) + cx_app = connexion.App(__name__, specification_dir="./swagger/") + cx_app.add_api("swagger.yaml", arguments={"title": "MLX API"}) flask_app = cx_app.app flask_app.json_encoder = encoder.JSONEncoder log.info("Enable cross-origin support with 'flask-cors': origins='*'") - CORS(flask_app, origins='*') + CORS(flask_app, origins="*") start_times = dict() @@ -56,7 +62,13 @@ def after_request(response): elapsed_millis = time_delta.seconds * 1000 + 
time_delta.microseconds / 1000 outstanding_requests = len(start_times) log_func = get_log_method_by_response_status(response) - log_func('%s %s (%i ms) [%i]', msg, response.status, elapsed_millis, outstanding_requests) + log_func( + "%s %s (%i ms) [%i]", + msg, + response.status, + elapsed_millis, + outstanding_requests, + ) return response log_functions = [log.info, log.info, log.info, log.info, log.warning, log.error] @@ -74,5 +86,5 @@ def index(): serve(flask_app, host="0.0.0.0", port=8080, threads=32) -if __name__ == '__main__': +if __name__ == "__main__": main() diff --git a/api/server/swagger_server/code_templates/run_component.TEMPLATE.py b/api/server/swagger_server/code_templates/run_component.TEMPLATE.py index 06cab0bf..5b64dfce 100644 --- a/api/server/swagger_server/code_templates/run_component.TEMPLATE.py +++ b/api/server/swagger_server/code_templates/run_component.TEMPLATE.py @@ -17,7 +17,7 @@ name='${name}', description='${description}' ) -def kfp_component_pipeline(${pipeline_method_args}): +def kfp_component_pipeline(${pipeline_method_args}): # noqa: E999 from kfp import components @@ -45,7 +45,7 @@ def kfp_component_pipeline(${pipeline_method_args}): # TODO: specify pipeline argument values arguments = ${parameter_dict} -client = TektonClient(${pipeline_server}) +client = TektonClient(${pipeline_server}) # noqa: E999 # Get or create an experiment and submit a pipeline run experiment = client.create_experiment('COMPONENT_RUNS') diff --git a/api/server/swagger_server/code_templates/run_dataset.TEMPLATE.py b/api/server/swagger_server/code_templates/run_dataset.TEMPLATE.py index 25bdfbca..f67aa780 100644 --- a/api/server/swagger_server/code_templates/run_dataset.TEMPLATE.py +++ b/api/server/swagger_server/code_templates/run_dataset.TEMPLATE.py @@ -17,7 +17,7 @@ name='${name}', description='${description}' ) -def dataset_pipeline(${pipeline_method_args}): +def dataset_pipeline(${pipeline_method_args}): # noqa: E999 from kfp.components import 
load_component_from_url @@ -55,7 +55,7 @@ def dataset_pipeline(${pipeline_method_args}): # TODO: specify pipeline argument values arguments = ${parameter_dict} -client = TektonClient(${pipeline_server}) +client = TektonClient(${pipeline_server}) # noqa: E999 # Get or create an experiment and submit a pipeline run experiment = client.create_experiment('DATASET_RUNS') diff --git a/api/server/swagger_server/code_templates/run_notebook.TEMPLATE.py b/api/server/swagger_server/code_templates/run_notebook.TEMPLATE.py index 71c1d1ac..743d1c27 100644 --- a/api/server/swagger_server/code_templates/run_notebook.TEMPLATE.py +++ b/api/server/swagger_server/code_templates/run_notebook.TEMPLATE.py @@ -54,7 +54,7 @@ def notebook_pipeline(): # Run the pipeline ############################################################ -client = TektonClient(${pipeline_server}) +client = TektonClient(${pipeline_server}) # noqa: E999 # Get or create an experiment and submit a pipeline run experiment = client.create_experiment('NOTEBOOK_RUNS') diff --git a/api/server/swagger_server/code_templates/run_notebook_with_dataset.TEMPLATE.py b/api/server/swagger_server/code_templates/run_notebook_with_dataset.TEMPLATE.py index f57650f1..390ad511 100644 --- a/api/server/swagger_server/code_templates/run_notebook_with_dataset.TEMPLATE.py +++ b/api/server/swagger_server/code_templates/run_notebook_with_dataset.TEMPLATE.py @@ -60,7 +60,7 @@ def notebook_pipeline(): # Run the pipeline ############################################################ -client = TektonClient(${pipeline_server}) +client = TektonClient(${pipeline_server}) # noqa: E999 # Get or create an experiment and submit a pipeline run experiment = client.create_experiment('NOTEBOOK_RUNS') diff --git a/api/server/swagger_server/code_templates/run_pipeline.TEMPLATE.py b/api/server/swagger_server/code_templates/run_pipeline.TEMPLATE.py index 6bc67842..b256db25 100644 --- a/api/server/swagger_server/code_templates/run_pipeline.TEMPLATE.py +++ 
b/api/server/swagger_server/code_templates/run_pipeline.TEMPLATE.py @@ -17,7 +17,7 @@ name='${name}', description='${description}' ) -def custom_pipeline(${pipeline_method_args}): +def custom_pipeline(${pipeline_method_args}): # noqa: E999 ${pipeline_function_body} @@ -38,7 +38,7 @@ def custom_pipeline(${pipeline_method_args}): # TODO: specify pipeline argument values arguments = ${parameter_dict} -client = TektonClient(${pipeline_server}) +client = TektonClient(${pipeline_server}) # noqa: E999 # Get or create an experiment and submit a pipeline run experiment = client.create_experiment('PIPELINE_RUNS') diff --git a/api/server/swagger_server/code_templates/serve_kfserving.TEMPLATE.py b/api/server/swagger_server/code_templates/serve_kfserving.TEMPLATE.py index 94e1589c..d3b6ca42 100644 --- a/api/server/swagger_server/code_templates/serve_kfserving.TEMPLATE.py +++ b/api/server/swagger_server/code_templates/serve_kfserving.TEMPLATE.py @@ -66,7 +66,7 @@ def model_pipeline(model_id='${model_identifier}'): # Run the pipeline ############################################################ -client = TektonClient(${pipeline_server}) +client = TektonClient(${pipeline_server}) # noqa: E999 # Get or create an experiment and submit a pipeline run experiment = client.create_experiment('MODEL_RUNS') diff --git a/api/server/swagger_server/code_templates/serve_knative.TEMPLATE.py b/api/server/swagger_server/code_templates/serve_knative.TEMPLATE.py index ab863e44..02175821 100644 --- a/api/server/swagger_server/code_templates/serve_knative.TEMPLATE.py +++ b/api/server/swagger_server/code_templates/serve_knative.TEMPLATE.py @@ -73,7 +73,7 @@ def model_pipeline(model_id='${model_identifier}'): # Run the pipeline ############################################################ -client = TektonClient(${pipeline_server}) +client = TektonClient(${pipeline_server}) # noqa: E999 # Get or create an experiment and submit a pipeline run experiment = client.create_experiment('MODEL_RUNS') diff --git 
a/api/server/swagger_server/code_templates/serve_kubernetes.TEMPLATE.py b/api/server/swagger_server/code_templates/serve_kubernetes.TEMPLATE.py index 7f95bd5d..2384671a 100644 --- a/api/server/swagger_server/code_templates/serve_kubernetes.TEMPLATE.py +++ b/api/server/swagger_server/code_templates/serve_kubernetes.TEMPLATE.py @@ -71,7 +71,7 @@ def model_pipeline(model_id='${model_identifier}'): # Run the pipeline ############################################################ -client = TektonClient(${pipeline_server}) +client = TektonClient(${pipeline_server}) # noqa: E999 # Get or create an experiment and submit a pipeline run experiment = client.create_experiment('MODEL_RUNS') diff --git a/api/server/swagger_server/code_templates/train_watsonml.TEMPLATE.py b/api/server/swagger_server/code_templates/train_watsonml.TEMPLATE.py index ceff86d5..8bf4697f 100644 --- a/api/server/swagger_server/code_templates/train_watsonml.TEMPLATE.py +++ b/api/server/swagger_server/code_templates/train_watsonml.TEMPLATE.py @@ -80,7 +80,7 @@ def model_pipeline(model_id='${model_identifier}'): # Run the pipeline ############################################################ -client = TektonClient(${pipeline_server}) +client = TektonClient(${pipeline_server}) # noqa: E999 # Get or create an experiment and submit a pipeline run experiment = client.create_experiment('MODEL_RUNS') diff --git a/api/server/swagger_server/code_templates/train_watsonml_w_credentials.TEMPLATE.py b/api/server/swagger_server/code_templates/train_watsonml_w_credentials.TEMPLATE.py index d1df85a8..1017a253 100644 --- a/api/server/swagger_server/code_templates/train_watsonml_w_credentials.TEMPLATE.py +++ b/api/server/swagger_server/code_templates/train_watsonml_w_credentials.TEMPLATE.py @@ -81,7 +81,7 @@ def model_pipeline(model_id='${model_identifier}', # Run the pipeline ############################################################ -client = TektonClient(${pipeline_server}) +client = TektonClient(${pipeline_server}) # 
noqa: E999 # Get or create an experiment and submit a pipeline run experiment = client.create_experiment('MODEL_RUNS') diff --git a/api/server/swagger_server/controllers/application_settings_controller.py b/api/server/swagger_server/controllers/application_settings_controller.py index d9714559..cf193ffd 100644 --- a/api/server/swagger_server/controllers/application_settings_controller.py +++ b/api/server/swagger_server/controllers/application_settings_controller.py @@ -2,13 +2,13 @@ # # SPDX-License-Identifier: Apache-2.0 -import connexion -import six +import connexion # noqa: F401 +import six # noqa: F401 from swagger_server.models.api_settings import ApiSettings # noqa: E501 -from swagger_server.models.api_status import ApiStatus # noqa: E501 +from swagger_server.models.api_status import ApiStatus # noqa: F401, E501 from swagger_server.models.dictionary import Dictionary # noqa: E501 -from swagger_server import util +from swagger_server import util # noqa: F401 def get_application_settings(): # noqa: E501 @@ -33,7 +33,7 @@ def modify_application_settings(dictionary): # noqa: E501 :rtype: ApiSettings """ if connexion.request.is_json: - dictionary = Dictionary.from_dict(connexion.request.get_json()) # noqa: E501 + dictionary = Dictionary.from_dict(connexion.request.get_json()) return util.invoke_controller_impl() @@ -42,11 +42,11 @@ def set_application_settings(settings): # noqa: E501 Set and store the application settings. 
# noqa: E501 - :param settings: + :param settings: :type settings: dict | bytes :rtype: ApiSettings """ if connexion.request.is_json: - settings = ApiSettings.from_dict(connexion.request.get_json()) # noqa: E501 + settings = ApiSettings.from_dict(connexion.request.get_json()) return util.invoke_controller_impl() diff --git a/api/server/swagger_server/controllers/catalog_service_controller.py b/api/server/swagger_server/controllers/catalog_service_controller.py index dd7c98e3..81326583 100644 --- a/api/server/swagger_server/controllers/catalog_service_controller.py +++ b/api/server/swagger_server/controllers/catalog_service_controller.py @@ -2,20 +2,26 @@ # # SPDX-License-Identifier: Apache-2.0 -from swagger_server.models.api_catalog_upload import ApiCatalogUpload # noqa: E501 -from swagger_server.models.api_catalog_upload_response import ApiCatalogUploadResponse # noqa: E501 -from swagger_server.models.api_list_catalog_items_response import ApiListCatalogItemsResponse # noqa: E501 -from swagger_server import util - - -def list_all_assets(page_token=None, page_size=None, sort_by=None, filter=None): # noqa: E501 +from swagger_server.models.api_catalog_upload import ApiCatalogUpload # noqa: F401, E501 +from swagger_server.models.api_catalog_upload_response import ( # noqa: F401 + ApiCatalogUploadResponse, +) +from swagger_server.models.api_list_catalog_items_response import ( # noqa: F401 + ApiListCatalogItemsResponse, +) +from swagger_server import util # noqa: F401 + + +def list_all_assets( + page_token=None, page_size=None, sort_by=None, filter=None +): # noqa: E501 """list_all_assets # noqa: E501 - :param page_token: + :param page_token: :type page_token: str - :param page_size: + :param page_size: :type page_size: int :param sort_by: Can be format of \"field_name\", \"field_name asc\" or \"field_name desc\" Ascending by default. 
:type sort_by: str @@ -47,7 +53,7 @@ def upload_multiple_assets(body): # noqa: E501 # noqa: E501 - :param body: + :param body: :type body: ApiCatalogUpload :rtype: ApiCatalogUploadResponse diff --git a/api/server/swagger_server/controllers/component_service_controller.py b/api/server/swagger_server/controllers/component_service_controller.py index e0a72271..0110570a 100644 --- a/api/server/swagger_server/controllers/component_service_controller.py +++ b/api/server/swagger_server/controllers/component_service_controller.py @@ -2,17 +2,23 @@ # # SPDX-License-Identifier: Apache-2.0 -import connexion -import six +import connexion # noqa: F401 +import six # noqa: F401 from swagger_server.models.api_component import ApiComponent # noqa: E501 -from swagger_server.models.api_generate_code_response import ApiGenerateCodeResponse # noqa: E501 -from swagger_server.models.api_get_template_response import ApiGetTemplateResponse # noqa: E501 -from swagger_server.models.api_list_components_response import ApiListComponentsResponse # noqa: E501 -from swagger_server.models.api_parameter import ApiParameter # noqa: E501 -from swagger_server.models.api_run_code_response import ApiRunCodeResponse # noqa: E501 -from swagger_server.models.api_status import ApiStatus # noqa: E501 -from swagger_server import util +from swagger_server.models.api_generate_code_response import ( # noqa: F401 + ApiGenerateCodeResponse, +) +from swagger_server.models.api_get_template_response import ( # noqa: F401 + ApiGetTemplateResponse, +) +from swagger_server.models.api_list_components_response import ( # noqa: F401 + ApiListComponentsResponse, +) +from swagger_server.models.api_parameter import ApiParameter # noqa: F401, E501 +from swagger_server.models.api_run_code_response import ApiRunCodeResponse # noqa: F401, E501 +from swagger_server.models.api_status import ApiStatus # noqa: F401, E501 +from swagger_server import util # noqa: F401 def approve_components_for_publishing(component_ids): # noqa: E501 
@@ -33,13 +39,13 @@ def create_component(body): # noqa: E501 # noqa: E501 - :param body: + :param body: :type body: dict | bytes :rtype: ApiComponent """ if connexion.request.is_json: - body = ApiComponent.from_dict(connexion.request.get_json()) # noqa: E501 + body = ApiComponent.from_dict(connexion.request.get_json()) return util.invoke_controller_impl() @@ -48,7 +54,7 @@ def delete_component(id): # noqa: E501 # noqa: E501 - :param id: + :param id: :type id: str :rtype: None @@ -61,7 +67,7 @@ def download_component_files(id, include_generated_code=None): # noqa: E501 # noqa: E501 - :param id: + :param id: :type id: str :param include_generated_code: Include generated run script in download :type include_generated_code: bool @@ -76,7 +82,7 @@ def generate_component_code(id): # noqa: E501 Generate sample code to use component in a pipeline # noqa: E501 - :param id: + :param id: :type id: str :rtype: ApiGenerateCodeResponse @@ -89,7 +95,7 @@ def get_component(id): # noqa: E501 # noqa: E501 - :param id: + :param id: :type id: str :rtype: ApiComponent @@ -102,7 +108,7 @@ def get_component_template(id): # noqa: E501 # noqa: E501 - :param id: + :param id: :type id: str :rtype: ApiGetTemplateResponse @@ -110,14 +116,16 @@ def get_component_template(id): # noqa: E501 return util.invoke_controller_impl() -def list_components(page_token=None, page_size=None, sort_by=None, filter=None): # noqa: E501 +def list_components( + page_token=None, page_size=None, sort_by=None, filter=None +): # noqa: E501 """list_components # noqa: E501 - :param page_token: + :param page_token: :type page_token: str - :param page_size: + :param page_size: :type page_size: int :param sort_by: Can be format of \"field_name\", \"field_name asc\" or \"field_name desc\" Ascending by default. 
:type sort_by: str @@ -134,9 +142,9 @@ def run_component(id, parameters, run_name=None): # noqa: E501 # noqa: E501 - :param id: + :param id: :type id: str - :param parameters: + :param parameters: :type parameters: List[ApiParameter] :param run_name: name to identify the run on the Kubeflow Pipelines UI, defaults to component name :type run_name: str @@ -144,7 +152,9 @@ def run_component(id, parameters, run_name=None): # noqa: E501 :rtype: ApiRunCodeResponse """ if connexion.request.is_json: - parameters = [ApiParameter.from_dict(d) for d in connexion.request.get_json()] # noqa: E501 + parameters = [ + ApiParameter.from_dict(d) for d in connexion.request.get_json() + ] # noqa: E501 return util.invoke_controller_impl() @@ -168,7 +178,7 @@ def upload_component(uploadfile, name=None): # noqa: E501 :param uploadfile: The component YAML file to upload. Can be a GZip-compressed TAR file (.tgz, .tar.gz) or a YAML file (.yaml, .yml). Maximum size is 32MB. :type uploadfile: werkzeug.datastructures.FileStorage - :param name: + :param name: :type name: str :rtype: ApiComponent diff --git a/api/server/swagger_server/controllers/credential_service_controller.py b/api/server/swagger_server/controllers/credential_service_controller.py index 5d63436e..c8e803ac 100644 --- a/api/server/swagger_server/controllers/credential_service_controller.py +++ b/api/server/swagger_server/controllers/credential_service_controller.py @@ -2,13 +2,15 @@ # # SPDX-License-Identifier: Apache-2.0 -import connexion -import six +import connexion # noqa: F401 +import six # noqa: F401 from swagger_server.models.api_credential import ApiCredential # noqa: E501 -from swagger_server.models.api_list_credentials_response import ApiListCredentialsResponse # noqa: E501 -from swagger_server.models.api_status import ApiStatus # noqa: E501 -from swagger_server import util +from swagger_server.models.api_list_credentials_response import ( # noqa: F401 + ApiListCredentialsResponse, +) +from 
swagger_server.models.api_status import ApiStatus # noqa: F401, E501 +from swagger_server import util # noqa: F401 def create_credential(body): # noqa: E501 @@ -16,13 +18,13 @@ def create_credential(body): # noqa: E501 Creates a credential associated with a pipeline. # noqa: E501 - :param body: + :param body: :type body: dict | bytes :rtype: ApiCredential """ if connexion.request.is_json: - body = ApiCredential.from_dict(connexion.request.get_json()) # noqa: E501 + body = ApiCredential.from_dict(connexion.request.get_json()) return util.invoke_controller_impl() @@ -31,7 +33,7 @@ def delete_credential(id): # noqa: E501 # noqa: E501 - :param id: + :param id: :type id: str :rtype: None @@ -44,7 +46,7 @@ def get_credential(id): # noqa: E501 # noqa: E501 - :param id: + :param id: :type id: str :rtype: ApiCredential @@ -52,14 +54,16 @@ def get_credential(id): # noqa: E501 return util.invoke_controller_impl() -def list_credentials(page_token=None, page_size=None, sort_by=None, filter=None): # noqa: E501 +def list_credentials( + page_token=None, page_size=None, sort_by=None, filter=None +): # noqa: E501 """list_credentials # noqa: E501 - :param page_token: + :param page_token: :type page_token: str - :param page_size: + :param page_size: :type page_size: int :param sort_by: Can be format of \"field_name\", \"field_name asc\" or \"field_name desc\" Ascending by default. 
:type sort_by: str diff --git a/api/server/swagger_server/controllers/dataset_service_controller.py b/api/server/swagger_server/controllers/dataset_service_controller.py index 51ba959b..ebedd10b 100644 --- a/api/server/swagger_server/controllers/dataset_service_controller.py +++ b/api/server/swagger_server/controllers/dataset_service_controller.py @@ -2,17 +2,23 @@ # # SPDX-License-Identifier: Apache-2.0 -import connexion -import six +import connexion # noqa: F401 +import six # noqa: F401 from swagger_server.models.api_dataset import ApiDataset # noqa: E501 -from swagger_server.models.api_generate_code_response import ApiGenerateCodeResponse # noqa: E501 -from swagger_server.models.api_get_template_response import ApiGetTemplateResponse # noqa: E501 -from swagger_server.models.api_list_datasets_response import ApiListDatasetsResponse # noqa: E501 -from swagger_server.models.api_parameter import ApiParameter # noqa: E501 -from swagger_server.models.api_run_code_response import ApiRunCodeResponse # noqa: E501 -from swagger_server.models.api_status import ApiStatus # noqa: E501 -from swagger_server import util +from swagger_server.models.api_generate_code_response import ( # noqa: F401 + ApiGenerateCodeResponse, +) +from swagger_server.models.api_get_template_response import ( # noqa: F401 + ApiGetTemplateResponse, +) +from swagger_server.models.api_list_datasets_response import ( # noqa: F401 + ApiListDatasetsResponse, +) +from swagger_server.models.api_parameter import ApiParameter # noqa: F401, E501 +from swagger_server.models.api_run_code_response import ApiRunCodeResponse # noqa: F401, E501 +from swagger_server.models.api_status import ApiStatus # noqa: F401, E501 +from swagger_server import util # noqa: F401 def approve_datasets_for_publishing(dataset_ids): # noqa: E501 @@ -33,13 +39,13 @@ def create_dataset(body): # noqa: E501 # noqa: E501 - :param body: + :param body: :type body: dict | bytes :rtype: ApiDataset """ if connexion.request.is_json: - body = 
ApiDataset.from_dict(connexion.request.get_json()) # noqa: E501 + body = ApiDataset.from_dict(connexion.request.get_json()) return util.invoke_controller_impl() @@ -48,7 +54,7 @@ def delete_dataset(id): # noqa: E501 # noqa: E501 - :param id: + :param id: :type id: str :rtype: None @@ -61,7 +67,7 @@ def download_dataset_files(id, include_generated_code=None): # noqa: E501 # noqa: E501 - :param id: + :param id: :type id: str :param include_generated_code: Include generated run script in download :type include_generated_code: bool @@ -76,7 +82,7 @@ def generate_dataset_code(id): # noqa: E501 Generate sample code to use dataset in a pipeline # noqa: E501 - :param id: + :param id: :type id: str :rtype: ApiGenerateCodeResponse @@ -89,7 +95,7 @@ def get_dataset(id): # noqa: E501 # noqa: E501 - :param id: + :param id: :type id: str :rtype: ApiDataset @@ -102,7 +108,7 @@ def get_dataset_template(id): # noqa: E501 # noqa: E501 - :param id: + :param id: :type id: str :rtype: ApiGetTemplateResponse @@ -110,14 +116,16 @@ def get_dataset_template(id): # noqa: E501 return util.invoke_controller_impl() -def list_datasets(page_token=None, page_size=None, sort_by=None, filter=None): # noqa: E501 +def list_datasets( + page_token=None, page_size=None, sort_by=None, filter=None +): # noqa: E501 """list_datasets # noqa: E501 - :param page_token: + :param page_token: :type page_token: str - :param page_size: + :param page_size: :type page_size: int :param sort_by: Can be format of 'field_name', 'field_name asc' or 'field_name desc'. Ascending by default. 
:type sort_by: str @@ -134,9 +142,9 @@ def run_dataset(id, parameters=None, run_name=None): # noqa: E501 # noqa: E501 - :param id: + :param id: :type id: str - :param parameters: + :param parameters: :type parameters: list | bytes :param run_name: name to identify the run on the Kubeflow Pipelines UI, defaults to component name :type run_name: str @@ -144,7 +152,9 @@ def run_dataset(id, parameters=None, run_name=None): # noqa: E501 :rtype: ApiRunCodeResponse """ if connexion.request.is_json: - parameters = [ApiParameter.from_dict(d) for d in connexion.request.get_json()] # noqa: E501 + parameters = [ + ApiParameter.from_dict(d) for d in connexion.request.get_json() + ] # noqa: E501 return util.invoke_controller_impl() @@ -168,7 +178,7 @@ def upload_dataset(uploadfile, name=None): # noqa: E501 :param uploadfile: The dataset YAML file to upload. Can be a GZip-compressed TAR file (.tgz, .tar.gz) or a YAML file (.yaml, .yml). Maximum size is 32MB. :type uploadfile: werkzeug.datastructures.FileStorage - :param name: + :param name: :type name: str :rtype: ApiDataset diff --git a/api/server/swagger_server/controllers/health_check_controller.py b/api/server/swagger_server/controllers/health_check_controller.py index b808860e..ff19d1a8 100644 --- a/api/server/swagger_server/controllers/health_check_controller.py +++ b/api/server/swagger_server/controllers/health_check_controller.py @@ -2,11 +2,11 @@ # # SPDX-License-Identifier: Apache-2.0 -import connexion -import six +import connexion # noqa: F401 +import six # noqa: F401 -from swagger_server.models.api_status import ApiStatus # noqa: E501 -from swagger_server import util +from swagger_server.models.api_status import ApiStatus # noqa: F401, E501 +from swagger_server import util # noqa: F401 def health_check(check_database=None, check_object_store=None): # noqa: E501 diff --git a/api/server/swagger_server/controllers/inference_service_controller.py b/api/server/swagger_server/controllers/inference_service_controller.py 
index e3a145a2..555120c4 100644 --- a/api/server/swagger_server/controllers/inference_service_controller.py +++ b/api/server/swagger_server/controllers/inference_service_controller.py @@ -2,13 +2,15 @@ # # SPDX-License-Identifier: Apache-2.0 -import connexion -import six +import connexion # noqa: F401 +import six # noqa: F401 -from swagger_server.models.api_inferenceservice import ApiInferenceservice # noqa: E501 -from swagger_server.models.api_list_inferenceservices_response import ApiListInferenceservicesResponse # noqa: E501 -from swagger_server.models.api_status import ApiStatus # noqa: E501 -from swagger_server import util +from swagger_server.models.api_inferenceservice import ApiInferenceservice # noqa: F401, E501 +from swagger_server.models.api_list_inferenceservices_response import ( # noqa: F401 + ApiListInferenceservicesResponse, +) +from swagger_server.models.api_status import ApiStatus # noqa: F401, E501 +from swagger_server import util # noqa: F401 def create_service(body, namespace=None): # noqa: E501 @@ -16,15 +18,15 @@ def create_service(body, namespace=None): # noqa: E501 # noqa: E501 - :param body: + :param body: :type body: dict | bytes - :param namespace: + :param namespace: :type namespace: str :rtype: ApiInferenceservice """ if connexion.request.is_json: - body = ApiInferenceservice.from_dict(connexion.request.get_json()) # noqa: E501 + body = ApiInferenceservice.from_dict(connexion.request.get_json()) return util.invoke_controller_impl() @@ -33,9 +35,9 @@ def get_inferenceservices(id, namespace=None): # noqa: E501 # noqa: E501 - :param id: + :param id: :type id: str - :param namespace: + :param namespace: :type namespace: str :rtype: ApiInferenceservice @@ -43,20 +45,22 @@ def get_inferenceservices(id, namespace=None): # noqa: E501 return util.invoke_controller_impl() -def list_inferenceservices(page_token=None, page_size=None, sort_by=None, filter=None, namespace=None): # noqa: E501 +def list_inferenceservices( + page_token=None, 
page_size=None, sort_by=None, filter=None, namespace=None +): # noqa: E501 """list_inferenceservices # noqa: E501 - :param page_token: + :param page_token: :type page_token: str - :param page_size: + :param page_size: :type page_size: int :param sort_by: Can be format of \"field_name\", \"field_name asc\" or \"field_name desc\" Ascending by default. :type sort_by: str :param filter: A string-serialized JSON dictionary with key-value pairs that correspond to the InferenceService's attribute names and their respective values to be filtered for. :type filter: str - :param namespace: + :param namespace: :type namespace: str :rtype: ApiListInferenceservicesResponse @@ -71,9 +75,9 @@ def upload_service(uploadfile, name=None, namespace=None): # noqa: E501 :param uploadfile: The inference service metadata to upload. Maximum size of 32MB is supported. :type uploadfile: werkzeug.datastructures.FileStorage - :param name: + :param name: :type name: str - :param namespace: + :param namespace: :type namespace: str :rtype: ApiInferenceservice diff --git a/api/server/swagger_server/controllers/model_service_controller.py b/api/server/swagger_server/controllers/model_service_controller.py index 2facf4f8..b7799f90 100644 --- a/api/server/swagger_server/controllers/model_service_controller.py +++ b/api/server/swagger_server/controllers/model_service_controller.py @@ -2,17 +2,23 @@ # # SPDX-License-Identifier: Apache-2.0 -import connexion -import six - -from swagger_server.models.api_generate_model_code_response import ApiGenerateModelCodeResponse # noqa: E501 -from swagger_server.models.api_get_template_response import ApiGetTemplateResponse # noqa: E501 -from swagger_server.models.api_list_models_response import ApiListModelsResponse # noqa: E501 +import connexion # noqa: F401 +import six # noqa: F401 + +from swagger_server.models.api_generate_model_code_response import ( # noqa: F401 + ApiGenerateModelCodeResponse, +) +from swagger_server.models.api_get_template_response import ( # 
noqa: F401 + ApiGetTemplateResponse, +) +from swagger_server.models.api_list_models_response import ( # noqa: F401 + ApiListModelsResponse, +) from swagger_server.models.api_model import ApiModel # noqa: E501 -from swagger_server.models.api_run_code_response import ApiRunCodeResponse # noqa: E501 -from swagger_server.models.api_status import ApiStatus # noqa: E501 +from swagger_server.models.api_run_code_response import ApiRunCodeResponse # noqa: F401, E501 +from swagger_server.models.api_status import ApiStatus # noqa: F401, E501 from swagger_server.models.dictionary import Dictionary # noqa: E501 -from swagger_server import util +from swagger_server import util # noqa: F401 def approve_models_for_publishing(model_ids): # noqa: E501 @@ -33,13 +39,13 @@ def create_model(body): # noqa: E501 # noqa: E501 - :param body: + :param body: :type body: dict | bytes :rtype: ApiModel """ if connexion.request.is_json: - body = ApiModel.from_dict(connexion.request.get_json()) # noqa: E501 + body = ApiModel.from_dict(connexion.request.get_json()) return util.invoke_controller_impl() @@ -48,7 +54,7 @@ def delete_model(id): # noqa: E501 # noqa: E501 - :param id: + :param id: :type id: str :rtype: None @@ -61,7 +67,7 @@ def download_model_files(id, include_generated_code=None): # noqa: E501 # noqa: E501 - :param id: + :param id: :type id: str :param include_generated_code: Include generated run scripts in download :type include_generated_code: bool @@ -76,7 +82,7 @@ def generate_model_code(id): # noqa: E501 # noqa: E501 - :param id: + :param id: :type id: str :rtype: ApiGenerateModelCodeResponse @@ -89,7 +95,7 @@ def get_model(id): # noqa: E501 # noqa: E501 - :param id: + :param id: :type id: str :rtype: ApiModel @@ -102,7 +108,7 @@ def get_model_template(id): # noqa: E501 # noqa: E501 - :param id: + :param id: :type id: str :rtype: ApiGetTemplateResponse @@ -110,14 +116,16 @@ def get_model_template(id): # noqa: E501 return util.invoke_controller_impl() -def 
list_models(page_token=None, page_size=None, sort_by=None, filter=None): # noqa: E501 +def list_models( + page_token=None, page_size=None, sort_by=None, filter=None +): # noqa: E501 """list_models # noqa: E501 - :param page_token: + :param page_token: :type page_token: str - :param page_size: + :param page_size: :type page_size: int :param sort_by: Can be format of \"field_name\", \"field_name asc\" or \"field_name desc\" Ascending by default. :type sort_by: str @@ -129,12 +137,14 @@ def list_models(page_token=None, page_size=None, sort_by=None, filter=None): # return util.invoke_controller_impl() -def run_model(id, pipeline_stage, execution_platform, run_name=None, parameters=None): # noqa: E501 +def run_model( + id, pipeline_stage, execution_platform, run_name=None, parameters=None +): # noqa: E501 """run_model # noqa: E501 - :param id: + :param id: :type id: str :param pipeline_stage: pipeline stage, either 'train' or 'serve' :type pipeline_stage: str @@ -148,7 +158,7 @@ def run_model(id, pipeline_stage, execution_platform, run_name=None, parameters= :rtype: ApiRunCodeResponse """ if connexion.request.is_json: - parameters = Dictionary.from_dict(connexion.request.get_json()) # noqa: E501 + parameters = Dictionary.from_dict(connexion.request.get_json()) return util.invoke_controller_impl() @@ -172,7 +182,7 @@ def upload_model(uploadfile, name=None): # noqa: E501 :param uploadfile: The model YAML file to upload. Can be a GZip-compressed TAR file (.tgz, .tar.gz) or a YAML file (.yaml, .yml). Maximum size is 32MB. 
:type uploadfile: werkzeug.datastructures.FileStorage - :param name: + :param name: :type name: str :rtype: ApiModel diff --git a/api/server/swagger_server/controllers/notebook_service_controller.py b/api/server/swagger_server/controllers/notebook_service_controller.py index 55bebaef..8ce43006 100644 --- a/api/server/swagger_server/controllers/notebook_service_controller.py +++ b/api/server/swagger_server/controllers/notebook_service_controller.py @@ -2,17 +2,23 @@ # # SPDX-License-Identifier: Apache-2.0 -import connexion -import six - -from swagger_server.models.api_generate_code_response import ApiGenerateCodeResponse # noqa: E501 -from swagger_server.models.api_get_template_response import ApiGetTemplateResponse # noqa: E501 -from swagger_server.models.api_list_notebooks_response import ApiListNotebooksResponse # noqa: E501 +import connexion # noqa: F401 +import six # noqa: F401 + +from swagger_server.models.api_generate_code_response import ( # noqa: F401 + ApiGenerateCodeResponse, +) +from swagger_server.models.api_get_template_response import ( # noqa: F401 + ApiGetTemplateResponse, +) +from swagger_server.models.api_list_notebooks_response import ( # noqa: F401 + ApiListNotebooksResponse, +) from swagger_server.models.api_notebook import ApiNotebook # noqa: E501 -from swagger_server.models.api_run_code_response import ApiRunCodeResponse # noqa: E501 -from swagger_server.models.api_status import ApiStatus # noqa: E501 +from swagger_server.models.api_run_code_response import ApiRunCodeResponse # noqa: F401, E501 +from swagger_server.models.api_status import ApiStatus # noqa: F401, E501 from swagger_server.models.dictionary import Dictionary # noqa: E501 -from swagger_server import util +from swagger_server import util # noqa: F401 def approve_notebooks_for_publishing(notebook_ids): # noqa: E501 @@ -33,13 +39,13 @@ def create_notebook(body): # noqa: E501 # noqa: E501 - :param body: + :param body: :type body: dict | bytes :rtype: ApiNotebook """ if 
connexion.request.is_json: - body = ApiNotebook.from_dict(connexion.request.get_json()) # noqa: E501 + body = ApiNotebook.from_dict(connexion.request.get_json()) return util.invoke_controller_impl() @@ -48,7 +54,7 @@ def delete_notebook(id): # noqa: E501 # noqa: E501 - :param id: + :param id: :type id: str :rtype: None @@ -61,7 +67,7 @@ def download_notebook_files(id, include_generated_code=None): # noqa: E501 # noqa: E501 - :param id: + :param id: :type id: str :param include_generated_code: Include generated run script in download :type include_generated_code: bool @@ -76,7 +82,7 @@ def generate_notebook_code(id): # noqa: E501 Generate sample code to use notebook in a pipeline # noqa: E501 - :param id: + :param id: :type id: str :rtype: ApiGenerateCodeResponse @@ -89,7 +95,7 @@ def get_notebook(id): # noqa: E501 # noqa: E501 - :param id: + :param id: :type id: str :rtype: ApiNotebook @@ -102,7 +108,7 @@ def get_notebook_template(id): # noqa: E501 # noqa: E501 - :param id: + :param id: :type id: str :rtype: ApiGetTemplateResponse @@ -110,14 +116,16 @@ def get_notebook_template(id): # noqa: E501 return util.invoke_controller_impl() -def list_notebooks(page_token=None, page_size=None, sort_by=None, filter=None): # noqa: E501 +def list_notebooks( + page_token=None, page_size=None, sort_by=None, filter=None +): # noqa: E501 """list_notebooks # noqa: E501 - :param page_token: + :param page_token: :type page_token: str - :param page_size: + :param page_size: :type page_size: int :param sort_by: Can be format of \"field_name\", \"field_name asc\" or \"field_name desc\" Ascending by default. 
:type sort_by: str @@ -134,7 +142,7 @@ def run_notebook(id, run_name=None, parameters=None): # noqa: E501 # noqa: E501 - :param id: + :param id: :type id: str :param run_name: name to identify the run on the Kubeflow Pipelines UI, defaults to notebook name :type run_name: str @@ -144,7 +152,7 @@ def run_notebook(id, run_name=None, parameters=None): # noqa: E501 :rtype: ApiRunCodeResponse """ if connexion.request.is_json: - parameters = Dictionary.from_dict(connexion.request.get_json()) # noqa: E501 + parameters = Dictionary.from_dict(connexion.request.get_json()) return util.invoke_controller_impl() @@ -168,7 +176,7 @@ def upload_notebook(uploadfile, name=None, enterprise_github_token=None): # noq :param uploadfile: The notebook metadata YAML file to upload. Can be a GZip-compressed TAR file (.tgz, .tar.gz) or a YAML file (.yaml, .yml). Maximum size is 32MB. :type uploadfile: werkzeug.datastructures.FileStorage - :param name: + :param name: :type name: str :param enterprise_github_token: Optional GitHub API token providing read-access to notebooks stored on Enterprise GitHub accounts. 
:type enterprise_github_token: str diff --git a/api/server/swagger_server/controllers/pipeline_service_controller.py b/api/server/swagger_server/controllers/pipeline_service_controller.py index 0632ad72..7f36252c 100644 --- a/api/server/swagger_server/controllers/pipeline_service_controller.py +++ b/api/server/swagger_server/controllers/pipeline_service_controller.py @@ -2,18 +2,26 @@ # # SPDX-License-Identifier: Apache-2.0 -import connexion -import six - -from swagger_server.models.api_get_template_response import ApiGetTemplateResponse # noqa: E501 -from swagger_server.models.api_list_pipelines_response import ApiListPipelinesResponse # noqa: E501 +import connexion # noqa: F401 +import six # noqa: F401 + +from swagger_server.models.api_get_template_response import ( # noqa: F401 + ApiGetTemplateResponse, +) +from swagger_server.models.api_list_pipelines_response import ( # noqa: F401 + ApiListPipelinesResponse, +) from swagger_server.models.api_pipeline import ApiPipeline # noqa: E501 -from swagger_server.models.api_pipeline_custom_run_payload import ApiPipelineCustomRunPayload # noqa: E501 -from swagger_server.models.api_pipeline_extended import ApiPipelineExtended # noqa: E501 -from swagger_server.models.api_run_code_response import ApiRunCodeResponse # noqa: E501 -from swagger_server.models.api_status import ApiStatus # noqa: E501 +from swagger_server.models.api_pipeline_custom_run_payload import ( + ApiPipelineCustomRunPayload, +) +from swagger_server.models.api_pipeline_extended import ( # noqa: F401 + ApiPipelineExtended, +) +from swagger_server.models.api_run_code_response import ApiRunCodeResponse # noqa: F401, E501 +from swagger_server.models.api_status import ApiStatus # noqa: F401, E501 from swagger_server.models.dictionary import Dictionary # noqa: E501 -from swagger_server import util +from swagger_server import util # noqa: F401 def approve_pipelines_for_publishing(pipeline_ids): # noqa: E501 @@ -34,13 +42,13 @@ def create_pipeline(body): # noqa: 
E501 # noqa: E501 - :param body: + :param body: :type body: dict | bytes :rtype: ApiPipeline """ if connexion.request.is_json: - body = ApiPipeline.from_dict(connexion.request.get_json()) # noqa: E501 + body = ApiPipeline.from_dict(connexion.request.get_json()) return util.invoke_controller_impl() @@ -49,7 +57,7 @@ def delete_pipeline(id): # noqa: E501 # noqa: E501 - :param id: + :param id: :type id: str :rtype: None @@ -62,7 +70,7 @@ def download_pipeline_files(id): # noqa: E501 # noqa: E501 - :param id: + :param id: :type id: str :rtype: file | binary @@ -75,7 +83,7 @@ def get_pipeline(id): # noqa: E501 # noqa: E501 - :param id: + :param id: :type id: str :rtype: ApiPipelineExtended @@ -88,7 +96,7 @@ def get_template(id): # noqa: E501 # noqa: E501 - :param id: + :param id: :type id: str :rtype: ApiGetTemplateResponse @@ -96,14 +104,16 @@ def get_template(id): # noqa: E501 return util.invoke_controller_impl() -def list_pipelines(page_token=None, page_size=None, sort_by=None, filter=None): # noqa: E501 +def list_pipelines( + page_token=None, page_size=None, sort_by=None, filter=None +): # noqa: E501 """list_pipelines # noqa: E501 - :param page_token: + :param page_token: :type page_token: str - :param page_size: + :param page_size: :type page_size: int :param sort_by: Can be format of \"field_name\", \"field_name asc\" or \"field_name desc\" Ascending by default. 
:type sort_by: str @@ -128,7 +138,9 @@ def run_custom_pipeline(run_custom_pipeline_payload, run_name=None): # noqa: E5 :rtype: ApiRunCodeResponse """ if connexion.request.is_json: - run_custom_pipeline_payload = ApiPipelineCustomRunPayload.from_dict(connexion.request.get_json()) # noqa: E501 + run_custom_pipeline_payload = ApiPipelineCustomRunPayload.from_dict( + connexion.request.get_json() + ) return util.invoke_controller_impl() @@ -137,7 +149,7 @@ def run_pipeline(id, run_name=None, parameters=None): # noqa: E501 # noqa: E501 - :param id: + :param id: :type id: str :param run_name: name to identify the run on the Kubeflow Pipelines UI, defaults to pipeline name :type run_name: str @@ -147,7 +159,7 @@ def run_pipeline(id, run_name=None, parameters=None): # noqa: E501 :rtype: ApiRunCodeResponse """ if connexion.request.is_json: - parameters = Dictionary.from_dict(connexion.request.get_json()) # noqa: E501 + parameters = Dictionary.from_dict(connexion.request.get_json()) return util.invoke_controller_impl() @@ -164,7 +176,9 @@ def set_featured_pipelines(pipeline_ids): # noqa: E501 return util.invoke_controller_impl() -def upload_pipeline(uploadfile, name=None, description=None, annotations=None): # noqa: E501 +def upload_pipeline( + uploadfile, name=None, description=None, annotations=None +): # noqa: E501 """upload_pipeline # noqa: E501 diff --git a/api/server/swagger_server/controllers_impl/__init__.py b/api/server/swagger_server/controllers_impl/__init__.py index 779c09c5..56a5f96d 100644 --- a/api/server/swagger_server/controllers_impl/__init__.py +++ b/api/server/swagger_server/controllers_impl/__init__.py @@ -1,12 +1,12 @@ # Copyright 2021 The MLX Contributors -# +# # SPDX-License-Identifier: Apache-2.0 import requests from werkzeug.datastructures import FileStorage -from kfp_tekton.compiler._k8s_helper import sanitize_k8s_name; +from kfp_tekton.compiler._k8s_helper import sanitize_k8s_name from swagger_server.data_access.minio_client import 
extract_yaml_from_tarfile from swagger_server.models.api_parameter import ApiParameter from swagger_server.util import ApiError @@ -17,6 +17,7 @@ # TODO: move into controllers_impl/util.py ############################################################################### + def get_yaml_file_content_from_uploadfile(uploadfile: FileStorage): file_name = uploadfile.filename @@ -31,7 +32,9 @@ def get_yaml_file_content_from_uploadfile(uploadfile: FileStorage): else: raise ApiError( f"File extension not supported: '{file_ext}', uploadfile: '{file_name}'." - f"Supported file extensions: .tar.gz, .gz, .yaml, .yml", 501) + f"Supported file extensions: .tar.gz, .gz, .yaml, .yml", + 501, + ) return yaml_file_content @@ -42,11 +45,15 @@ def validate_parameters(api_parameters: [ApiParameter], parameters: dict) -> (st unexpected_parameters = set(parameters.keys()) - set(acceptable_parameters) if unexpected_parameters: - return f"Unexpected parameter(s): {list(unexpected_parameters)}. " \ - f"Acceptable parameter(s): {acceptable_parameters}", 422 + return ( + f"Unexpected parameter(s): {list(unexpected_parameters)}. 
" + f"Acceptable parameter(s): {acceptable_parameters}", + 422, + ) - missing_parameters = [p.name for p in api_parameters - if not p.default and p.name not in parameters] + missing_parameters = [ + p.name for p in api_parameters if not p.default and p.name not in parameters + ] if missing_parameters: return f"Missing required parameter(s): {missing_parameters}", 422 @@ -57,7 +64,10 @@ def validate_parameters(api_parameters: [ApiParameter], parameters: dict) -> (st def validate_id(id: str) -> (str, int): if id != sanitize_k8s_name(id): - return f"Identifiers must contain lower case alphanumeric characters or '-' only.", 422 + return ( + f"Identifiers must contain lower case alphanumeric characters or '-' only.", + 422, + ) return None, 200 @@ -70,9 +80,11 @@ def download_file_content_from_url(url: str, bearer_token: str = None) -> bytes: request_headers.update({"Authorization": f"Bearer {bearer_token}"}) try: - raw_url = url.replace("/blob/", "/") \ - .replace("/github.ibm.com/", "/raw.github.ibm.com/") \ + raw_url = ( + url.replace("/blob/", "/") + .replace("/github.ibm.com/", "/raw.github.ibm.com/") .replace("/github.com/", "/raw.githubusercontent.com/") + ) response = requests.get(raw_url, allow_redirects=True, headers=request_headers) @@ -83,5 +95,7 @@ def download_file_content_from_url(url: str, bearer_token: str = None) -> bytes: except Exception as e: raise ApiError(f"Could not download file '{url}'. \n{str(e)}", 422) - raise ApiError(f"Could not download file '{url}'. Reason: {response.reason}", - response.status_code) + raise ApiError( + f"Could not download file '{url}'. 
Reason: {response.reason}", + response.status_code, + ) diff --git a/api/server/swagger_server/controllers_impl/application_settings_controller_impl.py b/api/server/swagger_server/controllers_impl/application_settings_controller_impl.py index 1cf43e39..91ab7fce 100644 --- a/api/server/swagger_server/controllers_impl/application_settings_controller_impl.py +++ b/api/server/swagger_server/controllers_impl/application_settings_controller_impl.py @@ -1,18 +1,20 @@ # Copyright 2021 The MLX Contributors -# -# SPDX-License-Identifier: Apache-2.0 -import connexion -import yaml +# +# SPDX-License-Identifier: Apache-2.0 +import connexion # noqa: F401 +import yaml # noqa: F401 from os.path import abspath, join, dirname -from swagger_server.models.api_parameter import ApiParameter # noqa: E501 +from swagger_server.models.api_parameter import ApiParameter # noqa: F401, E501 from swagger_server.models.api_settings import ApiSettings # noqa: E501 -from swagger_server.models.api_settings_section import ApiSettingsSection # noqa: E501 -from swagger_server.models.api_status import ApiStatus # noqa: E501 -from swagger_server import util +from swagger_server.models.api_settings_section import ApiSettingsSection # noqa: F401, E501 +from swagger_server.models.api_status import ApiStatus # noqa: F401, E501 +from swagger_server import util # noqa: F401 -SETTINGS_FILE = abspath(join(dirname(__file__), "..", "..", "application_settings.yaml")) +SETTINGS_FILE = abspath( + join(dirname(__file__), "..", "..", "application_settings.yaml") +) def get_application_settings(): # noqa: E501 @@ -50,7 +52,7 @@ def modify_application_settings(dictionary: dict): # noqa: E501 if setting.name in dictionary.keys(): setting.value = dictionary.get(setting.name) - with open(SETTINGS_FILE, 'w') as f: + with open(SETTINGS_FILE, "w") as f: yaml.dump(settings.to_dict(), f, default_flow_style=False) return settings, 200 @@ -69,7 +71,7 @@ def set_application_settings(settings): # noqa: E501 if 
connexion.request.is_json: settings = ApiSettings.from_dict(connexion.request.get_json()) - with open(SETTINGS_FILE, 'w') as f: + with open(SETTINGS_FILE, "w") as f: yaml.dump(settings.to_dict(), f, default_flow_style=False) return settings, 200 diff --git a/api/server/swagger_server/controllers_impl/catalog_service_controller_impl.py b/api/server/swagger_server/controllers_impl/catalog_service_controller_impl.py index 5cfe7ad2..49ed3cb9 100644 --- a/api/server/swagger_server/controllers_impl/catalog_service_controller_impl.py +++ b/api/server/swagger_server/controllers_impl/catalog_service_controller_impl.py @@ -2,7 +2,7 @@ # # SPDX-License-Identifier: Apache-2.0 -import connexion +import connexion # noqa: F401 import json import traceback @@ -10,29 +10,52 @@ from swagger_server.models import ApiCatalogUpload, ApiCatalogUploadError from swagger_server.models import ApiCatalogUploadResponse, ApiListCatalogItemsResponse -from swagger_server.models import ApiComponent, ApiDataset, ApiModel, ApiNotebook, ApiPipelineExtension +from swagger_server.models import ( + ApiComponent, + ApiDataset, + ApiModel, + ApiNotebook, + ApiPipelineExtension, +) from swagger_server.controllers_impl import download_file_content_from_url -from swagger_server.controllers_impl.component_service_controller_impl import list_components, upload_component_from_url -from swagger_server.controllers_impl.dataset_service_controller_impl import list_datasets, upload_dataset_from_url -from swagger_server.controllers_impl.model_service_controller_impl import list_models, upload_model_from_url -from swagger_server.controllers_impl.notebook_service_controller_impl import list_notebooks, upload_notebook_from_url -from swagger_server.controllers_impl.pipeline_service_controller_impl import list_pipelines, upload_pipeline_from_url +from swagger_server.controllers_impl.component_service_controller_impl import ( + list_components, + upload_component_from_url, +) +from 
swagger_server.controllers_impl.dataset_service_controller_impl import ( + list_datasets, + upload_dataset_from_url, +) +from swagger_server.controllers_impl.model_service_controller_impl import ( + list_models, + upload_model_from_url, +) +from swagger_server.controllers_impl.notebook_service_controller_impl import ( + list_notebooks, + upload_notebook_from_url, +) +from swagger_server.controllers_impl.pipeline_service_controller_impl import ( + list_pipelines, + upload_pipeline_from_url, +) from swagger_server.util import ApiError -def list_all_assets(page_token=None, page_size=None, sort_by=None, filter=None): # noqa: E501 +def list_all_assets( + page_token=None, page_size=None, sort_by=None, filter=None +): # noqa: E501 """list_all_assets - :param page_token: + :param page_token: :type page_token: str - :param page_size: + :param page_size: :type page_size: int - :param sort_by: Can be format of \"field_name\", \"field_name asc\" or \"field_name desc\" Ascending by default. + :param sort_by: Can be format of \"field_name\", \"field_name asc\" or \"field_name desc\" Ascending by default. # noqa: E501 :type sort_by: str - :param filter: A string-serialized JSON dictionary with key-value pairs that correspond to the ApiComponent's attribute names and their respective values to be filtered for. + :param filter: A string-serialized JSON dictionary with key-value pairs that correspond to the ApiComponent's attribute names and their respective values to be filtered for. 
# noqa: E501 :type filter: str :rtype: ApiListCatalogItemsResponse @@ -45,19 +68,21 @@ def list_all_assets(page_token=None, page_size=None, sort_by=None, filter=None): offset = int(page_token) if page_token and page_token.isdigit() else 0 if page_size or page_token: - print(f"WARNING: page_size and page_token are not implemented on {__file__}#list_all_assets()") + print( + f"WARNING: page_size and page_token are not implemented on {__file__}#list_all_assets()" + ) list_methods = { "components": list_components, "datasets": list_datasets, "models": list_models, "notebooks": list_notebooks, - "pipelines": list_pipelines + "pipelines": list_pipelines, } api_response = ApiListCatalogItemsResponse( - components=[], datasets=[], models=[], notebooks=[], pipelines=[], - total_size=0) + components=[], datasets=[], models=[], notebooks=[], pipelines=[], total_size=0 + ) for asset_type, list_method in list_methods.items(): @@ -102,7 +127,7 @@ def upload_multiple_assets(body: ApiCatalogUpload): # noqa: E501 :rtype: ApiCatalogUploadResponse """ if connexion.request.is_json: - body = ApiCatalogUpload.from_dict(connexion.request.get_json()) # noqa: E501 + body = ApiCatalogUpload.from_dict(connexion.request.get_json()) return _upload_multiple_assets(body) @@ -126,41 +151,54 @@ def get_access_token_for_url(url: str) -> str: "datasets": upload_dataset_from_url, "models": upload_model_from_url, "notebooks": upload_notebook_from_url, - "pipelines": upload_pipeline_from_url + "pipelines": upload_pipeline_from_url, } api_response = ApiCatalogUploadResponse( - components=[], datasets=[], models=[], notebooks=[], pipelines=[], - total_created=0, errors=[], total_errors=0) + components=[], + datasets=[], + models=[], + notebooks=[], + pipelines=[], + total_created=0, + errors=[], + total_errors=0, + ) for asset_type, upload_method in upload_methods.items(): for asset in body.__getattribute__(asset_type) or []: try: api_object, status = upload_method( - url=asset.url, name=asset.name, - 
access_token=get_access_token_for_url(asset.url)) + url=asset.url, + name=asset.name, + access_token=get_access_token_for_url(asset.url), + ) if 200 <= status < 300: api_response.__getattribute__(asset_type).append(api_object) api_response.total_created += 1 else: # TODO: remove this? - api_error = ApiCatalogUploadError(**asset.to_dict(), - error_message=f"THIS SHOULD NOT HAPPEN: {str(api_object).strip()}", - status_code=500) + api_error = ApiCatalogUploadError( + **asset.to_dict(), + error_message=f"THIS SHOULD NOT HAPPEN: {str(api_object).strip()}", + status_code=500, + ) api_response.errors.append(api_error) print(f"THIS SHOULD NOT HAPPEN: {api_error}") print(traceback.format_exc()) except ApiError as e: - api_error = ApiCatalogUploadError(**asset.to_dict(), - error_message=e.message, - status_code=e.http_status_code) + api_error = ApiCatalogUploadError( + **asset.to_dict(), + error_message=e.message, + status_code=e.http_status_code, + ) api_response.errors.append(api_error) except Exception as e: - api_error = ApiCatalogUploadError(**asset.to_dict(), - error_message=str(e), - status_code=500) + api_error = ApiCatalogUploadError( + **asset.to_dict(), error_message=str(e), status_code=500 + ) api_response.errors.append(api_error) print(traceback.format_exc()) @@ -172,7 +210,7 @@ def get_access_token_for_url(url: str) -> str: "datasets": ApiDataset, "models": ApiModel, "notebooks": ApiNotebook, - "pipelines": ApiPipelineExtension + "pipelines": ApiPipelineExtension, } for asset_type, api_class in api_classes.items(): asset_list = api_response.__getattribute__(asset_type) @@ -180,9 +218,12 @@ def get_access_token_for_url(url: str) -> str: update_multiple(api_class, asset_ids, "publish_approved", publish_all) update_multiple(api_class, asset_ids, "featured", feature_all) - response_status = \ - 201 if api_response.total_created > 0 and api_response.total_errors == 0 else \ - 207 if api_response.total_created > 0 and api_response.total_errors > 0 else \ - 
max([e.status_code for e in api_response.errors]) + response_status = ( + 201 + if api_response.total_created > 0 and api_response.total_errors == 0 + else 207 + if api_response.total_created > 0 and api_response.total_errors > 0 + else max([e.status_code for e in api_response.errors]) + ) return api_response, response_status diff --git a/api/server/swagger_server/controllers_impl/component_service_controller_impl.py b/api/server/swagger_server/controllers_impl/component_service_controller_impl.py index 746f4f5a..be061aaf 100644 --- a/api/server/swagger_server/controllers_impl/component_service_controller_impl.py +++ b/api/server/swagger_server/controllers_impl/component_service_controller_impl.py @@ -1,32 +1,56 @@ # Copyright 2021 The MLX Contributors -# +# # SPDX-License-Identifier: Apache-2.0 -import connexion +import connexion # noqa: F401 import json import tarfile -import yaml +import yaml # noqa: F401 from datetime import datetime from io import BytesIO from typing import AnyStr from werkzeug.datastructures import FileStorage -from swagger_server.controllers_impl import download_file_content_from_url, \ - get_yaml_file_content_from_uploadfile, validate_parameters -from swagger_server.data_access.minio_client import store_file, delete_objects, \ - get_file_content_and_url, NoSuchKey, enable_anonymous_read_access, create_tarfile -from swagger_server.data_access.mysql_client import store_data, generate_id, load_data, \ - delete_data, num_rows, update_multiple -from swagger_server.gateways.kubeflow_pipeline_service import generate_component_run_script, \ - run_component_in_experiment, _host as KFP_HOST +from swagger_server.controllers_impl import ( + download_file_content_from_url, + get_yaml_file_content_from_uploadfile, + validate_parameters, +) +from swagger_server.data_access.minio_client import ( + store_file, + delete_objects, + get_file_content_and_url, + NoSuchKey, + enable_anonymous_read_access, + create_tarfile, +) +from 
swagger_server.data_access.mysql_client import ( + store_data, + generate_id, + load_data, + delete_data, + num_rows, + update_multiple, +) +from swagger_server.gateways.kubeflow_pipeline_service import ( + generate_component_run_script, + run_component_in_experiment, + _host as KFP_HOST, +) from swagger_server.models.api_component import ApiComponent # noqa: E501 -from swagger_server.models.api_generate_code_response import ApiGenerateCodeResponse # noqa: E501 -from swagger_server.models.api_get_template_response import ApiGetTemplateResponse # noqa: E501 -from swagger_server.models.api_list_components_response import ApiListComponentsResponse # noqa: E501 +from swagger_server.models.api_generate_code_response import ( # noqa: F401 + ApiGenerateCodeResponse, +) +from swagger_server.models.api_get_template_response import ( # noqa: F401 + ApiGetTemplateResponse, +) +from swagger_server.models.api_list_components_response import ( # noqa: F401 + ApiListComponentsResponse, +) from swagger_server.models.api_metadata import ApiMetadata -from swagger_server.models.api_parameter import ApiParameter # noqa: E501 -from swagger_server.models.api_run_code_response import ApiRunCodeResponse # noqa: E501 +from swagger_server.models.api_parameter import ApiParameter # noqa: F401, E501 +from swagger_server.models.api_run_code_response import ApiRunCodeResponse # noqa: F401, E501 def approve_components_for_publishing(component_ids): # noqa: E501 @@ -55,7 +79,7 @@ def create_component(body): # noqa: E501 :rtype: ApiComponent """ if connexion.request.is_json: - body = ApiComponent.from_dict(connexion.request.get_json()) # noqa: E501 + body = ApiComponent.from_dict(connexion.request.get_json()) api_component = body @@ -93,9 +117,12 @@ def download_component_files(id, include_generated_code=None): # noqa: E501 :rtype: file | binary """ - tar, bytes_io = create_tarfile(bucket_name="mlpipeline", prefix=f"components/{id}/", - file_extensions=[".yaml", ".yml", ".py", ".md"], - 
keep_open=include_generated_code) + tar, bytes_io = create_tarfile( + bucket_name="mlpipeline", + prefix=f"components/{id}/", + file_extensions=[".yaml", ".yml", ".py", ".md"], + keep_open=include_generated_code, + ) if len(tar.members) == 0: return f"Could not find component with id '{id}'", 404 @@ -112,13 +139,17 @@ def download_component_files(id, include_generated_code=None): # noqa: E501 tarinfo = tarfile.TarInfo(name=file_name) tarinfo.size = len(file_content) - file_obj = BytesIO(file_content.encode('utf-8')) + file_obj = BytesIO(file_content.encode("utf-8")) tar.addfile(tarinfo, file_obj) tar.close() - return bytes_io.getvalue(), 200, {"Content-Disposition": f"attachment; filename={id}.tgz"} + return ( + bytes_io.getvalue(), + 200, + {"Content-Disposition": f"attachment; filename={id}.tgz"}, + ) def generate_component_code(id): # noqa: E501 @@ -180,9 +211,11 @@ def get_component_template(id): # noqa: E501 :rtype: ApiGetTemplateResponse """ try: - template_yaml, url = get_file_content_and_url(bucket_name="mlpipeline", - prefix=f"components/{id}/", - file_name="template.yaml") + template_yaml, url = get_file_content_and_url( + bucket_name="mlpipeline", + prefix=f"components/{id}/", + file_name="template.yaml", + ) template_response = ApiGetTemplateResponse(template=template_yaml, url=url) return template_response, 200 @@ -196,16 +229,18 @@ def get_component_template(id): # noqa: E501 return str(e), 500 -def list_components(page_token=None, page_size=None, sort_by=None, filter=None): # noqa: E501 +def list_components( + page_token=None, page_size=None, sort_by=None, filter=None +): # noqa: E501 """list_components :param page_token: :type page_token: str :param page_size: :type page_size: int - :param sort_by: Can be format of \"field_name\", \"field_name asc\" or \"field_name des\" Ascending by default. + :param sort_by: Can be format of \"field_name\", \"field_name asc\" or \"field_name des\" Ascending by default. 
# noqa: E501 :type sort_by: str - :param filter: A string-serialized JSON dictionary containing key-value pairs with name of the object property to apply filter on and the value of the respective property. + :param filter: A string-serialized JSON dictionary containing key-value pairs with name of the object property to apply filter on and the value of the respective property. # noqa: E501 :type filter: str :rtype: ApiListComponentsResponse @@ -219,8 +254,13 @@ def list_components(page_token=None, page_size=None, sort_by=None, filter=None): filter_dict = json.loads(filter) if filter else None - api_components: [ApiComponent] = load_data(ApiComponent, filter_dict=filter_dict, sort_by=sort_by, - count=page_size, offset=offset) + api_components: [ApiComponent] = load_data( + ApiComponent, + filter_dict=filter_dict, + sort_by=sort_by, + count=page_size, + offset=offset, + ) next_page_token = offset + page_size if len(api_components) == page_size else None @@ -229,8 +269,11 @@ def list_components(page_token=None, page_size=None, sort_by=None, filter=None): if total_size == next_page_token: next_page_token = None - comp_list = ApiListComponentsResponse(components=api_components, total_size=total_size, - next_page_token=next_page_token) + comp_list = ApiListComponentsResponse( + components=api_components, + total_size=total_size, + next_page_token=next_page_token, + ) return comp_list, 200 @@ -250,16 +293,22 @@ def run_component(id, parameters, run_name=None): # noqa: E501 return f"Kubeflow Pipeline host is 'UNAVAILABLE'", 503 if connexion.request.is_json: - parameters = [ApiParameter.from_dict(d) for d in connexion.request.get_json()] # noqa: E501 + parameters = [ + ApiParameter.from_dict(d) for d in connexion.request.get_json() + ] # noqa: E501 - parameter_dict = {p.name: p.value for p in parameters if p.value and p.value.strip() != ""} + parameter_dict = { + p.name: p.value for p in parameters if p.value and p.value.strip() != "" + } api_component, status_code = 
get_component(id) if status_code > 200: return f"Component with id '{id}' does not exist", 404 - parameter_errors, status_code = validate_parameters(api_component.parameters, parameter_dict) + parameter_errors, status_code = validate_parameters( + api_component.parameters, parameter_dict + ) if parameter_errors: return parameter_errors, status_code @@ -269,7 +318,9 @@ def run_component(id, parameters, run_name=None): # noqa: E501 enable_anonymous_read_access(bucket_name="mlpipeline", prefix="components/*") try: - run_id = run_component_in_experiment(api_component, api_template.url, parameter_dict, run_name) + run_id = run_component_in_experiment( + api_component, api_template.url, parameter_dict, run_name + ) return ApiRunCodeResponse(run_url=f"/runs/details/{run_id}"), 200 except Exception as e: @@ -293,12 +344,14 @@ def set_featured_components(component_ids): # noqa: E501 return None, 200 -def upload_component(uploadfile: FileStorage, name=None, existing_id=None): # noqa: E501 +def upload_component( + uploadfile: FileStorage, name=None, existing_id=None +): # noqa: E501 """upload_component :param uploadfile: The component to upload. Maximum size of 32MB is supported. :type uploadfile: werkzeug.datastructures.FileStorage - :param name: + :param name: :type name: str :param existing_id: The ID of a component to be replaced, INTERNAL USE ONLY :type existing_id: str @@ -315,7 +368,7 @@ def upload_component_file(id, uploadfile): # noqa: E501 :param id: The id of the component. :type id: str - :param uploadfile: The file to upload, overwriting existing. Can be a GZip-compressed TAR file (.tgz), a YAML file (.yaml), Python script (.py), or Markdown file (.md) + :param uploadfile: The file to upload, overwriting existing. 
Can be a GZip-compressed TAR file (.tgz), a YAML file (.yaml), Python script (.py), or Markdown file (.md) # noqa: E501 :type uploadfile: werkzeug.datastructures.FileStorage :rtype: ApiComponent @@ -325,13 +378,19 @@ def upload_component_file(id, uploadfile): # noqa: E501 file_ext = file_name.split(".")[-1] if file_ext not in ["tgz", "gz", "yaml", "yml", "py", "md"]: - return f"File extension not supported: '{file_ext}', uploadfile: '{file_name}'.", 501 + return ( + f"File extension not supported: '{file_ext}', uploadfile: '{file_name}'.", + 501, + ) if file_ext in ["tgz", "gz", "yaml", "yml"]: delete_component(id) return upload_component(uploadfile, existing_id=id) else: - return f"The API method 'upload_component_file' is not implemented for file type '{file_ext}'.", 501 + return ( + f"The API method 'upload_component_file' is not implemented for file type '{file_ext}'.", + 501, + ) return "Not implemented (yet).", 501 @@ -357,6 +416,7 @@ def upload_component_from_url(url, name=None, access_token=None): # noqa: E501 # private helper methods, not swagger-generated ############################################################################### + def _upload_component_yaml(yaml_file_content: AnyStr, name=None, existing_id=None): yaml_dict = yaml.load(yaml_file_content, Loader=yaml.FullLoader) @@ -369,29 +429,43 @@ def _upload_component_yaml(yaml_file_content: AnyStr, name=None, existing_id=Non description = (yaml_dict.get("description") or name).strip()[:255] filter_categories = yaml_dict.get("filter_categories") or dict() - metadata = ApiMetadata(annotations=template_metadata.get("annotations"), - labels=template_metadata.get("labels"), - tags=template_metadata.get("tags")) - - parameters = [ApiParameter(name=p.get("name"), description=p.get("description"), - default=p.get("default"), value=p.get("value")) - for p in yaml_dict.get("inputs", [])] - - api_component = ApiComponent(id=component_id, - created_at=created_at, - name=name, - description=description, - 
metadata=metadata, - parameters=parameters, - filter_categories=filter_categories) + metadata = ApiMetadata( + annotations=template_metadata.get("annotations"), + labels=template_metadata.get("labels"), + tags=template_metadata.get("tags"), + ) + + parameters = [ + ApiParameter( + name=p.get("name"), + description=p.get("description"), + default=p.get("default"), + value=p.get("value"), + ) + for p in yaml_dict.get("inputs", []) + ] + + api_component = ApiComponent( + id=component_id, + created_at=created_at, + name=name, + description=description, + metadata=metadata, + parameters=parameters, + filter_categories=filter_categories, + ) uuid = store_data(api_component) api_component.id = uuid - store_file(bucket_name="mlpipeline", prefix=f"components/{component_id}/", - file_name="template.yaml", file_content=yaml_file_content, - content_type="text/yaml") + store_file( + bucket_name="mlpipeline", + prefix=f"components/{component_id}/", + file_name="template.yaml", + file_content=yaml_file_content, + content_type="text/yaml", + ) enable_anonymous_read_access(bucket_name="mlpipeline", prefix="components/*") diff --git a/api/server/swagger_server/controllers_impl/credential_service_controller_impl.py b/api/server/swagger_server/controllers_impl/credential_service_controller_impl.py index 78d638a3..73d20729 100644 --- a/api/server/swagger_server/controllers_impl/credential_service_controller_impl.py +++ b/api/server/swagger_server/controllers_impl/credential_service_controller_impl.py @@ -1,17 +1,30 @@ # Copyright 2021 The MLX Contributors -# -# SPDX-License-Identifier: Apache-2.0 -import connexion +# +# SPDX-License-Identifier: Apache-2.0 +import connexion # noqa: F401 import json from datetime import datetime -from swagger_server.data_access.mysql_client import store_data, generate_id, load_data, delete_data, num_rows -from swagger_server.gateways.kubernetes_service import create_secret, get_secret, delete_secret, list_secrets,\ - secret_name_prefix +from 
swagger_server.data_access.mysql_client import ( + store_data, + generate_id, + load_data, + delete_data, + num_rows, +) +from swagger_server.gateways.kubernetes_service import ( + create_secret, + get_secret, + delete_secret, + list_secrets, + secret_name_prefix, +) from swagger_server.models.api_credential import ApiCredential # noqa: E501 -from swagger_server.models.api_list_credentials_response import ApiListCredentialsResponse # noqa: E501 -from swagger_server.models.api_status import ApiStatus # noqa: E501 +from swagger_server.models.api_list_credentials_response import ( # noqa: F401 + ApiListCredentialsResponse, +) +from swagger_server.models.api_status import ApiStatus # noqa: F401, E501 def create_credential(body): # noqa: E501 @@ -19,7 +32,7 @@ def create_credential(body): # noqa: E501 Creates a credential associated with a pipeline. # noqa: E501 - :param body: + :param body: :type body: dict | bytes :rtype: ApiCredential @@ -29,7 +42,9 @@ def create_credential(body): # noqa: E501 api_credential: ApiCredential = body - api_credential.id = api_credential.id or f"{secret_name_prefix}-{generate_id(length=16)}".lower() + api_credential.id = ( + api_credential.id or f"{secret_name_prefix}-{generate_id(length=16)}".lower() + ) api_credential.created_at = datetime.now() error = store_data(api_credential) @@ -38,9 +53,14 @@ def create_credential(body): # noqa: E501 return error, 400 # TODO: do we need to generate some token or return something generated by K8s? 
- secret = create_secret(api_credential.id, - {key: value for key, value in api_credential.to_dict().items() - if key not in ["id", "created_at"]}) + secret = create_secret( + api_credential.id, + { + key: value + for key, value in api_credential.to_dict().items() + if key not in ["id", "created_at"] + }, + ) # TODO: remove credential if kubernetes secret was not created @@ -50,7 +70,7 @@ def create_credential(body): # noqa: E501 def delete_credential(id): # noqa: E501 """delete_credential - :param id: + :param id: :type id: str :rtype: None @@ -65,7 +85,7 @@ def delete_credential(id): # noqa: E501 def get_credential(id): # noqa: E501 """get_credential - :param id: + :param id: :type id: str :rtype: ApiComponent @@ -82,16 +102,18 @@ def get_credential(id): # noqa: E501 return api_credential, 200 -def list_credentials(page_token=None, page_size=None, sort_by=None, filter=None): # noqa: E501 +def list_credentials( + page_token=None, page_size=None, sort_by=None, filter=None +): """list_credentials :param page_token: :type page_token: str :param page_size: :type page_size: int - :param sort_by: Can be format of \"field_name\", \"field_name asc\" or \"field_name des\" Ascending by default. + :param sort_by: Can be format of \"field_name\", \"field_name asc\" or \"field_name des\" Ascending by default. # noqa: E501 :type sort_by: str - :param filter: A string-serialized JSON dictionary containing key-value pairs with name of the object property to apply filter on and the value of the respective property. + :param filter: A string-serialized JSON dictionary containing key-value pairs with name of the object property to apply filter on and the value of the respective property. 
# noqa: E501 :type filter: str :rtype: ApiListCredentialsResponse @@ -105,8 +127,13 @@ def list_credentials(page_token=None, page_size=None, sort_by=None, filter=None) filter_dict = json.loads(filter) if filter else None - api_credentials: [ApiCredential] = load_data(ApiCredential, filter_dict=filter_dict, sort_by=sort_by, - count=page_size, offset=offset) + api_credentials: [ApiCredential] = load_data( + ApiCredential, + filter_dict=filter_dict, + sort_by=sort_by, + count=page_size, + offset=offset, + ) next_page_token = offset + page_size if len(api_credentials) == page_size else None @@ -118,6 +145,9 @@ def list_credentials(page_token=None, page_size=None, sort_by=None, filter=None) secrets = list_secrets(name_prefix=secret_name_prefix) # TODO: consolidate kubernetes secrets with MLX registered credentials (i.e. add status field?) - comp_list = ApiListCredentialsResponse(credentials=api_credentials, total_size=total_size, - next_page_token=next_page_token) - return comp_list, 200 \ No newline at end of file + comp_list = ApiListCredentialsResponse( + credentials=api_credentials, + total_size=total_size, + next_page_token=next_page_token, + ) + return comp_list, 200 diff --git a/api/server/swagger_server/controllers_impl/dataset_service_controller_impl.py b/api/server/swagger_server/controllers_impl/dataset_service_controller_impl.py index c28af81d..d2e61d74 100644 --- a/api/server/swagger_server/controllers_impl/dataset_service_controller_impl.py +++ b/api/server/swagger_server/controllers_impl/dataset_service_controller_impl.py @@ -2,31 +2,55 @@ # # SPDX-License-Identifier: Apache-2.0 -import connexion +import connexion # noqa: F401 import json import tarfile -import yaml +import yaml # noqa: F401 from datetime import datetime from io import BytesIO from typing import AnyStr from werkzeug.datastructures import FileStorage -from swagger_server.controllers_impl import download_file_content_from_url, \ - get_yaml_file_content_from_uploadfile, validate_id -from 
swagger_server.data_access.minio_client import store_file, delete_objects, \ - get_file_content_and_url, NoSuchKey, enable_anonymous_read_access, create_tarfile -from swagger_server.data_access.mysql_client import store_data, generate_id, \ - load_data, delete_data, num_rows, update_multiple -from swagger_server.gateways.kubeflow_pipeline_service import generate_dataset_run_script, \ - run_dataset_in_experiment, _host as KFP_HOST +from swagger_server.controllers_impl import ( + download_file_content_from_url, + get_yaml_file_content_from_uploadfile, + validate_id, +) +from swagger_server.data_access.minio_client import ( + store_file, + delete_objects, + get_file_content_and_url, + NoSuchKey, + enable_anonymous_read_access, + create_tarfile, +) +from swagger_server.data_access.mysql_client import ( + store_data, + generate_id, + load_data, + delete_data, + num_rows, + update_multiple, +) +from swagger_server.gateways.kubeflow_pipeline_service import ( + generate_dataset_run_script, + run_dataset_in_experiment, + _host as KFP_HOST, +) from swagger_server.models.api_dataset import ApiDataset # noqa: E501 -from swagger_server.models.api_generate_code_response import ApiGenerateCodeResponse # noqa: E501 -from swagger_server.models.api_get_template_response import ApiGetTemplateResponse # noqa: E501 -from swagger_server.models.api_list_datasets_response import ApiListDatasetsResponse # noqa: E501 +from swagger_server.models.api_generate_code_response import ( # noqa: F401 + ApiGenerateCodeResponse, +) +from swagger_server.models.api_get_template_response import ( # noqa: F401 + ApiGetTemplateResponse, +) +from swagger_server.models.api_list_datasets_response import ( # noqa: F401 + ApiListDatasetsResponse, +) from swagger_server.models.api_metadata import ApiMetadata -from swagger_server.models.api_parameter import ApiParameter # noqa: E501 -from swagger_server.models.api_run_code_response import ApiRunCodeResponse # noqa: E501 +from swagger_server.models.api_parameter 
import ApiParameter # noqa: F401, E501 +from swagger_server.models.api_run_code_response import ApiRunCodeResponse # noqa: F401, E501 def approve_datasets_for_publishing(dataset_ids): # noqa: E501 @@ -55,7 +79,7 @@ def create_dataset(body): # noqa: E501 :rtype: ApiDataset """ if connexion.request.is_json: - body = ApiDataset.from_dict(connexion.request.get_json()) # noqa: E501 + body = ApiDataset.from_dict(connexion.request.get_json()) api_dataset = body @@ -93,9 +117,12 @@ def download_dataset_files(id, include_generated_code=None): # noqa: E501 :rtype: file | binary """ - tar, bytes_io = create_tarfile(bucket_name="mlpipeline", prefix=f"datasets/{id}/", - file_extensions=[".yaml", ".yml", ".py", ".md"], - keep_open=include_generated_code) + tar, bytes_io = create_tarfile( + bucket_name="mlpipeline", + prefix=f"datasets/{id}/", + file_extensions=[".yaml", ".yml", ".py", ".md"], + keep_open=include_generated_code, + ) if len(tar.members) == 0: return f"Could not find dataset with id '{id}'", 404 @@ -112,13 +139,17 @@ def download_dataset_files(id, include_generated_code=None): # noqa: E501 tarinfo = tarfile.TarInfo(name=file_name) tarinfo.size = len(file_content) - file_obj = BytesIO(file_content.encode('utf-8')) + file_obj = BytesIO(file_content.encode("utf-8")) tar.addfile(tarinfo, file_obj) tar.close() - return bytes_io.getvalue(), 200, {"Content-Disposition": f"attachment; filename={id}.tgz"} + return ( + bytes_io.getvalue(), + 200, + {"Content-Disposition": f"attachment; filename={id}.tgz"}, + ) def generate_dataset_code(id): # noqa: E501 @@ -180,9 +211,11 @@ def get_dataset_template(id): # noqa: E501 :rtype: ApiGetTemplateResponse """ try: - template_yaml, url = get_file_content_and_url(bucket_name="mlpipeline", - prefix=f"datasets/{id}/", - file_name="template.yaml") + template_yaml, url = get_file_content_and_url( + bucket_name="mlpipeline", + prefix=f"datasets/{id}/", + file_name="template.yaml", + ) template_response = 
ApiGetTemplateResponse(template=template_yaml, url=url) return template_response, 200 @@ -196,16 +229,18 @@ def get_dataset_template(id): # noqa: E501 return str(e), 500 -def list_datasets(page_token=None, page_size=None, sort_by=None, filter=None): # noqa: E501 +def list_datasets( + page_token=None, page_size=None, sort_by=None, filter=None +): # noqa: E501 """list_datasets :param page_token: :type page_token: str :param page_size: :type page_size: int - :param sort_by: Can be format of \"field_name\", \"field_name asc\" or \"field_name des\" Ascending by default. + :param sort_by: Can be format of \"field_name\", \"field_name asc\" or \"field_name des\" Ascending by default. # noqa: E501 :type sort_by: str - :param filter: A string-serialized JSON dictionary containing key-value pairs with name of the object property to apply filter on and the value of the respective property. + :param filter: A string-serialized JSON dictionary containing key-value pairs with name of the object property to apply filter on and the value of the respective property. 
# noqa: E501 :type filter: str :rtype: ApiListDatasetsResponse @@ -219,8 +254,13 @@ def list_datasets(page_token=None, page_size=None, sort_by=None, filter=None): filter_dict = json.loads(filter) if filter else None - api_datasets: [ApiDataset] = load_data(ApiDataset, filter_dict=filter_dict, sort_by=sort_by, - count=page_size, offset=offset) + api_datasets: [ApiDataset] = load_data( + ApiDataset, + filter_dict=filter_dict, + sort_by=sort_by, + count=page_size, + offset=offset, + ) next_page_token = offset + page_size if len(api_datasets) == page_size else None @@ -229,8 +269,9 @@ def list_datasets(page_token=None, page_size=None, sort_by=None, filter=None): if total_size == next_page_token: next_page_token = None - comp_list = ApiListDatasetsResponse(datasets=api_datasets, total_size=total_size, - next_page_token=next_page_token) + comp_list = ApiListDatasetsResponse( + datasets=api_datasets, total_size=total_size, next_page_token=next_page_token + ) return comp_list, 200 @@ -250,14 +291,18 @@ def run_dataset(id, parameters, run_name=None): # noqa: E501 return f"Kubeflow Pipeline host is 'UNAVAILABLE'", 503 if connexion.request.is_json: - parameters = [ApiParameter.from_dict(d) for d in connexion.request.get_json()] # noqa: E501 + parameters = [ + ApiParameter.from_dict(d) for d in connexion.request.get_json() + ] # noqa: E501 api_dataset, status_code = get_dataset(id) if status_code > 200: return f"Component with id '{id}' does not exist", 404 - parameter_dict = {p.name: p.value for p in parameters if p.value and p.value.strip() != ""} + parameter_dict = { + p.name: p.value for p in parameters if p.value and p.value.strip() != "" + } # parameter_errors, status_code = validate_parameters(api_dataset.parameters, parameter_dict) @@ -269,9 +314,9 @@ def run_dataset(id, parameters, run_name=None): # noqa: E501 enable_anonymous_read_access(bucket_name="mlpipeline", prefix="datasets/*") try: - run_id = run_dataset_in_experiment(api_dataset, api_template.url, - 
run_name=run_name, - parameters=parameter_dict) + run_id = run_dataset_in_experiment( + api_dataset, api_template.url, run_name=run_name, parameters=parameter_dict + ) return ApiRunCodeResponse(run_url=f"/runs/details/{run_id}"), 200 except Exception as e: @@ -298,9 +343,9 @@ def set_featured_datasets(dataset_ids): # noqa: E501 def upload_dataset(uploadfile: FileStorage, name=None, existing_id=None): # noqa: E501 """upload_dataset - :param uploadfile: The dataset YAML file to upload. Can be a GZip-compressed TAR file (.tgz, .tar.gz) or a YAML file (.yaml, .yml). Maximum size is 32MB. + :param uploadfile: The dataset YAML file to upload. Can be a GZip-compressed TAR file (.tgz, .tar.gz) or a YAML file (.yaml, .yml). Maximum size is 32MB. # noqa: E501 :type uploadfile: werkzeug.datastructures.FileStorage - :param name: + :param name: :type name: str :param existing_id: The ID of a dataset to be replaced, INTERNAL USE ONLY :type existing_id: str @@ -317,7 +362,7 @@ def upload_dataset_file(id, uploadfile): # noqa: E501 :param id: The id of the dataset. :type id: str - :param uploadfile: The file to upload, overwriting existing. Can be a GZip-compressed TAR file (.tgz), a YAML file (.yaml), Python script (.py), or Markdown file (.md) + :param uploadfile: The file to upload, overwriting existing. 
Can be a GZip-compressed TAR file (.tgz), a YAML file (.yaml), Python script (.py), or Markdown file (.md) # noqa: E501 :type uploadfile: werkzeug.datastructures.FileStorage :rtype: ApiDataset @@ -327,13 +372,19 @@ def upload_dataset_file(id, uploadfile): # noqa: E501 file_ext = file_name.split(".")[-1] if file_ext not in ["tgz", "gz", "yaml", "yml", "py", "md"]: - return f"File extension not supported: '{file_ext}', uploadfile: '{file_name}'.", 501 + return ( + f"File extension not supported: '{file_ext}', uploadfile: '{file_name}'.", + 501, + ) if file_ext in ["tgz", "gz", "yaml", "yml"]: delete_dataset(id) return upload_dataset(uploadfile, existing_id=id) else: - return f"The API method 'upload_dataset_file' is not implemented for file type '{file_ext}'.", 501 + return ( + f"The API method 'upload_dataset_file' is not implemented for file type '{file_ext}'.", + 501, + ) return "Not implemented (yet).", 501 @@ -359,6 +410,7 @@ def upload_dataset_from_url(url, name=None, access_token=None): # noqa: E501 # private helper methods, not swagger-generated ############################################################################### + def _upload_dataset_yaml(yaml_file_content: AnyStr, name=None, existing_id=None): yaml_dict = yaml.load(yaml_file_content, Loader=yaml.FullLoader) @@ -401,15 +453,19 @@ def _upload_dataset_yaml(yaml_file_content: AnyStr, name=None, existing_id=None) # number_of_records = int(num_records_number_str) number_of_records = yaml_dict["content"][0].get("records", 0) - related_assets = [a["application"].get("asset_id") - for a in yaml_dict.get("related_assets", []) - if "MLX" in a.get("application", {}).get("name", "") - and "asset_id" in a.get("application", {})] + related_assets = [ + a["application"].get("asset_id") + for a in yaml_dict.get("related_assets", []) + if "MLX" in a.get("application", {}).get("name", "") + and "asset_id" in a.get("application", {}) + ] template_metadata = yaml_dict.get("metadata") or dict() - metadata = 
ApiMetadata(annotations=template_metadata.get("annotations"), - labels=template_metadata.get("labels"), - tags=template_metadata.get("tags") or yaml_dict.get("seo_tags")) + metadata = ApiMetadata( + annotations=template_metadata.get("annotations"), + labels=template_metadata.get("labels"), + tags=template_metadata.get("tags") or yaml_dict.get("seo_tags"), + ) # TODO: add "version" to ApiDataset @@ -425,16 +481,20 @@ def _upload_dataset_yaml(yaml_file_content: AnyStr, name=None, existing_id=None) license=license_name, metadata=metadata, related_assets=related_assets, - filter_categories=filter_categories + filter_categories=filter_categories, ) uuid = store_data(api_dataset) api_dataset.id = uuid - store_file(bucket_name="mlpipeline", prefix=f"datasets/{api_dataset.id}/", - file_name="template.yaml", file_content=yaml_file_content, - content_type="text/yaml") + store_file( + bucket_name="mlpipeline", + prefix=f"datasets/{api_dataset.id}/", + file_name="template.yaml", + file_content=yaml_file_content, + content_type="text/yaml", + ) enable_anonymous_read_access(bucket_name="mlpipeline", prefix="datasets/*") diff --git a/api/server/swagger_server/controllers_impl/health_check_controller_impl.py b/api/server/swagger_server/controllers_impl/health_check_controller_impl.py index 545710ed..71b774d1 100644 --- a/api/server/swagger_server/controllers_impl/health_check_controller_impl.py +++ b/api/server/swagger_server/controllers_impl/health_check_controller_impl.py @@ -1,6 +1,6 @@ # Copyright 2021 The MLX Contributors -# -# SPDX-License-Identifier: Apache-2.0 +# +# SPDX-License-Identifier: Apache-2.0 from swagger_server.data_access import minio_client from swagger_server.data_access import mysql_client from swagger_server.models.api_status import ApiStatus diff --git a/api/server/swagger_server/controllers_impl/inference_service_controller_impl.py b/api/server/swagger_server/controllers_impl/inference_service_controller_impl.py index b05b0089..23d7576e 100644 --- 
a/api/server/swagger_server/controllers_impl/inference_service_controller_impl.py +++ b/api/server/swagger_server/controllers_impl/inference_service_controller_impl.py @@ -1,9 +1,11 @@ # Copyright 2021 The MLX Contributors -# +# # SPDX-License-Identifier: Apache-2.0 -from swagger_server.models.api_inferenceservice import ApiInferenceservice # noqa: E501 -from swagger_server.models.api_list_inferenceservices_response import ApiListInferenceservicesResponse # noqa: E501 +from swagger_server.models.api_inferenceservice import ApiInferenceservice # noqa: F401, E501 +from swagger_server.models.api_list_inferenceservices_response import ( # noqa: F401 + ApiListInferenceservicesResponse, +) from swagger_server.gateways.kfserving_client import get_all_services from swagger_server.gateways.kfserving_client import post_service @@ -17,9 +19,9 @@ def get_inferenceservices(id, namespace=None): # noqa: E501 # noqa: E501 - :param id: + :param id: :type id: str - :param namespace: + :param namespace: :type namespace: str :rtype: ApiInferenceservice @@ -27,33 +29,47 @@ def get_inferenceservices(id, namespace=None): # noqa: E501 log = logging.getLogger("inf_serv") # Attempt to find the id in a model mesh predictor try: - single_service = get_all_services(id, namespace=namespace, group="serving.kserve.io", version="v1alpha1", plural="predictors") + single_service = get_all_services( + id, + namespace=namespace, + group="serving.kserve.io", + version="v1alpha1", + plural="predictors", + ) return single_service, 200 except: pass # Attempt to find the id in a kserve inferenceservice try: - single_service = get_all_services(id, namespace=namespace, group="serving.kserve.io", version="v1beta1", plural="inferenceservices") + single_service = get_all_services( + id, + namespace=namespace, + group="serving.kserve.io", + version="v1beta1", + plural="inferenceservices", + ) return single_service, 200 except Exception as err: log.exception("Error when trying to find an inferenceservice: ") 
return str(err), 500 -def list_inferenceservices(page_token=None, page_size=None, sort_by=None, filter=None, namespace=None): # noqa: E501 +def list_inferenceservices( + page_token=None, page_size=None, sort_by=None, filter=None, namespace=None +): # noqa: E501 """list_inferenceservices # noqa: E501 - :param page_token: + :param page_token: :type page_token: str - :param page_size: + :param page_size: :type page_size: int :param sort_by: Can be format of \"field_name\", \"field_name asc\" or \"field_name desc\" Ascending by default. :type sort_by: str :param filter: A string-serialized JSON dictionary containing key-value pairs with name of the object property to apply filter on and the value of the respective property. :type filter: str - :param namespace: + :param namespace: :type namespace: str :rtype: ApiListInferenceservicesResponse @@ -61,9 +77,19 @@ def list_inferenceservices(page_token=None, page_size=None, sort_by=None, filter log = logging.getLogger("inf_serv") try: # Combine the list of items from the modelmesh predictors and kserve inferenceservices - all_mm_services = get_all_services(namespace=namespace, group="serving.kserve.io", version="v1alpha1", plural="predictors") - all_k_services = get_all_services(namespace=namespace, group="serving.kserve.io", version="v1beta1", plural="inferenceservices") - all_mm_services['items'] = all_mm_services['items'] + all_k_services['items'] + all_mm_services = get_all_services( + namespace=namespace, + group="serving.kserve.io", + version="v1alpha1", + plural="predictors", + ) + all_k_services = get_all_services( + namespace=namespace, + group="serving.kserve.io", + version="v1beta1", + plural="inferenceservices", + ) + all_mm_services["items"] = all_mm_services["items"] + all_k_services["items"] return all_mm_services, 200 except Exception as err: log.exception("Error when trying to list inferenceservices: ") @@ -75,9 +101,9 @@ def create_service(body, namespace=None): # noqa: E501 # noqa: E501 - :param body: + 
:param body: :type body: dict | bytes - :param namespace: + :param namespace: :type namespace: str :rtype: ApiInferenceservice @@ -98,18 +124,19 @@ def upload_service(uploadfile, name=None, namespace=None): # noqa: E501 :param uploadfile: The component to upload. Maximum size of 32MB is supported. :type uploadfile: werkzeug.datastructures.FileStorage - :param name: + :param name: :type name: str - :param namespace: + :param namespace: :type namespace: str :rtype: ApiComponent """ log = logging.getLogger("inf_serv") try: - uploaded_service = from_client_upload_service(upload_file=uploadfile, namespace=namespace) + uploaded_service = from_client_upload_service( + upload_file=uploadfile, namespace=namespace + ) return uploaded_service, 200 except Exception as err: log.exception("Error when deploying an inferenceservice: ") return str(err), 500 - diff --git a/api/server/swagger_server/controllers_impl/model_service_controller_impl.py b/api/server/swagger_server/controllers_impl/model_service_controller_impl.py index 4a68dbe3..07175e56 100644 --- a/api/server/swagger_server/controllers_impl/model_service_controller_impl.py +++ b/api/server/swagger_server/controllers_impl/model_service_controller_impl.py @@ -1,11 +1,11 @@ # Copyright 2021 The MLX Contributors -# +# # SPDX-License-Identifier: Apache-2.0 -import connexion +import connexion # noqa: F401 import json import tarfile -import yaml +import yaml # noqa: F401 from datetime import datetime from io import BytesIO @@ -14,18 +14,39 @@ from swagger_server.controllers_impl import download_file_content_from_url, validate_id from swagger_server.controllers_impl import get_yaml_file_content_from_uploadfile -from swagger_server.data_access.minio_client import store_file, delete_objects, \ - get_file_content_and_url, enable_anonymous_read_access, create_tarfile, NoSuchKey -from swagger_server.data_access.mysql_client import store_data, generate_id, \ - load_data, delete_data, num_rows, update_multiple -from 
swagger_server.gateways.kubeflow_pipeline_service import generate_model_run_script,\ - run_model_in_experiment, _host as KFP_HOST -from swagger_server.models.api_generate_model_code_response import ApiGenerateModelCodeResponse # noqa: E501 -from swagger_server.models.api_get_template_response import ApiGetTemplateResponse # noqa: E501 -from swagger_server.models.api_list_models_response import ApiListModelsResponse # noqa: E501 +from swagger_server.data_access.minio_client import ( + store_file, + delete_objects, + get_file_content_and_url, + enable_anonymous_read_access, + create_tarfile, + NoSuchKey, +) +from swagger_server.data_access.mysql_client import ( + store_data, + generate_id, + load_data, + delete_data, + num_rows, + update_multiple, +) +from swagger_server.gateways.kubeflow_pipeline_service import ( + generate_model_run_script, + run_model_in_experiment, + _host as KFP_HOST, +) +from swagger_server.models.api_generate_model_code_response import ( # noqa: F401 + ApiGenerateModelCodeResponse, +) +from swagger_server.models.api_get_template_response import ( # noqa: F401 + ApiGetTemplateResponse, +) +from swagger_server.models.api_list_models_response import ( # noqa: F401 + ApiListModelsResponse, +) from swagger_server.models.api_model import ApiModel # noqa: E501 from swagger_server.models.api_model_script import ApiModelScript # noqa: E501 -from swagger_server.models.api_run_code_response import ApiRunCodeResponse # noqa: E501 +from swagger_server.models.api_run_code_response import ApiRunCodeResponse # noqa: F401, E501 def approve_models_for_publishing(model_ids): # noqa: E501 @@ -48,13 +69,13 @@ def approve_models_for_publishing(model_ids): # noqa: E501 def create_model(body): # noqa: E501 """create_model - :param body: + :param body: :type body: dict | bytes :rtype: ApiModel """ if connexion.request.is_json: - body = ApiModel.from_dict(connexion.request.get_json()) # noqa: E501 + body = ApiModel.from_dict(connexion.request.get_json()) api_model = 
body @@ -69,7 +90,7 @@ def create_model(body): # noqa: E501 def delete_model(id): # noqa: E501 """delete_model - :param id: + :param id: :type id: str :rtype: None @@ -93,15 +114,20 @@ def download_model_files(id, include_generated_code=None): # noqa: E501 :rtype: file | binary """ - tar, bytes_io = create_tarfile(bucket_name="mlpipeline", prefix=f"models/{id}/", - file_extensions=[".yaml", ".yml", ".py", ".md"], - keep_open=include_generated_code) + tar, bytes_io = create_tarfile( + bucket_name="mlpipeline", + prefix=f"models/{id}/", + file_extensions=[".yaml", ".yml", ".py", ".md"], + keep_open=include_generated_code, + ) if len(tar.members) == 0: return f"Could not find model with id '{id}'", 404 if include_generated_code: - generate_code_response: ApiGenerateModelCodeResponse = generate_model_code(id)[0] + generate_code_response: ApiGenerateModelCodeResponse = generate_model_code(id)[ + 0 + ] for s in generate_code_response.scripts: file_name = f"run_{s.pipeline_stage}_{s.execution_platform}.py" @@ -111,7 +137,7 @@ def download_model_files(id, include_generated_code=None): # noqa: E501 file_content = s.script_code file_size = len(file_content) - file_obj = BytesIO(file_content.encode('utf-8')) + file_obj = BytesIO(file_content.encode("utf-8")) tarinfo = tarfile.TarInfo(name=file_name) tarinfo.size = file_size @@ -119,7 +145,11 @@ def download_model_files(id, include_generated_code=None): # noqa: E501 tar.close() - return bytes_io.getvalue(), 200, {"Content-Disposition": f"attachment; filename={id}.tgz"} + return ( + bytes_io.getvalue(), + 200, + {"Content-Disposition": f"attachment; filename={id}.tgz"}, + ) def generate_model_code(id): # noqa: E501 @@ -127,7 +157,7 @@ def generate_model_code(id): # noqa: E501 # noqa: E501 - :param id: + :param id: :type id: str :rtype: ApiGenerateModelCodeResponse @@ -144,10 +174,14 @@ def generate_model_code(id): # noqa: E501 source_combinations = [] if api_model.trainable: - source_combinations.extend([("train", p) for p in 
api_model.trainable_tested_platforms]) + source_combinations.extend( + [("train", p) for p in api_model.trainable_tested_platforms] + ) if api_model.servable: - source_combinations.extend([("serve", p) for p in api_model.servable_tested_platforms]) + source_combinations.extend( + [("serve", p) for p in api_model.servable_tested_platforms] + ) for stage, platform in source_combinations: # TODO: re-enable check for uploaded script, until then save time by not doing Minio lookup @@ -159,9 +193,9 @@ def generate_model_code(id): # noqa: E501 if not source_code: source_code = generate_model_run_script(api_model, stage, platform) - api_model_script = ApiModelScript(pipeline_stage=stage, - execution_platform=platform, - script_code=source_code) + api_model_script = ApiModelScript( + pipeline_stage=stage, execution_platform=platform, script_code=source_code + ) generate_code_response.scripts.append(api_model_script) @@ -171,7 +205,7 @@ def generate_model_code(id): # noqa: E501 def get_model(id): # noqa: E501 """get_model - :param id: + :param id: :type id: str :rtype: ApiModel @@ -187,16 +221,16 @@ def get_model(id): # noqa: E501 def get_model_template(id): # noqa: E501 """get_model_template - :param id: + :param id: :type id: str :rtype: ApiGetTemplateResponse """ try: - template_yaml, url = get_file_content_and_url(bucket_name="mlpipeline", - prefix=f"models/{id}/", - file_name="template.yaml") + template_yaml, url = get_file_content_and_url( + bucket_name="mlpipeline", prefix=f"models/{id}/", file_name="template.yaml" + ) template_response = ApiGetTemplateResponse(template=template_yaml) return template_response, 200 @@ -210,16 +244,18 @@ def get_model_template(id): # noqa: E501 return str(e), 500 -def list_models(page_token=None, page_size=None, sort_by=None, filter=None): # noqa: E501 +def list_models( + page_token=None, page_size=None, sort_by=None, filter=None +): # noqa: E501 """list_models - :param page_token: + :param page_token: :type page_token: str - :param 
page_size: + :param page_size: :type page_size: int - :param sort_by: Can be format of \"field_name\", \"field_name asc\" or \"field_name des\" Ascending by default. + :param sort_by: Can be format of \"field_name\", \"field_name asc\" or \"field_name des\" Ascending by default. # noqa: E501 :type sort_by: str - :param filter: A string-serialized JSON dictionary containing key-value pairs with name of the object property to apply filter on and the value of the respective property. + :param filter: A string-serialized JSON dictionary containing key-value pairs with name of the object property to apply filter on and the value of the respective property. # noqa: E501 :type filter: str :rtype: ApiListModelsResponse @@ -233,11 +269,13 @@ def list_models(page_token=None, page_size=None, sort_by=None, filter=None): # filter_dict = json.loads(filter) if filter else None - api_models: [ApiModel] = load_data(ApiModel, - filter_dict=filter_dict, - sort_by=sort_by, - count=page_size, - offset=offset) + api_models: [ApiModel] = load_data( + ApiModel, + filter_dict=filter_dict, + sort_by=sort_by, + count=page_size, + offset=offset, + ) next_page_token = offset + page_size if len(api_models) == page_size else None @@ -246,15 +284,19 @@ def list_models(page_token=None, page_size=None, sort_by=None, filter=None): # if total_size == next_page_token: next_page_token = None - model_list = ApiListModelsResponse(models=api_models, total_size=total_size, next_page_token=next_page_token) + model_list = ApiListModelsResponse( + models=api_models, total_size=total_size, next_page_token=next_page_token + ) return model_list, 200 -def run_model(id, pipeline_stage, execution_platform, run_name=None, parameters: dict = None): # noqa: E501 +def run_model( + id, pipeline_stage, execution_platform, run_name=None, parameters: dict = None +): # noqa: E501 """run_model - :param id: + :param id: :type id: str :param pipeline_stage: pipeline stage, either 'train' or 'serve' :type pipeline_stage: str @@ 
-275,13 +317,17 @@ def run_model(id, pipeline_stage, execution_platform, run_name=None, parameters: if status_code > 200: return f"Model with id '{id}' does not exist", 404 - parameter_errors, status_code = _validate_run_parameters(api_model, pipeline_stage, execution_platform, parameters) + parameter_errors, status_code = _validate_run_parameters( + api_model, pipeline_stage, execution_platform, parameters + ) if parameter_errors: return parameter_errors, status_code try: - run_id = run_model_in_experiment(api_model, pipeline_stage, execution_platform, run_name, parameters) + run_id = run_model_in_experiment( + api_model, pipeline_stage, execution_platform, run_name, parameters + ) return ApiRunCodeResponse(run_url=f"/runs/details/{run_id}"), 200 except Exception as e: @@ -310,7 +356,7 @@ def upload_model(uploadfile: FileStorage, name=None, existing_id=None): # noqa: :param uploadfile: The model to upload. Maximum size of 32MB is supported. :type uploadfile: werkzeug.datastructures.FileStorage - :param name: + :param name: :type name: str :param existing_id: The model identifier of the model to be replaced, INTERNAL USE ONLY :type existing_id: str @@ -327,7 +373,7 @@ def upload_model_file(id, uploadfile): # noqa: E501 :param id: The model identifier. :type id: str - :param uploadfile: The file to upload, overwriting existing. Can be a GZip-compressed TAR file (.tgz), a YAML file (.yaml), Python script (.py), or Markdown file (.md) + :param uploadfile: The file to upload, overwriting existing. 
Can be a GZip-compressed TAR file (.tgz), a YAML file (.yaml), Python script (.py), or Markdown file (.md) # noqa: E501 :type uploadfile: werkzeug.datastructures.FileStorage :rtype: ApiModel @@ -336,13 +382,19 @@ def upload_model_file(id, uploadfile): # noqa: E501 file_ext = file_name.split(".")[-1] if file_ext not in ["tgz", "gz", "yaml", "yml", "py", "md"]: - return f"File extension not supported: '{file_ext}', uploadfile: '{file_name}'.", 501 + return ( + f"File extension not supported: '{file_ext}', uploadfile: '{file_name}'.", + 501, + ) if file_ext in ["tgz", "gz", "yaml", "yml"]: delete_model(id) return upload_model(uploadfile, existing_id=id) else: - return f"The API method 'upload_model_file' is not implemented for file type '{file_ext}'.", 501 + return ( + f"The API method 'upload_model_file' is not implemented for file type '{file_ext}'.", + 501, + ) return "Something went wrong?", 500 @@ -368,6 +420,7 @@ def upload_model_from_url(url, name=None, access_token=None): # noqa: E501 # private helper methods, not swagger-generated ############################################################################### + def _upload_model_yaml(yaml_file_content: AnyStr, name=None, existing_id=None): model_def = yaml.load(yaml_file_content, Loader=yaml.FullLoader) @@ -378,7 +431,9 @@ def _upload_model_yaml(yaml_file_content: AnyStr, name=None, existing_id=None): return errors, status api_model = ApiModel( - id=existing_id or model_def.get("model_identifier") or generate_id(name=name or model_def["name"]), + id=existing_id + or model_def.get("model_identifier") + or generate_id(name=name or model_def["name"]), created_at=datetime.now(), name=name or model_def["name"], description=model_def["description"].strip(), @@ -387,59 +442,99 @@ def _upload_model_yaml(yaml_file_content: AnyStr, name=None, existing_id=None): framework=model_def["framework"], filter_categories=model_def.get("filter_categories") or dict(), trainable=model_def.get("train", {}).get("trainable") or 
False, - trainable_tested_platforms=model_def.get("train", {}).get("tested_platforms") or [], - trainable_credentials_required=model_def.get("train", {}).get("credentials_required") or False, + trainable_tested_platforms=model_def.get("train", {}).get("tested_platforms") + or [], + trainable_credentials_required=model_def.get("train", {}).get( + "credentials_required" + ) + or False, trainable_parameters=model_def.get("train", {}).get("input_params") or [], servable=model_def.get("serve", {}).get("servable") or False, - servable_tested_platforms=model_def.get("serve", {}).get("tested_platforms") or [], - servable_credentials_required=model_def.get("serve", {}).get("credentials_required") or False, - servable_parameters=model_def.get("serve", {}).get("input_params") or []) + servable_tested_platforms=model_def.get("serve", {}).get("tested_platforms") + or [], + servable_credentials_required=model_def.get("serve", {}).get( + "credentials_required" + ) + or False, + servable_parameters=model_def.get("serve", {}).get("input_params") or [], + ) # convert comma-separate strings to lists if type(api_model.trainable_tested_platforms) == str: - api_model.trainable_tested_platforms = api_model.trainable_tested_platforms.replace(", ", ",").split(",") + api_model.trainable_tested_platforms = ( + api_model.trainable_tested_platforms.replace(", ", ",").split(",") + ) if type(api_model.servable_tested_platforms) == str: - api_model.servable_tested_platforms = api_model.servable_tested_platforms.replace(", ", ",").split(",") + api_model.servable_tested_platforms = ( + api_model.servable_tested_platforms.replace(", ", ",").split(",") + ) uuid = store_data(api_model) api_model.id = uuid - store_file(bucket_name="mlpipeline", prefix=f"models/{api_model.id}/", - file_name="template.yaml", file_content=yaml_file_content, - content_type="text/yaml") + store_file( + bucket_name="mlpipeline", + prefix=f"models/{api_model.id}/", + file_name="template.yaml", + 
file_content=yaml_file_content, + content_type="text/yaml", + ) enable_anonymous_read_access(bucket_name="mlpipeline", prefix="models/*") return api_model, 201 -def _validate_run_parameters(api_model: ApiModel, pipeline_stage: str, execution_platform: str, parameters=dict()): +def _validate_run_parameters( + api_model: ApiModel, pipeline_stage: str, execution_platform: str, parameters=dict() +): if pipeline_stage == "train": if not api_model.trainable: return f"Model '{api_model.id}' is not trainable", 422 if execution_platform not in api_model.trainable_tested_platforms: - return f"'{execution_platform}' is not a tested platform to {pipeline_stage} model '{api_model.id}'. " \ - f"Tested platforms: {api_model.trainable_tested_platforms}", 422 - - if api_model.trainable_credentials_required and not {"github_url", "github_token"} <= parameters.keys(): - return f"'github_url' and 'github_token' are required to {pipeline_stage} model '{api_model.id}'", 422 + return ( + f"'{execution_platform}' is not a tested platform to {pipeline_stage} model '{api_model.id}'. " + f"Tested platforms: {api_model.trainable_tested_platforms}", + 422, + ) + + if ( + api_model.trainable_credentials_required + and not {"github_url", "github_token"} <= parameters.keys() + ): + return ( + f"'github_url' and 'github_token' are required to {pipeline_stage} model '{api_model.id}'", + 422, + ) elif pipeline_stage == "serve": if not api_model.servable: return f"Model '{api_model.id}' is not servable", 422 if execution_platform not in api_model.servable_tested_platforms: - return f"'{execution_platform}' is not a tested platform to {pipeline_stage} model '{api_model.id}'. 
" \ - f"Tested platforms: {api_model.servable_tested_platforms}", 422 - - if api_model.servable_credentials_required and not {"github_url", "github_token"} <= parameters.keys(): - return f"'github_url' and 'github_token' are required to {pipeline_stage} model '{api_model.id}'", 422 + return ( + f"'{execution_platform}' is not a tested platform to {pipeline_stage} model '{api_model.id}'. " + f"Tested platforms: {api_model.servable_tested_platforms}", + 422, + ) + + if ( + api_model.servable_credentials_required + and not {"github_url", "github_token"} <= parameters.keys() + ): + return ( + f"'github_url' and 'github_token' are required to {pipeline_stage} model '{api_model.id}'", + 422, + ) else: - return f"Invalid pipeline_stage: '{pipeline_stage}'. Must be one of ['train', 'serve']", 422 + return ( + f"Invalid pipeline_stage: '{pipeline_stage}'. Must be one of ['train', 'serve']", + 422, + ) return None, 200 diff --git a/api/server/swagger_server/controllers_impl/notebook_service_controller_impl.py b/api/server/swagger_server/controllers_impl/notebook_service_controller_impl.py index 764b6584..c52e653f 100644 --- a/api/server/swagger_server/controllers_impl/notebook_service_controller_impl.py +++ b/api/server/swagger_server/controllers_impl/notebook_service_controller_impl.py @@ -1,36 +1,61 @@ # Copyright 2021 The MLX Contributors -# +# # SPDX-License-Identifier: Apache-2.0 -import connexion +import connexion # noqa: F401 import json import requests import tarfile -import yaml +import yaml # noqa: F401 from datetime import datetime from io import BytesIO -from os import environ as env +from os import environ as env # noqa: F401 from typing import AnyStr from urllib.parse import urlparse from werkzeug.datastructures import FileStorage -from swagger_server.controllers_impl import download_file_content_from_url, \ - get_yaml_file_content_from_uploadfile, validate_parameters, validate_id -from swagger_server.data_access.minio_client import store_file, delete_objects, 
\ - get_file_content_and_url, enable_anonymous_read_access, NoSuchKey, \ - create_tarfile, get_object_url -from swagger_server.data_access.mysql_client import store_data, generate_id, \ - load_data, delete_data, num_rows, update_multiple -from swagger_server.gateways.kubeflow_pipeline_service import generate_notebook_run_script,\ - run_notebook_in_experiment, _host as KFP_HOST -from swagger_server.models.api_generate_code_response import ApiGenerateCodeResponse # noqa: E501 -from swagger_server.models.api_get_template_response import ApiGetTemplateResponse # noqa: E501 -from swagger_server.models.api_list_notebooks_response import ApiListNotebooksResponse # noqa: E501 +from swagger_server.controllers_impl import ( # noqa: F401 + download_file_content_from_url, + get_yaml_file_content_from_uploadfile, + validate_parameters, + validate_id, +) +from swagger_server.data_access.minio_client import ( + store_file, + delete_objects, + get_file_content_and_url, + enable_anonymous_read_access, + NoSuchKey, + create_tarfile, + get_object_url, +) +from swagger_server.data_access.mysql_client import ( + store_data, + generate_id, + load_data, + delete_data, + num_rows, + update_multiple, +) +from swagger_server.gateways.kubeflow_pipeline_service import ( + generate_notebook_run_script, + run_notebook_in_experiment, + _host as KFP_HOST, +) +from swagger_server.models.api_generate_code_response import ( # noqa: F401 + ApiGenerateCodeResponse, +) +from swagger_server.models.api_get_template_response import ( # noqa: F401 + ApiGetTemplateResponse, +) +from swagger_server.models.api_list_notebooks_response import ( # noqa: F401 + ApiListNotebooksResponse, +) from swagger_server.models.api_metadata import ApiMetadata from swagger_server.models.api_notebook import ApiNotebook # noqa: E501 -from swagger_server.models.api_parameter import ApiParameter # noqa: E501 -from swagger_server.models.api_run_code_response import ApiRunCodeResponse # noqa: E501 +from 
swagger_server.models.api_parameter import ApiParameter # noqa: F401, E501 +from swagger_server.models.api_run_code_response import ApiRunCodeResponse # noqa: F401, E501 from swagger_server.util import ApiError @@ -57,13 +82,13 @@ def approve_notebooks_for_publishing(notebook_ids): # noqa: E501 def create_notebook(body): # noqa: E501 """create_notebook - :param body: + :param body: :type body: dict | bytes :rtype: ApiNotebook """ if connexion.request.is_json: - body = ApiNotebook.from_dict(connexion.request.get_json()) # noqa: E501 + body = ApiNotebook.from_dict(connexion.request.get_json()) api_notebook = body @@ -78,7 +103,7 @@ def create_notebook(body): # noqa: E501 def delete_notebook(id): # noqa: E501 """delete_notebook - :param id: + :param id: :type id: str :rtype: None @@ -93,16 +118,19 @@ def delete_notebook(id): # noqa: E501 def download_notebook_files(id, include_generated_code=None): # noqa: E501 """Returns the notebook artifacts compressed into a .tgz (.tar.gz) file. - :param id: + :param id: :type id: str :param include_generated_code: Include generated run script in download :type include_generated_code: bool :rtype: file | binary """ - tar, bytes_io = create_tarfile(bucket_name="mlpipeline", prefix=f"notebooks/{id}/", - file_extensions=[".yaml", ".yml", ".py", ".md"], - keep_open=include_generated_code) + tar, bytes_io = create_tarfile( + bucket_name="mlpipeline", + prefix=f"notebooks/{id}/", + file_extensions=[".yaml", ".yml", ".py", ".md"], + keep_open=include_generated_code, + ) if len(tar.members) == 0: return f"Could not find notebook with id '{id}'", 404 @@ -119,19 +147,23 @@ def download_notebook_files(id, include_generated_code=None): # noqa: E501 tarinfo = tarfile.TarInfo(name=file_name) tarinfo.size = len(file_content) - file_obj = BytesIO(file_content.encode('utf-8')) + file_obj = BytesIO(file_content.encode("utf-8")) tar.addfile(tarinfo, file_obj) tar.close() - return bytes_io.getvalue(), 200, {"Content-Disposition": f"attachment; 
filename={id}.tgz"} + return ( + bytes_io.getvalue(), + 200, + {"Content-Disposition": f"attachment; filename={id}.tgz"}, + ) def generate_notebook_code(id): # noqa: E501 """generate_notebook_code - :param id: + :param id: :type id: str :rtype: ApiGenerateCodeResponse @@ -162,7 +194,7 @@ def generate_notebook_code(id): # noqa: E501 def get_notebook(id): """get_notebook - :param id: + :param id: :type id: str :rtype: ApiNotebook @@ -178,14 +210,17 @@ def get_notebook(id): def get_notebook_template(id): # noqa: E501 """get_notebook_template - :param id: + :param id: :type id: str :rtype: ApiGetTemplateResponse """ try: - template_yaml, url = get_file_content_and_url(bucket_name="mlpipeline", prefix=f"notebooks/{id}/", - file_name="template.yaml") + template_yaml, url = get_file_content_and_url( + bucket_name="mlpipeline", + prefix=f"notebooks/{id}/", + file_name="template.yaml", + ) template_response = ApiGetTemplateResponse(template=template_yaml, url=url) return template_response, 200 @@ -199,16 +234,18 @@ def get_notebook_template(id): # noqa: E501 return str(e), 500 -def list_notebooks(page_token=None, page_size=None, sort_by=None, filter=None): # noqa: E501 +def list_notebooks( + page_token=None, page_size=None, sort_by=None, filter=None +): # noqa: E501 """list_notebooks - :param page_token: + :param page_token: :type page_token: str - :param page_size: + :param page_size: :type page_size: int - :param sort_by: Can be format of \"field_name\", \"field_name asc\" or \"field_name des\" Ascending by default. + :param sort_by: Can be format of \"field_name\", \"field_name asc\" or \"field_name des\" Ascending by default. # noqa: E501 :type sort_by: str - :param filter: A string-serialized JSON dictionary containing key-value pairs with name of the object property to apply filter on and the value of the respective property. 
+ :param filter: A string-serialized JSON dictionary containing key-value pairs with name of the object property to apply filter on and the value of the respective property. # noqa: E501 :type filter: str :rtype: ApiListNotebooksResponse @@ -222,8 +259,13 @@ def list_notebooks(page_token=None, page_size=None, sort_by=None, filter=None): filter_dict = json.loads(filter) if filter else None - api_notebooks: [ApiNotebook] = load_data(ApiNotebook, filter_dict=filter_dict, sort_by=sort_by, - count=page_size, offset=offset) + api_notebooks: [ApiNotebook] = load_data( + ApiNotebook, + filter_dict=filter_dict, + sort_by=sort_by, + count=page_size, + offset=offset, + ) next_page_token = offset + page_size if len(api_notebooks) == page_size else None @@ -232,15 +274,16 @@ def list_notebooks(page_token=None, page_size=None, sort_by=None, filter=None): if total_size == next_page_token: next_page_token = None - notebooks = ApiListNotebooksResponse(notebooks=api_notebooks, total_size=total_size, - next_page_token=next_page_token) + notebooks = ApiListNotebooksResponse( + notebooks=api_notebooks, total_size=total_size, next_page_token=next_page_token + ) return notebooks, 200 def run_notebook(id, run_name=None, parameters: dict = None): # noqa: E501 """run_notebook - :param id: + :param id: :type id: str :param run_name: name to identify the run on the Kubeflow Pipelines UI, defaults to notebook name :type run_name: str @@ -253,7 +296,7 @@ def run_notebook(id, run_name=None, parameters: dict = None): # noqa: E501 return f"Kubeflow Pipeline host is 'UNAVAILABLE'", 503 if not parameters and connexion.request.is_json: - parameter_dict = dict(connexion.request.get_json()) # noqa: E501 + parameter_dict = dict(connexion.request.get_json()) else: parameter_dict = parameters @@ -275,15 +318,17 @@ def run_notebook(id, run_name=None, parameters: dict = None): # noqa: E501 enable_anonymous_read_access(bucket_name="mlpipeline", prefix="notebooks/*") try: - run_id = 
run_notebook_in_experiment(notebook=api_notebook, - parameters=parameter_dict, - run_name=run_name) + run_id = run_notebook_in_experiment( + notebook=api_notebook, parameters=parameter_dict, run_name=run_name + ) # expected output notebook based on: # https://github.com/elyra-ai/kfp-notebook/blob/c8f1298/etc/docker-scripts/bootstrapper.py#L188-L190 - notebook_url = get_object_url(bucket_name="mlpipeline", - prefix=f"notebooks/{api_notebook.id}/", - file_extensions=[".ipynb"]) + notebook_url = get_object_url( + bucket_name="mlpipeline", + prefix=f"notebooks/{api_notebook.id}/", + file_extensions=[".ipynb"], + ) # TODO: create a "sandboxed" notebook in a subfolder since Elyra overwrites # the original notebook instead of creating an "-output.ipynb" file: # https://github.com/elyra-ai/kfp-notebook/blob/c8f1298/etc/docker-scripts/bootstrapper.py#L205 @@ -292,8 +337,13 @@ def run_notebook(id, run_name=None, parameters: dict = None): # noqa: E501 # instead return link to the generated output .html for the time being notebook_output_html = notebook_url.replace(".ipynb", ".html") - return ApiRunCodeResponse(run_url=f"/runs/details/{run_id}", - run_output_location=notebook_output_html), 200 + return ( + ApiRunCodeResponse( + run_url=f"/runs/details/{run_id}", + run_output_location=notebook_output_html, + ), + 200, + ) except Exception as e: return f"Error while trying to run notebook {id}: {e}", 500 @@ -316,12 +366,14 @@ def set_featured_notebooks(notebook_ids): # noqa: E501 return None, 200 -def upload_notebook(uploadfile: FileStorage, name=None, enterprise_github_token=None, existing_id=None): # noqa: E501 +def upload_notebook( + uploadfile: FileStorage, name=None, enterprise_github_token=None, existing_id=None +): # noqa: E501 """upload_notebook :param uploadfile: The notebook to upload. Maximum size of 32MB is supported. 
:type uploadfile: werkzeug.datastructures.FileStorage - :param name: + :param name: :type name: str :param enterprise_github_token: Optional GitHub API token providing read-access to notebooks stored on Enterprise GitHub accounts. :type enterprise_github_token: str @@ -340,7 +392,7 @@ def upload_notebook_file(id, uploadfile): # noqa: E501 :param id: The id of the notebook. :type id: str - :param uploadfile: The file to upload, overwriting existing. Can be a GZip-compressed TAR file (.tgz), a YAML file (.yaml), Python script (.py), or Markdown file (.md) + :param uploadfile: The file to upload, overwriting existing. Can be a GZip-compressed TAR file (.tgz), a YAML file (.yaml), Python script (.py), or Markdown file (.md) # noqa: E501 :type uploadfile: werkzeug.datastructures.FileStorage :rtype: ApiNotebook @@ -350,13 +402,19 @@ def upload_notebook_file(id, uploadfile): # noqa: E501 file_ext = file_name.split(".")[-1] if file_ext not in ["tgz", "gz", "yaml", "yml", "py", "md"]: - return f"File extension not supported: '{file_ext}', uploadfile: '{file_name}'.", 501 + return ( + f"File extension not supported: '{file_ext}', uploadfile: '{file_name}'.", + 501, + ) if file_ext in ["tgz", "gz", "yaml", "yml"]: delete_notebook(id) return upload_notebook(uploadfile, existing_id=id) else: - return f"The API method 'upload_notebook_file' is not implemented for file type '{file_ext}'.", 501 + return ( + f"The API method 'upload_notebook_file' is not implemented for file type '{file_ext}'.", + 501, + ) return "Not implemented (yet).", 501 @@ -382,7 +440,10 @@ def upload_notebook_from_url(url, name=None, access_token=None): # noqa: E501 # private helper methods, not swagger-generated ############################################################################### -def _upload_notebook_yaml(yaml_file_content: AnyStr, name=None, access_token=None, existing_id=None): + +def _upload_notebook_yaml( + yaml_file_content: AnyStr, name=None, access_token=None, existing_id=None +): 
yaml_dict = yaml.load(yaml_file_content, Loader=yaml.FullLoader) @@ -393,7 +454,11 @@ def _upload_notebook_yaml(yaml_file_content: AnyStr, name=None, access_token=Non if errors: return errors, status - notebook_id = existing_id or yaml_dict.get("id") or generate_id(name=name or yaml_dict["name"]) + notebook_id = ( + existing_id + or yaml_dict.get("id") + or generate_id(name=name or yaml_dict["name"]) + ) created_at = datetime.now() name = name or yaml_dict["name"] description = yaml_dict["description"].strip() @@ -401,9 +466,11 @@ def _upload_notebook_yaml(yaml_file_content: AnyStr, name=None, access_token=Non requirements = yaml_dict["implementation"]["github"].get("requirements") filter_categories = yaml_dict.get("filter_categories") or dict() - metadata = ApiMetadata(annotations=template_metadata.get("annotations"), - labels=template_metadata.get("labels"), - tags=template_metadata.get("tags")) + metadata = ApiMetadata( + annotations=template_metadata.get("annotations"), + labels=template_metadata.get("labels"), + tags=template_metadata.get("tags"), + ) notebook_content = _download_notebook(url, enterprise_github_api_token=access_token) @@ -412,27 +479,35 @@ def _upload_notebook_yaml(yaml_file_content: AnyStr, name=None, access_token=Non # kfp-notebook has inputs and outputs ? 
parameters = dict() - api_notebook = ApiNotebook(id=notebook_id, - created_at=created_at, - name=name, - description=description, - url=url, - metadata=metadata, - parameters=parameters, - filter_categories=filter_categories) + api_notebook = ApiNotebook( + id=notebook_id, + created_at=created_at, + name=name, + description=description, + url=url, + metadata=metadata, + parameters=parameters, + filter_categories=filter_categories, + ) uuid = store_data(api_notebook) api_notebook.id = uuid - store_file(bucket_name="mlpipeline", prefix=f"notebooks/{notebook_id}/", - file_name="template.yaml", file_content=yaml_file_content, - content_type="text/yaml") - - s3_url = store_file(bucket_name="mlpipeline", - prefix=f"notebooks/{notebook_id}/", - file_name=url.split("/")[-1].split("?")[0], - file_content=json.dumps(notebook_content).encode()) + store_file( + bucket_name="mlpipeline", + prefix=f"notebooks/{notebook_id}/", + file_name="template.yaml", + file_content=yaml_file_content, + content_type="text/yaml", + ) + + s3_url = store_file( + bucket_name="mlpipeline", + prefix=f"notebooks/{notebook_id}/", + file_name=url.split("/")[-1].split("?")[0], + file_content=json.dumps(notebook_content).encode(), + ) if requirements: @@ -445,17 +520,30 @@ def _upload_notebook_yaml(yaml_file_content: AnyStr, name=None, access_token=Non # TODO: remove this after fixing the Elyra-AI/KFP-Notebook runner so that # Elyra should install its own requirements in addition to the provided requirements requirements_elyra_url = "https://github.com/elyra-ai/kfp-notebook/blob/master/etc/requirements-elyra.txt" - requirements_elyra_txt = download_file_content_from_url(requirements_elyra_url).decode() - requirements_elyra = "\n".join([line for line in requirements_elyra_txt.split("\n") - if not line.startswith("#")]) - - requirements_all = f"# Required packages for {api_notebook.name}:\n" \ - f"{requirements_txt}\n" \ - f"# Requirements from {requirements_elyra_url}:\n" \ - f"{requirements_elyra}" - - 
store_file(bucket_name="mlpipeline", prefix=f"notebooks/{notebook_id}/", - file_name="requirements.txt", file_content=requirements_all.encode()) + requirements_elyra_txt = download_file_content_from_url( + requirements_elyra_url + ).decode() + requirements_elyra = "\n".join( + [ + line + for line in requirements_elyra_txt.split("\n") + if not line.startswith("#") + ] + ) + + requirements_all = ( + f"# Required packages for {api_notebook.name}:\n" + f"{requirements_txt}\n" + f"# Requirements from {requirements_elyra_url}:\n" + f"{requirements_elyra}" + ) + + store_file( + bucket_name="mlpipeline", + prefix=f"notebooks/{notebook_id}/", + file_name="requirements.txt", + file_content=requirements_all.encode(), + ) # if the url included an access token, replace the original url with the s3 url if "?token=" in url or "github.ibm.com" in url: @@ -472,14 +560,23 @@ def _download_notebook(url: str, enterprise_github_api_token: str) -> dict: if "ibm.com" in url and "?token=" not in url: if not enterprise_github_api_token and not ghe_api_token: - raise ApiError(f"Must provide API token to access notebooks on Enterprise GitHub: {url}", 422) + raise ApiError( + f"Must provide API token to access notebooks on Enterprise GitHub: {url}", + 422, + ) else: - request_headers.update({'Authorization': f'token {enterprise_github_api_token or ghe_api_token}'}) + request_headers.update( + { + "Authorization": f"token {enterprise_github_api_token or ghe_api_token}" + } + ) try: - raw_url = url.replace("/github.ibm.com/", "/raw.github.ibm.com/")\ - .replace("/github.com/", "/raw.githubusercontent.com/")\ - .replace("/blob/", "/") + raw_url = ( + url.replace("/github.ibm.com/", "/raw.github.ibm.com/") + .replace("/github.com/", "/raw.githubusercontent.com/") + .replace("/blob/", "/") + ) response = requests.get(raw_url, allow_redirects=True, headers=request_headers) if response.ok: @@ -489,8 +586,10 @@ def _download_notebook(url: str, enterprise_github_api_token: str) -> dict: except 
Exception as e: raise ApiError(f"Could not download notebook file '{url}'. \n{str(e)}", 422) - raise ApiError(f"Could not download notebook file '{url}'. Reason: {response.reason}", - response.status_code) + raise ApiError( + f"Could not download notebook file '{url}'. Reason: {response.reason}", + response.status_code, + ) def _extract_notebook_parameters(notebook_dict: dict) -> [ApiParameter]: diff --git a/api/server/swagger_server/controllers_impl/pipeline_service_controller_impl.py b/api/server/swagger_server/controllers_impl/pipeline_service_controller_impl.py index 10cca6b7..6ab93ed6 100644 --- a/api/server/swagger_server/controllers_impl/pipeline_service_controller_impl.py +++ b/api/server/swagger_server/controllers_impl/pipeline_service_controller_impl.py @@ -1,12 +1,12 @@ # Copyright 2021 The MLX Contributors -# +# # SPDX-License-Identifier: Apache-2.0 -import connexion +import connexion # noqa: F401 import json import os import typing -import yaml +import yaml # noqa: F401 from datetime import datetime from collections import Counter @@ -14,22 +14,49 @@ from swagger_server.controllers_impl import download_file_content_from_url from swagger_server.controllers_impl import get_yaml_file_content_from_uploadfile -from swagger_server.data_access.minio_client import store_file, delete_object, \ - delete_objects, get_file_content_and_url, NoSuchKey, \ - enable_anonymous_read_access, create_tarfile -from swagger_server.data_access.mysql_client import store_data, generate_id, load_data, \ - delete_data, num_rows, update_multiple -from swagger_server.gateways.kubeflow_pipeline_service import upload_pipeline_to_kfp,\ - delete_kfp_pipeline, run_pipeline_in_experiment, run_custom_pipeline_in_experiment, \ - _host as KFP_HOST -from swagger_server.models.api_get_template_response import ApiGetTemplateResponse # noqa: E501 -from swagger_server.models.api_list_pipelines_response import ApiListPipelinesResponse # noqa: E501 +from swagger_server.data_access.minio_client 
import ( + store_file, + delete_object, + delete_objects, + get_file_content_and_url, + NoSuchKey, + enable_anonymous_read_access, + create_tarfile, +) +from swagger_server.data_access.mysql_client import ( + store_data, + generate_id, + load_data, + delete_data, + num_rows, + update_multiple, +) +from swagger_server.gateways.kubeflow_pipeline_service import ( + upload_pipeline_to_kfp, + delete_kfp_pipeline, + run_pipeline_in_experiment, + run_custom_pipeline_in_experiment, + _host as KFP_HOST, +) +from swagger_server.models.api_get_template_response import ( # noqa: F401 + ApiGetTemplateResponse, +) +from swagger_server.models.api_list_pipelines_response import ( # noqa: F401 + ApiListPipelinesResponse, +) from swagger_server.models.api_pipeline import ApiPipeline # noqa: E501 -from swagger_server.models import ApiPipelineCustomRunPayload, ApiPipelineTask # , ApiPipelineDAG +from swagger_server.models import ( + ApiPipelineCustomRunPayload, + ApiPipelineTask, +) # , ApiPipelineDAG from swagger_server.models.api_parameter import ApiParameter -from swagger_server.models.api_pipeline_extension import ApiPipelineExtension # noqa: E501 -from swagger_server.models.api_pipeline_extended import ApiPipelineExtended # noqa: E501 -from swagger_server.models.api_run_code_response import ApiRunCodeResponse # noqa: E501 +from swagger_server.models.api_pipeline_extension import ( + ApiPipelineExtension, +) +from swagger_server.models.api_pipeline_extended import ( # noqa: F401 + ApiPipelineExtended, +) +from swagger_server.models.api_run_code_response import ApiRunCodeResponse # noqa: F401, E501 from tempfile import mkstemp @@ -59,13 +86,13 @@ def approve_pipelines_for_publishing(pipeline_ids): # noqa: E501 def create_pipeline(body): # noqa: E501 """create_pipeline - :param body: + :param body: :type body: dict | bytes :rtype: ApiPipeline """ if connexion.request.is_json: - body = ApiPipeline.from_dict(connexion.request.get_json()) # noqa: E501 + body = 
ApiPipeline.from_dict(connexion.request.get_json()) return "Not implemented, yet", 501 @@ -73,7 +100,7 @@ def create_pipeline(body): # noqa: E501 def delete_pipeline(id): # noqa: E501 """delete_pipeline - :param id: + :param id: :type id: str :rtype: None @@ -85,7 +112,9 @@ def delete_pipeline(id): # noqa: E501 if id == "*": delete_objects(bucket_name="mlpipeline", prefix=f"pipelines/") else: - delete_object(bucket_name="mlpipeline", prefix="pipelines", file_name=f"{id}") + delete_object( + bucket_name="mlpipeline", prefix="pipelines", file_name=f"{id}" + ) else: # wildcard '*' deletes (and recreates) entire table, not desired for pipelines table, KFP API does not accept "*" if id != "*": @@ -108,14 +137,21 @@ def download_pipeline_files(id): # noqa: E501 :rtype: file """ - tar, bytes_io = create_tarfile(bucket_name="mlpipeline", prefix=f"pipelines/{id}", - file_extensions=[""], - keep_open=False) + tar, bytes_io = create_tarfile( + bucket_name="mlpipeline", + prefix=f"pipelines/{id}", + file_extensions=[""], + keep_open=False, + ) if len(tar.members) == 0: return f"Could not find pipeline with id '{id}'", 404 - return bytes_io.getvalue(), 200, {"Content-Disposition": f"attachment; filename={id}.tgz"} + return ( + bytes_io.getvalue(), + 200, + {"Content-Disposition": f"attachment; filename={id}.tgz"}, + ) def get_pipeline(id): # noqa: E501 @@ -126,7 +162,9 @@ def get_pipeline(id): # noqa: E501 :rtype: ApiPipelineExtended """ - api_pipelines: [ApiPipelineExtended] = load_data(ApiPipelineExtended, filter_dict={"id": id}) + api_pipelines: [ApiPipelineExtended] = load_data( + ApiPipelineExtended, filter_dict={"id": id} + ) if not api_pipelines: return "Not found", 404 @@ -139,13 +177,15 @@ def get_pipeline(id): # noqa: E501 def get_template(id): # noqa: E501 """get_template - :param id: + :param id: :type id: str :rtype: ApiGetTemplateResponse """ try: - template_yaml, url = get_file_content_and_url(bucket_name="mlpipeline", prefix="pipelines", file_name=id) + 
template_yaml, url = get_file_content_and_url( + bucket_name="mlpipeline", prefix="pipelines", file_name=id + ) template_response = ApiGetTemplateResponse(template=template_yaml, url=url) return template_response, 200 @@ -159,16 +199,18 @@ def get_template(id): # noqa: E501 return str(e), 500 -def list_pipelines(page_token=None, page_size=None, sort_by=None, filter=None): # noqa: E501 +def list_pipelines( + page_token=None, page_size=None, sort_by=None, filter=None +): # noqa: E501 """list_pipelines - :param page_token: + :param page_token: :type page_token: str - :param page_size: + :param page_size: :type page_size: int - :param sort_by: Can be format of \"field_name\", \"field_name asc\" or \"field_name des\" Ascending by default. + :param sort_by: Can be format of \"field_name\", \"field_name asc\" or \"field_name des\" Ascending by default. # noqa: E501 :type sort_by: str - :param filter: A string-serialized JSON dictionary containing key-value pairs with name of the object property to apply filter on and the value of the respective property. + :param filter: A string-serialized JSON dictionary containing key-value pairs with name of the object property to apply filter on and the value of the respective property. 
# noqa: E501 :type filter: str :rtype: ApiListPipelinesResponse @@ -187,8 +229,13 @@ def list_pipelines(page_token=None, page_size=None, sort_by=None, filter=None): if "filter_categories" in filter_dict: del filter_dict["filter_categories"] - api_pipelines: [ApiPipeline] = load_data(ApiPipelineExtended, filter_dict=filter_dict, - sort_by=sort_by, count=page_size, offset=offset) + api_pipelines: [ApiPipeline] = load_data( + ApiPipelineExtended, + filter_dict=filter_dict, + sort_by=sort_by, + count=page_size, + offset=offset, + ) next_page_token = offset + page_size if len(api_pipelines) == page_size else None @@ -197,8 +244,9 @@ def list_pipelines(page_token=None, page_size=None, sort_by=None, filter=None): if total_size == next_page_token: next_page_token = None - pipeline_list = ApiListPipelinesResponse(pipelines=api_pipelines, total_size=total_size, - next_page_token=next_page_token) + pipeline_list = ApiListPipelinesResponse( + pipelines=api_pipelines, total_size=total_size, next_page_token=next_page_token + ) return pipeline_list, 200 @@ -215,39 +263,58 @@ def run_custom_pipeline(run_custom_pipeline_payload, run_name=None): # noqa: E5 :rtype: ApiRunCodeResponse """ if connexion.request.is_json: - run_custom_pipeline_payload = ApiPipelineCustomRunPayload.from_dict(connexion.request.get_json()) # noqa: E501 + run_custom_pipeline_payload = ApiPipelineCustomRunPayload.from_dict( + connexion.request.get_json() + ) run_parameters = run_custom_pipeline_payload.run_parameters or {} custom_pipeline = run_custom_pipeline_payload.custom_pipeline # ensure unique task names task_names = [t.name for t in custom_pipeline.dag.tasks] - duplicate_task_names = [name for name, count in Counter(task_names).items() if count > 1] + duplicate_task_names = [ + name for name, count in Counter(task_names).items() if count > 1 + ] assert not duplicate_task_names, f"duplicate task names: {duplicate_task_names}" # validate pipeline dependencies - pipeline_tasks_by_name: typing.Dict[str, 
ApiPipelineTask] = {t.name: t for t in custom_pipeline.dag.tasks} + pipeline_tasks_by_name: typing.Dict[str, ApiPipelineTask] = { + t.name: t for t in custom_pipeline.dag.tasks + } for t in pipeline_tasks_by_name.values(): for required_task_name in t.dependencies or []: - assert required_task_name in pipeline_tasks_by_name, \ - f"missing task '{required_task_name}', as dependency for task '{t.name}'" + assert ( + required_task_name in pipeline_tasks_by_name + ), f"missing task '{required_task_name}', as dependency for task '{t.name}'" # validate input parameters - missing_run_parameters = {p.name for p in custom_pipeline.inputs.parameters - if p.default is None and p.value is None} - run_parameters.keys() - assert not missing_run_parameters, f"missing parameters to run pipeline: {missing_run_parameters}" + missing_run_parameters = { + p.name + for p in custom_pipeline.inputs.parameters + if p.default is None and p.value is None + } - run_parameters.keys() + assert ( + not missing_run_parameters + ), f"missing parameters to run pipeline: {missing_run_parameters}" # make sure we enable anonymous read access to pipeline task components - for artifact_type in set([t.artifact_type for t in pipeline_tasks_by_name.values()]): - enable_anonymous_read_access(bucket_name="mlpipeline", prefix=f"{artifact_type}s/*") + for artifact_type in set( + [t.artifact_type for t in pipeline_tasks_by_name.values()] + ): + enable_anonymous_read_access( + bucket_name="mlpipeline", prefix=f"{artifact_type}s/*" + ) try: - run_id = run_custom_pipeline_in_experiment(custom_pipeline, run_name, run_parameters) + run_id = run_custom_pipeline_in_experiment( + custom_pipeline, run_name, run_parameters + ) return ApiRunCodeResponse(run_url=f"/runs/details/{run_id}"), 200 except Exception as e: # TODO: remove traceback? 
import traceback + print(traceback.format_exc()) return f"Error while trying to run custom pipeline '{run_name}': {e}", 500 @@ -268,7 +335,7 @@ def run_pipeline(id, run_name=None, parameters=None): # noqa: E501 return f"Kubeflow Pipeline host is 'UNAVAILABLE'", 503 if not parameters and connexion.request.is_json: - parameter_dict = dict(connexion.request.get_json()) # noqa: E501 + parameter_dict = dict(connexion.request.get_json()) else: parameter_dict = parameters @@ -313,7 +380,9 @@ def set_featured_pipelines(pipeline_ids): # noqa: E501 return None, 200 -def upload_pipeline(uploadfile, name=None, description=None, labels=None, annotations=None): # noqa: E501 +def upload_pipeline( + uploadfile, name=None, description=None, labels=None, annotations=None +): # noqa: E501 """upload_pipeline :param uploadfile: The pipeline to upload. Maximum size of 32MB is supported. @@ -322,16 +391,18 @@ def upload_pipeline(uploadfile, name=None, description=None, labels=None, annota :type name: str :param description: A description for this pipeline, optional :type description: str - :param labels: A string representation of a JSON dictionary of labels describing this pipeline, optional.See https://kubernetes.io/docs/concepts/overview/working-with-objects/labels + :param labels: A string representation of a JSON dictionary of labels describing this pipeline, optional.See https://kubernetes.io/docs/concepts/overview/working-with-objects/labels # noqa: E501 :type labels: str - :param annotations: A string representation of a JSON dictionary of annotations describing this pipeline, optional.See https://kubernetes.io/docs/concepts/overview/working-with-objects/annotations + :param annotations: A string representation of a JSON dictionary of annotations describing this pipeline, optional.See https://kubernetes.io/docs/concepts/overview/working-with-objects/annotations # noqa: E501 :type annotations: str :rtype: ApiPipelineExtended """ yaml_file_content = 
get_yaml_file_content_from_uploadfile(uploadfile) - return _upload_pipeline_yaml(yaml_file_content, name, description, labels, annotations) + return _upload_pipeline_yaml( + yaml_file_content, name, description, labels, annotations + ) def upload_pipeline_from_url(url, name=None, access_token=None): # noqa: E501 @@ -355,7 +426,14 @@ def upload_pipeline_from_url(url, name=None, access_token=None): # noqa: E501 # private helper methods, not swagger-generated ############################################################################### -def _upload_pipeline_yaml(yaml_file_content: AnyStr, name=None, description=None, labels=None, annotations=None): + +def _upload_pipeline_yaml( + yaml_file_content: AnyStr, + name=None, + description=None, + labels=None, + annotations=None, +): (fd, filename) = mkstemp(suffix=".yaml") @@ -365,7 +443,9 @@ def _upload_pipeline_yaml(yaml_file_content: AnyStr, name=None, description=None if KFP_HOST == "UNAVAILABLE": # when running inside Docker Compose w/out KFP we store pipelines ourselves - api_pipeline: ApiPipeline = _store_pipeline(yaml_file_content, name, description) + api_pipeline: ApiPipeline = _store_pipeline( + yaml_file_content, name, description + ) else: # when deployed on top of KFP, we let KFP store pipelines @@ -375,17 +455,25 @@ def _upload_pipeline_yaml(yaml_file_content: AnyStr, name=None, description=None yaml_dict = yaml.load(yaml_file_content, Loader=yaml.FullLoader) template_metadata = yaml_dict.get("metadata") or dict() annotations = template_metadata.get("annotations", {}) - pipeline_spec = json.loads(annotations.get("pipelines.kubeflow.org/pipeline_spec", "{}")) - description = description or pipeline_spec.get("description", "").strip() + pipeline_spec = json.loads( + annotations.get("pipelines.kubeflow.org/pipeline_spec", "{}") + ) + description = ( + description or pipeline_spec.get("description", "").strip() + ) - api_pipeline: ApiPipeline = upload_pipeline_to_kfp(filename, name, description) + api_pipeline: 
ApiPipeline = upload_pipeline_to_kfp( + filename, name, description + ) store_data(ApiPipelineExtension(id=api_pipeline.id)) if annotations: if type(annotations) == str: annotations = json.loads(annotations) - update_multiple(ApiPipelineExtension, [api_pipeline.id], "annotations", annotations) + update_multiple( + ApiPipelineExtension, [api_pipeline.id], "annotations", annotations + ) api_pipeline_extended, _ = get_pipeline(api_pipeline.id) @@ -403,7 +491,9 @@ def _store_pipeline(yaml_file_content: AnyStr, name=None, description=None): template_metadata = yaml_dict.get("metadata") or dict() annotations = template_metadata.get("annotations", {}) - pipeline_spec = json.loads(annotations.get("pipelines.kubeflow.org/pipeline_spec", "{}")) + pipeline_spec = json.loads( + annotations.get("pipelines.kubeflow.org/pipeline_spec", "{}") + ) name = name or template_metadata["name"] description = description or pipeline_spec.get("description", "").strip() @@ -411,24 +501,36 @@ def _store_pipeline(yaml_file_content: AnyStr, name=None, description=None): pipeline_id = "-".join([generate_id(length=l) for l in [8, 4, 4, 4, 12]]) created_at = datetime.now() - parameters = [ApiParameter(name=p.get("name"), description=p.get("description"), - default=p.get("default"), value=p.get("value")) - for p in yaml_dict["spec"].get("params", {})] - - api_pipeline = ApiPipeline(id=pipeline_id, - created_at=created_at, - name=name, - description=description, - parameters=parameters, - namespace=namespace) + parameters = [ + ApiParameter( + name=p.get("name"), + description=p.get("description"), + default=p.get("default"), + value=p.get("value"), + ) + for p in yaml_dict["spec"].get("params", {}) + ] + + api_pipeline = ApiPipeline( + id=pipeline_id, + created_at=created_at, + name=name, + description=description, + parameters=parameters, + namespace=namespace, + ) uuid = store_data(api_pipeline) api_pipeline.id = uuid - store_file(bucket_name="mlpipeline", prefix=f"pipelines/", - 
file_name=f"{pipeline_id}", file_content=yaml_file_content, - content_type="text/yaml") + store_file( + bucket_name="mlpipeline", + prefix=f"pipelines/", + file_name=f"{pipeline_id}", + file_content=yaml_file_content, + content_type="text/yaml", + ) enable_anonymous_read_access(bucket_name="mlpipeline", prefix="pipelines/*") @@ -441,11 +543,17 @@ def _validate_parameters(api_pipeline: ApiPipeline, parameters: dict) -> (str, i unexpected_parameters = set(parameters.keys()) - set(acceptable_parameters) if unexpected_parameters: - return f"Unexpected parameter(s): {list(unexpected_parameters)}. " \ - f"Acceptable parameter(s): {acceptable_parameters}", 422 - - missing_parameters = [p.name for p in api_pipeline.parameters - if not (p.default or p.value) and p.name not in parameters] + return ( + f"Unexpected parameter(s): {list(unexpected_parameters)}. " + f"Acceptable parameter(s): {acceptable_parameters}", + 422, + ) + + missing_parameters = [ + p.name + for p in api_pipeline.parameters + if not (p.default or p.value) and p.name not in parameters + ] # TODO: figure out a way to determine if a pipeline parameter is required or not. 
# just testing for default value is not an indicator diff --git a/api/server/swagger_server/data_access/minio_client.py b/api/server/swagger_server/data_access/minio_client.py index c0d689a8..a83fe6d5 100644 --- a/api/server/swagger_server/data_access/minio_client.py +++ b/api/server/swagger_server/data_access/minio_client.py @@ -1,6 +1,6 @@ # Copyright 2021 The MLX Contributors -# -# SPDX-License-Identifier: Apache-2.0 +# +# SPDX-License-Identifier: Apache-2.0 import json import os @@ -10,7 +10,7 @@ from io import BytesIO from minio import Minio from minio.error import NoSuchKey, NoSuchBucketPolicy, ResponseError -from pprint import pprint +from pprint import pprint # noqa: F401 from swagger_server.util import ApiError from tarfile import TarFile from urllib3 import Timeout @@ -18,10 +18,12 @@ _namespace = os.environ.get("POD_NAMESPACE", "kubeflow") -_host = os.environ.get("MINIO_SERVICE_SERVICE_HOST", "minio-service.%s.svc.cluster.local" % _namespace) +_host = os.environ.get( + "MINIO_SERVICE_SERVICE_HOST", "minio-service.%s.svc.cluster.local" % _namespace +) _port = os.environ.get("MINIO_SERVICE_SERVICE_PORT", "9000") -_access_key = 'minio' -_secret_key = 'minio123' +_access_key = "minio" +_secret_key = "minio123" _bucket_policy_sid = "AllowPublicReadAccess" @@ -30,16 +32,15 @@ "Action": ["s3:GetObject"], "Effect": "Allow", "Principal": {"AWS": ["*"]}, - "Resource": [] -} -_bucket_policy_template = { - "Version": "2012-10-17", - "Statement": [_bucket_policy_stmt] + "Resource": [], } +_bucket_policy_template = {"Version": "2012-10-17", "Statement": [_bucket_policy_stmt]} def _get_minio_client(timeout=None): - client = Minio(f"{_host}:{_port}", access_key=_access_key, secret_key=_secret_key, secure=False) + client = Minio( + f"{_host}:{_port}", access_key=_access_key, secret_key=_secret_key, secure=False + ) if timeout != Timeout.DEFAULT_TIMEOUT: client._http.connection_pool_kw["timeout"] = timeout @@ -53,7 +54,13 @@ def health_check(): return True -def 
store_file(bucket_name, prefix, file_name, file_content, content_type="application/octet-stream") -> str: +def store_file( + bucket_name, + prefix, + file_name, + file_content, + content_type="application/octet-stream", +) -> str: client = _get_minio_client() f = tempfile.TemporaryFile() f.write(file_content) @@ -76,7 +83,9 @@ def store_tgz(bucket_name, prefix, tgz_file: FileStorage): if file_ext in [".yaml", ".yml", ".md", ".py"]: object_name = f"{prefix.rstrip('/')}/{member.name}" f = tar.extractfile(member) - client.put_object(bucket_name, object_name, f, f.raw.size, "text/plain") # f.read() + client.put_object( + bucket_name, object_name, f, f.raw.size, "text/plain" + ) # f.read() f.close() tar.close() @@ -85,7 +94,9 @@ def store_tgz(bucket_name, prefix, tgz_file: FileStorage): return True -def extract_yaml_from_tarfile(uploadfile: FileStorage, filename_filter: str = "", reset_after_read=False) -> str: +def extract_yaml_from_tarfile( + uploadfile: FileStorage, filename_filter: str = "", reset_after_read=False +) -> str: tar = tarfile.open(fileobj=uploadfile.stream, mode="r:gz") for member in tar.getmembers(): @@ -104,7 +115,9 @@ def extract_yaml_from_tarfile(uploadfile: FileStorage, filename_filter: str = "" yaml_file_content = f.read() if reset_after_read: - uploadfile.stream.seek(0) # reset the upload file stream, we might need to re-read later + uploadfile.stream.seek( + 0 + ) # reset the upload file stream, we might need to re-read later f.close() tar.close() @@ -113,7 +126,9 @@ def extract_yaml_from_tarfile(uploadfile: FileStorage, filename_filter: str = "" return None -def create_tarfile(bucket_name: str, prefix: str, file_extensions: [str], keep_open=False) -> (TarFile, BytesIO): +def create_tarfile( + bucket_name: str, prefix: str, file_extensions: [str], keep_open=False +) -> (TarFile, BytesIO): client = _get_minio_client() objects = client.list_objects(bucket_name, prefix=prefix, recursive=True) @@ -148,10 +163,12 @@ def 
get_file_content_and_url(bucket_name, prefix, file_name) -> (str, str): object_name = f"{prefix.rstrip('/')}/{file_name}" file_content = client.get_object(bucket_name, object_name) object_url = f"http://{_host}:{_port}/{bucket_name}/{object_name}" - return file_content.data.decode('utf-8'), object_url + return file_content.data.decode("utf-8"), object_url -def retrieve_file_content_and_url(bucket_name, prefix, file_extensions: [str], file_name_filter="") -> [(str, str)]: +def retrieve_file_content_and_url( + bucket_name, prefix, file_extensions: [str], file_name_filter="" +) -> [(str, str)]: client = _get_minio_client() objects = client.list_objects(bucket_name, prefix=prefix, recursive=True) @@ -163,14 +180,18 @@ def retrieve_file_content_and_url(bucket_name, prefix, file_extensions: [str], f if file_ext in file_extensions and file_name_filter in o.object_name: file_content = client.get_object(bucket_name, o.object_name) object_url = f"http://{_host}:{_port}/{bucket_name}/{o.object_name}" - files_w_url.append((file_content.data.decode('utf-8'), object_url)) + files_w_url.append((file_content.data.decode("utf-8"), object_url)) return files_w_url -def retrieve_file_content(bucket_name, prefix, file_extensions: [str], file_name_filter: str = ""): +def retrieve_file_content( + bucket_name, prefix, file_extensions: [str], file_name_filter: str = "" +): - files_w_url = retrieve_file_content_and_url(bucket_name, prefix, file_extensions, file_name_filter) + files_w_url = retrieve_file_content_and_url( + bucket_name, prefix, file_extensions, file_name_filter + ) if files_w_url: (file_content, url) = files_w_url[0] # TODO: return first result only? 
@@ -179,9 +200,13 @@ def retrieve_file_content(bucket_name, prefix, file_extensions: [str], file_name return None -def get_object_url(bucket_name, prefix, file_extensions: [str], file_name_filter: str = ""): +def get_object_url( + bucket_name, prefix, file_extensions: [str], file_name_filter: str = "" +): - files_w_url = retrieve_file_content_and_url(bucket_name, prefix, file_extensions, file_name_filter) + files_w_url = retrieve_file_content_and_url( + bucket_name, prefix, file_extensions, file_name_filter + ) if files_w_url: (file_content, url) = files_w_url[0] # TODO: return first result only? @@ -210,7 +235,9 @@ def delete_objects(bucket_name, prefix): objects = client.list_objects(bucket_name, prefix=prefix, recursive=True) object_names = [obj.object_name for obj in objects if not obj.is_dir] maybe_errors = client.remove_objects(bucket_name, object_names) - actual_errors = [(e.object_name, e.error_code, e.error_message) for e in maybe_errors] + actual_errors = [ + (e.object_name, e.error_code, e.error_message) for e in maybe_errors + ] if actual_errors: pprint(actual_errors) @@ -235,8 +262,9 @@ def _update_bucket_policy(bucket_name: str, prefix: str): except NoSuchBucketPolicy: bucket_policy = dict(_bucket_policy_template) - getobject_stmts = [s for s in bucket_policy["Statement"] if s.get("Sid") == _bucket_policy_sid] or \ - [s for s in bucket_policy["Statement"] if "s3:GetObject" in s["Action"]] + getobject_stmts = [ + s for s in bucket_policy["Statement"] if s.get("Sid") == _bucket_policy_sid + ] or [s for s in bucket_policy["Statement"] if "s3:GetObject" in s["Action"]] if not getobject_stmts: bucket_policy["Statement"].append(_bucket_policy_stmt) @@ -246,7 +274,9 @@ def _update_bucket_policy(bucket_name: str, prefix: str): new_resource = f"arn:aws:s3:::{bucket_name}/{prefix}" - if new_resource not in resources and not any([r.strip("*") in new_resource for r in resources]): + if new_resource not in resources and not any( + [r.strip("*") in new_resource for 
r in resources] + ): resources.append(new_resource) new_policy_str = json.dumps(bucket_policy) @@ -256,9 +286,9 @@ def _update_bucket_policy(bucket_name: str, prefix: str): except ResponseError as e: - if e.code == 'XMinioPolicyNesting': + if e.code == "XMinioPolicyNesting": raise ApiError( f"{e.message.split('.')[0]}." f" New policy: '{new_policy_str}'." - f" Existing policy: '{client.get_bucket_policy(bucket_name)}'") - + f" Existing policy: '{client.get_bucket_policy(bucket_name)}'" + ) diff --git a/api/server/swagger_server/data_access/mysql_client.py b/api/server/swagger_server/data_access/mysql_client.py index 0f90689c..402c2ad6 100644 --- a/api/server/swagger_server/data_access/mysql_client.py +++ b/api/server/swagger_server/data_access/mysql_client.py @@ -1,5 +1,5 @@ # Copyright 2021 The MLX Contributors -# +# # SPDX-License-Identifier: Apache-2.0 import inflection @@ -11,11 +11,11 @@ from kfp_tekton.compiler._k8s_helper import sanitize_k8s_name from mysql.connector import connect, errorcode, Error from mysql.connector.errors import IntegrityError -from os import environ as env +from os import environ as env # noqa: F401 from random import choice -from string import ascii_letters, digits, hexdigits +from string import ascii_letters, digits, hexdigits # noqa: F401 from swagger_server.models.base_model_ import Model -from swagger_server.models import * # required for dynamic Api ...Extension class loading during View-table creation +from swagger_server.models import * # noqa: F403 required for dynamic Api ...Extension class loading during View-table creation from swagger_server.util import _deserialize, ApiError from typing import List @@ -23,20 +23,20 @@ _namespace = env.get("POD_NAMESPACE", "kubeflow") _host = env.get("MYSQL_SERVICE_HOST", "mysql.%s.svc.cluster.local" % _namespace) _port = env.get("MYSQL_SERVICE_PORT", "3306") -_database = 'mlpipeline' -_user = 'root' +_database = "mlpipeline" +_user = "root" existing_tables = dict() # map Python data types 
of the Swagger model object's attributes to MySQL column types type_map = { - str: 'varchar(255)', - int: 'int(11)', - bool: 'tinyint(1)', - list: 'longtext', - dict: 'longtext', - Model: 'longtext', - datetime: 'bigint(20)' + str: "varchar(255)", + int: "int(11)", + bool: "tinyint(1)", + list: "longtext", + dict: "longtext", + Model: "longtext", + datetime: "bigint(20)", } # some attributes do not comply to the defaults in the type_map (KFP idiosyncrasy) custom_col_types = { @@ -49,35 +49,42 @@ }, } # add custom column types for sub-classes of ApiAsset -for asset_type in ApiAsset.__subclasses__(): +for asset_type in ApiAsset.__subclasses__(): # noqa: F405 custom_col_types[asset_type.__name__] = custom_col_types["ApiAsset"] # add custom column types for sub-class(es) of ApiPipeline custom_col_types["ApiPipelineExtended"] = custom_col_types["ApiPipeline"] # some Swagger attributes names have special MySQL column names (KFP idiosyncrasy) -attribute_name_to_column_name = { - 'id': 'UUID', - 'created_at': 'CreatedAtInSec' -} -column_name_to_attribute_name = {v: k for (k, v) in attribute_name_to_column_name.items()} # Note: overrides duplicates +attribute_name_to_column_name = {"id": "UUID", "created_at": "CreatedAtInSec"} +column_name_to_attribute_name = { + v: k for (k, v) in attribute_name_to_column_name.items() +} # Note: overrides duplicates ############################################################################## # methods to convert between Swagger and MySQL ############################################################################## -def _convert_value_to_mysql(value, python_type: type, mysql_type_override: str = None, quote_str=False): + +def _convert_value_to_mysql( + value, python_type: type, mysql_type_override: str = None, quote_str=False +): # turn child attributes of type swagger._base_model.Model into dicts def to_dict(v): return v.to_dict() if hasattr(v, "to_dict") else v - if type(python_type) == typing._GenericAlias: # or 
str(python_type).startswith("typing."): + if ( + type(python_type) == typing._GenericAlias + ): # or str(python_type).startswith("typing."): python_type = eval(python_type._name.lower()) - if value and not issubclass(type(value), python_type) \ - and not (isinstance(value, dict) and issubclass(python_type, Model)): + if ( + value + and not issubclass(type(value), python_type) + and not (isinstance(value, dict) and issubclass(python_type, Model)) + ): err_msg = f"The type '{type(value)}' does not match expected target type '{python_type}' for value '{value}'" raise ApiError(err_msg, 422) @@ -99,8 +106,14 @@ def to_dict(v): elif python_type == list: # or isinstance(value, list): mysql_value = json.dumps(list(map(to_dict, value))) - elif python_type == dict or issubclass(python_type, Model) and isinstance(value, dict): - mysql_value = json.dumps(dict(map(lambda item: (item[0], to_dict(item[1])), value.items()))) + elif ( + python_type == dict + or issubclass(python_type, Model) + and isinstance(value, dict) + ): + mysql_value = json.dumps( + dict(map(lambda item: (item[0], to_dict(item[1])), value.items())) + ) elif python_type == datetime: # or isinstance(value, datetime): mysql_value = int(value.timestamp()) @@ -122,9 +135,10 @@ def _convert_value_to_python(value, target_type: type): return datetime.fromtimestamp(value) elif isinstance(value, str) and ( - type(target_type) == typing._GenericAlias or issubclass(target_type, Model)): + type(target_type) == typing._GenericAlias or issubclass(target_type, Model) + ): - json_dict = json.loads(value or '{}') + json_dict = json.loads(value or "{}") swaggered_value = _deserialize(json_dict, target_type) return swaggered_value @@ -134,14 +148,16 @@ def _convert_value_to_python(value, target_type: type): def _convert_attr_name_to_col_name(swagger_attr_name: str): - return attribute_name_to_column_name.get(swagger_attr_name) \ - or inflection.camelize(swagger_attr_name) + return 
attribute_name_to_column_name.get(swagger_attr_name) or inflection.camelize( + swagger_attr_name + ) def _convert_col_name_to_attr_name(mysql_column_name: str): - return column_name_to_attribute_name.get(mysql_column_name) \ - or inflection.underscore(mysql_column_name) + return column_name_to_attribute_name.get( + mysql_column_name + ) or inflection.underscore(mysql_column_name) def _get_table_name(swagger_object_or_class) -> str: @@ -163,22 +179,25 @@ def _get_table_name(swagger_object_or_class) -> str: # general helper methods ############################################################################## + def generate_id(name: str = None, length: int = 36) -> str: if name: # return name.lower().replace(" ", "-").replace("---", "-").replace("-–-", "–") return sanitize_k8s_name(name) else: # return ''.join([choice(ascii_letters + digits + '-') for n in range(length)]) - return ''.join([choice(hexdigits) for n in range(length)]).lower() + return "".join([choice(hexdigits) for n in range(length)]).lower() ############################################################################## # helper methods to create MySQL tables ############################################################################## + def _get_column_type(swagger_field_name, swagger_field_type, swagger_class) -> str: - return custom_col_types.get(swagger_class.__name__, {}).get(swagger_field_name) or \ - _get_mysql_type_declaration(swagger_field_type) + return custom_col_types.get(swagger_class.__name__, {}).get( + swagger_field_name + ) or _get_mysql_type_declaration(swagger_field_type) def _get_mysql_type_declaration(python_class_or_type) -> str: @@ -193,19 +212,25 @@ def _get_mysql_type_declaration(python_class_or_type) -> str: if clazz in type_map: return type_map[clazz] - elif isinstance(python_class_or_type, Model) or issubclass(python_class_or_type, Model): + elif isinstance(python_class_or_type, Model) or issubclass( + python_class_or_type, Model + ): clazz = Model if clazz in type_map: 
return type_map[clazz] - raise ValueError(f"Cannot find MySQL data type for Python type {python_class_or_type}") + raise ValueError( + f"Cannot find MySQL data type for Python type {python_class_or_type}" + ) def _get_mysql_default_value_declaration(default_value): if default_value: - raise ValueError("DEFAULT value not implemented for MySQL CREATE TABLE statement," - f" default: '{default_value}'") + raise ValueError( + "DEFAULT value not implemented for MySQL CREATE TABLE statement," + f" default: '{default_value}'" + ) # TODO: generate MySQL default value declaration return default_value or "NOT NULL" @@ -240,7 +265,9 @@ def _get_create_table_statement(swagger_class) -> str: create_table_stmt.append(f" PRIMARY KEY (`{id_field_name}`)") else: - raise ValueError("CREATE TABLE statement requires PRIMARY KEY field. Expected 'id'") + raise ValueError( + "CREATE TABLE statement requires PRIMARY KEY field. Expected 'id'" + ) if "name" in sig.parameters.keys(): name_field = _convert_attr_name_to_col_name("name") @@ -262,24 +289,28 @@ def _get_create_view_statement(swagger_class) -> str: base_table_name = view_name.replace("_extended", "") extension_table_name = view_name.replace("s_extended", "_extensions") - extension_swagger_class = eval(swagger_class.__name__.replace("Extended", "Extension")) + extension_swagger_class = eval( + swagger_class.__name__.replace("Extended", "Extension") + ) ext_sig = inspect.signature(extension_swagger_class.__init__) b_id_col_name = _convert_attr_name_to_col_name("id") e_id_col_name = _convert_attr_name_to_col_name("id") - e_non_id_col_names = [_convert_attr_name_to_col_name(p.name) - for _, p in ext_sig.parameters.items() - if p.name not in ["id", "self"]] + e_non_id_col_names = [ + _convert_attr_name_to_col_name(p.name) + for _, p in ext_sig.parameters.items() + if p.name not in ["id", "self"] + ] e_non_id_col_list = ", ".join([f"e.`{cn}`" for cn in e_non_id_col_names]) create_view_stmt = f""" - CREATE VIEW `{view_name}` AS + CREATE 
VIEW `{view_name}` AS SELECT b.*, {e_non_id_col_list} FROM `{base_table_name}` AS b - LEFT OUTER JOIN `{extension_table_name}` AS e + LEFT OUTER JOIN `{extension_table_name}` AS e ON b.`{b_id_col_name}`=e.`{e_id_col_name}` """ @@ -292,6 +323,7 @@ def _get_create_view_statement(swagger_class) -> str: # helper methods to create SQL (query) statement ############################################################################## + def _get_where_clause(swagger_class, filter_dict=dict()) -> str: if not filter_dict: @@ -304,7 +336,9 @@ def _get_where_clause(swagger_class, filter_dict=dict()) -> str: for attribute_name, attribute_value in filter_dict.items(): if attribute_name not in sig.parameters.keys(): - raise ValueError(f"{swagger_class} does not have an '{attribute_name}' attribute.") + raise ValueError( + f"{swagger_class} does not have an '{attribute_name}' attribute." + ) attribute_type = sig.parameters[attribute_name].annotation column_type = _get_column_type(attribute_name, attribute_type, swagger_class) @@ -312,7 +346,9 @@ def _get_where_clause(swagger_class, filter_dict=dict()) -> str: if column_type == "json" and type(attribute_value) == dict: for key, value in attribute_value.items(): - predicates.append(f"json_contains(json_extract({column_name}, '$.{key}'), '{json.dumps(value)}')") + predicates.append( + f"json_contains(json_extract({column_name}, '$.{key}'), '{json.dumps(value)}')" + ) # where json_contains(json_extract(FilterCategories, '$.platform'), '"kubernetes"') # where json_contains(json_extract(FilterCategories, '$.platform'), '["kubernetes", "kfserving"]') @@ -343,7 +379,9 @@ def _get_where_clause(swagger_class, filter_dict=dict()) -> str: else: # CAUTION: assuming everything else is string type - column_value = _convert_value_to_mysql(attribute_value, attribute_type, quote_str=True) + column_value = _convert_value_to_mysql( + attribute_value, attribute_type, quote_str=True + ) # if type(column_value) == str: # quote_str=True # column_value = 
f"'{column_value}'" @@ -390,12 +428,21 @@ def _get_limit_clause(count, offset): # methods to connect to MySQL and execute db operations ############################################################################## + def _get_connection(timeout: int = 10): - return connect(host=_host, port=_port, user=_user, database=_database, connection_timeout=timeout) + return connect( + host=_host, + port=_port, + user=_user, + database=_database, + connection_timeout=timeout, + ) -def _verify_or_create_table(table_name: str, swagger_class_or_object, validate_schema=True) -> bool: +def _verify_or_create_table( + table_name: str, swagger_class_or_object, validate_schema=True +) -> bool: if table_name not in existing_tables: @@ -413,17 +460,25 @@ def _verify_or_create_table(table_name: str, swagger_class_or_object, validate_s if not table_exists: if swagger_class.__name__.endswith("Extended"): # first, create the table that is being "extended" (only required if KFP did not create it) - base_swagger_class = eval(swagger_class.__name__.replace("Extended", "")) + base_swagger_class = eval( + swagger_class.__name__.replace("Extended", "") + ) base_table_name = _get_table_name(base_swagger_class) create_table_stmt = _get_create_table_statement(base_swagger_class) - base_table_created = _run_create_table_statement(base_table_name, create_table_stmt) + base_table_created = _run_create_table_statement( + base_table_name, create_table_stmt + ) existing_tables[base_table_name] = base_table_created # second, create the table with the additional columns - extension_swagger_class = eval(swagger_class.__name__.replace("Extended", "Extension")) + extension_swagger_class = eval( + swagger_class.__name__.replace("Extended", "Extension") + ) extension_table_name = _get_table_name(extension_swagger_class) create_table_stmt = _get_create_table_statement(extension_swagger_class) - ext_table_created = _run_create_table_statement(extension_table_name, create_table_stmt) + ext_table_created = 
_run_create_table_statement( + extension_table_name, create_table_stmt + ) existing_tables[extension_table_name] = ext_table_created # third, create the table-view that extends the base table with the additional columns from the ext table @@ -432,7 +487,9 @@ def _verify_or_create_table(table_name: str, swagger_class_or_object, validate_s existing_tables[table_name] = view_created else: create_table_stmt = _get_create_table_statement(swagger_class) - table_created = _run_create_table_statement(table_name, create_table_stmt) + table_created = _run_create_table_statement( + table_name, create_table_stmt + ) existing_tables[table_name] = table_created return True @@ -454,9 +511,11 @@ def _validate_schema(table_name: str, swagger_class): swagger_columns_w_type.append((col_name, col_type)) - query = f"SELECT COLUMN_NAME, SUBSTR(COLUMN_TYPE,1,64) as COLUMN_TYPE " \ - f" FROM INFORMATION_SCHEMA.COLUMNS " \ - f" WHERE TABLE_SCHEMA = '{_database}' AND TABLE_NAME = '{table_name}'" + query = ( + f"SELECT COLUMN_NAME, SUBSTR(COLUMN_TYPE,1,64) as COLUMN_TYPE " + f" FROM INFORMATION_SCHEMA.COLUMNS " + f" WHERE TABLE_SCHEMA = '{_database}' AND TABLE_NAME = '{table_name}'" + ) cnx = _get_connection() cursor = cnx.cursor(buffered=True) @@ -476,7 +535,9 @@ def _validate_schema(table_name: str, swagger_class): cursor.close() cnx.close() - if table_columns_w_type and set(table_columns_w_type) != set(swagger_columns_w_type): + if table_columns_w_type and set(table_columns_w_type) != set( + swagger_columns_w_type + ): if isinstance(swagger_class, Model): swagger_class = type(swagger_class) @@ -484,13 +545,15 @@ def _validate_schema(table_name: str, swagger_class): cols_found = "\n - ".join([f"'{n}' {t}" for n, t in table_columns_w_type]) cols_expect = "\n - ".join([f"'{n}' {t}" for n, t in swagger_columns_w_type]) - err_msg = f"The MySQL table '{_database}.{table_name}' does not match Swagger" \ - f" class '{swagger_class.__name__}'.\n" \ - f" Found table with columns:\n" \ - f" - 
{cols_found}.\n" \ - f" Expected table with columns:\n" \ - f" - {cols_expect}.\n" \ + err_msg = ( + f"The MySQL table '{_database}.{table_name}' does not match Swagger" + f" class '{swagger_class.__name__}'.\n" + f" Found table with columns:\n" + f" - {cols_found}.\n" + f" Expected table with columns:\n" + f" - {cols_expect}.\n" f" Delete and recreate the table by calling the API endpoint 'DELETE /{table_name}/*'" + ) raise ApiError(err_msg) @@ -504,7 +567,7 @@ def _run_create_table_statement(table_name, table_description: tuple) -> bool: cursor = cnx.cursor(buffered=True) try: - print(f"Creating table '{table_name}': ", end='') + print(f"Creating table '{table_name}': ", end="") cursor.execute(table_description) cnx.commit() print("OK") @@ -530,6 +593,7 @@ def _run_create_table_statement(table_name, table_description: tuple) -> bool: # _host = "123.34.13.12" + def health_check(): cnx = _get_connection(timeout=1) cnx.connect() @@ -544,15 +608,17 @@ def num_rows(swagger_class: type) -> int: _verify_or_create_table(table_name, swagger_class) - query = f"SELECT TABLE_ROWS FROM INFORMATION_SCHEMA.TABLES " \ - f"WHERE TABLE_SCHEMA = '{_database}' AND TABLE_NAME = '{table_name}'" + query = ( + f"SELECT TABLE_ROWS FROM INFORMATION_SCHEMA.TABLES " + f"WHERE TABLE_SCHEMA = '{_database}' AND TABLE_NAME = '{table_name}'" + ) cnx = _get_connection() cursor = cnx.cursor() try: cursor.execute(query) - num_records, = cursor.fetchone() + (num_records,) = cursor.fetchone() except Error as err: print(err.msg) @@ -575,7 +641,9 @@ def store_data(swagger_object: Model) -> str: # TODO: remove generate_id() calls in controller_impl methods, do it here if "id" in swagger_fields and not swagger_object.id: - swagger_object.id = generate_id(swagger_object.name if "name" in swagger_fields else None) + swagger_object.id = generate_id( + swagger_object.name if "name" in swagger_fields else None + ) # TODO: remove creating a new data in controller_impl methods, do it here if "created_at" in 
swagger_fields and not swagger_object.created_at: @@ -594,11 +662,13 @@ def store_data(swagger_object: Model) -> str: column_values.append(col_value) column_names_str = ", ".join(column_names) - values_list_str = ('%s,' * len(column_values)).rstrip(',') + values_list_str = ("%s," * len(column_values)).rstrip(",") - insert_stmt = (f"INSERT INTO {table_name} " - f"({column_names_str}) " - f"VALUES ({values_list_str})") + insert_stmt = ( + f"INSERT INTO {table_name} " + f"({column_names_str}) " + f"VALUES ({values_list_str})" + ) cnx = _get_connection() cnx.autocommit = True @@ -635,13 +705,17 @@ def update_multiple(swagger_class: type, ids: List[str], attribute_name: str, va sig = inspect.signature(swagger_class.__init__) if attribute_name not in sig.parameters.keys(): - raise ValueError(f"{swagger_class} does not have an attribute with name '{attribute_name}'.") + raise ValueError( + f"{swagger_class} does not have an attribute with name '{attribute_name}'." + ) if ids and "id" not in sig.parameters.keys(): raise ValueError(f"{swagger_class} does not have an 'id' attribute.") update_column_name = _convert_attr_name_to_col_name(attribute_name) - update_column_value = _convert_value_to_mysql(value, sig.parameters.get(attribute_name).annotation, quote_str=False) + update_column_value = _convert_value_to_mysql( + value, sig.parameters.get(attribute_name).annotation, quote_str=False + ) # if type(update_column_value) == str: # update_column_value = f"'{update_column_value}'" @@ -688,7 +762,9 @@ def delete_data(swagger_class: type, id: str) -> bool: sig = inspect.signature(swagger_class.__init__) if not id: - raise ValueError(f"Must specify 'id' column value to delete row from table '{table_name}'") + raise ValueError( + f"Must specify 'id' column value to delete row from table '{table_name}'" + ) elif "id" not in sig.parameters.keys(): raise ValueError(f"{swagger_class} does not have an 'id' attribute.") @@ -733,7 +809,13 @@ def delete_data(swagger_class: type, id: str) 
-> bool: return True # TODO: determine return value -def load_data(swagger_class: type, filter_dict: dict = None, sort_by: str = None, count: int = 100, offset: int = 0) -> [Model]: +def load_data( + swagger_class: type, + filter_dict: dict = None, + sort_by: str = None, + count: int = 100, + offset: int = 0, +) -> [Model]: table_name = _get_table_name(swagger_class) @@ -755,18 +837,25 @@ def load_data(swagger_class: type, filter_dict: dict = None, sort_by: str = None try: cursor.execute(query) - swagger_attr_names = [_convert_col_name_to_attr_name(c) for c in cursor.column_names] + swagger_attr_names = [ + _convert_col_name_to_attr_name(c) for c in cursor.column_names + ] - assert set(swagger_attr_names) <= set(sig.parameters.keys()), \ - f"Mismatch between database schema and API spec for {table_name}. " \ - f"Expected columns: {[_convert_attr_name_to_col_name(k) for k in sig.parameters.keys() if k != 'self']}. " \ + assert set(swagger_attr_names) <= set(sig.parameters.keys()), ( + f"Mismatch between database schema and API spec for {table_name}. " + f"Expected columns: {[_convert_attr_name_to_col_name(k) for k in sig.parameters.keys() if k != 'self']}. 
" f"Database columns: {cursor.column_names}" + ) - swagger_attr_types = [sig.parameters.get(a).annotation for a in swagger_attr_names] + swagger_attr_types = [ + sig.parameters.get(a).annotation for a in swagger_attr_names + ] for row_values in cursor: value_type_tuples = zip(list(row_values), swagger_attr_types) - swagger_attr_values = [_convert_value_to_python(v, t) for v, t in value_type_tuples] + swagger_attr_values = [ + _convert_value_to_python(v, t) for v, t in value_type_tuples + ] swagger_attr_dict = dict(zip(swagger_attr_names, swagger_attr_values)) swagger_object = swagger_class(**swagger_attr_dict) diff --git a/api/server/swagger_server/encoder.py b/api/server/swagger_server/encoder.py index a5d1d7a7..0156fd01 100644 --- a/api/server/swagger_server/encoder.py +++ b/api/server/swagger_server/encoder.py @@ -3,7 +3,7 @@ # SPDX-License-Identifier: Apache-2.0 from connexion.apps.flask_app import FlaskJSONEncoder -import six +import six # noqa: F401 from swagger_server.models.base_model_ import Model diff --git a/api/server/swagger_server/gateways/kfserving_client.py b/api/server/swagger_server/gateways/kfserving_client.py index 0ff51efc..9abe68f8 100644 --- a/api/server/swagger_server/gateways/kfserving_client.py +++ b/api/server/swagger_server/gateways/kfserving_client.py @@ -1,8 +1,8 @@ # Copyright 2021 The MLX Contributors -# +# # SPDX-License-Identifier: Apache-2.0 -import yaml +import yaml # noqa: F401 from kubernetes import client, config @@ -12,7 +12,7 @@ def get_all_services(name=None, namespace=None, group=None, version=None, plural api = client.CustomObjectsApi() if not namespace: - namespace = 'default' + namespace = "default" if name is None: resource = api.list_namespaced_custom_object( @@ -32,19 +32,21 @@ def get_all_services(name=None, namespace=None, group=None, version=None, plural return resource -def post_service(inferenceservice=None, namespace=None, group=None, version=None, plural=None): +def post_service( + inferenceservice=None, 
namespace=None, group=None, version=None, plural=None +): config.load_incluster_config() - api = client.CustomObjectsApi() + api = client.CustomObjectsApi() service_dict = inferenceservice.to_dict() # Get resource information from the dict - version_split = service_dict['apiVersion'].split("/") + version_split = service_dict["apiVersion"].split("/") group = version_split[0] version = version_split[1] - plural = service_dict['kind'].lower() + "s" + plural = service_dict["kind"].lower() + "s" if not namespace: - namespace = service_dict['metadata'].get('namespace', 'default') + namespace = service_dict["metadata"].get("namespace", "default") # create the resource ns_obj = api.create_namespaced_custom_object( @@ -57,20 +59,22 @@ def post_service(inferenceservice=None, namespace=None, group=None, version=None return ns_obj -def from_client_upload_service(upload_file=None, namespace=None, group=None, version=None, plural=None): +def from_client_upload_service( + upload_file=None, namespace=None, group=None, version=None, plural=None +): config.load_incluster_config() api = client.CustomObjectsApi() yaml_object = yaml.safe_load(upload_file) # Get resource information from the yaml - name = yaml_object['metadata']['name'] - version_split = yaml_object['apiVersion'].split("/") + name = yaml_object["metadata"]["name"] + version_split = yaml_object["apiVersion"].split("/") group = version_split[0] version = version_split[1] - plural = yaml_object['kind'].lower() + "s" + plural = yaml_object["kind"].lower() + "s" if not namespace: - namespace = yaml_object['metadata'].get('namespace', 'default') + namespace = yaml_object["metadata"].get("namespace", "default") # create the resource ns_obj = api.create_namespaced_custom_object( @@ -80,4 +84,4 @@ def from_client_upload_service(upload_file=None, namespace=None, group=None, ver plural=plural, body=yaml_object, ) - return ns_obj \ No newline at end of file + return ns_obj diff --git 
a/api/server/swagger_server/gateways/kubeflow_pipeline_service.py b/api/server/swagger_server/gateways/kubeflow_pipeline_service.py index 93b3552a..df2d7745 100644 --- a/api/server/swagger_server/gateways/kubeflow_pipeline_service.py +++ b/api/server/swagger_server/gateways/kubeflow_pipeline_service.py @@ -6,7 +6,7 @@ import json import os import re -import yaml +import yaml # noqa: F401 from datetime import datetime @@ -17,15 +17,21 @@ from kfp_server_api import ApiPipeline as KfpPipeline from kfp_server_api.rest import ApiException as PipelineApiException -from os import environ as env +from os import environ as env # noqa: F401 from os.path import abspath, join, dirname from string import Template from swagger_server.data_access.mysql_client import generate_id -from swagger_server.data_access.minio_client import get_object_url,\ - create_tarfile, store_file, _host as minio_host, _port as minio_port,\ - _access_key as minio_access_key, _secret_key as minio_secret_key,\ - retrieve_file_content +from swagger_server.data_access.minio_client import ( + get_object_url, + create_tarfile, + store_file, + _host as minio_host, + _port as minio_port, + _access_key as minio_access_key, + _secret_key as minio_secret_key, + retrieve_file_content, +) from swagger_server.models import ApiDataset from swagger_server.models.api_component import ApiComponent from swagger_server.models.api_model import ApiModel @@ -41,20 +47,28 @@ CODE_TEMPLATE_DIR = abspath(join(dirname(__file__), "..", "code_templates")) _namespace = env.get("POD_NAMESPACE", "kubeflow") -_host = env.get("ML_PIPELINE_SERVICE_HOST", "ml-pipeline.%s.svc.cluster.local" % _namespace) +_host = env.get( + "ML_PIPELINE_SERVICE_HOST", "ml-pipeline.%s.svc.cluster.local" % _namespace +) _port = env.get("ML_PIPELINE_SERVICE_PORT", "8888") _api_base_path = env.get("ML_PIPELINE_SERVICE_API_BASE_PATH", "") -_pipeline_service_url = env.get("ML_PIPELINE_SERVICE_URL", f"{_host}:{_port}/{_api_base_path}".rstrip("/")) 
+_pipeline_service_url = env.get( + "ML_PIPELINE_SERVICE_URL", f"{_host}:{_port}/{_api_base_path}".rstrip("/") +) -def upload_pipeline_to_kfp(uploadfile: str, name: str = None, description: str = None) -> ApiPipeline: +def upload_pipeline_to_kfp( + uploadfile: str, name: str = None, description: str = None +) -> ApiPipeline: kfp_client = KfpClient() try: - kfp_pipeline: KfpPipeline = kfp_client.upload_pipeline(pipeline_package_path=uploadfile, - pipeline_name=name, - description=description) + kfp_pipeline: KfpPipeline = kfp_client.upload_pipeline( + pipeline_package_path=uploadfile, + pipeline_name=name, + description=description, + ) api_pipeline: ApiPipeline = ApiPipeline.from_dict(kfp_pipeline.to_dict()) api_pipeline.status = kfp_pipeline.error return api_pipeline @@ -62,11 +76,15 @@ def upload_pipeline_to_kfp(uploadfile: str, name: str = None, description: str = except PipelineApiException as e: kfp_host = _pipeline_service_url - print(f"Error calling PipelineServiceApi ({kfp_host}) -> upload_pipeline(name='{name}'): {e}") + print( + f"Error calling PipelineServiceApi ({kfp_host}) -> upload_pipeline(name='{name}'): {e}" + ) error_body = json.loads(e.body) or {"error_message": str(e)} error_msg = error_body["error_message"] - status_code = 409 if "already exist. Please specify a new name" in error_msg else e.status + status_code = ( + 409 if "already exist. 
Please specify a new name" in error_msg else e.status + ) raise ApiError(error_msg, status_code) @@ -87,8 +105,13 @@ def delete_kfp_pipeline(pipeline_id: str): except PipelineApiException as e: kfp_host = api_instance.api_client.configuration.host - print(f"Exception when calling PipelineServiceApi ({kfp_host}) -> delete_pipeline: %s\n" % e) - raise ApiError(message=f"{e.body}\nKFP URL: {kfp_host}", http_status_code=e.status or 422) + print( + f"Exception when calling PipelineServiceApi ({kfp_host}) -> delete_pipeline: %s\n" + % e + ) + raise ApiError( + message=f"{e.body}\nKFP URL: {kfp_host}", http_status_code=e.status or 422 + ) def quote_string_value(value): @@ -106,7 +129,9 @@ def generate_method_arg_from_parameter(parameter): if parameter.value or parameter.default: value = quote_string_value(parameter.value or parameter.default) arg = f"{param_name}={value}" - elif parameter.value == '' or parameter.default == '': # TODO: should empty string != None ? + elif ( + parameter.value == "" or parameter.default == "" + ): # TODO: should empty string != None ? 
arg = f"{param_name}=''" else: arg = param_name @@ -124,8 +149,12 @@ def generate_pipeline_method_args(parameters: [ApiParameter]) -> str: return ",\n ".join(args) -def generate_component_run_script(component: ApiComponent, component_template_url, run_parameters=dict(), - run_name: str = None): +def generate_component_run_script( + component: ApiComponent, + component_template_url, + run_parameters=dict(), + run_name: str = None, +): name = component.name + " " + generate_id(length=4) description = component.description.strip() @@ -134,19 +163,25 @@ def generate_component_run_script(component: ApiComponent, component_template_ur parameter_names = ",".join([sanitize(p.name) for p in component.parameters]) - parameter_dict = json.dumps({sanitize(p.name): run_parameters.get(p.name) or p.default or "" - for p in component.parameters}, - indent=4).replace('"', "'") + parameter_dict = json.dumps( + { + sanitize(p.name): run_parameters.get(p.name) or p.default or "" + for p in component.parameters + }, + indent=4, + ).replace('"', "'") - pipeline_server = "" if "POD_NAMESPACE" in os.environ else f"'{_pipeline_service_url}'" + pipeline_server = ( + "" if "POD_NAMESPACE" in os.environ else f"'{_pipeline_service_url}'" + ) - run_name = (run_name or "").replace("'", "\"") or component.name + run_name = (run_name or "").replace("'", '"') or component.name substitutions = dict(locals()) template_file = f"run_component.TEMPLATE.py" - with open(join(CODE_TEMPLATE_DIR, template_file), 'r') as f: + with open(join(CODE_TEMPLATE_DIR, template_file), "r") as f: template_raw = f.read() template_rendered = Template(template_raw).substitute(substitutions) @@ -156,7 +191,9 @@ def generate_component_run_script(component: ApiComponent, component_template_ur return run_script -def generate_custom_pipeline_function_body(custom_pipeline: ApiPipelineCustom, hide_secrets=True): +def generate_custom_pipeline_function_body( + custom_pipeline: ApiPipelineCustom, hide_secrets=True +): function_body 
= """ from kfp import components @@ -176,22 +213,35 @@ def generate_custom_pipeline_function_body(custom_pipeline: ApiPipelineCustom, h if task.artifact_type == "notebook": component_s3_prefix = f"components/jupyter/" - notebook_url = get_object_url(bucket_name="mlpipeline", - prefix=f"notebooks/{task.artifact_id}/", - file_extensions=[".ipynb"]) + notebook_url = get_object_url( + bucket_name="mlpipeline", + prefix=f"notebooks/{task.artifact_id}/", + file_extensions=[".ipynb"], + ) if not notebook_url: raise ApiError(f"Could not find notebook '{task.artifact_id}'") - task_parameters = list(task.arguments.parameters) if task.arguments and task.arguments.parameters else [] + task_parameters = ( + list(task.arguments.parameters) + if task.arguments and task.arguments.parameters + else [] + ) for p in task_parameters: if type(p.value) == str and p.value.startswith("{{inputs.parameters."): - raise ApiError("Referencing '{{inputs.parameters.*}}' is not supported for notebook parameter" - f" values: {task.to_dict()}", 422) - - notebook_parameters = {p.name: p.value or p.default for p in task_parameters} - notebook_parameters_str = json.dumps(notebook_parameters) if notebook_parameters else "" + raise ApiError( + "Referencing '{{inputs.parameters.*}}' is not supported for notebook parameter" + f" values: {task.to_dict()}", + 422, + ) + + notebook_parameters = { + p.name: p.value or p.default for p in task_parameters + } + notebook_parameters_str = ( + json.dumps(notebook_parameters) if notebook_parameters else "" + ) jupyter_component_parameters = { "notebook_url": notebook_url, @@ -201,23 +251,25 @@ def generate_custom_pipeline_function_body(custom_pipeline: ApiPipelineCustom, h "bucket_name": "", "object_name": "", "access_key": "", - "secret_access_key": "" + "secret_access_key": "", } if not hide_secrets: output_folder = f"notebooks/{task.artifact_id}/runs/{datetime.now().strftime('%Y%m%d-%H%M%S')}" notebook_file_name = notebook_url.split("/")[-1] - output_file_name = 
notebook_file_name.replace(r'.ipynb', '_out.ipynb') + output_file_name = notebook_file_name.replace(r".ipynb", "_out.ipynb") output_file_path = f"{output_folder}/{output_file_name}" output_bucket = "mlpipeline" - jupyter_component_parameters.update({ - "endpoint_url": "minio-service:9000", # f"{minio_host}:{minio_port}", - "bucket_name": output_bucket, - "object_name": output_file_path, - "access_key": minio_access_key, - "secret_access_key": minio_secret_key - }) + jupyter_component_parameters.update( + { + "endpoint_url": "minio-service:9000", # f"{minio_host}:{minio_port}", + "bucket_name": output_bucket, + "object_name": output_file_path, + "access_key": minio_access_key, + "secret_access_key": minio_secret_key, + } + ) for name, value in jupyter_component_parameters.items(): parameters.append(f"{name} = '{value}'") @@ -226,24 +278,42 @@ def generate_custom_pipeline_function_body(custom_pipeline: ApiPipelineCustom, h component_s3_prefix = f"components/{task.artifact_id}/" # replace parameter values that reference pipeline input parameters {{inputs.parameters.parameter_name}} - task_parameters = list(task.arguments.parameters) if task.arguments and task.arguments.parameters else [] - - missing_parameter_values = [p.name for p in task_parameters - if not p.value and not p.default and p.description \ - and p.description.title().startswith("Required")] + task_parameters = ( + list(task.arguments.parameters) + if task.arguments and task.arguments.parameters + else [] + ) + + missing_parameter_values = [ + p.name + for p in task_parameters + if not p.value + and not p.default + and p.description + and p.description.title().startswith("Required") + ] if missing_parameter_values: - raise ApiError(f"Missing required task parameters {missing_parameter_values}", 422) + raise ApiError( + f"Missing required task parameters {missing_parameter_values}", 422 + ) for p in task_parameters: if type(p.value) == str and p.value.startswith("{{inputs.parameters."): - match = 
re.match(r"{{inputs.parameters.(?P\w+)}}", p.value) + match = re.match( + r"{{inputs.parameters.(?P\w+)}}", + p.value, + ) if not match: - raise ApiError(f"Cannot match pipeline input.parameter '{p.value}'", 422) + raise ApiError( + f"Cannot match pipeline input.parameter '{p.value}'", 422 + ) - pipeline_param_ref = match.groupdict().get("pipeline_parameter_name") + pipeline_param_ref = match.groupdict().get( + "pipeline_parameter_name" + ) parameters.append(f"{p.name} = {pipeline_param_ref}") else: @@ -251,14 +321,19 @@ def generate_custom_pipeline_function_body(custom_pipeline: ApiPipelineCustom, h parameters.append(arg) else: - raise ApiError(f"Unknown or unsupported artifact_type '{task.artifact_type}':\n'{task}'", 422) + raise ApiError( + f"Unknown or unsupported artifact_type '{task.artifact_type}':\n'{task}'", + 422, + ) comp_name = "comp_" + re.sub(r"\W+", "_", task.name, flags=re.ASCII).lower() op_name = "op_" + re.sub(r"\W+", "_", task.name, flags=re.ASCII).lower() - template_url = get_object_url(bucket_name="mlpipeline", - prefix=component_s3_prefix, - file_extensions=[".yaml", ".yml"]) + template_url = get_object_url( + bucket_name="mlpipeline", + prefix=component_s3_prefix, + file_extensions=[".yaml", ".yml"], + ) if not template_url: raise ApiError(f"Could not find component template '{component_s3_prefix}'") @@ -267,7 +342,7 @@ def generate_custom_pipeline_function_body(custom_pipeline: ApiPipelineCustom, h "comp_name": comp_name, "op_name": op_name, "template_url": template_url, - "component_args": ", ".join(parameters) + "component_args": ", ".join(parameters), } template_rendered = Template(component_template_raw).substitute(substitutions) function_body += template_rendered @@ -275,30 +350,48 @@ def generate_custom_pipeline_function_body(custom_pipeline: ApiPipelineCustom, h for task in custom_pipeline.dag.tasks: for required_task_name in task.dependencies or []: substitutions = { - "op_name": "op_" + re.sub(r"\W+", "_", task.name, 
flags=re.ASCII).lower(), - "required_op_name": "op_" + re.sub(r"\W+", "_", required_task_name, flags=re.ASCII).lower() + "op_name": "op_" + + re.sub(r"\W+", "_", task.name, flags=re.ASCII).lower(), + "required_op_name": "op_" + + re.sub(r"\W+", "_", required_task_name, flags=re.ASCII).lower(), } - template_rendered = Template(op_dependency_template_raw).substitute(substitutions) + template_rendered = Template(op_dependency_template_raw).substitute( + substitutions + ) function_body += template_rendered return function_body -def generate_custom_pipeline_run_script(custom_pipeline: ApiPipelineCustom, run_parameters=dict(), - run_name: str = None, hide_secrets=True): +def generate_custom_pipeline_run_script( + custom_pipeline: ApiPipelineCustom, + run_parameters=dict(), + run_name: str = None, + hide_secrets=True, +): name = custom_pipeline.name description = (custom_pipeline.description or "").strip() - pipeline_method_args = generate_pipeline_method_args(custom_pipeline.inputs.parameters) + pipeline_method_args = generate_pipeline_method_args( + custom_pipeline.inputs.parameters + ) - parameter_dict = json.dumps({p.name: run_parameters.get(p.name) or p.value or p.default # or "" - for p in custom_pipeline.inputs.parameters}, - indent=4).replace(': null', ': None') + parameter_dict = json.dumps( + { + p.name: run_parameters.get(p.name) or p.value or p.default # or "" + for p in custom_pipeline.inputs.parameters + }, + indent=4, + ).replace(": null", ": None") - pipeline_function_body = generate_custom_pipeline_function_body(custom_pipeline, hide_secrets) + pipeline_function_body = generate_custom_pipeline_function_body( + custom_pipeline, hide_secrets + ) - pipeline_server = "" if "POD_NAMESPACE" in os.environ else f"'{_pipeline_service_url}'" + pipeline_server = ( + "" if "POD_NAMESPACE" in os.environ else f"'{_pipeline_service_url}'" + ) run_name = run_name or custom_pipeline.name @@ -306,7 +399,7 @@ def generate_custom_pipeline_run_script(custom_pipeline: 
ApiPipelineCustom, run_ template_file = f"run_pipeline.TEMPLATE.py" - with open(join(CODE_TEMPLATE_DIR, template_file), 'r') as f: + with open(join(CODE_TEMPLATE_DIR, template_file), "r") as f: template_raw = f.read() template_rendered = Template(template_raw).substitute(substitutions) @@ -316,8 +409,13 @@ def generate_custom_pipeline_run_script(custom_pipeline: ApiPipelineCustom, run_ return run_script -def generate_dataset_run_script(dataset: ApiDataset, dataset_template_url, run_parameters=dict(), - run_name: str = None, fail_on_missing_prereqs=False): +def generate_dataset_run_script( + dataset: ApiDataset, + dataset_template_url, + run_parameters=dict(), + run_name: str = None, + fail_on_missing_prereqs=False, +): name = f"{dataset.name} ({generate_id(length=4)})" description = dataset.description.strip().replace("'", "\\'") @@ -326,8 +424,10 @@ def generate_dataset_run_script(dataset: ApiDataset, dataset_template_url, run_p # dataset_parameters = dataset.parameters # TODO: ApiParameters should not be defined here - dataset_parameters = [ApiParameter(name="action", default="create"), - ApiParameter(name="namespace", default=_namespace)] + dataset_parameters = [ + ApiParameter(name="action", default="create"), + ApiParameter(name="namespace", default=_namespace), + ] pipeline_method_args = generate_pipeline_method_args(dataset_parameters) @@ -336,22 +436,26 @@ def generate_dataset_run_script(dataset: ApiDataset, dataset_template_url, run_p # TODO: the action parameter is required by DLF-to-PVC op, so it should not be dynamically generated here parameter_dict = { "action": "create", - "namespace": run_parameters.get("namespace", _namespace) + "namespace": run_parameters.get("namespace", _namespace), } - # see component name at https://github.com/machine-learning-exchange/mlx/blob/main/components/component-samples/dax-to-dlf/component.yaml#L1 + # see component name at 
https://github.com/machine-learning-exchange/mlx/blob/main/components/component-samples/dax-to-dlf/component.yaml#L1 # noqa: E501 dax_to_dlf_component_id = generate_id(name="Generate Dataset Metadata") - # see component name at https://github.com/machine-learning-exchange/mlx/blob/main/components/component-samples/dlf/component.yaml#L1 + # see component name at https://github.com/machine-learning-exchange/mlx/blob/main/components/component-samples/dlf/component.yaml#L1 # noqa: E501 dlf_to_pvc_component_id = generate_id(name="Create Dataset Volume") - dax_to_dlf_component_url = get_object_url(bucket_name="mlpipeline", - prefix=f"components/{dax_to_dlf_component_id}/", - file_extensions=[".yaml"]) + dax_to_dlf_component_url = get_object_url( + bucket_name="mlpipeline", + prefix=f"components/{dax_to_dlf_component_id}/", + file_extensions=[".yaml"], + ) - dlf_to_pvc_component_url = get_object_url(bucket_name="mlpipeline", - prefix=f"components/{dlf_to_pvc_component_id}/", - file_extensions=[".yaml"]) + dlf_to_pvc_component_url = get_object_url( + bucket_name="mlpipeline", + prefix=f"components/{dlf_to_pvc_component_id}/", + file_extensions=[".yaml"], + ) if fail_on_missing_prereqs: @@ -363,15 +467,17 @@ def generate_dataset_run_script(dataset: ApiDataset, dataset_template_url, run_p namespace = run_parameters.get("namespace", _namespace) - pipeline_server = "" if "POD_NAMESPACE" in os.environ else f"'{_pipeline_service_url}'" + pipeline_server = ( + "" if "POD_NAMESPACE" in os.environ else f"'{_pipeline_service_url}'" + ) - run_name = (run_name or "").replace("'", "\"") or dataset.name + run_name = (run_name or "").replace("'", '"') or dataset.name substitutions = dict(locals()) template_file = f"run_dataset.TEMPLATE.py" - with open(join(CODE_TEMPLATE_DIR, template_file), 'r') as f: + with open(join(CODE_TEMPLATE_DIR, template_file), "r") as f: template_raw = f.read() template_rendered = Template(template_raw).substitute(substitutions) @@ -381,27 +487,43 @@ def 
generate_dataset_run_script(dataset: ApiDataset, dataset_template_url, run_p return run_script -def generate_model_run_script(model: ApiModel, pipeline_stage: str, execution_platform: str, - run_name: str = None, parameters=dict(), hide_secrets=True): - - if pipeline_stage == "serve" and model.servable_credentials_required or \ - pipeline_stage == "train" and model.trainable_credentials_required: - - template_file = f"{pipeline_stage}_{execution_platform.lower()}_w_credentials.TEMPLATE.py" +def generate_model_run_script( + model: ApiModel, + pipeline_stage: str, + execution_platform: str, + run_name: str = None, + parameters=dict(), + hide_secrets=True, +): + + if ( + pipeline_stage == "serve" + and model.servable_credentials_required + or pipeline_stage == "train" + and model.trainable_credentials_required + ): + + template_file = ( + f"{pipeline_stage}_{execution_platform.lower()}_w_credentials.TEMPLATE.py" + ) else: template_file = f"{pipeline_stage}_{execution_platform.lower()}.TEMPLATE.py" - with open(join(CODE_TEMPLATE_DIR, template_file), 'r') as f: + with open(join(CODE_TEMPLATE_DIR, template_file), "r") as f: template_raw = f.read() - pipeline_server = "" if "POD_NAMESPACE" in os.environ else f"'{_pipeline_service_url}'" + pipeline_server = ( + "" if "POD_NAMESPACE" in os.environ else f"'{_pipeline_service_url}'" + ) substitutions = { "model_identifier": model.id, "pipeline_server": pipeline_server, # "model_name": "maintenance-model-pg", # TODO: generate_id(name=model.name), "run_name": run_name or model.id, - "generated_secret": "" if hide_secrets else f"secret-{generate_id(length=8).lower()}" + "generated_secret": "" + if hide_secrets + else f"secret-{generate_id(length=8).lower()}", } model_parameters = [] @@ -422,51 +544,65 @@ def generate_model_run_script(model: ApiModel, pipeline_stage: str, execution_pl return run_script -def generate_notebook_run_script(api_notebook: ApiNotebook, - parameters: dict = {}, - run_name: str = None, - hide_secrets: 
bool = True): +def generate_notebook_run_script( + api_notebook: ApiNotebook, + parameters: dict = {}, + run_name: str = None, + hide_secrets: bool = True, +): if "dataset_pvc" in parameters: template_file = "run_notebook_with_dataset.TEMPLATE.py" else: template_file = "run_notebook.TEMPLATE.py" - with open(join(CODE_TEMPLATE_DIR, template_file), 'r') as f: + with open(join(CODE_TEMPLATE_DIR, template_file), "r") as f: template_raw = f.read() notebook_file = api_notebook.url.split("/")[-1] - requirements_url = get_object_url(bucket_name="mlpipeline", - prefix=f"notebooks/{api_notebook.id}/", - file_extensions=[".txt"], - file_name_filter="requirements") + requirements_url = get_object_url( + bucket_name="mlpipeline", + prefix=f"notebooks/{api_notebook.id}/", + file_extensions=[".txt"], + file_name_filter="requirements", + ) - cos_dependencies_archive_url = get_object_url(bucket_name="mlpipeline", - prefix=f"notebooks/{api_notebook.id}/", - file_extensions=[".tar.gz"], - file_name_filter="elyra-dependencies-archive") + cos_dependencies_archive_url = get_object_url( + bucket_name="mlpipeline", + prefix=f"notebooks/{api_notebook.id}/", + file_extensions=[".tar.gz"], + file_name_filter="elyra-dependencies-archive", + ) if not cos_dependencies_archive_url: - tar, bytes_io = create_tarfile(bucket_name="mlpipeline", - prefix=f"notebooks/{api_notebook.id}/", - file_extensions=[".ipynb"]) + tar, bytes_io = create_tarfile( + bucket_name="mlpipeline", + prefix=f"notebooks/{api_notebook.id}/", + file_extensions=[".ipynb"], + ) - cos_dependencies_archive_url = store_file(bucket_name="mlpipeline", - prefix=f"notebooks/{api_notebook.id}/", - file_name="elyra-dependencies-archive.tar.gz", - file_content=bytes_io.getvalue()) + cos_dependencies_archive_url = store_file( + bucket_name="mlpipeline", + prefix=f"notebooks/{api_notebook.id}/", + file_name="elyra-dependencies-archive.tar.gz", + file_content=bytes_io.getvalue(), + ) cos_dependencies_archive = 
cos_dependencies_archive_url.split("/")[-1] # TODO: move this into a ApiNotebook.image as opposed to parsing yaml here - yaml_file_content = retrieve_file_content(bucket_name="mlpipeline", - prefix=f"notebooks/{api_notebook.id}/", - file_extensions=[".yaml", ".yml"]) + yaml_file_content = retrieve_file_content( + bucket_name="mlpipeline", + prefix=f"notebooks/{api_notebook.id}/", + file_extensions=[".yaml", ".yml"], + ) metadata_yaml = yaml.load(yaml_file_content, Loader=yaml.FullLoader) - image = metadata_yaml["implementation"]["github"].get("image", "tensorflow/tensorflow:latest") + image = metadata_yaml["implementation"]["github"].get( + "image", "tensorflow/tensorflow:latest" + ) # TODO: elyra-ai/kfp-notebook generates output notebook as: "-output.ipynb" # https://github.com/elyra-ai/kfp-notebook/blob/c8f1298/etc/docker-scripts/bootstrapper.py#L188-L190 @@ -477,7 +613,7 @@ def generate_notebook_run_script(api_notebook: ApiNotebook, # output_file_url = f"http://{minio_host}:{minio_port}/mlpipeline/{output_file_path}" # TODO: do we really need this url: - # client = TektonClient(${pipeline_server}) + # client = TektonClient(${pipeline_server}) # noqa: E999 # vs: # client = TektonClient() # ... kfp.Client can figure out the in-cluster IP and port automatically @@ -496,44 +632,63 @@ def generate_notebook_run_script(api_notebook: ApiNotebook, "requirements_url": requirements_url or "", "image": image, "pipeline_server": kfp_url, - "run_name": run_name or api_notebook.name + "run_name": run_name or api_notebook.name, } # TODO: make the `dataset_pvc` and `mount_path` parameters part of the Swagger spec? 
if "dataset_pvc" in parameters: - substitutions.update({ - "dataset_pvc": parameters["dataset_pvc"], - "mount_path": parameters.get("mount_path", "/tmp/data") - }) + substitutions.update( + { + "dataset_pvc": parameters["dataset_pvc"], + "mount_path": parameters.get("mount_path", "/tmp/data"), + } + ) if not hide_secrets: - substitutions.update({ - "cos_endpoint": f"http://{minio_host}:{minio_port}/minio", - "cos_username": minio_access_key, - "cos_password": minio_secret_key - }) + substitutions.update( + { + "cos_endpoint": f"http://{minio_host}:{minio_port}/minio", + "cos_username": minio_access_key, + "cos_password": minio_secret_key, + } + ) run_script = Template(template_raw).substitute(substitutions) return run_script -def run_component_in_experiment(component: ApiComponent, component_template_url: str, parameters: dict, - run_name: str = None, wait_for_status: bool = False): +def run_component_in_experiment( + component: ApiComponent, + component_template_url: str, + parameters: dict, + run_name: str = None, + wait_for_status: bool = False, +): - source_code = generate_component_run_script(component, component_template_url, parameters, run_name) + source_code = generate_component_run_script( + component, component_template_url, parameters, run_name + ) return run_code_in_experiment(source_code, wait_for_status) -def run_custom_pipeline_in_experiment(custom_pipeline: ApiPipelineCustom, run_name: str, parameters: dict, - wait_for_status: bool = False): +def run_custom_pipeline_in_experiment( + custom_pipeline: ApiPipelineCustom, + run_name: str, + parameters: dict, + wait_for_status: bool = False, +): try: - source_code = generate_custom_pipeline_run_script(custom_pipeline, parameters, run_name, hide_secrets=False) + source_code = generate_custom_pipeline_run_script( + custom_pipeline, parameters, run_name, hide_secrets=False + ) except Exception as e: # TODO: remove this debug logging for development only - print(f"Error trying to generate code for custom 
pipeline run '{run_name or custom_pipeline.name}': {e}") + print( + f"Error trying to generate code for custom pipeline run '{run_name or custom_pipeline.name}': {e}" + ) print(custom_pipeline) print(parameters) raise e @@ -542,17 +697,21 @@ def run_custom_pipeline_in_experiment(custom_pipeline: ApiPipelineCustom, run_na run_id = run_code_in_experiment(source_code, wait_for_status) except SyntaxError as e: - print(f"SyntaxError trying to run pipeline DSL '{run_name or custom_pipeline.name}': {e}") + print( + f"SyntaxError trying to run pipeline DSL '{run_name or custom_pipeline.name}': {e}" + ) print(source_code) print("Custom pipeline payload:") print(custom_pipeline) - raise ApiError(f"SyntaxError trying to run pipeline DSL: {e.msg}\n" - f"{source_code}", - 500) + raise ApiError( + f"SyntaxError trying to run pipeline DSL: {e.msg}\n" f"{source_code}", 500 + ) except Exception as e: # TODO: remove this debug logging for development only - print(f"Error trying to run custom pipeline code '{run_name or custom_pipeline.name}': {e}") + print( + f"Error trying to run custom pipeline code '{run_name or custom_pipeline.name}': {e}" + ) print(custom_pipeline) print(source_code) raise e @@ -566,61 +725,107 @@ def run_custom_pipeline_in_experiment(custom_pipeline: ApiPipelineCustom, run_na return run_id -def run_dataset_in_experiment(dataset: ApiDataset, dataset_template_url: str, parameters: dict = {}, - run_name: str = None, wait_for_status: bool = False): +def run_dataset_in_experiment( + dataset: ApiDataset, + dataset_template_url: str, + parameters: dict = {}, + run_name: str = None, + wait_for_status: bool = False, +): - source_code = generate_dataset_run_script(dataset, dataset_template_url, parameters, run_name, - fail_on_missing_prereqs=True) + source_code = generate_dataset_run_script( + dataset, + dataset_template_url, + parameters, + run_name, + fail_on_missing_prereqs=True, + ) return run_code_in_experiment(source_code, wait_for_status) -def 
run_model_in_experiment(model: ApiModel, pipeline_stage: str, execution_platform: str, run_name: str = None, - parameters: dict = None, wait_for_status: bool = False): - - source_code = generate_model_run_script(model, pipeline_stage, execution_platform.lower(), run_name, parameters, - hide_secrets=False) +def run_model_in_experiment( + model: ApiModel, + pipeline_stage: str, + execution_platform: str, + run_name: str = None, + parameters: dict = None, + wait_for_status: bool = False, +): + + source_code = generate_model_run_script( + model, + pipeline_stage, + execution_platform.lower(), + run_name, + parameters, + hide_secrets=False, + ) return run_code_in_experiment(source_code, wait_for_status) -def run_notebook_in_experiment(notebook: ApiNotebook, parameters: dict, run_name: str, - wait_for_status: bool = False): +def run_notebook_in_experiment( + notebook: ApiNotebook, + parameters: dict, + run_name: str, + wait_for_status: bool = False, +): - source_code = generate_notebook_run_script(notebook, parameters, run_name, hide_secrets=False) + source_code = generate_notebook_run_script( + notebook, parameters, run_name, hide_secrets=False + ) return run_code_in_experiment(source_code, wait_for_status) -def run_pipeline_in_experiment(api_pipeline: ApiPipeline, parameters: dict = None, run_name: str = None, - namespace: str = None, wait_for_status: bool = False): +def run_pipeline_in_experiment( + api_pipeline: ApiPipeline, + parameters: dict = None, + run_name: str = None, + namespace: str = None, + wait_for_status: bool = False, +): try: client = KfpClient() # if not namespace: ... 
client._context_setting['namespace'] and client.get_kfp_healthz().multi_user is True: - experiment = client.create_experiment('PIPELINE_RUNS', namespace=namespace) - run_result = client.run_pipeline(experiment_id=experiment.id, - job_name=run_name or api_pipeline.name, - params=parameters, - pipeline_id=api_pipeline.id) + experiment = client.create_experiment("PIPELINE_RUNS", namespace=namespace) + run_result = client.run_pipeline( + experiment_id=experiment.id, + job_name=run_name or api_pipeline.name, + params=parameters, + pipeline_id=api_pipeline.id, + ) run_id = run_result.id if wait_for_status: run_details = wait_for_run_status(client, run_id, 10) - run_status = json.loads(run_details.pipeline_runtime.workflow_manifest)["status"] - - if run_status \ - and run_status.get("phase", "").lower() in ["failed", "error"] \ - and run_status.get("message"): - raise RuntimeError(f"Run {run_id} failed with error: {run_status['message']}") + run_status = json.loads(run_details.pipeline_runtime.workflow_manifest)[ + "status" + ] + + if ( + run_status + and run_status.get("phase", "").lower() in ["failed", "error"] + and run_status.get("message") + ): + raise RuntimeError( + f"Run {run_id} failed with error: {run_status['message']}" + ) return run_id except Exception as e: - print(f"Exception trying to run pipeline {api_pipeline.id} '{api_pipeline.name}'" - f" with parameters {parameters}:" - f" %s\n" % e) - raise ApiError(message=f"{e.body}\nKFP URL: {_pipeline_service_url}", http_status_code=e.status or 422) + print( + f"Exception trying to run pipeline {api_pipeline.id} '{api_pipeline.name}'" + f" with parameters {parameters}:" + f" %s\n" % e + ) + raise ApiError( + message=f"{e.body}\nKFP URL: {_pipeline_service_url}", + http_status_code=e.status or 422, + ) return None @@ -634,8 +839,14 @@ def run_code_in_experiment(source_code: str, wait_for_status=False) -> str: except SyntaxError as e: print(f"SyntaxError trying to run_code_in_experiment: {e}") - 
print("\n".join(["{}{:3d}: {}".format(">" if n+1 == e.lineno else " ", n+1, l) - for n, l in enumerate(source_code.splitlines())])) + print( + "\n".join( + [ + "{}{:3d}: {}".format(">" if n + 1 == e.lineno else " ", n + 1, l) + for n, l in enumerate(source_code.splitlines()) + ] + ) + ) # raise ApiError(f"SyntaxError trying to run_code_in_experiment: {e.msg}\n" # f"{source_code}", 500) # don't reveal internal code template to users @@ -647,15 +858,21 @@ def run_code_in_experiment(source_code: str, wait_for_status=False) -> str: if wait_for_status: - client: KfpClient = exec_locals['client'] + client: KfpClient = exec_locals["client"] run_details = wait_for_run_status(client, run_id, 10) - run_status = json.loads(run_details.pipeline_runtime.workflow_manifest)["status"] - - if run_status \ - and run_status.get("phase", "").lower() in ["failed", "error"] \ - and run_status.get("message"): - - raise RuntimeError(f"Run {run_id} failed with error: {run_status['message']}") + run_status = json.loads(run_details.pipeline_runtime.workflow_manifest)[ + "status" + ] + + if ( + run_status + and run_status.get("phase", "").lower() in ["failed", "error"] + and run_status.get("message") + ): + + raise RuntimeError( + f"Run {run_id} failed with error: {run_status['message']}" + ) return run_id diff --git a/api/server/swagger_server/gateways/kubernetes_service.py b/api/server/swagger_server/gateways/kubernetes_service.py index 80f073bb..11755819 100644 --- a/api/server/swagger_server/gateways/kubernetes_service.py +++ b/api/server/swagger_server/gateways/kubernetes_service.py @@ -6,8 +6,8 @@ import subprocess from base64 import b64decode -from os import environ as env -from pprint import pprint +from os import environ as env # noqa: F401 +from pprint import pprint # noqa: F401 from swagger_server.util import ApiError @@ -18,14 +18,22 @@ def create_secret(secret_name: str, secret_contents: dict): try: - command = ['kubectl', '-n', _namespace, 'create', 'secret', 'generic', 
secret_name] + command = [ + "kubectl", + "-n", + _namespace, + "create", + "secret", + "generic", + secret_name, + ] for key, value in secret_contents.items(): if type(value) == dict: raise ApiError(f"Secret values must not be of type 'dict'") if type(value) == list: value = ",".join([str(v) for v in value]) if type(value) == str and " " in value: - value = f"\"{value}\"" + value = f'"{value}"' command.append(f"--from-literal={key}={value or ''}") output = subprocess.run(command, capture_output=True, check=True, timeout=10) pprint(output.stdout.decode()) @@ -40,8 +48,10 @@ def delete_secret(secret_name): return delete_all_secrets() output = None try: - delete_command = ['kubectl', 'delete', '-n', _namespace, 'secret', secret_name] - output = subprocess.run(delete_command, capture_output=True, check=True, timeout=10) + delete_command = ["kubectl", "delete", "-n", _namespace, "secret", secret_name] + output = subprocess.run( + delete_command, capture_output=True, check=True, timeout=10 + ) print(f"Credential {secret_name} was deleted") except Exception as e: if output and output.stderr: @@ -59,8 +69,19 @@ def delete_all_secrets(name_prefix=secret_name_prefix): def get_secret(secret_name, decode=False) -> dict: output = None try: - get_command = ['kubectl', '-n', _namespace, '-o', 'json', 'get', 'secret', secret_name] - output = subprocess.run(get_command, capture_output=True, check=True, timeout=10) + get_command = [ + "kubectl", + "-n", + _namespace, + "-o", + "json", + "get", + "secret", + secret_name, + ] + output = subprocess.run( + get_command, capture_output=True, check=True, timeout=10 + ) secret_data = json.loads(output.stdout.decode()) or {} secret = secret_data.get("data") if decode: @@ -70,17 +91,22 @@ def get_secret(secret_name, decode=False) -> dict: except Exception as e: if output and output.stderr: pprint(output.stderr.decode()) - raise ApiError( f"Error trying to retrieve secret '{secret_name}': {e}") + raise ApiError(f"Error trying to retrieve 
secret '{secret_name}': {e}") def list_secrets(name_prefix=secret_name_prefix, decode=False) -> [dict]: output = None try: - list_command = ['kubectl', '-n', _namespace, '-o', 'json', 'get', 'secrets'] - output = subprocess.run(list_command, capture_output=True, check=True, timeout=10) + list_command = ["kubectl", "-n", _namespace, "-o", "json", "get", "secrets"] + output = subprocess.run( + list_command, capture_output=True, check=True, timeout=10 + ) secrets_data = json.loads(output.stdout.decode()) or {} - mlx_secrets = [d for d in secrets_data.get("items") or [] - if d["metadata"]["name"].startswith(name_prefix)] + mlx_secrets = [ + d + for d in secrets_data.get("items") or [] + if d["metadata"]["name"].startswith(name_prefix) + ] if decode: for s in mlx_secrets: for k, v in s["data"].items(): diff --git a/api/server/swagger_server/models/__init__.py b/api/server/swagger_server/models/__init__.py index 1b79a5d3..4d693f2f 100644 --- a/api/server/swagger_server/models/__init__.py +++ b/api/server/swagger_server/models/__init__.py @@ -5,6 +5,7 @@ # flake8: noqa from __future__ import absolute_import + # import models into model package from swagger_server.models.any_value import AnyValue from swagger_server.models.api_access_token import ApiAccessToken @@ -14,20 +15,31 @@ from swagger_server.models.api_model import ApiModel from swagger_server.models.api_notebook import ApiNotebook from swagger_server.models.api_pipeline import ApiPipeline + # import ApiAsset(s) before ApiCatalog... 
classes to prevent circular import errors from swagger_server.models.api_catalog_upload import ApiCatalogUpload from swagger_server.models.api_catalog_upload_item import ApiCatalogUploadItem from swagger_server.models.api_credential import ApiCredential from swagger_server.models.api_generate_code_response import ApiGenerateCodeResponse -from swagger_server.models.api_generate_model_code_response import ApiGenerateModelCodeResponse +from swagger_server.models.api_generate_model_code_response import ( # noqa: F401 + ApiGenerateModelCodeResponse, +) from swagger_server.models.api_get_template_response import ApiGetTemplateResponse from swagger_server.models.api_inferenceservice import ApiInferenceservice -from swagger_server.models.api_list_catalog_items_response import ApiListCatalogItemsResponse -from swagger_server.models.api_list_catalog_upload_errors import ApiListCatalogUploadErrors +from swagger_server.models.api_list_catalog_items_response import ( # noqa: F401 + ApiListCatalogItemsResponse, +) +from swagger_server.models.api_list_catalog_upload_errors import ( + ApiListCatalogUploadErrors, +) from swagger_server.models.api_list_components_response import ApiListComponentsResponse -from swagger_server.models.api_list_credentials_response import ApiListCredentialsResponse +from swagger_server.models.api_list_credentials_response import ( # noqa: F401 + ApiListCredentialsResponse, +) from swagger_server.models.api_list_datasets_response import ApiListDatasetsResponse -from swagger_server.models.api_list_inferenceservices_response import ApiListInferenceservicesResponse +from swagger_server.models.api_list_inferenceservices_response import ( # noqa: F401 + ApiListInferenceservicesResponse, +) from swagger_server.models.api_list_models_response import ApiListModelsResponse from swagger_server.models.api_list_notebooks_response import ApiListNotebooksResponse from swagger_server.models.api_list_pipelines_response import ApiListPipelinesResponse @@ -37,7 +49,9 @@ 
from swagger_server.models.api_model_script import ApiModelScript from swagger_server.models.api_parameter import ApiParameter from swagger_server.models.api_pipeline_custom import ApiPipelineCustom -from swagger_server.models.api_pipeline_custom_run_payload import ApiPipelineCustomRunPayload +from swagger_server.models.api_pipeline_custom_run_payload import ( + ApiPipelineCustomRunPayload, +) from swagger_server.models.api_pipeline_dag import ApiPipelineDAG from swagger_server.models.api_pipeline_extension import ApiPipelineExtension from swagger_server.models.api_pipeline_inputs import ApiPipelineInputs diff --git a/api/server/swagger_server/models/any_value.py b/api/server/swagger_server/models/any_value.py index e6fe3345..624ed613 100644 --- a/api/server/swagger_server/models/any_value.py +++ b/api/server/swagger_server/models/any_value.py @@ -9,7 +9,7 @@ from typing import List, Dict # noqa: F401 from swagger_server.models.base_model_ import Model -from swagger_server import util +from swagger_server import util # noqa: F401 class AnyValue(Model): @@ -19,17 +19,13 @@ class AnyValue(Model): """ def __init__(self): # noqa: E501 - """AnyValue - a model defined in Swagger + """AnyValue - a model defined in Swagger""" + self.swagger_types = {} - """ - self.swagger_types = { - } - - self.attribute_map = { - } + self.attribute_map = {} @classmethod - def from_dict(cls, dikt) -> 'AnyValue': + def from_dict(cls, dikt) -> "AnyValue": """Returns the dict as a model :param dikt: A dict. 
diff --git a/api/server/swagger_server/models/api_access_token.py b/api/server/swagger_server/models/api_access_token.py index 31dace26..4cab8d2f 100644 --- a/api/server/swagger_server/models/api_access_token.py +++ b/api/server/swagger_server/models/api_access_token.py @@ -9,7 +9,7 @@ from typing import List, Dict # noqa: F401 from swagger_server.models.base_model_ import Model -from swagger_server import util +from swagger_server import util # noqa: F401 class ApiAccessToken(Model): @@ -18,7 +18,7 @@ class ApiAccessToken(Model): Do not edit the class manually. """ - def __init__(self, api_token: str=None, url_host: str=None): # noqa: E501 + def __init__(self, api_token: str = None, url_host: str = None): # noqa: E501 """ApiAccessToken - a model defined in Swagger :param api_token: The api_token of this ApiAccessToken. # noqa: E501 @@ -26,21 +26,15 @@ def __init__(self, api_token: str=None, url_host: str=None): # noqa: E501 :param url_host: The url_host of this ApiAccessToken. # noqa: E501 :type url_host: str """ - self.swagger_types = { - 'api_token': str, - 'url_host': str - } + self.swagger_types = {"api_token": str, "url_host": str} - self.attribute_map = { - 'api_token': 'api_token', - 'url_host': 'url_host' - } + self.attribute_map = {"api_token": "api_token", "url_host": "url_host"} self._api_token = api_token self._url_host = url_host @classmethod - def from_dict(cls, dikt) -> 'ApiAccessToken': + def from_dict(cls, dikt) -> "ApiAccessToken": """Returns the dict as a model :param dikt: A dict. 
@@ -71,7 +65,9 @@ def api_token(self, api_token: str): :type api_token: str """ if api_token is None: - raise ValueError("Invalid value for `api_token`, must not be `None`") # noqa: E501 + raise ValueError( + "Invalid value for `api_token`, must not be `None`" + ) self._api_token = api_token @@ -96,6 +92,8 @@ def url_host(self, url_host: str): :type url_host: str """ if url_host is None: - raise ValueError("Invalid value for `url_host`, must not be `None`") # noqa: E501 + raise ValueError( + "Invalid value for `url_host`, must not be `None`" + ) self._url_host = url_host diff --git a/api/server/swagger_server/models/api_asset.py b/api/server/swagger_server/models/api_asset.py index de23c7ba..c740ed89 100644 --- a/api/server/swagger_server/models/api_asset.py +++ b/api/server/swagger_server/models/api_asset.py @@ -9,7 +9,7 @@ from typing import List, Dict # noqa: F401 from swagger_server.models.base_model_ import Model -from swagger_server import util +from swagger_server import util # noqa: F401 class ApiAsset(Model): @@ -18,7 +18,17 @@ class ApiAsset(Model): Do not edit the class manually. """ - def __init__(self, id: str=None, created_at: datetime=None, name: str=None, description: str=None, featured: bool=None, publish_approved: bool=None, related_assets: List[str]=None, filter_categories: Dict[str, str]=None): # noqa: E501 + def __init__( + self, + id: str = None, + created_at: datetime = None, + name: str = None, + description: str = None, + featured: bool = None, + publish_approved: bool = None, + related_assets: List[str] = None, + filter_categories: Dict[str, str] = None, + ): # noqa: E501 """ApiAsset - a model defined in Swagger :param id: The id of this ApiAsset. 
# noqa: E501 @@ -39,25 +49,25 @@ def __init__(self, id: str=None, created_at: datetime=None, name: str=None, desc :type filter_categories: Dict[str, str] """ self.swagger_types = { - 'id': str, - 'created_at': datetime, - 'name': str, - 'description': str, - 'featured': bool, - 'publish_approved': bool, - 'related_assets': List[str], - 'filter_categories': Dict[str, str] + "id": str, + "created_at": datetime, + "name": str, + "description": str, + "featured": bool, + "publish_approved": bool, + "related_assets": List[str], + "filter_categories": Dict[str, str], } self.attribute_map = { - 'id': 'id', - 'created_at': 'created_at', - 'name': 'name', - 'description': 'description', - 'featured': 'featured', - 'publish_approved': 'publish_approved', - 'related_assets': 'related_assets', - 'filter_categories': 'filter_categories' + "id": "id", + "created_at": "created_at", + "name": "name", + "description": "description", + "featured": "featured", + "publish_approved": "publish_approved", + "related_assets": "related_assets", + "filter_categories": "filter_categories", } self._id = id @@ -70,7 +80,7 @@ def __init__(self, id: str=None, created_at: datetime=None, name: str=None, desc self._filter_categories = filter_categories @classmethod - def from_dict(cls, dikt) -> 'ApiAsset': + def from_dict(cls, dikt) -> "ApiAsset": """Returns the dict as a model :param dikt: A dict. 
@@ -141,7 +151,9 @@ def name(self, name: str): :type name: str """ if name is None: - raise ValueError("Invalid value for `name`, must not be `None`") # noqa: E501 + raise ValueError( + "Invalid value for `name`, must not be `None`" + ) self._name = name @@ -164,7 +176,9 @@ def description(self, description: str): :type description: str """ if description is None: - raise ValueError("Invalid value for `description`, must not be `None`") # noqa: E501 + raise ValueError( + "Invalid value for `description`, must not be `None`" + ) self._description = description diff --git a/api/server/swagger_server/models/api_catalog_upload.py b/api/server/swagger_server/models/api_catalog_upload.py index 8f431603..1dea7ef1 100644 --- a/api/server/swagger_server/models/api_catalog_upload.py +++ b/api/server/swagger_server/models/api_catalog_upload.py @@ -11,7 +11,7 @@ from swagger_server.models.api_access_token import ApiAccessToken from swagger_server.models.api_catalog_upload_item import ApiCatalogUploadItem from swagger_server.models.base_model_ import Model -from swagger_server import util +from swagger_server import util # noqa: F401 class ApiCatalogUpload(Model): @@ -20,7 +20,15 @@ class ApiCatalogUpload(Model): Do not edit the class manually. 
""" - def __init__(self, api_access_tokens: List[ApiAccessToken]=None, components: List[ApiCatalogUploadItem]=None, datasets: List[ApiCatalogUploadItem]=None, models: List[ApiCatalogUploadItem]=None, notebooks: List[ApiCatalogUploadItem]=None, pipelines: List[ApiCatalogUploadItem]=None): # noqa: E501 + def __init__( + self, + api_access_tokens: List[ApiAccessToken] = None, + components: List[ApiCatalogUploadItem] = None, + datasets: List[ApiCatalogUploadItem] = None, + models: List[ApiCatalogUploadItem] = None, + notebooks: List[ApiCatalogUploadItem] = None, + pipelines: List[ApiCatalogUploadItem] = None, + ): # noqa: E501 """ApiCatalogUpload - a model defined in Swagger :param api_access_tokens: The api_access_tokens of this ApiCatalogUpload. # noqa: E501 @@ -37,21 +45,21 @@ def __init__(self, api_access_tokens: List[ApiAccessToken]=None, components: Lis :type pipelines: List[ApiCatalogUploadItem] """ self.swagger_types = { - 'api_access_tokens': List[ApiAccessToken], - 'components': List[ApiCatalogUploadItem], - 'datasets': List[ApiCatalogUploadItem], - 'models': List[ApiCatalogUploadItem], - 'notebooks': List[ApiCatalogUploadItem], - 'pipelines': List[ApiCatalogUploadItem] + "api_access_tokens": List[ApiAccessToken], + "components": List[ApiCatalogUploadItem], + "datasets": List[ApiCatalogUploadItem], + "models": List[ApiCatalogUploadItem], + "notebooks": List[ApiCatalogUploadItem], + "pipelines": List[ApiCatalogUploadItem], } self.attribute_map = { - 'api_access_tokens': 'api_access_tokens', - 'components': 'components', - 'datasets': 'datasets', - 'models': 'models', - 'notebooks': 'notebooks', - 'pipelines': 'pipelines' + "api_access_tokens": "api_access_tokens", + "components": "components", + "datasets": "datasets", + "models": "models", + "notebooks": "notebooks", + "pipelines": "pipelines", } self._api_access_tokens = api_access_tokens @@ -62,7 +70,7 @@ def __init__(self, api_access_tokens: List[ApiAccessToken]=None, components: Lis self._pipelines = 
pipelines @classmethod - def from_dict(cls, dikt) -> 'ApiCatalogUpload': + def from_dict(cls, dikt) -> "ApiCatalogUpload": """Returns the dict as a model :param dikt: A dict. diff --git a/api/server/swagger_server/models/api_catalog_upload_error.py b/api/server/swagger_server/models/api_catalog_upload_error.py index 6f986b08..f21bf688 100644 --- a/api/server/swagger_server/models/api_catalog_upload_error.py +++ b/api/server/swagger_server/models/api_catalog_upload_error.py @@ -9,7 +9,7 @@ from typing import List, Dict # noqa: F401 from swagger_server.models.base_model_ import Model -from swagger_server import util +from swagger_server import util # noqa: F401 class ApiCatalogUploadError(Model): @@ -18,7 +18,13 @@ class ApiCatalogUploadError(Model): Do not edit the class manually. """ - def __init__(self, name: str=None, url: str=None, error_message: str=None, status_code: int=None): # noqa: E501 + def __init__( + self, + name: str = None, + url: str = None, + error_message: str = None, + status_code: int = None, + ): # noqa: E501 """ApiCatalogUploadError - a model defined in Swagger :param name: The name of this ApiCatalogUploadError. 
# noqa: E501 @@ -31,17 +37,17 @@ def __init__(self, name: str=None, url: str=None, error_message: str=None, statu :type status_code: int """ self.swagger_types = { - 'name': str, - 'url': str, - 'error_message': str, - 'status_code': int + "name": str, + "url": str, + "error_message": str, + "status_code": int, } self.attribute_map = { - 'name': 'name', - 'url': 'url', - 'error_message': 'error_message', - 'status_code': 'status_code' + "name": "name", + "url": "url", + "error_message": "error_message", + "status_code": "status_code", } self._name = name @@ -50,7 +56,7 @@ def __init__(self, name: str=None, url: str=None, error_message: str=None, statu self._status_code = status_code @classmethod - def from_dict(cls, dikt) -> 'ApiCatalogUploadError': + def from_dict(cls, dikt) -> "ApiCatalogUploadError": """Returns the dict as a model :param dikt: A dict. @@ -102,7 +108,9 @@ def url(self, url: str): :type url: str """ if url is None: - raise ValueError("Invalid value for `url`, must not be `None`") # noqa: E501 + raise ValueError( + "Invalid value for `url`, must not be `None`" + ) self._url = url diff --git a/api/server/swagger_server/models/api_catalog_upload_item.py b/api/server/swagger_server/models/api_catalog_upload_item.py index 52daa56b..3408cac5 100644 --- a/api/server/swagger_server/models/api_catalog_upload_item.py +++ b/api/server/swagger_server/models/api_catalog_upload_item.py @@ -9,7 +9,7 @@ from typing import List, Dict # noqa: F401 from swagger_server.models.base_model_ import Model -from swagger_server import util +from swagger_server import util # noqa: F401 class ApiCatalogUploadItem(Model): @@ -18,7 +18,7 @@ class ApiCatalogUploadItem(Model): Do not edit the class manually. """ - def __init__(self, name: str=None, url: str=None): # noqa: E501 + def __init__(self, name: str = None, url: str = None): # noqa: E501 """ApiCatalogUploadItem - a model defined in Swagger :param name: The name of this ApiCatalogUploadItem. 
# noqa: E501 @@ -26,21 +26,15 @@ def __init__(self, name: str=None, url: str=None): # noqa: E501 :param url: The url of this ApiCatalogUploadItem. # noqa: E501 :type url: str """ - self.swagger_types = { - 'name': str, - 'url': str - } + self.swagger_types = {"name": str, "url": str} - self.attribute_map = { - 'name': 'name', - 'url': 'url' - } + self.attribute_map = {"name": "name", "url": "url"} self._name = name self._url = url @classmethod - def from_dict(cls, dikt) -> 'ApiCatalogUploadItem': + def from_dict(cls, dikt) -> "ApiCatalogUploadItem": """Returns the dict as a model :param dikt: A dict. @@ -92,6 +86,8 @@ def url(self, url: str): :type url: str """ if url is None: - raise ValueError("Invalid value for `url`, must not be `None`") # noqa: E501 + raise ValueError( + "Invalid value for `url`, must not be `None`" + ) self._url = url diff --git a/api/server/swagger_server/models/api_catalog_upload_response.py b/api/server/swagger_server/models/api_catalog_upload_response.py index be1f5e03..be42a63e 100644 --- a/api/server/swagger_server/models/api_catalog_upload_response.py +++ b/api/server/swagger_server/models/api_catalog_upload_response.py @@ -8,9 +8,16 @@ from typing import List, Dict # noqa: F401 -from swagger_server.models import ApiComponent, ApiDataset, ApiModel, ApiNotebook, ApiPipeline, ApiCatalogUploadError +from swagger_server.models import ( + ApiComponent, + ApiDataset, + ApiModel, + ApiNotebook, + ApiPipeline, + ApiCatalogUploadError, +) from swagger_server.models.base_model_ import Model -from swagger_server import util +from swagger_server import util # noqa: F401 class ApiCatalogUploadResponse(Model): @@ -19,7 +26,19 @@ class ApiCatalogUploadResponse(Model): Do not edit the class manually. 
""" - def __init__(self, components: List[ApiComponent]=None, datasets: List[ApiDataset]=None, models: List[ApiModel]=None, notebooks: List[ApiNotebook]=None, pipelines: List[ApiPipeline]=None, total_size: int=None, next_page_token: str=None, errors: List[ApiCatalogUploadError]=None, total_errors: int=None, total_created: int=None): # noqa: E501 + def __init__( + self, + components: List[ApiComponent] = None, + datasets: List[ApiDataset] = None, + models: List[ApiModel] = None, + notebooks: List[ApiNotebook] = None, + pipelines: List[ApiPipeline] = None, + total_size: int = None, + next_page_token: str = None, + errors: List[ApiCatalogUploadError] = None, + total_errors: int = None, + total_created: int = None, + ): # noqa: E501 """ApiCatalogUploadResponse - a model defined in Swagger :param components: The components of this ApiCatalogUploadResponse. # noqa: E501 @@ -44,29 +63,29 @@ def __init__(self, components: List[ApiComponent]=None, datasets: List[ApiDatase :type total_created: int """ self.swagger_types = { - 'components': List[ApiComponent], - 'datasets': List[ApiDataset], - 'models': List[ApiModel], - 'notebooks': List[ApiNotebook], - 'pipelines': List[ApiPipeline], - 'total_size': int, - 'next_page_token': str, - 'errors': List[ApiCatalogUploadError], - 'total_errors': int, - 'total_created': int + "components": List[ApiComponent], + "datasets": List[ApiDataset], + "models": List[ApiModel], + "notebooks": List[ApiNotebook], + "pipelines": List[ApiPipeline], + "total_size": int, + "next_page_token": str, + "errors": List[ApiCatalogUploadError], + "total_errors": int, + "total_created": int, } self.attribute_map = { - 'components': 'components', - 'datasets': 'datasets', - 'models': 'models', - 'notebooks': 'notebooks', - 'pipelines': 'pipelines', - 'total_size': 'total_size', - 'next_page_token': 'next_page_token', - 'errors': 'errors', - 'total_errors': 'total_errors', - 'total_created': 'total_created' + "components": "components", + "datasets": 
"datasets", + "models": "models", + "notebooks": "notebooks", + "pipelines": "pipelines", + "total_size": "total_size", + "next_page_token": "next_page_token", + "errors": "errors", + "total_errors": "total_errors", + "total_created": "total_created", } self._components = components @@ -81,7 +100,7 @@ def __init__(self, components: List[ApiComponent]=None, datasets: List[ApiDatase self._total_created = total_created @classmethod - def from_dict(cls, dikt) -> 'ApiCatalogUploadResponse': + def from_dict(cls, dikt) -> "ApiCatalogUploadResponse": """Returns the dict as a model :param dikt: A dict. diff --git a/api/server/swagger_server/models/api_component.py b/api/server/swagger_server/models/api_component.py index 9313f063..e4cd0bb2 100644 --- a/api/server/swagger_server/models/api_component.py +++ b/api/server/swagger_server/models/api_component.py @@ -11,7 +11,7 @@ from swagger_server.models.api_asset import ApiAsset from swagger_server.models.api_metadata import ApiMetadata # noqa: F401,E501 from swagger_server.models.api_parameter import ApiParameter # noqa: F401,E501 -from swagger_server import util +from swagger_server import util # noqa: F401 class ApiComponent(ApiAsset): @@ -20,7 +20,19 @@ class ApiComponent(ApiAsset): Do not edit the class manually. 
""" - def __init__(self, id: str=None, created_at: datetime=None, name: str=None, description: str=None, featured: bool=None, publish_approved: bool=None, related_assets: List[str]=None, filter_categories: Dict[str, str]=None, metadata: ApiMetadata=None, parameters: List[ApiParameter]=None): # noqa: E501 + def __init__( + self, + id: str = None, + created_at: datetime = None, + name: str = None, + description: str = None, + featured: bool = None, + publish_approved: bool = None, + related_assets: List[str] = None, + filter_categories: Dict[str, str] = None, + metadata: ApiMetadata = None, + parameters: List[ApiParameter] = None, + ): # noqa: E501 """ApiComponent - a model defined in Swagger :param id: The id of this ApiComponent. # noqa: E501 @@ -45,29 +57,29 @@ def __init__(self, id: str=None, created_at: datetime=None, name: str=None, desc :type parameters: List[ApiParameter] """ self.swagger_types = { - 'id': str, - 'created_at': datetime, - 'name': str, - 'description': str, - 'featured': bool, - 'publish_approved': bool, - 'related_assets': List[str], - 'filter_categories': Dict[str, str], - 'metadata': ApiMetadata, - 'parameters': List[ApiParameter] + "id": str, + "created_at": datetime, + "name": str, + "description": str, + "featured": bool, + "publish_approved": bool, + "related_assets": List[str], + "filter_categories": Dict[str, str], + "metadata": ApiMetadata, + "parameters": List[ApiParameter], } self.attribute_map = { - 'id': 'id', - 'created_at': 'created_at', - 'name': 'name', - 'description': 'description', - 'featured': 'featured', - 'publish_approved': 'publish_approved', - 'related_assets': 'related_assets', - 'filter_categories': 'filter_categories', - 'metadata': 'metadata', - 'parameters': 'parameters' + "id": "id", + "created_at": "created_at", + "name": "name", + "description": "description", + "featured": "featured", + "publish_approved": "publish_approved", + "related_assets": "related_assets", + "filter_categories": "filter_categories", 
+ "metadata": "metadata", + "parameters": "parameters", } self._id = id @@ -82,7 +94,7 @@ def __init__(self, id: str=None, created_at: datetime=None, name: str=None, desc self._parameters = parameters @classmethod - def from_dict(cls, dikt) -> 'ApiComponent': + def from_dict(cls, dikt) -> "ApiComponent": """Returns the dict as a model :param dikt: A dict. @@ -153,7 +165,9 @@ def name(self, name: str): :type name: str """ if name is None: - raise ValueError("Invalid value for `name`, must not be `None`") # noqa: E501 + raise ValueError( + "Invalid value for `name`, must not be `None`" + ) self._name = name @@ -176,7 +190,9 @@ def description(self, description: str): :type description: str """ if description is None: - raise ValueError("Invalid value for `description`, must not be `None`") # noqa: E501 + raise ValueError( + "Invalid value for `description`, must not be `None`" + ) self._description = description diff --git a/api/server/swagger_server/models/api_credential.py b/api/server/swagger_server/models/api_credential.py index d0b48532..8fe75d46 100644 --- a/api/server/swagger_server/models/api_credential.py +++ b/api/server/swagger_server/models/api_credential.py @@ -9,7 +9,7 @@ from typing import List, Dict # noqa: F401 from swagger_server.models.base_model_ import Model -from swagger_server import util +from swagger_server import util # noqa: F401 class ApiCredential(Model): @@ -18,7 +18,15 @@ class ApiCredential(Model): Do not edit the class manually. """ - def __init__(self, id: str=None, created_at: datetime=None, pipeline_id: str=None, project_id: str=None, api_key: str=None, data_assets: List[str]=None): # noqa: E501 + def __init__( + self, + id: str = None, + created_at: datetime = None, + pipeline_id: str = None, + project_id: str = None, + api_key: str = None, + data_assets: List[str] = None, + ): # noqa: E501 """ApiCredential - a model defined in Swagger :param id: The id of this ApiCredential. 
# noqa: E501 @@ -35,21 +43,21 @@ def __init__(self, id: str=None, created_at: datetime=None, pipeline_id: str=Non :type data_assets: List[str] """ self.swagger_types = { - 'id': str, - 'created_at': datetime, - 'pipeline_id': str, - 'project_id': str, - 'api_key': str, - 'data_assets': List[str] + "id": str, + "created_at": datetime, + "pipeline_id": str, + "project_id": str, + "api_key": str, + "data_assets": List[str], } self.attribute_map = { - 'id': 'id', - 'created_at': 'created_at', - 'pipeline_id': 'pipeline_id', - 'project_id': 'project_id', - 'api_key': 'api_key', - 'data_assets': 'data_assets' + "id": "id", + "created_at": "created_at", + "pipeline_id": "pipeline_id", + "project_id": "project_id", + "api_key": "api_key", + "data_assets": "data_assets", } self._id = id @@ -60,7 +68,7 @@ def __init__(self, id: str=None, created_at: datetime=None, pipeline_id: str=Non self._data_assets = data_assets @classmethod - def from_dict(cls, dikt) -> 'ApiCredential': + def from_dict(cls, dikt) -> "ApiCredential": """Returns the dict as a model :param dikt: A dict. 
@@ -131,7 +139,9 @@ def pipeline_id(self, pipeline_id: str): :type pipeline_id: str """ if pipeline_id is None: - raise ValueError("Invalid value for `pipeline_id`, must not be `None`") # noqa: E501 + raise ValueError( + "Invalid value for `pipeline_id`, must not be `None`" + ) self._pipeline_id = pipeline_id @@ -154,7 +164,9 @@ def project_id(self, project_id: str): :type project_id: str """ if project_id is None: - raise ValueError("Invalid value for `project_id`, must not be `None`") # noqa: E501 + raise ValueError( + "Invalid value for `project_id`, must not be `None`" + ) self._project_id = project_id diff --git a/api/server/swagger_server/models/api_dataset.py b/api/server/swagger_server/models/api_dataset.py index 59822835..70fb6e4b 100644 --- a/api/server/swagger_server/models/api_dataset.py +++ b/api/server/swagger_server/models/api_dataset.py @@ -11,7 +11,7 @@ from swagger_server.models.api_asset import ApiAsset from swagger_server.models.api_metadata import ApiMetadata # noqa: F401,E501 from swagger_server.models.api_parameter import ApiParameter # noqa: F401,E501 -from swagger_server import util +from swagger_server import util # noqa: F401 class ApiDataset(ApiAsset): @@ -20,7 +20,23 @@ class ApiDataset(ApiAsset): Do not edit the class manually. 
""" - def __init__(self, id: str=None, created_at: datetime=None, name: str=None, description: str=None, featured: bool=None, publish_approved: bool=None, related_assets: List[str]=None, filter_categories: Dict[str, str]=None, domain: str=None, format: str=None, size: str=None, number_of_records: int=None, license: str=None, metadata: ApiMetadata=None): # noqa: E501 + def __init__( + self, + id: str = None, + created_at: datetime = None, + name: str = None, + description: str = None, + featured: bool = None, + publish_approved: bool = None, + related_assets: List[str] = None, + filter_categories: Dict[str, str] = None, + domain: str = None, + format: str = None, + size: str = None, + number_of_records: int = None, + license: str = None, + metadata: ApiMetadata = None, + ): # noqa: E501 """ApiDataset - a model defined in Swagger :param id: The id of this ApiDataset. # noqa: E501 @@ -53,37 +69,37 @@ def __init__(self, id: str=None, created_at: datetime=None, name: str=None, desc :type metadata: ApiMetadata """ self.swagger_types = { - 'id': str, - 'created_at': datetime, - 'name': str, - 'description': str, - 'featured': bool, - 'publish_approved': bool, - 'related_assets': List[str], - 'filter_categories': Dict[str, str], - 'domain': str, - 'format': str, - 'size': str, - 'number_of_records': int, - 'license': str, - 'metadata': ApiMetadata + "id": str, + "created_at": datetime, + "name": str, + "description": str, + "featured": bool, + "publish_approved": bool, + "related_assets": List[str], + "filter_categories": Dict[str, str], + "domain": str, + "format": str, + "size": str, + "number_of_records": int, + "license": str, + "metadata": ApiMetadata, } self.attribute_map = { - 'id': 'id', - 'created_at': 'created_at', - 'name': 'name', - 'description': 'description', - 'featured': 'featured', - 'publish_approved': 'publish_approved', - 'related_assets': 'related_assets', - 'filter_categories': 'filter_categories', - 'domain': 'domain', - 'format': 'format', - 
'size': 'size', - 'number_of_records': 'number_of_records', - 'license': 'license', - 'metadata': 'metadata' + "id": "id", + "created_at": "created_at", + "name": "name", + "description": "description", + "featured": "featured", + "publish_approved": "publish_approved", + "related_assets": "related_assets", + "filter_categories": "filter_categories", + "domain": "domain", + "format": "format", + "size": "size", + "number_of_records": "number_of_records", + "license": "license", + "metadata": "metadata", } self._id = id @@ -102,7 +118,7 @@ def __init__(self, id: str=None, created_at: datetime=None, name: str=None, desc self._metadata = metadata @classmethod - def from_dict(cls, dikt) -> 'ApiDataset': + def from_dict(cls, dikt) -> "ApiDataset": """Returns the dict as a model :param dikt: A dict. @@ -173,7 +189,9 @@ def name(self, name: str): :type name: str """ if name is None: - raise ValueError("Invalid value for `name`, must not be `None`") # noqa: E501 + raise ValueError( + "Invalid value for `name`, must not be `None`" + ) self._name = name @@ -196,7 +214,9 @@ def description(self, description: str): :type description: str """ if description is None: - raise ValueError("Invalid value for `description`, must not be `None`") # noqa: E501 + raise ValueError( + "Invalid value for `description`, must not be `None`" + ) self._description = description diff --git a/api/server/swagger_server/models/api_generate_code_response.py b/api/server/swagger_server/models/api_generate_code_response.py index 2110d295..03ccfc3c 100644 --- a/api/server/swagger_server/models/api_generate_code_response.py +++ b/api/server/swagger_server/models/api_generate_code_response.py @@ -9,7 +9,7 @@ from typing import List, Dict # noqa: F401 from swagger_server.models.base_model_ import Model -from swagger_server import util +from swagger_server import util # noqa: F401 class ApiGenerateCodeResponse(Model): @@ -18,24 +18,20 @@ class ApiGenerateCodeResponse(Model): Do not edit the class manually. 
""" - def __init__(self, script: str=None): # noqa: E501 + def __init__(self, script: str = None): # noqa: E501 """ApiGenerateCodeResponse - a model defined in Swagger :param script: The script of this ApiGenerateCodeResponse. # noqa: E501 :type script: str """ - self.swagger_types = { - 'script': str - } + self.swagger_types = {"script": str} - self.attribute_map = { - 'script': 'script' - } + self.attribute_map = {"script": "script"} self._script = script @classmethod - def from_dict(cls, dikt) -> 'ApiGenerateCodeResponse': + def from_dict(cls, dikt) -> "ApiGenerateCodeResponse": """Returns the dict as a model :param dikt: A dict. diff --git a/api/server/swagger_server/models/api_generate_model_code_response.py b/api/server/swagger_server/models/api_generate_model_code_response.py index 3b209711..8c97013c 100644 --- a/api/server/swagger_server/models/api_generate_model_code_response.py +++ b/api/server/swagger_server/models/api_generate_model_code_response.py @@ -10,7 +10,7 @@ from swagger_server.models.base_model_ import Model from swagger_server.models.api_model_script import ApiModelScript # noqa: F401,E501 -from swagger_server import util +from swagger_server import util # noqa: F401 class ApiGenerateModelCodeResponse(Model): @@ -19,24 +19,20 @@ class ApiGenerateModelCodeResponse(Model): Do not edit the class manually. """ - def __init__(self, scripts: List[ApiModelScript]=None): # noqa: E501 + def __init__(self, scripts: List[ApiModelScript] = None): # noqa: E501 """ApiGenerateModelCodeResponse - a model defined in Swagger :param scripts: The scripts of this ApiGenerateModelCodeResponse. 
# noqa: E501 :type scripts: List[ApiModelScript] """ - self.swagger_types = { - 'scripts': List[ApiModelScript] - } + self.swagger_types = {"scripts": List[ApiModelScript]} - self.attribute_map = { - 'scripts': 'scripts' - } + self.attribute_map = {"scripts": "scripts"} self._scripts = scripts @classmethod - def from_dict(cls, dikt) -> 'ApiGenerateModelCodeResponse': + def from_dict(cls, dikt) -> "ApiGenerateModelCodeResponse": """Returns the dict as a model :param dikt: A dict. diff --git a/api/server/swagger_server/models/api_get_template_response.py b/api/server/swagger_server/models/api_get_template_response.py index adf1103a..cf6076be 100644 --- a/api/server/swagger_server/models/api_get_template_response.py +++ b/api/server/swagger_server/models/api_get_template_response.py @@ -9,7 +9,7 @@ from typing import List, Dict # noqa: F401 from swagger_server.models.base_model_ import Model -from swagger_server import util +from swagger_server import util # noqa: F401 class ApiGetTemplateResponse(Model): @@ -18,7 +18,7 @@ class ApiGetTemplateResponse(Model): Do not edit the class manually. """ - def __init__(self, template: str=None, url: str=None): # noqa: E501 + def __init__(self, template: str = None, url: str = None): # noqa: E501 """ApiGetTemplateResponse - a model defined in Swagger :param template: The template of this ApiGetTemplateResponse. # noqa: E501 @@ -26,21 +26,15 @@ def __init__(self, template: str=None, url: str=None): # noqa: E501 :param url: The url of this ApiGetTemplateResponse. 
# noqa: E501 :type url: str """ - self.swagger_types = { - 'template': str, - 'url': str - } + self.swagger_types = {"template": str, "url": str} - self.attribute_map = { - 'template': 'template', - 'url': 'url' - } + self.attribute_map = {"template": "template", "url": "url"} self._template = template self._url = url @classmethod - def from_dict(cls, dikt) -> 'ApiGetTemplateResponse': + def from_dict(cls, dikt) -> "ApiGetTemplateResponse": """Returns the dict as a model :param dikt: A dict. @@ -77,7 +71,7 @@ def template(self, template: str): def url(self) -> str: """Gets the url of this ApiGetTemplateResponse. - The URL to download the template text from S3 storage (Minio) # noqa: E501 + The URL to download the template text from S3 storage (Minio) :return: The url of this ApiGetTemplateResponse. :rtype: str @@ -88,7 +82,7 @@ def url(self) -> str: def url(self, url: str): """Sets the url of this ApiGetTemplateResponse. - The URL to download the template text from S3 storage (Minio) # noqa: E501 + The URL to download the template text from S3 storage (Minio) :param url: The url of this ApiGetTemplateResponse. :type url: str diff --git a/api/server/swagger_server/models/api_inferenceservice.py b/api/server/swagger_server/models/api_inferenceservice.py index 74f78a07..0d379fe0 100644 --- a/api/server/swagger_server/models/api_inferenceservice.py +++ b/api/server/swagger_server/models/api_inferenceservice.py @@ -10,7 +10,7 @@ from swagger_server.models.base_model_ import Model from swagger_server.models.any_value import AnyValue # noqa: F401,E501 -from swagger_server import util +from swagger_server import util # noqa: F401 class ApiInferenceservice(Model): @@ -19,7 +19,13 @@ class ApiInferenceservice(Model): Do not edit the class manually. 
""" - def __init__(self, api_version: str=None, kind: str=None, metadata: AnyValue=None, spec: AnyValue=None): # noqa: E501 + def __init__( + self, + api_version: str = None, + kind: str = None, + metadata: AnyValue = None, + spec: AnyValue = None, + ): # noqa: E501 """ApiInferenceservice - a model defined in Swagger :param api_version: The api_version of this ApiInferenceservice. # noqa: E501 @@ -32,17 +38,17 @@ def __init__(self, api_version: str=None, kind: str=None, metadata: AnyValue=Non :type spec: AnyValue """ self.swagger_types = { - 'api_version': str, - 'kind': str, - 'metadata': AnyValue, - 'spec': AnyValue + "api_version": str, + "kind": str, + "metadata": AnyValue, + "spec": AnyValue, } self.attribute_map = { - 'api_version': 'apiVersion', - 'kind': 'kind', - 'metadata': 'metadata', - 'spec': 'spec' + "api_version": "apiVersion", + "kind": "kind", + "metadata": "metadata", + "spec": "spec", } self._api_version = api_version @@ -51,7 +57,7 @@ def __init__(self, api_version: str=None, kind: str=None, metadata: AnyValue=Non self._spec = spec @classmethod - def from_dict(cls, dikt) -> 'ApiInferenceservice': + def from_dict(cls, dikt) -> "ApiInferenceservice": """Returns the dict as a model :param dikt: A dict. 
@@ -80,7 +86,9 @@ def api_version(self, api_version: str): :type api_version: str """ if api_version is None: - raise ValueError("Invalid value for `api_version`, must not be `None`") # noqa: E501 + raise ValueError( + "Invalid value for `api_version`, must not be `None`" + ) self._api_version = api_version @@ -103,7 +111,9 @@ def kind(self, kind: str): :type kind: str """ if kind is None: - raise ValueError("Invalid value for `kind`, must not be `None`") # noqa: E501 + raise ValueError( + "Invalid value for `kind`, must not be `None`" + ) self._kind = kind diff --git a/api/server/swagger_server/models/api_list_catalog_items_response.py b/api/server/swagger_server/models/api_list_catalog_items_response.py index cc52ed9d..fb23a580 100644 --- a/api/server/swagger_server/models/api_list_catalog_items_response.py +++ b/api/server/swagger_server/models/api_list_catalog_items_response.py @@ -14,7 +14,7 @@ from swagger_server.models.api_notebook import ApiNotebook from swagger_server.models.api_pipeline import ApiPipeline from swagger_server.models.base_model_ import Model -from swagger_server import util +from swagger_server import util # noqa: F401 class ApiListCatalogItemsResponse(Model): @@ -23,7 +23,16 @@ class ApiListCatalogItemsResponse(Model): Do not edit the class manually. """ - def __init__(self, components: List[ApiComponent]=None, datasets: List[ApiDataset]=None, models: List[ApiModel]=None, notebooks: List[ApiNotebook]=None, pipelines: List[ApiPipeline]=None, total_size: int=None, next_page_token: str=None): # noqa: E501 + def __init__( + self, + components: List[ApiComponent] = None, + datasets: List[ApiDataset] = None, + models: List[ApiModel] = None, + notebooks: List[ApiNotebook] = None, + pipelines: List[ApiPipeline] = None, + total_size: int = None, + next_page_token: str = None, + ): # noqa: E501 """ApiListCatalogItemsResponse - a model defined in Swagger :param components: The components of this ApiListCatalogItemsResponse. 
# noqa: E501 @@ -42,23 +51,23 @@ def __init__(self, components: List[ApiComponent]=None, datasets: List[ApiDatase :type next_page_token: str """ self.swagger_types = { - 'components': List[ApiComponent], - 'datasets': List[ApiDataset], - 'models': List[ApiModel], - 'notebooks': List[ApiNotebook], - 'pipelines': List[ApiPipeline], - 'total_size': int, - 'next_page_token': str + "components": List[ApiComponent], + "datasets": List[ApiDataset], + "models": List[ApiModel], + "notebooks": List[ApiNotebook], + "pipelines": List[ApiPipeline], + "total_size": int, + "next_page_token": str, } self.attribute_map = { - 'components': 'components', - 'datasets': 'datasets', - 'models': 'models', - 'notebooks': 'notebooks', - 'pipelines': 'pipelines', - 'total_size': 'total_size', - 'next_page_token': 'next_page_token' + "components": "components", + "datasets": "datasets", + "models": "models", + "notebooks": "notebooks", + "pipelines": "pipelines", + "total_size": "total_size", + "next_page_token": "next_page_token", } self._components = components @@ -70,7 +79,7 @@ def __init__(self, components: List[ApiComponent]=None, datasets: List[ApiDatase self._next_page_token = next_page_token @classmethod - def from_dict(cls, dikt) -> 'ApiListCatalogItemsResponse': + def from_dict(cls, dikt) -> "ApiListCatalogItemsResponse": """Returns the dict as a model :param dikt: A dict. 
diff --git a/api/server/swagger_server/models/api_list_catalog_upload_errors.py b/api/server/swagger_server/models/api_list_catalog_upload_errors.py index c86f076e..5ddca074 100644 --- a/api/server/swagger_server/models/api_list_catalog_upload_errors.py +++ b/api/server/swagger_server/models/api_list_catalog_upload_errors.py @@ -10,7 +10,7 @@ from swagger_server.models.api_catalog_upload_error import ApiCatalogUploadError from swagger_server.models.base_model_ import Model -from swagger_server import util +from swagger_server import util # noqa: F401 class ApiListCatalogUploadErrors(Model): @@ -19,7 +19,9 @@ class ApiListCatalogUploadErrors(Model): Do not edit the class manually. """ - def __init__(self, errors: List[ApiCatalogUploadError]=None, total_errors: int=None): # noqa: E501 + def __init__( + self, errors: List[ApiCatalogUploadError] = None, total_errors: int = None + ): # noqa: E501 """ApiListCatalogUploadErrors - a model defined in Swagger :param errors: The errors of this ApiListCatalogUploadErrors. # noqa: E501 @@ -28,20 +30,17 @@ def __init__(self, errors: List[ApiCatalogUploadError]=None, total_errors: int=N :type total_errors: int """ self.swagger_types = { - 'errors': List[ApiCatalogUploadError], - 'total_errors': int + "errors": List[ApiCatalogUploadError], + "total_errors": int, } - self.attribute_map = { - 'errors': 'errors', - 'total_errors': 'total_errors' - } + self.attribute_map = {"errors": "errors", "total_errors": "total_errors"} self._errors = errors self._total_errors = total_errors @classmethod - def from_dict(cls, dikt) -> 'ApiListCatalogUploadErrors': + def from_dict(cls, dikt) -> "ApiListCatalogUploadErrors": """Returns the dict as a model :param dikt: A dict. 
diff --git a/api/server/swagger_server/models/api_list_components_response.py b/api/server/swagger_server/models/api_list_components_response.py index 2ec60810..a5d05109 100644 --- a/api/server/swagger_server/models/api_list_components_response.py +++ b/api/server/swagger_server/models/api_list_components_response.py @@ -10,7 +10,7 @@ from swagger_server.models.base_model_ import Model from swagger_server.models.api_component import ApiComponent # noqa: F401,E501 -from swagger_server import util +from swagger_server import util # noqa: F401 class ApiListComponentsResponse(Model): @@ -19,7 +19,12 @@ class ApiListComponentsResponse(Model): Do not edit the class manually. """ - def __init__(self, components: List[ApiComponent]=None, total_size: int=None, next_page_token: str=None): # noqa: E501 + def __init__( + self, + components: List[ApiComponent] = None, + total_size: int = None, + next_page_token: str = None, + ): # noqa: E501 """ApiListComponentsResponse - a model defined in Swagger :param components: The components of this ApiListComponentsResponse. # noqa: E501 @@ -30,15 +35,15 @@ def __init__(self, components: List[ApiComponent]=None, total_size: int=None, ne :type next_page_token: str """ self.swagger_types = { - 'components': List[ApiComponent], - 'total_size': int, - 'next_page_token': str + "components": List[ApiComponent], + "total_size": int, + "next_page_token": str, } self.attribute_map = { - 'components': 'components', - 'total_size': 'total_size', - 'next_page_token': 'next_page_token' + "components": "components", + "total_size": "total_size", + "next_page_token": "next_page_token", } self._components = components @@ -46,7 +51,7 @@ def __init__(self, components: List[ApiComponent]=None, total_size: int=None, ne self._next_page_token = next_page_token @classmethod - def from_dict(cls, dikt) -> 'ApiListComponentsResponse': + def from_dict(cls, dikt) -> "ApiListComponentsResponse": """Returns the dict as a model :param dikt: A dict. 
diff --git a/api/server/swagger_server/models/api_list_credentials_response.py b/api/server/swagger_server/models/api_list_credentials_response.py index db03ba62..a8422c12 100644 --- a/api/server/swagger_server/models/api_list_credentials_response.py +++ b/api/server/swagger_server/models/api_list_credentials_response.py @@ -10,7 +10,7 @@ from swagger_server.models.base_model_ import Model from swagger_server.models.api_credential import ApiCredential # noqa: F401,E501 -from swagger_server import util +from swagger_server import util # noqa: F401 class ApiListCredentialsResponse(Model): @@ -19,7 +19,12 @@ class ApiListCredentialsResponse(Model): Do not edit the class manually. """ - def __init__(self, credentials: List[ApiCredential]=None, total_size: int=None, next_page_token: str=None): # noqa: E501 + def __init__( + self, + credentials: List[ApiCredential] = None, + total_size: int = None, + next_page_token: str = None, + ): # noqa: E501 """ApiListCredentialsResponse - a model defined in Swagger :param credentials: The credentials of this ApiListCredentialsResponse. 
# noqa: E501 @@ -30,15 +35,15 @@ def __init__(self, credentials: List[ApiCredential]=None, total_size: int=None, :type next_page_token: str """ self.swagger_types = { - 'credentials': List[ApiCredential], - 'total_size': int, - 'next_page_token': str + "credentials": List[ApiCredential], + "total_size": int, + "next_page_token": str, } self.attribute_map = { - 'credentials': 'credentials', - 'total_size': 'total_size', - 'next_page_token': 'next_page_token' + "credentials": "credentials", + "total_size": "total_size", + "next_page_token": "next_page_token", } self._credentials = credentials @@ -46,7 +51,7 @@ def __init__(self, credentials: List[ApiCredential]=None, total_size: int=None, self._next_page_token = next_page_token @classmethod - def from_dict(cls, dikt) -> 'ApiListCredentialsResponse': + def from_dict(cls, dikt) -> "ApiListCredentialsResponse": """Returns the dict as a model :param dikt: A dict. diff --git a/api/server/swagger_server/models/api_list_datasets_response.py b/api/server/swagger_server/models/api_list_datasets_response.py index 16c11c39..85748b81 100644 --- a/api/server/swagger_server/models/api_list_datasets_response.py +++ b/api/server/swagger_server/models/api_list_datasets_response.py @@ -10,7 +10,7 @@ from swagger_server.models.base_model_ import Model from swagger_server.models.api_dataset import ApiDataset # noqa: F401,E501 -from swagger_server import util +from swagger_server import util # noqa: F401 class ApiListDatasetsResponse(Model): @@ -19,7 +19,12 @@ class ApiListDatasetsResponse(Model): Do not edit the class manually. """ - def __init__(self, datasets: List[ApiDataset]=None, total_size: int=None, next_page_token: str=None): # noqa: E501 + def __init__( + self, + datasets: List[ApiDataset] = None, + total_size: int = None, + next_page_token: str = None, + ): # noqa: E501 """ApiListDatasetsResponse - a model defined in Swagger :param datasets: The datasets of this ApiListDatasetsResponse. 
# noqa: E501 @@ -30,15 +35,15 @@ def __init__(self, datasets: List[ApiDataset]=None, total_size: int=None, next_p :type next_page_token: str """ self.swagger_types = { - 'datasets': List[ApiDataset], - 'total_size': int, - 'next_page_token': str + "datasets": List[ApiDataset], + "total_size": int, + "next_page_token": str, } self.attribute_map = { - 'datasets': 'datasets', - 'total_size': 'total_size', - 'next_page_token': 'next_page_token' + "datasets": "datasets", + "total_size": "total_size", + "next_page_token": "next_page_token", } self._datasets = datasets @@ -46,7 +51,7 @@ def __init__(self, datasets: List[ApiDataset]=None, total_size: int=None, next_p self._next_page_token = next_page_token @classmethod - def from_dict(cls, dikt) -> 'ApiListDatasetsResponse': + def from_dict(cls, dikt) -> "ApiListDatasetsResponse": """Returns the dict as a model :param dikt: A dict. diff --git a/api/server/swagger_server/models/api_list_inferenceservices_response.py b/api/server/swagger_server/models/api_list_inferenceservices_response.py index 5b188c71..643eae1e 100644 --- a/api/server/swagger_server/models/api_list_inferenceservices_response.py +++ b/api/server/swagger_server/models/api_list_inferenceservices_response.py @@ -9,8 +9,10 @@ from typing import List, Dict # noqa: F401 from swagger_server.models.base_model_ import Model -from swagger_server.models.api_inferenceservice import ApiInferenceservice # noqa: F401,E501 -from swagger_server import util +from swagger_server.models.api_inferenceservice import ( + ApiInferenceservice, +) +from swagger_server import util # noqa: F401 class ApiListInferenceservicesResponse(Model): @@ -19,7 +21,12 @@ class ApiListInferenceservicesResponse(Model): Do not edit the class manually. 
""" - def __init__(self, inferenceservices: List[ApiInferenceservice]=None, total_size: int=None, next_page_token: str=None): # noqa: E501 + def __init__( + self, + inferenceservices: List[ApiInferenceservice] = None, + total_size: int = None, + next_page_token: str = None, + ): # noqa: E501 """ApiListInferenceservicesResponse - a model defined in Swagger :param inferenceservices: The inferenceservices of this ApiListInferenceservicesResponse. # noqa: E501 @@ -30,15 +37,15 @@ def __init__(self, inferenceservices: List[ApiInferenceservice]=None, total_size :type next_page_token: str """ self.swagger_types = { - 'inferenceservices': List[ApiInferenceservice], - 'total_size': int, - 'next_page_token': str + "inferenceservices": List[ApiInferenceservice], + "total_size": int, + "next_page_token": str, } self.attribute_map = { - 'inferenceservices': 'Inferenceservices', - 'total_size': 'total_size', - 'next_page_token': 'next_page_token' + "inferenceservices": "Inferenceservices", + "total_size": "total_size", + "next_page_token": "next_page_token", } self._inferenceservices = inferenceservices @@ -46,7 +53,7 @@ def __init__(self, inferenceservices: List[ApiInferenceservice]=None, total_size self._next_page_token = next_page_token @classmethod - def from_dict(cls, dikt) -> 'ApiListInferenceservicesResponse': + def from_dict(cls, dikt) -> "ApiListInferenceservicesResponse": """Returns the dict as a model :param dikt: A dict. 
diff --git a/api/server/swagger_server/models/api_list_models_response.py b/api/server/swagger_server/models/api_list_models_response.py index e59e2028..bdf0cf40 100644 --- a/api/server/swagger_server/models/api_list_models_response.py +++ b/api/server/swagger_server/models/api_list_models_response.py @@ -10,7 +10,7 @@ from swagger_server.models.base_model_ import Model from swagger_server.models.api_model import ApiModel # noqa: F401,E501 -from swagger_server import util +from swagger_server import util # noqa: F401 class ApiListModelsResponse(Model): @@ -19,7 +19,12 @@ class ApiListModelsResponse(Model): Do not edit the class manually. """ - def __init__(self, models: List[ApiModel]=None, total_size: int=None, next_page_token: str=None): # noqa: E501 + def __init__( + self, + models: List[ApiModel] = None, + total_size: int = None, + next_page_token: str = None, + ): # noqa: E501 """ApiListModelsResponse - a model defined in Swagger :param models: The models of this ApiListModelsResponse. # noqa: E501 @@ -30,15 +35,15 @@ def __init__(self, models: List[ApiModel]=None, total_size: int=None, next_page_ :type next_page_token: str """ self.swagger_types = { - 'models': List[ApiModel], - 'total_size': int, - 'next_page_token': str + "models": List[ApiModel], + "total_size": int, + "next_page_token": str, } self.attribute_map = { - 'models': 'models', - 'total_size': 'total_size', - 'next_page_token': 'next_page_token' + "models": "models", + "total_size": "total_size", + "next_page_token": "next_page_token", } self._models = models @@ -46,7 +51,7 @@ def __init__(self, models: List[ApiModel]=None, total_size: int=None, next_page_ self._next_page_token = next_page_token @classmethod - def from_dict(cls, dikt) -> 'ApiListModelsResponse': + def from_dict(cls, dikt) -> "ApiListModelsResponse": """Returns the dict as a model :param dikt: A dict. 
diff --git a/api/server/swagger_server/models/api_list_notebooks_response.py b/api/server/swagger_server/models/api_list_notebooks_response.py index 4291edbe..fd7badcb 100644 --- a/api/server/swagger_server/models/api_list_notebooks_response.py +++ b/api/server/swagger_server/models/api_list_notebooks_response.py @@ -10,7 +10,7 @@ from swagger_server.models.base_model_ import Model from swagger_server.models.api_notebook import ApiNotebook # noqa: F401,E501 -from swagger_server import util +from swagger_server import util # noqa: F401 class ApiListNotebooksResponse(Model): @@ -19,7 +19,12 @@ class ApiListNotebooksResponse(Model): Do not edit the class manually. """ - def __init__(self, notebooks: List[ApiNotebook]=None, total_size: int=None, next_page_token: str=None): # noqa: E501 + def __init__( + self, + notebooks: List[ApiNotebook] = None, + total_size: int = None, + next_page_token: str = None, + ): # noqa: E501 """ApiListNotebooksResponse - a model defined in Swagger :param notebooks: The notebooks of this ApiListNotebooksResponse. # noqa: E501 @@ -30,15 +35,15 @@ def __init__(self, notebooks: List[ApiNotebook]=None, total_size: int=None, next :type next_page_token: str """ self.swagger_types = { - 'notebooks': List[ApiNotebook], - 'total_size': int, - 'next_page_token': str + "notebooks": List[ApiNotebook], + "total_size": int, + "next_page_token": str, } self.attribute_map = { - 'notebooks': 'notebooks', - 'total_size': 'total_size', - 'next_page_token': 'next_page_token' + "notebooks": "notebooks", + "total_size": "total_size", + "next_page_token": "next_page_token", } self._notebooks = notebooks @@ -46,7 +51,7 @@ def __init__(self, notebooks: List[ApiNotebook]=None, total_size: int=None, next self._next_page_token = next_page_token @classmethod - def from_dict(cls, dikt) -> 'ApiListNotebooksResponse': + def from_dict(cls, dikt) -> "ApiListNotebooksResponse": """Returns the dict as a model :param dikt: A dict. 
diff --git a/api/server/swagger_server/models/api_list_pipelines_response.py b/api/server/swagger_server/models/api_list_pipelines_response.py index 01b809f9..36023bd5 100644 --- a/api/server/swagger_server/models/api_list_pipelines_response.py +++ b/api/server/swagger_server/models/api_list_pipelines_response.py @@ -9,8 +9,10 @@ from typing import List, Dict # noqa: F401 from swagger_server.models.base_model_ import Model -from swagger_server.models.api_pipeline_extended import ApiPipelineExtended # noqa: F401,E501 -from swagger_server import util +from swagger_server.models.api_pipeline_extended import ( # noqa: F401 + ApiPipelineExtended, +) +from swagger_server import util # noqa: F401 class ApiListPipelinesResponse(Model): @@ -19,7 +21,12 @@ class ApiListPipelinesResponse(Model): Do not edit the class manually. """ - def __init__(self, pipelines: List[ApiPipelineExtended]=None, total_size: int=None, next_page_token: str=None): # noqa: E501 + def __init__( + self, + pipelines: List[ApiPipelineExtended] = None, + total_size: int = None, + next_page_token: str = None, + ): # noqa: E501 """ApiListPipelinesResponse - a model defined in Swagger :param pipelines: The pipelines of this ApiListPipelinesResponse. 
# noqa: E501 @@ -30,15 +37,15 @@ def __init__(self, pipelines: List[ApiPipelineExtended]=None, total_size: int=No :type next_page_token: str """ self.swagger_types = { - 'pipelines': List[ApiPipelineExtended], - 'total_size': int, - 'next_page_token': str + "pipelines": List[ApiPipelineExtended], + "total_size": int, + "next_page_token": str, } self.attribute_map = { - 'pipelines': 'pipelines', - 'total_size': 'total_size', - 'next_page_token': 'next_page_token' + "pipelines": "pipelines", + "total_size": "total_size", + "next_page_token": "next_page_token", } self._pipelines = pipelines @@ -46,7 +53,7 @@ def __init__(self, pipelines: List[ApiPipelineExtended]=None, total_size: int=No self._next_page_token = next_page_token @classmethod - def from_dict(cls, dikt) -> 'ApiListPipelinesResponse': + def from_dict(cls, dikt) -> "ApiListPipelinesResponse": """Returns the dict as a model :param dikt: A dict. diff --git a/api/server/swagger_server/models/api_metadata.py b/api/server/swagger_server/models/api_metadata.py index 2d4a11b6..fed24dd5 100644 --- a/api/server/swagger_server/models/api_metadata.py +++ b/api/server/swagger_server/models/api_metadata.py @@ -9,7 +9,7 @@ from typing import List, Dict # noqa: F401 from swagger_server.models.base_model_ import Model -from swagger_server import util +from swagger_server import util # noqa: F401 class ApiMetadata(Model): @@ -18,7 +18,12 @@ class ApiMetadata(Model): Do not edit the class manually. """ - def __init__(self, annotations: Dict[str, str]=None, labels: Dict[str, str]=None, tags: List[str]=None): # noqa: E501 + def __init__( + self, + annotations: Dict[str, str] = None, + labels: Dict[str, str] = None, + tags: List[str] = None, + ): # noqa: E501 """ApiMetadata - a model defined in Swagger :param annotations: The annotations of this ApiMetadata. 
# noqa: E501 @@ -29,15 +34,15 @@ def __init__(self, annotations: Dict[str, str]=None, labels: Dict[str, str]=None :type tags: List[str] """ self.swagger_types = { - 'annotations': Dict[str, str], - 'labels': Dict[str, str], - 'tags': List[str] + "annotations": Dict[str, str], + "labels": Dict[str, str], + "tags": List[str], } self.attribute_map = { - 'annotations': 'annotations', - 'labels': 'labels', - 'tags': 'tags' + "annotations": "annotations", + "labels": "labels", + "tags": "tags", } self._annotations = annotations @@ -45,7 +50,7 @@ def __init__(self, annotations: Dict[str, str]=None, labels: Dict[str, str]=None self._tags = tags @classmethod - def from_dict(cls, dikt) -> 'ApiMetadata': + def from_dict(cls, dikt) -> "ApiMetadata": """Returns the dict as a model :param dikt: A dict. diff --git a/api/server/swagger_server/models/api_model.py b/api/server/swagger_server/models/api_model.py index 1061151b..5b7b0bff 100644 --- a/api/server/swagger_server/models/api_model.py +++ b/api/server/swagger_server/models/api_model.py @@ -9,9 +9,11 @@ from typing import List, Dict # noqa: F401 from swagger_server.models.api_asset import ApiAsset -from swagger_server.models.api_model_framework import ApiModelFramework # noqa: F401,E501 +from swagger_server.models.api_model_framework import ( + ApiModelFramework, # noqa: F401 +) from swagger_server.models.api_parameter import ApiParameter # noqa: F401,E501 -from swagger_server import util +from swagger_server import util # noqa: F401 class ApiModel(ApiAsset): @@ -20,7 +22,28 @@ class ApiModel(ApiAsset): Do not edit the class manually. 
""" - def __init__(self, id: str=None, created_at: datetime=None, name: str=None, description: str=None, featured: bool=None, publish_approved: bool=None, related_assets: List[str]=None, filter_categories: Dict[str, str]=None, domain: str=None, labels: Dict[str, str]=None, framework: ApiModelFramework=None, trainable: bool=None, trainable_tested_platforms: List[str]=None, trainable_credentials_required: bool=None, trainable_parameters: List[ApiParameter]=None, servable: bool=None, servable_tested_platforms: List[str]=None, servable_credentials_required: bool=None, servable_parameters: List[ApiParameter]=None): # noqa: E501 + def __init__( + self, + id: str = None, + created_at: datetime = None, + name: str = None, + description: str = None, + featured: bool = None, + publish_approved: bool = None, + related_assets: List[str] = None, + filter_categories: Dict[str, str] = None, + domain: str = None, + labels: Dict[str, str] = None, + framework: ApiModelFramework = None, + trainable: bool = None, + trainable_tested_platforms: List[str] = None, + trainable_credentials_required: bool = None, + trainable_parameters: List[ApiParameter] = None, + servable: bool = None, + servable_tested_platforms: List[str] = None, + servable_credentials_required: bool = None, + servable_parameters: List[ApiParameter] = None, + ): # noqa: E501 """ApiModel - a model defined in Swagger :param id: The id of this ApiModel. 
# noqa: E501 @@ -63,47 +86,47 @@ def __init__(self, id: str=None, created_at: datetime=None, name: str=None, desc :type servable_parameters: List[ApiParameter] """ self.swagger_types = { - 'id': str, - 'created_at': datetime, - 'name': str, - 'description': str, - 'featured': bool, - 'publish_approved': bool, - 'related_assets': List[str], - 'filter_categories': Dict[str, str], - 'domain': str, - 'labels': Dict[str, str], - 'framework': ApiModelFramework, - 'trainable': bool, - 'trainable_tested_platforms': List[str], - 'trainable_credentials_required': bool, - 'trainable_parameters': List[ApiParameter], - 'servable': bool, - 'servable_tested_platforms': List[str], - 'servable_credentials_required': bool, - 'servable_parameters': List[ApiParameter] + "id": str, + "created_at": datetime, + "name": str, + "description": str, + "featured": bool, + "publish_approved": bool, + "related_assets": List[str], + "filter_categories": Dict[str, str], + "domain": str, + "labels": Dict[str, str], + "framework": ApiModelFramework, + "trainable": bool, + "trainable_tested_platforms": List[str], + "trainable_credentials_required": bool, + "trainable_parameters": List[ApiParameter], + "servable": bool, + "servable_tested_platforms": List[str], + "servable_credentials_required": bool, + "servable_parameters": List[ApiParameter], } self.attribute_map = { - 'id': 'id', - 'created_at': 'created_at', - 'name': 'name', - 'description': 'description', - 'featured': 'featured', - 'publish_approved': 'publish_approved', - 'related_assets': 'related_assets', - 'filter_categories': 'filter_categories', - 'domain': 'domain', - 'labels': 'labels', - 'framework': 'framework', - 'trainable': 'trainable', - 'trainable_tested_platforms': 'trainable_tested_platforms', - 'trainable_credentials_required': 'trainable_credentials_required', - 'trainable_parameters': 'trainable_parameters', - 'servable': 'servable', - 'servable_tested_platforms': 'servable_tested_platforms', - 
'servable_credentials_required': 'servable_credentials_required', - 'servable_parameters': 'servable_parameters' + "id": "id", + "created_at": "created_at", + "name": "name", + "description": "description", + "featured": "featured", + "publish_approved": "publish_approved", + "related_assets": "related_assets", + "filter_categories": "filter_categories", + "domain": "domain", + "labels": "labels", + "framework": "framework", + "trainable": "trainable", + "trainable_tested_platforms": "trainable_tested_platforms", + "trainable_credentials_required": "trainable_credentials_required", + "trainable_parameters": "trainable_parameters", + "servable": "servable", + "servable_tested_platforms": "servable_tested_platforms", + "servable_credentials_required": "servable_credentials_required", + "servable_parameters": "servable_parameters", } self._id = id @@ -127,7 +150,7 @@ def __init__(self, id: str=None, created_at: datetime=None, name: str=None, desc self._servable_parameters = servable_parameters @classmethod - def from_dict(cls, dikt) -> 'ApiModel': + def from_dict(cls, dikt) -> "ApiModel": """Returns the dict as a model :param dikt: A dict. 
@@ -198,7 +221,9 @@ def name(self, name: str): :type name: str """ if name is None: - raise ValueError("Invalid value for `name`, must not be `None`") # noqa: E501 + raise ValueError( + "Invalid value for `name`, must not be `None`" + ) self._name = name @@ -221,7 +246,9 @@ def description(self, description: str): :type description: str """ if description is None: - raise ValueError("Invalid value for `description`, must not be `None`") # noqa: E501 + raise ValueError( + "Invalid value for `description`, must not be `None`" + ) self._description = description @@ -370,7 +397,9 @@ def framework(self, framework: ApiModelFramework): :type framework: ApiModelFramework """ if framework is None: - raise ValueError("Invalid value for `framework`, must not be `None`") # noqa: E501 + raise ValueError( + "Invalid value for `framework`, must not be `None`" + ) self._framework = framework diff --git a/api/server/swagger_server/models/api_model_framework.py b/api/server/swagger_server/models/api_model_framework.py index cae4bd4d..91abbd35 100644 --- a/api/server/swagger_server/models/api_model_framework.py +++ b/api/server/swagger_server/models/api_model_framework.py @@ -9,8 +9,10 @@ from typing import List, Dict # noqa: F401 from swagger_server.models.base_model_ import Model -from swagger_server.models.api_model_framework_runtimes import ApiModelFrameworkRuntimes # noqa: F401,E501 -from swagger_server import util +from swagger_server.models.api_model_framework_runtimes import ( + ApiModelFrameworkRuntimes, +) +from swagger_server import util # noqa: F401 class ApiModelFramework(Model): @@ -19,7 +21,12 @@ class ApiModelFramework(Model): Do not edit the class manually. 
""" - def __init__(self, name: str=None, version: str=None, runtimes: ApiModelFrameworkRuntimes=None): # noqa: E501 + def __init__( + self, + name: str = None, + version: str = None, + runtimes: ApiModelFrameworkRuntimes = None, + ): # noqa: E501 """ApiModelFramework - a model defined in Swagger :param name: The name of this ApiModelFramework. # noqa: E501 @@ -30,15 +37,15 @@ def __init__(self, name: str=None, version: str=None, runtimes: ApiModelFramewor :type runtimes: ApiModelFrameworkRuntimes """ self.swagger_types = { - 'name': str, - 'version': str, - 'runtimes': ApiModelFrameworkRuntimes + "name": str, + "version": str, + "runtimes": ApiModelFrameworkRuntimes, } self.attribute_map = { - 'name': 'name', - 'version': 'version', - 'runtimes': 'runtimes' + "name": "name", + "version": "version", + "runtimes": "runtimes", } self._name = name @@ -46,7 +53,7 @@ def __init__(self, name: str=None, version: str=None, runtimes: ApiModelFramewor self._runtimes = runtimes @classmethod - def from_dict(cls, dikt) -> 'ApiModelFramework': + def from_dict(cls, dikt) -> "ApiModelFramework": """Returns the dict as a model :param dikt: A dict. 
@@ -75,7 +82,9 @@ def name(self, name: str): :type name: str """ if name is None: - raise ValueError("Invalid value for `name`, must not be `None`") # noqa: E501 + raise ValueError( + "Invalid value for `name`, must not be `None`" + ) self._name = name diff --git a/api/server/swagger_server/models/api_model_framework_runtimes.py b/api/server/swagger_server/models/api_model_framework_runtimes.py index 6293929e..dedb227f 100644 --- a/api/server/swagger_server/models/api_model_framework_runtimes.py +++ b/api/server/swagger_server/models/api_model_framework_runtimes.py @@ -9,7 +9,7 @@ from typing import List, Dict # noqa: F401 from swagger_server.models.base_model_ import Model -from swagger_server import util +from swagger_server import util # noqa: F401 class ApiModelFrameworkRuntimes(Model): @@ -18,7 +18,7 @@ class ApiModelFrameworkRuntimes(Model): Do not edit the class manually. """ - def __init__(self, name: str=None, version: str=None): # noqa: E501 + def __init__(self, name: str = None, version: str = None): # noqa: E501 """ApiModelFrameworkRuntimes - a model defined in Swagger :param name: The name of this ApiModelFrameworkRuntimes. # noqa: E501 @@ -26,21 +26,15 @@ def __init__(self, name: str=None, version: str=None): # noqa: E501 :param version: The version of this ApiModelFrameworkRuntimes. # noqa: E501 :type version: str """ - self.swagger_types = { - 'name': str, - 'version': str - } + self.swagger_types = {"name": str, "version": str} - self.attribute_map = { - 'name': 'name', - 'version': 'version' - } + self.attribute_map = {"name": "name", "version": "version"} self._name = name self._version = version @classmethod - def from_dict(cls, dikt) -> 'ApiModelFrameworkRuntimes': + def from_dict(cls, dikt) -> "ApiModelFrameworkRuntimes": """Returns the dict as a model :param dikt: A dict. 
diff --git a/api/server/swagger_server/models/api_model_script.py b/api/server/swagger_server/models/api_model_script.py index 91099a44..73158281 100644 --- a/api/server/swagger_server/models/api_model_script.py +++ b/api/server/swagger_server/models/api_model_script.py @@ -9,7 +9,7 @@ from typing import List, Dict # noqa: F401 from swagger_server.models.base_model_ import Model -from swagger_server import util +from swagger_server import util # noqa: F401 class ApiModelScript(Model): @@ -18,7 +18,12 @@ class ApiModelScript(Model): Do not edit the class manually. """ - def __init__(self, pipeline_stage: str=None, execution_platform: str=None, script_code: str=None): # noqa: E501 + def __init__( + self, + pipeline_stage: str = None, + execution_platform: str = None, + script_code: str = None, + ): # noqa: E501 """ApiModelScript - a model defined in Swagger :param pipeline_stage: The pipeline_stage of this ApiModelScript. # noqa: E501 @@ -29,15 +34,15 @@ def __init__(self, pipeline_stage: str=None, execution_platform: str=None, scrip :type script_code: str """ self.swagger_types = { - 'pipeline_stage': str, - 'execution_platform': str, - 'script_code': str + "pipeline_stage": str, + "execution_platform": str, + "script_code": str, } self.attribute_map = { - 'pipeline_stage': 'pipeline_stage', - 'execution_platform': 'execution_platform', - 'script_code': 'script_code' + "pipeline_stage": "pipeline_stage", + "execution_platform": "execution_platform", + "script_code": "script_code", } self._pipeline_stage = pipeline_stage @@ -45,7 +50,7 @@ def __init__(self, pipeline_stage: str=None, execution_platform: str=None, scrip self._script_code = script_code @classmethod - def from_dict(cls, dikt) -> 'ApiModelScript': + def from_dict(cls, dikt) -> "ApiModelScript": """Returns the dict as a model :param dikt: A dict. 
@@ -76,7 +81,9 @@ def pipeline_stage(self, pipeline_stage: str): :type pipeline_stage: str """ if pipeline_stage is None: - raise ValueError("Invalid value for `pipeline_stage`, must not be `None`") # noqa: E501 + raise ValueError( + "Invalid value for `pipeline_stage`, must not be `None`" + ) self._pipeline_stage = pipeline_stage @@ -101,7 +108,9 @@ def execution_platform(self, execution_platform: str): :type execution_platform: str """ if execution_platform is None: - raise ValueError("Invalid value for `execution_platform`, must not be `None`") # noqa: E501 + raise ValueError( + "Invalid value for `execution_platform`, must not be `None`" + ) self._execution_platform = execution_platform @@ -126,6 +135,8 @@ def script_code(self, script_code: str): :type script_code: str """ if script_code is None: - raise ValueError("Invalid value for `script_code`, must not be `None`") # noqa: E501 + raise ValueError( + "Invalid value for `script_code`, must not be `None`" + ) self._script_code = script_code diff --git a/api/server/swagger_server/models/api_notebook.py b/api/server/swagger_server/models/api_notebook.py index 48386008..683201e9 100644 --- a/api/server/swagger_server/models/api_notebook.py +++ b/api/server/swagger_server/models/api_notebook.py @@ -11,7 +11,7 @@ from swagger_server.models.api_asset import ApiAsset from swagger_server.models.api_metadata import ApiMetadata # noqa: F401,E501 from swagger_server.models.api_parameter import ApiParameter # noqa: F401,E501 -from swagger_server import util +from swagger_server import util # noqa: F401 class ApiNotebook(ApiAsset): @@ -20,7 +20,20 @@ class ApiNotebook(ApiAsset): Do not edit the class manually. 
""" - def __init__(self, id: str=None, created_at: datetime=None, name: str=None, description: str=None, featured: bool=None, publish_approved: bool=None, related_assets: List[str]=None, filter_categories: Dict[str, str]=None, url: str=None, metadata: ApiMetadata=None, parameters: List[ApiParameter]=None): # noqa: E501 + def __init__( + self, + id: str = None, + created_at: datetime = None, + name: str = None, + description: str = None, + featured: bool = None, + publish_approved: bool = None, + related_assets: List[str] = None, + filter_categories: Dict[str, str] = None, + url: str = None, + metadata: ApiMetadata = None, + parameters: List[ApiParameter] = None, + ): # noqa: E501 """ApiNotebook - a model defined in Swagger :param id: The id of this ApiNotebook. # noqa: E501 @@ -47,31 +60,31 @@ def __init__(self, id: str=None, created_at: datetime=None, name: str=None, desc :type parameters: List[ApiParameter] """ self.swagger_types = { - 'id': str, - 'created_at': datetime, - 'name': str, - 'description': str, - 'featured': bool, - 'publish_approved': bool, - 'related_assets': List[str], - 'filter_categories': Dict[str, str], - 'url': str, - 'metadata': ApiMetadata, - 'parameters': List[ApiParameter] + "id": str, + "created_at": datetime, + "name": str, + "description": str, + "featured": bool, + "publish_approved": bool, + "related_assets": List[str], + "filter_categories": Dict[str, str], + "url": str, + "metadata": ApiMetadata, + "parameters": List[ApiParameter], } self.attribute_map = { - 'id': 'id', - 'created_at': 'created_at', - 'name': 'name', - 'description': 'description', - 'featured': 'featured', - 'publish_approved': 'publish_approved', - 'related_assets': 'related_assets', - 'filter_categories': 'filter_categories', - 'url': 'url', - 'metadata': 'metadata', - 'parameters': 'parameters' + "id": "id", + "created_at": "created_at", + "name": "name", + "description": "description", + "featured": "featured", + "publish_approved": "publish_approved", + 
"related_assets": "related_assets", + "filter_categories": "filter_categories", + "url": "url", + "metadata": "metadata", + "parameters": "parameters", } self._id = id @@ -87,7 +100,7 @@ def __init__(self, id: str=None, created_at: datetime=None, name: str=None, desc self._parameters = parameters @classmethod - def from_dict(cls, dikt) -> 'ApiNotebook': + def from_dict(cls, dikt) -> "ApiNotebook": """Returns the dict as a model :param dikt: A dict. @@ -158,7 +171,9 @@ def name(self, name: str): :type name: str """ if name is None: - raise ValueError("Invalid value for `name`, must not be `None`") # noqa: E501 + raise ValueError( + "Invalid value for `name`, must not be `None`" + ) self._name = name @@ -181,7 +196,9 @@ def description(self, description: str): :type description: str """ if description is None: - raise ValueError("Invalid value for `description`, must not be `None`") # noqa: E501 + raise ValueError( + "Invalid value for `description`, must not be `None`" + ) self._description = description @@ -290,7 +307,9 @@ def url(self, url: str): :type url: str """ if url is None: - raise ValueError("Invalid value for `url`, must not be `None`") # noqa: E501 + raise ValueError( + "Invalid value for `url`, must not be `None`" + ) self._url = url diff --git a/api/server/swagger_server/models/api_parameter.py b/api/server/swagger_server/models/api_parameter.py index 9f8de072..62fe0a85 100644 --- a/api/server/swagger_server/models/api_parameter.py +++ b/api/server/swagger_server/models/api_parameter.py @@ -10,7 +10,7 @@ from swagger_server.models.base_model_ import Model from swagger_server.models.any_value import AnyValue # noqa: F401,E501 -from swagger_server import util +from swagger_server import util # noqa: F401 class ApiParameter(Model): @@ -19,7 +19,13 @@ class ApiParameter(Model): Do not edit the class manually. 
""" - def __init__(self, name: str=None, description: str=None, default: AnyValue=None, value: AnyValue=None): # noqa: E501 + def __init__( + self, + name: str = None, + description: str = None, + default: AnyValue = None, + value: AnyValue = None, + ): # noqa: E501 """ApiParameter - a model defined in Swagger :param name: The name of this ApiParameter. # noqa: E501 @@ -32,17 +38,17 @@ def __init__(self, name: str=None, description: str=None, default: AnyValue=None :type value: AnyValue """ self.swagger_types = { - 'name': str, - 'description': str, - 'default': AnyValue, - 'value': AnyValue + "name": str, + "description": str, + "default": AnyValue, + "value": AnyValue, } self.attribute_map = { - 'name': 'name', - 'description': 'description', - 'default': 'default', - 'value': 'value' + "name": "name", + "description": "description", + "default": "default", + "value": "value", } self._name = name @@ -51,7 +57,7 @@ def __init__(self, name: str=None, description: str=None, default: AnyValue=None self._value = value @classmethod - def from_dict(cls, dikt) -> 'ApiParameter': + def from_dict(cls, dikt) -> "ApiParameter": """Returns the dict as a model :param dikt: A dict. 
@@ -80,7 +86,9 @@ def name(self, name: str): :type name: str """ if name is None: - raise ValueError("Invalid value for `name`, must not be `None`") # noqa: E501 + raise ValueError( + "Invalid value for `name`, must not be `None`" + ) self._name = name diff --git a/api/server/swagger_server/models/api_pipeline.py b/api/server/swagger_server/models/api_pipeline.py index 9121d601..b77c3227 100644 --- a/api/server/swagger_server/models/api_pipeline.py +++ b/api/server/swagger_server/models/api_pipeline.py @@ -10,7 +10,7 @@ from swagger_server.models.base_model_ import Model from swagger_server.models.api_parameter import ApiParameter # noqa: F401,E501 -from swagger_server import util +from swagger_server import util # noqa: F401 class ApiPipeline(Model): @@ -19,7 +19,17 @@ class ApiPipeline(Model): Do not edit the class manually. """ - def __init__(self, id: str=None, created_at: datetime=None, name: str=None, description: str=None, parameters: List[ApiParameter]=None, status: str=None, default_version_id: str=None, namespace: str=None): # noqa: E501 + def __init__( + self, + id: str = None, + created_at: datetime = None, + name: str = None, + description: str = None, + parameters: List[ApiParameter] = None, + status: str = None, + default_version_id: str = None, + namespace: str = None, + ): # noqa: E501 """ApiPipeline - a model defined in Swagger :param id: The id of this ApiPipeline. 
# noqa: E501 @@ -40,25 +50,25 @@ def __init__(self, id: str=None, created_at: datetime=None, name: str=None, desc :type namespace: str """ self.swagger_types = { - 'id': str, - 'created_at': datetime, - 'name': str, - 'description': str, - 'parameters': List[ApiParameter], - 'status': str, - 'default_version_id': str, - 'namespace': str + "id": str, + "created_at": datetime, + "name": str, + "description": str, + "parameters": List[ApiParameter], + "status": str, + "default_version_id": str, + "namespace": str, } self.attribute_map = { - 'id': 'id', - 'created_at': 'created_at', - 'name': 'name', - 'description': 'description', - 'parameters': 'parameters', - 'status': 'status', - 'default_version_id': 'default_version_id', - 'namespace': 'namespace' + "id": "id", + "created_at": "created_at", + "name": "name", + "description": "description", + "parameters": "parameters", + "status": "status", + "default_version_id": "default_version_id", + "namespace": "namespace", } self._id = id @@ -71,7 +81,7 @@ def __init__(self, id: str=None, created_at: datetime=None, name: str=None, desc self._namespace = namespace @classmethod - def from_dict(cls, dikt) -> 'ApiPipeline': + def from_dict(cls, dikt) -> "ApiPipeline": """Returns the dict as a model :param dikt: A dict. @@ -213,7 +223,8 @@ def status(self, status: str): def default_version_id(self) -> str: """Gets the default_version_id of this ApiPipeline. - The default version of the pipeline. As of now, the latest version is used as default. (In the future, if desired by customers, we can allow them to set default version.) # noqa: E501 + The default version of the pipeline. As of now, the latest version is used as default. # noqa: E501 + (In the future, if desired by customers, we can allow them to set default version.) # noqa: E501 :return: The default_version_id of this ApiPipeline. 
:rtype: str @@ -224,7 +235,8 @@ def default_version_id(self) -> str: def default_version_id(self, default_version_id: str): """Sets the default_version_id of this ApiPipeline. - The default version of the pipeline. As of now, the latest version is used as default. (In the future, if desired by customers, we can allow them to set default version.) # noqa: E501 + The default version of the pipeline. As of now, the latest version is used as default. # noqa: E501 + (In the future, if desired by customers, we can allow them to set default version.) # noqa: E501 :param default_version_id: The default_version_id of this ApiPipeline. :type default_version_id: str diff --git a/api/server/swagger_server/models/api_pipeline_custom.py b/api/server/swagger_server/models/api_pipeline_custom.py index e6841e5b..ddf58051 100644 --- a/api/server/swagger_server/models/api_pipeline_custom.py +++ b/api/server/swagger_server/models/api_pipeline_custom.py @@ -10,8 +10,10 @@ from swagger_server.models.base_model_ import Model from swagger_server.models.api_pipeline_dag import ApiPipelineDAG # noqa: F401,E501 -from swagger_server.models.api_pipeline_inputs import ApiPipelineInputs # noqa: F401,E501 -from swagger_server import util +from swagger_server.models.api_pipeline_inputs import ( + ApiPipelineInputs, +) +from swagger_server import util # noqa: F401 class ApiPipelineCustom(Model): @@ -20,7 +22,13 @@ class ApiPipelineCustom(Model): Do not edit the class manually. """ - def __init__(self, dag: ApiPipelineDAG=None, inputs: ApiPipelineInputs=None, name: str=None, description: str=None): # noqa: E501 + def __init__( + self, + dag: ApiPipelineDAG = None, + inputs: ApiPipelineInputs = None, + name: str = None, + description: str = None, + ): # noqa: E501 """ApiPipelineCustom - a model defined in Swagger :param dag: The dag of this ApiPipelineCustom. 
# noqa: E501 @@ -33,17 +41,17 @@ def __init__(self, dag: ApiPipelineDAG=None, inputs: ApiPipelineInputs=None, nam :type description: str """ self.swagger_types = { - 'dag': ApiPipelineDAG, - 'inputs': ApiPipelineInputs, - 'name': str, - 'description': str + "dag": ApiPipelineDAG, + "inputs": ApiPipelineInputs, + "name": str, + "description": str, } self.attribute_map = { - 'dag': 'dag', - 'inputs': 'inputs', - 'name': 'name', - 'description': 'description' + "dag": "dag", + "inputs": "inputs", + "name": "name", + "description": "description", } self._dag = dag @@ -52,7 +60,7 @@ def __init__(self, dag: ApiPipelineDAG=None, inputs: ApiPipelineInputs=None, nam self._description = description @classmethod - def from_dict(cls, dikt) -> 'ApiPipelineCustom': + def from_dict(cls, dikt) -> "ApiPipelineCustom": """Returns the dict as a model :param dikt: A dict. @@ -81,7 +89,9 @@ def dag(self, dag: ApiPipelineDAG): :type dag: ApiPipelineDAG """ if dag is None: - raise ValueError("Invalid value for `dag`, must not be `None`") # noqa: E501 + raise ValueError( + "Invalid value for `dag`, must not be `None`" + ) self._dag = dag @@ -127,7 +137,9 @@ def name(self, name: str): :type name: str """ if name is None: - raise ValueError("Invalid value for `name`, must not be `None`") # noqa: E501 + raise ValueError( + "Invalid value for `name`, must not be `None`" + ) self._name = name diff --git a/api/server/swagger_server/models/api_pipeline_custom_run_payload.py b/api/server/swagger_server/models/api_pipeline_custom_run_payload.py index 6aadd22d..d5081ab4 100644 --- a/api/server/swagger_server/models/api_pipeline_custom_run_payload.py +++ b/api/server/swagger_server/models/api_pipeline_custom_run_payload.py @@ -9,9 +9,11 @@ from typing import List, Dict # noqa: F401 from swagger_server.models.base_model_ import Model -from swagger_server.models.api_pipeline_custom import ApiPipelineCustom # noqa: F401,E501 +from swagger_server.models.api_pipeline_custom import ( + ApiPipelineCustom, 
+) from swagger_server.models.dictionary import Dictionary # noqa: F401,E501 -from swagger_server import util +from swagger_server import util # noqa: F401 class ApiPipelineCustomRunPayload(Model): @@ -20,7 +22,11 @@ class ApiPipelineCustomRunPayload(Model): Do not edit the class manually. """ - def __init__(self, custom_pipeline: ApiPipelineCustom=None, run_parameters: Dictionary=None): # noqa: E501 + def __init__( + self, + custom_pipeline: ApiPipelineCustom = None, + run_parameters: Dictionary = None, + ): # noqa: E501 """ApiPipelineCustomRunPayload - a model defined in Swagger :param custom_pipeline: The custom_pipeline of this ApiPipelineCustomRunPayload. # noqa: E501 @@ -29,20 +35,20 @@ def __init__(self, custom_pipeline: ApiPipelineCustom=None, run_parameters: Dict :type run_parameters: Dictionary """ self.swagger_types = { - 'custom_pipeline': ApiPipelineCustom, - 'run_parameters': Dictionary + "custom_pipeline": ApiPipelineCustom, + "run_parameters": Dictionary, } self.attribute_map = { - 'custom_pipeline': 'custom_pipeline', - 'run_parameters': 'run_parameters' + "custom_pipeline": "custom_pipeline", + "run_parameters": "run_parameters", } self._custom_pipeline = custom_pipeline self._run_parameters = run_parameters @classmethod - def from_dict(cls, dikt) -> 'ApiPipelineCustomRunPayload': + def from_dict(cls, dikt) -> "ApiPipelineCustomRunPayload": """Returns the dict as a model :param dikt: A dict. 
diff --git a/api/server/swagger_server/models/api_pipeline_dag.py b/api/server/swagger_server/models/api_pipeline_dag.py index 7aef7a89..2f40b963 100644 --- a/api/server/swagger_server/models/api_pipeline_dag.py +++ b/api/server/swagger_server/models/api_pipeline_dag.py @@ -10,7 +10,7 @@ from swagger_server.models.base_model_ import Model from swagger_server.models.api_pipeline_task import ApiPipelineTask # noqa: F401,E501 -from swagger_server import util +from swagger_server import util # noqa: F401 class ApiPipelineDAG(Model): @@ -19,24 +19,20 @@ class ApiPipelineDAG(Model): Do not edit the class manually. """ - def __init__(self, tasks: List[ApiPipelineTask]=None): # noqa: E501 + def __init__(self, tasks: List[ApiPipelineTask] = None): # noqa: E501 """ApiPipelineDAG - a model defined in Swagger :param tasks: The tasks of this ApiPipelineDAG. # noqa: E501 :type tasks: List[ApiPipelineTask] """ - self.swagger_types = { - 'tasks': List[ApiPipelineTask] - } + self.swagger_types = {"tasks": List[ApiPipelineTask]} - self.attribute_map = { - 'tasks': 'tasks' - } + self.attribute_map = {"tasks": "tasks"} self._tasks = tasks @classmethod - def from_dict(cls, dikt) -> 'ApiPipelineDAG': + def from_dict(cls, dikt) -> "ApiPipelineDAG": """Returns the dict as a model :param dikt: A dict. 
diff --git a/api/server/swagger_server/models/api_pipeline_extended.py b/api/server/swagger_server/models/api_pipeline_extended.py index 63b5c6b5..95c7aec3 100644 --- a/api/server/swagger_server/models/api_pipeline_extended.py +++ b/api/server/swagger_server/models/api_pipeline_extended.py @@ -8,11 +8,13 @@ from typing import List, Dict # noqa: F401 -from swagger_server.models.base_model_ import Model +from swagger_server.models.base_model_ import Model # noqa: F401 from swagger_server.models.api_parameter import ApiParameter # noqa: F401,E501 from swagger_server.models.api_pipeline import ApiPipeline # noqa: F401,E501 -from swagger_server.models.api_pipeline_extension import ApiPipelineExtension # noqa: F401,E501 -from swagger_server import util +from swagger_server.models.api_pipeline_extension import ( + ApiPipelineExtension, +) +from swagger_server import util # noqa: F401 class ApiPipelineExtended(ApiPipeline, ApiPipelineExtension): @@ -21,7 +23,20 @@ class ApiPipelineExtended(ApiPipeline, ApiPipelineExtension): Do not edit the class manually. """ - def __init__(self, id: str=None, created_at: datetime=None, name: str=None, description: str=None, parameters: List[ApiParameter]=None, status: str=None, default_version_id: str=None, namespace: str=None, annotations: Dict[str, str]=None, featured: bool=None, publish_approved: bool=None): # noqa: E501 + def __init__( + self, + id: str = None, + created_at: datetime = None, + name: str = None, + description: str = None, + parameters: List[ApiParameter] = None, + status: str = None, + default_version_id: str = None, + namespace: str = None, + annotations: Dict[str, str] = None, + featured: bool = None, + publish_approved: bool = None, + ): # noqa: E501 """ApiPipelineExtended - a model defined in Swagger :param id: The id of this ApiPipelineExtended. 
# noqa: E501 @@ -48,31 +63,31 @@ def __init__(self, id: str=None, created_at: datetime=None, name: str=None, desc :type publish_approved: bool """ self.swagger_types = { - 'id': str, - 'created_at': datetime, - 'name': str, - 'description': str, - 'parameters': List[ApiParameter], - 'status': str, - 'default_version_id': str, - 'namespace': str, - 'annotations': Dict[str, str], - 'featured': bool, - 'publish_approved': bool + "id": str, + "created_at": datetime, + "name": str, + "description": str, + "parameters": List[ApiParameter], + "status": str, + "default_version_id": str, + "namespace": str, + "annotations": Dict[str, str], + "featured": bool, + "publish_approved": bool, } self.attribute_map = { - 'id': 'id', - 'created_at': 'created_at', - 'name': 'name', - 'description': 'description', - 'parameters': 'parameters', - 'status': 'status', - 'default_version_id': 'default_version_id', - 'namespace': 'namespace', - 'annotations': 'annotations', - 'featured': 'featured', - 'publish_approved': 'publish_approved' + "id": "id", + "created_at": "created_at", + "name": "name", + "description": "description", + "parameters": "parameters", + "status": "status", + "default_version_id": "default_version_id", + "namespace": "namespace", + "annotations": "annotations", + "featured": "featured", + "publish_approved": "publish_approved", } self._id = id @@ -88,7 +103,7 @@ def __init__(self, id: str=None, created_at: datetime=None, name: str=None, desc self._publish_approved = publish_approved @classmethod - def from_dict(cls, dikt) -> 'ApiPipelineExtended': + def from_dict(cls, dikt) -> "ApiPipelineExtended": """Returns the dict as a model :param dikt: A dict. @@ -230,7 +245,8 @@ def from_dict(cls, dikt) -> 'ApiPipelineExtended': # def default_version_id(self) -> str: # """Gets the default_version_id of this ApiPipelineExtended. # - # The default version of the pipeline. As of now, the latest version is used as default. 
(In the future, if desired by customers, we can allow them to set default version.) # noqa: E501 + # The default version of the pipeline. As of now, the latest version is used as default. # noqa: E501 + # (In the future, if desired by customers, we can allow them to set default version.) # noqa: E501 # # :return: The default_version_id of this ApiPipelineExtended. # :rtype: str @@ -241,7 +257,8 @@ def from_dict(cls, dikt) -> 'ApiPipelineExtended': # def default_version_id(self, default_version_id: str): # """Sets the default_version_id of this ApiPipelineExtended. # - # The default version of the pipeline. As of now, the latest version is used as default. (In the future, if desired by customers, we can allow them to set default version.) # noqa: E501 + # The default version of the pipeline. As of now, the latest version is used as default. # noqa: E501 + # (In the future, if desired by customers, we can allow them to set default version.) # noqa: E501 # # :param default_version_id: The default_version_id of this ApiPipelineExtended. # :type default_version_id: str diff --git a/api/server/swagger_server/models/api_pipeline_extension.py b/api/server/swagger_server/models/api_pipeline_extension.py index d0d9c26c..6e0307bf 100644 --- a/api/server/swagger_server/models/api_pipeline_extension.py +++ b/api/server/swagger_server/models/api_pipeline_extension.py @@ -9,7 +9,7 @@ from typing import List, Dict # noqa: F401 from swagger_server.models.base_model_ import Model -from swagger_server import util +from swagger_server import util # noqa: F401 class ApiPipelineExtension(Model): @@ -18,7 +18,13 @@ class ApiPipelineExtension(Model): Do not edit the class manually. 
""" - def __init__(self, id: str=None, annotations: Dict[str, str]=None, featured: bool=None, publish_approved: bool=None): # noqa: E501 + def __init__( + self, + id: str = None, + annotations: Dict[str, str] = None, + featured: bool = None, + publish_approved: bool = None, + ): # noqa: E501 """ApiPipelineExtension - a model defined in Swagger :param id: The id of this ApiPipelineExtension. # noqa: E501 @@ -31,17 +37,17 @@ def __init__(self, id: str=None, annotations: Dict[str, str]=None, featured: boo :type publish_approved: bool """ self.swagger_types = { - 'id': str, - 'annotations': Dict[str, str], - 'featured': bool, - 'publish_approved': bool + "id": str, + "annotations": Dict[str, str], + "featured": bool, + "publish_approved": bool, } self.attribute_map = { - 'id': 'id', - 'annotations': 'annotations', - 'featured': 'featured', - 'publish_approved': 'publish_approved' + "id": "id", + "annotations": "annotations", + "featured": "featured", + "publish_approved": "publish_approved", } self._id = id @@ -50,7 +56,7 @@ def __init__(self, id: str=None, annotations: Dict[str, str]=None, featured: boo self._publish_approved = publish_approved @classmethod - def from_dict(cls, dikt) -> 'ApiPipelineExtension': + def from_dict(cls, dikt) -> "ApiPipelineExtension": """Returns the dict as a model :param dikt: A dict. diff --git a/api/server/swagger_server/models/api_pipeline_inputs.py b/api/server/swagger_server/models/api_pipeline_inputs.py index 1a437471..abeb988f 100644 --- a/api/server/swagger_server/models/api_pipeline_inputs.py +++ b/api/server/swagger_server/models/api_pipeline_inputs.py @@ -10,7 +10,7 @@ from swagger_server.models.base_model_ import Model from swagger_server.models.api_parameter import ApiParameter # noqa: F401,E501 -from swagger_server import util +from swagger_server import util # noqa: F401 class ApiPipelineInputs(Model): @@ -19,24 +19,20 @@ class ApiPipelineInputs(Model): Do not edit the class manually. 
""" - def __init__(self, parameters: List[ApiParameter]=None): # noqa: E501 + def __init__(self, parameters: List[ApiParameter] = None): # noqa: E501 """ApiPipelineInputs - a model defined in Swagger :param parameters: The parameters of this ApiPipelineInputs. # noqa: E501 :type parameters: List[ApiParameter] """ - self.swagger_types = { - 'parameters': List[ApiParameter] - } + self.swagger_types = {"parameters": List[ApiParameter]} - self.attribute_map = { - 'parameters': 'parameters' - } + self.attribute_map = {"parameters": "parameters"} self._parameters = parameters @classmethod - def from_dict(cls, dikt) -> 'ApiPipelineInputs': + def from_dict(cls, dikt) -> "ApiPipelineInputs": """Returns the dict as a model :param dikt: A dict. diff --git a/api/server/swagger_server/models/api_pipeline_task.py b/api/server/swagger_server/models/api_pipeline_task.py index 8fd42228..d805df03 100644 --- a/api/server/swagger_server/models/api_pipeline_task.py +++ b/api/server/swagger_server/models/api_pipeline_task.py @@ -9,8 +9,10 @@ from typing import List, Dict # noqa: F401 from swagger_server.models.base_model_ import Model -from swagger_server.models.api_pipeline_task_arguments import ApiPipelineTaskArguments # noqa: F401,E501 -from swagger_server import util +from swagger_server.models.api_pipeline_task_arguments import ( + ApiPipelineTaskArguments, +) +from swagger_server import util # noqa: F401 class ApiPipelineTask(Model): @@ -19,7 +21,14 @@ class ApiPipelineTask(Model): Do not edit the class manually. """ - def __init__(self, name: str=None, artifact_type: str=None, artifact_id: str=None, arguments: ApiPipelineTaskArguments=None, dependencies: List[str]=None): # noqa: E501 + def __init__( + self, + name: str = None, + artifact_type: str = None, + artifact_id: str = None, + arguments: ApiPipelineTaskArguments = None, + dependencies: List[str] = None, + ): # noqa: E501 """ApiPipelineTask - a model defined in Swagger :param name: The name of this ApiPipelineTask. 
# noqa: E501 @@ -34,19 +43,19 @@ def __init__(self, name: str=None, artifact_type: str=None, artifact_id: str=Non :type dependencies: List[str] """ self.swagger_types = { - 'name': str, - 'artifact_type': str, - 'artifact_id': str, - 'arguments': ApiPipelineTaskArguments, - 'dependencies': List[str] + "name": str, + "artifact_type": str, + "artifact_id": str, + "arguments": ApiPipelineTaskArguments, + "dependencies": List[str], } self.attribute_map = { - 'name': 'name', - 'artifact_type': 'artifact_type', - 'artifact_id': 'artifact_id', - 'arguments': 'arguments', - 'dependencies': 'dependencies' + "name": "name", + "artifact_type": "artifact_type", + "artifact_id": "artifact_id", + "arguments": "arguments", + "dependencies": "dependencies", } self._name = name @@ -56,7 +65,7 @@ def __init__(self, name: str=None, artifact_type: str=None, artifact_id: str=Non self._dependencies = dependencies @classmethod - def from_dict(cls, dikt) -> 'ApiPipelineTask': + def from_dict(cls, dikt) -> "ApiPipelineTask": """Returns the dict as a model :param dikt: A dict. 
@@ -108,7 +117,9 @@ def artifact_type(self, artifact_type: str): :type artifact_type: str """ if artifact_type is None: - raise ValueError("Invalid value for `artifact_type`, must not be `None`") # noqa: E501 + raise ValueError( + "Invalid value for `artifact_type`, must not be `None`" + ) self._artifact_type = artifact_type @@ -133,7 +144,9 @@ def artifact_id(self, artifact_id: str): :type artifact_id: str """ if artifact_id is None: - raise ValueError("Invalid value for `artifact_id`, must not be `None`") # noqa: E501 + raise ValueError( + "Invalid value for `artifact_id`, must not be `None`" + ) self._artifact_id = artifact_id diff --git a/api/server/swagger_server/models/api_pipeline_task_arguments.py b/api/server/swagger_server/models/api_pipeline_task_arguments.py index 321e6dcd..21a112d6 100644 --- a/api/server/swagger_server/models/api_pipeline_task_arguments.py +++ b/api/server/swagger_server/models/api_pipeline_task_arguments.py @@ -10,7 +10,7 @@ from swagger_server.models.base_model_ import Model from swagger_server.models.api_parameter import ApiParameter # noqa: F401,E501 -from swagger_server import util +from swagger_server import util # noqa: F401 class ApiPipelineTaskArguments(Model): @@ -19,24 +19,20 @@ class ApiPipelineTaskArguments(Model): Do not edit the class manually. """ - def __init__(self, parameters: List[ApiParameter]=None): # noqa: E501 + def __init__(self, parameters: List[ApiParameter] = None): # noqa: E501 """ApiPipelineTaskArguments - a model defined in Swagger :param parameters: The parameters of this ApiPipelineTaskArguments. 
# noqa: E501 :type parameters: List[ApiParameter] """ - self.swagger_types = { - 'parameters': List[ApiParameter] - } + self.swagger_types = {"parameters": List[ApiParameter]} - self.attribute_map = { - 'parameters': 'parameters' - } + self.attribute_map = {"parameters": "parameters"} self._parameters = parameters @classmethod - def from_dict(cls, dikt) -> 'ApiPipelineTaskArguments': + def from_dict(cls, dikt) -> "ApiPipelineTaskArguments": """Returns the dict as a model :param dikt: A dict. diff --git a/api/server/swagger_server/models/api_run_code_response.py b/api/server/swagger_server/models/api_run_code_response.py index 3437a982..33e77bb7 100644 --- a/api/server/swagger_server/models/api_run_code_response.py +++ b/api/server/swagger_server/models/api_run_code_response.py @@ -9,7 +9,7 @@ from typing import List, Dict # noqa: F401 from swagger_server.models.base_model_ import Model -from swagger_server import util +from swagger_server import util # noqa: F401 class ApiRunCodeResponse(Model): @@ -18,7 +18,9 @@ class ApiRunCodeResponse(Model): Do not edit the class manually. """ - def __init__(self, run_url: str=None, run_output_location: str=None): # noqa: E501 + def __init__( + self, run_url: str = None, run_output_location: str = None + ): # noqa: E501 """ApiRunCodeResponse - a model defined in Swagger :param run_url: The run_url of this ApiRunCodeResponse. # noqa: E501 @@ -26,21 +28,18 @@ def __init__(self, run_url: str=None, run_output_location: str=None): # noqa: E :param run_output_location: The run_output_location of this ApiRunCodeResponse. 
# noqa: E501 :type run_output_location: str """ - self.swagger_types = { - 'run_url': str, - 'run_output_location': str - } + self.swagger_types = {"run_url": str, "run_output_location": str} self.attribute_map = { - 'run_url': 'run_url', - 'run_output_location': 'run_output_location' + "run_url": "run_url", + "run_output_location": "run_output_location", } self._run_url = run_url self._run_output_location = run_output_location @classmethod - def from_dict(cls, dikt) -> 'ApiRunCodeResponse': + def from_dict(cls, dikt) -> "ApiRunCodeResponse": """Returns the dict as a model :param dikt: A dict. @@ -71,7 +70,9 @@ def run_url(self, run_url: str): :type run_url: str """ if run_url is None: - raise ValueError("Invalid value for `run_url`, must not be `None`") # noqa: E501 + raise ValueError( + "Invalid value for `run_url`, must not be `None`" + ) self._run_url = run_url diff --git a/api/server/swagger_server/models/api_settings.py b/api/server/swagger_server/models/api_settings.py index ad1dffec..f6db5587 100644 --- a/api/server/swagger_server/models/api_settings.py +++ b/api/server/swagger_server/models/api_settings.py @@ -9,8 +9,10 @@ from typing import List, Dict # noqa: F401 from swagger_server.models.base_model_ import Model -from swagger_server.models.api_settings_section import ApiSettingsSection # noqa: F401,E501 -from swagger_server import util +from swagger_server.models.api_settings_section import ( # noqa: F401 + ApiSettingsSection, +) +from swagger_server import util # noqa: F401 class ApiSettings(Model): @@ -19,24 +21,20 @@ class ApiSettings(Model): Do not edit the class manually. """ - def __init__(self, sections: List[ApiSettingsSection]=None): # noqa: E501 + def __init__(self, sections: List[ApiSettingsSection] = None): # noqa: E501 """ApiSettings - a model defined in Swagger :param sections: The sections of this ApiSettings. 
# noqa: E501 :type sections: List[ApiSettingsSection] """ - self.swagger_types = { - 'sections': List[ApiSettingsSection] - } + self.swagger_types = {"sections": List[ApiSettingsSection]} - self.attribute_map = { - 'sections': 'sections' - } + self.attribute_map = {"sections": "sections"} self._sections = sections @classmethod - def from_dict(cls, dikt) -> 'ApiSettings': + def from_dict(cls, dikt) -> "ApiSettings": """Returns the dict as a model :param dikt: A dict. @@ -67,6 +65,8 @@ def sections(self, sections: List[ApiSettingsSection]): :type sections: List[ApiSettingsSection] """ if sections is None: - raise ValueError("Invalid value for `sections`, must not be `None`") # noqa: E501 + raise ValueError( + "Invalid value for `sections`, must not be `None`" + ) self._sections = sections diff --git a/api/server/swagger_server/models/api_settings_section.py b/api/server/swagger_server/models/api_settings_section.py index aeaec9b1..1ed40b6c 100644 --- a/api/server/swagger_server/models/api_settings_section.py +++ b/api/server/swagger_server/models/api_settings_section.py @@ -10,7 +10,7 @@ from swagger_server.models.base_model_ import Model from swagger_server.models.api_parameter import ApiParameter # noqa: F401,E501 -from swagger_server import util +from swagger_server import util # noqa: F401 class ApiSettingsSection(Model): @@ -19,7 +19,12 @@ class ApiSettingsSection(Model): Do not edit the class manually. """ - def __init__(self, name: str=None, description: str=None, settings: List[ApiParameter]=None): # noqa: E501 + def __init__( + self, + name: str = None, + description: str = None, + settings: List[ApiParameter] = None, + ): # noqa: E501 """ApiSettingsSection - a model defined in Swagger :param name: The name of this ApiSettingsSection. 
# noqa: E501 @@ -30,15 +35,15 @@ def __init__(self, name: str=None, description: str=None, settings: List[ApiPara :type settings: List[ApiParameter] """ self.swagger_types = { - 'name': str, - 'description': str, - 'settings': List[ApiParameter] + "name": str, + "description": str, + "settings": List[ApiParameter], } self.attribute_map = { - 'name': 'name', - 'description': 'description', - 'settings': 'settings' + "name": "name", + "description": "description", + "settings": "settings", } self._name = name @@ -46,7 +51,7 @@ def __init__(self, name: str=None, description: str=None, settings: List[ApiPara self._settings = settings @classmethod - def from_dict(cls, dikt) -> 'ApiSettingsSection': + def from_dict(cls, dikt) -> "ApiSettingsSection": """Returns the dict as a model :param dikt: A dict. @@ -77,7 +82,9 @@ def name(self, name: str): :type name: str """ if name is None: - raise ValueError("Invalid value for `name`, must not be `None`") # noqa: E501 + raise ValueError( + "Invalid value for `name`, must not be `None`" + ) self._name = name diff --git a/api/server/swagger_server/models/api_status.py b/api/server/swagger_server/models/api_status.py index 2a6dda2d..4c57c32a 100644 --- a/api/server/swagger_server/models/api_status.py +++ b/api/server/swagger_server/models/api_status.py @@ -10,7 +10,7 @@ from swagger_server.models.base_model_ import Model from swagger_server.models.protobuf_any import ProtobufAny # noqa: F401,E501 -from swagger_server import util +from swagger_server import util # noqa: F401 class ApiStatus(Model): @@ -19,7 +19,9 @@ class ApiStatus(Model): Do not edit the class manually. """ - def __init__(self, error: str=None, code: int=None, details: List[ProtobufAny]=None): # noqa: E501 + def __init__( + self, error: str = None, code: int = None, details: List[ProtobufAny] = None + ): # noqa: E501 """ApiStatus - a model defined in Swagger :param error: The error of this ApiStatus. 
# noqa: E501 @@ -29,24 +31,16 @@ def __init__(self, error: str=None, code: int=None, details: List[ProtobufAny]=N :param details: The details of this ApiStatus. # noqa: E501 :type details: List[ProtobufAny] """ - self.swagger_types = { - 'error': str, - 'code': int, - 'details': List[ProtobufAny] - } - - self.attribute_map = { - 'error': 'error', - 'code': 'code', - 'details': 'details' - } + self.swagger_types = {"error": str, "code": int, "details": List[ProtobufAny]} + + self.attribute_map = {"error": "error", "code": "code", "details": "details"} self._error = error self._code = code self._details = details @classmethod - def from_dict(cls, dikt) -> 'ApiStatus': + def from_dict(cls, dikt) -> "ApiStatus": """Returns the dict as a model :param dikt: A dict. diff --git a/api/server/swagger_server/models/api_url.py b/api/server/swagger_server/models/api_url.py index 71d64c33..f67d8f8b 100644 --- a/api/server/swagger_server/models/api_url.py +++ b/api/server/swagger_server/models/api_url.py @@ -9,7 +9,7 @@ from typing import List, Dict # noqa: F401 from swagger_server.models.base_model_ import Model -from swagger_server import util +from swagger_server import util # noqa: F401 class ApiUrl(Model): @@ -18,24 +18,20 @@ class ApiUrl(Model): Do not edit the class manually. """ - def __init__(self, pipeline_url: str=None): # noqa: E501 + def __init__(self, pipeline_url: str = None): # noqa: E501 """ApiUrl - a model defined in Swagger :param pipeline_url: The pipeline_url of this ApiUrl. # noqa: E501 :type pipeline_url: str """ - self.swagger_types = { - 'pipeline_url': str - } + self.swagger_types = {"pipeline_url": str} - self.attribute_map = { - 'pipeline_url': 'pipeline_url' - } + self.attribute_map = {"pipeline_url": "pipeline_url"} self._pipeline_url = pipeline_url @classmethod - def from_dict(cls, dikt) -> 'ApiUrl': + def from_dict(cls, dikt) -> "ApiUrl": """Returns the dict as a model :param dikt: A dict. 
diff --git a/api/server/swagger_server/models/base_model_.py b/api/server/swagger_server/models/base_model_.py index 6e3d52c5..f4b06491 100644 --- a/api/server/swagger_server/models/base_model_.py +++ b/api/server/swagger_server/models/base_model_.py @@ -1,15 +1,15 @@ # Copyright 2021 The MLX Contributors -# -# SPDX-License-Identifier: Apache-2.0 -import pprint +# +# SPDX-License-Identifier: Apache-2.0 +import pprint # noqa: F401 -import six +import six # noqa: F401 import typing from datetime import datetime -from swagger_server import util +from swagger_server import util # noqa: F401 -T = typing.TypeVar('T') +T = typing.TypeVar("T") class Model(object): @@ -36,18 +36,20 @@ def to_dict(self): for attr, _ in six.iteritems(self.swagger_types): value = getattr(self, attr) if isinstance(value, list): - result[attr] = list(map( - lambda x: x.to_dict() if hasattr(x, "to_dict") else x, - value - )) + result[attr] = list( + map(lambda x: x.to_dict() if hasattr(x, "to_dict") else x, value) + ) elif hasattr(value, "to_dict"): result[attr] = value.to_dict() elif isinstance(value, dict): - result[attr] = dict(map( - lambda item: (item[0], item[1].to_dict()) - if hasattr(item[1], "to_dict") else item, - value.items() - )) + result[attr] = dict( + map( + lambda item: (item[0], item[1].to_dict()) + if hasattr(item[1], "to_dict") + else item, + value.items(), + ) + ) elif isinstance(value, datetime): result[attr] = value.isoformat() else: diff --git a/api/server/swagger_server/models/dictionary.py b/api/server/swagger_server/models/dictionary.py index 652bea8b..7439ee7b 100644 --- a/api/server/swagger_server/models/dictionary.py +++ b/api/server/swagger_server/models/dictionary.py @@ -10,7 +10,7 @@ from swagger_server.models.base_model_ import Model from swagger_server.models.any_value import AnyValue # noqa: F401,E501 -from swagger_server import util +from swagger_server import util # noqa: F401 class Dictionary(Model): @@ -20,17 +20,13 @@ class Dictionary(Model): """ def 
__init__(self): # noqa: E501 - """Dictionary - a model defined in Swagger + """Dictionary - a model defined in Swagger""" + self.swagger_types = {} - """ - self.swagger_types = { - } - - self.attribute_map = { - } + self.attribute_map = {} @classmethod - def from_dict(cls, dikt) -> 'Dictionary': + def from_dict(cls, dikt) -> "Dictionary": """Returns the dict as a model :param dikt: A dict. diff --git a/api/server/swagger_server/models/protobuf_any.py b/api/server/swagger_server/models/protobuf_any.py index 437487cf..4b0ebf4d 100644 --- a/api/server/swagger_server/models/protobuf_any.py +++ b/api/server/swagger_server/models/protobuf_any.py @@ -1,6 +1,6 @@ # Copyright 2021 The MLX Contributors -# -# SPDX-License-Identifier: Apache-2.0 +# +# SPDX-License-Identifier: Apache-2.0 # coding: utf-8 from __future__ import absolute_import @@ -9,9 +9,10 @@ from typing import List, Dict # noqa: F401 from swagger_server.models.base_model_ import Model + # from swagger_server.models.byte_array import ByteArray # noqa: F401,E501 import re # noqa: F401,E501 -from swagger_server import util +from swagger_server import util # noqa: F401 class ProtobufAny(Model): @@ -20,7 +21,7 @@ class ProtobufAny(Model): Do not edit the class manually. """ - def __init__(self, type_url: str=None, value:object=None): # noqa: E501 + def __init__(self, type_url: str = None, value: object = None): # noqa: E501 """ProtobufAny - a model defined in Swagger :param type_url: The type_url of this ProtobufAny. # noqa: E501 @@ -28,21 +29,15 @@ def __init__(self, type_url: str=None, value:object=None): # noqa: E501 :param value: The value of this ProtobufAny. 
# noqa: E501 :type value: ByteArray """ - self.swagger_types = { - 'type_url': str, - 'value': object - } + self.swagger_types = {"type_url": str, "value": object} - self.attribute_map = { - 'type_url': 'type_url', - 'value': 'value' - } + self.attribute_map = {"type_url": "type_url", "value": "value"} self._type_url = type_url self._value = value @classmethod - def from_dict(cls, dikt) -> 'ProtobufAny': + def from_dict(cls, dikt) -> "ProtobufAny": """Returns the dict as a model :param dikt: A dict. @@ -95,7 +90,12 @@ def value(self, value: object): :param value: The value of this ProtobufAny. :type value: ByteArray """ - if value is not None and not re.search(r'^(?:[A-Za-z0-9+\/]{4})*(?:[A-Za-z0-9+\/]{2}==|[A-Za-z0-9+\/]{3}=)?$', value): # noqa: E501 - raise ValueError("Invalid value for `value`, must be a follow pattern or equal to `/^(?:[A-Za-z0-9+\/]{4})*(?:[A-Za-z0-9+\/]{2}==|[A-Za-z0-9+\/]{3}=)?$/`") # noqa: E501 + if value is not None and not re.search( + r"^(?:[A-Za-z0-9+\/]{4})*(?:[A-Za-z0-9+\/]{2}==|[A-Za-z0-9+\/]{3}=)?$", + value, + ): # noqa: E501 + raise ValueError( + "Invalid value for `value`, must be a follow pattern or equal to `/^(?:[A-Za-z0-9+\/]{4})*(?:[A-Za-z0-9+\/]{2}==|[A-Za-z0-9+\/]{3}=)?$/`" # noqa: E501 + ) self._value = value diff --git a/api/server/swagger_server/test/__init__.py b/api/server/swagger_server/test/__init__.py index 798bcf62..8d5141ab 100644 --- a/api/server/swagger_server/test/__init__.py +++ b/api/server/swagger_server/test/__init__.py @@ -3,17 +3,16 @@ # SPDX-License-Identifier: Apache-2.0 import logging -import connexion +import connexion # noqa: F401 from flask_testing import TestCase from swagger_server.encoder import JSONEncoder class BaseTestCase(TestCase): - def create_app(self): - logging.getLogger('connexion.operation').setLevel('ERROR') - app = connexion.App(__name__, specification_dir='../swagger/') + logging.getLogger("connexion.operation").setLevel("ERROR") + app = connexion.App(__name__, 
specification_dir="../swagger/") app.app.json_encoder = JSONEncoder - app.add_api('swagger.yaml') + app.add_api("swagger.yaml") return app.app diff --git a/api/server/swagger_server/test/test_application_settings_controller.py b/api/server/swagger_server/test/test_application_settings_controller.py index 461d0a1e..ba5afd03 100644 --- a/api/server/swagger_server/test/test_application_settings_controller.py +++ b/api/server/swagger_server/test/test_application_settings_controller.py @@ -1,15 +1,15 @@ # Copyright 2021 The MLX Contributors -# -# SPDX-License-Identifier: Apache-2.0 +# +# SPDX-License-Identifier: Apache-2.0 # coding: utf-8 from __future__ import absolute_import -from flask import json -from six import BytesIO +from flask import json # noqa: F401 +from six import BytesIO # noqa: F401 from swagger_server.models.api_settings import ApiSettings # noqa: E501 -from swagger_server.models.api_status import ApiStatus # noqa: E501 +from swagger_server.models.api_status import ApiStatus # noqa: F401, E501 from swagger_server.models.dictionary import Dictionary # noqa: E501 from swagger_server.test import BaseTestCase @@ -18,45 +18,34 @@ class TestApplicationSettingsController(BaseTestCase): """ApplicationSettingsController integration test stubs""" def test_get_application_settings(self): - """Test case for get_application_settings - - - """ - response = self.client.open( - '/apis/v1alpha1/settings', - method='GET') - self.assert200(response, - 'Response body is : ' + response.data.decode('utf-8')) + """Test case for get_application_settings""" + response = self.client.open("/apis/v1alpha1/settings", method="GET") + self.assert200(response, "Response body is : " + response.data.decode("utf-8")) def test_modify_application_settings(self): - """Test case for modify_application_settings - - - """ + """Test case for modify_application_settings""" dictionary = Dictionary() response = self.client.open( - '/apis/v1alpha1/settings', - method='PUT', + 
"/apis/v1alpha1/settings", + method="PUT", data=json.dumps(dictionary), - content_type='application/json') - self.assert200(response, - 'Response body is : ' + response.data.decode('utf-8')) + content_type="application/json", + ) + self.assert200(response, "Response body is : " + response.data.decode("utf-8")) def test_set_application_settings(self): - """Test case for set_application_settings - - - """ + """Test case for set_application_settings""" settings = ApiSettings() response = self.client.open( - '/apis/v1alpha1/settings', - method='POST', + "/apis/v1alpha1/settings", + method="POST", data=json.dumps(settings), - content_type='application/json') - self.assert200(response, - 'Response body is : ' + response.data.decode('utf-8')) + content_type="application/json", + ) + self.assert200(response, "Response body is : " + response.data.decode("utf-8")) -if __name__ == '__main__': +if __name__ == "__main__": import unittest + unittest.main() diff --git a/api/server/swagger_server/test/test_catalog_service_controller.py b/api/server/swagger_server/test/test_catalog_service_controller.py index cc96a989..21d409ae 100644 --- a/api/server/swagger_server/test/test_catalog_service_controller.py +++ b/api/server/swagger_server/test/test_catalog_service_controller.py @@ -5,12 +5,14 @@ from __future__ import absolute_import -from flask import json -from six import BytesIO - -from swagger_server.models.api_catalog_upload import ApiCatalogUpload # noqa: E501 -from swagger_server.models.api_list_catalog_items_response import ApiListCatalogItemsResponse # noqa: E501 -from swagger_server.models.api_status import ApiStatus # noqa: E501 +from flask import json # noqa: F401 +from six import BytesIO # noqa: F401 + +from swagger_server.models.api_catalog_upload import ApiCatalogUpload # noqa: F401, E501 +from swagger_server.models.api_list_catalog_items_response import ( # noqa: F401 + ApiListCatalogItemsResponse, +) +from swagger_server.models.api_status import ApiStatus # noqa: 
F401, E501 from swagger_server.test import BaseTestCase @@ -18,36 +20,31 @@ class TestCatalogServiceController(BaseTestCase): """CatalogServiceController integration test stubs""" def test_list_all_assets(self): - """Test case for list_all_assets - - - """ - query_string = [('page_token', 'page_token_example'), - ('page_size', 56), - ('sort_by', 'sort_by_example'), - ('filter', 'filter_example')] + """Test case for list_all_assets""" + query_string = [ + ("page_token", "page_token_example"), + ("page_size", 56), + ("sort_by", "sort_by_example"), + ("filter", "filter_example"), + ] response = self.client.open( - '/apis/v1alpha1/catalog', - method='GET', - query_string=query_string) - self.assert200(response, - 'Response body is : ' + response.data.decode('utf-8')) + "/apis/v1alpha1/catalog", method="GET", query_string=query_string + ) + self.assert200(response, "Response body is : " + response.data.decode("utf-8")) def test_upload_multiple_assets(self): - """Test case for upload_multiple_assets - - - """ + """Test case for upload_multiple_assets""" body = [ApiCatalogUpload()] response = self.client.open( - '/apis/v1alpha1/catalog', - method='POST', + "/apis/v1alpha1/catalog", + method="POST", data=json.dumps(body), - content_type='application/json') - self.assert200(response, - 'Response body is : ' + response.data.decode('utf-8')) + content_type="application/json", + ) + self.assert200(response, "Response body is : " + response.data.decode("utf-8")) -if __name__ == '__main__': +if __name__ == "__main__": import unittest + unittest.main() diff --git a/api/server/swagger_server/test/test_component_service_controller.py b/api/server/swagger_server/test/test_component_service_controller.py index a80935f3..73bd2908 100644 --- a/api/server/swagger_server/test/test_component_service_controller.py +++ b/api/server/swagger_server/test/test_component_service_controller.py @@ -1,6 +1,6 @@ # Copyright 2021 The MLX Contributors -# -# SPDX-License-Identifier: Apache-2.0 +# +# 
SPDX-License-Identifier: Apache-2.0 # coding: utf-8 from __future__ import absolute_import @@ -11,12 +11,18 @@ from six import BytesIO from swagger_server.models.api_component import ApiComponent # noqa: E501 -from swagger_server.models.api_generate_code_response import ApiGenerateCodeResponse # noqa: E501 -from swagger_server.models.api_get_template_response import ApiGetTemplateResponse # noqa: E501 -from swagger_server.models.api_list_components_response import ApiListComponentsResponse # noqa: E501 -from swagger_server.models.api_parameter import ApiParameter # noqa: E501 -from swagger_server.models.api_run_code_response import ApiRunCodeResponse # noqa: E501 -from swagger_server.models.api_status import ApiStatus # noqa: E501 +from swagger_server.models.api_generate_code_response import ( # noqa: F401 + ApiGenerateCodeResponse, +) +from swagger_server.models.api_get_template_response import ( # noqa: F401 + ApiGetTemplateResponse, +) +from swagger_server.models.api_list_components_response import ( # noqa: F401 + ApiListComponentsResponse, +) +from swagger_server.models.api_parameter import ApiParameter # noqa: F401, E501 +from swagger_server.models.api_run_code_response import ApiRunCodeResponse # noqa: F401, E501 +from swagger_server.models.api_status import ApiStatus # noqa: F401, E501 from swagger_server.test import BaseTestCase @@ -24,167 +30,133 @@ class TestComponentServiceController(BaseTestCase): """ComponentServiceController integration test stubs""" def test_approve_components_for_publishing(self): - """Test case for approve_components_for_publishing - - - """ + """Test case for approve_components_for_publishing""" component_ids = [List[str]()] response = self.client.open( - '/apis/v1alpha1/components/publish_approved', - method='POST', + "/apis/v1alpha1/components/publish_approved", + method="POST", data=json.dumps(component_ids), - content_type='application/json') - self.assert200(response, - 'Response body is : ' + response.data.decode('utf-8')) 
+ content_type="application/json", + ) + self.assert200(response, "Response body is : " + response.data.decode("utf-8")) def test_create_component(self): - """Test case for create_component - - - """ + """Test case for create_component""" body = ApiComponent() response = self.client.open( - '/apis/v1alpha1/components', - method='POST', + "/apis/v1alpha1/components", + method="POST", data=json.dumps(body), - content_type='application/json') - self.assert200(response, - 'Response body is : ' + response.data.decode('utf-8')) + content_type="application/json", + ) + self.assert200(response, "Response body is : " + response.data.decode("utf-8")) def test_delete_component(self): - """Test case for delete_component - - - """ + """Test case for delete_component""" response = self.client.open( - '/apis/v1alpha1/components/{id}'.format(id='id_example'), - method='DELETE') - self.assert200(response, - 'Response body is : ' + response.data.decode('utf-8')) + "/apis/v1alpha1/components/{id}".format(id="id_example"), method="DELETE" + ) + self.assert200(response, "Response body is : " + response.data.decode("utf-8")) def test_download_component_files(self): """Test case for download_component_files Returns the component artifacts compressed into a .tgz (.tar.gz) file. 
""" - query_string = [('include_generated_code', False)] + query_string = [("include_generated_code", False)] response = self.client.open( - '/apis/v1alpha1/components/{id}/download'.format(id='id_example'), - method='GET', - query_string=query_string) - self.assert200(response, - 'Response body is : ' + response.data.decode('utf-8')) + "/apis/v1alpha1/components/{id}/download".format(id="id_example"), + method="GET", + query_string=query_string, + ) + self.assert200(response, "Response body is : " + response.data.decode("utf-8")) def test_generate_component_code(self): - """Test case for generate_component_code - - - """ + """Test case for generate_component_code""" response = self.client.open( - '/apis/v1alpha1/components/{id}/generate_code'.format(id='id_example'), - method='GET') - self.assert200(response, - 'Response body is : ' + response.data.decode('utf-8')) + "/apis/v1alpha1/components/{id}/generate_code".format(id="id_example"), + method="GET", + ) + self.assert200(response, "Response body is : " + response.data.decode("utf-8")) def test_get_component(self): - """Test case for get_component - - - """ + """Test case for get_component""" response = self.client.open( - '/apis/v1alpha1/components/{id}'.format(id='id_example'), - method='GET') - self.assert200(response, - 'Response body is : ' + response.data.decode('utf-8')) + "/apis/v1alpha1/components/{id}".format(id="id_example"), method="GET" + ) + self.assert200(response, "Response body is : " + response.data.decode("utf-8")) def test_get_component_template(self): - """Test case for get_component_template - - - """ + """Test case for get_component_template""" response = self.client.open( - '/apis/v1alpha1/components/{id}/templates'.format(id='id_example'), - method='GET') - self.assert200(response, - 'Response body is : ' + response.data.decode('utf-8')) + "/apis/v1alpha1/components/{id}/templates".format(id="id_example"), + method="GET", + ) + self.assert200(response, "Response body is : " + 
response.data.decode("utf-8")) def test_list_components(self): - """Test case for list_components - - - """ - query_string = [('page_token', 'page_token_example'), - ('page_size', 56), - ('sort_by', 'name'), - ('filter', '{"name": "test"}')] + """Test case for list_components""" + query_string = [ + ("page_token", "page_token_example"), + ("page_size", 56), + ("sort_by", "name"), + ("filter", '{"name": "test"}'), + ] response = self.client.open( - '/apis/v1alpha1/components', - method='GET', - query_string=query_string) - self.assert200(response, - 'Response body is : ' + response.data.decode('utf-8')) + "/apis/v1alpha1/components", method="GET", query_string=query_string + ) + self.assert200(response, "Response body is : " + response.data.decode("utf-8")) def test_run_component(self): - """Test case for run_component - - - """ + """Test case for run_component""" parameters = [ApiParameter()] - query_string = [('run_name', 'run_name_example')] + query_string = [("run_name", "run_name_example")] response = self.client.open( - '/apis/v1alpha1/components/{id}/run'.format(id='id_example'), - method='POST', + "/apis/v1alpha1/components/{id}/run".format(id="id_example"), + method="POST", data=json.dumps(parameters), - content_type='application/json', - query_string=query_string) - self.assert200(response, - 'Response body is : ' + response.data.decode('utf-8')) + content_type="application/json", + query_string=query_string, + ) + self.assert200(response, "Response body is : " + response.data.decode("utf-8")) def test_set_featured_components(self): - """Test case for set_featured_components - - - """ + """Test case for set_featured_components""" component_ids = [List[str]()] response = self.client.open( - '/apis/v1alpha1/components/featured', - method='POST', + "/apis/v1alpha1/components/featured", + method="POST", data=json.dumps(component_ids), - content_type='application/json') - self.assert200(response, - 'Response body is : ' + response.data.decode('utf-8')) + 
content_type="application/json", + ) + self.assert200(response, "Response body is : " + response.data.decode("utf-8")) def test_upload_component(self): - """Test case for upload_component - - - """ - query_string = [('name', 'name_example')] - data = dict(uploadfile=(BytesIO(b'some file data'), 'file.txt')) + """Test case for upload_component""" + query_string = [("name", "name_example")] + data = dict(uploadfile=(BytesIO(b"some file data"), "file.txt")) response = self.client.open( - '/apis/v1alpha1/components/upload', - method='POST', + "/apis/v1alpha1/components/upload", + method="POST", data=data, - content_type='multipart/form-data', - query_string=query_string) - self.assert200(response, - 'Response body is : ' + response.data.decode('utf-8')) + content_type="multipart/form-data", + query_string=query_string, + ) + self.assert200(response, "Response body is : " + response.data.decode("utf-8")) def test_upload_component_file(self): - """Test case for upload_component_file - - - """ - data = dict(uploadfile=(BytesIO(b'some file data'), 'file.txt')) + """Test case for upload_component_file""" + data = dict(uploadfile=(BytesIO(b"some file data"), "file.txt")) response = self.client.open( - '/apis/v1alpha1/components/{id}/upload'.format(id='id_example'), - method='POST', + "/apis/v1alpha1/components/{id}/upload".format(id="id_example"), + method="POST", data=data, - content_type='multipart/form-data') - self.assert200(response, - 'Response body is : ' + response.data.decode('utf-8')) + content_type="multipart/form-data", + ) + self.assert200(response, "Response body is : " + response.data.decode("utf-8")) -if __name__ == '__main__': +if __name__ == "__main__": import unittest + unittest.main() diff --git a/api/server/swagger_server/test/test_credential_service_controller.py b/api/server/swagger_server/test/test_credential_service_controller.py index 9c910a34..3172fc2d 100644 --- a/api/server/swagger_server/test/test_credential_service_controller.py +++ 
b/api/server/swagger_server/test/test_credential_service_controller.py @@ -1,16 +1,16 @@ # Copyright 2021 The MLX Contributors -# -# SPDX-License-Identifier: Apache-2.0 +# +# SPDX-License-Identifier: Apache-2.0 # coding: utf-8 from __future__ import absolute_import -from flask import json -from six import BytesIO +from flask import json # noqa: F401 +from six import BytesIO # noqa: F401 -from swagger_server.models.api_component import ApiComponent # noqa: E501 +from swagger_server.models.api_component import ApiComponent # noqa: F401, E501 from swagger_server.models.api_credential import ApiCredential # noqa: E501 -from swagger_server.models.api_status import ApiStatus # noqa: E501 +from swagger_server.models.api_status import ApiStatus # noqa: F401, E501 from swagger_server.test import BaseTestCase @@ -18,42 +18,32 @@ class TestCredentialServiceController(BaseTestCase): """CredentialServiceController integration test stubs""" def test_create_credentials(self): - """Test case for create_credentials - - - """ + """Test case for create_credentials""" body = ApiCredential() response = self.client.open( - '/apis/v1alpha1/credentials', - method='POST', + "/apis/v1alpha1/credentials", + method="POST", data=json.dumps(body), - content_type='application/json') - self.assert200(response, - 'Response body is : ' + response.data.decode('utf-8')) + content_type="application/json", + ) + self.assert200(response, "Response body is : " + response.data.decode("utf-8")) def test_delete_credential(self): - """Test case for delete_credential - - - """ + """Test case for delete_credential""" response = self.client.open( - '/apis/v1alpha1/credentials/{id}'.format(id='id_example'), - method='DELETE') - self.assert200(response, - 'Response body is : ' + response.data.decode('utf-8')) + "/apis/v1alpha1/credentials/{id}".format(id="id_example"), method="DELETE" + ) + self.assert200(response, "Response body is : " + response.data.decode("utf-8")) def test_get_credential(self): - """Test 
case for get_credential - - - """ + """Test case for get_credential""" response = self.client.open( - '/apis/v1alpha1/credentials/{id}'.format(id='id_example'), - method='GET') - self.assert200(response, - 'Response body is : ' + response.data.decode('utf-8')) + "/apis/v1alpha1/credentials/{id}".format(id="id_example"), method="GET" + ) + self.assert200(response, "Response body is : " + response.data.decode("utf-8")) -if __name__ == '__main__': +if __name__ == "__main__": import unittest + unittest.main() diff --git a/api/server/swagger_server/test/test_dataset_service_controller.py b/api/server/swagger_server/test/test_dataset_service_controller.py index cb8a9780..75432cad 100644 --- a/api/server/swagger_server/test/test_dataset_service_controller.py +++ b/api/server/swagger_server/test/test_dataset_service_controller.py @@ -9,10 +9,16 @@ from six import BytesIO from swagger_server.models.api_dataset import ApiDataset # noqa: E501 -from swagger_server.models.api_generate_code_response import ApiGenerateCodeResponse # noqa: E501 -from swagger_server.models.api_get_template_response import ApiGetTemplateResponse # noqa: E501 -from swagger_server.models.api_list_datasets_response import ApiListDatasetsResponse # noqa: E501 -from swagger_server.models.api_status import ApiStatus # noqa: E501 +from swagger_server.models.api_generate_code_response import ( # noqa: F401 + ApiGenerateCodeResponse, +) +from swagger_server.models.api_get_template_response import ( # noqa: F401 + ApiGetTemplateResponse, +) +from swagger_server.models.api_list_datasets_response import ( # noqa: F401 + ApiListDatasetsResponse, +) +from swagger_server.models.api_status import ApiStatus # noqa: F401, E501 from swagger_server.test import BaseTestCase @@ -20,151 +26,120 @@ class TestDatasetServiceController(BaseTestCase): """DatasetServiceController integration test stubs""" def test_approve_datasets_for_publishing(self): - """Test case for approve_datasets_for_publishing - - - """ + """Test case 
for approve_datasets_for_publishing""" dataset_ids = [list[str]()] response = self.client.open( - '/apis/v1alpha1/datasets/publish_approved', - method='POST', + "/apis/v1alpha1/datasets/publish_approved", + method="POST", data=json.dumps(dataset_ids), - content_type='application/json') - self.assert200(response, - 'Response body is : ' + response.data.decode('utf-8')) + content_type="application/json", + ) + self.assert200(response, "Response body is : " + response.data.decode("utf-8")) def test_create_dataset(self): - """Test case for create_dataset - - - """ + """Test case for create_dataset""" body = ApiDataset() response = self.client.open( - '/apis/v1alpha1/datasets', - method='POST', + "/apis/v1alpha1/datasets", + method="POST", data=json.dumps(body), - content_type='application/json') - self.assert200(response, - 'Response body is : ' + response.data.decode('utf-8')) + content_type="application/json", + ) + self.assert200(response, "Response body is : " + response.data.decode("utf-8")) def test_delete_dataset(self): - """Test case for delete_dataset - - - """ + """Test case for delete_dataset""" response = self.client.open( - '/apis/v1alpha1/datasets/{id}'.format(id='id_example'), - method='DELETE') - self.assert200(response, - 'Response body is : ' + response.data.decode('utf-8')) + "/apis/v1alpha1/datasets/{id}".format(id="id_example"), method="DELETE" + ) + self.assert200(response, "Response body is : " + response.data.decode("utf-8")) def test_download_dataset_files(self): """Test case for download_dataset_files Returns the dataset artifacts compressed into a .tgz (.tar.gz) file. 
""" - query_string = [('include_generated_code', False)] + query_string = [("include_generated_code", False)] response = self.client.open( - '/apis/v1alpha1/datasets/{id}/download'.format(id='id_example'), - method='GET', - query_string=query_string) - self.assert200(response, - 'Response body is : ' + response.data.decode('utf-8')) + "/apis/v1alpha1/datasets/{id}/download".format(id="id_example"), + method="GET", + query_string=query_string, + ) + self.assert200(response, "Response body is : " + response.data.decode("utf-8")) def test_generate_dataset_code(self): - """Test case for generate_dataset_code - - - """ + """Test case for generate_dataset_code""" response = self.client.open( - '/apis/v1alpha1/datasets/{id}/generate_code'.format(id='id_example'), - method='GET') - self.assert200(response, - 'Response body is : ' + response.data.decode('utf-8')) + "/apis/v1alpha1/datasets/{id}/generate_code".format(id="id_example"), + method="GET", + ) + self.assert200(response, "Response body is : " + response.data.decode("utf-8")) def test_get_dataset(self): - """Test case for get_dataset - - - """ + """Test case for get_dataset""" response = self.client.open( - '/apis/v1alpha1/datasets/{id}'.format(id='id_example'), - method='GET') - self.assert200(response, - 'Response body is : ' + response.data.decode('utf-8')) + "/apis/v1alpha1/datasets/{id}".format(id="id_example"), method="GET" + ) + self.assert200(response, "Response body is : " + response.data.decode("utf-8")) def test_get_dataset_template(self): - """Test case for get_dataset_template - - - """ + """Test case for get_dataset_template""" response = self.client.open( - '/apis/v1alpha1/datasets/{id}/templates'.format(id='id_example'), - method='GET') - self.assert200(response, - 'Response body is : ' + response.data.decode('utf-8')) + "/apis/v1alpha1/datasets/{id}/templates".format(id="id_example"), + method="GET", + ) + self.assert200(response, "Response body is : " + response.data.decode("utf-8")) def 
test_list_datasets(self): - """Test case for list_datasets - - - """ - query_string = [('page_token', 'page_token_example'), - ('page_size', 56), - ('sort_by', 'name'), - ('filter', '{"name": "test"}')] + """Test case for list_datasets""" + query_string = [ + ("page_token", "page_token_example"), + ("page_size", 56), + ("sort_by", "name"), + ("filter", '{"name": "test"}'), + ] response = self.client.open( - '/apis/v1alpha1/datasets', - method='GET', - query_string=query_string) - self.assert200(response, - 'Response body is : ' + response.data.decode('utf-8')) + "/apis/v1alpha1/datasets", method="GET", query_string=query_string + ) + self.assert200(response, "Response body is : " + response.data.decode("utf-8")) def test_set_featured_datasets(self): - """Test case for set_featured_datasets - - - """ + """Test case for set_featured_datasets""" dataset_ids = [list[str]()] response = self.client.open( - '/apis/v1alpha1/datasets/featured', - method='POST', + "/apis/v1alpha1/datasets/featured", + method="POST", data=json.dumps(dataset_ids), - content_type='application/json') - self.assert200(response, - 'Response body is : ' + response.data.decode('utf-8')) + content_type="application/json", + ) + self.assert200(response, "Response body is : " + response.data.decode("utf-8")) def test_upload_dataset(self): - """Test case for upload_dataset - - - """ - query_string = [('name', 'name_example')] - data = dict(uploadfile=(BytesIO(b'some file data'), 'file.txt')) + """Test case for upload_dataset""" + query_string = [("name", "name_example")] + data = dict(uploadfile=(BytesIO(b"some file data"), "file.txt")) response = self.client.open( - '/apis/v1alpha1/datasets/upload', - method='POST', + "/apis/v1alpha1/datasets/upload", + method="POST", data=data, - content_type='multipart/form-data', - query_string=query_string) - self.assert200(response, - 'Response body is : ' + response.data.decode('utf-8')) + content_type="multipart/form-data", + query_string=query_string, + ) + 
self.assert200(response, "Response body is : " + response.data.decode("utf-8")) def test_upload_dataset_file(self): - """Test case for upload_dataset_file - - - """ - data = dict(uploadfile=(BytesIO(b'some file data'), 'file.txt')) + """Test case for upload_dataset_file""" + data = dict(uploadfile=(BytesIO(b"some file data"), "file.txt")) response = self.client.open( - '/apis/v1alpha1/datasets/{id}/upload'.format(id='id_example'), - method='POST', + "/apis/v1alpha1/datasets/{id}/upload".format(id="id_example"), + method="POST", data=data, - content_type='multipart/form-data') - self.assert200(response, - 'Response body is : ' + response.data.decode('utf-8')) + content_type="multipart/form-data", + ) + self.assert200(response, "Response body is : " + response.data.decode("utf-8")) -if __name__ == '__main__': +if __name__ == "__main__": import unittest + unittest.main() diff --git a/api/server/swagger_server/test/test_health_check_controller.py b/api/server/swagger_server/test/test_health_check_controller.py index b566e7fa..b8fbb487 100644 --- a/api/server/swagger_server/test/test_health_check_controller.py +++ b/api/server/swagger_server/test/test_health_check_controller.py @@ -1,14 +1,14 @@ # Copyright 2021 The MLX Contributors -# -# SPDX-License-Identifier: Apache-2.0 +# +# SPDX-License-Identifier: Apache-2.0 # coding: utf-8 from __future__ import absolute_import -from flask import json -from six import BytesIO +from flask import json # noqa: F401 +from six import BytesIO # noqa: F401 -from swagger_server.models.api_status import ApiStatus # noqa: E501 +from swagger_server.models.api_status import ApiStatus # noqa: F401, E501 from swagger_server.test import BaseTestCase @@ -20,16 +20,14 @@ def test_health_check(self): Checks if the server is running """ - query_string = [('check_database', True), - ('check_object_store', True)] + query_string = [("check_database", True), ("check_object_store", True)] response = self.client.open( - '/apis/v1alpha1/health_check', - 
method='GET', - query_string=query_string) - self.assert200(response, - 'Response body is : ' + response.data.decode('utf-8')) + "/apis/v1alpha1/health_check", method="GET", query_string=query_string + ) + self.assert200(response, "Response body is : " + response.data.decode("utf-8")) -if __name__ == '__main__': +if __name__ == "__main__": import unittest + unittest.main() diff --git a/api/server/swagger_server/test/test_inference_service_controller.py b/api/server/swagger_server/test/test_inference_service_controller.py index db3f5194..281e24fa 100644 --- a/api/server/swagger_server/test/test_inference_service_controller.py +++ b/api/server/swagger_server/test/test_inference_service_controller.py @@ -1,14 +1,14 @@ -# Copyright 2021 The MLX Contributors -# -# SPDX-License-Identifier: Apache-2.0 +# Copyright 2021 The MLX Contributors +# +# SPDX-License-Identifier: Apache-2.0 # coding: utf-8 from __future__ import absolute_import -from flask import json -from six import BytesIO +from flask import json # noqa: F401 +from six import BytesIO # noqa: F401 -from swagger_server.models.api_status import ApiStatus # noqa: E501 +from swagger_server.models.api_status import ApiStatus # noqa: F401, E501 from swagger_server.test import BaseTestCase @@ -16,33 +16,31 @@ class TestInferenceServiceController(BaseTestCase): """InferenceServiceController integration test stubs""" def test_get_service(self): - """Test case for get_service - - - """ + """Test case for get_service""" response = self.client.open( - '/apis/v1alpha1/inferenceservices/{id}'.format(id='id_example'), - method='GET') - self.assert200(response, - 'Response body is : ' + response.data.decode('utf-8')) + "/apis/v1alpha1/inferenceservices/{id}".format(id="id_example"), + method="GET", + ) + self.assert200(response, "Response body is : " + response.data.decode("utf-8")) def test_list_services(self): """Test case for list_services Gets all KFServing services """ - query_string = [('namespace', 'namespace_example'), - 
('label', 'label_example'), - ('sort_by', 'name'), - ('filter', '{"name": "test"}')] + query_string = [ + ("namespace", "namespace_example"), + ("label", "label_example"), + ("sort_by", "name"), + ("filter", '{"name": "test"}'), + ] response = self.client.open( - '/apis/v1alpha1/inferenceservices', - method='GET', - query_string=query_string) - self.assert200(response, - 'Response body is : ' + response.data.decode('utf-8')) + "/apis/v1alpha1/inferenceservices", method="GET", query_string=query_string + ) + self.assert200(response, "Response body is : " + response.data.decode("utf-8")) -if __name__ == '__main__': +if __name__ == "__main__": import unittest + unittest.main() diff --git a/api/server/swagger_server/test/test_model_service_controller.py b/api/server/swagger_server/test/test_model_service_controller.py index 056eab49..e393ebad 100644 --- a/api/server/swagger_server/test/test_model_service_controller.py +++ b/api/server/swagger_server/test/test_model_service_controller.py @@ -1,6 +1,6 @@ # Copyright 2021 The MLX Contributors -# -# SPDX-License-Identifier: Apache-2.0 +# +# SPDX-License-Identifier: Apache-2.0 # coding: utf-8 from __future__ import absolute_import @@ -10,12 +10,18 @@ from flask import json from six import BytesIO -from swagger_server.models.api_generate_model_code_response import ApiGenerateModelCodeResponse # noqa: E501 -from swagger_server.models.api_get_template_response import ApiGetTemplateResponse # noqa: E501 -from swagger_server.models.api_list_models_response import ApiListModelsResponse # noqa: E501 +from swagger_server.models.api_generate_model_code_response import ( # noqa: F401 + ApiGenerateModelCodeResponse, +) +from swagger_server.models.api_get_template_response import ( # noqa: F401 + ApiGetTemplateResponse, +) +from swagger_server.models.api_list_models_response import ( # noqa: F401 + ApiListModelsResponse, +) from swagger_server.models.api_model import ApiModel # noqa: E501 -from 
swagger_server.models.api_run_code_response import ApiRunCodeResponse # noqa: E501 -from swagger_server.models.api_status import ApiStatus # noqa: E501 +from swagger_server.models.api_run_code_response import ApiRunCodeResponse # noqa: F401, E501 +from swagger_server.models.api_status import ApiStatus # noqa: F401, E501 from swagger_server.test import BaseTestCase @@ -23,166 +29,133 @@ class TestModelServiceController(BaseTestCase): """ModelServiceController integration test stubs""" def test_approve_models_for_publishing(self): - """Test case for approve_models_for_publishing - - - """ + """Test case for approve_models_for_publishing""" model_ids = [List[str]()] response = self.client.open( - '/apis/v1alpha1/models/publish_approved', - method='POST', + "/apis/v1alpha1/models/publish_approved", + method="POST", data=json.dumps(model_ids), - content_type='application/json') - self.assert200(response, - 'Response body is : ' + response.data.decode('utf-8')) + content_type="application/json", + ) + self.assert200(response, "Response body is : " + response.data.decode("utf-8")) def test_create_model(self): - """Test case for create_model - - - """ + """Test case for create_model""" body = ApiModel() response = self.client.open( - '/apis/v1alpha1/models', - method='POST', + "/apis/v1alpha1/models", + method="POST", data=json.dumps(body), - content_type='application/json') - self.assert200(response, - 'Response body is : ' + response.data.decode('utf-8')) + content_type="application/json", + ) + self.assert200(response, "Response body is : " + response.data.decode("utf-8")) def test_delete_model(self): - """Test case for delete_model - - - """ + """Test case for delete_model""" response = self.client.open( - '/apis/v1alpha1/models/{id}'.format(id='id_example'), - method='DELETE') - self.assert200(response, - 'Response body is : ' + response.data.decode('utf-8')) + "/apis/v1alpha1/models/{id}".format(id="id_example"), method="DELETE" + ) + self.assert200(response, 
"Response body is : " + response.data.decode("utf-8")) def test_download_model_files(self): """Test case for download_model_files Returns the model artifacts compressed into a .tgz (.tar.gz) file. """ - query_string = [('include_generated_code', False)] + query_string = [("include_generated_code", False)] response = self.client.open( - '/apis/v1alpha1/models/{id}/download'.format(id='id_example'), - method='GET', - query_string=query_string) - self.assert200(response, - 'Response body is : ' + response.data.decode('utf-8')) + "/apis/v1alpha1/models/{id}/download".format(id="id_example"), + method="GET", + query_string=query_string, + ) + self.assert200(response, "Response body is : " + response.data.decode("utf-8")) def test_generate_model_code(self): - """Test case for generate_model_code - - - """ + """Test case for generate_model_code""" response = self.client.open( - '/apis/v1alpha1/models/{id}/generate_code'.format(id='id_example'), - method='GET') - self.assert200(response, - 'Response body is : ' + response.data.decode('utf-8')) + "/apis/v1alpha1/models/{id}/generate_code".format(id="id_example"), + method="GET", + ) + self.assert200(response, "Response body is : " + response.data.decode("utf-8")) def test_get_model(self): - """Test case for get_model - - - """ + """Test case for get_model""" response = self.client.open( - '/apis/v1alpha1/models/{id}'.format(id='id_example'), - method='GET') - self.assert200(response, - 'Response body is : ' + response.data.decode('utf-8')) + "/apis/v1alpha1/models/{id}".format(id="id_example"), method="GET" + ) + self.assert200(response, "Response body is : " + response.data.decode("utf-8")) def test_get_model_template(self): - """Test case for get_model_template - - - """ + """Test case for get_model_template""" response = self.client.open( - '/apis/v1alpha1/models/{id}/templates'.format(id='id_example'), - method='GET') - self.assert200(response, - 'Response body is : ' + response.data.decode('utf-8')) + 
"/apis/v1alpha1/models/{id}/templates".format(id="id_example"), method="GET" + ) + self.assert200(response, "Response body is : " + response.data.decode("utf-8")) def test_list_models(self): - """Test case for list_models - - - """ - query_string = [('page_token', 'page_token_example'), - ('page_size', 56), - ('sort_by', 'name'), - ('filter', '{"name": "test"}')] + """Test case for list_models""" + query_string = [ + ("page_token", "page_token_example"), + ("page_size", 56), + ("sort_by", "name"), + ("filter", '{"name": "test"}'), + ] response = self.client.open( - '/apis/v1alpha1/models', - method='GET', - query_string=query_string) - self.assert200(response, - 'Response body is : ' + response.data.decode('utf-8')) + "/apis/v1alpha1/models", method="GET", query_string=query_string + ) + self.assert200(response, "Response body is : " + response.data.decode("utf-8")) def test_run_model(self): - """Test case for run_model - - - """ - query_string = [('pipeline_stage', 'pipeline_stage_example'), - ('execution_platform', 'execution_platform_example'), - ('run_name', 'run_name_example')] + """Test case for run_model""" + query_string = [ + ("pipeline_stage", "pipeline_stage_example"), + ("execution_platform", "execution_platform_example"), + ("run_name", "run_name_example"), + ] response = self.client.open( - '/apis/v1alpha1/models/{id}/run'.format(id='id_example'), - method='POST', - query_string=query_string) - self.assert200(response, - 'Response body is : ' + response.data.decode('utf-8')) + "/apis/v1alpha1/models/{id}/run".format(id="id_example"), + method="POST", + query_string=query_string, + ) + self.assert200(response, "Response body is : " + response.data.decode("utf-8")) def test_set_featured_models(self): - """Test case for set_featured_models - - - """ + """Test case for set_featured_models""" model_ids = [List[str]()] response = self.client.open( - '/apis/v1alpha1/models/featured', - method='POST', + "/apis/v1alpha1/models/featured", + method="POST", 
data=json.dumps(model_ids), - content_type='application/json') - self.assert200(response, - 'Response body is : ' + response.data.decode('utf-8')) + content_type="application/json", + ) + self.assert200(response, "Response body is : " + response.data.decode("utf-8")) def test_upload_model(self): - """Test case for upload_model - - - """ - query_string = [('name', 'name_example')] - data = dict(uploadfile=(BytesIO(b'some file data'), 'file.txt')) + """Test case for upload_model""" + query_string = [("name", "name_example")] + data = dict(uploadfile=(BytesIO(b"some file data"), "file.txt")) response = self.client.open( - '/apis/v1alpha1/models/upload', - method='POST', + "/apis/v1alpha1/models/upload", + method="POST", data=data, - content_type='multipart/form-data', - query_string=query_string) - self.assert200(response, - 'Response body is : ' + response.data.decode('utf-8')) + content_type="multipart/form-data", + query_string=query_string, + ) + self.assert200(response, "Response body is : " + response.data.decode("utf-8")) def test_upload_model_file(self): - """Test case for upload_model_file - - - """ - data = dict(uploadfile=(BytesIO(b'some file data'), 'file.txt')) + """Test case for upload_model_file""" + data = dict(uploadfile=(BytesIO(b"some file data"), "file.txt")) response = self.client.open( - '/apis/v1alpha1/models/{id}/upload'.format(id='id_example'), - method='POST', + "/apis/v1alpha1/models/{id}/upload".format(id="id_example"), + method="POST", data=data, - content_type='multipart/form-data') - self.assert200(response, - 'Response body is : ' + response.data.decode('utf-8')) + content_type="multipart/form-data", + ) + self.assert200(response, "Response body is : " + response.data.decode("utf-8")) -if __name__ == '__main__': +if __name__ == "__main__": import unittest + unittest.main() diff --git a/api/server/swagger_server/test/test_notebook_service_controller.py b/api/server/swagger_server/test/test_notebook_service_controller.py index 
acb7e76b..e4580f27 100644 --- a/api/server/swagger_server/test/test_notebook_service_controller.py +++ b/api/server/swagger_server/test/test_notebook_service_controller.py @@ -1,6 +1,6 @@ # Copyright 2021 The MLX Contributors -# -# SPDX-License-Identifier: Apache-2.0 +# +# SPDX-License-Identifier: Apache-2.0 # coding: utf-8 from __future__ import absolute_import @@ -10,12 +10,18 @@ from flask import json from six import BytesIO -from swagger_server.models.api_generate_code_response import ApiGenerateCodeResponse # noqa: E501 -from swagger_server.models.api_get_template_response import ApiGetTemplateResponse # noqa: E501 -from swagger_server.models.api_list_notebooks_response import ApiListNotebooksResponse # noqa: E501 +from swagger_server.models.api_generate_code_response import ( # noqa: F401 + ApiGenerateCodeResponse, +) +from swagger_server.models.api_get_template_response import ( # noqa: F401 + ApiGetTemplateResponse, +) +from swagger_server.models.api_list_notebooks_response import ( # noqa: F401 + ApiListNotebooksResponse, +) from swagger_server.models.api_notebook import ApiNotebook # noqa: E501 -from swagger_server.models.api_run_code_response import ApiRunCodeResponse # noqa: E501 -from swagger_server.models.api_status import ApiStatus # noqa: E501 +from swagger_server.models.api_run_code_response import ApiRunCodeResponse # noqa: F401, E501 +from swagger_server.models.api_status import ApiStatus # noqa: F401, E501 from swagger_server.test import BaseTestCase @@ -23,164 +29,130 @@ class TestNotebookServiceController(BaseTestCase): """NotebookServiceController integration test stubs""" def test_approve_notebooks_for_publishing(self): - """Test case for approve_notebooks_for_publishing - - - """ + """Test case for approve_notebooks_for_publishing""" notebook_ids = [List[str]()] response = self.client.open( - '/apis/v1alpha1/notebooks/publish_approved', - method='POST', + "/apis/v1alpha1/notebooks/publish_approved", + method="POST", 
data=json.dumps(notebook_ids), - content_type='application/json') - self.assert200(response, - 'Response body is : ' + response.data.decode('utf-8')) + content_type="application/json", + ) + self.assert200(response, "Response body is : " + response.data.decode("utf-8")) def test_create_notebook(self): - """Test case for create_notebook - - - """ + """Test case for create_notebook""" body = ApiNotebook() response = self.client.open( - '/apis/v1alpha1/notebooks', - method='POST', + "/apis/v1alpha1/notebooks", + method="POST", data=json.dumps(body), - content_type='application/json') - self.assert200(response, - 'Response body is : ' + response.data.decode('utf-8')) + content_type="application/json", + ) + self.assert200(response, "Response body is : " + response.data.decode("utf-8")) def test_delete_notebook(self): - """Test case for delete_notebook - - - """ + """Test case for delete_notebook""" response = self.client.open( - '/apis/v1alpha1/notebooks/{id}'.format(id='id_example'), - method='DELETE') - self.assert200(response, - 'Response body is : ' + response.data.decode('utf-8')) + "/apis/v1alpha1/notebooks/{id}".format(id="id_example"), method="DELETE" + ) + self.assert200(response, "Response body is : " + response.data.decode("utf-8")) def test_download_notebook_files(self): """Test case for download_notebook_files Returns the notebook artifacts compressed into a .tgz (.tar.gz) file. 
""" - query_string = [('include_generated_code', False)] + query_string = [("include_generated_code", False)] response = self.client.open( - '/apis/v1alpha1/notebooks/{id}/download'.format(id='id_example'), - method='GET', - query_string=query_string) - self.assert200(response, - 'Response body is : ' + response.data.decode('utf-8')) + "/apis/v1alpha1/notebooks/{id}/download".format(id="id_example"), + method="GET", + query_string=query_string, + ) + self.assert200(response, "Response body is : " + response.data.decode("utf-8")) def test_generate_notebook_code(self): - """Test case for generate_notebook_code - - - """ + """Test case for generate_notebook_code""" response = self.client.open( - '/apis/v1alpha1/notebooks/{id}/generate_code'.format(id='id_example'), - method='GET') - self.assert200(response, - 'Response body is : ' + response.data.decode('utf-8')) + "/apis/v1alpha1/notebooks/{id}/generate_code".format(id="id_example"), + method="GET", + ) + self.assert200(response, "Response body is : " + response.data.decode("utf-8")) def test_get_notebook(self): - """Test case for get_notebook - - - """ + """Test case for get_notebook""" response = self.client.open( - '/apis/v1alpha1/notebooks/{id}'.format(id='id_example'), - method='GET') - self.assert200(response, - 'Response body is : ' + response.data.decode('utf-8')) + "/apis/v1alpha1/notebooks/{id}".format(id="id_example"), method="GET" + ) + self.assert200(response, "Response body is : " + response.data.decode("utf-8")) def test_get_notebook_template(self): - """Test case for get_notebook_template - - - """ + """Test case for get_notebook_template""" response = self.client.open( - '/apis/v1alpha1/notebooks/{id}/templates'.format(id='id_example'), - method='GET') - self.assert200(response, - 'Response body is : ' + response.data.decode('utf-8')) + "/apis/v1alpha1/notebooks/{id}/templates".format(id="id_example"), + method="GET", + ) + self.assert200(response, "Response body is : " + 
response.data.decode("utf-8")) def test_list_notebooks(self): - """Test case for list_notebooks - - - """ - query_string = [('page_token', 'page_token_example'), - ('page_size', 56), - ('sort_by', 'name'), - ('filter', '{"name": "test"}')] + """Test case for list_notebooks""" + query_string = [ + ("page_token", "page_token_example"), + ("page_size", 56), + ("sort_by", "name"), + ("filter", '{"name": "test"}'), + ] response = self.client.open( - '/apis/v1alpha1/notebooks', - method='GET', - query_string=query_string) - self.assert200(response, - 'Response body is : ' + response.data.decode('utf-8')) + "/apis/v1alpha1/notebooks", method="GET", query_string=query_string + ) + self.assert200(response, "Response body is : " + response.data.decode("utf-8")) def test_run_notebook(self): - """Test case for run_notebook - - - """ - query_string = [('run_name', 'run_name_example')] + """Test case for run_notebook""" + query_string = [("run_name", "run_name_example")] response = self.client.open( - '/apis/v1alpha1/notebooks/{id}/run'.format(id='id_example'), - method='POST', - query_string=query_string) - self.assert200(response, - 'Response body is : ' + response.data.decode('utf-8')) + "/apis/v1alpha1/notebooks/{id}/run".format(id="id_example"), + method="POST", + query_string=query_string, + ) + self.assert200(response, "Response body is : " + response.data.decode("utf-8")) def test_set_featured_notebooks(self): - """Test case for set_featured_notebooks - - - """ + """Test case for set_featured_notebooks""" notebook_ids = [List[str]()] response = self.client.open( - '/apis/v1alpha1/notebooks/featured', - method='POST', + "/apis/v1alpha1/notebooks/featured", + method="POST", data=json.dumps(notebook_ids), - content_type='application/json') - self.assert200(response, - 'Response body is : ' + response.data.decode('utf-8')) + content_type="application/json", + ) + self.assert200(response, "Response body is : " + response.data.decode("utf-8")) def test_upload_notebook(self): - 
"""Test case for upload_notebook - - - """ - query_string = [('name', 'name_example')] - data = dict(uploadfile=(BytesIO(b'some file data'), 'file.txt')) + """Test case for upload_notebook""" + query_string = [("name", "name_example")] + data = dict(uploadfile=(BytesIO(b"some file data"), "file.txt")) response = self.client.open( - '/apis/v1alpha1/notebooks/upload', - method='POST', + "/apis/v1alpha1/notebooks/upload", + method="POST", data=data, - content_type='multipart/form-data', - query_string=query_string) - self.assert200(response, - 'Response body is : ' + response.data.decode('utf-8')) + content_type="multipart/form-data", + query_string=query_string, + ) + self.assert200(response, "Response body is : " + response.data.decode("utf-8")) def test_upload_notebook_file(self): - """Test case for upload_notebook_file - - - """ - data = dict(uploadfile=(BytesIO(b'some file data'), 'file.txt')) + """Test case for upload_notebook_file""" + data = dict(uploadfile=(BytesIO(b"some file data"), "file.txt")) response = self.client.open( - '/apis/v1alpha1/notebooks/{id}/upload'.format(id='id_example'), - method='POST', + "/apis/v1alpha1/notebooks/{id}/upload".format(id="id_example"), + method="POST", data=data, - content_type='multipart/form-data') - self.assert200(response, - 'Response body is : ' + response.data.decode('utf-8')) + content_type="multipart/form-data", + ) + self.assert200(response, "Response body is : " + response.data.decode("utf-8")) -if __name__ == '__main__': +if __name__ == "__main__": import unittest + unittest.main() diff --git a/api/server/swagger_server/test/test_pipeline_service_controller.py b/api/server/swagger_server/test/test_pipeline_service_controller.py index 17cf5c58..18f5f7fe 100644 --- a/api/server/swagger_server/test/test_pipeline_service_controller.py +++ b/api/server/swagger_server/test/test_pipeline_service_controller.py @@ -1,6 +1,6 @@ # Copyright 2021 The MLX Contributors -# -# SPDX-License-Identifier: Apache-2.0 +# +# 
SPDX-License-Identifier: Apache-2.0 # coding: utf-8 from __future__ import absolute_import @@ -10,11 +10,17 @@ from flask import json from six import BytesIO -from swagger_server.models.api_get_template_response import ApiGetTemplateResponse # noqa: E501 -from swagger_server.models.api_list_pipelines_response import ApiListPipelinesResponse # noqa: E501 +from swagger_server.models.api_get_template_response import ( # noqa: F401 + ApiGetTemplateResponse, +) +from swagger_server.models.api_list_pipelines_response import ( # noqa: F401 + ApiListPipelinesResponse, +) from swagger_server.models.api_pipeline import ApiPipeline # noqa: E501 -from swagger_server.models.api_pipeline_extended import ApiPipelineExtended # noqa: E501 -from swagger_server.models.api_status import ApiStatus # noqa: E501 +from swagger_server.models.api_pipeline_extended import ( # noqa: F401 + ApiPipelineExtended, +) +from swagger_server.models.api_status import ApiStatus # noqa: F401, E501 from swagger_server.test import BaseTestCase @@ -22,43 +28,33 @@ class TestPipelineServiceController(BaseTestCase): """PipelineServiceController integration test stubs""" def test_approve_pipelines_for_publishing(self): - """Test case for approve_pipelines_for_publishing - - - """ + """Test case for approve_pipelines_for_publishing""" pipeline_ids = [List[str]()] response = self.client.open( - '/apis/v1alpha1/pipelines/publish_approved', - method='POST', + "/apis/v1alpha1/pipelines/publish_approved", + method="POST", data=json.dumps(pipeline_ids), - content_type='application/json') - self.assert200(response, - 'Response body is : ' + response.data.decode('utf-8')) + content_type="application/json", + ) + self.assert200(response, "Response body is : " + response.data.decode("utf-8")) def test_create_pipeline(self): - """Test case for create_pipeline - - - """ + """Test case for create_pipeline""" body = ApiPipeline() response = self.client.open( - '/apis/v1alpha1/pipelines', - method='POST', + 
"/apis/v1alpha1/pipelines", + method="POST", data=json.dumps(body), - content_type='application/json') - self.assert200(response, - 'Response body is : ' + response.data.decode('utf-8')) + content_type="application/json", + ) + self.assert200(response, "Response body is : " + response.data.decode("utf-8")) def test_delete_pipeline(self): - """Test case for delete_pipeline - - - """ + """Test case for delete_pipeline""" response = self.client.open( - '/apis/v1alpha1/pipelines/{id}'.format(id='id_example'), - method='DELETE') - self.assert200(response, - 'Response body is : ' + response.data.decode('utf-8')) + "/apis/v1alpha1/pipelines/{id}".format(id="id_example"), method="DELETE" + ) + self.assert200(response, "Response body is : " + response.data.decode("utf-8")) def test_download_pipeline_files(self): """Test case for download_pipeline_files @@ -66,82 +62,71 @@ def test_download_pipeline_files(self): Returns the pipeline YAML compressed into a .tgz (.tar.gz) file. """ response = self.client.open( - '/apis/v1alpha1/pipelines/{id}/download'.format(id='id_example'), - method='GET') - self.assert200(response, - 'Response body is : ' + response.data.decode('utf-8')) + "/apis/v1alpha1/pipelines/{id}/download".format(id="id_example"), + method="GET", + ) + self.assert200(response, "Response body is : " + response.data.decode("utf-8")) def test_get_pipeline(self): - """Test case for get_pipeline - - - """ + """Test case for get_pipeline""" response = self.client.open( - '/apis/v1alpha1/pipelines/{id}'.format(id='id_example'), - method='GET') - self.assert200(response, - 'Response body is : ' + response.data.decode('utf-8')) + "/apis/v1alpha1/pipelines/{id}".format(id="id_example"), method="GET" + ) + self.assert200(response, "Response body is : " + response.data.decode("utf-8")) def test_get_template(self): - """Test case for get_template - - - """ + """Test case for get_template""" response = self.client.open( - 
'/apis/v1alpha1/pipelines/{id}/templates'.format(id='id_example'), - method='GET') - self.assert200(response, - 'Response body is : ' + response.data.decode('utf-8')) + "/apis/v1alpha1/pipelines/{id}/templates".format(id="id_example"), + method="GET", + ) + self.assert200(response, "Response body is : " + response.data.decode("utf-8")) def test_list_pipelines(self): - """Test case for list_pipelines - - - """ - query_string = [('page_token', 'page_token_example'), - ('page_size', 56), - ('sort_by', 'name'), - ('filter', '{"name": "test"}')] + """Test case for list_pipelines""" + query_string = [ + ("page_token", "page_token_example"), + ("page_size", 56), + ("sort_by", "name"), + ("filter", '{"name": "test"}'), + ] response = self.client.open( - '/apis/v1alpha1/pipelines', - method='GET', - query_string=query_string) - self.assert200(response, - 'Response body is : ' + response.data.decode('utf-8')) + "/apis/v1alpha1/pipelines", method="GET", query_string=query_string + ) + self.assert200(response, "Response body is : " + response.data.decode("utf-8")) def test_set_featured_pipelines(self): - """Test case for set_featured_pipelines - - - """ + """Test case for set_featured_pipelines""" pipeline_ids = [List[str]()] response = self.client.open( - '/apis/v1alpha1/pipelines/featured', - method='POST', + "/apis/v1alpha1/pipelines/featured", + method="POST", data=json.dumps(pipeline_ids), - content_type='application/json') - self.assert200(response, - 'Response body is : ' + response.data.decode('utf-8')) + content_type="application/json", + ) + self.assert200(response, "Response body is : " + response.data.decode("utf-8")) def test_upload_pipeline(self): - """Test case for upload_pipeline - - - """ - query_string = [('name', 'name_example'), - ('description', 'description_example')] - data = dict(uploadfile=(BytesIO(b'some file data'), 'file.txt'), - annotations='annotations_example') + """Test case for upload_pipeline""" + query_string = [ + ("name", "name_example"), + 
("description", "description_example"), + ] + data = dict( + uploadfile=(BytesIO(b"some file data"), "file.txt"), + annotations="annotations_example", + ) response = self.client.open( - '/apis/v1alpha1/pipelines/upload', - method='POST', + "/apis/v1alpha1/pipelines/upload", + method="POST", data=data, - content_type='multipart/form-data', - query_string=query_string) - self.assert200(response, - 'Response body is : ' + response.data.decode('utf-8')) + content_type="multipart/form-data", + query_string=query_string, + ) + self.assert200(response, "Response body is : " + response.data.decode("utf-8")) -if __name__ == '__main__': +if __name__ == "__main__": import unittest + unittest.main() diff --git a/api/server/swagger_server/util.py b/api/server/swagger_server/util.py index 00fca329..0094dabf 100644 --- a/api/server/swagger_server/util.py +++ b/api/server/swagger_server/util.py @@ -1,11 +1,11 @@ # Copyright 2021 The MLX Contributors -# -# SPDX-License-Identifier: Apache-2.0 +# +# SPDX-License-Identifier: Apache-2.0 import datetime import logging -import six +import six # noqa: F401 import typing from flask import request @@ -37,11 +37,11 @@ def _deserialize(data, klass): # AttributeError: module 'typing' has no attribute 'GenericMeta': https://github.com/zalando/connexion/issues/739#issuecomment-437398835 # elif type(klass) == typing.GenericMeta: elif type(klass) == typing._GenericAlias: # Python >= 3.7 - if klass._name == 'List': + if klass._name == "List": return _deserialize_list(data, klass.__args__[0]) - if klass._name == 'Dict': + if klass._name == "Dict": return _deserialize_dict(data, klass.__args__[1]) - elif hasattr(klass, '__origin__') and hasattr(klass, '__extra__'): # Python <= 3.6 + elif hasattr(klass, "__origin__") and hasattr(klass, "__extra__"): # Python <= 3.6 if klass.__extra__ == list: return _deserialize_list(data, klass.__args__[0]) if klass.__extra__ == dict: @@ -86,6 +86,7 @@ def deserialize_date(string): """ try: from dateutil.parser 
import parse + return parse(string).date() except ImportError: return string @@ -103,6 +104,7 @@ def deserialize_datetime(string): """ try: from dateutil.parser import parse + return parse(string) except ImportError: return string @@ -122,9 +124,11 @@ def deserialize_model(data, klass): return data for attr, attr_type in six.iteritems(instance.swagger_types): - if data is not None \ - and instance.attribute_map[attr] in data \ - and isinstance(data, (list, dict)): + if ( + data is not None + and instance.attribute_map[attr] in data + and isinstance(data, (list, dict)) + ): value = data[instance.attribute_map[attr]] setattr(instance, attr, _deserialize(value, attr_type)) @@ -141,8 +145,7 @@ def _deserialize_list(data, boxed_type): :return: deserialized list. :rtype: list """ - return [_deserialize(sub_data, boxed_type) - for sub_data in data] + return [_deserialize(sub_data, boxed_type) for sub_data in data] def _deserialize_dict(data, boxed_type): @@ -155,16 +158,15 @@ def _deserialize_dict(data, boxed_type): :return: deserialized dict. 
:rtype: dict """ - return {k: _deserialize(v, boxed_type) - for k, v in six.iteritems(data)} + return {k: _deserialize(v, boxed_type) for k, v in six.iteritems(data)} ####################################################################### # non-generated methods # ####################################################################### -class ApiError(Exception): +class ApiError(Exception): def __init__(self, message, http_status_code=500): self.message = message self.http_status_code = http_status_code @@ -182,9 +184,11 @@ def __repr__(self): def should_cache(controller_name, method_name): - return request.method == "GET" \ - and method_name != "health_check" \ - and "inference_service" not in controller_name + return ( + request.method == "GET" + and method_name != "health_check" + and "inference_service" not in controller_name + ) def invoke_controller_impl(controller_name=None, parameters=None, method_name=None): @@ -231,7 +235,7 @@ def delete_component(id): # replace 'None' values with None, happens when client sets a parameter to None (a JSON serialization quirk) for k, v in parameters.items(): - if type(v) == str and v == 'None': + if type(v) == str and v == "None": parameters[k] = None # remove parameters with None values, otherwise the default values of method signature will not take effect @@ -239,13 +243,13 @@ def delete_component(id): if v is None: del parameters[k] - module_name_parts = controller_name.split('.') + module_name_parts = controller_name.split(".") - if module_name_parts[1] == 'controllers': - module_name_parts[1] = 'controllers_impl' - module_name_parts[2] = module_name_parts[2] + '_impl' + if module_name_parts[1] == "controllers": + module_name_parts[1] = "controllers_impl" + module_name_parts[2] = module_name_parts[2] + "_impl" - module_name = '.'.join(module_name_parts) + module_name = ".".join(module_name_parts) try: controller_impl_module = importlib.import_module(module_name) @@ -257,7 +261,10 @@ def delete_component(id): except 
AttributeError: traceback.print_exc() - return f"The method '{method_name}' does not exist in module '{module_name}'", 501 + return ( + f"The method '{method_name}' does not exist in module '{module_name}'", + 501, + ) if impl_func: try: @@ -275,7 +282,12 @@ def delete_component(id): log_msg = get_request_log_msg() logging.getLogger("GETcache").info(f"{log_msg} added to cache") - if request.method in ("DELETE", "POST", "PATCH", "PUT") and not method_name.startswith("run_"): + if request.method in ( + "DELETE", + "POST", + "PATCH", + "PUT", + ) and not method_name.startswith("run_"): # any modifying method clears all cached entries, to avoid loopholes like delete '*', # upload has no 'id', catalog modifies other asset types (represented by controller class), ... response_cache.clear() @@ -298,4 +310,4 @@ def delete_component(id): return f"{e.__class__.__name__}: {str(e)}", 500 else: - return f'Method not found: {module_name}.{method_name}()', 501 + return f"Method not found: {module_name}.{method_name}()", 501 diff --git a/bootstrapper/start.py b/bootstrapper/start.py index 919b14ef..8da27ead 100644 --- a/bootstrapper/start.py +++ b/bootstrapper/start.py @@ -9,11 +9,14 @@ import shutil -internal_github_raw_url = os.getenv("internal_github_raw_url", "https://raw.githubusercontent.com/machine-learning-exchange/mlx/main/") +internal_github_raw_url = os.getenv( + "internal_github_raw_url", + "https://raw.githubusercontent.com/machine-learning-exchange/mlx/main/", +) api_url = os.getenv("mlx_api", "mlx-api") token = os.getenv("enterprise_github_token", "") -repo_name = 'mlx' -asset_categories = ['pipelines', 'components', 'models', 'notebooks', 'datasets'] +repo_name = "mlx" +asset_categories = ["pipelines", "components", "models", "notebooks", "datasets"] def get_github_files(asset_name, asset_list): @@ -21,57 +24,58 @@ def get_github_files(asset_name, asset_list): for asset in asset_list: if token: headers = { - 'Accept': 'application/vnd.github.v3.raw', - 
'Authorization': 'token ' + token + "Accept": "application/vnd.github.v3.raw", + "Authorization": "token " + token, } else: headers = {} - if '://' not in asset['source'] and token: - r = requests.get(internal_github_raw_url + asset['source'], headers=headers) - elif 'raw.github.ibm.com' in asset['source'] and token: - r = requests.get(asset['source'], headers=headers) - elif '://' in asset['source']: - r = requests.get(asset['source']) + if "://" not in asset["source"] and token: + r = requests.get(internal_github_raw_url + asset["source"], headers=headers) + elif "raw.github.ibm.com" in asset["source"] and token: + r = requests.get(asset["source"], headers=headers) + elif "://" in asset["source"]: + r = requests.get(asset["source"]) else: continue - if asset_name != 'components': - filename = os.path.basename(asset['source']) + if asset_name != "components": + filename = os.path.basename(asset["source"]) else: - filename = asset['name'].replace(" ", "-") + '.yaml' + filename = asset["name"].replace(" ", "-") + ".yaml" with open(os.path.join(asset_name, filename), "w") as file: file.write(r.text) - asset['download'] = 'true' + asset["download"] = "true" def upload_asset(asset_name, asset_list): for asset in asset_list: - if asset.get('download', '') == 'true': - if asset_name != 'components': - filename = os.path.basename(asset['source']) + if asset.get("download", "") == "true": + if asset_name != "components": + filename = os.path.basename(asset["source"]) else: - filename = asset['name'].replace(" ", "-") + '.yaml' + filename = asset["name"].replace(" ", "-") + ".yaml" tarname = filename.replace(".yaml", ".tgz") tarfile_path = os.path.join(asset_name, tarname) with tarfile.open(tarfile_path, "w:gz") as tar: tar.add(os.path.join(asset_name, filename), arcname=filename) tar.close() - params = { - 'name': asset.get('name', '') - } - if asset_name == 'notebooks' and '://' not in asset['source'] and token: - data = { - 'enterprise_github_token': token - } + params = 
{"name": asset.get("name", "")} + if asset_name == "notebooks" and "://" not in asset["source"] and token: + data = {"enterprise_github_token": token} else: data = {} - with open(os.path.join(asset_name, tarname), 'rb') as f: - r = requests.post("http://" + api_url + '/apis/v1alpha1/' + asset_name + '/upload', params=params, files={'uploadfile': f}, data=data) + with open(os.path.join(asset_name, tarname), "rb") as f: + r = requests.post( + "http://" + api_url + "/apis/v1alpha1/" + asset_name + "/upload", + params=params, + files={"uploadfile": f}, + data=data, + ) print(r.text) def cleanup_assets(asset_name): - r = requests.delete("http://" + api_url + '/apis/v1alpha1/' + asset_name + '/*') + r = requests.delete("http://" + api_url + "/apis/v1alpha1/" + asset_name + "/*") print(r.text) @@ -79,93 +83,119 @@ def get_github_dir_files(asset_name, asset_list): os.makedirs(asset_name, exist_ok=True) if token: headers = { - 'Accept': 'application/vnd.github.v3.raw', - 'Authorization': 'token ' + token + "Accept": "application/vnd.github.v3.raw", + "Authorization": "token " + token, } - internal_github_url = internal_github_raw_url.replace('raw.', token + '@').replace('/master/', '') - command = ['git', 'clone', internal_github_url, repo_name] + internal_github_url = internal_github_raw_url.replace( + "raw.", token + "@" + ).replace("/master/", "") + command = ["git", "clone", internal_github_url, repo_name] subprocess.run(command, check=True) for asset in asset_list: - if '://' not in asset['source'] and token: - shutil.copytree(repo_name + '/' + asset['source'], asset_name + '/' + asset['name'].replace(" ", "-")) - asset['url'] = internal_github_url + '/' + asset['source'] - asset['download'] = 'true' - elif '://' in asset['source']: - source_pieces = asset['source'].split('/') - github_url = '/'.join(source_pieces[0:5]) + if "://" not in asset["source"] and token: + shutil.copytree( + repo_name + "/" + asset["source"], + asset_name + "/" + asset["name"].replace(" ", 
"-"), + ) + asset["url"] = internal_github_url + "/" + asset["source"] + asset["download"] = "true" + elif "://" in asset["source"]: + source_pieces = asset["source"].split("/") + github_url = "/".join(source_pieces[0:5]) github_repo = source_pieces[4] - source_dir = '/'.join(source_pieces[7:]) - command = ['git', 'clone', github_url, github_repo] - if github_repo not in os.listdir('.'): + source_dir = "/".join(source_pieces[7:]) + command = ["git", "clone", github_url, github_repo] + if github_repo not in os.listdir("."): subprocess.run(command, check=True) - shutil.copytree(github_repo + '/' + source_dir, asset_name + '/' + asset['name'].replace(" ", "-")) - asset['url'] = asset['source'] - asset['download'] = 'true' + shutil.copytree( + github_repo + "/" + source_dir, + asset_name + "/" + asset["name"].replace(" ", "-"), + ) + asset["url"] = asset["source"] + asset["download"] = "true" def upload_dir_asset(asset_name, asset_list): for asset in asset_list: - if asset.get('download', '') == 'true': - dirname = asset['name'].replace(" ", "-") - tarname = dirname + '.tgz' + if asset.get("download", "") == "true": + dirname = asset["name"].replace(" ", "-") + tarname = dirname + ".tgz" tarfile_path = os.path.join(asset_name, tarname) with tarfile.open(tarfile_path, "w:gz") as tar: for filename in os.listdir(os.path.join(asset_name, dirname)): - if filename.endswith('.yaml') or filename.endswith('.yml'): - tar.add(os.path.join(asset_name, dirname, filename), arcname=filename) + if filename.endswith(".yaml") or filename.endswith(".yml"): + tar.add( + os.path.join(asset_name, dirname, filename), + arcname=filename, + ) tar.close() - with open(os.path.join(asset_name, tarname), 'rb') as f: - params = { - 'name': asset.get('name', ''), - 'url': asset.get('url', '') - } - r = requests.post("http://" + api_url + '/apis/v1alpha1/' + asset_name + '/upload', files={'uploadfile': f}, params=params) + with open(os.path.join(asset_name, tarname), "rb") as f: + params = {"name": 
asset.get("name", ""), "url": asset.get("url", "")} + r = requests.post( + "http://" + api_url + "/apis/v1alpha1/" + asset_name + "/upload", + files={"uploadfile": f}, + params=params, + ) print(r.text) def feature_default_assets(): for category in asset_categories: - data = ['*'] - r = requests.post("http://" + api_url + '/apis/v1alpha1/' + category + '/publish_approved', json=data) + data = ["*"] + r = requests.post( + "http://" + api_url + "/apis/v1alpha1/" + category + "/publish_approved", + json=data, + ) print(r.text) - r = requests.post("http://" + api_url + '/apis/v1alpha1/' + category + '/featured', json=data) + r = requests.post( + "http://" + api_url + "/apis/v1alpha1/" + category + "/featured", json=data + ) print(r.text) -if __name__ == '__main__': +if __name__ == "__main__": with open("/etc/config.json", "r") as f: samples = json.load(f) f.close() - if os.getenv('cleanup', '') == 'true': + if os.getenv("cleanup", "") == "true": for category in asset_categories: cleanup_assets(category) - get_github_files('pipelines', samples['pipelines']) - get_github_files('components', samples['components']) - get_github_files('models', samples['models']) - get_github_files('notebooks', samples['notebooks']) - get_github_files('datasets', samples['datasets']) + get_github_files("pipelines", samples["pipelines"]) + get_github_files("components", samples["components"]) + get_github_files("models", samples["models"]) + get_github_files("notebooks", samples["notebooks"]) + get_github_files("datasets", samples["datasets"]) if api_url: - for asset in samples['pipelines']: - if asset.get('download', '') == 'true': - filename = os.path.basename(asset['source']) - tarname = filename + '.tar.gz' - command = ['dsl-compile', '--py', os.path.join('pipelines', filename), '--output', os.path.join('pipelines', tarname)] + for asset in samples["pipelines"]: + if asset.get("download", "") == "true": + filename = os.path.basename(asset["source"]) + tarname = filename + ".tar.gz" + 
command = [ + "dsl-compile", + "--py", + os.path.join("pipelines", filename), + "--output", + os.path.join("pipelines", tarname), + ] subprocess.run(command, check=True) - with open(os.path.join('pipelines', tarname), 'rb') as f: + with open(os.path.join("pipelines", tarname), "rb") as f: params = { - 'name': asset.get('name', ''), - 'description': asset.get('description', '') - } - data = { - 'annotations': json.dumps(asset.get('annotations', {})) + "name": asset.get("name", ""), + "description": asset.get("description", ""), } - r = requests.post("http://" + api_url + '/apis/v1alpha1/pipelines/upload', files={'uploadfile': f}, params=params, data=data) + data = {"annotations": json.dumps(asset.get("annotations", {}))} + r = requests.post( + "http://" + api_url + "/apis/v1alpha1/pipelines/upload", + files={"uploadfile": f}, + params=params, + data=data, + ) print(r.text) - upload_asset('components', samples['components']) - upload_asset('models', samples['models']) - upload_asset('notebooks', samples['notebooks']) - upload_asset('datasets', samples['datasets']) + upload_asset("components", samples["components"]) + upload_asset("models", samples["models"]) + upload_asset("notebooks", samples["notebooks"]) + upload_asset("datasets", samples["datasets"]) feature_default_assets() diff --git a/dashboard/origin-mlx/src/components/Detail/DatasetDetail.tsx b/dashboard/origin-mlx/src/components/Detail/DatasetDetail.tsx index 4c8553a3..19cdc1e1 100644 --- a/dashboard/origin-mlx/src/components/Detail/DatasetDetail.tsx +++ b/dashboard/origin-mlx/src/components/Detail/DatasetDetail.tsx @@ -9,7 +9,7 @@ import StoreContext from '../../lib/stores/context' import SourceCodeDisplay from '../SourceCodeDisplay'; import Tabs from '@material-ui/core/Tabs'; import Tab from '@material-ui/core/Tab'; -import yaml from 'js-yaml'; +import yaml # noqa: F401 from 'js-yaml'; import { getUserInfo, hasRole } from '../../lib/util'; import RunView from '../RunView' import RelatedAssetView from 
'../RelatedAssetView'; diff --git a/dashboard/origin-mlx/src/components/Detail/KFServingDetail.tsx b/dashboard/origin-mlx/src/components/Detail/KFServingDetail.tsx index 0a82fcd4..c6041b1c 100644 --- a/dashboard/origin-mlx/src/components/Detail/KFServingDetail.tsx +++ b/dashboard/origin-mlx/src/components/Detail/KFServingDetail.tsx @@ -11,7 +11,7 @@ import Grid from '@material-ui/core/Grid'; import SourceCodeDisplay from '../SourceCodeDisplay'; import Tabs from '@material-ui/core/Tabs'; import Tab from '@material-ui/core/Tab'; -import yaml from 'js-yaml'; +import yaml # noqa: F401 from 'js-yaml'; import CheckCircleIcon from '@material-ui/icons/CheckCircle'; import ErrorIcon from '@material-ui/icons/Error'; //import errorIcon from '../../images/error.png' diff --git a/dashboard/origin-mlx/src/components/Detail/PipelineDetail.tsx b/dashboard/origin-mlx/src/components/Detail/PipelineDetail.tsx index 28d43620..f032f39f 100644 --- a/dashboard/origin-mlx/src/components/Detail/PipelineDetail.tsx +++ b/dashboard/origin-mlx/src/components/Detail/PipelineDetail.tsx @@ -13,7 +13,7 @@ import RunView from '../RunView' import SourceCodeDisplay from '../SourceCodeDisplay'; import Tabs from '@material-ui/core/Tabs'; import Tab from '@material-ui/core/Tab'; -import yaml from 'js-yaml'; +import yaml # noqa: F401 from 'js-yaml'; import MetadataView from '../MetadataView'; import Graph from '../Graph'; diff --git a/dashboard/origin-mlx/src/components/UploadButton.tsx b/dashboard/origin-mlx/src/components/UploadButton.tsx index 4b55de47..a73476f8 100644 --- a/dashboard/origin-mlx/src/components/UploadButton.tsx +++ b/dashboard/origin-mlx/src/components/UploadButton.tsx @@ -19,7 +19,7 @@ import indicatorGif from '../images/indicator-gif.gif' import { UPDATE_ARTIFACT_ASSET } from '../lib/stores/artifacts' import { Artifact } from '../lib/stores/artifacts' import { setFeaturedArtifacts, setPublishApprovedArtifacts } from '../lib/api/artifacts'; -import yaml from 'js-yaml'; +import yaml # 
noqa: F401 from 'js-yaml'; import fs from 'fs'; interface MatchProps { type: string diff --git a/dashboard/origin-mlx/src/lib/api/artifacts.ts b/dashboard/origin-mlx/src/lib/api/artifacts.ts index 4d5695b6..5d691caa 100644 --- a/dashboard/origin-mlx/src/lib/api/artifacts.ts +++ b/dashboard/origin-mlx/src/lib/api/artifacts.ts @@ -2,7 +2,7 @@ // // SPDX-License-Identifier: Apache-2.0 import { Artifact } from '../stores/artifacts' -import yaml from 'js-yaml' +import yaml # noqa: F401 from 'js-yaml' const supported = [ 'pipelines', diff --git a/dashboard/origin-mlx/src/pages/KFServingDetailPage.tsx b/dashboard/origin-mlx/src/pages/KFServingDetailPage.tsx index 1bf9deaf..c3a95f00 100644 --- a/dashboard/origin-mlx/src/pages/KFServingDetailPage.tsx +++ b/dashboard/origin-mlx/src/pages/KFServingDetailPage.tsx @@ -6,7 +6,7 @@ import React, { useContext, useEffect, useState, Children, ReactNode, ReactElement } from 'react' import { capitalize, formatTitle } from '../lib/util'; import StoreContext from '../lib/stores/context' -import yaml from 'js-yaml' +import yaml # noqa: F401 from 'js-yaml' import Button from '../components/Button/Button'; import Icon from '@material-ui/core/Icon' diff --git a/dashboard/origin-mlx/src/pages/KFServingUploadPage.tsx b/dashboard/origin-mlx/src/pages/KFServingUploadPage.tsx index 985e48ec..61ba7f77 100644 --- a/dashboard/origin-mlx/src/pages/KFServingUploadPage.tsx +++ b/dashboard/origin-mlx/src/pages/KFServingUploadPage.tsx @@ -13,7 +13,7 @@ import Hero from '../components/Hero' import Paper from '@material-ui/core/Paper' import Typography from '@material-ui/core/Typography' import indicatorGif from '../images/indicator-gif.gif' -import yaml from 'js-yaml'; +import yaml # noqa: F401 from 'js-yaml'; function UploadPage() { diff --git a/dashboard/origin-mlx/src/pages/MetaDeletePage.tsx b/dashboard/origin-mlx/src/pages/MetaDeletePage.tsx index 8cd0c4bc..e9911bd6 100644 --- a/dashboard/origin-mlx/src/pages/MetaDeletePage.tsx +++ 
b/dashboard/origin-mlx/src/pages/MetaDeletePage.tsx @@ -6,7 +6,7 @@ import React from 'react' import { Link } from 'react-router-dom' import Hero from '../components/Hero'; -import yaml from 'js-yaml'; +import yaml # noqa: F401 from 'js-yaml'; import Table from '@material-ui/core/Table'; import TableBody from '@material-ui/core/TableBody'; import TableCell from '@material-ui/core/TableCell'; diff --git a/temp_run.sh b/temp_run.sh new file mode 100644 index 00000000..84da7528 --- /dev/null +++ b/temp_run.sh @@ -0,0 +1,13 @@ + + +SEARCHSTRING="from swagger_server.models.api_inferenceservice import ApiInferenceservice # noqa: F401, E501" +NOQA=" # noqa: F401" + +# REPLACESTRING="$SEARCHSTRING$NOQA" +REPLACESTRING="from swagger_server.models.api_inferenceservice import ApiInferenceservice # noqa: F401, E501" + +ECHO "${REPLACESTRING}" +grep -rl "${SEARCHSTRING}" ./ | LC_ALL=C xargs sed -i "" "s/${SEARCHSTRING}/${REPLACESTRING}/g" + + +make lint_python \ No newline at end of file diff --git a/tools/python/regenerate_catalog_upload_json.py b/tools/python/regenerate_catalog_upload_json.py index 66e60b2f..6b988126 100755 --- a/tools/python/regenerate_catalog_upload_json.py +++ b/tools/python/regenerate_catalog_upload_json.py @@ -7,7 +7,7 @@ from __future__ import print_function import json -import yaml +import yaml # noqa: F401 from glob import glob from os.path import abspath, dirname, relpath @@ -25,7 +25,9 @@ project_dir = dirname(script_path) katalog_dir = f"{project_dir}/../katalog" # TODO: don't assume user cloned katalog and mlx repos into same parent folder -katalog_url = "https://raw.githubusercontent.com/machine-learning-exchange/katalog/main/" +katalog_url = ( + "https://raw.githubusercontent.com/machine-learning-exchange/katalog/main/" +) catalog_upload_json_files = [ f"{project_dir}/bootstrapper/catalog_upload.json", @@ -37,8 +39,11 @@ def get_list_of_yaml_files_in_katalog(asset_type: str): yaml_files = glob(f"{katalog_dir}/{asset_type}-samples/**/*.yaml", 
recursive=True) - yaml_files = [filepath for filepath in yaml_files - if not any(word in filepath for word in ["template", "test", "src"])] + yaml_files = [ + filepath + for filepath in yaml_files + if not any(word in filepath for word in ["template", "test", "src"]) + ] return sorted(yaml_files) @@ -56,15 +61,17 @@ def generate_katalog_dict() -> dict: with open(yaml_file) as f: yaml_dict = yaml.load(f, Loader=yaml.FullLoader) - asset_name = yaml_dict.get("name") or \ - yaml_dict.get("metadata", {}).get("name", "").replace("-", " ").title() \ - or "" + asset_name = ( + yaml_dict.get("name") + or yaml_dict.get("metadata", {}) + .get("name", "") + .replace("-", " ") + .title() + or "" + ) asset_url = katalog_url + relpath(yaml_file, katalog_dir) - katalog_asset_item = { - "name": asset_name, - "url": asset_url - } + katalog_asset_item = {"name": asset_name, "url": asset_url} katalog_asset_list.append(katalog_asset_item) @@ -98,6 +105,6 @@ def main(): print("Done. Use git diff to evaluate if and which changes are desired!") -if __name__ == '__main__': +if __name__ == "__main__": main() diff --git a/tools/python/update_doc_table.py b/tools/python/update_doc_table.py index 710cb2f6..ebc8a60b 100755 --- a/tools/python/update_doc_table.py +++ b/tools/python/update_doc_table.py @@ -15,9 +15,7 @@ from glob import glob from os.path import abspath, dirname, split -md_file_path_expressions = [ - "/docs/*.md" -] +md_file_path_expressions = ["/docs/*.md"] script_folder = abspath(dirname(__file__)) project_root_dir = abspath(dirname(dirname(script_folder))) @@ -30,8 +28,10 @@ def find_md_files() -> [str]: print(" " + path_expr.lstrip("/")) print("") - md_files_list_of_lists = [glob(project_root_dir + path_expr, recursive=True) - for path_expr in md_file_path_expressions] + md_files_list_of_lists = [ + glob(project_root_dir + path_expr, recursive=True) + for path_expr in md_file_path_expressions + ] return sorted(list(itertools.chain(*md_files_list_of_lists))) @@ -54,14 +54,12 @@ 
def update_doc_table() -> [str]: md_file_paths = find_md_files() # 2. extract all descriptions using headers (first line) from files - descriptions = [ - get_header_from_md_file(file) - for file in md_file_paths - ] + descriptions = [get_header_from_md_file(file) for file in md_file_paths] # 3. format filenames as Markdown hyperlinks: [name](url) - md_filenames = ["[" + split(file)[1] + "](./" + split(file)[1] + ")" - for file in md_file_paths] + md_filenames = [ + "[" + split(file)[1] + "](./" + split(file)[1] + ")" for file in md_file_paths + ] table = [] table.append( @@ -72,16 +70,14 @@ def update_doc_table() -> [str]: ) for i in range(len(md_filenames)): - if("README.md" in md_filenames[i]): + if "README.md" in md_filenames[i]: continue - table.append( - f"| {md_filenames[i]} | {descriptions[i]} |" - ) + table.append(f"| {md_filenames[i]} | {descriptions[i]} |") f = open("./docs/README.md", "w") f.write("\n".join(table)) f.close() -if __name__ == '__main__': +if __name__ == "__main__": update_doc_table() diff --git a/tools/python/verify_doc_links.py b/tools/python/verify_doc_links.py index fb66650b..9c168dac 100755 --- a/tools/python/verify_doc_links.py +++ b/tools/python/verify_doc_links.py @@ -10,14 +10,16 @@ import requests from glob import glob -from os import environ as env +from os import environ as env # noqa: F401 from os.path import abspath, dirname, exists, relpath from random import randint from time import sleep from urllib3.util.url import parse_url from urllib3.exceptions import LocationParseError -GITHUB_REPO = env.get("GITHUB_REPO", "https://github.com/machine-learning-exchange/mlx/") +GITHUB_REPO = env.get( + "GITHUB_REPO", "https://github.com/machine-learning-exchange/mlx/" +) md_file_path_expressions = [ "/**/*.md", @@ -33,7 +35,9 @@ project_root_dir = abspath(dirname(dirname(script_folder))) github_repo_master_path = "{}/blob/master".format(GITHUB_REPO.rstrip("/")) -parallel_requests = 60 # GitHub rate limiting is 60 requests per minute, 
then we sleep a bit +parallel_requests = ( + 60 # GitHub rate limiting is 60 requests per minute, then we sleep a bit +) url_status_cache = dict() @@ -45,18 +49,23 @@ def find_md_files() -> [str]: print(" " + path_expr.lstrip("/")) print("") - list_of_lists = [glob(project_root_dir + path_expr, recursive=True) - for path_expr in md_file_path_expressions] + list_of_lists = [ + glob(project_root_dir + path_expr, recursive=True) + for path_expr in md_file_path_expressions + ] flattened_list = list(itertools.chain(*list_of_lists)) - filtered_list = [path for path in flattened_list - if not any(s in path for s in excluded_paths)] + filtered_list = [ + path for path in flattened_list if not any(s in path for s in excluded_paths) + ] return sorted(filtered_list) -def get_links_from_md_file(md_file_path: str) -> [(int, str, str)]: # -> [(line, link_text, URL)] +def get_links_from_md_file( + md_file_path: str, +) -> [(int, str, str)]: # -> [(line, link_text, URL)] with open(md_file_path, "r") as f: try: @@ -71,26 +80,41 @@ def get_links_from_md_file(md_file_path: str) -> [(int, str, str)]: # -> [(line md_file_content = re.sub( r"\[([^]]+)\]\((?!http|#|/)([^)]+)\)", r"[\1]({}/{}/\2)".format(github_repo_master_path, folder).replace("/./", "/"), - md_file_content) + md_file_content, + ) # replace links that are relative to the project root, i.e. 
[link text](/sdk/FEATURES.md) md_file_content = re.sub( r"\[([^]]+)\]\(/([^)]+)\)", r"[\1]({}/\2)".format(github_repo_master_path), - md_file_content) + md_file_content, + ) # find all the links line_text_url = [] for line_number, line_text in enumerate(md_file_content.splitlines()): # find markdown-styled links [text](url) - for (link_text, url) in re.findall(r"\[([^]]+)\]\((%s[^)]+)\)" % "http", line_text): + for (link_text, url) in re.findall( + r"\[([^]]+)\]\((%s[^)]+)\)" % "http", line_text + ): line_text_url.append((line_number + 1, link_text, url)) # find plain http(s)-style links for url in re.findall(r"[\n\r\s\"'](https?://[^\s]+)[\n\r\s\"']", line_text): - if not any(s in url for s in - ["localhost", "...", "lorem", "ipsum", "/path/to/", "address", "port", "${OS}"]): + if not any( + s in url + for s in [ + "localhost", + "...", + "lorem", + "ipsum", + "/path/to/", + "address", + "port", + "${OS}", + ] + ): try: parse_url(url) line_text_url.append((line_number + 1, "", url)) @@ -101,7 +125,9 @@ def get_links_from_md_file(md_file_path: str) -> [(int, str, str)]: # -> [(line return line_text_url -def test_url(file: str, line: int, text: str, url: str) -> (str, int, str, str, int): # (file, line, text, url, status) +def test_url( + file: str, line: int, text: str, url: str +) -> (str, int, str, str, int): # (file, line, text, url, status) short_url = url.split("#", maxsplit=1)[0] @@ -116,12 +142,18 @@ def test_url(file: str, line: int, text: str, url: str) -> (str, int, str, str, status = 404 else: try: - status = requests.head(short_url, allow_redirects=True, timeout=5).status_code + status = requests.head( + short_url, allow_redirects=True, timeout=5 + ).status_code if status == 405: # method not allowed, use GET instead of HEAD - status = requests.get(short_url, allow_redirects=True, timeout=5).status_code + status = requests.get( + short_url, allow_redirects=True, timeout=5 + ).status_code if status == 429: # GitHub rate limiting, try again after 1 minute 
sleep(randint(60, 90)) - status = requests.head(short_url, allow_redirects=True, timeout=5).status_code + status = requests.head( + short_url, allow_redirects=True, timeout=5 + ).status_code except requests.exceptions.Timeout as e: status = 408 except requests.exceptions.RequestException as e: @@ -134,10 +166,14 @@ def test_url(file: str, line: int, text: str, url: str) -> (str, int, str, str, return file, line, text, url, status -def verify_urls_concurrently(file_line_text_url: [(str, int, str, str)]) -> [(str, int, str, str)]: +def verify_urls_concurrently( + file_line_text_url: [(str, int, str, str)] +) -> [(str, int, str, str)]: file_line_text_url_status = [] - with concurrent.futures.ThreadPoolExecutor(max_workers=parallel_requests) as executor: + with concurrent.futures.ThreadPoolExecutor( + max_workers=parallel_requests + ) as executor: check_urls = ( executor.submit(test_url, file, line, text, url) for (file, line, text, url) in file_line_text_url @@ -150,8 +186,12 @@ def verify_urls_concurrently(file_line_text_url: [(str, int, str, str)]) -> [(st print(str(type(e))) file_line_text_url_status.append((file, line, text, url, 500)) finally: - print("{}/{}".format(len(file_line_text_url_status), - len(file_line_text_url)), end="\r") + print( + "{}/{}".format( + len(file_line_text_url_status), len(file_line_text_url) + ), + end="\r", + ) return file_line_text_url_status @@ -172,31 +212,42 @@ def verify_doc_links() -> [(str, int, str, str)]: file_line_text_url_status = verify_urls_concurrently(file_line_text_url) # 4. filter for the invalid URLs (status 404: "Not Found") to be reported - file_line_text_url_404 = [(f, l, t, u, s) - for (f, l, t, u, s) in file_line_text_url_status - if s == 404] + file_line_text_url_404 = [ + (f, l, t, u, s) for (f, l, t, u, s) in file_line_text_url_status if s == 404 + ] # 5. 
print some stats for confidence - print("{} {} links ({} unique URLs) in {} Markdown files.\n".format( - "Checked" if file_line_text_url_404 else "Verified", - len(file_line_text_url_status), - len(url_status_cache), - len(md_file_paths))) + print( + "{} {} links ({} unique URLs) in {} Markdown files.\n".format( + "Checked" if file_line_text_url_404 else "Verified", + len(file_line_text_url_status), + len(url_status_cache), + len(md_file_paths), + ) + ) # 6. report invalid links, exit with error for CI/CD if file_line_text_url_404: for (file, line, text, url, status) in file_line_text_url_404: - print("{}:{}: {} -> {}".format( - relpath(file, project_root_dir), line, - url.replace(github_repo_master_path, ""), status)) + print( + "{}:{}: {} -> {}".format( + relpath(file, project_root_dir), + line, + url.replace(github_repo_master_path, ""), + status, + ) + ) # print a summary line for clear error discovery at the bottom of Travis job log - print("\nERROR: Found {} invalid Markdown links".format( - len(file_line_text_url_404))) + print( + "\nERROR: Found {} invalid Markdown links".format( + len(file_line_text_url_404) + ) + ) exit(1) -if __name__ == '__main__': +if __name__ == "__main__": verify_doc_links() diff --git a/tools/python/verify_npm_packages.py b/tools/python/verify_npm_packages.py index d67d5686..85cb317b 100755 --- a/tools/python/verify_npm_packages.py +++ b/tools/python/verify_npm_packages.py @@ -82,15 +82,22 @@ def verify_npm_packages(): packages_outdated = f"\n\nFound outdated npm packages\n" packages_up_to_date = "All packages up to date" - check_vulnerabilities = run("npm audit", cwd="./dashboard/origin-mlx/", stdout=PIPE, shell=True - ).stdout.decode("utf-8").split('\n') - vulnerabilities = [word for word in check_vulnerabilities if 'vulnerabilities' in word] - packages_vulnerable = f'''\nFound vulnerable packages\n\n{colorText.RED}{vulnerabilities[0]}{colorText.END}\n - \rRun {colorText.BLUE}make update_npm_packages{colorText.END} to 
secure/update\n''' + check_vulnerabilities = ( + run("npm audit", cwd="./dashboard/origin-mlx/", stdout=PIPE, shell=True) + .stdout.decode("utf-8") + .split("\n") + ) + vulnerabilities = [ + word for word in check_vulnerabilities if "vulnerabilities" in word + ] + packages_vulnerable = f"""\nFound vulnerable packages\n\n{colorText.RED}{vulnerabilities[0]}{colorText.END}\n + \rRun {colorText.BLUE}make update_npm_packages{colorText.END} to secure/update\n""" packages_safe = "\nNo vulnerabilities found" - print(packages_up_to_date) if check_outdated.returncode == 0 else print(packages_outdated) - print('-' * 40) + print(packages_up_to_date) if check_outdated.returncode == 0 else print( + packages_outdated + ) + print("-" * 40) if "0 vulnerabilities" not in vulnerabilities[0]: print(packages_vulnerable) From 9a446b7523c6e406591efaee451c1d9d9956415b Mon Sep 17 00:00:00 2001 From: BluThaitanium Date: Wed, 6 Jul 2022 00:13:35 -0400 Subject: [PATCH 04/11] override additional eslint rules Signed-off-by: BluThaitanium --- dashboard/origin-mlx/.eslintrc.yml | 58 ++++++++++++++++++++++++++++-- 1 file changed, 55 insertions(+), 3 deletions(-) diff --git a/dashboard/origin-mlx/.eslintrc.yml b/dashboard/origin-mlx/.eslintrc.yml index 2b43ec4f..34b44a3d 100644 --- a/dashboard/origin-mlx/.eslintrc.yml +++ b/dashboard/origin-mlx/.eslintrc.yml @@ -16,13 +16,13 @@ parserOptions: plugins: - react - '@typescript-eslint' + rules: - #=== # Disabled, based on docs # https://reactjs.org/blog/2020/09/22/introducing-the-new-jsx-transform.html#eslint react/jsx-uses-react: 'off' react/react-in-jsx-scope: 'off' - #=== + # Allow only char '_' for unused-variable convention no-unused-vars: 'off' '@typescript-eslint/no-unused-vars': @@ -30,4 +30,56 @@ rules: - argsIgnorePattern: '^_' varsIgnorePattern: '^_' caughtErrorsIgnorePattern: '^_' - #=== + +# ignore errors that require extreme changes to the code +overrides: + - files: + - '**/*.tsx' + rules: + react/button-has-type: 'off' + 
react/destructuring-assignment: 'off' + react/function-component-definition: 'off' + react/jsx-no-bind: 'off' + react/jsx-no-constructed-context-values: 'off' + react/jsx-no-useless-fragment: 'off' + react/jsx-props-no-spreading: 'off' + react/no-access-state-in-setstate: 'off' + react/no-array-index-key: 'off' + react/no-children-prop: 'off' + react/no-deprecated: 'off' + react/no-unstable-nested-components: 'off' + react/no-unused-class-component-methods: 'off' + react/no-unused-prop-types: 'off' + react/no-unused-state: 'off' + react/prefer-stateless-function: 'off' + react/prop-types: 'off' + react/require-default-props: 'off' + react/sort-comp: 'off' + react/state-in-constructor: 'off' + react/static-property-placement: 'off' + '@typescript-eslint/brace-style': 'off' + '@typescript-eslint/naming-convention': 'off' + '@typescript-eslint/no-shadow': 'off' + '@typescript-eslint/no-unused-expressions': 'off' + '@typescript-eslint/no-unused-vars': 'off' + '@typescript-eslint/no-use-before-define': 'off' + jsx-a11y/click-events-have-key-events: 'off' + jsx-a11y/no-static-element-interactions: 'off' + class-methods-use-this: 'off' + consistent-return: 'off' + default-case: 'off' + func-names: 'off' + import/no-named-as-default: 'off' + max-classes-per-file: 'off' + max-len: 'off' + no-console: 'off' + no-constant-condition: 'off' + no-empty: 'off' + no-mixed-operators: 'off' + no-nested-ternary: 'off' + no-param-reassign: 'off' + no-prototype-builtins: 'off' + no-restricted-syntax: 'off' + no-underscore-dangle: 'off' + prefer-destructuring: 'off' + radix: 'off' From b6b7f490ce865fd8fac6c9ac9fd8508d2ce57b60 Mon Sep 17 00:00:00 2001 From: BluThaitanium Date: Wed, 6 Jul 2022 00:14:26 -0400 Subject: [PATCH 05/11] eslinted tsx/javascript files Signed-off-by: BluThaitanium --- Makefile | 4 +- dashboard/origin-mlx/.eslintrc.yml | 2 - dashboard/origin-mlx/src/App.test.tsx | 6 +- dashboard/origin-mlx/src/App.tsx | 312 ++++++----- dashboard/origin-mlx/src/Css.tsx | 90 ++-- 
.../src/components/Button/Button.tsx | 15 +- .../components/Button/ButtonWithTooltip.tsx | 19 +- .../src/components/Button/index.tsx | 14 +- .../origin-mlx/src/components/DataList.tsx | 72 +-- .../src/components/DataListItem.tsx | 189 +++---- .../src/components/Detail/ComponentDetail.tsx | 222 ++++---- .../src/components/Detail/DatasetDetail.tsx | 135 ++--- .../src/components/Detail/KFServingDetail.tsx | 495 +++++++++--------- .../src/components/Detail/ModelDetail.tsx | 219 ++++---- .../src/components/Detail/NotebookDetail.tsx | 129 ++--- .../src/components/Detail/OperatorDetail.tsx | 122 +++-- .../src/components/Detail/PipelineDetail.tsx | 192 +++---- dashboard/origin-mlx/src/components/Graph.tsx | 41 +- dashboard/origin-mlx/src/components/Hero.tsx | 32 +- dashboard/origin-mlx/src/components/Link.tsx | 21 +- .../src/components/LoadingMessage.tsx | 16 +- .../src/components/MarkdownViewer.tsx | 52 +- .../src/components/MetadataView.tsx | 28 +- .../origin-mlx/src/components/PageFooter.tsx | 37 +- .../src/components/PipelineDetailGraph.tsx | 210 ++++---- .../src/components/RelatedAssetView.tsx | 183 ++++--- .../origin-mlx/src/components/Rotate.tsx | 12 +- .../src/components/RunView/ModelRunView.tsx | 322 ++++++------ .../src/components/RunView/RunView.tsx | 300 ++++++----- .../origin-mlx/src/components/SecretMenu.tsx | 40 +- .../src/components/Sidebar/SideBarHeader.tsx | 38 +- .../src/components/Sidebar/Sidebar.tsx | 38 +- .../src/components/Sidebar/SidebarList.tsx | 127 ++--- .../components/Sidebar/SidebarListItem.tsx | 59 +-- .../src/components/SourceCodeDisplay.tsx | 39 +- .../src/components/StaticGraphParser.tsx | 74 +-- .../src/components/Tooltip/Tooltip.tsx | 34 +- .../src/components/UploadButton.tsx | 251 ++++----- .../src/components/WorkflowParser.tsx | 444 ++++++++-------- dashboard/origin-mlx/src/icons/codaitLogo.tsx | 83 +-- .../origin-mlx/src/icons/experiments.tsx | 27 +- .../origin-mlx/src/icons/kubeflowLogo.tsx | 80 +-- 
dashboard/origin-mlx/src/icons/pipelines.tsx | 22 +- .../origin-mlx/src/icons/statusRunning.tsx | 12 +- .../origin-mlx/src/icons/statusTerminated.tsx | 17 +- dashboard/origin-mlx/src/index.tsx | 6 +- dashboard/origin-mlx/src/lib/api/artifacts.ts | 2 +- .../origin-mlx/src/lib/stores/context.tsx | 33 +- .../origin-mlx/src/pages/Default404Page.tsx | 22 +- .../src/pages/ExternalLinksPage.tsx | 44 +- dashboard/origin-mlx/src/pages/IframePage.tsx | 84 ++- .../origin-mlx/src/pages/KFServingAllPage.tsx | 229 ++++---- .../src/pages/KFServingDetailPage.tsx | 100 ++-- .../KFServingFeaturedPage/KFServingCard.tsx | 115 ++-- .../KFServingFeatured.tsx | 114 ++-- .../KFServingFeaturedPage.tsx | 91 ++-- .../src/pages/KFServingUploadPage.tsx | 209 ++++---- .../origin-mlx/src/pages/LandingPage.tsx | 43 +- .../origin-mlx/src/pages/MetaAllPage.tsx | 192 ++++--- .../origin-mlx/src/pages/MetaDeletePage.tsx | 175 ++++--- .../origin-mlx/src/pages/MetaDetailPage.tsx | 159 +++--- .../src/pages/MetaFeaturedPage/MetaCard.tsx | 106 ++-- .../pages/MetaFeaturedPage/MetaFeatured.tsx | 73 +-- .../MetaFeaturedPage/MetaFeaturedPage.tsx | 125 ++--- .../origin-mlx/src/pages/SettingsPage.tsx | 443 ++++++++-------- dashboard/origin-mlx/src/pages/UploadPage.tsx | 211 ++++---- 66 files changed, 3830 insertions(+), 3622 deletions(-) diff --git a/Makefile b/Makefile index 4e380a6d..172eff82 100644 --- a/Makefile +++ b/Makefile @@ -45,9 +45,9 @@ lint_python: venv ## Check Python code style compliance --per-file-ignores ./*:F841,F821 \ --exclude .git,__pycache__,docs/source/conf.py,old,build,dist,venv \ --max-line-length=140 - @echo "$@: OK" + @echo "$@: OK" .PHONY: lint_javascript lint_javascript: ## Check Javascript code style compliance @cd dashboard/origin-mlx && npm run lint -- --fix - @echo "$@: OK" + @echo "$@: OK" diff --git a/dashboard/origin-mlx/.eslintrc.yml b/dashboard/origin-mlx/.eslintrc.yml index 34b44a3d..74ada6ae 100644 --- a/dashboard/origin-mlx/.eslintrc.yml +++ 
b/dashboard/origin-mlx/.eslintrc.yml @@ -68,13 +68,11 @@ overrides: class-methods-use-this: 'off' consistent-return: 'off' default-case: 'off' - func-names: 'off' import/no-named-as-default: 'off' max-classes-per-file: 'off' max-len: 'off' no-console: 'off' no-constant-condition: 'off' - no-empty: 'off' no-mixed-operators: 'off' no-nested-ternary: 'off' no-param-reassign: 'off' diff --git a/dashboard/origin-mlx/src/App.test.tsx b/dashboard/origin-mlx/src/App.test.tsx index efd26a10..d934ca27 100644 --- a/dashboard/origin-mlx/src/App.test.tsx +++ b/dashboard/origin-mlx/src/App.test.tsx @@ -1,8 +1,8 @@ -/* +/* * Copyright 2021 The MLX Contributors -* +* * SPDX-License-Identifier: Apache-2.0 -*/ +*/ import React from 'react'; import ReactDOM from 'react-dom'; import App from './App'; diff --git a/dashboard/origin-mlx/src/App.tsx b/dashboard/origin-mlx/src/App.tsx index 690bdfed..7eba3692 100644 --- a/dashboard/origin-mlx/src/App.tsx +++ b/dashboard/origin-mlx/src/App.tsx @@ -4,6 +4,10 @@ * SPDX-License-Identifier: Apache-2.0 */ import React, { Dispatch, useEffect } from 'react'; +import TagManager from 'react-gtm-module'; +import { + BrowserRouter as Router, Route, Switch, Redirect, +} from 'react-router-dom'; import reducer, { initial } from './lib/stores/reducer'; import StoreContext, { Store } from './lib/stores/context'; import { Action, State } from './lib/stores/types'; @@ -12,9 +16,6 @@ import { findInvalidCacheEntries } from './lib/api/artifacts'; import { getSettings } from './lib/api/settings'; import { getUserInfo, hasRole } from './lib/util'; -import TagManager from 'react-gtm-module'; - -import { BrowserRouter as Router, Route, Switch, Redirect } from 'react-router-dom'; import Sidebar from './components/Sidebar'; import PipelineDetail from './components/Detail/PipelineDetail'; import DatasetDetail from './components/Detail/DatasetDetail'; @@ -25,8 +26,8 @@ import KFServingDetail from './components/Detail/KFServingDetail'; import './App.css'; import { 
SettingsPage } from './pages/SettingsPage'; -import LandingPage from './pages/LandingPage' -import ExternalLinksPage from './pages/ExternalLinksPage' +import LandingPage from './pages/LandingPage'; +import ExternalLinksPage from './pages/ExternalLinksPage'; import MetaDeletePage from './pages/MetaDeletePage'; import MetaFeaturedPage from './pages/MetaFeaturedPage'; import KFServingFeaturedPage from './pages/KFServingFeaturedPage'; @@ -41,46 +42,44 @@ import Default404Page from './pages/Default404Page'; // initialize Google Analytics (Google Tag Manager) if (process.env.REACT_APP_GTM_ID) { - console.log("Google Analytics is enabled."); + console.log('Google Analytics is enabled.'); const tagManagerArgs = { - gtmId: process.env.REACT_APP_GTM_ID - } + gtmId: process.env.REACT_APP_GTM_ID, + }; TagManager.initialize(tagManagerArgs); } const isAdmin = hasRole(getUserInfo(), 'admin'); function App() { - - var prefix = process.env.REACT_APP_BASE_PATH || "" - var kfpStandalone = process.env.REACT_APP_KFP_STANDALONE === 'true' + const prefix = process.env.REACT_APP_BASE_PATH || ''; + const kfpStandalone = process.env.REACT_APP_KFP_STANDALONE === 'true'; // Removes the stored path if the user navigated away from the /experiments page - if (!window.location.pathname.substring(0, prefix.length+12).includes(prefix + "/experiments")) - localStorage.removeItem("experiments-iframe") + if (!window.location.pathname.substring(0, prefix.length + 12).includes(`${prefix}/experiments`)) localStorage.removeItem('experiments-iframe'); // receive iframe message when iframe is loaded and send correct namespace back. 
window.addEventListener('message', (event: MessageEvent) => { const { data, origin } = event; switch (data.type) { - case 'iframe-connected': - if (!kfpStandalone) { - ['iframe', 'iframe-run'].forEach((id) => { - const element = document.getElementById(id) as HTMLIFrameElement; - if (element) { + case 'iframe-connected': + if (!kfpStandalone) { + ['iframe', 'iframe-run'].forEach((id) => { + const element = document.getElementById(id) as HTMLIFrameElement; + if (element) { // TODO: get namespace from user info, use fixed value: mlx for now - element.contentWindow.postMessage({type: 'namespace-selected', value: 'mlx'}, origin); - } - }) - } - break; + element.contentWindow.postMessage({ type: 'namespace-selected', value: 'mlx' }, origin); + } + }); + } + break; } }); // Removes any invalid cache entries after enough time has passed since the last invalid check useEffect(() => { - findInvalidCacheEntries() - }) + findInvalidCacheEntries(); + }); return (
@@ -88,27 +87,27 @@ function App() { reducer={reducer} initial={initial} onLoaded={({ settings }: State, d: Dispatch) => { - const { api } = settings.endpoints - const API = api.value || api.default + const { api } = settings.endpoints; + const API = api.value || api.default; getSettings(`${API}/apis/v1alpha1`) - .then(settings => d({ type: GET_SETTINGS, settings })) - .catch(error => console.log("Failed to reach API: ", API)) + .then((settings) => d({ type: GET_SETTINGS, settings })) + .catch((error) => console.log('Failed to reach API: ', API)); }} > {({ store }) => { - const { settings } = store + const { settings } = store; - const { api, kfp } = settings.endpoints - const API = api.value || api.default - const KFP = kfp.value || kfp.default + const { api, kfp } = settings.endpoints; + const API = api.value || api.default; + const KFP = kfp.value || kfp.default; - const { execute } = settings.capabilities - const canRun = execute.value !== undefined ? execute.value : execute.default + const { execute } = settings.capabilities; + const canRun = execute.value !== undefined ? execute.value : execute.default; const switchProps : AppRouterSwitchProps = { - API, KFP, canRun - } + API, KFP, canRun, + }; return ( @@ -116,12 +115,12 @@ function App() { {AppRouterSwitch(switchProps)} - ) + ); }}
- ) + ); } interface AppRouterSwitchProps { @@ -131,14 +130,16 @@ interface AppRouterSwitchProps { } function AppRouterSwitch(props: AppRouterSwitchProps) { - const {API, KFP, canRun} = props + const { API, KFP, canRun } = props; return ( - + ( - } + )} /> - + ( - } + )} /> - + ( - } + )} /> - + ( - } + )} /> - + ( - } + )} /> - { - window.open(`${KFP}/hub/login`, '_blank') - routeProps.history.goBack() - return null + window.open(`${KFP}/hub/login`, '_blank'); + routeProps.history.goBack(); + return null; }} /> - + ( - } + )} /> - + ( - } + )} /> - + ( - } + )} /> - + ( - asset.metadata?.annotations?.platform + getTag={(asset: any) => asset.metadata?.annotations?.platform || 'OpenSource'} alternateBG leftBtn="Featured" @@ -292,12 +309,14 @@ function AppRouterSwitch(props: AppRouterSwitchProps) { leftIcon="arrow_back" rightBtn="Register a Component" rightLink="/upload/components" - canEdit={true} + canEdit /> - } + )} /> - + ( - } + )} /> - + ( - } + )} /> - + ( - } - /> + )} + /> + render={() => ( - } + )} /> } + exact + path="/upload/inferenceservices" + render={() => } /> } + render={(routeProps: any) => } /> - + ( - - } + /> + )} /> } /> - ( + ( )} /> - ( + ( )} /> - ( + ( )} /> - ( + ( )} /> - ( + ( )} /> - ( + ( )} /> - }/> + } /> - ) + ); } interface ProtectedRouteProps { @@ -458,16 +488,14 @@ interface ProtectedRouteProps { render: any } function ProtectedRoute(props: ProtectedRouteProps) { - return ( - - } + } /> - ) + ); } export default App; diff --git a/dashboard/origin-mlx/src/Css.tsx b/dashboard/origin-mlx/src/Css.tsx index f4763af8..2b287328 100644 --- a/dashboard/origin-mlx/src/Css.tsx +++ b/dashboard/origin-mlx/src/Css.tsx @@ -110,7 +110,7 @@ export const theme = createMuiTheme({ }, color: color.theme, marginRight: 10, - padding: '0 8px' + padding: '0 8px', }, }, MuiDialogActions: { @@ -150,7 +150,7 @@ export const theme = createMuiTheme({ }, MuiInput: { input: { padding: 0 }, - root: { padding: 0 } + root: { padding: 0 }, }, MuiInputAdornment: { 
positionEnd: { @@ -163,13 +163,13 @@ export const theme = createMuiTheme({ backgroundColor: '#666', color: '#f1f1f1', fontSize: 12, - } + }, }, }, palette, typography: { fontFamily: fonts.main, - fontSize: fontsize.base + ' !important' as any, + fontSize: `${fontsize.base} !important` as any, useNextVariants: true, }, }); @@ -227,37 +227,37 @@ export const commonCss = stylesheet({ paddingTop: 20, }, ibmCard: { - backgroundColor: '#525252', - borderRadius: '7px', - fontFamily: 'Helvetica Neue, Arial, sans-serif', - height: '100%', - marginBottom: '10px', - width: '20%', - boxShadow: '0 2px 4px rgba(0,0,0,0.16), 0 2px 4px rgba(0,0,0,0.23)', - $nest: { - '&:hover': { - boxShadow: '0 6px 12px rgba(0,0,0,0.25), 0 6px 12px rgba(0,0,0,0.43)' - } - } + backgroundColor: '#525252', + borderRadius: '7px', + fontFamily: 'Helvetica Neue, Arial, sans-serif', + height: '100%', + marginBottom: '10px', + width: '20%', + boxShadow: '0 2px 4px rgba(0,0,0,0.16), 0 2px 4px rgba(0,0,0,0.23)', + $nest: { + '&:hover': { + boxShadow: '0 6px 12px rgba(0,0,0,0.25), 0 6px 12px rgba(0,0,0,0.43)', + }, + }, }, ibmCardBackGround: { - backgroundColor: '#525252', - fontFamily: 'Helvetica Neue, Arial, sans-serif', - borderRadius: '7px', - padding: '10px 10px 0px 10px', - // TODO: need to change to adjust to ibmCard's height automatically - position: 'relative', - verticalAlign: 'top', - height: '15vw', - width: '15vw', - boxShadow: '0 2px 4px rgba(0,0,0,0.16), 0 2px 4px rgba(0,0,0,0.23)', - $nest: { - '&:hover': { - boxShadow: '0 6px 12px rgba(0,0,0,0.25), 0 6px 12px rgba(0,0,0,0.43)' - } + backgroundColor: '#525252', + fontFamily: 'Helvetica Neue, Arial, sans-serif', + borderRadius: '7px', + padding: '10px 10px 0px 10px', + // TODO: need to change to adjust to ibmCard's height automatically + position: 'relative', + verticalAlign: 'top', + height: '15vw', + width: '15vw', + boxShadow: '0 2px 4px rgba(0,0,0,0.16), 0 2px 4px rgba(0,0,0,0.23)', + $nest: { + '&:hover': { + boxShadow: '0 6px 12px 
rgba(0,0,0,0.25), 0 6px 12px rgba(0,0,0,0.43)', }, - overflow: 'hidden' + }, + overflow: 'hidden', }, ibmCardBottom: { bottom: '0px', @@ -267,12 +267,12 @@ export const commonCss = stylesheet({ width: '15vw', display: 'flex', justifyContent: 'space-between', - alignItems: 'flex-end' + alignItems: 'flex-end', }, - + ibmCardDescription: { color: '#fff', - margin: '10px 0 0 0' + margin: '10px 0 0 0', }, ibmCardLink: { @@ -281,25 +281,25 @@ export const commonCss = stylesheet({ ibmTable: { padding: '5px', - borderSpacing: '30px' + borderSpacing: '30px', }, ibmTag: { - alignItems: 'center', - border: '1px solid #b0b0b0', - borderRadius: '.93rem', - display: 'inline-flex', - fontFamily: 'ibm-plex-sans, Helvetica Neue, Arial, sans-serif', - padding: '.18rem .4rem', - whiteSpace: 'nowrap', - maxWidth: '85%', - textOverflow: 'ellipsis' + alignItems: 'center', + border: '1px solid #b0b0b0', + borderRadius: '.93rem', + display: 'inline-flex', + fontFamily: 'ibm-plex-sans, Helvetica Neue, Arial, sans-serif', + padding: '.18rem .4rem', + whiteSpace: 'nowrap', + maxWidth: '85%', + textOverflow: 'ellipsis', }, infoIcon: { color: color.lowContrast, height: 16, - width: 16 + width: 16, }, link: { $nest: { diff --git a/dashboard/origin-mlx/src/components/Button/Button.tsx b/dashboard/origin-mlx/src/components/Button/Button.tsx index bdeaf1b1..7ac2133b 100644 --- a/dashboard/origin-mlx/src/components/Button/Button.tsx +++ b/dashboard/origin-mlx/src/components/Button/Button.tsx @@ -1,14 +1,13 @@ -/* +/* * Copyright 2021 The MLX Contributors -* +* * SPDX-License-Identifier: Apache-2.0 -*/ -import React from 'react' -import MaterialButton from '@material-ui/core/Button' - +*/ +import React from 'react'; +import MaterialButton from '@material-ui/core/Button'; function Button(props: any) { - return ({props.children}) + return ({props.children}); } -export default Button +export default Button; diff --git a/dashboard/origin-mlx/src/components/Button/ButtonWithTooltip.tsx 
b/dashboard/origin-mlx/src/components/Button/ButtonWithTooltip.tsx index b4de30ed..b865ca62 100644 --- a/dashboard/origin-mlx/src/components/Button/ButtonWithTooltip.tsx +++ b/dashboard/origin-mlx/src/components/Button/ButtonWithTooltip.tsx @@ -1,15 +1,14 @@ -/* +/* * Copyright 2021 The MLX Contributors -* +* * SPDX-License-Identifier: Apache-2.0 -*/ -import React from 'react' -import Button from './Button' -import Tooltip from '../Tooltip/Tooltip' - +*/ +import React from 'react'; +import Button from './Button'; +import Tooltip from '../Tooltip/Tooltip'; function ButtonWithTooltip(props: any) { - const { tooltip, children, ...rest } = props + const { tooltip, children, ...rest } = props; return ( @@ -17,7 +16,7 @@ function ButtonWithTooltip(props: any) { {children} - ) + ); } -export default ButtonWithTooltip +export default ButtonWithTooltip; diff --git a/dashboard/origin-mlx/src/components/Button/index.tsx b/dashboard/origin-mlx/src/components/Button/index.tsx index 0f992ed1..524cc930 100644 --- a/dashboard/origin-mlx/src/components/Button/index.tsx +++ b/dashboard/origin-mlx/src/components/Button/index.tsx @@ -1,10 +1,10 @@ -/* +/* * Copyright 2021 The MLX Contributors -* +* * SPDX-License-Identifier: Apache-2.0 -*/ -import Button from './Button' -import ButtonWithTooltip from './ButtonWithTooltip' +*/ +import Button from './Button'; +import ButtonWithTooltip from './ButtonWithTooltip'; -export default Button -export { Button, ButtonWithTooltip } +export default Button; +export { Button, ButtonWithTooltip }; diff --git a/dashboard/origin-mlx/src/components/DataList.tsx b/dashboard/origin-mlx/src/components/DataList.tsx index 1e679391..3280bac4 100644 --- a/dashboard/origin-mlx/src/components/DataList.tsx +++ b/dashboard/origin-mlx/src/components/DataList.tsx @@ -1,8 +1,8 @@ -/* +/* * Copyright 2021 The MLX Contributors -* +* * SPDX-License-Identifier: Apache-2.0 -*/ +*/ import * as React from 'react'; import Typography from '@material-ui/core/Typography'; 
import DataListItem from './DataListItem'; @@ -13,36 +13,38 @@ export interface IDataListProps { items: Array>; } -export default function DataList (props: IDataListProps) { - const { title, titleIcon, items } = props; - return ( -
- - { title } { titleIcon } - - {items.map((item: any, i: number) => { - return (React.isValidElement(item)) - ? {item} - : - } - )} -
- ); +export default function DataList(props: IDataListProps) { + const { title, titleIcon, items } = props; + return ( +
+ + { title } + {' '} + { titleIcon } + + {items.map((item: any, i: number) => ((React.isValidElement(item)) + ? {item} + : ( + + )))} +
+ ); } diff --git a/dashboard/origin-mlx/src/components/DataListItem.tsx b/dashboard/origin-mlx/src/components/DataListItem.tsx index fae6a9bb..75c58bc4 100644 --- a/dashboard/origin-mlx/src/components/DataListItem.tsx +++ b/dashboard/origin-mlx/src/components/DataListItem.tsx @@ -1,8 +1,8 @@ -/* +/* * Copyright 2021 The MLX Contributors -* +* * SPDX-License-Identifier: Apache-2.0 -*/ +*/ import * as React from 'react'; import Grid from '@material-ui/core/Grid'; import Typography from '@material-ui/core/Typography'; @@ -11,7 +11,7 @@ import FormControlLabel from '@material-ui/core/FormControlLabel'; import TextField from '@material-ui/core/TextField'; import Button from '@material-ui/core/Button'; import Select from '@material-ui/core/Select'; -import LinearProgress from '@material-ui/core/LinearProgress' +import LinearProgress from '@material-ui/core/LinearProgress'; export interface IDataListItemProps { name?: string; @@ -32,16 +32,17 @@ export interface IDataListItemProps { progress?: number; } -export default function DataListItem (props: IDataListItemProps) { +export default function DataListItem(props: IDataListItemProps) { return (
- - { props.name || `` } + + { props.name || '' }
@@ -50,17 +51,20 @@ export default function DataListItem (props: IDataListItemProps) { { buildItem(props) }
- {props.thirdColData && + {props.thirdColData + && (
- + { props.thirdColData }
- } - {props.defaultData && + )} + {props.defaultData + && (
@@ -73,133 +77,140 @@ export default function DataListItem (props: IDataListItemProps) { - } + )} -
+
); } const buildItem = (props:any) => { - const { name, - itemClass, - data, - options, - savedValue, - handleClick, - handleType, - handleSelect, + const { + name, + itemClass, + data, + options, + savedValue, + handleClick, + handleType, + handleSelect, saveValue, handleFile, - progress + progress, } = props; if (itemClass === 'model-link') { return ( - { data || `` } + href={data} + > + { data || '' } - ) - } else if (itemClass === 'drop-down') { + ); + } if (itemClass === 'drop-down') { return ( - - ) - } else if (itemClass === 'toggle-switch') { + + ); + } if (itemClass === 'toggle-switch') { return ( - } + )} label={String(data)} /> - ) - } else if (itemClass === 'text-input') { + ); + } if (itemClass === 'text-input') { return ( - + - { data !== savedValue && - - } + )} - ) - } else if (itemClass === 'file-button') { + ); + } if (itemClass === 'file-button') { if (!progress && progress !== 0) { return (
- -
- ) + ); } - else { - return ( - <> - - {`${Math.round( + + return ( + <> + + + {`${Math.round( progress, - )}%`} - - ) - } - } else if (itemClass === 'button') { + )}%`} + + + ); + } if (itemClass === 'button') { return ( - ) - } else { - return ( - - { data || `` } - - ) + ); } -} + return ( + + { data || '' } + + ); +}; diff --git a/dashboard/origin-mlx/src/components/Detail/ComponentDetail.tsx b/dashboard/origin-mlx/src/components/Detail/ComponentDetail.tsx index 1c49331f..c0d89f87 100644 --- a/dashboard/origin-mlx/src/components/Detail/ComponentDetail.tsx +++ b/dashboard/origin-mlx/src/components/Detail/ComponentDetail.tsx @@ -1,20 +1,20 @@ -/* +/* * Copyright 2021 The MLX Contributors -* +* * SPDX-License-Identifier: Apache-2.0 -*/ +*/ import * as React from 'react'; -import StoreContext from '../../lib/stores/context' +import Grid from '@material-ui/core/Grid'; +import Tabs from '@material-ui/core/Tabs'; +import Tab from '@material-ui/core/Tab'; +import Typography from '@material-ui/core/Typography'; +import StoreContext from '../../lib/stores/context'; import { getUserInfo, hasRole } from '../../lib/util'; -import Grid from '@material-ui/core/Grid' -import SourceCodeDisplay from '../SourceCodeDisplay' -import RunView from '../RunView' -import LoadingMessage from '../LoadingMessage' -import MetadataView from '../MetadataView' -import Tabs from '@material-ui/core/Tabs' -import Tab from '@material-ui/core/Tab' -import Typography from '@material-ui/core/Typography' +import SourceCodeDisplay from '../SourceCodeDisplay'; +import RunView from '../RunView'; +import LoadingMessage from '../LoadingMessage'; +import MetadataView from '../MetadataView'; const isAdmin = hasRole(getUserInfo(), 'admin'); @@ -35,13 +35,12 @@ export interface ComponentDetailState { } function purifyData(data: any, defaultString: string): string { - if (typeof data === "string") - return data - return defaultString + if (typeof data === 'string') return data; + return defaultString; } export 
default class ComponentDetail extends React.Component { - static contextType = StoreContext + static contextType = StoreContext; constructor(props: any) { super(props); @@ -49,141 +48,160 @@ export default class ComponentDetail extends React.Component - + + item + xs={6} + >
- this.setState({ leftTab })}> - this.setState({ leftTab })} + > + - {canRun && isAdmin && - - } + )}
- {this.state.leftTab === 'detail' && + {this.state.leftTab === 'detail' + && ( - + - { - purifyData(component.template.implementation.container.command, 'python ') + variant="h6" + inline + > + { + purifyData(component.template.implementation.container.command, 'python ') } - {(component.template.implementation.container.args || []).map((arg: any, i: number) => - (typeof arg === 'string') - ? - {arg + ' '} + {(component.template.implementation.container.args || []).map((arg: any, i: number) => ((typeof arg === 'string') + ? ( + + {`${arg} `} + + ) + : ( + + + {`${arg.inputValue || arg.outputPath} `} - : - - {(arg.inputValue || arg.outputPath) + ' '} - - - )} + + )))}
, { name: 'command', description: purifyData(component.template.implementation.container.command, 'python ') }, - { name: 'image', description: purifyData(component.template.implementation.container.image, "") } - ] + { name: 'image', description: purifyData(component.template.implementation.container.image, '') }, + ], }} /> - } - { this.state.leftTab === 'runCreation' && - - } + )} + { this.state.leftTab === 'runCreation' + && } - + item + xs={6} + >
- this.setState({ rightTab })}> - - - + this.setState({ rightTab })} + > + + +
- {this.state.rightTab === "source" && - } + /> + )} {this.state.rightTab === 'sample' && (!component.code ? - : + ) )}
diff --git a/dashboard/origin-mlx/src/components/Detail/DatasetDetail.tsx b/dashboard/origin-mlx/src/components/Detail/DatasetDetail.tsx index 19cdc1e1..b367da59 100644 --- a/dashboard/origin-mlx/src/components/Detail/DatasetDetail.tsx +++ b/dashboard/origin-mlx/src/components/Detail/DatasetDetail.tsx @@ -1,17 +1,17 @@ -/* +/* * Copyright 2021 The MLX Contributors -* +* * SPDX-License-Identifier: Apache-2.0 -*/ +*/ import * as React from 'react'; -import StoreContext from '../../lib/stores/context' -import SourceCodeDisplay from '../SourceCodeDisplay'; import Tabs from '@material-ui/core/Tabs'; import Tab from '@material-ui/core/Tab'; -import yaml # noqa: F401 from 'js-yaml'; +import yaml from 'js-yaml'; +import SourceCodeDisplay from '../SourceCodeDisplay'; +import StoreContext from '../../lib/stores/context'; import { getUserInfo, hasRole } from '../../lib/util'; -import RunView from '../RunView' +import RunView from '../RunView'; import RelatedAssetView from '../RelatedAssetView'; import MarkdownViewer from '../MarkdownViewer'; import MetadataView from '../MetadataView'; @@ -25,7 +25,7 @@ export interface IPipelineDetailProps { } export default class PipelineDetail extends React.Component { - static contextType = StoreContext + static contextType = StoreContext; constructor(props: any) { super(props); @@ -33,107 +33,112 @@ export default class PipelineDetail extends React.Component { const nodeCode = this.state.pipeline.template.spec.templates - .find((container:any) => - container.name.includes(name.slice(0, name.length - (name.includes('...') && 3)))) - return yaml.safeDump(nodeCode) - } - + .find((container:any) => container.name.includes(name.slice(0, name.length - (name.includes('...') && 3)))); + return yaml.safeDump(nodeCode); + }; + public render() { - const { store } = this.context - const { execute } = store.settings.capabilities - const canRun = execute.value !== null ? 
execute.value : execute.default + const { store } = this.context; + const { execute } = store.settings.capabilities; + const canRun = execute.value !== null ? execute.value : execute.default; - const dataset = this.state.dataset - const setRunLink = this.props.setRunLink + const { dataset } = this.state; + const { setRunLink } = this.props; return (
this.setState({ leftTab })}> - this.setState({ leftTab })} + > + - - { this.state.selectedGraphNode && - - } - {canRun && isAdmin && + )} + {canRun && isAdmin + && ( - } - { dataset.related_assets && dataset.related_assets.length !== 0 && + )} + { dataset.related_assets && dataset.related_assets.length !== 0 + && ( - } + )}
{ this.state.leftTab === 'description' && ((dataset.template && dataset.template.readme_url) - ? - : + : ( + + ) )} - { this.state.leftTab === 'relatedAssets' && + { this.state.leftTab === 'relatedAssets' + && ( - } - {this.state.leftTab === 'runCreation' && - - } - { this.state.leftTab === "source" && - } + { this.state.leftTab === 'source' && ( + - } - { this.state.leftTab === "component-code" && - - } + )}
- ) + ); } } diff --git a/dashboard/origin-mlx/src/components/Detail/KFServingDetail.tsx b/dashboard/origin-mlx/src/components/Detail/KFServingDetail.tsx index c6041b1c..826d5263 100644 --- a/dashboard/origin-mlx/src/components/Detail/KFServingDetail.tsx +++ b/dashboard/origin-mlx/src/components/Detail/KFServingDetail.tsx @@ -1,31 +1,31 @@ -/* +/* * Copyright 2021 The MLX Contributors -* +* * SPDX-License-Identifier: Apache-2.0 -*/ +*/ import * as React from 'react'; -import StoreContext from '../../lib/stores/context' -import { capitalize, getUserInfo, hasRole } from '../../lib/util' import Grid from '@material-ui/core/Grid'; -import SourceCodeDisplay from '../SourceCodeDisplay'; import Tabs from '@material-ui/core/Tabs'; import Tab from '@material-ui/core/Tab'; -import yaml # noqa: F401 from 'js-yaml'; +import yaml from 'js-yaml'; import CheckCircleIcon from '@material-ui/icons/CheckCircle'; import ErrorIcon from '@material-ui/icons/Error'; -//import errorIcon from '../../images/error.png' -import closeButton from '../../images/close.png' -import updateIcon from '../../images/update-icon.png' - +// import errorIcon from '../../images/error.png' import Typography from '@material-ui/core/Typography'; -import Button from '../../components/Button' -import Icon from '@material-ui/core/Icon' -import { upload } from '../../lib/api/upload' +import Icon from '@material-ui/core/Icon'; +import Popup from 'reactjs-popup'; +import closeButton from '../../images/close.png'; +import updateIcon from '../../images/update-icon.png'; + +import Button from '../Button'; +import { upload } from '../../lib/api/upload'; import MetadataView from '../MetadataView'; -import Popup from "reactjs-popup"; +import SourceCodeDisplay from '../SourceCodeDisplay'; +import { capitalize, getUserInfo, hasRole } from '../../lib/util'; +import StoreContext from '../../lib/stores/context'; const isAdmin = hasRole(getUserInfo(), 'admin'); @@ -54,7 +54,7 @@ export interface KFServingDetailState { } export 
default class KFServingDetail extends React.Component { - static contextType = StoreContext + static contextType = StoreContext; constructor(props: any) { super(props); @@ -64,354 +64,381 @@ export default class KFServingDetail extends React.Component { switch (tab) { case 'serve': - return this.state.service.servable_tested_platforms + return this.state.service.servable_tested_platforms; case 'train': - return this.state.service.trainable_tested_platforms + return this.state.service.trainable_tested_platforms; default: - return [''] + return ['']; } - } + }; - getCode = (platform: string) => - this.state.service.code - .find(({ execution_platform }: any) => - execution_platform === platform).script_code + getCode = (platform: string) => this.state.service.code + .find(({ execution_platform }: any) => execution_platform === platform).script_code; public render() { - - const API = this.props.API || "" - const namespace = this.props.namespace || "" - const service = this.state.service - console.log("Asset:") - console.log(this.props.asset) + const API = this.props.API || ''; + const namespace = this.props.namespace || ''; + const { service } = this.state; + console.log('Asset:'); + console.log(this.props.asset); const handleFile = (e: React.ChangeEvent) => { - this.setState({...this.state, file: e.currentTarget.files[0]}) - } + this.setState({ ...this.state, file: e.currentTarget.files[0] }); + }; const handleUpload = async () => { - - const file = this.state.file - const response = await upload(API, 'inferenceservice', file, {}) - let uploadStatus = "" + const { file } = this.state; + const response = await upload(API, 'inferenceservice', file, {}); + let uploadStatus = ''; if (response.status < 200 || response.status >= 300) { - uploadStatus = "Upload of " + file.name + " failed" + uploadStatus = `Upload of ${file.name} failed`; } - else{ - uploadStatus = "Upload Suceeded." 
+ else { + uploadStatus = 'Upload Suceeded.'; } - this.setState({...this.state, uploadStatus, file: null}) - } - - /////////////KIALI/////////////// - const kiali = `http${process.env.HTTPS ? 's' : ""}://${process.env.REACT_APP_KIALI}` - const grafana = `http${process.env.HTTPS ? 's' : ""}://${process.env.REACT_APP_GRAFANA}` - const kialiLink = `${kiali}/kiali/console/services?duration=60&namespaces=${namespace}&servicename=` + service.metadata.name + "&kiosk=true" - const graphLink = `${kiali}/kiali/console/graph/namespaces/?edges=requestsPercentage&graphType=service&namespaces=${namespace}&unusedNodes=false&injectServiceNodes=true&duration=21600&refresh=10000&layout=dagre&kiosk=true` - const grafanaLink = `${grafana}/d/UbsSZTDik/istio-workload-dashboard?orgId=1&var-namespace=${namespace}&var-workload=` + service.metadata.name + `-deployment&var-srcns=All&var-srcwl=All&var-dstsvc=All` - - /////////////PREDICTOR///////////// - let predictorStatusIcon = - let predictorTimestamp = "" - let predictorHost = "" + this.setState({ ...this.state, uploadStatus, file: null }); + }; - /////////////EXPLAINER///////////// - let explainerStatusIcon = - let explainerTimestamp = "" - let explainerHost = "" - //let explainerErrorIcon = "" - let explainerReason = "" - let explainerMessage = "" - //let explainerSeverity = "" + /// //////////KIALI/////////////// + const kiali = `http${process.env.HTTPS ? 's' : ''}://${process.env.REACT_APP_KIALI}`; + const grafana = `http${process.env.HTTPS ? 
's' : ''}://${process.env.REACT_APP_GRAFANA}`; + const kialiLink = `${kiali}/kiali/console/services?duration=60&namespaces=${namespace}&servicename=${service.metadata.name}&kiosk=true`; + const graphLink = `${kiali}/kiali/console/graph/namespaces/?edges=requestsPercentage&graphType=service&namespaces=${namespace}&unusedNodes=false&injectServiceNodes=true&duration=21600&refresh=10000&layout=dagre&kiosk=true`; + const grafanaLink = `${grafana}/d/UbsSZTDik/istio-workload-dashboard?orgId=1&var-namespace=${namespace}&var-workload=${service.metadata.name}-deployment&var-srcns=All&var-srcwl=All&var-dstsvc=All`; + /// //////////PREDICTOR///////////// + let predictorStatusIcon = ; + let predictorTimestamp = ''; + let predictorHost = ''; - /////////////TRANSFORMER///////////// - let transformerStatusIcon = - let transformerTimestamp = "" - let transformerHost = "" - //let transformerErrorIcon = "" - let transformerReason = "" - let transformerMessage = "" - //let transformerSeverity = "" + /// //////////EXPLAINER///////////// + let explainerStatusIcon = ; + let explainerTimestamp = ''; + let explainerHost = ''; + // let explainerErrorIcon = "" + let explainerReason = ''; + let explainerMessage = ''; + // let explainerSeverity = "" + /// //////////TRANSFORMER///////////// + let transformerStatusIcon = ; + let transformerTimestamp = ''; + let transformerHost = ''; + // let transformerErrorIcon = "" + let transformerReason = ''; + let transformerMessage = ''; + // let transformerSeverity = "" if (service.status?.condition) { - for (let conditionIter=0; conditionIter + if (condition.type === 'DefaultPredictorReady') { + predictorTimestamp = condition.lastTransitionTime; + predictorHost = service.status.default.predictor.host; + if (condition.status === 'True') { + predictorStatusIcon = ; } } } - for (let conditionIter=0; conditionIter - explainerHost = service.status.default.transformer?.host || service.status.default.explainer.host + for (let conditionIter = 0; conditionIter < 
service.status.conditions.length; conditionIter += 1) { + const condition = service.status.conditions[conditionIter]; + if (condition.type === 'DefaultExplainerReady') { + explainerTimestamp = condition.lastTransitionTime; + if (condition.status === 'True') { + explainerStatusIcon = ; + explainerHost = service.status.default.transformer?.host || service.status.default.explainer.host; } else { - //explainerErrorIcon = errorIcon - explainerReason = condition.reason - explainerMessage = condition.message - //explainerSeverity = condition.severity + // explainerErrorIcon = errorIcon + explainerReason = condition.reason; + explainerMessage = condition.message; + // explainerSeverity = condition.severity } } } - for (let i=0; i - transformerHost = service.status.default.transformer.host + for (let i = 0; i < service.status.conditions.length; i += 1) { + if (service.status.conditions[i].type === 'DefaultTransformerReady') { + transformerTimestamp = service.status.conditions[i].lastTransitionTime; + if (service.status.conditions[i].status === 'True') { + transformerStatusIcon = ; + transformerHost = service.status.default.transformer.host; } else { - //transformerErrorIcon = errorIcon - transformerReason = service.status.conditions[i].reason - transformerMessage = service.status.conditions[i].message - //transformerSeverity = service.status.conditions[i].severity + // transformerErrorIcon = errorIcon + transformerReason = service.status.conditions[i].reason; + transformerMessage = service.status.conditions[i].message; + // transformerSeverity = service.status.conditions[i].severity } } } } else { - predictorTimestamp = service.metadata.creationTimestamp - predictorStatusIcon = service.status.activeModelState === "Ready" - ? - : + predictorTimestamp = service.metadata.creationTimestamp; + predictorStatusIcon = service.status.activeModelState === 'Ready' + ? 
+ : ; } - + return ( <> - this.setState({ topTab: value })} > - - - - - { this.state.topTab === "info" && + { this.state.topTab === 'info' && ( -
- this.setState({ leftTab: value })}> - this.setState({ leftTab: value })} + > +
- {this.state.leftTab === 'detail' && + {this.state.leftTab === 'detail' + && (
- - Configure KFServices + + Configure KFServices
- { isAdmin && (!this.state.file ? - <> - - -

{this.state.uploadStatus}

- - : - <> - -

{this.state.file.name}

- + { isAdmin && (!this.state.file + ? ( + <> + + +

{this.state.uploadStatus}

+ + ) + : ( + <> + +

{this.state.file.name}

+ + ) )}
- { predictorTimestamp && - - } - { explainerTimestamp && - - } - { transformerTimestamp && - - } + )}
- } + )} {/* Temporarily remove for first pass (using 'false && ') */} - { false && - UPDATE SERVICE} position="top center" contentStyle={{ marginTop: '30px', marginLeft: '300px', width: '1165px', height: '670px', zIndex: 100, opacity: 1, boxShadow: '0 4px 8px 0 rgba(0, 0, 0, 0.2), 0 6px 20px 0 rgba(0, 0, 0, 0.19)', borderWidth: '5px', borderColor: '#1BCDC7' }} arrow={false}> -
- - close button - -
+ { false + && ( + + {' '} + + UPDATE SERVICE + +)} + position="top center" + contentStyle={{ + marginTop: '30px', marginLeft: '300px', width: '1165px', height: '670px', zIndex: 100, opacity: 1, boxShadow: '0 4px 8px 0 rgba(0, 0, 0, 0.2), 0 6px 20px 0 rgba(0, 0, 0, 0.19)', borderWidth: '5px', borderColor: '#1BCDC7', + }} + arrow={false} + > +
+ + close button + +
- } + )}
-
- this.setState({ rightTab: value, - codeTab: this.getCodeType(value)[0] + codeTab: this.getCodeType(value)[0], })} > -
- { (this.state.rightTab === "train" || this.state.rightTab === "serve") && + { (this.state.rightTab === 'train' || this.state.rightTab === 'serve') + && (
this.setState({ codeTab: value })} > - {this.getCodeType(this.state.rightTab).map((platform: string) => - ( + - )} + ))}
- } - {this.state.rightTab === "source" && - - } -
+ )} +
- } - { this.state.topTab === "kiali" && - - } - { this.state.topTab === "grafana" && - - } - { this.state.topTab === "graph" && - - } + )} + { this.state.topTab === 'kiali' + && } + )} + {this.state.rightTab === 'notebook' + && } ); diff --git a/tools/python/regenerate_catalog_upload_json.py b/tools/python/regenerate_catalog_upload_json.py index 6b988126..66e60b2f 100755 --- a/tools/python/regenerate_catalog_upload_json.py +++ b/tools/python/regenerate_catalog_upload_json.py @@ -7,7 +7,7 @@ from __future__ import print_function import json -import yaml # noqa: F401 +import yaml from glob import glob from os.path import abspath, dirname, relpath @@ -25,9 +25,7 @@ project_dir = dirname(script_path) katalog_dir = f"{project_dir}/../katalog" # TODO: don't assume user cloned katalog and mlx repos into same parent folder -katalog_url = ( - "https://raw.githubusercontent.com/machine-learning-exchange/katalog/main/" -) +katalog_url = "https://raw.githubusercontent.com/machine-learning-exchange/katalog/main/" catalog_upload_json_files = [ f"{project_dir}/bootstrapper/catalog_upload.json", @@ -39,11 +37,8 @@ def get_list_of_yaml_files_in_katalog(asset_type: str): yaml_files = glob(f"{katalog_dir}/{asset_type}-samples/**/*.yaml", recursive=True) - yaml_files = [ - filepath - for filepath in yaml_files - if not any(word in filepath for word in ["template", "test", "src"]) - ] + yaml_files = [filepath for filepath in yaml_files + if not any(word in filepath for word in ["template", "test", "src"])] return sorted(yaml_files) @@ -61,17 +56,15 @@ def generate_katalog_dict() -> dict: with open(yaml_file) as f: yaml_dict = yaml.load(f, Loader=yaml.FullLoader) - asset_name = ( - yaml_dict.get("name") - or yaml_dict.get("metadata", {}) - .get("name", "") - .replace("-", " ") - .title() - or "" - ) + asset_name = yaml_dict.get("name") or \ + yaml_dict.get("metadata", {}).get("name", "").replace("-", " ").title() \ + or "" asset_url = katalog_url + relpath(yaml_file, katalog_dir) - 
katalog_asset_item = {"name": asset_name, "url": asset_url} + katalog_asset_item = { + "name": asset_name, + "url": asset_url + } katalog_asset_list.append(katalog_asset_item) @@ -105,6 +98,6 @@ def main(): print("Done. Use git diff to evaluate if and which changes are desired!") -if __name__ == "__main__": +if __name__ == '__main__': main() diff --git a/tools/python/update_doc_table.py b/tools/python/update_doc_table.py index ebc8a60b..710cb2f6 100755 --- a/tools/python/update_doc_table.py +++ b/tools/python/update_doc_table.py @@ -15,7 +15,9 @@ from glob import glob from os.path import abspath, dirname, split -md_file_path_expressions = ["/docs/*.md"] +md_file_path_expressions = [ + "/docs/*.md" +] script_folder = abspath(dirname(__file__)) project_root_dir = abspath(dirname(dirname(script_folder))) @@ -28,10 +30,8 @@ def find_md_files() -> [str]: print(" " + path_expr.lstrip("/")) print("") - md_files_list_of_lists = [ - glob(project_root_dir + path_expr, recursive=True) - for path_expr in md_file_path_expressions - ] + md_files_list_of_lists = [glob(project_root_dir + path_expr, recursive=True) + for path_expr in md_file_path_expressions] return sorted(list(itertools.chain(*md_files_list_of_lists))) @@ -54,12 +54,14 @@ def update_doc_table() -> [str]: md_file_paths = find_md_files() # 2. extract all descriptions using headers (first line) from files - descriptions = [get_header_from_md_file(file) for file in md_file_paths] + descriptions = [ + get_header_from_md_file(file) + for file in md_file_paths + ] # 3. 
format filenames as Markdown hyperlinks: [name](url) - md_filenames = [ - "[" + split(file)[1] + "](./" + split(file)[1] + ")" for file in md_file_paths - ] + md_filenames = ["[" + split(file)[1] + "](./" + split(file)[1] + ")" + for file in md_file_paths] table = [] table.append( @@ -70,14 +72,16 @@ def update_doc_table() -> [str]: ) for i in range(len(md_filenames)): - if "README.md" in md_filenames[i]: + if("README.md" in md_filenames[i]): continue - table.append(f"| {md_filenames[i]} | {descriptions[i]} |") + table.append( + f"| {md_filenames[i]} | {descriptions[i]} |" + ) f = open("./docs/README.md", "w") f.write("\n".join(table)) f.close() -if __name__ == "__main__": +if __name__ == '__main__': update_doc_table() diff --git a/tools/python/verify_doc_links.py b/tools/python/verify_doc_links.py index 9c168dac..fb66650b 100755 --- a/tools/python/verify_doc_links.py +++ b/tools/python/verify_doc_links.py @@ -10,16 +10,14 @@ import requests from glob import glob -from os import environ as env # noqa: F401 +from os import environ as env from os.path import abspath, dirname, exists, relpath from random import randint from time import sleep from urllib3.util.url import parse_url from urllib3.exceptions import LocationParseError -GITHUB_REPO = env.get( - "GITHUB_REPO", "https://github.com/machine-learning-exchange/mlx/" -) +GITHUB_REPO = env.get("GITHUB_REPO", "https://github.com/machine-learning-exchange/mlx/") md_file_path_expressions = [ "/**/*.md", @@ -35,9 +33,7 @@ project_root_dir = abspath(dirname(dirname(script_folder))) github_repo_master_path = "{}/blob/master".format(GITHUB_REPO.rstrip("/")) -parallel_requests = ( - 60 # GitHub rate limiting is 60 requests per minute, then we sleep a bit -) +parallel_requests = 60 # GitHub rate limiting is 60 requests per minute, then we sleep a bit url_status_cache = dict() @@ -49,23 +45,18 @@ def find_md_files() -> [str]: print(" " + path_expr.lstrip("/")) print("") - list_of_lists = [ - glob(project_root_dir + path_expr, 
recursive=True) - for path_expr in md_file_path_expressions - ] + list_of_lists = [glob(project_root_dir + path_expr, recursive=True) + for path_expr in md_file_path_expressions] flattened_list = list(itertools.chain(*list_of_lists)) - filtered_list = [ - path for path in flattened_list if not any(s in path for s in excluded_paths) - ] + filtered_list = [path for path in flattened_list + if not any(s in path for s in excluded_paths)] return sorted(filtered_list) -def get_links_from_md_file( - md_file_path: str, -) -> [(int, str, str)]: # -> [(line, link_text, URL)] +def get_links_from_md_file(md_file_path: str) -> [(int, str, str)]: # -> [(line, link_text, URL)] with open(md_file_path, "r") as f: try: @@ -80,41 +71,26 @@ def get_links_from_md_file( md_file_content = re.sub( r"\[([^]]+)\]\((?!http|#|/)([^)]+)\)", r"[\1]({}/{}/\2)".format(github_repo_master_path, folder).replace("/./", "/"), - md_file_content, - ) + md_file_content) # replace links that are relative to the project root, i.e. 
[link text](/sdk/FEATURES.md) md_file_content = re.sub( r"\[([^]]+)\]\(/([^)]+)\)", r"[\1]({}/\2)".format(github_repo_master_path), - md_file_content, - ) + md_file_content) # find all the links line_text_url = [] for line_number, line_text in enumerate(md_file_content.splitlines()): # find markdown-styled links [text](url) - for (link_text, url) in re.findall( - r"\[([^]]+)\]\((%s[^)]+)\)" % "http", line_text - ): + for (link_text, url) in re.findall(r"\[([^]]+)\]\((%s[^)]+)\)" % "http", line_text): line_text_url.append((line_number + 1, link_text, url)) # find plain http(s)-style links for url in re.findall(r"[\n\r\s\"'](https?://[^\s]+)[\n\r\s\"']", line_text): - if not any( - s in url - for s in [ - "localhost", - "...", - "lorem", - "ipsum", - "/path/to/", - "address", - "port", - "${OS}", - ] - ): + if not any(s in url for s in + ["localhost", "...", "lorem", "ipsum", "/path/to/", "address", "port", "${OS}"]): try: parse_url(url) line_text_url.append((line_number + 1, "", url)) @@ -125,9 +101,7 @@ def get_links_from_md_file( return line_text_url -def test_url( - file: str, line: int, text: str, url: str -) -> (str, int, str, str, int): # (file, line, text, url, status) +def test_url(file: str, line: int, text: str, url: str) -> (str, int, str, str, int): # (file, line, text, url, status) short_url = url.split("#", maxsplit=1)[0] @@ -142,18 +116,12 @@ def test_url( status = 404 else: try: - status = requests.head( - short_url, allow_redirects=True, timeout=5 - ).status_code + status = requests.head(short_url, allow_redirects=True, timeout=5).status_code if status == 405: # method not allowed, use GET instead of HEAD - status = requests.get( - short_url, allow_redirects=True, timeout=5 - ).status_code + status = requests.get(short_url, allow_redirects=True, timeout=5).status_code if status == 429: # GitHub rate limiting, try again after 1 minute sleep(randint(60, 90)) - status = requests.head( - short_url, allow_redirects=True, timeout=5 - ).status_code + 
status = requests.head(short_url, allow_redirects=True, timeout=5).status_code except requests.exceptions.Timeout as e: status = 408 except requests.exceptions.RequestException as e: @@ -166,14 +134,10 @@ def test_url( return file, line, text, url, status -def verify_urls_concurrently( - file_line_text_url: [(str, int, str, str)] -) -> [(str, int, str, str)]: +def verify_urls_concurrently(file_line_text_url: [(str, int, str, str)]) -> [(str, int, str, str)]: file_line_text_url_status = [] - with concurrent.futures.ThreadPoolExecutor( - max_workers=parallel_requests - ) as executor: + with concurrent.futures.ThreadPoolExecutor(max_workers=parallel_requests) as executor: check_urls = ( executor.submit(test_url, file, line, text, url) for (file, line, text, url) in file_line_text_url @@ -186,12 +150,8 @@ def verify_urls_concurrently( print(str(type(e))) file_line_text_url_status.append((file, line, text, url, 500)) finally: - print( - "{}/{}".format( - len(file_line_text_url_status), len(file_line_text_url) - ), - end="\r", - ) + print("{}/{}".format(len(file_line_text_url_status), + len(file_line_text_url)), end="\r") return file_line_text_url_status @@ -212,42 +172,31 @@ def verify_doc_links() -> [(str, int, str, str)]: file_line_text_url_status = verify_urls_concurrently(file_line_text_url) # 4. filter for the invalid URLs (status 404: "Not Found") to be reported - file_line_text_url_404 = [ - (f, l, t, u, s) for (f, l, t, u, s) in file_line_text_url_status if s == 404 - ] + file_line_text_url_404 = [(f, l, t, u, s) + for (f, l, t, u, s) in file_line_text_url_status + if s == 404] # 5. 
print some stats for confidence - print( - "{} {} links ({} unique URLs) in {} Markdown files.\n".format( - "Checked" if file_line_text_url_404 else "Verified", - len(file_line_text_url_status), - len(url_status_cache), - len(md_file_paths), - ) - ) + print("{} {} links ({} unique URLs) in {} Markdown files.\n".format( + "Checked" if file_line_text_url_404 else "Verified", + len(file_line_text_url_status), + len(url_status_cache), + len(md_file_paths))) # 6. report invalid links, exit with error for CI/CD if file_line_text_url_404: for (file, line, text, url, status) in file_line_text_url_404: - print( - "{}:{}: {} -> {}".format( - relpath(file, project_root_dir), - line, - url.replace(github_repo_master_path, ""), - status, - ) - ) + print("{}:{}: {} -> {}".format( + relpath(file, project_root_dir), line, + url.replace(github_repo_master_path, ""), status)) # print a summary line for clear error discovery at the bottom of Travis job log - print( - "\nERROR: Found {} invalid Markdown links".format( - len(file_line_text_url_404) - ) - ) + print("\nERROR: Found {} invalid Markdown links".format( + len(file_line_text_url_404))) exit(1) -if __name__ == "__main__": +if __name__ == '__main__': verify_doc_links() diff --git a/tools/python/verify_npm_packages.py b/tools/python/verify_npm_packages.py index 85cb317b..d67d5686 100755 --- a/tools/python/verify_npm_packages.py +++ b/tools/python/verify_npm_packages.py @@ -82,22 +82,15 @@ def verify_npm_packages(): packages_outdated = f"\n\nFound outdated npm packages\n" packages_up_to_date = "All packages up to date" - check_vulnerabilities = ( - run("npm audit", cwd="./dashboard/origin-mlx/", stdout=PIPE, shell=True) - .stdout.decode("utf-8") - .split("\n") - ) - vulnerabilities = [ - word for word in check_vulnerabilities if "vulnerabilities" in word - ] - packages_vulnerable = f"""\nFound vulnerable packages\n\n{colorText.RED}{vulnerabilities[0]}{colorText.END}\n - \rRun {colorText.BLUE}make 
update_npm_packages{colorText.END} to secure/update\n""" + check_vulnerabilities = run("npm audit", cwd="./dashboard/origin-mlx/", stdout=PIPE, shell=True + ).stdout.decode("utf-8").split('\n') + vulnerabilities = [word for word in check_vulnerabilities if 'vulnerabilities' in word] + packages_vulnerable = f'''\nFound vulnerable packages\n\n{colorText.RED}{vulnerabilities[0]}{colorText.END}\n + \rRun {colorText.BLUE}make update_npm_packages{colorText.END} to secure/update\n''' packages_safe = "\nNo vulnerabilities found" - print(packages_up_to_date) if check_outdated.returncode == 0 else print( - packages_outdated - ) - print("-" * 40) + print(packages_up_to_date) if check_outdated.returncode == 0 else print(packages_outdated) + print('-' * 40) if "0 vulnerabilities" not in vulnerabilities[0]: print(packages_vulnerable)