diff --git a/.github/workflows/ci.yaml b/.github/workflows/ci.yaml
index 160af875a8..f7f73af512 100644
--- a/.github/workflows/ci.yaml
+++ b/.github/workflows/ci.yaml
@@ -41,6 +41,7 @@ jobs:
   # Lint checks which don't depend on any service containes, etc. to be running.
   lint-checks:
     needs: pre_job
+    if: false  # generally won't work w/ pants venv
     # NOTE: We always want to run job on master since we run some additional checks there (code
     # coverage, etc)
     # if: ${{ needs.pre_job.outputs.should_skip != 'true' || github.ref == 'refs/heads/master' }}
@@ -72,12 +73,18 @@ jobs:
           python-version: '3.9.14'
 
     env:
+      # the python binary used by the Makefile
+      PYTHON_VERSION: 'python${{ matrix.python-version-short }}'
+
       TASK: '${{ matrix.task }}'
       COLUMNS: '120'
       PYLINT_CONCURRENCY: '6'
     steps:
       - name: Checkout repository
         uses: actions/checkout@v4
+        with:
+          # a test uses a submodule, and pants needs access to it to calculate deps.
+          submodules: 'true'
       - name: Custom Environment Setup
         run: |
           ./scripts/github/setup-environment.sh
@@ -85,21 +92,21 @@ jobs:
         uses: actions/setup-python@v5
         with:
           python-version: '${{ matrix.python-version }}'
-      - name: Cache Python Dependencies
-        uses: actions/cache@v4
-        with:
-          path: |
-            ~/.cache/pip
-            virtualenv
-            ~/virtualenv
-          # TODO: maybe make the virtualenv a partial cache to exclude st2*?
-          #  !virtualenv/lib/python*/site-packages/st2*
-          #  !virtualenv/bin/st2*
-          key: ${{ runner.os }}-v5-python-${{ matrix.python-version }}-${{ hashFiles('requirements.txt', 'test-requirements.txt', 'lockfiles/*.lock') }}
-          # Don't use alternative key as if requirements.txt has altered we
-          # don't want to retrieve previous cache
-          #restore-keys: |
-          #  ${{ runner.os }}-v5-python-${{ matrix.python }}-
+      #- name: Cache Python Dependencies
+      #  uses: actions/cache@v4
+      #  with:
+      #    path: |
+      #      ~/.cache/pip
+      #      virtualenv
+      #      ~/virtualenv
+      #    # TODO: maybe make the virtualenv a partial cache to exclude st2*?
+      #    #  !virtualenv/lib/python*/site-packages/st2*
+      #    #  !virtualenv/bin/st2*
+      #    key: ${{ runner.os }}-v5-python-${{ matrix.python-version }}-${{ hashFiles('requirements.txt', 'test-requirements.txt', 'lockfiles/*.lock') }}
+      #    # Don't use alternative key as if requirements.txt has altered we
+      #    # don't want to retrieve previous cache
+      #    #restore-keys: |
+      #    #  ${{ runner.os }}-v5-python-${{ matrix.python }}-
       - name: Cache APT Dependencies
         id: cache-apt-deps
         uses: actions/cache@v4
@@ -116,6 +123,29 @@ jobs:
           # install dev dependencies for Python YAML and LDAP packages
           # https://github.com/StackStorm/st2-auth-ldap
           ./scripts/github/install-apt-packages-use-cache.sh
+
+      - name: Initialize Pants and its GHA caches
+        uses: pantsbuild/actions/init-pants@v6-scie-pants
+        # This action adds an env var to make pants use both pants.ci.toml & pants.toml.
+        # This action also creates 3 GHA caches (1 is optional).
+        # - `pants-setup` has the bootstrapped pants install
+        # - `pants-named-caches` has pip/wheel and PEX caches
+        # - `pants-lmdb-store` has the fine-grained process cache.
+        # If we ever use a remote cache, then we can drop this.
+        # Otherwise, we may need an additional workflow or job to delete old caches
+        # if they are not expiring fast enough, and we hit the GHA 10GB per repo max.
+        with:
+          base-branch: master
+          # To ignore a bad cache, bump the cache* integer.
+          gha-cache-key: cache0
+          # This hash should include all of our lockfiles so that the pip/pex caches
+          # get invalidated on any transitive dependency update.
+          named-caches-hash: ${{ hashFiles('lockfiles/*.lock') }}
+          # enable the optional lmdb_store cache since we're not using remote caching.
+          cache-lmdb-store: 'true'
+          # install whatever version of python we need for our in-repo pants-plugins
+          setup-python-for-plugins: 'true'
+
       - name: Install virtualenv
         run: |
           ./scripts/github/install-virtualenv.sh
@@ -136,6 +166,13 @@ jobs:
         run: |
           ./scripts/ci/run-nightly-make-task-if-exists.sh "${TASK}"
 
+      - name: Upload pants log
+        uses: actions/upload-artifact@v2
+        with:
+          name: pants-log-py${{ matrix.python-version }}-nose-${{ matrix.nosetests_node_index }}
+          path: .pants.d/pants.log
+        if: always()  # We want the log even on failures.
+
   self-check:
     needs: pre_job
     name: '${{ matrix.name }} - Python ${{ matrix.python-version-short }}'
@@ -163,6 +200,9 @@ jobs:
           - 15672:15672/tcp  # Management: HTTP, CLI
 
     env:
+      # the python binary used by the Makefile
+      PYTHON_VERSION: 'python${{ matrix.python-version-short }}'
+
       # CI st2.conf (with ST2_CI_USER user instead of stanley)
       ST2_CONF: 'conf/st2.ci.conf'
@@ -177,6 +217,9 @@
     steps:
       - name: Checkout repository
         uses: actions/checkout@v4
+        with:
+          # a test uses a submodule, and pants needs access to it to calculate deps.
+          submodules: 'true'
       - name: Custom Environment Setup
         run: |
           ./scripts/github/setup-environment.sh
@@ -184,19 +227,19 @@ jobs:
         uses: actions/setup-python@v5
         with:
           python-version: '${{ matrix.python-version }}'
-      - name: Cache Python Dependencies
-        uses: actions/cache@v4
-        with:
-          path: |
-            ~/.cache/pip
-            virtualenv
-            ~/virtualenv
-          # TODO: maybe make the virtualenv a partial cache to exclude st2*?
-          #  !virtualenv/lib/python*/site-packages/st2*
-          #  !virtualenv/bin/st2*
-          key: ${{ runner.os }}-v5-python-${{ matrix.python-version }}-${{ hashFiles('requirements.txt', 'test-requirements.txt', 'lockfiles/*.lock') }}
-          restore-keys: |
-            ${{ runner.os }}-python-${{ matrix.python }}-
+      #- name: Cache Python Dependencies
+      #  uses: actions/cache@v4
+      #  with:
+      #    path: |
+      #      ~/.cache/pip
+      #      virtualenv
+      #      ~/virtualenv
+      #    # TODO: maybe make the virtualenv a partial cache to exclude st2*?
+      #    #  !virtualenv/lib/python*/site-packages/st2*
+      #    #  !virtualenv/bin/st2*
+      #    key: ${{ runner.os }}-v5-python-${{ matrix.python-version }}-${{ hashFiles('requirements.txt', 'test-requirements.txt', 'lockfiles/*.lock') }}
+      #    restore-keys: |
+      #      ${{ runner.os }}-python-${{ matrix.python }}-
       - name: Cache APT Dependencies
         id: cache-apt-deps
         uses: actions/cache@v4
@@ -214,6 +257,29 @@ jobs:
           # install dev dependencies for Python YAML and LDAP packages
           # https://github.com/StackStorm/st2-auth-ldap
           ./scripts/github/install-apt-packages-use-cache.sh
+
+      - name: Initialize Pants and its GHA caches
+        uses: pantsbuild/actions/init-pants@v6-scie-pants
+        # This action adds an env var to make pants use both pants.ci.toml & pants.toml.
+        # This action also creates 3 GHA caches (1 is optional).
+        # - `pants-setup` has the bootstrapped pants install
+        # - `pants-named-caches` has pip/wheel and PEX caches
+        # - `pants-lmdb-store` has the fine-grained process cache.
+        # If we ever use a remote cache, then we can drop this.
+        # Otherwise, we may need an additional workflow or job to delete old caches
+        # if they are not expiring fast enough, and we hit the GHA 10GB per repo max.
+        with:
+          base-branch: master
+          # To ignore a bad cache, bump the cache* integer.
+          gha-cache-key: cache0
+          # This hash should include all of our lockfiles so that the pip/pex caches
+          # get invalidated on any transitive dependency update.
+          named-caches-hash: ${{ hashFiles('lockfiles/*.lock') }}
+          # enable the optional lmdb_store cache since we're not using remote caching.
+          cache-lmdb-store: 'true'
+          # install whatever version of python we need for our in-repo pants-plugins
+          setup-python-for-plugins: 'true'
+
       - name: Install virtualenv
         run: |
           ./scripts/github/install-virtualenv.sh
@@ -264,16 +330,17 @@ jobs:
           echo ${GITHUB_WORKSPACE}/virtualenv/bin | tee -a $GITHUB_PATH
       - name: Create symlinks to find the binaries when running st2 actions
         # st2 is actually a console_script entry point, not just a 'script'
-        # so it IS included in the virtualenv. But, st2-run-pack-tests might not be included.
+        # so it IS included in the virtualenv. But, st2-run-pack-tests is not.
         run: |
           ln -s ${GITHUB_WORKSPACE}/virtualenv/bin/st2 /usr/local/bin/st2
           ln -s ${GITHUB_WORKSPACE}/st2common/bin/st2-run-pack-tests /usr/local/bin/st2-run-pack-tests
-      - name: Install st2client
-        timeout-minutes: 5
-        run: |
-          cd ./st2client
-          pip3 install --upgrade pip
-          python3 setup.py develop
+# st2client already installed by pants in the virtualenv.
+#      - name: Install st2client
+#        timeout-minutes: 5
+#        run: |
+#          cd ./st2client
+#          pip3 install --upgrade pip
+#          python3 setup.py develop
       - name: Run self-verification script
         env:
           ST2_CONF: /home/runner/work/st2/st2/conf/st2.ci.conf
@@ -295,6 +362,13 @@ jobs:
         if: "${{ always() }}"
         run: docker rm --force redis || true
 
+      - name: Upload pants log
+        uses: actions/upload-artifact@v2
+        with:
+          name: pants-log-py${{ matrix.python-version }}-nose-${{ matrix.nosetests_node_index }}
+          path: .pants.d/pants.log
+        if: always()  # We want the log even on failures.
+
   unit-tests:
     needs: pre_job
     # NOTE: We always want to run job on master since we run some additional checks there (code
@@ -360,6 +434,9 @@ jobs:
       NODE_TOTAL: '${{ matrix.nosetests_node_total }}'
      NODE_INDEX: '${{ matrix.nosetests_node_index }}'
 
+      # the python binary used by the Makefile
+      PYTHON_VERSION: 'python${{ matrix.python-version-short }}'
+
       # We need to explicitly specify terminal width otherwise some CLI tests fail on container
       # environments where small terminal size is used.
       COLUMNS: '120'
@@ -381,6 +458,9 @@ jobs:
     steps:
       - name: Checkout repository
         uses: actions/checkout@v4
+        with:
+          # a test uses a submodule, and pants needs access to it to calculate deps.
+          submodules: 'true'
       - name: Custom Environment Setup
         run: |
           ./scripts/github/setup-environment.sh
@@ -388,21 +468,21 @@ jobs:
         uses: actions/setup-python@v5
         with:
           python-version: '${{ matrix.python-version }}'
-      - name: Cache Python Dependencies
-        uses: actions/cache@v4
-        with:
-          path: |
-            ~/.cache/pip
-            virtualenv
-            ~/virtualenv
-          # TODO: maybe make the virtualenv a partial cache to exclude st2*?
-          #  !virtualenv/lib/python*/site-packages/st2*
-          #  !virtualenv/bin/st2*
-          key: ${{ runner.os }}-v5-python-${{ matrix.python-version }}-${{ hashFiles('requirements.txt', 'test-requirements.txt', 'lockfiles/*.lock') }}
-          # Don't use alternative key as if requirements.txt has altered we
-          # don't want to retrieve previous cache
-          #restore-keys: |
-          #  ${{ runner.os }}-v5-python-${{ matrix.python }}-
+      #- name: Cache Python Dependencies
+      #  uses: actions/cache@v4
+      #  with:
+      #    path: |
+      #      ~/.cache/pip
+      #      virtualenv
+      #      ~/virtualenv
+      #    # TODO: maybe make the virtualenv a partial cache to exclude st2*?
+      #    #  !virtualenv/lib/python*/site-packages/st2*
+      #    #  !virtualenv/bin/st2*
+      #    key: ${{ runner.os }}-v5-python-${{ matrix.python-version }}-${{ hashFiles('requirements.txt', 'test-requirements.txt', 'lockfiles/*.lock') }}
+      #    # Don't use alternative key as if requirements.txt has altered we
+      #    # don't want to retrieve previous cache
+      #    #restore-keys: |
+      #    #  ${{ runner.os }}-v5-python-${{ matrix.python }}-
       - name: Cache APT Dependencies
         id: cache-apt-deps
         uses: actions/cache@v4
@@ -419,6 +499,29 @@ jobs:
           # install dev dependencies for Python YAML and LDAP packages
           # https://github.com/StackStorm/st2-auth-ldap
           ./scripts/github/install-apt-packages-use-cache.sh
+
+      - name: Initialize Pants and its GHA caches
+        uses: pantsbuild/actions/init-pants@v6-scie-pants
+        # This action adds an env var to make pants use both pants.ci.toml & pants.toml.
+        # This action also creates 3 GHA caches (1 is optional).
+        # - `pants-setup` has the bootstrapped pants install
+        # - `pants-named-caches` has pip/wheel and PEX caches
+        # - `pants-lmdb-store` has the fine-grained process cache.
+        # If we ever use a remote cache, then we can drop this.
+        # Otherwise, we may need an additional workflow or job to delete old caches
+        # if they are not expiring fast enough, and we hit the GHA 10GB per repo max.
+        with:
+          base-branch: master
+          # To ignore a bad cache, bump the cache* integer.
+          gha-cache-key: cache0
+          # This hash should include all of our lockfiles so that the pip/pex caches
+          # get invalidated on any transitive dependency update.
+          named-caches-hash: ${{ hashFiles('lockfiles/*.lock') }}
+          # enable the optional lmdb_store cache since we're not using remote caching.
+          cache-lmdb-store: 'true'
+          # install whatever version of python we need for our in-repo pants-plugins
+          setup-python-for-plugins: 'true'
+
       - name: Install virtualenv
         run: |
           ./scripts/github/install-virtualenv.sh
@@ -469,6 +572,13 @@ jobs:
         env:
           CODECOV_TOKEN: ${{ secrets.CODECOV_TOKEN }}
 
+      - name: Upload pants log
+        uses: actions/upload-artifact@v2
+        with:
+          name: pants-log-py${{ matrix.python-version }}-nose-${{ matrix.nosetests_node_index }}
+          path: .pants.d/pants.log
+        if: always()  # We want the log even on failures.
+
   integration-tests:
     needs: pre_job
     # NOTE: We always want to run job on master since we run some additional checks there (code
@@ -555,7 +665,6 @@ jobs:
           #- 15671:15671/tcp # Management: SSL port
           #- 25672:25672/tcp # inter-node or CLI
           #- 4369:4369/tcp # epmd
-          #
 
       # Used for the coordination backend for integration tests
       # NOTE: To speed things up, we only start redis for integration tests
@@ -579,6 +688,9 @@ jobs:
       NODE_TOTAL: '${{ matrix.nosetests_node_total }}'
       NODE_INDEX: '${{ matrix.nosetests_node_index }}'
 
+      # the python binary used by the Makefile
+      PYTHON_VERSION: 'python${{ matrix.python-version-short }}'
+
       # We need to explicitly specify terminal width otherwise some CLI tests fail on container
       # environments where small terminal size is used.
       COLUMNS: '120'
@@ -601,6 +713,9 @@ jobs:
     steps:
       - name: Checkout repository
         uses: actions/checkout@v4
+        with:
+          # a test uses a submodule, and pants needs access to it to calculate deps.
+          submodules: 'true'
       - name: Custom Environment Setup
         run: |
           ./scripts/github/setup-environment.sh
@@ -608,21 +723,21 @@ jobs:
         uses: actions/setup-python@v5
         with:
           python-version: '${{ matrix.python-version }}'
-      - name: Cache Python Dependencies
-        uses: actions/cache@v4
-        with:
-          path: |
-            ~/.cache/pip
-            virtualenv
-            ~/virtualenv
-          # TODO: maybe make the virtualenv a partial cache to exclude st2*?
-          #  !virtualenv/lib/python*/site-packages/st2*
-          #  !virtualenv/bin/st2*
-          key: ${{ runner.os }}-v5-python-${{ matrix.python-version }}-${{ hashFiles('requirements.txt', 'test-requirements.txt', 'lockfiles/*.lock') }}
-          # Don't use alternative key as if requirements.txt has altered we
-          # don't want to retrieve previous cache
-          #restore-keys: |
-          #  ${{ runner.os }}-v5-python-${{ matrix.python }}-
+      #- name: Cache Python Dependencies
+      #  uses: actions/cache@v4
+      #  with:
+      #    path: |
+      #      ~/.cache/pip
+      #      virtualenv
+      #      ~/virtualenv
+      #    # TODO: maybe make the virtualenv a partial cache to exclude st2*?
+      #    #  !virtualenv/lib/python*/site-packages/st2*
+      #    #  !virtualenv/bin/st2*
+      #    key: ${{ runner.os }}-v5-python-${{ matrix.python-version }}-${{ hashFiles('requirements.txt', 'test-requirements.txt', 'lockfiles/*.lock') }}
+      #    # Don't use alternative key as if requirements.txt has altered we
+      #    # don't want to retrieve previous cache
+      #    #restore-keys: |
+      #    #  ${{ runner.os }}-v5-python-${{ matrix.python }}-
       - name: Cache APT Dependencies
         id: cache-apt-deps
         uses: actions/cache@v4
@@ -639,6 +754,29 @@ jobs:
           # install dev dependencies for Python YAML and LDAP packages
           # https://github.com/StackStorm/st2-auth-ldap
           ./scripts/github/install-apt-packages-use-cache.sh
+
+      - name: Initialize Pants and its GHA caches
+        uses: pantsbuild/actions/init-pants@v6-scie-pants
+        # This action adds an env var to make pants use both pants.ci.toml & pants.toml.
+        # This action also creates 3 GHA caches (1 is optional).
+        # - `pants-setup` has the bootstrapped pants install
+        # - `pants-named-caches` has pip/wheel and PEX caches
+        # - `pants-lmdb-store` has the fine-grained process cache.
+        # If we ever use a remote cache, then we can drop this.
+        # Otherwise, we may need an additional workflow or job to delete old caches
+        # if they are not expiring fast enough, and we hit the GHA 10GB per repo max.
+        with:
+          base-branch: master
+          # To ignore a bad cache, bump the cache* integer.
+          gha-cache-key: cache0
+          # This hash should include all of our lockfiles so that the pip/pex caches
+          # get invalidated on any transitive dependency update.
+          named-caches-hash: ${{ hashFiles('lockfiles/*.lock') }}
+          # enable the optional lmdb_store cache since we're not using remote caching.
+          cache-lmdb-store: 'true'
+          # install whatever version of python we need for our in-repo pants-plugins
+          setup-python-for-plugins: 'true'
+
       - name: Install virtualenv
         run: |
           ./scripts/github/install-virtualenv.sh
@@ -671,7 +809,6 @@ jobs:
           ./scripts/github/configure-rabbitmq.sh
       - name: Print versions
         run: |
-          ./scripts/ci/print-versions.sh
 
       - name: make
         #timeout-minutes: 7
@@ -704,9 +841,16 @@ jobs:
         if: "${{ always() }}"
         run: docker rm --force redis || true
 
+
+      - name: Upload pants log
+        uses: actions/upload-artifact@v2
+        with:
+          name: pants-log-py${{ matrix.python-version }}-nose-${{ matrix.nosetests_node_index }}
+          path: .pants.d/pants.log
+        if: always()  # We want the log even on failures.
+
   slack-notification:
     name: Slack notification for failed master builds
-    if: always()
     needs:
       - lint-checks
       - unit-tests
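A note on the cache-pruning concern repeated in the init-pants comments above: if the pants-setup / pants-named-caches / pants-lmdb-store entries ever push the repository past the 10GB Actions cache budget faster than GitHub's own eviction, one option would be a small scheduled cleanup that drives the Actions cache REST API. This is only a sketch of the idea, not part of this change set; the key prefix and the jq filter are illustrative assumptions:

    # list this repo's Actions caches and delete any lmdb-store entries
    gh api --paginate /repos/StackStorm/st2/actions/caches \
      --jq '.actions_caches[] | select(.key | startswith("pants-lmdb-store")) | .key' |
    while read -r key; do
      gh api -X DELETE "/repos/StackStorm/st2/actions/caches?key=${key}"
    done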
diff --git a/.github/workflows/lint.yaml b/.github/workflows/lint.yaml
index 851cb3e1c0..a7ad7e5bf2 100644
--- a/.github/workflows/lint.yaml
+++ b/.github/workflows/lint.yaml
@@ -72,7 +72,7 @@ jobs:
           gha-cache-key: cache0
           # This hash should include all of our lockfiles so that the pip/pex caches
           # get invalidated on any transitive dependency update.
-          named-caches-hash: ${{ hashFiles('requirements.txt') }}
+          named-caches-hash: ${{ hashFiles('lockfiles/*.lock') }}
           # enable the optional lmdb_store cache since we're not using remote caching.
           cache-lmdb-store: 'true'
           # install whatever version of python we need for our in-repo pants-plugins
diff --git a/.github/workflows/orquesta-integration-tests.yaml b/.github/workflows/orquesta-integration-tests.yaml
index d7f54a4715..bbf9aee09d 100644
--- a/.github/workflows/orquesta-integration-tests.yaml
+++ b/.github/workflows/orquesta-integration-tests.yaml
@@ -121,9 +121,13 @@ jobs:
       # GitHub is juggling how to set vars for multiple shells. Protect our PATH assumptions.
       PATH: /home/runner/.local/bin:/usr/local/sbin:/usr/local/bin:/usr/sbin:/usr/bin:/sbin:/bin
 
+
     steps:
       - name: Checkout repository
         uses: actions/checkout@v4
+        with:
+          # a test uses a submodule, and pants needs access to it to calculate deps.
+          submodules: 'true'
       - name: Custom Environment Setup
         run: |
           ./scripts/github/setup-environment.sh
@@ -131,21 +135,21 @@ jobs:
         uses: actions/setup-python@v5
         with:
           python-version: '${{ matrix.python-version }}'
-      - name: Cache Python Dependencies
-        uses: actions/cache@v4
-        with:
-          path: |
-            ~/.cache/pip
-            virtualenv
-            ~/virtualenv
-          # TODO: maybe make the virtualenv a partial cache to exclude st2*?
-          #  !virtualenv/lib/python*/site-packages/st2*
-          #  !virtualenv/bin/st2*
-          key: ${{ runner.os }}-v5-python-${{ matrix.python-version }}-${{ hashFiles('requirements.txt', 'test-requirements.txt', 'lockfiles/*.lock') }}
-          # Don't use alternative key as if requirements.txt has altered we
-          # don't want to retrieve previous cache
-          #restore-keys: |
-          #  ${{ runner.os }}-v5-python-${{ matrix.python }}-
+      #- name: Cache Python Dependencies
+      #  uses: actions/cache@v4
+      #  with:
+      #    path: |
+      #      ~/.cache/pip
+      #      virtualenv
+      #      ~/virtualenv
+      #    # TODO: maybe make the virtualenv a partial cache to exclude st2*?
+      #    #  !virtualenv/lib/python*/site-packages/st2*
+      #    #  !virtualenv/bin/st2*
+      #    key: ${{ runner.os }}-v5-python-${{ matrix.python-version }}-${{ hashFiles('requirements.txt', 'test-requirements.txt', 'lockfiles/*.lock') }}
+      #    # Don't use alternative key as if requirements.txt has altered we
+      #    # don't want to retrieve previous cache
+      #    #restore-keys: |
+      #    #  ${{ runner.os }}-v5-python-${{ matrix.python }}-
       - name: Cache APT Dependencies
         id: cache-apt-deps
         uses: actions/cache@v4
@@ -162,6 +166,29 @@ jobs:
           # install dev dependencies for Python YAML and LDAP packages
           # https://github.com/StackStorm/st2-auth-ldap
           ./scripts/github/install-apt-packages-use-cache.sh
+
+      - name: Initialize Pants and its GHA caches
+        uses: pantsbuild/actions/init-pants@v6-scie-pants
+        # This action adds an env var to make pants use both pants.ci.toml & pants.toml.
+        # This action also creates 3 GHA caches (1 is optional).
+        # - `pants-setup` has the bootstrapped pants install
+        # - `pants-named-caches` has pip/wheel and PEX caches
+        # - `pants-lmdb-store` has the fine-grained process cache.
+        # If we ever use a remote cache, then we can drop this.
+        # Otherwise, we may need an additional workflow or job to delete old caches
+        # if they are not expiring fast enough, and we hit the GHA 10GB per repo max.
+        with:
+          base-branch: master
+          # To ignore a bad cache, bump the cache* integer.
+          gha-cache-key: cache0
+          # This hash should include all of our lockfiles so that the pip/pex caches
+          # get invalidated on any transitive dependency update.
+          named-caches-hash: ${{ hashFiles('lockfiles/*.lock') }}
+          # enable the optional lmdb_store cache since we're not using remote caching.
+          cache-lmdb-store: 'true'
+          # install whatever version of python we need for our in-repo pants-plugins
+          setup-python-for-plugins: 'true'
+
       - name: Install virtualenv
         run: |
           ./scripts/github/install-virtualenv.sh
@@ -214,21 +241,29 @@ jobs:
       - name: Compress Service Logs Before upload
         if: ${{ failure() }}
         run: |
+          ./tools/launchdev.sh stop  # stop st2 before collecting logs
           tar cvzpf logs.tar.gz logs/*
       - name: Upload StackStorm services Logs
         if: ${{ failure() }}
         uses: actions/upload-artifact@v4
         with:
-          name: logs-py${{ matrix.python-version }}
+          name: logs-py${{ matrix.python-version }}-nose-${{ matrix.nosetests_node_index }}
           path: logs.tar.gz
           retention-days: 7
       - name: Stop Redis Service Container
         if: "${{ always() }}"
         run: docker rm --force redis || true
 
+
+      - name: Upload pants log
+        uses: actions/upload-artifact@v2
+        with:
+          name: pants-log-py${{ matrix.python-version }}-nose-${{ matrix.nosetests_node_index }}
+          path: .pants.d/pants.log
+        if: always()  # We want the log even on failures.
+
   slack-notification:
     name: Slack notification for failed master builds
-    if: always()
     needs:
       - integration-tests
     runs-on: ubuntu-20.04
diff --git a/.github/workflows/pants.yaml b/.github/workflows/pants.yaml
index d695aefc23..3f42391e2b 100644
--- a/.github/workflows/pants.yaml
+++ b/.github/workflows/pants.yaml
@@ -45,7 +45,7 @@ jobs:
           gha-cache-key: cache0-BUILD
           # This hash should include all of our lockfiles so that the pip/pex caches
           # get invalidated on any transitive dependency update.
-          named-caches-hash: ${{ hashFiles('requirements.txt') }}
+          named-caches-hash: ${{ hashFiles('lockfiles/*.lock') }}
           # enable the optional lmdb_store cache since we're not using remote caching.
           cache-lmdb-store: 'true'
           # install whatever version of python we need for our in-repo pants-plugins
diff --git a/.github/workflows/test.yaml b/.github/workflows/test.yaml
index 8d7e94a6b4..4fa6f3608f 100644
--- a/.github/workflows/test.yaml
+++ b/.github/workflows/test.yaml
@@ -117,7 +117,7 @@ jobs:
           gha-cache-key: cache0-py${{ matrix.python-version }}
           # This hash should include all of our lockfiles so that the pip/pex caches
           # get invalidated on any transitive dependency update.
-          named-caches-hash: ${{ hashFiles('requirements.txt') }}
+          named-caches-hash: ${{ hashFiles('lockfiles/*.lock') }}
           # enable the optional lmdb_store cache since we're not using remote caching.
           cache-lmdb-store: 'true'
           # install whatever version of python we need for our in-repo pants-plugins
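For orientation before the Makefile hunks that follow: the PYTHON_VERSION variable the workflows above now export ("the python binary used by the Makefile") is resolved by the Makefile into an interpreter path, and that interpreter's full version selects the pants-exported virtualenv directory. Roughly, in shell terms (a sketch; the 3.9.14 value is only an example):

    PYTHON_VERSION=python3.9      # e.g. 'python${{ matrix.python-version-short }}' in CI
    PYTHON_BINARY=$(which "$PYTHON_VERSION")
    PYTHON_BINARY_VERSION=$("$PYTHON_BINARY" -c 'import sys; print("%d.%d.%d" % sys.version_info[:3])')
    VIRTUALENV_DIR=dist/export/python/virtualenvs/st2/$PYTHON_BINARY_VERSION    # -> .../3.9.14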
diff --git a/Makefile b/Makefile
index bcdf1fbd6f..183e6f8b7a 100644
--- a/Makefile
+++ b/Makefile
@@ -5,12 +5,12 @@ OS := $(shell uname)
 # We separate the OSX X and Linux virtualenvs so we can run in a Docker
 # container (st2devbox) while doing things on our host Mac machine
 ifeq ($(OS),Darwin)
-	VIRTUALENV_DIR ?= virtualenv-osx
+	#VIRTUALENV_DIR ?= virtualenv-osx
 	VIRTUALENV_ST2CLIENT_DIR ?= virtualenv-st2client-osx
 	VIRTUALENV_ST2CLIENT_PYPI_DIR ?= virtualenv-st2client-pypi-osx
 	VIRTUALENV_COMPONENTS_DIR ?= virtualenv-components-osx
 else
-	VIRTUALENV_DIR ?= virtualenv
+	#VIRTUALENV_DIR ?= virtualenv
 	VIRTUALENV_ST2CLIENT_DIR ?= virtualenv-st2client
 	VIRTUALENV_ST2CLIENT_PYPI_DIR ?= virtualenv-st2client-pypi
 	VIRTUALENV_COMPONENTS_DIR ?= virtualenv-components
@@ -18,6 +18,9 @@ endif
 
 # Assign PYTHON_VERSION if it doesn't already exist
 PYTHON_VERSION ?= python3
+PYTHON_BINARY = $(shell which $(PYTHON_VERSION))
+PYTHON_BINARY_VERSION = $(shell $(PYTHON_BINARY) -c 'import sys; print(f"{sys.version_info.major}.{sys.version_info.minor}.{sys.version_info.micro}")')
+VIRTUALENV_DIR = dist/export/python/virtualenvs/st2/$(PYTHON_BINARY_VERSION)
 
 BINARIES := bin
@@ -173,7 +176,7 @@ install-runners:
 	@echo "================== INSTALL RUNNERS ===================="
 	@echo ""
 	# NOTE: We use xargs to speed things up by installing runners in parallel
-	echo -e "$(COMPONENTS_RUNNERS)" | tr -d "\n" | xargs -P $(XARGS_CONCURRENCY) -d " " -n1 -i sh -c ". $(VIRTUALENV_DIR)/bin/activate; cd {} ; python setup.py develop --no-deps"
+#	echo -e "$(COMPONENTS_RUNNERS)" | tr -d "\n" | xargs -P $(XARGS_CONCURRENCY) -d " " -n1 -i sh -c ". $(VIRTUALENV_DIR)/bin/activate; cd {} ; python setup.py develop --no-deps"
 	#@for component in $(COMPONENTS_RUNNERS); do \
 	#	echo "==========================================================="; \
 	#	echo "Installing runner:" $$component; \
@@ -187,7 +190,7 @@ install-mock-runners:
 	@echo "================== INSTALL MOCK RUNNERS ===================="
 	@echo ""
 	# NOTE: We use xargs to speed things up by installing runners in parallel
-	echo -e "$(MOCK_RUNNERS)" | tr -d "\n" | xargs -P $(XARGS_CONCURRENCY) -d " " -n1 -i sh -c ". $(VIRTUALENV_DIR)/bin/activate; cd {} ; python setup.py develop --no-deps"
+#	echo -e "$(MOCK_RUNNERS)" | tr -d "\n" | xargs -P $(XARGS_CONCURRENCY) -d " " -n1 -i sh -c ". $(VIRTUALENV_DIR)/bin/activate; cd {} ; python setup.py develop --no-deps"
 	#@for component in $(MOCK_RUNNERS); do \
 	#	echo "==========================================================="; \
 	#	echo "Installing mock runner:" $$component; \
@@ -646,10 +649,10 @@ distclean: clean
 .PHONY: .sdist-requirements
 .sdist-requirements:
 	# Copy over shared dist utils module which is needed by setup.py
-	@for component in $(COMPONENTS_WITH_RUNNERS); do\
-		cp -f ./scripts/dist_utils.py $$component/dist_utils.py;\
-		scripts/write-headers.sh $$component/dist_utils.py || break;\
-	done
+#	@for component in $(COMPONENTS_WITH_RUNNERS); do\
+#		cp -f ./scripts/dist_utils.py $$component/dist_utils.py;\
+#		scripts/write-headers.sh $$component/dist_utils.py || break;\
+#	done
 
 	# Copy over CHANGELOG.RST, CONTRIBUTING.RST and LICENSE file to each component directory
 	#@for component in $(COMPONENTS_TEST); do\
@@ -660,18 +663,18 @@ distclean: clean
 
 .PHONY: .requirements
 .requirements: virtualenv
-	$(VIRTUALENV_DIR)/bin/pip install --upgrade "pip==$(PIP_VERSION)"
+#	$(VIRTUALENV_DIR)/bin/pip install --upgrade "pip==$(PIP_VERSION)"
 	# Print out pip version
 	$(VIRTUALENV_DIR)/bin/pip --version
 	# Generate all requirements to support current CI pipeline.
-	$(VIRTUALENV_DIR)/bin/python scripts/fixate-requirements.py --skip=virtualenv,virtualenv-osx -s st2*/in-requirements.txt contrib/runners/*/in-requirements.txt -f fixed-requirements.txt -o requirements.txt
+#	$(VIRTUALENV_DIR)/bin/python scripts/fixate-requirements.py --skip=virtualenv,virtualenv-osx -s st2*/in-requirements.txt contrib/runners/*/in-requirements.txt -f fixed-requirements.txt -o requirements.txt
 
 	# Remove any *.egg-info files which polute PYTHONPATH
-	rm -rf *.egg-info*
+#	rm -rf *.egg-info*
 
 	# Generate finall requirements.txt file for each component
 	# NOTE: We use xargs to speed things up by running commands in parallel
-	echo -e "$(COMPONENTS_WITH_RUNNERS)" | tr -d "\n" | xargs -P $(XARGS_CONCURRENCY) -d " " -n1 -i sh -c "$(VIRTUALENV_DIR)/bin/python scripts/fixate-requirements.py --skip=virtualenv,virtualenv-osx -s {}/in-requirements.txt -f fixed-requirements.txt -o {}/requirements.txt"
+#	echo -e "$(COMPONENTS_WITH_RUNNERS)" | tr -d "\n" | xargs -P $(XARGS_CONCURRENCY) -d " " -n1 -i sh -c "$(VIRTUALENV_DIR)/bin/python scripts/fixate-requirements.py --skip=virtualenv,virtualenv-osx -s {}/in-requirements.txt -f fixed-requirements.txt -o {}/requirements.txt"
 
 	#@for component in $(COMPONENTS_WITH_RUNNERS); do\
 	#	echo "==========================================================="; \
@@ -693,40 +696,41 @@ requirements: virtualenv .requirements .sdist-requirements install-runners insta
 	# Note: Use the verison of virtualenv pinned in fixed-requirements.txt so we
 	# only have to update it one place when we change the version
-	$(VIRTUALENV_DIR)/bin/pip install --upgrade $(shell grep "^virtualenv" fixed-requirements.txt)
-	$(VIRTUALENV_DIR)/bin/pip install --upgrade "setuptools==$(SETUPTOOLS_VERSION)" # workaround for pbr issue
+#	$(VIRTUALENV_DIR)/bin/pip install --upgrade $(shell grep "^virtualenv" fixed-requirements.txt)
+#	$(VIRTUALENV_DIR)/bin/pip install --upgrade "setuptools==$(SETUPTOOLS_VERSION)" # workaround for pbr issue
 
 	# Install requirements
-	for req in $(REQUIREMENTS); do \
-		echo "Installing $$req..." ; \
-		$(VIRTUALENV_DIR)/bin/pip install $(PIP_OPTIONS) -r $$req ; \
-	done
+#	for req in $(REQUIREMENTS); do \
+#		echo "Installing $$req..." ; \
+#		$(VIRTUALENV_DIR)/bin/pip install $(PIP_OPTIONS) -r $$req ; \
+#	done
 
 	# Install st2common package to load drivers defined in st2common setup.py
 	# NOTE: We pass --no-deps to the script so we don't install all the
 	# package dependencies which are already installed as part of "requirements"
 	# make targets. This speeds up the build
-	(cd ${ROOT_DIR}/st2common; ${ROOT_DIR}/$(VIRTUALENV_DIR)/bin/python setup.py develop --no-deps)
+#	(cd ${ROOT_DIR}/st2common; ${ROOT_DIR}/$(VIRTUALENV_DIR)/bin/python setup.py develop --no-deps)
 
 	# Install st2common to register metrics drivers
 	# NOTE: We pass --no-deps to the script so we don't install all the
 	# package dependencies which are already installed as part of "requirements"
 	# make targets. This speeds up the build
-	(cd ${ROOT_DIR}/st2common; ${ROOT_DIR}/$(VIRTUALENV_DIR)/bin/python setup.py develop --no-deps)
+#	(cd ${ROOT_DIR}/st2common; ${ROOT_DIR}/$(VIRTUALENV_DIR)/bin/python setup.py develop --no-deps)
 
 	# Install st2auth to register SSO drivers
 	# NOTE: We pass --no-deps to the script so we don't install all the
 	# package dependencies which are already installed as part of "requirements"
 	# make targets. This speeds up the build
-	(cd ${ROOT_DIR}/st2auth; ${ROOT_DIR}/$(VIRTUALENV_DIR)/bin/python setup.py develop --no-deps)
+#	(cd ${ROOT_DIR}/st2auth; ${ROOT_DIR}/$(VIRTUALENV_DIR)/bin/python setup.py develop --no-deps)
 
 	# Some of the tests rely on submodule so we need to make sure submodules are check out
 	git submodule update --init --recursive --remote
+	git submodule foreach --recursive git fetch --tags --verbose
 
 	# Show currently install requirements
-	echo ""
-	$(VIRTUALENV_DIR)/bin/pip list
-	echo ""
+#	echo ""
+#	$(VIRTUALENV_DIR)/bin/pip list
+#	echo ""
 
 .PHONY: check-dependency-conflicts
 check-dependency-conflicts:
@@ -745,27 +749,32 @@ virtualenv:
 	@echo
 	@echo "==================== virtualenv ===================="
 	@echo
-	test -f $(VIRTUALENV_DIR)/bin/activate || $(PYTHON_VERSION) -m venv $(VIRTUALENV_DIR)
+#	test -f $(VIRTUALENV_DIR)/bin/activate || $(PYTHON_VERSION) -m venv $(VIRTUALENV_DIR)
+	test -f $(VIRTUALENV_DIR)/bin/activate || pants export --resolve=st2 \
+		--python-bootstrap-search-path=[] --python-bootstrap-search-path=$(PYTHON_BINARY)
+	# workaround pants+pex default of hermetic scripts so we can run nosetests with PYTHONPATH
+	# sed -i -e 's/^#!\(.*\) -sE$$/#!\1 -s/' $(VIRTUALENV_DIR)/bin/*
+	test -L virtualenv || (rm -rf virtualenv && ln -s $(VIRTUALENV_DIR) virtualenv)
 
 	# Setup PYTHONPATH in bash activate script...
 	# Delete existing entries (if any)
-ifeq ($(OS),Darwin)
-	echo 'Setting up virtualenv on $(OS)...'
-	sed -i '' '/_OLD_PYTHONPATHp/d' $(VIRTUALENV_DIR)/bin/activate
-	sed -i '' '/PYTHONPATH=/d' $(VIRTUALENV_DIR)/bin/activate
-	sed -i '' '/export PYTHONPATH/d' $(VIRTUALENV_DIR)/bin/activate
-else
-	echo 'Setting up virtualenv on $(OS)...'
-	sed -i '/_OLD_PYTHONPATHp/d' $(VIRTUALENV_DIR)/bin/activate
-	sed -i '/PYTHONPATH=/d' $(VIRTUALENV_DIR)/bin/activate
-	sed -i '/export PYTHONPATH/d' $(VIRTUALENV_DIR)/bin/activate
-endif
-
-	echo '_OLD_PYTHONPATH=$$PYTHONPATH' >> $(VIRTUALENV_DIR)/bin/activate
-	#echo 'PYTHONPATH=$$_OLD_PYTHONPATH:$(COMPONENT_PYTHONPATH)' >> $(VIRTUALENV_DIR)/bin/activate
-	echo 'PYTHONPATH=${ROOT_DIR}:$(COMPONENT_PYTHONPATH)' >> $(VIRTUALENV_DIR)/bin/activate
-	echo 'export PYTHONPATH' >> $(VIRTUALENV_DIR)/bin/activate
-	touch $(VIRTUALENV_DIR)/bin/activate
+#ifeq ($(OS),Darwin)
+#	echo 'Setting up virtualenv on $(OS)...'
+#	sed -i '' '/_OLD_PYTHONPATHp/d' $(VIRTUALENV_DIR)/bin/activate
+#	sed -i '' '/PYTHONPATH=/d' $(VIRTUALENV_DIR)/bin/activate
+#	sed -i '' '/export PYTHONPATH/d' $(VIRTUALENV_DIR)/bin/activate
+#else
+#	echo 'Setting up virtualenv on $(OS)...'
+#	sed -i '/_OLD_PYTHONPATHp/d' $(VIRTUALENV_DIR)/bin/activate
+#	sed -i '/PYTHONPATH=/d' $(VIRTUALENV_DIR)/bin/activate
+#	sed -i '/export PYTHONPATH/d' $(VIRTUALENV_DIR)/bin/activate
+#endif
+
+#	echo '_OLD_PYTHONPATH=$$PYTHONPATH' >> $(VIRTUALENV_DIR)/bin/activate
+#	#echo 'PYTHONPATH=$$_OLD_PYTHONPATH:$(COMPONENT_PYTHONPATH)' >> $(VIRTUALENV_DIR)/bin/activate
+#	echo 'PYTHONPATH=${ROOT_DIR}:$(COMPONENT_PYTHONPATH)' >> $(VIRTUALENV_DIR)/bin/activate
+#	echo 'export PYTHONPATH' >> $(VIRTUALENV_DIR)/bin/activate
+#	touch $(VIRTUALENV_DIR)/bin/activate
 
 	# Setup PYTHONPATH in fish activate script...
 	#echo '' >> $(VIRTUALENV_DIR)/bin/activate.fish
@@ -1052,7 +1061,7 @@ packs-tests: requirements .packs-tests
 	@echo "==================== packs-tests ===================="
 	@echo
 	# Install st2common to register metrics drivers
-	(cd ${ROOT_DIR}/st2common; ${ROOT_DIR}/$(VIRTUALENV_DIR)/bin/python setup.py develop --no-deps)
+#	(cd ${ROOT_DIR}/st2common; ${ROOT_DIR}/$(VIRTUALENV_DIR)/bin/python setup.py develop --no-deps)
 	. $(VIRTUALENV_DIR)/bin/activate; find ${ROOT_DIR}/contrib/* -maxdepth 0 -type d -print0 | xargs -0 -I FILENAME ./st2common/bin/st2-run-pack-tests -c -t -x -p FILENAME
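With the virtualenv target rewritten above, `make virtualenv` no longer creates the venv with `$(PYTHON_VERSION) -m venv`; it has pants export one and then points the legacy `virtualenv/` path at it. Done by hand, the equivalent would be roughly as follows (a sketch; the 3.9.14 directory carries over from the earlier example interpreter):

    pants export --resolve=st2 \
      --python-bootstrap-search-path=[] \
      --python-bootstrap-search-path="$(which python3.9)"
    ln -sfn dist/export/python/virtualenvs/st2/3.9.14 virtualenv
    . virtualenv/bin/activate && pip --version    # sanity check, mirrors the Makefile's pip --version step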
diff --git a/lockfiles/st2-constraints.txt b/lockfiles/st2-constraints.txt
index 3f5bfdc03e..1a7df078e9 100644
--- a/lockfiles/st2-constraints.txt
+++ b/lockfiles/st2-constraints.txt
@@ -74,7 +74,7 @@ dnspython>=1.16.0,<2.0.0
 # greenlet 3 adds py3.12 support, drops py3.6 support, fixes various crash conditions
 # NOTE: If constrained, bump carefully. Tests seem to be passing without this constraint.
 # DROPS RESOLVED VERSION: 1.1.3.post0
-#greenlet==1.0.0
+#greenlet<2
 
 # REQUIRED BY: argcomplete, click, debtcollector, kombu, pluggy, prettytable,
 #              pytest, virtualenv
diff --git a/requirements-pants.txt b/requirements-pants.txt
index bf3b6472e9..3ef3dda76e 100644
--- a/requirements-pants.txt
+++ b/requirements-pants.txt
@@ -24,6 +24,20 @@ gunicorn
 jinja2
 jsonpath-rw
 jsonschema>=3,<4
+# Kombu 5.1:
+#  - dropped py2
+#  - increase pickle protocol from 2 to 4 (only affects serialization. deserialization detects version)
+#  - from 5.1 up to at least 5.3.5 use pickle protocol 4
+#  - switches login_method from AMQPLAIN to PLAIN (rabbitmq supports both by default, AMQPLAIN only for backwards compat)
+#  - adds named_tuple_as_object=False to simplejson.dump kwargs
+# Kombu 5.2:
+#  - dropped py3.6
+#  - drops support for librabbitmq
+#  - switches from simplejson to json
+# Kombu 5.3:
+#  - dropped py3.7
+#  - drops simplejson support
+#kombu>=5.0,<5.1
 kombu
 lockfile
 mock
diff --git a/scripts/github/prepare-integration.sh b/scripts/github/prepare-integration.sh
index a9011bc080..7667211c00 100755
--- a/scripts/github/prepare-integration.sh
+++ b/scripts/github/prepare-integration.sh
@@ -27,7 +27,7 @@ cat conf/st2.ci.conf || true
 echo ""
 
 # install st2 client
-python ./st2client/setup.py develop
+#python ./st2client/setup.py develop
 st2 --version
 
 # Clean up old st2 log files
diff --git a/setup.cfg b/setup.cfg
index c585334f84..304ced707c 100644
--- a/setup.cfg
+++ b/setup.cfg
@@ -1,2 +1,3 @@
 [nosetests]
-logging-level=INFO
+logging-level=DEBUG
+detailed-errors=1
diff --git a/st2common/bin/st2-run-pack-tests b/st2common/bin/st2-run-pack-tests
index e358aa1e63..511004321d 100755
--- a/st2common/bin/st2-run-pack-tests
+++ b/st2common/bin/st2-run-pack-tests
@@ -322,7 +322,7 @@ fi
 echo "Running tests..."
 # Note: We run nosetests with "--exe" option so it also runs test files which are executable
 # (pack install command automatically makes all the files, including test files executable)
-NOSE_OPTS=(-s -v --exe --rednose --immediate)
+NOSE_OPTS=(-s -v --exe --rednose --immediate --detailed-error --logging-level=DEBUG)
 
 # Is test coverage reporting enabled?
 if [ "${ENABLE_COVERAGE}" = true ]; then
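For the nose changes at the end: with setup.cfg now setting logging-level=DEBUG and detailed-errors=1, and st2-run-pack-tests passing the matching options in NOSE_OPTS, pack test runs surface DEBUG logs and detailed assertion errors by default. A single pack can still be exercised the same way the packs-tests Makefile target does it, for example (the pack path is only an illustration):

    . virtualenv/bin/activate
    st2common/bin/st2-run-pack-tests -c -t -x -p contrib/packs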